@arela/uploader 0.2.13 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.template +66 -0
- package/README.md +263 -62
- package/docs/API_ENDPOINTS_FOR_DETECTION.md +647 -0
- package/docs/QUICK_REFERENCE_API_DETECTION.md +264 -0
- package/docs/REFACTORING_SUMMARY_DETECT_PEDIMENTOS.md +200 -0
- package/package.json +3 -2
- package/scripts/cleanup-ds-store.js +109 -0
- package/scripts/cleanup-system-files.js +69 -0
- package/scripts/tests/phase-7-features.test.js +415 -0
- package/scripts/tests/signal-handling.test.js +275 -0
- package/scripts/tests/smart-watch-integration.test.js +554 -0
- package/scripts/tests/watch-service-integration.test.js +584 -0
- package/src/commands/UploadCommand.js +31 -4
- package/src/commands/WatchCommand.js +1342 -0
- package/src/config/config.js +270 -2
- package/src/document-type-shared.js +2 -0
- package/src/document-types/support-document.js +200 -0
- package/src/file-detection.js +9 -1
- package/src/index.js +163 -4
- package/src/services/AdvancedFilterService.js +505 -0
- package/src/services/AutoProcessingService.js +749 -0
- package/src/services/BenchmarkingService.js +381 -0
- package/src/services/DatabaseService.js +1019 -539
- package/src/services/ErrorMonitor.js +275 -0
- package/src/services/LoggingService.js +419 -1
- package/src/services/MonitoringService.js +401 -0
- package/src/services/PerformanceOptimizer.js +511 -0
- package/src/services/ReportingService.js +511 -0
- package/src/services/SignalHandler.js +255 -0
- package/src/services/SmartWatchDatabaseService.js +527 -0
- package/src/services/WatchService.js +783 -0
- package/src/services/upload/ApiUploadService.js +447 -3
- package/src/services/upload/MultiApiUploadService.js +233 -0
- package/src/services/upload/SupabaseUploadService.js +12 -5
- package/src/services/upload/UploadServiceFactory.js +24 -0
- package/src/utils/CleanupManager.js +262 -0
- package/src/utils/FileOperations.js +44 -0
- package/src/utils/WatchEventHandler.js +522 -0
- package/supabase/migrations/001_create_initial_schema.sql +366 -0
- package/supabase/migrations/002_align_with_arela_api_schema.sql +145 -0
- package/.envbackup +0 -37
- package/SUPABASE_UPLOAD_FIX.md +0 -157
- package/commands.md +0 -14
|
@@ -0,0 +1,749 @@
|
|
|
1
|
+
import { appConfig } from '../config/config.js';
|
|
2
|
+
import logger from './LoggingService.js';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* AutoProcessingService - Handles automatic processing workflow for newly detected files
|
|
6
|
+
* Executes the 4-step processing pipeline:
|
|
7
|
+
* 1. Stats collection (stats --stats-only) - uses sourceApi in cross-tenant mode
|
|
8
|
+
* 2. PDF/Pedimento detection (detect --detect-pdfs) - uses sourceApi in cross-tenant mode
|
|
9
|
+
* 3. Arela path propagation (detect --propagate-arela-path) - uses sourceApi in cross-tenant mode
|
|
10
|
+
* 4. RFC-based upload with folder structure (upload --upload-by-rfc --folder-structure) - uses targetApi in cross-tenant mode
|
|
11
|
+
*
|
|
12
|
+
* In cross-tenant mode (--source-api and --target-api):
|
|
13
|
+
* - Phases 1-3 write/update to the SOURCE API
|
|
14
|
+
* - Phase 4 uploads files to the TARGET API
|
|
15
|
+
*
|
|
16
|
+
* In single API mode (--api):
|
|
17
|
+
* - All phases use the same API
|
|
18
|
+
*/
|
|
19
|
+
export class AutoProcessingService {
  constructor() {
    // True while a pipeline run is in flight; used to reject concurrent requests.
    this.isProcessing = false;
    // Aggregate counters across all pipeline runs (see getStats / resetStats).
    this.processingStats = {
      totalProcessed: 0,
      successfulSteps: 0,
      failedSteps: 0,
      errors: [],
    };
    this.errorTracker = new Map(); // Track errors by directory
    this.processingTimeout = null; // Fail-safe timer that auto-resets the processing flag
    this.processingTimeoutDuration = 30000; // 30 seconds max per pipeline run
    this.lastProcessedFile = null; // Track last file to avoid duplicates
    this.processingDebounceMs = 500; // Debounce delay
  }

  /**
   * Execute the complete 4-step processing pipeline
   * @param {Object} options - Configuration options
   * @param {string} options.filePath - Path of the newly detected file
   * @param {string} options.watchDir - Watch directory being monitored
   * @param {string} options.folderStructure - Folder structure for upload
   * @param {number} options.batchSize - Batch size for processing
   * @returns {Promise<Object>} Result of the processing pipeline
   */
  async executeProcessingPipeline(options = {}) {
    const { filePath, watchDir, folderStructure, batchSize = 10 } = options;

    const pipelineId = this.#generatePipelineId();
    const startTime = Date.now();

    logger.debug(`🚀 Pipeline ${pipelineId} started for: ${filePath}`);

    // Prevent concurrent processing with debounce
    if (this.isProcessing) {
      // Check if this is a duplicate of the last processed file
      if (this.lastProcessedFile === filePath) {
        logger.debug(
          `⏭️ Skipping duplicate file request (same file within ${this.processingDebounceMs}ms): ${filePath}`,
        );
        return {
          success: false,
          reason: 'Duplicate file request (debounced)',
          pipelineId,
        };
      }

      logger.warn(
        '⚠️ Processing pipeline already running, skipping new request',
      );
      return {
        success: false,
        reason: 'Pipeline already processing',
        pipelineId,
      };
    }

    this.isProcessing = true;
    this.lastProcessedFile = filePath;

    // Set automatic timeout to reset processing flag (fail-safe)
    this.#setProcessingTimeout();
    const results = {
      pipelineId,
      filePath,
      watchDir,
      folderStructure,
      steps: {
        statsOnly: { status: 'pending', error: null },
        detectPdfs: { status: 'pending', error: null },
        propagateArelaPath: { status: 'pending', error: null },
        uploadByRfc: { status: 'pending', error: null },
      },
      summary: {},
      totalDuration: 0,
    };

    try {
      // Determine API target for phases 1-3 (write operations).
      // In cross-tenant mode: use sourceApi for phases 1-3.
      // In single API mode: use activeTarget (null when it is the default).
      const isCrossTenant = appConfig.isCrossTenantMode();
      const sourceApiTarget = isCrossTenant
        ? appConfig.api.sourceTarget
        : appConfig.api.activeTarget !== 'default'
          ? appConfig.api.activeTarget
          : null;

      if (isCrossTenant) {
        logger.debug(
          `🔄 Cross-tenant mode: phases 1-3 will use ${appConfig.api.sourceTarget}, phase 4 will use ${appConfig.api.targetTarget}`,
        );
      } else if (sourceApiTarget) {
        logger.debug(
          `🎯 Single API mode: all phases will use ${sourceApiTarget}`,
        );
      }

      // Step 1: Run stats collection (uses sourceApi in cross-tenant mode)
      logger.debug(`📊 [Step 1/4] Stats collection...`);
      results.steps.statsOnly = await this.#executeStatsOnly({
        filePath,
        watchDir,
        batchSize,
        apiTarget: sourceApiTarget,
      });

      if (results.steps.statsOnly.status === 'failed') {
        logger.debug(`Stats failed: ${results.steps.statsOnly.error}`);
        this.#trackError(watchDir, 'statsOnly', results.steps.statsOnly.error);
        // isProcessing reset and timeout cleanup happen in the finally block.
        results.summary = this.#generateSummary(results);
        results.totalDuration = Date.now() - startTime;
        return results;
      }

      // Step 2: Run PDF detection (uses sourceApi in cross-tenant mode)
      logger.debug(`🔍 [Step 2/4] PDF detection...`);
      results.steps.detectPdfs = await this.#executeDetectPdfs({
        batchSize,
        apiTarget: sourceApiTarget,
      });

      if (results.steps.detectPdfs.status === 'failed') {
        logger.debug(`PDF detection failed: ${results.steps.detectPdfs.error}`);
        this.#trackError(
          watchDir,
          'detectPdfs',
          results.steps.detectPdfs.error,
        );
        results.summary = this.#generateSummary(results);
        results.totalDuration = Date.now() - startTime;
        return results;
      }

      // Step 3: Propagate Arela path (uses sourceApi in cross-tenant mode)
      logger.debug(`🔄 [Step 3/4] Arela path propagation...`);
      results.steps.propagateArelaPath = await this.#executePropagateArelaPath({
        apiTarget: sourceApiTarget,
      });

      if (results.steps.propagateArelaPath.status === 'failed') {
        logger.debug(
          `Arela path propagation failed: ${results.steps.propagateArelaPath.error}`,
        );
        this.#trackError(
          watchDir,
          'propagateArelaPath',
          results.steps.propagateArelaPath.error,
        );
        // Don't stop pipeline on this step, as it might succeed partially
      }

      // Step 4: Upload by RFC with folder structure.
      // In cross-tenant mode: uses sourceApi for reading, targetApi for uploading.
      // In single API mode: uses the same API for both.
      logger.debug(`📤 [Step 4/4] RFC upload...`);
      results.steps.uploadByRfc = await this.#executeUploadByRfc({
        batchSize,
        folderStructure,
      });

      if (results.steps.uploadByRfc.status === 'failed') {
        logger.debug(`RFC upload failed: ${results.steps.uploadByRfc.error}`);
        this.#trackError(
          watchDir,
          'uploadByRfc',
          results.steps.uploadByRfc.error,
        );
      }

      results.summary = this.#generateSummary(results);
      results.totalDuration = Date.now() - startTime;

      // Only surface a summary line at info level on success
      if (results.summary.success) {
        logger.info(`✅ Pipeline completed: ${watchDir}`);
      }

      return results;
    } catch (error) {
      logger.error(`Fatal error in pipeline: ${error.message}`);
      this.#trackError(watchDir, 'fatal', error.message);
      results.summary = {
        success: false,
        message: `Fatal error: ${error.message}`,
      };
      results.totalDuration = Date.now() - startTime;
      return results;
    } finally {
      this.isProcessing = false;
      // FIX: clear the fail-safe timer so it cannot linger (or fire) after
      // the run has already finished.
      this.#clearProcessingTimeout();
      // FIX: fold this run's outcome into the aggregate counters that
      // getStats() exposes (previously declared but never updated).
      this.#recordRunStats(results);
    }
  }

  /**
   * Fold one pipeline run's per-step outcomes into the aggregate counters.
   * @private
   * @param {Object} results - Pipeline results object (steps map populated)
   * @returns {void}
   */
  #recordRunStats(results) {
    this.processingStats.totalProcessed++;
    const steps = Object.values(results.steps);
    this.processingStats.successfulSteps += steps.filter(
      (step) => step.status === 'success',
    ).length;
    const failedSteps = steps.filter((step) => step.status === 'failed');
    this.processingStats.failedSteps += failedSteps.length;
    for (const step of failedSteps) {
      this.processingStats.errors.push(step.error);
    }
  }

  /**
   * Execute stats collection (Step 1).
   * Scans the entire directory containing the detected file, not just the file itself.
   * @private
   * @param {Object} options - Options for stats collection
   * @param {string} options.filePath - Newly detected file (required)
   * @param {string} options.watchDir - Watch directory (required)
   * @param {string} [options.apiTarget] - API target (cross-tenant or single API mode)
   * @returns {Promise<Object>} Result of stats collection
   */
  async #executeStatsOnly(options = {}) {
    const stepStartTime = Date.now();
    try {
      // Import DatabaseService to collect stats directly.
      // This bypasses UploadCommand which has watch mode restrictions.
      const databaseService = (await import('./DatabaseService.js')).default;
      const FileOperations = (await import('../utils/FileOperations.js'))
        .default;
      const fs = (await import('fs')).default;
      const path = (await import('path')).default;

      logger.debug(
        `[AutoProcessingService] Executing stats collection for watch mode`,
      );

      const { filePath, watchDir } = options;

      if (!filePath || !watchDir) {
        throw new Error(
          'filePath and watchDir are required for stats collection',
        );
      }

      // Wait for file to be fully written (with retries)
      let fileStats = null;
      let attempts = 0;
      const maxAttempts = 10; // 10 attempts * 100ms = 1 second maximum wait
      const retryDelay = 100; // milliseconds

      while (attempts < maxAttempts && !fileStats) {
        try {
          if (fs.existsSync(filePath)) {
            fileStats = FileOperations.getFileStats(filePath);
          } else {
            attempts++;
            if (attempts < maxAttempts) {
              logger.debug(
                `File not yet ready (attempt ${attempts}/${maxAttempts}), waiting...`,
              );
              await new Promise((resolve) => setTimeout(resolve, retryDelay));
            }
          }
        } catch (error) {
          attempts++;
          if (attempts < maxAttempts) {
            logger.debug(
              `Error reading file stats (attempt ${attempts}/${maxAttempts}): ${error.message}, retrying...`,
            );
            await new Promise((resolve) => setTimeout(resolve, retryDelay));
          } else {
            throw error;
          }
        }
      }

      if (!fileStats) {
        throw new Error(
          `File not found after ${maxAttempts} retries: ${filePath}`,
        );
      }

      // Get the parent directory of the detected file
      const parentDir = path.dirname(filePath);
      const fileObjects = [];

      // First, add the detected file
      fileObjects.push({
        path: filePath,
        originalName: path.basename(filePath),
        stats: fileStats,
      });

      // Then, scan the entire parent directory for related files
      logger.debug(`🔍 Scanning directory for related files: ${parentDir}`);
      const relatedFiles = FileOperations.listFilesInDirectory(parentDir, {
        excludePattern: /(^|[\/\\])\.|node_modules|\.git/,
        onlyPdf: false,
      });

      // Add all related files (except the one already detected)
      for (const relatedFilePath of relatedFiles) {
        // Skip the file already detected and .DS_Store
        if (
          relatedFilePath === filePath ||
          relatedFilePath.endsWith('.DS_Store')
        ) {
          continue;
        }

        try {
          const relatedStats = FileOperations.getFileStats(relatedFilePath);
          if (relatedStats) {
            fileObjects.push({
              path: relatedFilePath,
              originalName: path.basename(relatedFilePath),
              stats: relatedStats,
            });
          }
        } catch (error) {
          // Best-effort: unreadable siblings are skipped, not fatal.
          logger.debug(
            `Could not read stats for related file: ${relatedFilePath}`,
          );
        }
      }

      logger.debug(
        `📂 Found ${fileObjects.length} file(s) to process (1 detected + ${fileObjects.length - 1} related)`,
      );

      // Insert into database directly, using the specified API target
      const insertOptions = {
        quietMode: true, // Suppress verbose logging when in auto-pipeline
      };

      // Pass apiTarget if specified (for cross-tenant or single API mode)
      if (options.apiTarget) {
        insertOptions.apiTarget = options.apiTarget;
        logger.debug(
          `[AutoProcessingService] Stats using API target: ${options.apiTarget}`,
        );
      }

      const result = await databaseService.insertStatsOnlyToUploaderTable(
        fileObjects,
        insertOptions,
      );

      const duration = Date.now() - stepStartTime;
      logger.info(`✅ Stats collection completed in ${duration}ms`);

      return {
        status: 'success',
        duration,
        result: {
          totalInserted: result.totalInserted,
          totalSkipped: result.totalSkipped,
        },
        error: null,
      };
    } catch (error) {
      logger.error(
        `[AutoProcessingService] Stats collection error: ${error.message}`,
      );
      return {
        status: 'failed',
        duration: Date.now() - stepStartTime,
        error: error.message,
        result: null,
      };
    }
  }

  /**
   * Execute PDF detection (Step 2)
   * @private
   * @param {Object} options - Options for PDF detection
   * @param {number} [options.batchSize=10] - Batch size for detection
   * @param {string} [options.apiTarget] - API target to use (for cross-tenant or single API mode)
   * @returns {Promise<Object>} Result of PDF detection
   */
  async #executeDetectPdfs(options = {}) {
    const stepStartTime = Date.now();
    try {
      // Import databaseService singleton instance
      const databaseService = (await import('./DatabaseService.js')).default;

      logger.debug(
        `[AutoProcessingService] Executing PDF detection with batch size: ${options.batchSize}`,
      );

      // Pass apiTarget if specified
      const detectOptions = {
        batchSize: options.batchSize || 10,
      };

      if (options.apiTarget) {
        detectOptions.apiTarget = options.apiTarget;
        logger.debug(
          `[AutoProcessingService] PDF detection using API target: ${options.apiTarget}`,
        );
      }

      const result =
        await databaseService.detectPedimentosInDatabase(detectOptions);

      const duration = Date.now() - stepStartTime;
      logger.info(`✅ PDF detection completed in ${duration}ms`);
      logger.info(
        `  📊 Results: ${result.detectedCount} detected, ${result.processedCount} processed`,
      );

      return {
        status: 'success',
        duration,
        result,
        error: null,
      };
    } catch (error) {
      logger.error(
        `[AutoProcessingService] PDF detection error: ${error.message}`,
      );
      return {
        status: 'failed',
        duration: Date.now() - stepStartTime,
        error: error.message,
        result: null,
      };
    }
  }

  /**
   * Execute arela_path propagation (Step 3)
   * @private
   * @param {Object} options - Options for propagation
   * @param {string} [options.apiTarget] - API target to use (for cross-tenant or single API mode)
   * @returns {Promise<Object>} Result of arela_path propagation
   */
  async #executePropagateArelaPath(options = {}) {
    const stepStartTime = Date.now();
    try {
      // Import databaseService singleton instance
      const databaseService = (await import('./DatabaseService.js')).default;

      logger.debug(`[AutoProcessingService] Executing arela_path propagation`);

      // Pass apiTarget if specified
      const propagateOptions = {
        showProgress: true,
      };

      if (options.apiTarget) {
        propagateOptions.apiTarget = options.apiTarget;
        logger.debug(
          `[AutoProcessingService] Propagation using API target: ${options.apiTarget}`,
        );
      }

      const result = await databaseService.propagateArelaPath(propagateOptions);

      const duration = Date.now() - stepStartTime;
      logger.info(`✅ Arela path propagation completed in ${duration}ms`);
      logger.info(
        `  📊 Results: ${result.processedCount} processed, ${result.updatedCount} updated`,
      );

      return {
        status: 'success',
        duration,
        result,
        error: null,
      };
    } catch (error) {
      logger.error(
        `[AutoProcessingService] Arela path propagation error: ${error.message}`,
      );
      return {
        status: 'failed',
        duration: Date.now() - stepStartTime,
        error: error.message,
        result: null,
      };
    }
  }

  /**
   * Execute RFC-based upload with folder structure (Step 4)
   * @private
   * @param {Object} options - Options for RFC upload
   * @param {string} options.folderStructure - Folder structure for organization
   * @param {number} [options.batchSize=10] - Batch size for upload
   * @returns {Promise<Object>} Result of RFC upload
   */
  async #executeUploadByRfc(options = {}) {
    const stepStartTime = Date.now();
    try {
      // Import databaseService singleton instance
      const databaseService = (await import('./DatabaseService.js')).default;

      // Build upload options, including cross-tenant config if set globally
      const uploadOptions = {
        batchSize: options.batchSize || 10,
        showProgress: true,
        folderStructure: options.folderStructure,
      };

      // Check if cross-tenant mode is enabled globally and pass it to uploadFilesByRfc
      if (appConfig.isCrossTenantMode()) {
        // Cross-tenant mode: source API for reading, target API for uploading
        uploadOptions.sourceApi = appConfig.api.sourceTarget;
        uploadOptions.targetApi = appConfig.api.targetTarget;
        logger.debug(
          `[AutoProcessingService] Cross-tenant upload: source=${uploadOptions.sourceApi}, target=${uploadOptions.targetApi}`,
        );
      } else if (
        appConfig.api.activeTarget &&
        appConfig.api.activeTarget !== 'default'
      ) {
        // Single API mode: use the same API for both reading and uploading
        uploadOptions.apiTarget = appConfig.api.activeTarget;
        logger.debug(
          `[AutoProcessingService] Single API upload: target=${uploadOptions.apiTarget}`,
        );
      }

      logger.debug(
        `[AutoProcessingService] Executing RFC-based upload with options:`,
        {
          folderStructure: options.folderStructure,
          batchSize: options.batchSize,
          sourceApi: uploadOptions.sourceApi,
          targetApi: uploadOptions.targetApi,
          apiTarget: uploadOptions.apiTarget,
        },
      );

      const result = await databaseService.uploadFilesByRfc(uploadOptions);

      const duration = Date.now() - stepStartTime;
      logger.info(`✅ RFC upload completed in ${duration}ms`);
      logger.info(
        `  📊 Results: ${result.processedCount} processed, ${result.uploadedCount} uploaded`,
      );

      return {
        status: 'success',
        duration,
        result,
        error: null,
      };
    } catch (error) {
      logger.error(
        `[AutoProcessingService] RFC upload error: ${error.message}`,
      );
      return {
        status: 'failed',
        duration: Date.now() - stepStartTime,
        error: error.message,
        result: null,
      };
    }
  }

  /**
   * Generate a unique pipeline ID for tracking
   * @private
   * @returns {string} Pipeline ID
   */
  #generatePipelineId() {
    // FIX: substr() is deprecated; slice() is the supported equivalent.
    return `pipeline-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
  }

  /**
   * Generate processing summary
   * @private
   * @param {Object} results - Processing results
   * @returns {Object} Summary object
   */
  #generateSummary(results) {
    const steps = Object.entries(results.steps);
    const successful = steps.filter(
      ([_, step]) => step.status === 'success',
    ).length;
    const failed = steps.filter(([_, step]) => step.status === 'failed').length;

    let message = '';
    if (failed === 0) {
      message = `✅ All 4 steps completed successfully!`;
    } else if (successful > 0) {
      message = `⚠️ Completed with issues: ${successful}/4 steps successful, ${failed} failed`;
    } else {
      // FIX: the old message claimed "failed at step 1" even when multiple
      // steps had run; report the actual failure count instead.
      message = `❌ Processing failed: ${failed} step(s) failed, none succeeded`;
    }

    return {
      success: failed === 0,
      totalSteps: steps.length,
      successfulSteps: successful,
      failedSteps: failed,
      message,
      details: steps.reduce((acc, [stepName, stepResult]) => {
        acc[stepName] = {
          status: stepResult.status,
          duration: stepResult.duration,
          error: stepResult.error,
        };
        return acc;
      }, {}),
    };
  }

  /**
   * Get processing statistics
   * @returns {Object} Processing statistics plus the current in-flight flag
   */
  getStats() {
    return {
      ...this.processingStats,
      isProcessing: this.isProcessing,
    };
  }

  /**
   * Reset processing statistics
   * @returns {void}
   */
  resetStats() {
    this.processingStats = {
      totalProcessed: 0,
      successfulSteps: 0,
      failedSteps: 0,
      errors: [],
    };
    this.errorTracker.clear();
  }

  /**
   * Track errors by directory for summary reporting
   * @private
   * @param {string} watchDir - Watch directory
   * @param {string} step - Failed step
   * @param {string} error - Error message
   */
  #trackError(watchDir, step, error) {
    if (!this.errorTracker.has(watchDir)) {
      this.errorTracker.set(watchDir, {
        count: 0,
        steps: new Map(),
        lastError: null,
      });
    }

    const dirErrors = this.errorTracker.get(watchDir);
    dirErrors.count++;
    dirErrors.lastError = error;

    dirErrors.steps.set(step, (dirErrors.steps.get(step) ?? 0) + 1);
  }

  /**
   * Get error summary by directory
   * @returns {Object} Errors grouped by directory
   */
  getErrorSummary() {
    const summary = {};

    for (const [dir, errors] of this.errorTracker.entries()) {
      const stepDetails = {};
      for (const [step, count] of errors.steps.entries()) {
        stepDetails[step] = count;
      }

      summary[dir] = {
        totalErrors: errors.count,
        steps: stepDetails,
        lastError: errors.lastError,
      };
    }

    return summary;
  }

  /**
   * Print error summary
   * @returns {void}
   */
  printErrorSummary() {
    const summary = this.getErrorSummary();

    if (Object.keys(summary).length === 0) {
      logger.info('✅ No errors detected');
      return;
    }

    const separator = '═'.repeat(55);
    logger.info(`\n${separator}`);
    logger.info('📋 ERROR SUMMARY BY DIRECTORY');
    logger.info(separator);

    for (const [dir, errors] of Object.entries(summary)) {
      logger.info(`\n📁 ${dir}`);
      logger.info(`  └ Total errors: ${errors.totalErrors}`);

      for (const [step, count] of Object.entries(errors.steps)) {
        logger.info(`    • ${step}: ${count} failure(s)`);
      }

      logger.info(`  Last error: ${errors.lastError}`);
    }

    logger.info(`\n${separator}\n`);
  }

  /**
   * Set automatic timeout to reset processing flag (fail-safe)
   * @private
   * @returns {void}
   */
  #setProcessingTimeout() {
    this.#clearProcessingTimeout();
    this.processingTimeout = setTimeout(() => {
      if (this.isProcessing) {
        logger.warn(
          '⚠️ Processing pipeline timeout - forcing reset after ' +
            `${this.processingTimeoutDuration}ms`,
        );
        this.isProcessing = false;
        this.lastProcessedFile = null;
      }
    }, this.processingTimeoutDuration);
  }

  /**
   * Clear processing timeout
   * @private
   * @returns {void}
   */
  #clearProcessingTimeout() {
    if (this.processingTimeout) {
      clearTimeout(this.processingTimeout);
      this.processingTimeout = null;
    }
  }
}
|
|
746
|
+
|
|
747
|
+
// A single shared instance so every consumer coordinates on one
// pipeline state (isProcessing flag, stats, error tracker).
const sharedAutoProcessingService = new AutoProcessingService();
export default sharedAutoProcessingService;
|