@arela/uploader 0.2.12 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.template +66 -0
- package/.vscode/settings.json +1 -0
- package/README.md +134 -58
- package/SUPABASE_UPLOAD_FIX.md +157 -0
- package/package.json +3 -2
- package/scripts/cleanup-ds-store.js +109 -0
- package/scripts/cleanup-system-files.js +69 -0
- package/scripts/tests/phase-7-features.test.js +415 -0
- package/scripts/tests/signal-handling.test.js +275 -0
- package/scripts/tests/smart-watch-integration.test.js +554 -0
- package/scripts/tests/watch-service-integration.test.js +584 -0
- package/src/commands/UploadCommand.js +36 -2
- package/src/commands/WatchCommand.js +1305 -0
- package/src/config/config.js +113 -0
- package/src/document-type-shared.js +2 -0
- package/src/document-types/support-document.js +201 -0
- package/src/file-detection.js +2 -1
- package/src/index.js +44 -0
- package/src/services/AdvancedFilterService.js +505 -0
- package/src/services/AutoProcessingService.js +639 -0
- package/src/services/BenchmarkingService.js +381 -0
- package/src/services/DatabaseService.js +723 -170
- package/src/services/ErrorMonitor.js +275 -0
- package/src/services/LoggingService.js +419 -1
- package/src/services/MonitoringService.js +401 -0
- package/src/services/PerformanceOptimizer.js +511 -0
- package/src/services/ReportingService.js +511 -0
- package/src/services/SignalHandler.js +255 -0
- package/src/services/SmartWatchDatabaseService.js +527 -0
- package/src/services/WatchService.js +783 -0
- package/src/services/upload/ApiUploadService.js +30 -4
- package/src/services/upload/SupabaseUploadService.js +28 -6
- package/src/utils/CleanupManager.js +262 -0
- package/src/utils/FileOperations.js +41 -0
- package/src/utils/WatchEventHandler.js +517 -0
- package/supabase/migrations/001_create_initial_schema.sql +366 -0
- package/supabase/migrations/002_align_with_arela_api_schema.sql +145 -0
- package/commands.md +0 -6
package/src/services/AutoProcessingService.js
@@ -0,0 +1,639 @@
import logger from './LoggingService.js';

/**
 * AutoProcessingService - Handles automatic processing workflow for newly detected files
 * Executes the 4-step processing pipeline:
 * 1. Stats collection (stats --stats-only)
 * 2. PDF/Pedimento detection (detect --detect-pdfs)
 * 3. Arela path propagation (detect --propagate-arela-path)
 * 4. RFC-based upload with folder structure (upload --upload-by-rfc --folder-structure)
 */
export class AutoProcessingService {
  constructor() {
    this.isProcessing = false;
    this.processingStats = {
      totalProcessed: 0,
      successfulSteps: 0,
      failedSteps: 0,
      errors: [],
    };
    this.errorTracker = new Map(); // Track errors by directory
    this.processingTimeout = null; // Auto-reset processing flag
    this.processingTimeoutDuration = 30000; // 30 seconds max
    this.lastProcessedFile = null; // Track last file to avoid duplicates
    this.processingDebounceMs = 500; // Debounce delay
  }
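
  // Concurrency guards (descriptive note): `isProcessing` serializes pipeline
  // runs, `lastProcessedFile` plus `processingDebounceMs` drop duplicate
  // events for the same file, and `processingTimeout` force-resets the flag
  // after 30 s so a hung run cannot block the watcher indefinitely.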

  /**
   * Execute the complete 4-step processing pipeline
   * @param {Object} options - Configuration options
   * @param {string} options.filePath - Path of the newly detected file
   * @param {string} options.watchDir - Watch directory being monitored
   * @param {string} options.folderStructure - Folder structure for upload
   * @param {number} options.batchSize - Batch size for processing
   * @returns {Promise<Object>} Result of the processing pipeline
   */
  async executeProcessingPipeline(options = {}) {
    const { filePath, watchDir, folderStructure, batchSize = 10 } = options;

    const pipelineId = this.#generatePipelineId();
    const startTime = Date.now();

    logger.debug(`🚀 Pipeline ${pipelineId} started for: ${filePath}`);

    // Prevent concurrent processing with debounce
    if (this.isProcessing) {
      // Check if this is a duplicate of the last processed file
      if (this.lastProcessedFile === filePath) {
        logger.debug(
          `⏭️ Skipping duplicate file request (same file within ${this.processingDebounceMs}ms): ${filePath}`,
        );
        return {
          success: false,
          reason: 'Duplicate file request (debounced)',
          pipelineId,
        };
      }

      logger.warn(
        '⚠️ Processing pipeline already running, skipping new request',
      );
      return {
        success: false,
        reason: 'Pipeline already processing',
        pipelineId,
      };
    }

    this.isProcessing = true;
    this.lastProcessedFile = filePath;

    // Set automatic timeout to reset processing flag (fail-safe)
    this.#setProcessingTimeout();
    const results = {
      pipelineId,
      filePath,
      watchDir,
      folderStructure,
      steps: {
        statsOnly: { status: 'pending', error: null },
        detectPdfs: { status: 'pending', error: null },
        propagateArelaPath: { status: 'pending', error: null },
        uploadByRfc: { status: 'pending', error: null },
      },
      summary: {},
      totalDuration: 0,
    };

    try {
      // Step 1: Run stats collection
      logger.debug(`📊 [Step 1/4] Stats collection...`);
      results.steps.statsOnly = await this.#executeStatsOnly({
        filePath,
        watchDir,
        batchSize,
      });

      if (results.steps.statsOnly.status === 'failed') {
        logger.debug(`Stats failed: ${results.steps.statsOnly.error}`);
        this.#trackError(watchDir, 'statsOnly', results.steps.statsOnly.error);
        this.isProcessing = false;
        results.summary = this.#generateSummary(results);
        results.totalDuration = Date.now() - startTime;
        return results;
      }

      // Step 2: Run PDF detection
      logger.debug(`🔍 [Step 2/4] PDF detection...`);
      results.steps.detectPdfs = await this.#executeDetectPdfs({
        batchSize,
      });

      if (results.steps.detectPdfs.status === 'failed') {
        logger.debug(`PDF detection failed: ${results.steps.detectPdfs.error}`);
        this.#trackError(
          watchDir,
          'detectPdfs',
          results.steps.detectPdfs.error,
        );
        this.isProcessing = false;
        results.summary = this.#generateSummary(results);
        results.totalDuration = Date.now() - startTime;
        return results;
      }

      // Step 3: Propagate Arela path
      logger.debug(`🔄 [Step 3/4] Arela path propagation...`);
      results.steps.propagateArelaPath =
        await this.#executePropagateArelaPath();

      if (results.steps.propagateArelaPath.status === 'failed') {
        logger.debug(
          `Arela path propagation failed: ${results.steps.propagateArelaPath.error}`,
        );
        this.#trackError(
          watchDir,
          'propagateArelaPath',
          results.steps.propagateArelaPath.error,
        );
        // Don't stop the pipeline on this step, as it might succeed partially
      }

      // Step 4: Upload by RFC with folder structure
      logger.debug(`📤 [Step 4/4] RFC upload...`);
      results.steps.uploadByRfc = await this.#executeUploadByRfc({
        batchSize,
        folderStructure,
      });

      if (results.steps.uploadByRfc.status === 'failed') {
        logger.debug(`RFC upload failed: ${results.steps.uploadByRfc.error}`);
        this.#trackError(
          watchDir,
          'uploadByRfc',
          results.steps.uploadByRfc.error,
        );
      }

      results.summary = this.#generateSummary(results);
      results.totalDuration = Date.now() - startTime;

      // Only print the summary at info level
      if (results.summary.success) {
        logger.info(`✅ Pipeline completed: ${watchDir}`);
      }

      return results;
    } catch (error) {
      logger.error(`Fatal error in pipeline: ${error.message}`);
      this.#trackError(watchDir, 'fatal', error.message);
      results.summary = {
        success: false,
        message: `Fatal error: ${error.message}`,
      };
      results.totalDuration = Date.now() - startTime;
      return results;
    } finally {
      this.isProcessing = false;
    }
  }
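
  // Failure semantics, as implemented above: a failure in step 1 or 2 aborts
  // the pipeline early, a failure in step 3 is recorded but processing
  // continues (propagation may partially succeed), and a failure in step 4 is
  // recorded before the summary is built either way.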

  /**
   * Execute stats collection (Step 1)
   * Scans the entire directory containing the detected file, not just the file itself
   * @private
   * @param {Object} options - Options for stats collection
   * @returns {Promise<Object>} Result of stats collection
   */
  async #executeStatsOnly(options = {}) {
    let stepStartTime = Date.now();
    try {
      // Import DatabaseService to collect stats directly
      // This bypasses UploadCommand which has watch mode restrictions
      const databaseService = (await import('./DatabaseService.js')).default;
      const FileOperations = (await import('../utils/FileOperations.js')).default;
      const fs = (await import('fs')).default;
      const path = (await import('path')).default;

      logger.debug(`[AutoProcessingService] Executing stats collection for watch mode`);

      // Get files from the watch directory
      const { filePath, watchDir } = options;

      if (!filePath || !watchDir) {
        throw new Error('filePath and watchDir are required for stats collection');
      }

      // Wait for file to be fully written (with retries)
      let fileStats = null;
      let attempts = 0;
      const maxAttempts = 10; // 10 attempts * 100ms = 1 second maximum wait
      const retryDelay = 100; // milliseconds

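      // The polling below checks fs.existsSync up to maxAttempts times,
      // sleeping retryDelay ms between attempts (about 1 s total), so files
      // still being written by the OS or a copy operation can settle before
      // their stats are read.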
      while (attempts < maxAttempts && !fileStats) {
        try {
          if (fs.existsSync(filePath)) {
            fileStats = FileOperations.getFileStats(filePath);
          } else {
            attempts++;
            if (attempts < maxAttempts) {
              logger.debug(`File not yet ready (attempt ${attempts}/${maxAttempts}), waiting...`);
              await new Promise((resolve) => setTimeout(resolve, retryDelay));
            }
          }
        } catch (error) {
          attempts++;
          if (attempts < maxAttempts) {
            logger.debug(`Error reading file stats (attempt ${attempts}/${maxAttempts}): ${error.message}, retrying...`);
            await new Promise((resolve) => setTimeout(resolve, retryDelay));
          } else {
            throw error;
          }
        }
      }

      if (!fileStats) {
        throw new Error(`File not found after ${maxAttempts} retries: ${filePath}`);
      }

      // Get the parent directory of the detected file
      const parentDir = path.dirname(filePath);
      const fileObjects = [];

      // First, add the detected file
      fileObjects.push({
        path: filePath,
        originalName: path.basename(filePath),
        stats: fileStats,
      });

      // Then, scan the entire parent directory for related files
      logger.debug(`🔍 Scanning directory for related files: ${parentDir}`);
      const relatedFiles = FileOperations.listFilesInDirectory(parentDir, {
        excludePattern: /(^|[\/\\])\.|node_modules|\.git/,
        onlyPdf: false,
      });

      // Add all related files (except the one already detected)
      for (const relatedFilePath of relatedFiles) {
        // Skip the file already detected and .DS_Store
        if (relatedFilePath === filePath || relatedFilePath.endsWith('.DS_Store')) {
          continue;
        }

        try {
          const relatedStats = FileOperations.getFileStats(relatedFilePath);
          if (relatedStats) {
            fileObjects.push({
              path: relatedFilePath,
              originalName: path.basename(relatedFilePath),
              stats: relatedStats,
            });
          }
        } catch (error) {
          logger.debug(`Could not read stats for related file: ${relatedFilePath}`);
        }
      }

      logger.debug(`📊 Found ${fileObjects.length} file(s) to process (1 detected + ${fileObjects.length - 1} related)`);

      // Insert into database directly
      const result = await databaseService.insertStatsOnlyToUploaderTable(
        fileObjects,
        {
          quietMode: true, // Suppress verbose logging when in auto-pipeline
        },
      );

      const duration = Date.now() - stepStartTime;
      logger.info(`✅ Stats collection completed in ${duration}ms`);

      return {
        status: 'success',
        duration,
        result: {
          totalInserted: result.totalInserted,
          totalSkipped: result.totalSkipped,
        },
        error: null,
      };
    } catch (error) {
      logger.error(
        `[AutoProcessingService] Stats collection error: ${error.message}`,
      );
      return {
        status: 'failed',
        duration: Date.now() - stepStartTime,
        error: error.message,
        result: null,
      };
    }
  }

  /**
   * Execute PDF detection (Step 2)
   * @private
   * @param {Object} options - Options for PDF detection
   * @returns {Promise<Object>} Result of PDF detection
   */
  async #executeDetectPdfs(options = {}) {
    let stepStartTime = Date.now();
    try {
      // Import databaseService singleton instance
      const databaseService = (await import('./DatabaseService.js')).default;

      logger.debug(
        `[AutoProcessingService] Executing PDF detection with batch size: ${options.batchSize}`,
      );

      const result = await databaseService.detectPedimentosInDatabase({
        batchSize: options.batchSize || 10,
      });

      const duration = Date.now() - stepStartTime;
      logger.info(`✅ PDF detection completed in ${duration}ms`);
      logger.info(
        `   📊 Results: ${result.detectedCount} detected, ${result.processedCount} processed`,
      );

      return {
        status: 'success',
        duration,
        result,
        error: null,
      };
    } catch (error) {
      logger.error(
        `[AutoProcessingService] PDF detection error: ${error.message}`,
      );
      return {
        status: 'failed',
        duration: Date.now() - stepStartTime,
        error: error.message,
        result: null,
      };
    }
  }

  /**
   * Execute arela_path propagation (Step 3)
   * @private
   * @returns {Promise<Object>} Result of arela_path propagation
   */
  async #executePropagateArelaPath() {
    let stepStartTime = Date.now();
    try {
      // Import databaseService singleton instance
      const databaseService = (await import('./DatabaseService.js')).default;

      logger.debug(`[AutoProcessingService] Executing arela_path propagation`);

      const result = await databaseService.propagateArelaPath({
        showProgress: true,
      });

      const duration = Date.now() - stepStartTime;
      logger.info(`✅ Arela path propagation completed in ${duration}ms`);
      logger.info(
        `   📊 Results: ${result.processedCount} processed, ${result.updatedCount} updated`,
      );

      return {
        status: 'success',
        duration,
        result,
        error: null,
      };
    } catch (error) {
      logger.error(
        `[AutoProcessingService] Arela path propagation error: ${error.message}`,
      );
      return {
        status: 'failed',
        duration: Date.now() - stepStartTime,
        error: error.message,
        result: null,
      };
    }
  }

  /**
   * Execute RFC-based upload with folder structure (Step 4)
   * @private
   * @param {Object} options - Options for RFC upload
   * @param {string} options.folderStructure - Folder structure for organization
   * @param {number} options.batchSize - Batch size for upload
   * @returns {Promise<Object>} Result of RFC upload
   */
  async #executeUploadByRfc(options = {}) {
    let stepStartTime = Date.now();
    try {
      // Import databaseService singleton instance
      const databaseService = (await import('./DatabaseService.js')).default;

      logger.debug(
        `[AutoProcessingService] Executing RFC-based upload with options:`,
        {
          folderStructure: options.folderStructure,
          batchSize: options.batchSize,
        },
      );

      const result = await databaseService.uploadFilesByRfc({
        batchSize: options.batchSize || 10,
        showProgress: true,
        folderStructure: options.folderStructure,
      });

      const duration = Date.now() - stepStartTime;
      logger.info(`✅ RFC upload completed in ${duration}ms`);
      logger.info(
        `   📊 Results: ${result.processedCount} processed, ${result.uploadedCount} uploaded`,
      );

      return {
        status: 'success',
        duration,
        result,
        error: null,
      };
    } catch (error) {
      logger.error(
        `[AutoProcessingService] RFC upload error: ${error.message}`,
      );
      return {
        status: 'failed',
        duration: Date.now() - stepStartTime,
        error: error.message,
        result: null,
      };
    }
  }
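
  // Each of the four step executors above resolves to the same shape:
  //   { status: 'success' | 'failed', duration: <ms>, result: <Object|null>, error: <string|null> }
  // which is what #generateSummary consumes below.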

  /**
   * Generate a unique pipeline ID for tracking
   * @private
   * @returns {string} Pipeline ID
   */
  #generatePipelineId() {
    return `pipeline-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
  }
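
  // Example of the ID format (illustrative values only):
  //   'pipeline-1718000000000-k3v9q0z1a'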

  /**
   * Generate processing summary
   * @private
   * @param {Object} results - Processing results
   * @returns {Object} Summary object
   */
  #generateSummary(results) {
    const steps = Object.entries(results.steps);
    const successful = steps.filter(
      ([_, step]) => step.status === 'success',
    ).length;
    const failed = steps.filter(([_, step]) => step.status === 'failed').length;

    let message = '';
    if (failed === 0) {
      message = `✅ All 4 steps completed successfully!`;
    } else if (successful > 0) {
      message = `⚠️ Completed with issues: ${successful}/4 steps successful, ${failed} failed`;
    } else {
      message = `❌ Processing failed at step 1`;
    }

    return {
      success: failed === 0,
      totalSteps: steps.length,
      successfulSteps: successful,
      failedSteps: failed,
      message,
      details: steps.reduce((acc, [stepName, stepResult]) => {
        acc[stepName] = {
          status: stepResult.status,
          duration: stepResult.duration,
          error: stepResult.error,
        };
        return acc;
      }, {}),
    };
  }
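
  // Illustrative summary for a run where only step 3 failed (values are
  // examples, not captured output):
  //   { success: false, totalSteps: 4, successfulSteps: 3, failedSteps: 1,
  //     message: '⚠️ Completed with issues: 3/4 steps successful, 1 failed',
  //     details: { statsOnly: {...}, detectPdfs: {...},
  //                propagateArelaPath: {...}, uploadByRfc: {...} } }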

  /**
   * Get processing statistics
   * @returns {Object} Processing statistics
   */
  getStats() {
    return {
      ...this.processingStats,
      isProcessing: this.isProcessing,
    };
  }

  /**
   * Reset processing statistics
   * @returns {void}
   */
  resetStats() {
    this.processingStats = {
      totalProcessed: 0,
      successfulSteps: 0,
      failedSteps: 0,
      errors: [],
    };
    this.errorTracker.clear();
  }

  /**
   * Track errors by directory for summary reporting
   * @private
   * @param {string} watchDir - Watch directory
   * @param {string} step - Failed step
   * @param {string} error - Error message
   */
  #trackError(watchDir, step, error) {
    if (!this.errorTracker.has(watchDir)) {
      this.errorTracker.set(watchDir, {
        count: 0,
        steps: new Map(),
        lastError: null,
      });
    }

    const dirErrors = this.errorTracker.get(watchDir);
    dirErrors.count++;
    dirErrors.lastError = error;

    if (!dirErrors.steps.has(step)) {
      dirErrors.steps.set(step, 0);
    }
    dirErrors.steps.set(step, dirErrors.steps.get(step) + 1);
  }
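
  // errorTracker layout built by #trackError:
  //   Map<watchDir, { count: number, steps: Map<stepName, failureCount>, lastError: string }>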

  /**
   * Get error summary by directory
   * @returns {Object} Errors grouped by directory
   */
  getErrorSummary() {
    const summary = {};

    for (const [dir, errors] of this.errorTracker.entries()) {
      const stepDetails = {};
      for (const [step, count] of errors.steps.entries()) {
        stepDetails[step] = count;
      }

      summary[dir] = {
        totalErrors: errors.count,
        steps: stepDetails,
        lastError: errors.lastError,
      };
    }

    return summary;
  }

  /**
   * Print error summary
   * @returns {void}
   */
  printErrorSummary() {
    const summary = this.getErrorSummary();

    if (Object.keys(summary).length === 0) {
      logger.info('✅ No errors detected');
      return;
    }

    logger.info('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
    logger.info('📊 ERROR SUMMARY BY DIRECTORY');
    logger.info('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');

    for (const [dir, errors] of Object.entries(summary)) {
      logger.info(`\n📁 ${dir}`);
      logger.info(`   └ Total errors: ${errors.totalErrors}`);

      for (const [step, count] of Object.entries(errors.steps)) {
        logger.info(`      • ${step}: ${count} failure(s)`);
      }

      logger.info(`   Last error: ${errors.lastError}`);
    }

    logger.info('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');
  }

  /**
   * Set automatic timeout to reset processing flag (fail-safe)
   * @private
   * @returns {void}
   */
  #setProcessingTimeout() {
    this.#clearProcessingTimeout();
    this.processingTimeout = setTimeout(() => {
      if (this.isProcessing) {
        logger.warn(
          '⚠️ Processing pipeline timeout - forcing reset after ' +
            `${this.processingTimeoutDuration}ms`,
        );
        this.isProcessing = false;
        this.lastProcessedFile = null;
      }
    }, this.processingTimeoutDuration);
  }
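
  // Note: the pipeline's finally block clears `isProcessing` but not the
  // timer itself, so the callback above may still fire after a normal run;
  // it is a no-op in that case because `isProcessing` is already false.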

  /**
   * Clear processing timeout
   * @private
   * @returns {void}
   */
  #clearProcessingTimeout() {
    if (this.processingTimeout) {
      clearTimeout(this.processingTimeout);
      this.processingTimeout = null;
    }
  }
}

// Export singleton instance
const autoProcessingService = new AutoProcessingService();
export default autoProcessingService;
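
For orientation, a minimal caller sketch (not part of the diff) showing how the pipeline entry point and the error-reporting helpers fit together. The import path, file paths, and folderStructure value are invented for illustration, and the sketch assumes an ESM context where top-level await is available.

```js
// Hypothetical usage sketch, not from the package source.
import autoProcessingService from './src/services/AutoProcessingService.js';

const results = await autoProcessingService.executeProcessingPipeline({
  filePath: '/watched/acme/pedimento-0001.pdf', // hypothetical detected file
  watchDir: '/watched/acme', // hypothetical watch directory
  folderStructure: 'rfc/year/month', // hypothetical structure value
  batchSize: 10,
});

// Duplicate events for the same file return { success: false, reason: ... }
// instead of re-running the pipeline; failed runs can be inspected via the
// error summary helpers.
if (!results.summary?.success) {
  autoProcessingService.printErrorSummary();
}
```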