@arela/uploader 0.2.4 → 0.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -1
- package/src/commands/UploadCommand.js +388 -0
- package/src/config/config.js +173 -0
- package/src/errors/ErrorHandler.js +271 -0
- package/src/errors/ErrorTypes.js +104 -0
- package/src/index-old.js +2650 -0
- package/src/index.js +248 -2594
- package/src/services/DatabaseService.js +953 -0
- package/src/services/LoggingService.js +194 -0
- package/src/services/upload/ApiUploadService.js +147 -0
- package/src/services/upload/BaseUploadService.js +36 -0
- package/src/services/upload/SupabaseUploadService.js +107 -0
- package/src/services/upload/UploadServiceFactory.js +68 -0
- package/src/utils/FileOperations.js +148 -0
- package/src/utils/FileSanitizer.js +99 -0
- package/src/utils/PathDetector.js +196 -0
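
The headline change is a restructuring: the monolithic src/index.js (preserved as src/index-old.js) is split into a command handler, a config module, error types, services, and utilities. For orientation only, below is a hypothetical sketch of how the slimmed-down entry point might wire the new UploadCommand into commander (a listed dependency). The real src/index.js (+248 lines) is not reproduced in this diff, so the flag names and wiring here are assumptions, not the package's actual code.

// Hypothetical wiring sketch; flag names are illustrative, not taken from the package.
import { Command } from 'commander';
import { UploadCommand } from './commands/UploadCommand.js';
import appConfig from './config/config.js';

const program = new Command();

program
  .name('arela')
  .version(appConfig.packageVersion)
  .option('--batch-size <n>', 'files per batch')              // assumed flag
  .option('--stats-only', 'record stats without uploading')   // assumed flag
  .option('--force-supabase', 'bypass the API and upload directly to Supabase')
  .action(async (options) => {
    // commander camel-cases flags, matching the option names UploadCommand reads.
    await new UploadCommand().execute(options);
  });

program.parse(process.argv);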
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@arela/uploader",
-  "version": "0.2.4",
+  "version": "0.2.5",
   "description": "CLI to upload files/directories to Arela",
   "bin": {
     "arela": "./src/index.js"
@@ -33,6 +33,7 @@
     "commander": "13.1.0",
     "dotenv": "16.5.0",
     "form-data": "4.0.4",
+    "formdata-node": "^6.0.3",
     "globby": "14.1.0",
     "mime-types": "3.0.1",
     "node-fetch": "3.3.2",
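
Besides the version bump, the only change shown in package.json is the new formdata-node dependency, added alongside the existing form-data. A minimal sketch of one plausible use: building a spec-compliant multipart body with formdata-node and sending it with node-fetch (both are dependencies). The '/upload' endpoint and the 'file' field name are assumptions for illustration, not taken from the package; the file object shape mirrors the one UploadCommand builds.

// Hedged sketch, not the package's actual upload code.
import { readFile } from 'node:fs/promises';
import { FormData, File } from 'formdata-node';
import fetch from 'node-fetch';

async function uploadOne({ path, name, contentType }, apiUrl, token) {
  // Wrap the file bytes in a spec-compliant File and append it to a FormData body.
  const form = new FormData();
  form.append('file', new File([await readFile(path)], name, { type: contentType }));

  const res = await fetch(`${apiUrl}/upload`, {   // illustrative endpoint
    method: 'POST',
    headers: { Authorization: `Bearer ${token}` },
    body: form,
  });
  if (!res.ok) throw new Error(`Upload failed with HTTP ${res.status}`);
  return res.json();
}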
package/src/commands/UploadCommand.js
ADDED

@@ -0,0 +1,388 @@
import cliProgress from 'cli-progress';
import { globby } from 'globby';
import mime from 'mime-types';
import path from 'path';

import appConfig from '../config/config.js';
import ErrorHandler from '../errors/ErrorHandler.js';
import { ConfigurationError, FileOperationError } from '../errors/ErrorTypes.js';
import databaseService from '../services/DatabaseService.js';
import logger from '../services/LoggingService.js';
import uploadServiceFactory from '../services/upload/UploadServiceFactory.js';
import FileOperations from '../utils/FileOperations.js';
import fileSanitizer from '../utils/FileSanitizer.js';
import pathDetector from '../utils/PathDetector.js';

/**
 * Upload Command Handler
 * Handles the main upload functionality
 */
export class UploadCommand {
  constructor() {
    this.errorHandler = new ErrorHandler(logger);
  }

  /**
   * Execute the upload command
   * @param {Object} options - Command options
   */
  async execute(options) {
    try {
      // Validate configuration
      this.#validateOptions(options);

      // Initialize services
      const uploadService = await uploadServiceFactory.getUploadService(options.forceSupabase);
      const sources = appConfig.getUploadSources();
      const basePath = appConfig.getBasePath();

      // Log command start
      logger.info(`Starting upload with ${uploadService.getServiceName()}`);

      if (options.clearLog) {
        logger.clearLogFile();
        logger.info('Log file cleared');
      }

      // Process each source
      let globalResults = {
        successCount: 0,
        detectedCount: 0,
        organizedCount: 0,
        failureCount: 0,
        skippedCount: 0,
      };

      for (const source of sources) {
        const sourcePath = path.resolve(basePath, source).replace(/\\/g, '/');
        logger.info(`Processing folder: ${sourcePath}`);

        try {
          const files = await this.#discoverFiles(sourcePath);
          logger.info(`Found ${files.length} files to process`);

          const result = await this.#processFilesInBatches(
            files,
            options,
            uploadService,
            basePath,
            source,
            sourcePath
          );

          this.#updateGlobalResults(globalResults, result);
          this.#logSourceSummary(source, result, options);
        } catch (error) {
          this.errorHandler.handleError(error, { source, sourcePath });
          globalResults.failureCount++;
        }
      }

      this.#logFinalSummary(globalResults, options, uploadService);

      // Handle additional phases if requested
      if (options.runAllPhases && options.statsOnly) {
        await this.#runAdditionalPhases(options);
      }

    } catch (error) {
      this.errorHandler.handleFatalError(error, { command: 'upload', options });
    }
  }

  /**
   * Validate command options
   * @private
   * @param {Object} options - Options to validate
   */
  #validateOptions(options) {
    try {
      appConfig.validateConfiguration(options.forceSupabase);
    } catch (error) {
      throw new ConfigurationError(error.message);
    }

    if (options.batchSize && (options.batchSize < 1 || options.batchSize > 100)) {
      throw new ConfigurationError('Batch size must be between 1 and 100');
    }
  }

  /**
   * Discover files in a source path
   * @private
   * @param {string} sourcePath - Path to discover files in
   * @returns {Promise<string[]>} Array of file paths
   */
  async #discoverFiles(sourcePath) {
    try {
      if (!FileOperations.fileExists(sourcePath)) {
        throw new FileOperationError(`Source path does not exist: ${sourcePath}`);
      }

      const stats = FileOperations.getFileStats(sourcePath);

      if (stats?.isDirectory()) {
        return await globby([`${sourcePath}/**/*`], { onlyFiles: true });
      } else {
        return [sourcePath];
      }
    } catch (error) {
      throw new FileOperationError(`Failed to discover files in ${sourcePath}`, sourcePath, { originalError: error.message });
    }
  }

  /**
   * Process files in batches
   * @private
   * @param {string[]} files - Files to process
   * @param {Object} options - Processing options
   * @param {Object} uploadService - Upload service instance
   * @param {string} basePath - Base path
   * @param {string} source - Source name
   * @param {string} sourcePath - Source path
   * @returns {Promise<Object>} Processing results
   */
  async #processFilesInBatches(files, options, uploadService, basePath, source, sourcePath) {
    const batchSize = parseInt(options.batchSize) || 10;
    const results = {
      successCount: 0,
      detectedCount: 0,
      organizedCount: 0,
      failureCount: 0,
      skippedCount: 0,
    };

    // Get processed paths if available
    const processedPaths = options.skipProcessed ? databaseService.getProcessedPaths() : new Set();

    // Create progress bar
    const progressBar = new cliProgress.SingleBar({
      format: `📤 ${source} |{bar}| {percentage}% | {value}/{total} | Success: {success} | Errors: {errors}`,
      barCompleteChar: '█',
      barIncompleteChar: '░',
      hideCursor: true,
    });

    progressBar.start(files.length, 0, { success: 0, errors: 0 });

    // Process files in batches
    for (let i = 0; i < files.length; i += batchSize) {
      const batch = files.slice(i, i + batchSize);

      try {
        const batchResult = await this.#processBatch(
          batch,
          options,
          uploadService,
          basePath,
          processedPaths
        );

        this.#updateResults(results, batchResult);

        progressBar.update(Math.min(i + batchSize, files.length), {
          success: results.successCount,
          errors: results.failureCount
        });

        // Delay between batches if configured
        if (appConfig.performance.batchDelay > 0) {
          await new Promise(resolve => setTimeout(resolve, appConfig.performance.batchDelay));
        }
      } catch (error) {
        this.errorHandler.handleError(error, { batch: i / batchSize + 1, batchSize });
        results.failureCount += batch.length;
      }
    }

    progressBar.stop();
    return results;
  }

  /**
   * Process a batch of files
   * @private
   * @param {string[]} batch - Files in this batch
   * @param {Object} options - Processing options
   * @param {Object} uploadService - Upload service
   * @param {string} basePath - Base path
   * @param {Set} processedPaths - Already processed paths
   * @returns {Promise<Object>} Batch results
   */
  async #processBatch(batch, options, uploadService, basePath, processedPaths) {
    const batchResults = {
      successCount: 0,
      detectedCount: 0,
      organizedCount: 0,
      failureCount: 0,
      skippedCount: 0,
    };

    if (options.statsOnly) {
      // Stats-only mode: just record file information
      const fileObjects = batch.map(filePath => ({
        path: filePath,
        originalName: path.basename(filePath),
        stats: FileOperations.getFileStats(filePath),
      }));

      try {
        const result = await databaseService.insertStatsOnlyToUploaderTable(fileObjects, options);
        batchResults.successCount = result.totalInserted;
        batchResults.skippedCount = result.totalSkipped;
      } catch (error) {
        throw new Error(`Failed to insert stats: ${error.message}`);
      }
    } else {
      // Upload mode: process files for upload
      for (const filePath of batch) {
        try {
          await this.#processFile(filePath, options, uploadService, basePath, processedPaths, batchResults);
        } catch (error) {
          this.errorHandler.handleError(error, { filePath });
          batchResults.failureCount++;
        }
      }
    }

    return batchResults;
  }

  /**
   * Process a single file
   * @private
   */
  async #processFile(filePath, options, uploadService, basePath, processedPaths, batchResults) {
    // Skip if already processed
    if (processedPaths.has(filePath)) {
      batchResults.skippedCount++;
      return;
    }

    // Prepare file for upload
    const sanitizedName = fileSanitizer.sanitizeFileName(path.basename(filePath));
    const pathInfo = pathDetector.extractYearAndPedimentoFromPath(filePath, basePath);

    let uploadPath = sanitizedName;
    if (pathInfo.detected && options.autoDetectStructure) {
      uploadPath = `${pathInfo.year}/${pathInfo.pedimento}/${sanitizedName}`;
    }

    const fileObject = {
      path: filePath,
      name: sanitizedName,
      contentType: this.#getMimeType(filePath),
    };

    // Upload based on service type
    if (uploadService.getServiceName() === 'Arela API') {
      const result = await uploadService.upload([fileObject], {
        ...options,
        uploadPath,
      });

      batchResults.successCount++;
      if (result.detectedCount) batchResults.detectedCount += result.detectedCount;
      if (result.organizedCount) batchResults.organizedCount += result.organizedCount;
    } else {
      // Supabase direct upload
      await uploadService.upload([fileObject], { uploadPath });
      batchResults.successCount++;
    }

    logger.info(`SUCCESS: ${path.basename(filePath)} -> ${uploadPath}`);
  }

  /**
   * Get MIME type for file
   * @private
   */
  #getMimeType(filePath) {
    return mime.lookup(filePath) || 'application/octet-stream';
  }

  /**
   * Update results object
   * @private
   */
  #updateResults(target, source) {
    target.successCount += source.successCount;
    target.detectedCount += source.detectedCount;
    target.organizedCount += source.organizedCount;
    target.failureCount += source.failureCount;
    target.skippedCount += source.skippedCount;
  }

  /**
   * Update global results
   * @private
   */
  #updateGlobalResults(global, source) {
    this.#updateResults(global, source);
  }

  /**
   * Log source summary
   * @private
   */
  #logSourceSummary(source, result, options) {
    console.log(`\n📦 Summary for ${source}:`);
    if (options.statsOnly) {
      console.log(` 📊 Stats recorded: ${result.successCount}`);
      console.log(` ⏭️ Duplicates: ${result.skippedCount}`);
    } else {
      console.log(` ✅ Uploaded: ${result.successCount}`);
      if (result.detectedCount) console.log(` 🔍 Detected: ${result.detectedCount}`);
      if (result.organizedCount) console.log(` 📁 Organized: ${result.organizedCount}`);
      console.log(` ⏭️ Skipped: ${result.skippedCount}`);
    }
    console.log(` ❌ Errors: ${result.failureCount}`);
  }

  /**
   * Log final summary
   * @private
   */
  #logFinalSummary(results, options, uploadService) {
    console.log(`\n${'='.repeat(60)}`);
    if (options.statsOnly) {
      console.log(`📊 STATS COLLECTION COMPLETED`);
      console.log(` 📊 Total stats recorded: ${results.successCount}`);
      console.log(` ⏭️ Total duplicates: ${results.skippedCount}`);
    } else {
      console.log(`🎯 ${uploadService.getServiceName().toUpperCase()} UPLOAD COMPLETED`);
      console.log(` ✅ Total uploaded: ${results.successCount}`);
      if (results.detectedCount) console.log(` 🔍 Total detected: ${results.detectedCount}`);
      if (results.organizedCount) console.log(` 📁 Total organized: ${results.organizedCount}`);
      console.log(` ⏭️ Total skipped: ${results.skippedCount}`);
    }
    console.log(` ❌ Total errors: ${results.failureCount}`);
    console.log(` 📜 Log file: ${logger.getLogFilePath()}`);
    console.log(`${'='.repeat(60)}\n`);
  }

  /**
   * Run additional phases
   * @private
   */
  async #runAdditionalPhases(options) {
    try {
      // Phase 2: PDF Detection
      console.log('\n🔍 === PHASE 2: PDF Detection ===');
      const detectionResult = await databaseService.detectPedimentosInDatabase({
        batchSize: parseInt(options.batchSize) || 10,
      });
      console.log(
        `✅ Phase 2 Complete: ${detectionResult.detectedCount} detected, ${detectionResult.errorCount} errors`,
      );

      // Additional phases would be implemented here
      console.log('\n🎉 All phases completed successfully!');
    } catch (error) {
      this.errorHandler.handleError(error, { phase: 'additional-phases' });
      throw error;
    }
  }
}

export default UploadCommand;
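
For orientation, a hedged usage sketch of the class above. The option names are the ones execute() and its private helpers actually read; the values are placeholders, and the surrounding wiring (normally done by the CLI entry point) is assumed.

import { UploadCommand } from './commands/UploadCommand.js';

// Options consumed by execute(): batchSize (1-100), statsOnly, skipProcessed,
// autoDetectStructure, forceSupabase, clearLog, runAllPhases.
await new UploadCommand().execute({
  batchSize: '20',            // placeholder value
  statsOnly: false,
  skipProcessed: true,
  autoDetectStructure: true,
  forceSupabase: false,
  clearLog: false,
  runAllPhases: false,
});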
package/src/config/config.js
ADDED

@@ -0,0 +1,173 @@
import { config } from 'dotenv';
import fs from 'fs';
import path from 'path';

config();

/**
 * Configuration management for Arela Uploader
 * Centralizes all environment variable handling and validation
 */
class Config {
  constructor() {
    this.packageVersion = this.#loadPackageVersion();
    this.supabase = this.#loadSupabaseConfig();
    this.api = this.#loadApiConfig();
    this.upload = this.#loadUploadConfig();
    this.performance = this.#loadPerformanceConfig();
    this.logging = this.#loadLoggingConfig();
  }

  /**
   * Load package version from package.json
   * @private
   */
  #loadPackageVersion() {
    try {
      const __filename = new URL(import.meta.url).pathname;
      const __dirname = path.dirname(__filename);
      const packageJsonPath = path.resolve(__dirname, '../../package.json');
      const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
      return packageJson.version || '1.0.0';
    } catch (error) {
      console.warn('⚠️ Could not read package.json version, using fallback');
      return '0.2.4';
    }
  }

  /**
   * Load Supabase configuration
   * @private
   */
  #loadSupabaseConfig() {
    return {
      url: process.env.SUPABASE_URL,
      key: process.env.SUPABASE_KEY,
      bucket: process.env.SUPABASE_BUCKET,
    };
  }

  /**
   * Load API configuration
   * @private
   */
  #loadApiConfig() {
    return {
      baseUrl: process.env.ARELA_API_URL,
      token: process.env.ARELA_API_TOKEN,
    };
  }

  /**
   * Load upload configuration
   * @private
   */
  #loadUploadConfig() {
    const basePath = process.env.UPLOAD_BASE_PATH;
    const sources = process.env.UPLOAD_SOURCES?.split('|')
      .map((s) => s.trim())
      .filter(Boolean);

    const uploadRfcs = process.env.UPLOAD_RFCS?.split('|')
      .map((s) => s.trim())
      .filter(Boolean);

    return {
      basePath,
      sources,
      rfcs: uploadRfcs,
    };
  }

  /**
   * Load performance configuration
   * @private
   */
  #loadPerformanceConfig() {
    return {
      batchDelay: parseInt(process.env.BATCH_DELAY) || 100,
      progressUpdateInterval: parseInt(process.env.PROGRESS_UPDATE_INTERVAL) || 10,
      logBufferSize: 100,
      logFlushInterval: 5000,
    };
  }

  /**
   * Load logging configuration
   * @private
   */
  #loadLoggingConfig() {
    return {
      verbose: process.env.VERBOSE_LOGGING === 'true',
      logFilePath: path.resolve(process.cwd(), 'arela-upload.log'),
    };
  }

  /**
   * Check if API mode is available
   * @returns {boolean}
   */
  isApiModeAvailable() {
    return !!(this.api.baseUrl && this.api.token);
  }

  /**
   * Check if Supabase mode is available
   * @returns {boolean}
   */
  isSupabaseModeAvailable() {
    return !!(this.supabase.url && this.supabase.key && this.supabase.bucket);
  }

  /**
   * Validate configuration for the requested mode
   * @param {boolean} forceSupabase - Whether to force Supabase mode
   * @throws {Error} If required configuration is missing
   */
  validateConfiguration(forceSupabase = false) {
    if (forceSupabase) {
      if (!this.isSupabaseModeAvailable()) {
        throw new Error(
          '⚠️ Missing Supabase credentials. Please set SUPABASE_URL, SUPABASE_KEY, and SUPABASE_BUCKET'
        );
      }
      return;
    }

    if (!this.isApiModeAvailable() && !this.isSupabaseModeAvailable()) {
      throw new Error(
        '⚠️ Missing credentials. Please set either:\n' +
        ' - ARELA_API_URL and ARELA_API_TOKEN for API mode, or\n' +
        ' - SUPABASE_URL, SUPABASE_KEY, and SUPABASE_BUCKET for direct mode'
      );
    }
  }

  /**
   * Get upload sources with validation
   * @returns {string[]} Array of upload sources
   * @throws {Error} If sources are not configured
   */
  getUploadSources() {
    if (!this.upload.sources || this.upload.sources.length === 0) {
      throw new Error('⚠️ No upload sources configured. Please set UPLOAD_SOURCES environment variable.');
    }
    return this.upload.sources;
  }

  /**
   * Get base path with validation
   * @returns {string} Base path for uploads
   * @throws {Error} If base path is not configured
   */
  getBasePath() {
    if (!this.upload.basePath) {
      throw new Error('⚠️ No base path configured. Please set UPLOAD_BASE_PATH environment variable.');
    }
    return this.upload.basePath;
  }
}

// Export singleton instance
export const appConfig = new Config();
export default appConfig;
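
Taken together, config.js reads the environment variables below. A sample .env for reference; the variable names come straight from the code above, while every value is a placeholder.

# API mode (preferred when both are set) ...
ARELA_API_URL=https://api.example.com
ARELA_API_TOKEN=replace-with-token
# ... or direct Supabase mode
SUPABASE_URL=https://project.supabase.co
SUPABASE_KEY=replace-with-key
SUPABASE_BUCKET=uploads

# Upload layout: sources and RFCs are pipe-separated lists
UPLOAD_BASE_PATH=/data/pedimentos
UPLOAD_SOURCES=2023|2024
UPLOAD_RFCS=RFC1|RFC2

# Optional tuning and logging
BATCH_DELAY=100
PROGRESS_UPDATE_INTERVAL=10
VERBOSE_LOGGING=false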