sfdx-hardis 6.4.3 → 6.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -3
- package/lib/commands/hardis/org/diagnose/unsecure-connected-apps.js +32 -3
- package/lib/commands/hardis/org/diagnose/unsecure-connected-apps.js.map +1 -1
- package/lib/commands/hardis/org/files/export.js +56 -4
- package/lib/commands/hardis/org/files/export.js.map +1 -1
- package/lib/common/utils/filesUtils.d.ts +34 -14
- package/lib/common/utils/filesUtils.js +260 -70
- package/lib/common/utils/filesUtils.js.map +1 -1
- package/lib/common/utils/index.js +14 -4
- package/lib/common/utils/index.js.map +1 -1
- package/lib/common/utils/limitUtils.d.ts +42 -0
- package/lib/common/utils/limitUtils.js +163 -0
- package/lib/common/utils/limitUtils.js.map +1 -0
- package/oclif.manifest.json +1016 -1009
- package/package.json +1 -1
--- package/lib/common/utils/filesUtils.js (6.4.3)
+++ package/lib/common/utils/filesUtils.js (6.5.0)
@@ -5,6 +5,7 @@ import c from 'chalk';
 import open from 'open';
 import * as split from 'split';
 import { PromisePool } from '@supercharge/promise-pool';
+import crypto from 'crypto';
 // Salesforce Specific and Other Specific Libraries
 import { SfError } from '@salesforce/core';
 import Papa from 'papaparse';
@@ -16,6 +17,7 @@ import { prompts } from './prompts.js';
 import { getApiVersion, getReportDirectory } from '../../config/index.js';
 import { WebSocketClient } from '../websocketClient.js';
 import { FileDownloader } from './fileDownloader.js';
+import { ApiLimitsManager } from './limitUtils.js';
 export const filesFolderRoot = path.join('.', 'scripts', 'files');
 export class FilesExporter {
     filesPath;
@@ -35,7 +37,10 @@ export class FilesExporter {
     bulkApiRecordsEnded = false;
     recordChunksNumber = 0;
     logFile;
-
+    hasExistingFiles;
+    resumeExport;
+    totalRestApiCalls = 0;
+    totalBulkApiCalls = 0;
     totalParentRecords = 0;
     parentRecordsWithFiles = 0;
     recordsIgnored = 0;
@@ -44,8 +49,10 @@ export class FilesExporter {
     filesIgnoredType = 0;
     filesIgnoredExisting = 0;
     filesIgnoredSize = 0;
-
-
+    filesValidationErrors = 0;
+    filesValidated = 0; // Count of files that went through validation (downloaded or existing)
+    // Optimized API Limits Management System
+    apiLimitsManager;
     constructor(filesPath, conn, options, commandThis) {
         this.filesPath = filesPath;
         this.conn = conn;
@@ -53,10 +60,14 @@ export class FilesExporter {
         this.recordsChunkSize = options?.recordsChunkSize || 1000;
         this.parentRecordsChunkSize = 100000;
         this.startChunkNumber = options?.startChunkNumber || 0;
+        this.resumeExport = options?.resumeExport || false;
+        this.hasExistingFiles = fs.existsSync(path.join(this.filesPath, 'export'));
         this.commandThis = commandThis;
         if (options.exportConfig) {
             this.dtl = options.exportConfig;
         }
+        // Initialize the optimized API limits manager
+        this.apiLimitsManager = new ApiLimitsManager(conn, commandThis);
     }
     async processExport() {
         // Get config
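
For orientation, here is how the new constructor options surface when driving the exporter programmatically. This is a minimal sketch, not an excerpt from the package: the import path and the surrounding `conn`/`this` objects are assumptions (they would exist inside a command's `run()` method), while the constructor signature and the `resumeExport` option come from the hunk above.

```javascript
// Minimal sketch; assumed import path, and assumed to run inside an SfCommand
// method where `this` is the command instance and `conn` an authenticated connection.
import { FilesExporter } from 'sfdx-hardis/lib/common/utils/filesUtils.js';

const exporter = new FilesExporter(
  'scripts/files/my-workspace', // filesPath: workspace whose export/ subfolder receives the files
  conn,                         // assumed: authenticated Salesforce connection
  { resumeExport: true },       // new in 6.5.0: validate files already on disk and skip the valid ones
  this                          // commandThis: command instance used for uxLog output
);
const result = await exporter.processExport(); // assumed to resolve with the stats built by buildResult()
console.log(`${result.stats.filesValidated} files validated, ${result.stats.filesDownloaded} downloaded`);
```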
@@ -68,6 +79,17 @@ export class FilesExporter {
         // Make sure export folder for files is existing
         this.exportedFilesFolder = path.join(this.filesPath, 'export');
         await fs.ensureDir(this.exportedFilesFolder);
+        // Handle resume/restart mode
+        if (!this.resumeExport) {
+            if (this.hasExistingFiles) {
+                // Restart mode: clear the output folder
+                uxLog("action", this.commandThis, c.yellow(`Restart mode: clearing output folder ${this.exportedFilesFolder}`));
+                await fs.emptyDir(this.exportedFilesFolder);
+            }
+        }
+        else {
+            uxLog("action", this.commandThis, c.cyan(`Resume mode: existing files will be validated and skipped if valid`));
+        }
         await this.calculateApiConsumption();
         const reportDir = await getReportDirectory();
         const reportExportDir = path.join(reportDir, 'files-export-log');
@@ -90,7 +112,8 @@ export class FilesExporter {
         let totalFiles = 0;
         // Get parent records count to estimate batching
         const countSoqlQuery = this.dtl.soqlQuery.replace(/SELECT (.*) FROM/gi, 'SELECT COUNT() FROM');
-        this.
+        await this.waitIfApiLimitApproached('REST');
+        this.totalRestApiCalls++;
         const countSoqlQueryRes = await soqlQuery(countSoqlQuery, this.conn);
         const totalParentRecords = countSoqlQueryRes.totalSize;
         // Count Attachments - use COUNT() query with IN clause batching for memory efficiency
@@ -100,12 +123,14 @@ export class FilesExporter {
         if (sampleSize > 0) {
             // Get sample of parent IDs
             const sampleQuery = this.dtl.soqlQuery.replace(/SELECT (.*) FROM/gi, 'SELECT Id FROM') + ` LIMIT ${sampleSize}`;
-            this.
+            await this.waitIfApiLimitApproached('REST');
+            this.totalRestApiCalls++;
             const sampleParents = await soqlQuery(sampleQuery, this.conn);
             if (sampleParents.records.length > 0) {
                 const sampleParentIds = sampleParents.records.map((record) => `'${record.Id}'`).join(',');
                 const attachmentCountQuery = `SELECT COUNT() FROM Attachment WHERE ParentId IN (${sampleParentIds})`;
-                this.
+                await this.waitIfApiLimitApproached('REST');
+                this.totalRestApiCalls++;
                 const attachmentCountRes = await soqlQuery(attachmentCountQuery, this.conn);
                 // Extrapolate from sample
                 const avgAttachmentsPerRecord = attachmentCountRes.totalSize / sampleParents.records.length;
@@ -120,7 +145,7 @@ export class FilesExporter {
             const sampleParentIds = sampleParents.records.map((record) => `'${record.Id}'`).join(',');
             // Count ContentDocumentLinks for sample
             const linkCountQuery = `SELECT COUNT() FROM ContentDocumentLink WHERE LinkedEntityId IN (${sampleParentIds})`;
-            this.
+            this.totalRestApiCalls++;
             const linkCountRes = await soqlQuery(linkCountQuery, this.conn);
             // Extrapolate from sample (ContentVersions ≈ ContentDocumentLinks for latest versions)
             const avgContentVersionsPerRecord = linkCountRes.totalSize / sampleParents.records.length;
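
The sampling above is plain proportional extrapolation. A worked example of the arithmetic, with invented numbers:

```javascript
// Invented figures to illustrate the extrapolation above.
const totalParentRecords = 50000; // from the COUNT() query on the parent SOQL
const sampledParents = 100;       // records returned by the LIMIT-ed sample query
const linksInSample = 250;        // COUNT() of ContentDocumentLink for the sample

const avgContentVersionsPerRecord = linksInSample / sampledParents;                   // 2.5
const estimatedFiles = Math.round(totalParentRecords * avgContentVersionsPerRecord);  // 125000
```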
@@ -156,7 +181,12 @@ export class FilesExporter {
                 // All chunks processed, use actual total
                 actualTotalFiles = totalFilesDiscovered;
             }
-
+            // Get API usage for display (non-blocking)
+            this.getApiUsageStatus().then(apiUsage => {
+                uxLog("other", this, c.grey(`Discovered ${filesDiscoveredInChunk} files in chunk, updated total estimate to ${actualTotalFiles} ${apiUsage.message}`));
+            }).catch(() => {
+                uxLog("other", this, c.grey(`Discovered ${filesDiscoveredInChunk} files in chunk, updated total estimate to ${actualTotalFiles}`));
+            });
         }
         WebSocketClient.sendProgressStepMessage(filesProcessed, actualTotalFiles);
     };
@@ -167,24 +197,36 @@ export class FilesExporter {
         // End progress tracking with final total
         WebSocketClient.sendProgressEndMessage(actualTotalFiles);
     }
-    // Calculate API consumption
+    // Calculate API consumption and validate limits - optimized with new ApiLimitsManager
     async calculateApiConsumption() {
+        // Initialize the API limits manager
+        await this.apiLimitsManager.initialize();
         const countSoqlQuery = this.dtl.soqlQuery.replace(/SELECT (.*) FROM/gi, 'SELECT COUNT() FROM');
-        this.
+        await this.apiLimitsManager.trackApiCall('REST');
+        this.totalRestApiCalls++;
         const countSoqlQueryRes = await soqlQuery(countSoqlQuery, this.conn);
         this.chunksNumber = Math.round(countSoqlQueryRes.totalSize / this.recordsChunkSize);
-
-
-
-
-
-        //
-
-
+        // Get current usage for API consumption estimation
+        const currentUsage = this.apiLimitsManager.getCurrentUsage();
+        // More accurate API consumption estimation:
+        // - 1 Bulk API v2 call for main parent records query
+        // - Multiple REST API calls for Attachment queries (batches of 200)
+        // - Multiple Bulk API v2 calls for ContentDocumentLink and ContentVersion queries
+        const estimatedRestApiCalls = Math.round(this.chunksNumber * (countSoqlQueryRes.totalSize / 200)) + 5; // Attachment batches + counting queries
+        const estimatedBulkApiCalls = Math.round(this.chunksNumber * 3) + 1; // Parent records + ContentDocumentLink + ContentVersion per chunk
+        // Check REST API limit with safety buffer
+        const restApiSafetyBuffer = 500;
+        if (currentUsage.restRemaining < estimatedRestApiCalls + restApiSafetyBuffer) {
+            throw new SfError(`You don't have enough REST API calls available (${c.bold(currentUsage.restRemaining)}) to perform this export that could consume ${c.bold(estimatedRestApiCalls)} REST API calls`);
+        }
+        // Check Bulk API v2 limit with safety buffer
+        const bulkApiSafetyBuffer = 100;
+        if (currentUsage.bulkRemaining < estimatedBulkApiCalls + bulkApiSafetyBuffer) {
+            throw new SfError(`You don't have enough Bulk API v2 calls available (${c.bold(currentUsage.bulkRemaining)}) to perform this export that could consume ${c.bold(estimatedBulkApiCalls)} Bulk API v2 calls`);
         }
         // Request user confirmation
         if (!isCI) {
-            const warningMessage = c.cyanBright(`This export of files could run on ${c.bold(c.yellow(countSoqlQueryRes.totalSize))} records, in ${c.bold(c.yellow(this.chunksNumber))} chunks, and consume up to ${c.bold(c.yellow(
+            const warningMessage = c.cyanBright(`This export of files could run on ${c.bold(c.yellow(countSoqlQueryRes.totalSize))} records, in ${c.bold(c.yellow(this.chunksNumber))} chunks, and consume up to ${c.bold(c.yellow(estimatedRestApiCalls))} REST API calls (${c.bold(c.yellow(currentUsage.restRemaining))} remaining) and ${c.bold(c.yellow(estimatedBulkApiCalls))} Bulk API v2 calls (${c.bold(c.yellow(currentUsage.bulkRemaining))} remaining). Do you want to proceed ?`);
             const promptRes = await prompts({
                 type: 'confirm',
                 message: warningMessage,
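
To make the new estimation formulas concrete, here is the same arithmetic evaluated for a hypothetical org (all figures invented):

```javascript
// Hypothetical export: 50,000 parent records with the default recordsChunkSize of 1000.
const totalSize = 50000;
const chunksNumber = Math.round(totalSize / 1000); // 50

// One Attachment REST query per batch of 200 records, plus a few counting queries.
const estimatedRestApiCalls = Math.round(chunksNumber * (totalSize / 200)) + 5; // 12505
// Parent records + ContentDocumentLink + ContentVersion per chunk, plus the main query.
const estimatedBulkApiCalls = Math.round(chunksNumber * 3) + 1; // 151

// With the safety buffers above, this export only starts if the org still has
// at least 12505 + 500 REST calls and 151 + 100 Bulk API v2 calls available.
```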
@@ -198,6 +240,14 @@ export class FilesExporter {
             }
         }
     }
+    // Monitor API usage during operations using the optimized ApiLimitsManager
+    async waitIfApiLimitApproached(operationType) {
+        await this.apiLimitsManager.trackApiCall(operationType);
+    }
+    // Get current API usage percentages for display
+    async getApiUsageStatus() {
+        return this.apiLimitsManager.getUsageStatus();
+    }
     // Run chunks one by one, and don't wait to have all the records fetched to start it
     startQueue(progressCallback) {
         this.queueInterval = setInterval(async () => {
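
These two wrappers delegate everything to the ApiLimitsManager, which lives in the new lib/common/utils/limitUtils.js (+163 lines, not shown in this excerpt). Inferring only from the call sites visible here, the manager's contract could look roughly like the sketch below; this is a guess at the shape, not the shipped implementation:

```javascript
// Hypothetical sketch of the ApiLimitsManager surface, inferred from its call sites.
class ApiLimitsManagerSketch {
  constructor(conn, commandThis) {
    this.conn = conn;
    this.commandThis = commandThis;
    this.rest = { used: 0, limit: 0 };
    this.bulk = { used: 0, limit: 0 };
  }
  async initialize() {
    // Presumably seeds the counters, e.g. from the REST /limits resource
    // (DailyApiRequests, DailyBulkV2QueryJobs).
  }
  async trackApiCall(type) {
    // 'REST' or 'BULK': count the call, and throttle when a limit is approached.
    (type === 'REST' ? this.rest : this.bulk).used++;
  }
  getCurrentUsage() {
    return {
      restRemaining: this.rest.limit - this.rest.used,
      bulkRemaining: this.bulk.limit - this.bulk.used,
    };
  }
  async getUsageStatus() {
    return { message: `(REST ${this.rest.used}/${this.rest.limit}, Bulk v2 ${this.bulk.used}/${this.bulk.limit})` };
  }
  async getFinalUsage() {
    return {
      restUsed: this.rest.used, restLimit: this.rest.limit, restRemaining: this.rest.limit - this.rest.used,
      bulkUsed: this.bulk.used, bulkLimit: this.bulk.limit, bulkRemaining: this.bulk.limit - this.bulk.used,
    };
  }
}
```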
@@ -242,7 +292,8 @@ export class FilesExporter {
     }
     async processParentRecords(progressCallback) {
         // Query parent records using SOQL defined in export.json file
-        this.
+        await this.waitIfApiLimitApproached('BULK');
+        this.totalBulkApiCalls++;
         this.conn.bulk.pollTimeout = this.pollTimeout || 600000; // Increase timeout in case we are on a bad internet connection or if the bulk api batch is queued
         // Use bulkQueryByChunks to handle large queries
         const queryRes = await bulkQueryByChunks(this.dtl.soqlQuery, this.conn, this.parentRecordsChunkSize);
@@ -283,7 +334,8 @@ export class FilesExporter {
             // Request all Attachment related to all records of the batch using REST API
             const parentIdIn = batch.map((record) => `'${record.Id}'`).join(',');
             const attachmentQuery = `SELECT Id, Name, ContentType, ParentId, BodyLength FROM Attachment WHERE ParentId IN (${parentIdIn})`;
-            this.
+            await this.waitIfApiLimitApproached('REST');
+            this.totalRestApiCalls++;
             const attachments = await this.conn.query(attachmentQuery);
             actualFilesInChunk += attachments.records.length; // Count actual files discovered
             if (attachments.records.length > 0) {
@@ -305,7 +357,7 @@ export class FilesExporter {
                 });
             }
             else {
-                uxLog("log", this, c.grey(
+                uxLog("log", this, c.grey(`No Attachments found for the ${batch.length} parent records in this batch`));
             }
         }
         for (let i = 0; i < records.length; i += contentVersionBatchSize) {
@@ -313,7 +365,9 @@ export class FilesExporter {
             // Request all ContentDocumentLink related to all records of the batch
             const linkedEntityIdIn = batch.map((record) => `'${record.Id}'`).join(',');
             const linkedEntityInQuery = `SELECT ContentDocumentId,LinkedEntityId FROM ContentDocumentLink WHERE LinkedEntityId IN (${linkedEntityIdIn})`;
-            this.
+            await this.waitIfApiLimitApproached('BULK');
+            this.totalBulkApiCalls++;
+            uxLog("log", this, c.grey(`Querying ContentDocumentLinks for ${linkedEntityInQuery.length} parent records in this batch...`));
             const contentDocumentLinks = await bulkQueryByChunks(linkedEntityInQuery, this.conn, this.parentRecordsChunkSize);
             if (contentDocumentLinks.records.length > 0) {
                 // Retrieve all ContentVersion related to ContentDocumentLink
@@ -324,8 +378,9 @@ export class FilesExporter {
                 // Log the progression of contentDocIdBatch
                 uxLog("action", this, c.cyan(`Processing ContentDocumentId chunk #${Math.ceil((j + 1) / contentVersionBatchSize)} on ${Math.ceil(contentDocIdIn.length / contentVersionBatchSize)}`));
                 // Request all ContentVersion related to all records of the batch
-                const contentVersionSoql = `SELECT Id,ContentDocumentId,Description,FileExtension,FileType,PathOnClient,Title,ContentSize FROM ContentVersion WHERE ContentDocumentId IN (${contentDocIdBatch}) AND IsLatest = true`;
-                this.
+                const contentVersionSoql = `SELECT Id,ContentDocumentId,Description,FileExtension,FileType,PathOnClient,Title,ContentSize,Checksum FROM ContentVersion WHERE ContentDocumentId IN (${contentDocIdBatch}) AND IsLatest = true`;
+                await this.waitIfApiLimitApproached('BULK');
+                this.totalBulkApiCalls++;
                 const contentVersions = await bulkQueryByChunks(contentVersionSoql, this.conn, this.parentRecordsChunkSize);
                 // ContentDocument object can be linked to multiple other objects even with same type (for example: same attachment can be linked to multiple EmailMessage objects).
                 // Because of this when we fetch ContentVersion for ContentDocument it can return less results than there is ContentDocumentLink objects to link.
@@ -375,7 +430,7 @@ export class FilesExporter {
     // Initialize CSV log file with headers
     async initializeCsvLog() {
         await fs.ensureDir(path.dirname(this.logFile));
-        const headers = 'Status,Folder,File Name,Extension,File Size (KB),Error Detail,ContentDocument Id,ContentVersion Id,Attachment Id\n';
+        const headers = 'Status,Folder,File Name,Extension,File Size (KB),Error Detail,ContentDocument Id,ContentVersion Id,Attachment Id,Validation Status,Download URL\n';
         await fs.writeFile(this.logFile, headers, 'utf8');
         uxLog("log", this, c.grey(`CSV log file initialized: ${this.logFile}`));
         WebSocketClient.sendReportFileMessage(this.logFile, "Exported files report (CSV)", 'report');
@@ -392,12 +447,63 @@ export class FilesExporter {
         return { fileName, extension, folderPath };
     }
     // Helper method to log skipped files
-    async logSkippedFile(outputFile, errorDetail, contentDocumentId = '', contentVersionId = '', attachmentId = '') {
+    async logSkippedFile(outputFile, errorDetail, contentDocumentId = '', contentVersionId = '', attachmentId = '', downloadUrl = '') {
         const { fileName, extension, folderPath } = this.extractFileInfo(outputFile);
-        await this.writeCsvLogEntry('skipped', folderPath, fileName, extension, 0, errorDetail, contentDocumentId, contentVersionId, attachmentId);
+        await this.writeCsvLogEntry('skipped', folderPath, fileName, extension, 0, errorDetail, contentDocumentId, contentVersionId, attachmentId, 'Skipped', downloadUrl);
+    }
+    // Helper method to calculate MD5 checksum of a file
+    async calculateMD5(filePath) {
+        const hash = crypto.createHash('md5');
+        const stream = fs.createReadStream(filePath);
+        return new Promise((resolve, reject) => {
+            stream.on('error', reject);
+            stream.on('data', chunk => hash.update(chunk));
+            stream.on('end', () => resolve(hash.digest('hex')));
+        });
+    }
+    // Helper method to validate downloaded file
+    async validateDownloadedFile(outputFile, expectedSize, expectedChecksum) {
+        try {
+            // Check if file exists
+            if (!fs.existsSync(outputFile)) {
+                return { valid: false, actualSize: 0, error: 'File does not exist' };
+            }
+            // Get actual file size
+            const stats = await fs.stat(outputFile);
+            const actualSize = stats.size;
+            // Validate file size if expected size is provided
+            if (actualSize !== expectedSize) {
+                return {
+                    valid: false,
+                    actualSize,
+                    error: `Size mismatch: expected ${expectedSize} bytes, got ${actualSize} bytes`
+                };
+            }
+            // Validate checksum if expected checksum is provided
+            if (expectedChecksum) {
+                const actualChecksum = await this.calculateMD5(outputFile);
+                if (actualChecksum.toLowerCase() !== expectedChecksum.toLowerCase()) {
+                    return {
+                        valid: false,
+                        actualSize,
+                        actualChecksum,
+                        error: `Checksum mismatch: expected ${expectedChecksum}, got ${actualChecksum}`
+                    };
+                }
+                return { valid: true, actualSize, actualChecksum };
+            }
+            return { valid: true, actualSize };
+        }
+        catch (error) {
+            return {
+                valid: false,
+                actualSize: 0,
+                error: `Validation error: ${error.message}`
+            };
+        }
     }
     // Write a CSV entry for each file processed (fileSize in KB)
-    async writeCsvLogEntry(status, folder, fileName, extension, fileSizeKB, errorDetail = '', contentDocumentId = '', contentVersionId = '', attachmentId = '') {
+    async writeCsvLogEntry(status, folder, fileName, extension, fileSizeKB, errorDetail = '', contentDocumentId = '', contentVersionId = '', attachmentId = '', validationStatus = '', downloadUrl = '') {
         try {
             // Escape CSV values to handle commas, quotes, and newlines
             const escapeCsvValue = (value) => {
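
The two helpers above can be exercised stand-alone. A minimal sketch of re-checking one exported file against the ContentSize and Checksum captured from its ContentVersion record (the path and expected values are placeholders; Salesforce's ContentVersion.Checksum is the MD5 of the file body):

```javascript
import crypto from 'crypto';
import fs from 'fs-extra';

// Stand-alone equivalent of the streaming MD5 helper above.
function md5Of(filePath) {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash('md5');
    fs.createReadStream(filePath)
      .on('error', reject)
      .on('data', (chunk) => hash.update(chunk))
      .on('end', () => resolve(hash.digest('hex')));
  });
}

// Placeholder inputs: in the exporter these come from the ContentVersion query.
const file = 'scripts/files/my-workspace/export/My Account/contract.pdf';
const expected = { size: 123456, checksum: '9e107d9d372bb6826bd81d3542a419d6' };

const stats = await fs.stat(file);
const valid = stats.size === expected.size
  && (await md5Of(file)).toLowerCase() === expected.checksum.toLowerCase();
console.log(valid ? 'valid' : 'invalid');
```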
@@ -416,7 +522,9 @@ export class FilesExporter {
                 escapeCsvValue(errorDetail),
                 escapeCsvValue(contentDocumentId),
                 escapeCsvValue(contentVersionId),
-                escapeCsvValue(attachmentId)
+                escapeCsvValue(attachmentId),
+                escapeCsvValue(validationStatus),
+                escapeCsvValue(downloadUrl)
             ].join(',') + '\n';
             await fs.appendFile(this.logFile, csvLine, 'utf8');
         }
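
The body of the shipped escapeCsvValue is truncated in this diff; below is a standard escaper of the kind its comment describes, plus an illustrative row in the new eleven-column order (IDs and URL invented):

```javascript
// Standard CSV escaping: quote values containing a comma, quote, or newline,
// and double any embedded quotes. Stand-in for the truncated escapeCsvValue.
const escapeCsvValue = (value) => {
  const str = String(value ?? '');
  return /[",\n\r]/.test(str) ? `"${str.replace(/"/g, '""')}"` : str;
};

// Illustrative row matching the new header order (all values invented):
const csvLine = [
  'success', 'My Account', 'contract.pdf', 'pdf', 42, '',
  '069XX00000000AA', '068XX00000000AA', '', 'Valid',
  'https://myorg.my.salesforce.com/services/data/v62.0/sobjects/ContentVersion/068XX00000000AA/VersionData',
].map(escapeCsvValue).join(',') + '\n';
```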
@@ -424,38 +532,100 @@ export class FilesExporter {
             uxLog("warning", this, c.yellow(`Error writing to CSV log: ${e.message}`));
         }
     }
-    async downloadFile(fetchUrl, outputFile, contentDocumentId = '', contentVersionId = '', attachmentId = '') {
+    async downloadFile(fetchUrl, outputFile, contentDocumentId = '', contentVersionId = '', attachmentId = '', expectedSize, expectedChecksum) {
+        // In resume mode, check if file already exists and is valid
+        if (this.resumeExport && fs.existsSync(outputFile)) {
+            const { fileName, extension, folderPath } = this.extractFileInfo(outputFile);
+            let fileSizeKB = 0;
+            try {
+                const stats = await fs.stat(outputFile);
+                fileSizeKB = Math.round(stats.size / 1024); // Convert bytes to KB
+                // Validate existing file (always have validation data: checksum for ContentVersion, size for Attachment)
+                const validation = await this.validateDownloadedFile(outputFile, expectedSize, expectedChecksum);
+                if (validation.valid) {
+                    this.filesValidated++; // Count only valid files
+                    // File exists and is valid - skip download
+                    const fileDisplay = path.join(folderPath, fileName).replace(/\\/g, '/');
+                    uxLog("success", this, c.grey(`Skipped (valid existing file) ${fileDisplay}`));
+                    this.filesIgnoredExisting++;
+                    // Write success entry to CSV log
+                    await this.writeCsvLogEntry('success', folderPath, fileName, extension, fileSizeKB, 'Existing valid file', contentDocumentId, contentVersionId, attachmentId, 'Valid (existing)', fetchUrl);
+                    return;
+                }
+                else {
+                    // File exists but is invalid - will re-download
+                    uxLog("log", this, c.yellow(`Existing file ${fileName} is invalid (${validation.error}) - re-downloading`));
+                }
+            }
+            catch (e) {
+                uxLog("warning", this, c.yellow(`Could not validate existing file ${fileName}: ${e.message}`));
+                // Continue with download if we can't validate existing file
+            }
+        }
+        // Proceed with normal download process
         const downloadResult = await new FileDownloader(fetchUrl, { conn: this.conn, outputFile: outputFile, label: 'file' }).download();
         // Extract file information for CSV logging
         const { fileName, extension, folderPath } = this.extractFileInfo(outputFile);
         let fileSizeKB = 0;
         let errorDetail = '';
+        let validationError = ''; // Store validation error separately
+        let validationStatus = '';
+        let isValidFile = false; // Track if file is both downloaded and valid
         // Get file size if download was successful
         if (downloadResult.success && fs.existsSync(outputFile)) {
             try {
                 const stats = await fs.stat(outputFile);
                 fileSizeKB = Math.round(stats.size / 1024); // Convert bytes to KB
+                // Perform file validation (always have validation data: checksum for ContentVersion, size for Attachment)
+                const validation = await this.validateDownloadedFile(outputFile, expectedSize, expectedChecksum);
+                if (validation.valid) {
+                    this.filesValidated++; // Count only valid files
+                    validationStatus = 'Valid';
+                    isValidFile = true;
+                    uxLog("success", this, c.green(`✓ Validation passed for ${fileName}`));
+                }
+                else {
+                    validationStatus = 'Invalid';
+                    validationError = validation.error || 'Unknown validation error';
+                    isValidFile = false;
+                    this.filesValidationErrors++;
+                    uxLog("warning", this, c.yellow(`⚠ Validation failed for ${fileName}: ${validation.error}`));
+                }
             }
             catch (e) {
                 uxLog("warning", this, c.yellow(`Could not get file size for ${fileName}: ${e.message}`));
+                validationStatus = 'Invalid';
+                validationError = e.message;
+                isValidFile = false;
             }
         }
         else if (!downloadResult.success) {
             errorDetail = downloadResult.error || 'Unknown download error';
+            validationStatus = 'Download failed';
+            isValidFile = false;
         }
         // Use file folder and file name for log display
         const fileDisplay = path.join(folderPath, fileName).replace(/\\/g, '/');
-
+        // Log based on download success AND validation success
+        if (downloadResult.success && isValidFile) {
             uxLog("success", this, c.grey(`Downloaded ${fileDisplay}`));
             this.filesDownloaded++;
-            // Write success entry to CSV log with Salesforce IDs
-            await this.writeCsvLogEntry('success', folderPath, fileName, extension, fileSizeKB, '', contentDocumentId, contentVersionId, attachmentId);
+            // Write success entry to CSV log with Salesforce IDs and validation status
+            await this.writeCsvLogEntry('success', folderPath, fileName, extension, fileSizeKB, '', contentDocumentId, contentVersionId, attachmentId, validationStatus, fetchUrl);
+        }
+        else if (downloadResult.success && !isValidFile) {
+            // File was downloaded but validation failed
+            uxLog("warning", this, c.red(`Invalid ${fileDisplay} - validation failed`));
+            this.filesErrors++;
+            // Write invalid entry to CSV log with validation error details
+            await this.writeCsvLogEntry('invalid', folderPath, fileName, extension, fileSizeKB, validationError, contentDocumentId, contentVersionId, attachmentId, validationStatus, fetchUrl);
         }
         else {
+            // Download failed
             uxLog("warning", this, c.red(`Error ${fileDisplay}`));
             this.filesErrors++;
-            // Write failed entry to CSV log with Salesforce IDs
-            await this.writeCsvLogEntry('failed', folderPath, fileName, extension, fileSizeKB, errorDetail, contentDocumentId, contentVersionId, attachmentId);
+            // Write failed entry to CSV log with Salesforce IDs and validation status
+            await this.writeCsvLogEntry('failed', folderPath, fileName, extension, fileSizeKB, errorDetail, contentDocumentId, contentVersionId, attachmentId, validationStatus, fetchUrl);
         }
     }
     async downloadAttachmentFile(attachment, records) {
@@ -469,7 +639,8 @@ export class FilesExporter {
             const attachmentParentFolderName = (parentAttachment[this.dtl.outputFolderNameField] || parentAttachment.Id).replace(/[/\\?%*:|"<>]/g, '-');
             const parentRecordFolderForFiles = path.resolve(path.join(this.exportedFilesFolder, attachmentParentFolderName));
             const outputFile = path.join(parentRecordFolderForFiles, attachment.Name.replace(/[/\\?%*:|"<>]/g, '-'));
-
+            const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/Attachment/${attachment.Id}/Body`;
+            await this.logSkippedFile(outputFile, `File size (${fileSizeKB} KB) below minimum (${this.dtl.fileSizeMin} KB)`, '', '', attachment.Id, fetchUrl);
             return;
         }
         // Retrieve initial record to build output files folder name
@@ -481,9 +652,9 @@ export class FilesExporter {
         const outputFile = path.join(parentRecordFolderForFiles, attachment.Name.replace(/[/\\?%*:|"<>]/g, '-'));
         // Create directory if not existing
         await fs.ensureDir(parentRecordFolderForFiles);
-        // Download file locally
+        // Download file locally with validation (Attachments have BodyLength but no checksum)
         const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/Attachment/${attachment.Id}/Body`;
-        await this.downloadFile(fetchUrl, outputFile, '', '', attachment.Id);
+        await this.downloadFile(fetchUrl, outputFile, '', '', attachment.Id, Number(attachment.BodyLength), undefined);
     }
     async downloadContentVersionFile(contentVersion, records, contentDocumentLink) {
         // Check file size filter (ContentSize is in bytes)
@@ -496,7 +667,8 @@ export class FilesExporter {
             const parentFolderName = (parentRecord[this.dtl.outputFolderNameField] || parentRecord.Id).replace(/[/\\?%*:|"<>]/g, '-');
             const parentRecordFolderForFiles = path.resolve(path.join(this.exportedFilesFolder, parentFolderName));
             const outputFile = path.join(parentRecordFolderForFiles, contentVersion.Title.replace(/[/\\?%*:|"<>]/g, '-'));
-
+            const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`;
+            await this.logSkippedFile(outputFile, `File size (${fileSizeKB} KB) below minimum (${this.dtl.fileSizeMin} KB)`, contentVersion.ContentDocumentId, contentVersion.Id, '', fetchUrl);
             return;
         }
         // Retrieve initial record to build output files folder name
@@ -527,44 +699,59 @@ export class FilesExporter {
             uxLog("log", this, c.grey(`Skipped - ${outputFile.replace(this.exportedFilesFolder, '')} - File type ignored`));
             this.filesIgnoredType++;
             // Log skipped file to CSV
-
+            const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`;
+            await this.logSkippedFile(outputFile, 'File type ignored', contentVersion.ContentDocumentId, contentVersion.Id, '', fetchUrl);
             return;
         }
-        // Check file overwrite
-        if (this.dtl.overwriteFiles !== true && fs.existsSync(outputFile)) {
+        // Check file overwrite (unless in resume mode where downloadFile handles existing files)
+        if (this.dtl.overwriteFiles !== true && !this.resumeExport && fs.existsSync(outputFile)) {
             uxLog("warning", this, c.yellow(`Skipped - ${outputFile.replace(this.exportedFilesFolder, '')} - File already existing`));
             this.filesIgnoredExisting++;
             // Log skipped file to CSV
-
+            const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`;
+            await this.logSkippedFile(outputFile, 'File already exists', contentVersion.ContentDocumentId, contentVersion.Id, '', fetchUrl);
             return;
         }
         // Create directory if not existing
         await fs.ensureDir(parentRecordFolderForFiles);
-        // Download file locally
+        // Download file locally with validation (ContentVersion has both Checksum and ContentSize)
         const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`;
-        await this.downloadFile(fetchUrl, outputFile, contentVersion.ContentDocumentId, contentVersion.Id);
+        await this.downloadFile(fetchUrl, outputFile, contentVersion.ContentDocumentId, contentVersion.Id, '', Number(contentVersion.ContentSize), contentVersion.Checksum);
     }
     // Build stats & result
     async buildResult() {
-
-        const
-
-
+        // Get final API usage from the limits manager
+        const finalUsage = await this.apiLimitsManager.getFinalUsage();
+        // Display final API usage summary
+        try {
+            const finalApiUsage = await this.getApiUsageStatus();
+            uxLog("success", this, c.green(`Export completed! Final API usage: ${finalApiUsage.message}`));
+        }
+        catch (error) {
+            uxLog("warning", this, c.yellow(`Could not retrieve final API usage: ${error.message}`));
+        }
         const result = {
             stats: {
+                filesValidated: this.filesValidated,
                 filesDownloaded: this.filesDownloaded,
                 filesErrors: this.filesErrors,
                 filesIgnoredType: this.filesIgnoredType,
                 filesIgnoredExisting: this.filesIgnoredExisting,
                 filesIgnoredSize: this.filesIgnoredSize,
-
+                filesValidationErrors: this.filesValidationErrors,
+                totalRestApiCalls: this.totalRestApiCalls,
+                totalBulkApiCalls: this.totalBulkApiCalls,
                 totalParentRecords: this.totalParentRecords,
                 parentRecordsWithFiles: this.parentRecordsWithFiles,
                 recordsIgnored: this.recordsIgnored,
-
-
-
-
+                restApiUsedBefore: finalUsage.restUsed,
+                restApiUsedAfter: finalUsage.restUsed,
+                restApiLimit: finalUsage.restLimit,
+                restApiCallsRemaining: finalUsage.restRemaining,
+                bulkApiUsedBefore: finalUsage.bulkUsed,
+                bulkApiUsedAfter: finalUsage.bulkUsed,
+                bulkApiLimit: finalUsage.bulkLimit,
+                bulkApiCallsRemaining: finalUsage.bulkRemaining,
             },
             logFile: this.logFile
         };
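
Putting the new stats together, the exporter's result payload now reports validation counters, per-family call counts, and the final limits snapshot. A representative shape with invented values (note that the Before/After pairs are both populated from the same final reading in this version):

```javascript
// Illustrative FilesExporter result (all numbers invented):
const result = {
  stats: {
    filesValidated: 1200,        // downloaded or existing files that passed validation
    filesDownloaded: 1180,
    filesErrors: 3,
    filesIgnoredType: 12,
    filesIgnoredExisting: 20,    // in resume mode: valid existing files skipped
    filesIgnoredSize: 5,
    filesValidationErrors: 3,
    totalRestApiCalls: 260,
    totalBulkApiCalls: 16,
    totalParentRecords: 5000,
    parentRecordsWithFiles: 900,
    recordsIgnored: 0,
    restApiUsedBefore: 10460, restApiUsedAfter: 10460, restApiLimit: 100000, restApiCallsRemaining: 89540,
    bulkApiUsedBefore: 16, bulkApiUsedAfter: 16, bulkApiLimit: 10000, bulkApiCallsRemaining: 9984,
  },
  logFile: 'hardis-report/files-export-log/<timestamp>.csv', // placeholder path
};
```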
@@ -587,8 +774,8 @@ export class FilesImporter {
     filesOverwritten = 0;
     filesErrors = 0;
     filesSkipped = 0;
-
-
+    // Optimized API Limits Management System
+    apiLimitsManager;
     constructor(filesPath, conn, options, commandThis) {
         this.filesPath = filesPath;
         this.exportedFilesFolder = path.join(this.filesPath, 'export');
@@ -598,6 +785,8 @@ export class FilesImporter {
         if (options.exportConfig) {
             this.dtl = options.exportConfig;
         }
+        // Initialize the optimized API limits manager
+        this.apiLimitsManager = new ApiLimitsManager(conn, commandThis);
         // Initialize log file path
         const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, -5);
         this.logFile = path.join(this.filesPath, `import-log-${timestamp}.csv`);
@@ -768,10 +957,8 @@ export class FilesImporter {
     }
     // Build stats & result
     async buildResult() {
-
-        const
-            ? (connAny?.limitInfo?.apiUsage?.limit || 0) - (connAny?.limitInfo?.apiUsage?.used || 0)
-            : null;
+        // Get final API usage from the limits manager
+        const finalUsage = await this.apiLimitsManager.getFinalUsage();
         const result = {
             stats: {
                 filesUploaded: this.filesUploaded,
@@ -780,22 +967,24 @@ export class FilesImporter {
                 filesSkipped: this.filesSkipped,
                 totalFolders: this.totalFolders,
                 totalFiles: this.totalFiles,
-
-
-
-
+                restApiUsedBefore: finalUsage.restUsed,
+                restApiUsedAfter: finalUsage.restUsed,
+                restApiLimit: finalUsage.restLimit,
+                restApiCallsRemaining: finalUsage.restRemaining,
+                bulkApiUsedBefore: finalUsage.bulkUsed,
+                bulkApiUsedAfter: finalUsage.bulkUsed,
+                bulkApiLimit: finalUsage.bulkLimit,
+                bulkApiCallsRemaining: finalUsage.bulkRemaining,
             },
             logFile: this.logFile
         };
         await createXlsxFromCsv(this.logFile, { fileTitle: "Imported files report" }, result);
         return result;
     }
-    // Calculate API consumption
+    // Calculate API consumption using the optimized ApiLimitsManager
     async calculateApiConsumption(totalFilesNumber) {
-        //
-
-        this.apiUsedBefore = connAny?.limitInfo?.apiUsage?.used || 0;
-        this.apiLimit = connAny?.limitInfo?.apiUsage?.limit || 0;
+        // Initialize the API limits manager
+        await this.apiLimitsManager.initialize();
         const bulkCallsNb = 1;
         if (this.handleOverwrite) {
             totalFilesNumber = totalFilesNumber * 2;
@@ -866,6 +1055,7 @@ export async function getFilesWorkspaceDetail(filesWorkspace) {
     const fileSizeMin = exportFileJson.fileSizeMin || 0;
     return {
         full_label: `[${folderName}]${folderName != hardisLabel ? `: ${hardisLabel}` : ''}`,
+        name: folderName,
         label: hardisLabel,
         description: hardisDescription,
         soqlQuery: soqlQuery,