delegate-sf-mcp 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. package/.eslintrc.json +20 -0
  2. package/LICENSE +24 -0
  3. package/README.md +76 -0
  4. package/auth.js +148 -0
  5. package/bin/config-helper.js +51 -0
  6. package/bin/mcp-salesforce.js +12 -0
  7. package/bin/setup.js +266 -0
  8. package/bin/status.js +134 -0
  9. package/docs/README.md +52 -0
  10. package/docs/step1.png +0 -0
  11. package/docs/step2.png +0 -0
  12. package/docs/step3.png +0 -0
  13. package/docs/step4.png +0 -0
  14. package/examples/README.md +35 -0
  15. package/package.json +16 -0
  16. package/scripts/README.md +30 -0
  17. package/src/auth/file-storage.js +447 -0
  18. package/src/auth/oauth.js +417 -0
  19. package/src/auth/token-manager.js +207 -0
  20. package/src/backup/manager.js +949 -0
  21. package/src/index.js +168 -0
  22. package/src/salesforce/client.js +388 -0
  23. package/src/sf-client.js +79 -0
  24. package/src/tools/auth.js +190 -0
  25. package/src/tools/backup.js +486 -0
  26. package/src/tools/create.js +109 -0
  27. package/src/tools/delegate-hygiene.js +268 -0
  28. package/src/tools/delegate-validate.js +212 -0
  29. package/src/tools/delegate-verify.js +143 -0
  30. package/src/tools/delete.js +72 -0
  31. package/src/tools/describe.js +132 -0
  32. package/src/tools/installation-info.js +656 -0
  33. package/src/tools/learn-context.js +1077 -0
  34. package/src/tools/learn.js +351 -0
  35. package/src/tools/query.js +82 -0
  36. package/src/tools/repair-credentials.js +77 -0
  37. package/src/tools/setup.js +120 -0
  38. package/src/tools/time_machine.js +347 -0
  39. package/src/tools/update.js +138 -0
  40. package/src/tools.js +214 -0
  41. package/src/utils/cache.js +120 -0
  42. package/src/utils/debug.js +52 -0
  43. package/src/utils/logger.js +19 -0
  44. package/tokens.json +8 -0
@@ -0,0 +1,949 @@
1
+ /**
2
+ * Salesforce Backup Manager - Core Implementation
3
+ *
4
+ * This module provides the core backup functionality including
5
+ * file attachment download from all three Salesforce file systems:
6
+ * - Modern Files (ContentDocument/ContentVersion)
7
+ * - Legacy Attachments
8
+ * - Documents
9
+ *
10
+ * Supports both synchronous and asynchronous backup operations with
11
+ * lock file management for background processing.
12
+ */
13
+
14
+ import fs from 'fs/promises';
15
+ import path from 'path';
16
+ import { fileURLToPath } from 'url';
17
+ import { Worker } from 'worker_threads';
18
+ import { debug as logger } from '../utils/debug.js';
19
+
20
// Recreate CommonJS-style __filename/__dirname for this ES module; __dirname
// is used below to anchor default backup paths to the package root rather
// than the process's current working directory.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
23
/*
 * Note: the asynchronous backup job manager (BackupJobManager) that manages
 * background backup operations with lock files is defined at the bottom of
 * this file.
 */
28
/**
 * Salesforce File Downloader Class
 *
 * Downloads binary payloads from all three Salesforce file systems
 * (ContentVersion, Attachment, Document) with retry, exponential backoff,
 * bounded parallelism, and running byte/error statistics.
 */
export class SalesforceFileDownloader {
  /**
   * @param {object} salesforceClient - Client exposing ensureValidConnection(),
   *   instanceUrl, and tokenManager.getValidAccessToken().
   * @param {object} [options]
   * @param {number} [options.parallelLimit=5] - Max concurrent downloads.
   * @param {number} [options.retryAttempts=3] - Attempts per file.
   */
  constructor(salesforceClient, options = {}) {
    this.client = salesforceClient;
    this.parallelLimit = options.parallelLimit || 5;
    this.retryAttempts = options.retryAttempts || 3;
    this.downloadStats = {
      contentVersions: 0,
      attachments: 0,
      documents: 0,
      totalBytes: 0,
      errors: 0
    };
  }

  /**
   * Download a ContentVersion binary (Modern Files system).
   * @returns {Promise<{success: boolean, size: number, path: string, attempt: number}>}
   */
  async downloadContentVersion(contentVersionId, outputPath) {
    try {
      const result = await this.downloadBinaryFile(
        `/services/data/v58.0/sobjects/ContentVersion/${contentVersionId}/VersionData`,
        outputPath
      );
      this.downloadStats.contentVersions += 1;
      this.downloadStats.totalBytes += result.size;
      return result;
    } catch (error) {
      this.downloadStats.errors += 1;
      throw new Error(`ContentVersion download failed for ${contentVersionId}: ${error.message}`);
    }
  }

  /**
   * Download an Attachment binary (Legacy system).
   * @returns {Promise<{success: boolean, size: number, path: string, attempt: number}>}
   */
  async downloadAttachment(attachmentId, outputPath) {
    try {
      const result = await this.downloadBinaryFile(
        `/services/data/v58.0/sobjects/Attachment/${attachmentId}/Body`,
        outputPath
      );
      this.downloadStats.attachments += 1;
      this.downloadStats.totalBytes += result.size;
      return result;
    } catch (error) {
      this.downloadStats.errors += 1;
      throw new Error(`Attachment download failed for ${attachmentId}: ${error.message}`);
    }
  }

  /**
   * Download a Document binary (Legacy folder-based system).
   * @returns {Promise<{success: boolean, size: number, path: string, attempt: number}>}
   */
  async downloadDocument(documentId, outputPath) {
    try {
      const result = await this.downloadBinaryFile(
        `/services/data/v58.0/sobjects/Document/${documentId}/Body`,
        outputPath
      );
      this.downloadStats.documents += 1;
      this.downloadStats.totalBytes += result.size;
      return result;
    } catch (error) {
      this.downloadStats.errors += 1;
      throw new Error(`Document download failed for ${documentId}: ${error.message}`);
    }
  }

  /**
   * Core binary download: authenticated fetch, write to disk, retry with
   * exponential backoff (2^attempt seconds) until retryAttempts is exhausted.
   */
  async downloadBinaryFile(endpoint, outputPath) {
    await this.client.ensureValidConnection();

    for (let tryNumber = 1; tryNumber <= this.retryAttempts; tryNumber++) {
      try {
        // A fresh token is fetched per attempt in case the previous one expired.
        const accessToken = await this.client.tokenManager.getValidAccessToken();
        const response = await fetch(`${this.client.instanceUrl}${endpoint}`, {
          headers: {
            'Authorization': `Bearer ${accessToken}`
          }
        });

        if (!response.ok) {
          throw new Error(`HTTP ${response.status}: ${response.statusText}`);
        }

        const payload = await response.arrayBuffer();

        // Create the destination directory lazily, then persist the bytes.
        await fs.mkdir(path.dirname(outputPath), { recursive: true });
        await fs.writeFile(outputPath, Buffer.from(payload));

        return {
          success: true,
          size: payload.byteLength,
          path: outputPath,
          attempt: tryNumber
        };
      } catch (error) {
        if (tryNumber === this.retryAttempts) {
          throw error;
        }
        // Exponential backoff before the next attempt.
        const waitMs = Math.pow(2, tryNumber) * 1000;
        await new Promise((resolve) => setTimeout(resolve, waitMs));
      }
    }
  }

  /**
   * Download many files with bounded concurrency. Failures are captured as
   * { success: false, error, fileInfo } entries rather than thrown.
   */
  async downloadFilesInParallel(fileList) {
    const outcomes = [];

    for (let offset = 0; offset < fileList.length; offset += this.parallelLimit) {
      const chunk = fileList.slice(offset, offset + this.parallelLimit);
      const chunkOutcomes = await Promise.all(chunk.map((file) => this.#downloadOne(file)));
      outcomes.push(...chunkOutcomes);

      // Progress indication
      logger.log(`📁 Downloaded batch ${Math.floor(offset / this.parallelLimit) + 1}/${Math.ceil(fileList.length / this.parallelLimit)} (${outcomes.length}/${fileList.length} files)`);
    }

    return outcomes;
  }

  /** Dispatch a single file descriptor to the matching download method. */
  async #downloadOne(file) {
    try {
      let result;
      if (file.type === 'ContentVersion') {
        result = await this.downloadContentVersion(file.id, file.outputPath);
      } else if (file.type === 'Attachment') {
        result = await this.downloadAttachment(file.id, file.outputPath);
      } else if (file.type === 'Document') {
        result = await this.downloadDocument(file.id, file.outputPath);
      } else {
        throw new Error(`Unknown file type: ${file.type}`);
      }
      return { ...result, fileInfo: file };
    } catch (error) {
      return {
        success: false,
        error: error.message,
        fileInfo: file
      };
    }
  }

  /** Snapshot of download statistics, plus a derived megabyte total. */
  getStats() {
    const totalMB = Math.round((this.downloadStats.totalBytes / (1024 * 1024)) * 100) / 100;
    return { ...this.downloadStats, totalMB };
  }
}
203
+
204
/**
 * Salesforce Backup Manager Class
 *
 * Orchestrates the complete backup process: metadata, object data, file
 * attachments, and a summary manifest. Synchronous backups run through
 * createBackup(); asynchronous backups are delegated to BackupJobManager.
 */
export class SalesforceBackupManager {
  /**
   * @param {object} salesforceClient - Client exposing query(), describe(),
   *   describeGlobal(), ensureValidConnection(), instanceUrl.
   * @param {object} [options] - includeFiles / includeAttachments /
   *   includeDocuments (all default true), compression (false),
   *   parallelDownloads (5), outputDirectory (resolved below).
   */
  constructor(salesforceClient, options = {}) {
    this.client = salesforceClient;

    // Resolve outputDirectory relative to the project root (two levels above
    // this file), not the current working directory.
    const projectRoot = path.resolve(__dirname, '../..');
    const defaultOutputDir = path.join(projectRoot, 'backups');
    const resolvedOutputDir = options.outputDirectory
      ? (path.isAbsolute(options.outputDirectory)
        ? options.outputDirectory
        : path.resolve(projectRoot, options.outputDirectory))
      : defaultOutputDir;

    this.options = {
      includeFiles: true,
      includeAttachments: true,
      includeDocuments: true,
      compression: false,
      parallelDownloads: 5,
      ...options,
      // Bug fix: assigned AFTER the spread so a caller-supplied relative path
      // cannot clobber the resolved absolute path (the original spread
      // `...options` last, undoing the resolution above).
      outputDirectory: resolvedOutputDir
    };

    this.downloader = new SalesforceFileDownloader(this.client, {
      parallelLimit: this.options.parallelDownloads
    });

    // Shared job manager for asynchronous backups. Bug fix: pass the options
    // object (the original passed a directory string, which BackupJobManager's
    // constructor cannot use).
    this.jobManager = new BackupJobManager(options);
  }

  /**
   * Start an asynchronous backup job; returns immediately with job info.
   * Bug fix: the original class defined this method TWICE (the second
   * definition silently overrode the first); the duplicates are merged here.
   */
  async startAsyncBackup(backupType = 'incremental', sinceDate = null, options = {}) {
    const jobOptions = {
      ...this.options,
      backupType,
      sinceDate,
      ...options
    };
    return await this.jobManager.startBackupJob(this.client, jobOptions);
  }

  /**
   * Get status of all backup jobs (running and completed).
   * Bug fix: duplicate definition merged; the configured output directory is
   * now always passed so lock files are searched in the right place.
   */
  async getBackupJobStatuses() {
    return await this.jobManager.getJobStatuses(this.options.outputDirectory);
  }

  /** Get status of a specific backup job, or null if unknown. */
  async getBackupJobStatus(jobId) {
    return await this.jobManager.getJobStatus(jobId, this.options.outputDirectory);
  }

  /**
   * Check if a specific backup job is still running.
   * Bug fix: pass the configured output directory (the original omitted it,
   * so the job manager fell back to a CWD-relative 'backups' directory).
   */
  async isBackupJobRunning(jobId) {
    return await this.jobManager.isJobRunning(jobId, this.options.outputDirectory);
  }

  /**
   * Clean up old completed/failed job lock files.
   * Bug fix: the original passed maxAgeHours as the FIRST argument, where
   * cleanupOldJobs(backupDir, maxAgeHours) expects a directory path.
   */
  async cleanupOldBackupJobs(maxAgeHours = 24) {
    return await this.jobManager.cleanupOldJobs(this.options.outputDirectory, maxAgeHours);
  }

  /**
   * Create a comprehensive backup synchronously.
   * @param {string} [backupType='incremental'] - Label used for logging only.
   * @param {string|null} [sinceDate] - SOQL datetime literal used to filter by
   *   LastModifiedDate. NOTE(review): interpolated into SOQL verbatim — must
   *   come from trusted code, not user input.
   * @returns {Promise<{success: boolean, backupDirectory: string, duration: number, stats: object}>}
   */
  async createBackup(backupType = 'incremental', sinceDate = null) {
    const startTime = Date.now();
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    const backupDir = path.join(this.options.outputDirectory, `salesforce-backup-${timestamp}`);

    logger.log(`🚀 Starting ${backupType} backup...`);
    logger.log(`📁 Backup directory: ${backupDir}`);

    try {
      // Ensure base output directory exists
      await fs.mkdir(this.options.outputDirectory, { recursive: true });

      // Create backup directory structure
      await this.createBackupStructure(backupDir);

      // 1. Backup metadata (schemas, etc.)
      logger.log('\n📊 Backing up metadata...');
      await this.backupMetadata(backupDir);

      // 2. Backup object data
      logger.log('\n📋 Backing up object data...');
      await this.backupObjectData(backupDir, sinceDate);

      // 3. Backup files if enabled
      if (this.options.includeFiles || this.options.includeAttachments || this.options.includeDocuments) {
        logger.log('\n📎 Backing up file attachments...');
        await this.backupFiles(backupDir, sinceDate);
      }

      // 4. Create backup manifest
      logger.log('\n📝 Creating backup manifest...');
      await this.createBackupManifest(backupDir, startTime);

      const duration = Math.round((Date.now() - startTime) / 1000);
      logger.log(`\n✅ Backup completed successfully in ${duration} seconds!`);
      logger.log(`📁 Backup location: ${backupDir}`);

      return {
        success: true,
        backupDirectory: backupDir,
        duration: duration,
        stats: this.downloader.getStats()
      };

    } catch (error) {
      logger.error(`❌ Backup failed: ${error.message}`);
      throw error;
    }
  }

  /** Create the fixed backup directory layout (metadata/data/files/logs). */
  async createBackupStructure(backupDir) {
    const dirs = [
      path.join(backupDir, 'metadata'),
      path.join(backupDir, 'data'),
      path.join(backupDir, 'files', 'content-versions'),
      path.join(backupDir, 'files', 'attachments'),
      path.join(backupDir, 'files', 'documents'),
      path.join(backupDir, 'logs')
    ];

    for (const dir of dirs) {
      await fs.mkdir(dir, { recursive: true });
    }
  }

  /** Backup Salesforce metadata (global object schema) to objects-schema.json. */
  async backupMetadata(backupDir) {
    // Get all available objects
    const objects = await this.client.describeGlobal();

    const metadata = {
      backupTimestamp: new Date().toISOString(),
      salesforceInstance: this.client.instanceUrl,
      apiVersion: this.client.version || '58.0',
      totalObjects: objects.length,
      objects: objects
    };

    await fs.writeFile(
      path.join(backupDir, 'metadata', 'objects-schema.json'),
      JSON.stringify(metadata, null, 2)
    );

    logger.log(`   ✅ Saved metadata for ${objects.length} objects`);
  }

  /**
   * Backup object data: one JSON file per queryable object, capped at the
   * first 20 non-binary fields and 1000 records per object. Query failures
   * for individual objects are logged and skipped, not fatal.
   */
  async backupObjectData(backupDir, sinceDate) {
    const objects = await this.client.describeGlobal();
    const dataObjects = objects.filter(obj => obj.queryable && !obj.name.endsWith('__History'));

    logger.log(`   📊 Backing up ${dataObjects.length} queryable objects...`);

    let backedUpObjects = 0;
    let totalRecords = 0;

    for (const obj of dataObjects) {
      try {
        // Get all queryable fields for this object
        const describe = await this.client.describe(obj.name);
        const queryableFields = describe.fields
          .filter(field => field.type !== 'base64' && field.name !== 'Body') // Exclude binary fields
          .map(field => field.name)
          .slice(0, 20); // Limit to first 20 fields to avoid query complexity

        const fieldList = queryableFields.length > 0 ? queryableFields.join(', ') : 'Id, Name';
        // NOTE(review): sinceDate is interpolated into SOQL verbatim; it must
        // be a trusted SOQL datetime literal.
        const whereClause = sinceDate ? `WHERE LastModifiedDate > ${sinceDate}` : '';
        const soql = `SELECT ${fieldList} FROM ${obj.name} ${whereClause} LIMIT 1000`;

        const result = await this.client.query(soql);

        if (result.records && result.records.length > 0) {
          await fs.writeFile(
            path.join(backupDir, 'data', `${obj.name}.json`),
            JSON.stringify(result.records, null, 2)
          );

          backedUpObjects++;
          totalRecords += result.records.length;
          logger.log(`   ✅ ${obj.name}: ${result.records.length} records (${queryableFields.length} fields)`);
        } else {
          logger.log(`   ℹ️ ${obj.name}: No records found`);
        }

      } catch (error) {
        logger.warn(`   ⚠️ ${obj.name}: ${error.message}`);
      }
    }

    logger.log(`   📈 Summary: ${backedUpObjects} objects, ${totalRecords} total records`);
  }

  /**
   * Discover and download file attachments from the three file systems
   * enabled in options, then write a file-manifest.json with the results.
   */
  async backupFiles(backupDir, sinceDate) {
    const fileList = [];

    // 1. Modern Files (ContentVersion)
    if (this.options.includeFiles) {
      logger.log('   📁 Discovering ContentVersion files...');
      try {
        const whereClause = sinceDate ? `WHERE LastModifiedDate > ${sinceDate} AND` : 'WHERE';
        const query = `SELECT Id, Title, FileType, ContentSize FROM ContentVersion ${whereClause} IsLatest = true LIMIT 2000`;

        const result = await this.client.query(query);

        for (const cv of result.records) {
          fileList.push({
            type: 'ContentVersion',
            id: cv.Id,
            name: cv.Title,
            fileType: cv.FileType,
            size: cv.ContentSize,
            outputPath: path.join(backupDir, 'files', 'content-versions', `${cv.Id}.${cv.FileType || 'bin'}`)
          });
        }

        logger.log(`   ✅ Found ${result.records.length} ContentVersion files`);
      } catch (error) {
        logger.warn(`   ⚠️ ContentVersion query failed: ${error.message}`);
      }
    }

    // 2. Legacy Attachments
    if (this.options.includeAttachments) {
      logger.log('   📎 Discovering Attachment files...');
      try {
        const whereClause = sinceDate ? `WHERE LastModifiedDate > ${sinceDate}` : '';
        const query = `SELECT Id, Name, ContentType, BodyLength FROM Attachment ${whereClause} LIMIT 1000`;

        const result = await this.client.query(query);

        for (const att of result.records) {
          const extension = this.getFileExtension(att.ContentType);
          fileList.push({
            type: 'Attachment',
            id: att.Id,
            name: att.Name,
            contentType: att.ContentType,
            size: att.BodyLength,
            outputPath: path.join(backupDir, 'files', 'attachments', `${att.Id}${extension}`)
          });
        }

        logger.log(`   ✅ Found ${result.records.length} Attachment files`);
      } catch (error) {
        logger.warn(`   ⚠️ Attachment query failed: ${error.message}`);
      }
    }

    // 3. Documents
    if (this.options.includeDocuments) {
      logger.log('   📄 Discovering Document files...');
      try {
        const whereClause = sinceDate ? `WHERE LastModifiedDate > ${sinceDate}` : '';
        const query = `SELECT Id, Name, Type, BodyLength FROM Document ${whereClause} LIMIT 1000`;

        const result = await this.client.query(query);

        for (const doc of result.records) {
          const extension = this.getFileExtension(doc.Type);
          fileList.push({
            type: 'Document',
            id: doc.Id,
            name: doc.Name,
            contentType: doc.Type,
            size: doc.BodyLength,
            outputPath: path.join(backupDir, 'files', 'documents', `${doc.Id}${extension}`)
          });
        }

        logger.log(`   ✅ Found ${result.records.length} Document files`);
      } catch (error) {
        logger.warn(`   ⚠️ Document query failed: ${error.message}`);
      }
    }

    // Download all discovered files in parallel batches
    if (fileList.length > 0) {
      logger.log(`\n   📥 Downloading ${fileList.length} files...`);
      const downloadResults = await this.downloader.downloadFilesInParallel(fileList);

      const successful = downloadResults.filter(r => r.success).length;
      const failed = downloadResults.filter(r => !r.success).length;

      logger.log(`   ✅ Successfully downloaded: ${successful} files`);
      if (failed > 0) {
        logger.warn(`   ❌ Failed downloads: ${failed} files`);
      }

      // Save file manifest
      await fs.writeFile(
        path.join(backupDir, 'metadata', 'file-manifest.json'),
        JSON.stringify({
          totalFiles: fileList.length,
          downloadResults: downloadResults,
          stats: this.downloader.getStats()
        }, null, 2)
      );
    } else {
      logger.log('   ℹ️ No files found to backup');
    }
  }

  /**
   * Write backup-manifest.json summarizing the run.
   * @returns {Promise<object>} The manifest object (also written to disk);
   *   previously returned undefined, which callers could not use.
   */
  async createBackupManifest(backupDir, startTime) {
    const endTime = Date.now();
    const stats = this.downloader.getStats();

    const manifest = {
      backupInfo: {
        timestamp: new Date().toISOString(),
        type: 'comprehensive',
        duration: Math.round((endTime - startTime) / 1000),
        salesforceInstance: this.client.instanceUrl
      },
      options: this.options,
      downloadStats: stats,
      directories: {
        metadata: 'metadata/',
        data: 'data/',
        files: 'files/',
        logs: 'logs/'
      }
    };

    await fs.writeFile(
      path.join(backupDir, 'backup-manifest.json'),
      JSON.stringify(manifest, null, 2)
    );

    return manifest;
  }

  /**
   * Map a MIME content type to a file extension; unknown types get '.bin'.
   */
  getFileExtension(contentType) {
    const typeMap = {
      'application/pdf': '.pdf',
      'application/msword': '.doc',
      'application/vnd.openxmlformats-officedocument.wordprocessingml.document': '.docx',
      'application/vnd.ms-excel': '.xls',
      'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx',
      'image/jpeg': '.jpg',
      'image/png': '.png',
      'image/gif': '.gif',
      'text/plain': '.txt',
      'text/csv': '.csv'
    };

    return typeMap[contentType] || '.bin';
  }
}
612
+
613
/**
 * Asynchronous Backup Job Manager
 *
 * Handles non-blocking backup operations with progress tracking. Each job's
 * state lives in a JSON "lock" file (<jobId>.lock) in the backup output
 * directory; completed/failed locks are deleted after cleanupDelay ms.
 */
export class BackupJobManager {
  /**
   * @param {object} [options]
   * @param {boolean} [options.testMode] - Shortens lock-file cleanup delay.
   */
  constructor(options = {}) {
    this.cleanupDelay = options.testMode ? 100 : 5000; // Faster cleanup in test mode
  }

  /**
   * Start an asynchronous backup job. Writes an initial lock file, schedules
   * the real work via setImmediate, and returns immediately with job info.
   * @returns {Promise<{jobId: string, status: string, message: string, backupDirectory: string, lockFile: string}>}
   */
  async startBackupJob(salesforceClient, options = {}) {
    const jobId = this.generateJobId();
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    const outputDirectory = options.outputDirectory || 'backups';
    // Provisional directory; the actual directory is recorded in the lock
    // file once the backup starts (see executeBackupWithProgress).
    const backupDir = path.join(outputDirectory, `backup-${timestamp}`);
    const lockFilePath = path.join(outputDirectory, `${jobId}.lock`);

    // Ensure output directory exists before the lock file is written.
    await fs.mkdir(outputDirectory, { recursive: true });

    const lockFile = {
      jobId,
      startTime: new Date().toISOString(),
      status: 'starting',
      message: 'Initializing backup job...',
      progress: 0,
      backupDirectory: backupDir,
      options: {
        backupType: options.backupType || 'incremental',
        includeFiles: options.includeFiles !== false,
        includeAttachments: options.includeAttachments !== false,
        includeDocuments: options.includeDocuments !== false
      },
      pid: process.pid,
      lastUpdated: new Date().toISOString()
    };

    await fs.writeFile(lockFilePath, JSON.stringify(lockFile, null, 2));

    // Kick off the background work without blocking the caller. Bug fix: the
    // original .catch duplicated the error handling already performed (and
    // rethrown) by runBackgroundBackup — rewriting the lock file and
    // scheduling a second unlink. Here we only prevent an unhandled
    // rejection and log.
    setImmediate(() => {
      this.runBackgroundBackup(salesforceClient, jobId, backupDir, lockFilePath, options)
        .catch((error) => {
          logger.log(`Background backup ${jobId} failed: ${error.message}`);
        });
    });

    return {
      jobId,
      status: 'started',
      message: 'Backup job started successfully. Running in background.',
      backupDirectory: backupDir,
      lockFile: lockFilePath
    };
  }

  /**
   * Execute a backup with phase-by-phase progress written to the lock file
   * (metadata 10-30%, object data 30-60%, files 60-85%, manifest 85-100%).
   */
  async executeBackupWithProgress(backupManager, lockFilePath, options) {
    const startTime = Date.now();
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    const backupDir = path.join(backupManager.options.outputDirectory, `salesforce-backup-${timestamp}`);

    // Ensure base output directory exists, then the backup layout.
    await fs.mkdir(backupManager.options.outputDirectory, { recursive: true });
    await backupManager.createBackupStructure(backupDir);

    // Phase 1: metadata (10-30%). Bug fix: record the ACTUAL backup
    // directory, which differs from the provisional one written at job start.
    await this.updateLockFile(lockFilePath, {
      status: 'running',
      message: 'Backing up metadata...',
      progress: 10,
      backupDirectory: backupDir
    });

    await backupManager.backupMetadata(backupDir);

    await this.updateLockFile(lockFilePath, {
      message: 'Metadata backup completed',
      progress: 30
    });

    // Phase 2: object data (30-60%)
    await this.updateLockFile(lockFilePath, {
      message: 'Backing up object data...',
      progress: 35
    });

    await backupManager.backupObjectData(backupDir, options.sinceDate);

    await this.updateLockFile(lockFilePath, {
      message: 'Object data backup completed',
      progress: 60
    });

    // Phase 3: files, if any attachment source is enabled (60-85%)
    if (backupManager.options.includeFiles || backupManager.options.includeAttachments || backupManager.options.includeDocuments) {
      await this.updateLockFile(lockFilePath, {
        message: 'Downloading files...',
        progress: 65
      });

      await backupManager.backupFiles(backupDir, options.sinceDate);

      await this.updateLockFile(lockFilePath, {
        message: 'File downloads completed',
        progress: 85
      });
    } else {
      await this.updateLockFile(lockFilePath, {
        message: 'Skipping file downloads (disabled)',
        progress: 85
      });
    }

    // Phase 4: manifest (85-100%)
    await this.updateLockFile(lockFilePath, {
      message: 'Creating backup manifest...',
      progress: 90
    });

    await backupManager.createBackupManifest(backupDir, startTime);

    // Bug fix: the original read the (undefined) return value of
    // createBackupManifest; take the stats from the downloader, which is
    // where they actually accumulate.
    const stats = backupManager.downloader.getStats();
    const duration = Math.round((Date.now() - startTime) / 1000);

    return {
      success: true,
      backupDirectory: backupDir,
      duration: duration,
      stats: stats
    };
  }

  /**
   * Run the actual backup in the background, updating the lock file on
   * completion or failure. Rethrows failures after recording them.
   */
  async runBackgroundBackup(salesforceClient, jobId, backupDir, lockFilePath, options) {
    try {
      const backupManager = new SalesforceBackupManager(salesforceClient, {
        outputDirectory: path.dirname(backupDir),
        includeFiles: options.includeFiles !== false,
        includeAttachments: options.includeAttachments !== false,
        includeDocuments: options.includeDocuments !== false,
        compression: options.compression || false,
        parallelDownloads: options.parallelDownloads || 5,
        objectsFilter: options.objectsFilter || []
      });

      // Execute the backup with real progress tracking
      const result = await this.executeBackupWithProgress(backupManager, lockFilePath, options);

      // Mark the job complete in the lock file.
      await this.updateLockFile(lockFilePath, {
        status: 'completed',
        message: 'Backup completed successfully!',
        progress: 100,
        endTime: new Date().toISOString(),
        result: {
          backupDirectory: result.backupDirectory,
          duration: result.duration,
          stats: result.stats
        }
      });

      this.#scheduleLockCleanup(lockFilePath);
      return result;

    } catch (error) {
      // Record the failure, schedule cleanup, and rethrow for the caller.
      await this.updateLockFile(lockFilePath, {
        status: 'failed',
        message: `Backup failed: ${error.message}`,
        progress: 0,
        error: error.message,
        endTime: new Date().toISOString()
      });

      this.#scheduleLockCleanup(lockFilePath);
      throw error;
    }
  }

  /** Delete the lock file after cleanupDelay ms; deletion errors are ignored. */
  #scheduleLockCleanup(lockFilePath) {
    setTimeout(async () => {
      try {
        await fs.unlink(lockFilePath);
      } catch (error) {
        // Ignore cleanup errors
      }
    }, this.cleanupDelay);
  }

  /**
   * Merge updates into the lock file and refresh lastUpdated. Failures are
   * logged, never thrown — progress reporting must not kill the backup.
   */
  async updateLockFile(lockFilePath, updates) {
    try {
      const currentData = JSON.parse(await fs.readFile(lockFilePath, 'utf8'));
      const updatedData = {
        ...currentData,
        ...updates,
        lastUpdated: new Date().toISOString()
      };
      await fs.writeFile(lockFilePath, JSON.stringify(updatedData, null, 2));
    } catch (error) {
      logger.log(`Failed to update lock file ${lockFilePath}: ${error.message}`);
    }
  }

  /**
   * Get status of all backup jobs, newest first. Returns [] if the backup
   * directory does not exist; unreadable lock files are skipped.
   */
  async getJobStatuses(backupDir = 'backups') {
    try {
      const files = await fs.readdir(backupDir);
      const lockFiles = files.filter(file => file.endsWith('.lock'));

      const jobs = [];
      for (const lockFile of lockFiles) {
        try {
          const lockFilePath = path.join(backupDir, lockFile);
          const lockData = JSON.parse(await fs.readFile(lockFilePath, 'utf8'));
          jobs.push(lockData);
        } catch (error) {
          // Skip invalid lock files
        }
      }

      return jobs.sort((a, b) => new Date(b.startTime) - new Date(a.startTime));
    } catch (error) {
      return [];
    }
  }

  /** Get status of a specific job, or null if its lock file is missing/bad. */
  async getJobStatus(jobId, backupDir = 'backups') {
    try {
      const lockFilePath = path.join(backupDir, `${jobId}.lock`);
      return JSON.parse(await fs.readFile(lockFilePath, 'utf8'));
    } catch (error) {
      return null;
    }
  }

  /** Check whether a specific job is currently in the 'running' state. */
  async isJobRunning(jobId, backupDir = 'backups') {
    try {
      const lockFilePath = path.join(backupDir, `${jobId}.lock`);
      const lockData = JSON.parse(await fs.readFile(lockFilePath, 'utf8'));
      return lockData.status === 'running';
    } catch (error) {
      // If lock file doesn't exist or can't be read, job is not running
      return false;
    }
  }

  /**
   * Generate a timestamp-based job ID.
   * NOTE(review): two jobs started within the same millisecond would collide;
   * acceptable for this tool's usage pattern but worth confirming.
   */
  generateJobId() {
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    return `salesforce-backup-${timestamp}`;
  }

  /**
   * Delete lock files of completed/failed jobs older than maxAgeHours.
   * Jobs without an endTime are left alone (their age cannot be determined).
   */
  async cleanupOldJobs(backupDir = 'backups', maxAgeHours = 24) {
    try {
      const files = await fs.readdir(backupDir);
      const lockFiles = files.filter(file => file.endsWith('.lock'));
      const cutoffTime = Date.now() - (maxAgeHours * 60 * 60 * 1000);

      for (const lockFile of lockFiles) {
        try {
          const lockFilePath = path.join(backupDir, lockFile);
          const lockData = JSON.parse(await fs.readFile(lockFilePath, 'utf8'));

          // Clean up completed or failed jobs older than cutoff
          if ((lockData.status === 'completed' || lockData.status === 'failed') &&
              new Date(lockData.endTime).getTime() < cutoffTime) {
            await fs.unlink(lockFilePath);
          }
        } catch (error) {
          // Skip invalid lock files
        }
      }
    } catch (error) {
      // Ignore cleanup errors
    }
  }
}