s3db.js 9.1.0 → 9.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/PLUGINS.md +507 -0
- package/dist/s3db.cjs.js +1668 -8
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.es.js +1666 -9
- package/dist/s3db.es.js.map +1 -1
- package/package.json +1 -1
- package/src/plugins/backup.plugin.js +1018 -0
- package/src/plugins/cache/memory-cache.class.js +112 -3
- package/src/plugins/index.js +3 -0
- package/src/plugins/scheduler.plugin.js +834 -0
- package/src/plugins/state-machine.plugin.js +543 -0
package/src/plugins/backup.plugin.js (new file)

@@ -0,0 +1,1018 @@

```js
import Plugin from "./plugin.class.js";
import tryFn from "../concerns/try-fn.js";
import { createWriteStream, createReadStream } from 'fs';
import zlib from 'node:zlib';
import { pipeline } from 'stream/promises';
import { mkdir, writeFile, readFile, unlink, stat, readdir } from 'fs/promises';
import path from 'path';
import crypto from 'crypto';

/**
 * BackupPlugin - Automated Database Backup System
 *
 * Provides comprehensive backup functionality with multiple strategies,
 * retention policies, and restoration capabilities.
 *
 * === Features ===
 * - Full, incremental, and differential backups
 * - Multiple destination support (S3, filesystem, etc.)
 * - Configurable retention policies (GFS - Grandfather-Father-Son)
 * - Compression and encryption
 * - Backup verification and integrity checks
 * - Scheduled backups with cron expressions
 * - Parallel uploads for performance
 * - Backup metadata and restoration
 *
 * === Configuration Example ===
 *
 * new BackupPlugin({
 *   // Backup scheduling
 *   schedule: {
 *     full: '0 2 * * SUN',      // Sunday 2 AM - full backup
 *     incremental: '0 2 * * *'  // Daily 2 AM - incremental
 *   },
 *
 *   // Retention policy (Grandfather-Father-Son)
 *   retention: {
 *     daily: 7,     // Keep 7 daily backups
 *     weekly: 4,    // Keep 4 weekly backups
 *     monthly: 12,  // Keep 12 monthly backups
 *     yearly: 3     // Keep 3 yearly backups
 *   },
 *
 *   // Multiple backup destinations
 *   destinations: [
 *     {
 *       type: 's3',
 *       bucket: 'my-backups',
 *       path: 'database/{date}/',
 *       encryption: true,
 *       storageClass: 'STANDARD_IA'
 *     },
 *     {
 *       type: 'filesystem',
 *       path: '/var/backups/s3db/',
 *       compression: 'gzip'
 *     }
 *   ],
 *
 *   // Backup configuration
 *   compression: 'gzip',  // none, gzip, brotli, deflate
 *   encryption: {
 *     algorithm: 'AES-256-GCM',
 *     key: process.env.BACKUP_ENCRYPTION_KEY
 *   },
 *   verification: true,   // Verify backup integrity
 *   parallelism: 4,       // Parallel upload streams
 *
 *   // Resource filtering
 *   include: ['users', 'orders'],    // Only these resources
 *   exclude: ['temp_*', 'cache_*'],  // Exclude patterns
 *
 *   // Metadata
 *   backupMetadataResource: 'backup_metadata',
 *
 *   // Hooks
 *   onBackupStart: (type, config) => console.log(`Starting ${type} backup`),
 *   onBackupComplete: (type, stats) => notifySlack(`Backup complete: ${stats}`)
 * });
 */
export class BackupPlugin extends Plugin {
  constructor(options = {}) {
    super();

    this.config = {
      // Spread raw options first so the merged defaults below win;
      // spreading last would clobber the merged `retention` object
      ...options,
      schedule: options.schedule || {},
      retention: {
        daily: 7,
        weekly: 4,
        monthly: 12,
        yearly: 3,
        ...options.retention
      },
      destinations: options.destinations || [],
      compression: options.compression || 'gzip',
      encryption: options.encryption || null,
      verification: options.verification !== false,
      parallelism: options.parallelism || 4,
      include: options.include || null,
      exclude: options.exclude || [],
      backupMetadataResource: options.backupMetadataResource || 'backup_metadata',
      tempDir: options.tempDir || './tmp/backups',
      verbose: options.verbose || false,
      onBackupStart: options.onBackupStart || null,
      onBackupComplete: options.onBackupComplete || null,
      onBackupError: options.onBackupError || null
    };

    this.database = null;
    this.scheduledJobs = new Map();
    this.activeBackups = new Set();

    this._validateConfiguration();
  }

  _validateConfiguration() {
    if (this.config.destinations.length === 0) {
      throw new Error('BackupPlugin: At least one destination must be configured');
    }

    for (const dest of this.config.destinations) {
      if (!dest.type) {
        throw new Error('BackupPlugin: Each destination must have a type');
      }
    }

    if (this.config.encryption && (!this.config.encryption.key || !this.config.encryption.algorithm)) {
      throw new Error('BackupPlugin: Encryption requires both key and algorithm');
    }
  }
```
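Before the lifecycle methods, a minimal usage sketch. The wiring is an assumption based on how other s3db.js plugins are registered (the release also adds PLUGINS.md, which documents the canonical setup); the import paths, URI, and destination path are placeholders.

```js
import { S3db } from 's3db.js';
import { BackupPlugin } from 's3db.js'; // assumed export; see PLUGINS.md in this release

const backupPlugin = new BackupPlugin({
  destinations: [{ type: 'filesystem', path: '/var/backups/s3db/{date}/' }],
  compression: 'gzip',
  verbose: true
});

// Assumed registration via the `plugins` constructor option
const db = new S3db({
  uri: 's3://KEY:SECRET@bucket/prefix',
  plugins: [backupPlugin]
});

await db.connect();
const result = await backupPlugin.backup('full');
console.log(result.id, result.size, result.duration);
```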
```js
  async setup(database) {
    this.database = database;

    // Create backup metadata resource
    await this._createBackupMetadataResource();

    // Ensure temp directory exists
    await this._ensureTempDirectory();

    // Setup scheduled backups
    if (Object.keys(this.config.schedule).length > 0) {
      await this._setupScheduledBackups();
    }

    this.emit('initialized', {
      destinations: this.config.destinations.length,
      scheduled: Object.keys(this.config.schedule)
    });
  }

  async _createBackupMetadataResource() {
    const [ok] = await tryFn(() => this.database.createResource({
      name: this.config.backupMetadataResource,
      attributes: {
        id: 'string|required',
        type: 'string|required',
        timestamp: 'number|required',
        resources: 'json|required',
        destinations: 'json|required',
        size: 'number|default:0',
        compressed: 'boolean|default:false',
        encrypted: 'boolean|default:false',
        checksum: 'string|default:null',
        status: 'string|required',
        error: 'string|default:null',
        duration: 'number|default:0',
        createdAt: 'string|required'
      },
      behavior: 'body-overflow',
      partitions: {
        byType: { fields: { type: 'string' } },
        byDate: { fields: { createdAt: 'string|maxlength:10' } }
      }
    }));
  }

  async _ensureTempDirectory() {
    const [ok] = await tryFn(() => mkdir(this.config.tempDir, { recursive: true }));
  }

  async _setupScheduledBackups() {
    // This would integrate with SchedulerPlugin if available
    // For now, just log the scheduled backups
    if (this.config.verbose) {
      console.log('[BackupPlugin] Scheduled backups configured:', this.config.schedule);
    }
  }
```
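As the comments note, `_setupScheduledBackups` is a stub: cron expressions are accepted but only logged, pending integration with the SchedulerPlugin that also ships in this release. Until then, the schedule map can be driven externally; a sketch using the third-party node-cron package, which is not a dependency of s3db.js and is shown only as illustration:

```js
import cron from 'node-cron'; // external package, illustrative glue only

// Fire a backup for each configured cron expression,
// e.g. { full: '0 2 * * SUN', incremental: '0 2 * * *' }
for (const [type, expression] of Object.entries(backupPlugin.config.schedule)) {
  cron.schedule(expression, () => {
    backupPlugin.backup(type).catch(err =>
      console.error(`[BackupPlugin] scheduled ${type} backup failed:`, err.message)
    );
  });
}
```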
```js
  /**
   * Perform a backup
   */
  async backup(type = 'full', options = {}) {
    const backupId = `backup_${type}_${Date.now()}`;

    if (this.activeBackups.has(backupId)) {
      throw new Error(`Backup ${backupId} already in progress`);
    }

    this.activeBackups.add(backupId);

    try {
      const startTime = Date.now();

      // Execute onBackupStart hook
      if (this.config.onBackupStart) {
        await this._executeHook(this.config.onBackupStart, type, { backupId, ...options });
      }

      this.emit('backup_start', { id: backupId, type });

      // Create backup metadata record
      const metadata = await this._createBackupMetadata(backupId, type);

      // Get resources to backup
      const resources = await this._getResourcesToBackup();

      // Create temporary backup directory
      const tempBackupDir = path.join(this.config.tempDir, backupId);
      await mkdir(tempBackupDir, { recursive: true });

      let totalSize = 0;
      const resourceFiles = new Map();

      try {
        // Backup each resource
        for (const resourceName of resources) {
          const resourceData = await this._backupResource(resourceName, type);
          const filePath = path.join(tempBackupDir, `${resourceName}.json`);

          await writeFile(filePath, JSON.stringify(resourceData, null, 2));
          const stats = await stat(filePath);
          totalSize += stats.size;
          resourceFiles.set(resourceName, { path: filePath, size: stats.size });
        }

        // Create manifest
        const manifest = {
          id: backupId,
          type,
          timestamp: Date.now(),
          resources: Array.from(resourceFiles.keys()),
          totalSize,
          compression: this.config.compression,
          encryption: !!this.config.encryption
        };

        const manifestPath = path.join(tempBackupDir, 'manifest.json');
        await writeFile(manifestPath, JSON.stringify(manifest, null, 2));

        // Compress if enabled
        let finalPath = tempBackupDir;
        if (this.config.compression !== 'none') {
          finalPath = await this._compressBackup(tempBackupDir, backupId);
        }

        // Encrypt if enabled
        if (this.config.encryption) {
          finalPath = await this._encryptBackup(finalPath, backupId);
        }

        // Calculate checksum
        let checksum = null;
        if (this.config.compression !== 'none' || this.config.encryption) {
          // If compressed or encrypted, finalPath is a file
          checksum = await this._calculateChecksum(finalPath);
        } else {
          // If no compression/encryption, calculate checksum of manifest
          checksum = this._calculateManifestChecksum(manifest);
        }

        // Upload to destinations
        const uploadResults = await this._uploadToDestinations(finalPath, backupId, manifest);

        // Verify backup if enabled
        if (this.config.verification) {
          await this._verifyBackup(backupId, checksum);
        }

        const duration = Date.now() - startTime;

        // Update metadata
        await this._updateBackupMetadata(metadata.id, {
          status: 'completed',
          size: totalSize,
          checksum,
          destinations: uploadResults,
          duration
        });

        // Execute onBackupComplete hook
        if (this.config.onBackupComplete) {
          const stats = { backupId, type, size: totalSize, duration, destinations: uploadResults.length };
          await this._executeHook(this.config.onBackupComplete, type, stats);
        }

        this.emit('backup_complete', {
          id: backupId,
          type,
          size: totalSize,
          duration,
          destinations: uploadResults.length
        });

        // Cleanup retention
        await this._cleanupOldBackups();

        return {
          id: backupId,
          type,
          size: totalSize,
          duration,
          checksum,
          destinations: uploadResults
        };

      } finally {
        // Cleanup temporary files
        await this._cleanupTempFiles(tempBackupDir);
      }

    } catch (error) {
      // Execute onBackupError hook
      if (this.config.onBackupError) {
        await this._executeHook(this.config.onBackupError, type, { backupId, error });
      }

      this.emit('backup_error', { id: backupId, type, error: error.message });

      // Update metadata with error
      const [metadataOk] = await tryFn(() =>
        this.database.resource(this.config.backupMetadataResource)
          .update(backupId, { status: 'failed', error: error.message })
      );

      throw error;
    } finally {
      this.activeBackups.delete(backupId);
    }
  }
```
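`backup()` is promise-based but also emits lifecycle events; since the Plugin base class calls `emit` and `removeAllListeners`, it evidently behaves as an EventEmitter. A short sketch of observing a run through the event names used above:

```js
backupPlugin.on('backup_start', ({ id, type }) =>
  console.log(`started ${type} backup ${id}`));
backupPlugin.on('backup_complete', ({ id, size, duration }) =>
  console.log(`${id}: ${size} bytes in ${duration} ms`));
backupPlugin.on('backup_error', ({ id, error }) =>
  console.error(`${id} failed: ${error}`));

const { checksum, destinations } = await backupPlugin.backup('incremental');
```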
```js
  async _createBackupMetadata(backupId, type) {
    const now = new Date().toISOString();
    const metadata = {
      id: backupId,
      type,
      timestamp: Date.now(),
      resources: [],
      destinations: [],
      size: 0,
      status: 'in_progress',
      compressed: this.config.compression !== 'none',
      encrypted: !!this.config.encryption,
      checksum: null,
      error: null,
      duration: 0,
      createdAt: now.slice(0, 10)
    };

    await this.database.resource(this.config.backupMetadataResource).insert(metadata);
    return metadata;
  }

  async _updateBackupMetadata(backupId, updates) {
    const [ok] = await tryFn(() =>
      this.database.resource(this.config.backupMetadataResource).update(backupId, updates)
    );
  }

  async _getResourcesToBackup() {
    const allResources = Object.keys(this.database.resources);

    let resources = allResources;

    // Apply include filter
    if (this.config.include && this.config.include.length > 0) {
      resources = resources.filter(name => this.config.include.includes(name));
    }

    // Apply exclude filter
    if (this.config.exclude && this.config.exclude.length > 0) {
      resources = resources.filter(name => {
        return !this.config.exclude.some(pattern => {
          if (pattern.includes('*')) {
            // Anchor the glob so 'temp_*' matches 'temp_x' but not 'my_temp_x'
            const regex = new RegExp(`^${pattern.replace(/\*/g, '.*')}$`);
            return regex.test(name);
          }
          return name === pattern;
        });
      });
    }

    // Exclude backup metadata resource
    resources = resources.filter(name => name !== this.config.backupMetadataResource);

    return resources;
  }

  async _backupResource(resourceName, type) {
    const resource = this.database.resources[resourceName];
    if (!resource) {
      throw new Error(`Resource '${resourceName}' not found`);
    }

    // For full backup, get all data
    if (type === 'full') {
      const [ok, err, data] = await tryFn(() => resource.list({ limit: 999999 }));
      if (!ok) throw err;

      return {
        resource: resourceName,
        type: 'full',
        data,
        count: data.length,
        config: resource.config
      };
    }

    // For incremental backup, get changes since last backup
    if (type === 'incremental') {
      const lastBackup = await this._getLastBackup('incremental');
      const since = lastBackup ? lastBackup.timestamp : 0;

      // This would need audit plugin integration to get changes since timestamp
      // For now, fall back to full backup
      const [ok, err, data] = await tryFn(() => resource.list({ limit: 999999 }));
      if (!ok) throw err;

      return {
        resource: resourceName,
        type: 'incremental',
        data,
        count: data.length,
        since,
        config: resource.config
      };
    }

    throw new Error(`Backup type '${type}' not supported`);
  }

  async _getLastBackup(type) {
    const [ok, err, backups] = await tryFn(() =>
      this.database.resource(this.config.backupMetadataResource).list({
        where: { type, status: 'completed' },
        orderBy: { timestamp: 'desc' },
        limit: 1
      })
    );

    return ok && backups.length > 0 ? backups[0] : null;
  }
```
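The exclude entries are simple `*` globs. The anchoring in `_getResourcesToBackup` matters: an unanchored `temp_.*` regex would also exclude any resource whose name merely contains `temp_`. In isolation:

```js
const anchored = name => /^temp_.*$/.test(name); // pattern 'temp_*', anchored
const unanchored = name => /temp_.*/.test(name); // substring behavior

console.log(anchored('temp_sessions'));   // true  -> excluded, as intended
console.log(anchored('my_temp_cache'));   // false -> kept
console.log(unanchored('my_temp_cache')); // true  -> would have been excluded
```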
```js
  async _compressBackup(backupDir, backupId) {
    // Note: despite the .tar.gz name, the payload is base64-encoded JSON,
    // not a real tar archive (see the matching logic in _decompressBackup)
    const compressedPath = `${backupDir}.tar.gz`;

    try {
      // Read all files in backup directory
      const files = await this._getDirectoryFiles(backupDir);
      const backupData = {};

      // Read all files into memory for compression
      for (const file of files) {
        const filePath = path.join(backupDir, file);
        const content = await readFile(filePath, 'utf8');
        backupData[file] = content;
      }

      // Serialize and compress using zlib (same pattern as cache plugins)
      const serialized = JSON.stringify(backupData);
      const originalSize = Buffer.byteLength(serialized, 'utf8');

      // Compress using specified algorithm
      let compressedBuffer;

      switch (this.config.compression) {
        case 'gzip':
          compressedBuffer = zlib.gzipSync(Buffer.from(serialized, 'utf8'));
          break;
        case 'brotli':
          compressedBuffer = zlib.brotliCompressSync(Buffer.from(serialized, 'utf8'));
          break;
        case 'deflate':
          compressedBuffer = zlib.deflateSync(Buffer.from(serialized, 'utf8'));
          break;
        case 'none':
          compressedBuffer = Buffer.from(serialized, 'utf8');
          break;
        default:
          throw new Error(`Unsupported compression type: ${this.config.compression}`);
      }

      const compressedData = this.config.compression !== 'none'
        ? compressedBuffer.toString('base64')
        : serialized;

      // Write compressed data
      await writeFile(compressedPath, compressedData, 'utf8');

      // Log compression stats
      const compressedSize = Buffer.byteLength(compressedData, 'utf8');
      const compressionRatio = (compressedSize / originalSize * 100).toFixed(2);

      if (this.config.verbose) {
        console.log(`[BackupPlugin] Compressed ${originalSize} bytes to ${compressedSize} bytes (${compressionRatio}% of original)`);
      }

      return compressedPath;
    } catch (error) {
      throw new Error(`Failed to compress backup: ${error.message}`);
    }
  }
```
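Since the artifact written above is base64 text rather than a binary tar stream, the storage format round-trips with plain zlib calls. A self-contained sketch of the same shape:

```js
import zlib from 'node:zlib';

const files = { 'users.json': '[{"id":"u1"}]', 'manifest.json': '{"id":"b1"}' };

// Same shape as _compressBackup: JSON -> gzip -> base64 text
const stored = zlib.gzipSync(Buffer.from(JSON.stringify(files), 'utf8')).toString('base64');

// Inverse, as _decompressBackup performs it on restore
const restored = JSON.parse(zlib.gunzipSync(Buffer.from(stored, 'base64')).toString('utf8'));
console.log(Object.keys(restored)); // [ 'users.json', 'manifest.json' ]
```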
```js
  async _encryptBackup(filePath, backupId) {
    if (!this.config.encryption) return filePath;

    const encryptedPath = `${filePath}.enc`;
    const { algorithm, key } = this.config.encryption;

    // Use an explicit random IV with createCipheriv (crypto.createCipher is
    // deprecated). The IV is prepended to the output so restore can recover
    // it. GCM modes additionally require persisting the auth tag. Assumes
    // `key` has the byte length the algorithm requires (32 bytes for AES-256).
    const iv = crypto.randomBytes(16);
    const cipher = crypto.createCipheriv(algorithm, key, iv);
    const input = createReadStream(filePath);
    const output = createWriteStream(encryptedPath);

    output.write(iv);
    await pipeline(input, cipher, output);

    // Remove unencrypted file
    await unlink(filePath);

    return encryptedPath;
  }

  async _calculateChecksum(filePath) {
    const hash = crypto.createHash('sha256');
    const input = createReadStream(filePath);

    return new Promise((resolve, reject) => {
      input.on('data', data => hash.update(data));
      input.on('end', () => resolve(hash.digest('hex')));
      input.on('error', reject);
    });
  }

  _calculateManifestChecksum(manifest) {
    const hash = crypto.createHash('sha256');
    hash.update(JSON.stringify(manifest));
    return hash.digest('hex');
  }
```
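The restore path's `_decryptBackup` is an empty stub in this release, so the inverse of the prepended-IV scheme above does not ship yet. A sketch of what it could look like for non-GCM ciphers, under the same convention; the function name and signature are illustrative:

```js
import crypto from 'crypto';
import { open } from 'fs/promises';
import { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';

// Hypothetical inverse of _encryptBackup: the first 16 bytes are the IV.
async function decryptBackupFile(encryptedPath, outPath, algorithm, key) {
  const handle = await open(encryptedPath, 'r');
  const iv = Buffer.alloc(16);
  await handle.read(iv, 0, 16, 0);
  await handle.close();

  const decipher = crypto.createDecipheriv(algorithm, key, iv);
  const input = createReadStream(encryptedPath, { start: 16 }); // skip the IV
  await pipeline(input, decipher, createWriteStream(outPath));
}
```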
```js
  async _copyDirectory(src, dest) {
    await mkdir(dest, { recursive: true });
    const entries = await readdir(src, { withFileTypes: true });

    for (const entry of entries) {
      const srcPath = path.join(src, entry.name);
      const destPath = path.join(dest, entry.name);

      if (entry.isDirectory()) {
        await this._copyDirectory(srcPath, destPath);
      } else {
        const input = createReadStream(srcPath);
        const output = createWriteStream(destPath);
        await pipeline(input, output);
      }
    }
  }

  async _getDirectorySize(dirPath) {
    let totalSize = 0;
    const entries = await readdir(dirPath, { withFileTypes: true });

    for (const entry of entries) {
      const entryPath = path.join(dirPath, entry.name);

      if (entry.isDirectory()) {
        totalSize += await this._getDirectorySize(entryPath);
      } else {
        const stats = await stat(entryPath);
        totalSize += stats.size;
      }
    }

    return totalSize;
  }

  async _uploadToDestinations(filePath, backupId, manifest) {
    const results = [];
    let hasSuccess = false;

    for (const destination of this.config.destinations) {
      const [ok, err, result] = await tryFn(() =>
        this._uploadToDestination(filePath, backupId, manifest, destination)
      );

      if (ok) {
        results.push({ ...destination, ...result, status: 'success' });
        hasSuccess = true;
      } else {
        results.push({ ...destination, status: 'failed', error: err.message });
        if (this.config.verbose) {
          console.warn(`[BackupPlugin] Upload to ${destination.type} failed:`, err.message);
        }
      }
    }

    // If no destinations succeeded, throw error
    if (!hasSuccess) {
      const errors = results.map(r => r.error).join('; ');
      throw new Error(`All backup destinations failed: ${errors}`);
    }

    return results;
  }

  async _uploadToDestination(filePath, backupId, manifest, destination) {
    if (destination.type === 'filesystem') {
      return this._uploadToFilesystem(filePath, backupId, destination);
    }

    if (destination.type === 's3') {
      return this._uploadToS3(filePath, backupId, destination);
    }

    throw new Error(`Destination type '${destination.type}' not supported`);
  }

  async _uploadToFilesystem(filePath, backupId, destination) {
    const destDir = destination.path.replace('{date}', new Date().toISOString().slice(0, 10));
    await mkdir(destDir, { recursive: true });

    const stats = await stat(filePath);

    if (stats.isDirectory()) {
      // Copy entire directory
      const destPath = path.join(destDir, backupId);
      await this._copyDirectory(filePath, destPath);

      const dirStats = await this._getDirectorySize(destPath);

      return {
        path: destPath,
        size: dirStats,
        uploadedAt: new Date().toISOString()
      };
    } else {
      // Copy single file
      const fileName = path.basename(filePath);
      const destPath = path.join(destDir, fileName);

      const input = createReadStream(filePath);
      const output = createWriteStream(destPath);

      await pipeline(input, output);

      const fileStats = await stat(destPath);

      return {
        path: destPath,
        size: fileStats.size,
        uploadedAt: new Date().toISOString()
      };
    }
  }

  async _uploadToS3(filePath, backupId, destination) {
    // This would integrate with S3 client
    // For now, simulate the upload
    const key = destination.path
      .replace('{date}', new Date().toISOString().slice(0, 10))
      .replace('{backupId}', backupId) + path.basename(filePath);

    // Simulated upload
    await new Promise(resolve => setTimeout(resolve, 1000));

    return {
      bucket: destination.bucket,
      key,
      uploadedAt: new Date().toISOString()
    };
  }

  async _verifyBackup(backupId, expectedChecksum) {
    // Verify backup integrity by re-downloading and checking checksum
    // Implementation depends on destinations
    if (this.config.verbose) {
      console.log(`[BackupPlugin] Verifying backup ${backupId} with checksum ${expectedChecksum}`);
    }
  }
```
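`_verifyBackup` currently only logs. For a filesystem destination, the verification it promises reduces to recomputing the SHA-256 that `_calculateChecksum` produced over the stored artifact; a sketch, where `uploadResult.path` stands in for the value returned by `_uploadToFilesystem`:

```js
import crypto from 'crypto';
import { createReadStream } from 'fs';

function sha256File(filePath) {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash('sha256');
    createReadStream(filePath)
      .on('data', chunk => hash.update(chunk))
      .on('end', () => resolve(hash.digest('hex')))
      .on('error', reject);
  });
}

const actual = await sha256File(uploadResult.path); // illustrative variable
if (actual !== expectedChecksum) {
  throw new Error('Backup verification failed: checksum mismatch');
}
```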
```js
  async _cleanupOldBackups() {
    const retention = this.config.retention;
    const now = new Date();

    // Get all completed backups
    const [ok, err, allBackups] = await tryFn(() =>
      this.database.resource(this.config.backupMetadataResource).list({
        where: { status: 'completed' },
        orderBy: { timestamp: 'desc' }
      })
    );

    if (!ok) return;

    const toDelete = [];

    // Group backups by type and age
    const groups = {
      daily: [],
      weekly: [],
      monthly: [],
      yearly: []
    };

    for (const backup of allBackups) {
      const backupDate = new Date(backup.timestamp);
      const age = Math.floor((now - backupDate) / (1000 * 60 * 60 * 24)); // days

      if (age < 7) groups.daily.push(backup);
      else if (age < 30) groups.weekly.push(backup);
      else if (age < 365) groups.monthly.push(backup);
      else groups.yearly.push(backup);
    }

    // Apply retention policies
    if (groups.daily.length > retention.daily) {
      toDelete.push(...groups.daily.slice(retention.daily));
    }
    if (groups.weekly.length > retention.weekly) {
      toDelete.push(...groups.weekly.slice(retention.weekly));
    }
    if (groups.monthly.length > retention.monthly) {
      toDelete.push(...groups.monthly.slice(retention.monthly));
    }
    if (groups.yearly.length > retention.yearly) {
      toDelete.push(...groups.yearly.slice(retention.yearly));
    }

    // Delete old backups
    for (const backup of toDelete) {
      await this._deleteBackup(backup);
    }

    if (toDelete.length > 0) {
      this.emit('cleanup_complete', { deleted: toDelete.length });
    }
  }

  async _deleteBackup(backup) {
    // Delete from destinations
    for (const dest of backup.destinations || []) {
      const [ok] = await tryFn(() => this._deleteFromDestination(backup, dest));
    }

    // Delete metadata
    const [ok] = await tryFn(() =>
      this.database.resource(this.config.backupMetadataResource).delete(backup.id)
    );
  }

  async _deleteFromDestination(backup, destination) {
    // Implementation depends on destination type
    if (this.config.verbose) {
      console.log(`[BackupPlugin] Deleting backup ${backup.id} from ${destination.type}`);
    }
  }

  async _cleanupTempFiles(tempDir) {
    const [ok] = await tryFn(async () => {
      const files = await this._getDirectoryFiles(tempDir);
      for (const file of files) {
        // _getDirectoryFiles returns bare names, so join with the directory
        await unlink(path.join(tempDir, file));
      }
      // Note: rmdir would require recursive removal
    });
  }

  async _getDirectoryFiles(dir) {
    // Top-level listing only; nested directories are not traversed
    const [ok, err, entries] = await tryFn(() => readdir(dir));
    return ok ? entries : [];
  }

  async _executeHook(hook, ...args) {
    if (typeof hook === 'function') {
      const [ok, err] = await tryFn(() => hook(...args));
      if (!ok && this.config.verbose) {
        console.warn('[BackupPlugin] Hook execution failed:', err.message);
      }
    }
  }
```
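To make the retention tiers concrete: a backup's age in days picks its bucket, then each bucket is truncated to the configured cap (newest first, since the list is ordered by timestamp descending). The same thresholds in isolation:

```js
// Thresholds as in _cleanupOldBackups
const tierOf = ageDays =>
  ageDays < 7 ? 'daily' : ageDays < 30 ? 'weekly' : ageDays < 365 ? 'monthly' : 'yearly';

console.log(tierOf(3));   // 'daily'   -> capped by retention.daily   (default 7)
console.log(tierOf(12));  // 'weekly'  -> capped by retention.weekly  (default 4)
console.log(tierOf(90));  // 'monthly' -> capped by retention.monthly (default 12)
console.log(tierOf(400)); // 'yearly'  -> capped by retention.yearly  (default 3)
```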
```js
  /**
   * Restore from backup
   */
  async restore(backupId, options = {}) {
    const { overwrite = false, resources = null } = options;

    // Get backup metadata
    const [ok, err, backup] = await tryFn(() =>
      this.database.resource(this.config.backupMetadataResource).get(backupId)
    );

    if (!ok || !backup) {
      throw new Error(`Backup '${backupId}' not found`);
    }

    if (backup.status !== 'completed') {
      throw new Error(`Backup '${backupId}' is not in completed status`);
    }

    this.emit('restore_start', { backupId });

    // Download backup files
    const tempDir = path.join(this.config.tempDir, `restore_${backupId}`);
    await mkdir(tempDir, { recursive: true });

    try {
      // Download from first available destination
      await this._downloadBackup(backup, tempDir);

      // Decrypt if needed
      if (backup.encrypted) {
        await this._decryptBackup(tempDir);
      }

      // Decompress if needed
      if (backup.compressed) {
        await this._decompressBackup(tempDir);
      }

      // Read manifest
      const manifestPath = path.join(tempDir, 'manifest.json');
      const manifest = JSON.parse(await readFile(manifestPath, 'utf-8'));

      // Restore resources
      const resourcesToRestore = resources || manifest.resources;
      const restored = [];

      for (const resourceName of resourcesToRestore) {
        const resourcePath = path.join(tempDir, `${resourceName}.json`);
        const resourceData = JSON.parse(await readFile(resourcePath, 'utf-8'));

        await this._restoreResource(resourceName, resourceData, overwrite);
        restored.push(resourceName);
      }

      this.emit('restore_complete', { backupId, restored });

      return { backupId, restored };

    } finally {
      await this._cleanupTempFiles(tempDir);
    }
  }

  async _downloadBackup(backup, tempDir) {
    // Download from first successful destination
    for (const dest of backup.destinations) {
      const [ok] = await tryFn(() => this._downloadFromDestination(backup, dest, tempDir));
      if (ok) return;
    }

    throw new Error('Failed to download backup from any destination');
  }

  async _downloadFromDestination(backup, destination, tempDir) {
    // Implementation depends on destination type
    if (this.config.verbose) {
      console.log(`[BackupPlugin] Downloading backup ${backup.id} from ${destination.type}`);
    }
  }

  async _decryptBackup(tempDir) {
    // Decrypt backup files (stub: not implemented in this release)
  }

  async _decompressBackup(tempDir) {
    try {
      // Find compressed backup file
      const files = await readdir(tempDir);
      const compressedFile = files.find(f => f.endsWith('.tar.gz'));

      if (!compressedFile) {
        throw new Error('No compressed backup file found');
      }

      const compressedPath = path.join(tempDir, compressedFile);

      // Read compressed data
      const compressedData = await readFile(compressedPath, 'utf8');

      // Look up the metadata record for this backup to determine the
      // compression type (getBackupStatus performs that lookup)
      const backupId = path.basename(compressedFile, '.tar.gz');
      const backup = await this.getBackupStatus(backupId);
      const compressionType = backup?.compression || 'gzip';

      // Decompress using appropriate algorithm
      let decompressed;

      if (compressionType === 'none') {
        decompressed = compressedData;
      } else {
        const compressedBuffer = Buffer.from(compressedData, 'base64');

        switch (compressionType) {
          case 'gzip':
            decompressed = zlib.gunzipSync(compressedBuffer).toString('utf8');
            break;
          case 'brotli':
            decompressed = zlib.brotliDecompressSync(compressedBuffer).toString('utf8');
            break;
          case 'deflate':
            decompressed = zlib.inflateSync(compressedBuffer).toString('utf8');
            break;
          default:
            throw new Error(`Unsupported compression type: ${compressionType}`);
        }
      }

      // Parse decompressed data
      const backupData = JSON.parse(decompressed);

      // Write individual files back to temp directory
      for (const [filename, content] of Object.entries(backupData)) {
        const filePath = path.join(tempDir, filename);
        await writeFile(filePath, content, 'utf8');
      }

      // Remove compressed file
      await unlink(compressedPath);

      if (this.config.verbose) {
        console.log(`[BackupPlugin] Decompressed backup with ${Object.keys(backupData).length} files`);
      }
    } catch (error) {
      throw new Error(`Failed to decompress backup: ${error.message}`);
    }
  }

  async _restoreResource(resourceName, resourceData, overwrite) {
    let resource = this.database.resources[resourceName];
    if (!resource) {
      // Create resource from backup config, then pick up the new handle
      await this.database.createResource(resourceData.config);
      resource = this.database.resources[resourceName];
    }

    // Insert data
    for (const record of resourceData.data) {
      if (overwrite) {
        await resource.upsert(record.id, record);
      } else {
        const [ok] = await tryFn(() => resource.insert(record));
      }
    }
  }
```
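Restoration mirrors backup. A short usage sketch; the backup id would come from a previous `backup()` result or from `listBackups()` below, and the one shown here is a placeholder:

```js
const { restored } = await backupPlugin.restore('backup_full_1700000000000', {
  overwrite: true,      // upsert records into existing resources
  resources: ['users']  // optional subset; defaults to the manifest's resource list
});
console.log('restored:', restored); // ['users']
```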
```js
  /**
   * List available backups
   */
  async listBackups(options = {}) {
    const { type = null, status = null, limit = 50 } = options;

    const [ok, err, allBackups] = await tryFn(() =>
      this.database.resource(this.config.backupMetadataResource).list({
        orderBy: { timestamp: 'desc' },
        limit: limit * 2 // Get more to filter client-side
      })
    );

    if (!ok) return [];

    // Filter client-side to ensure it works
    let filteredBackups = allBackups;

    if (type) {
      filteredBackups = filteredBackups.filter(backup => backup.type === type);
    }

    if (status) {
      filteredBackups = filteredBackups.filter(backup => backup.status === status);
    }

    return filteredBackups.slice(0, limit);
  }

  /**
   * Get backup status
   */
  async getBackupStatus(backupId) {
    const [ok, err, backup] = await tryFn(() =>
      this.database.resource(this.config.backupMetadataResource).get(backupId)
    );

    return ok ? backup : null;
  }

  async start() {
    if (this.config.verbose) {
      console.log(`[BackupPlugin] Started with ${this.config.destinations.length} destinations`);
    }
  }

  async stop() {
    // Cancel any active backups
    for (const backupId of this.activeBackups) {
      this.emit('backup_cancelled', { id: backupId });
    }
    this.activeBackups.clear();
  }

  async cleanup() {
    await this.stop();
    this.removeAllListeners();
  }
}

export default BackupPlugin;
```
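Finally, the query side, useful for dashboards or auditing what the retention pass has kept:

```js
// Ten most recent completed full backups, newest first
const fulls = await backupPlugin.listBackups({ type: 'full', status: 'completed', limit: 10 });
for (const b of fulls) {
  console.log(b.id, new Date(b.timestamp).toISOString(), `${b.size} bytes`, b.checksum);
}
```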