s3db.js 9.1.0 → 9.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/PLUGINS.md +871 -13
- package/README.md +31 -2
- package/dist/s3db.cjs.js +3867 -1615
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.es.js +3870 -1621
- package/dist/s3db.es.js.map +1 -1
- package/package.json +5 -5
- package/src/concerns/async-event-emitter.js +46 -0
- package/src/database.class.js +23 -0
- package/src/plugins/backup/base-backup-driver.class.js +119 -0
- package/src/plugins/backup/filesystem-backup-driver.class.js +254 -0
- package/src/plugins/backup/index.js +85 -0
- package/src/plugins/backup/multi-backup-driver.class.js +304 -0
- package/src/plugins/backup/s3-backup-driver.class.js +313 -0
- package/src/plugins/backup.plugin.js +664 -0
- package/src/plugins/backup.plugin.js.backup +1026 -0
- package/src/plugins/cache/memory-cache.class.js +112 -3
- package/src/plugins/index.js +3 -0
- package/src/plugins/scheduler.plugin.js +833 -0
- package/src/plugins/state-machine.plugin.js +543 -0
- package/src/resource.class.js +9 -6
|
@@ -0,0 +1,664 @@
|
|
|
1
|
+
import Plugin from "./plugin.class.js";
import tryFn from "../concerns/try-fn.js";
import { createBackupDriver, validateBackupConfig } from "./backup/index.js";
import { createWriteStream, createReadStream } from 'fs';
import { mkdir, writeFile, readFile, unlink, stat, readdir, rm } from 'fs/promises';
import { pipeline } from 'stream/promises';
import zlib from 'node:zlib';
import path from 'path';
import crypto from 'crypto';
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* BackupPlugin - Automated Database Backup System
|
|
13
|
+
*
|
|
14
|
+
* Provides comprehensive backup functionality with configurable drivers,
|
|
15
|
+
* retention policies, and restoration capabilities.
|
|
16
|
+
*
|
|
17
|
+
* === Driver-Based Architecture ===
|
|
18
|
+
* Uses the standard S3DB plugin driver pattern:
|
|
19
|
+
* - driver: Driver type (filesystem, s3, multi)
|
|
20
|
+
* - config: Driver-specific configuration
|
|
21
|
+
*
|
|
22
|
+
* === Configuration Examples ===
|
|
23
|
+
*
|
|
24
|
+
* // Filesystem backup
|
|
25
|
+
* new BackupPlugin({
|
|
26
|
+
* driver: 'filesystem',
|
|
27
|
+
* config: {
|
|
28
|
+
* path: '/var/backups/s3db/{date}/',
|
|
29
|
+
* compression: 'gzip'
|
|
30
|
+
* }
|
|
31
|
+
* });
|
|
32
|
+
*
|
|
33
|
+
* // S3 backup
|
|
34
|
+
* new BackupPlugin({
|
|
35
|
+
* driver: 's3',
|
|
36
|
+
* config: {
|
|
37
|
+
* bucket: 'my-backup-bucket',
|
|
38
|
+
* path: 'database/{date}/',
|
|
39
|
+
* storageClass: 'STANDARD_IA'
|
|
40
|
+
* }
|
|
41
|
+
* });
|
|
42
|
+
*
|
|
43
|
+
* // Multiple destinations
|
|
44
|
+
* new BackupPlugin({
|
|
45
|
+
* driver: 'multi',
|
|
46
|
+
* config: {
|
|
47
|
+
* strategy: 'all', // 'all', 'any', 'priority'
|
|
48
|
+
* destinations: [
|
|
49
|
+
* {
|
|
50
|
+
* driver: 'filesystem',
|
|
51
|
+
* config: { path: '/var/backups/s3db/' }
|
|
52
|
+
* },
|
|
53
|
+
* {
|
|
54
|
+
* driver: 's3',
|
|
55
|
+
* config: {
|
|
56
|
+
* bucket: 'remote-backups',
|
|
57
|
+
* storageClass: 'GLACIER'
|
|
58
|
+
* }
|
|
59
|
+
* }
|
|
60
|
+
* ]
|
|
61
|
+
* }
|
|
62
|
+
* });
|
|
63
|
+
*
|
|
64
|
+
* === Additional Plugin Options ===
|
|
65
|
+
* - schedule: Cron expressions for automated backups
|
|
66
|
+
* - retention: Backup retention policy (GFS)
|
|
67
|
+
* - compression: Compression type (gzip, brotli, none)
|
|
68
|
+
* - encryption: Encryption configuration
|
|
69
|
+
* - verification: Enable backup verification
|
|
70
|
+
* - backupMetadataResource: Resource name for metadata
|
|
71
|
+
*/
|
|
72
|
+
export class BackupPlugin extends Plugin {
|
|
73
|
+
constructor(options = {}) {
|
|
74
|
+
super();
|
|
75
|
+
|
|
76
|
+
// Extract driver configuration
|
|
77
|
+
this.driverName = options.driver || 'filesystem';
|
|
78
|
+
this.driverConfig = options.config || {};
|
|
79
|
+
|
|
80
|
+
this.config = {
|
|
81
|
+
// Legacy destinations support (will be converted to multi driver)
|
|
82
|
+
destinations: options.destinations || null,
|
|
83
|
+
|
|
84
|
+
// Scheduling configuration
|
|
85
|
+
schedule: options.schedule || {},
|
|
86
|
+
|
|
87
|
+
// Retention policy (Grandfather-Father-Son)
|
|
88
|
+
retention: {
|
|
89
|
+
daily: 7,
|
|
90
|
+
weekly: 4,
|
|
91
|
+
monthly: 12,
|
|
92
|
+
yearly: 3,
|
|
93
|
+
...options.retention
|
|
94
|
+
},
|
|
95
|
+
|
|
96
|
+
// Backup options
|
|
97
|
+
compression: options.compression || 'gzip',
|
|
98
|
+
encryption: options.encryption || null,
|
|
99
|
+
verification: options.verification !== false,
|
|
100
|
+
parallelism: options.parallelism || 4,
|
|
101
|
+
include: options.include || null,
|
|
102
|
+
exclude: options.exclude || [],
|
|
103
|
+
backupMetadataResource: options.backupMetadataResource || 'backup_metadata',
|
|
104
|
+
tempDir: options.tempDir || './tmp/backups',
|
|
105
|
+
verbose: options.verbose || false,
|
|
106
|
+
|
|
107
|
+
// Hooks
|
|
108
|
+
onBackupStart: options.onBackupStart || null,
|
|
109
|
+
onBackupComplete: options.onBackupComplete || null,
|
|
110
|
+
onBackupError: options.onBackupError || null,
|
|
111
|
+
onRestoreStart: options.onRestoreStart || null,
|
|
112
|
+
onRestoreComplete: options.onRestoreComplete || null,
|
|
113
|
+
onRestoreError: options.onRestoreError || null
|
|
114
|
+
};
|
|
115
|
+
|
|
116
|
+
this.driver = null;
|
|
117
|
+
this.activeBackups = new Set();
|
|
118
|
+
|
|
119
|
+
// Handle legacy destinations format
|
|
120
|
+
this._handleLegacyDestinations();
|
|
121
|
+
|
|
122
|
+
// Validate driver configuration (after legacy conversion)
|
|
123
|
+
validateBackupConfig(this.driverName, this.driverConfig);
|
|
124
|
+
|
|
125
|
+
this._validateConfiguration();
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
/**
|
|
129
|
+
* Convert legacy destinations format to multi driver format
|
|
130
|
+
*/
|
|
131
|
+
_handleLegacyDestinations() {
|
|
132
|
+
if (this.config.destinations && Array.isArray(this.config.destinations)) {
|
|
133
|
+
// Convert legacy format to multi driver
|
|
134
|
+
this.driverName = 'multi';
|
|
135
|
+
this.driverConfig = {
|
|
136
|
+
strategy: 'all',
|
|
137
|
+
destinations: this.config.destinations.map(dest => {
|
|
138
|
+
const { type, ...config } = dest; // Extract type and get the rest as config
|
|
139
|
+
return {
|
|
140
|
+
driver: type,
|
|
141
|
+
config
|
|
142
|
+
};
|
|
143
|
+
})
|
|
144
|
+
};
|
|
145
|
+
|
|
146
|
+
// Clear legacy destinations
|
|
147
|
+
this.config.destinations = null;
|
|
148
|
+
|
|
149
|
+
if (this.config.verbose) {
|
|
150
|
+
console.log('[BackupPlugin] Converted legacy destinations format to multi driver');
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
_validateConfiguration() {
|
|
156
|
+
// Driver validation is done in constructor
|
|
157
|
+
|
|
158
|
+
if (this.config.encryption && (!this.config.encryption.key || !this.config.encryption.algorithm)) {
|
|
159
|
+
throw new Error('BackupPlugin: Encryption requires both key and algorithm');
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
if (this.config.compression && !['none', 'gzip', 'brotli', 'deflate'].includes(this.config.compression)) {
|
|
163
|
+
throw new Error('BackupPlugin: Invalid compression type. Use: none, gzip, brotli, deflate');
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
async onSetup() {
|
|
168
|
+
// Create backup driver instance
|
|
169
|
+
this.driver = createBackupDriver(this.driverName, this.driverConfig);
|
|
170
|
+
await this.driver.setup(this.database);
|
|
171
|
+
|
|
172
|
+
// Create temporary directory
|
|
173
|
+
await mkdir(this.config.tempDir, { recursive: true });
|
|
174
|
+
|
|
175
|
+
// Create backup metadata resource
|
|
176
|
+
await this._createBackupMetadataResource();
|
|
177
|
+
|
|
178
|
+
if (this.config.verbose) {
|
|
179
|
+
const storageInfo = this.driver.getStorageInfo();
|
|
180
|
+
console.log(`[BackupPlugin] Initialized with driver: ${storageInfo.type}`);
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
this.emit('initialized', {
|
|
184
|
+
driver: this.driver.getType(),
|
|
185
|
+
config: this.driver.getStorageInfo()
|
|
186
|
+
});
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
async _createBackupMetadataResource() {
|
|
190
|
+
const [ok] = await tryFn(() => this.database.createResource({
|
|
191
|
+
name: this.config.backupMetadataResource,
|
|
192
|
+
attributes: {
|
|
193
|
+
id: 'string|required',
|
|
194
|
+
type: 'string|required',
|
|
195
|
+
timestamp: 'number|required',
|
|
196
|
+
resources: 'json|required',
|
|
197
|
+
driverInfo: 'json|required', // Store driver info instead of destinations
|
|
198
|
+
size: 'number|default:0',
|
|
199
|
+
compressed: 'boolean|default:false',
|
|
200
|
+
encrypted: 'boolean|default:false',
|
|
201
|
+
checksum: 'string|default:null',
|
|
202
|
+
status: 'string|required',
|
|
203
|
+
error: 'string|default:null',
|
|
204
|
+
duration: 'number|default:0',
|
|
205
|
+
createdAt: 'string|required'
|
|
206
|
+
},
|
|
207
|
+
behavior: 'body-overflow',
|
|
208
|
+
timestamps: true
|
|
209
|
+
}));
|
|
210
|
+
|
|
211
|
+
if (!ok && this.config.verbose) {
|
|
212
|
+
console.log(`[BackupPlugin] Backup metadata resource '${this.config.backupMetadataResource}' already exists`);
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
/**
|
|
217
|
+
* Create a backup
|
|
218
|
+
* @param {string} type - Backup type ('full' or 'incremental')
|
|
219
|
+
* @param {Object} options - Backup options
|
|
220
|
+
* @returns {Object} Backup result
|
|
221
|
+
*/
|
|
222
|
+
async backup(type = 'full', options = {}) {
|
|
223
|
+
const backupId = this._generateBackupId(type);
|
|
224
|
+
const startTime = Date.now();
|
|
225
|
+
|
|
226
|
+
try {
|
|
227
|
+
this.activeBackups.add(backupId);
|
|
228
|
+
|
|
229
|
+
// Execute onBackupStart hook
|
|
230
|
+
if (this.config.onBackupStart) {
|
|
231
|
+
await this._executeHook(this.config.onBackupStart, type, { backupId });
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
this.emit('backup_start', { id: backupId, type });
|
|
235
|
+
|
|
236
|
+
// Create backup metadata
|
|
237
|
+
const metadata = await this._createBackupMetadata(backupId, type);
|
|
238
|
+
|
|
239
|
+
// Create temporary backup directory
|
|
240
|
+
const tempBackupDir = path.join(this.config.tempDir, backupId);
|
|
241
|
+
await mkdir(tempBackupDir, { recursive: true });
|
|
242
|
+
|
|
243
|
+
try {
|
|
244
|
+
// Create backup manifest
|
|
245
|
+
const manifest = await this._createBackupManifest(type, options);
|
|
246
|
+
|
|
247
|
+
// Export resources to backup files
|
|
248
|
+
const exportedFiles = await this._exportResources(manifest.resources, tempBackupDir, type);
|
|
249
|
+
|
|
250
|
+
// Check if we have any files to backup
|
|
251
|
+
if (exportedFiles.length === 0) {
|
|
252
|
+
throw new Error('No resources were exported for backup');
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
// Create archive if compression is enabled
|
|
256
|
+
let finalPath;
|
|
257
|
+
let totalSize = 0;
|
|
258
|
+
|
|
259
|
+
if (this.config.compression !== 'none') {
|
|
260
|
+
finalPath = path.join(tempBackupDir, `${backupId}.tar.gz`);
|
|
261
|
+
totalSize = await this._createCompressedArchive(exportedFiles, finalPath);
|
|
262
|
+
} else {
|
|
263
|
+
finalPath = exportedFiles[0]; // For single file backups
|
|
264
|
+
const [statOk, , stats] = await tryFn(() => stat(finalPath));
|
|
265
|
+
totalSize = statOk ? stats.size : 0;
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
// Generate checksum
|
|
269
|
+
const checksum = await this._generateChecksum(finalPath);
|
|
270
|
+
|
|
271
|
+
// Upload using driver
|
|
272
|
+
const uploadResult = await this.driver.upload(finalPath, backupId, manifest);
|
|
273
|
+
|
|
274
|
+
// Verify backup if enabled
|
|
275
|
+
if (this.config.verification) {
|
|
276
|
+
const isValid = await this.driver.verify(backupId, checksum, uploadResult);
|
|
277
|
+
if (!isValid) {
|
|
278
|
+
throw new Error('Backup verification failed');
|
|
279
|
+
}
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
const duration = Date.now() - startTime;
|
|
283
|
+
|
|
284
|
+
// Update metadata
|
|
285
|
+
await this._updateBackupMetadata(backupId, {
|
|
286
|
+
status: 'completed',
|
|
287
|
+
size: totalSize,
|
|
288
|
+
checksum,
|
|
289
|
+
driverInfo: uploadResult,
|
|
290
|
+
duration
|
|
291
|
+
});
|
|
292
|
+
|
|
293
|
+
// Execute onBackupComplete hook
|
|
294
|
+
if (this.config.onBackupComplete) {
|
|
295
|
+
const stats = { backupId, type, size: totalSize, duration, driverInfo: uploadResult };
|
|
296
|
+
await this._executeHook(this.config.onBackupComplete, type, stats);
|
|
297
|
+
}
|
|
298
|
+
|
|
299
|
+
this.emit('backup_complete', {
|
|
300
|
+
id: backupId,
|
|
301
|
+
type,
|
|
302
|
+
size: totalSize,
|
|
303
|
+
duration,
|
|
304
|
+
driverInfo: uploadResult
|
|
305
|
+
});
|
|
306
|
+
|
|
307
|
+
// Cleanup retention
|
|
308
|
+
await this._cleanupOldBackups();
|
|
309
|
+
|
|
310
|
+
return {
|
|
311
|
+
id: backupId,
|
|
312
|
+
type,
|
|
313
|
+
size: totalSize,
|
|
314
|
+
duration,
|
|
315
|
+
checksum,
|
|
316
|
+
driverInfo: uploadResult
|
|
317
|
+
};
|
|
318
|
+
|
|
319
|
+
} finally {
|
|
320
|
+
// Cleanup temporary files
|
|
321
|
+
await this._cleanupTempFiles(tempBackupDir);
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
} catch (error) {
|
|
325
|
+
// Execute onBackupError hook
|
|
326
|
+
if (this.config.onBackupError) {
|
|
327
|
+
await this._executeHook(this.config.onBackupError, type, { backupId, error });
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
// Update metadata with error
|
|
331
|
+
await this._updateBackupMetadata(backupId, {
|
|
332
|
+
status: 'failed',
|
|
333
|
+
error: error.message,
|
|
334
|
+
duration: Date.now() - startTime
|
|
335
|
+
});
|
|
336
|
+
|
|
337
|
+
this.emit('backup_error', { id: backupId, type, error: error.message });
|
|
338
|
+
throw error;
|
|
339
|
+
|
|
340
|
+
} finally {
|
|
341
|
+
this.activeBackups.delete(backupId);
|
|
342
|
+
}
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
_generateBackupId(type) {
|
|
346
|
+
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
347
|
+
const random = Math.random().toString(36).substring(2, 8);
|
|
348
|
+
return `${type}-${timestamp}-${random}`;
|
|
349
|
+
}
|
|
350
|
+
|
|
351
|
+
async _createBackupMetadata(backupId, type) {
|
|
352
|
+
const now = new Date();
|
|
353
|
+
const metadata = {
|
|
354
|
+
id: backupId,
|
|
355
|
+
type,
|
|
356
|
+
timestamp: Date.now(),
|
|
357
|
+
resources: [],
|
|
358
|
+
driverInfo: {},
|
|
359
|
+
size: 0,
|
|
360
|
+
status: 'in_progress',
|
|
361
|
+
compressed: this.config.compression !== 'none',
|
|
362
|
+
encrypted: !!this.config.encryption,
|
|
363
|
+
checksum: null,
|
|
364
|
+
error: null,
|
|
365
|
+
duration: 0,
|
|
366
|
+
createdAt: now.toISOString().slice(0, 10)
|
|
367
|
+
};
|
|
368
|
+
|
|
369
|
+
const [ok] = await tryFn(() =>
|
|
370
|
+
this.database.resource(this.config.backupMetadataResource).insert(metadata)
|
|
371
|
+
);
|
|
372
|
+
|
|
373
|
+
return metadata;
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
async _updateBackupMetadata(backupId, updates) {
|
|
377
|
+
const [ok] = await tryFn(() =>
|
|
378
|
+
this.database.resource(this.config.backupMetadataResource).update(backupId, updates)
|
|
379
|
+
);
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
async _createBackupManifest(type, options) {
|
|
383
|
+
let resourcesToBackup = options.resources ||
|
|
384
|
+
(this.config.include ? this.config.include : await this.database.listResources());
|
|
385
|
+
|
|
386
|
+
// Ensure we have resource names as strings
|
|
387
|
+
if (Array.isArray(resourcesToBackup) && resourcesToBackup.length > 0 && typeof resourcesToBackup[0] === 'object') {
|
|
388
|
+
resourcesToBackup = resourcesToBackup.map(resource => resource.name || resource);
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
// Filter excluded resources
|
|
392
|
+
const filteredResources = resourcesToBackup.filter(name =>
|
|
393
|
+
!this.config.exclude.includes(name)
|
|
394
|
+
);
|
|
395
|
+
|
|
396
|
+
return {
|
|
397
|
+
type,
|
|
398
|
+
timestamp: Date.now(),
|
|
399
|
+
resources: filteredResources,
|
|
400
|
+
compression: this.config.compression,
|
|
401
|
+
encrypted: !!this.config.encryption,
|
|
402
|
+
s3db_version: this.database.constructor.version || 'unknown'
|
|
403
|
+
};
|
|
404
|
+
}
|
|
405
|
+
|
|
406
|
+
async _exportResources(resourceNames, tempDir, type) {
|
|
407
|
+
const exportedFiles = [];
|
|
408
|
+
|
|
409
|
+
for (const resourceName of resourceNames) {
|
|
410
|
+
const resource = this.database.resources[resourceName];
|
|
411
|
+
if (!resource) {
|
|
412
|
+
console.warn(`[BackupPlugin] Resource '${resourceName}' not found, skipping`);
|
|
413
|
+
continue;
|
|
414
|
+
}
|
|
415
|
+
|
|
416
|
+
const exportPath = path.join(tempDir, `${resourceName}.json`);
|
|
417
|
+
|
|
418
|
+
// Export resource data
|
|
419
|
+
let records;
|
|
420
|
+
if (type === 'incremental') {
|
|
421
|
+
// For incremental, only export recent changes
|
|
422
|
+
// This is simplified - in real implementation, you'd track changes
|
|
423
|
+
const yesterday = new Date(Date.now() - 24 * 60 * 60 * 1000);
|
|
424
|
+
records = await resource.list({
|
|
425
|
+
filter: { updatedAt: { '>': yesterday.toISOString() } }
|
|
426
|
+
});
|
|
427
|
+
} else {
|
|
428
|
+
records = await resource.list();
|
|
429
|
+
}
|
|
430
|
+
|
|
431
|
+
const exportData = {
|
|
432
|
+
resourceName,
|
|
433
|
+
definition: resource.config,
|
|
434
|
+
records,
|
|
435
|
+
exportedAt: new Date().toISOString(),
|
|
436
|
+
type
|
|
437
|
+
};
|
|
438
|
+
|
|
439
|
+
await writeFile(exportPath, JSON.stringify(exportData, null, 2));
|
|
440
|
+
exportedFiles.push(exportPath);
|
|
441
|
+
|
|
442
|
+
if (this.config.verbose) {
|
|
443
|
+
console.log(`[BackupPlugin] Exported ${records.length} records from '${resourceName}'`);
|
|
444
|
+
}
|
|
445
|
+
}
|
|
446
|
+
|
|
447
|
+
return exportedFiles;
|
|
448
|
+
}
|
|
449
|
+
|
|
450
|
+
async _createCompressedArchive(files, targetPath) {
|
|
451
|
+
// Simple implementation - compress all files into a single stream
|
|
452
|
+
// In production, you might want to use tar or similar
|
|
453
|
+
const output = createWriteStream(targetPath);
|
|
454
|
+
const gzip = zlib.createGzip({ level: 6 });
|
|
455
|
+
|
|
456
|
+
let totalSize = 0;
|
|
457
|
+
|
|
458
|
+
await pipeline(
|
|
459
|
+
async function* () {
|
|
460
|
+
for (const filePath of files) {
|
|
461
|
+
const content = await readFile(filePath);
|
|
462
|
+
totalSize += content.length;
|
|
463
|
+
yield content;
|
|
464
|
+
}
|
|
465
|
+
},
|
|
466
|
+
gzip,
|
|
467
|
+
output
|
|
468
|
+
);
|
|
469
|
+
|
|
470
|
+
const [statOk, , stats] = await tryFn(() => stat(targetPath));
|
|
471
|
+
return statOk ? stats.size : totalSize;
|
|
472
|
+
}
|
|
473
|
+
|
|
474
|
+
async _generateChecksum(filePath) {
|
|
475
|
+
const hash = crypto.createHash('sha256');
|
|
476
|
+
const stream = createReadStream(filePath);
|
|
477
|
+
|
|
478
|
+
await pipeline(stream, hash);
|
|
479
|
+
return hash.digest('hex');
|
|
480
|
+
}
|
|
481
|
+
|
|
482
|
+
async _cleanupTempFiles(tempDir) {
|
|
483
|
+
const [ok] = await tryFn(() =>
|
|
484
|
+
import('fs/promises').then(fs => fs.rm(tempDir, { recursive: true, force: true }))
|
|
485
|
+
);
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
/**
|
|
489
|
+
* Restore from backup
|
|
490
|
+
* @param {string} backupId - Backup identifier
|
|
491
|
+
* @param {Object} options - Restore options
|
|
492
|
+
* @returns {Object} Restore result
|
|
493
|
+
*/
|
|
494
|
+
async restore(backupId, options = {}) {
|
|
495
|
+
try {
|
|
496
|
+
// Execute onRestoreStart hook
|
|
497
|
+
if (this.config.onRestoreStart) {
|
|
498
|
+
await this._executeHook(this.config.onRestoreStart, backupId, options);
|
|
499
|
+
}
|
|
500
|
+
|
|
501
|
+
this.emit('restore_start', { id: backupId, options });
|
|
502
|
+
|
|
503
|
+
// Get backup metadata
|
|
504
|
+
const backup = await this.getBackupStatus(backupId);
|
|
505
|
+
if (!backup) {
|
|
506
|
+
throw new Error(`Backup '${backupId}' not found`);
|
|
507
|
+
}
|
|
508
|
+
|
|
509
|
+
if (backup.status !== 'completed') {
|
|
510
|
+
throw new Error(`Backup '${backupId}' is not in completed status`);
|
|
511
|
+
}
|
|
512
|
+
|
|
513
|
+
// Create temporary restore directory
|
|
514
|
+
const tempRestoreDir = path.join(this.config.tempDir, `restore-${backupId}`);
|
|
515
|
+
await mkdir(tempRestoreDir, { recursive: true });
|
|
516
|
+
|
|
517
|
+
try {
|
|
518
|
+
// Download backup using driver
|
|
519
|
+
const downloadPath = path.join(tempRestoreDir, `${backupId}.backup`);
|
|
520
|
+
await this.driver.download(backupId, downloadPath, backup.driverInfo);
|
|
521
|
+
|
|
522
|
+
// Verify backup if enabled
|
|
523
|
+
if (this.config.verification && backup.checksum) {
|
|
524
|
+
const actualChecksum = await this._generateChecksum(downloadPath);
|
|
525
|
+
if (actualChecksum !== backup.checksum) {
|
|
526
|
+
throw new Error('Backup verification failed during restore');
|
|
527
|
+
}
|
|
528
|
+
}
|
|
529
|
+
|
|
530
|
+
// Extract and restore data
|
|
531
|
+
const restoredResources = await this._restoreFromBackup(downloadPath, options);
|
|
532
|
+
|
|
533
|
+
// Execute onRestoreComplete hook
|
|
534
|
+
if (this.config.onRestoreComplete) {
|
|
535
|
+
await this._executeHook(this.config.onRestoreComplete, backupId, { restored: restoredResources });
|
|
536
|
+
}
|
|
537
|
+
|
|
538
|
+
this.emit('restore_complete', {
|
|
539
|
+
id: backupId,
|
|
540
|
+
restored: restoredResources
|
|
541
|
+
});
|
|
542
|
+
|
|
543
|
+
return {
|
|
544
|
+
backupId,
|
|
545
|
+
restored: restoredResources
|
|
546
|
+
};
|
|
547
|
+
|
|
548
|
+
} finally {
|
|
549
|
+
// Cleanup temporary files
|
|
550
|
+
await this._cleanupTempFiles(tempRestoreDir);
|
|
551
|
+
}
|
|
552
|
+
|
|
553
|
+
} catch (error) {
|
|
554
|
+
// Execute onRestoreError hook
|
|
555
|
+
if (this.config.onRestoreError) {
|
|
556
|
+
await this._executeHook(this.config.onRestoreError, backupId, { error });
|
|
557
|
+
}
|
|
558
|
+
|
|
559
|
+
this.emit('restore_error', { id: backupId, error: error.message });
|
|
560
|
+
throw error;
|
|
561
|
+
}
|
|
562
|
+
}
|
|
563
|
+
|
|
564
|
+
async _restoreFromBackup(backupPath, options) {
|
|
565
|
+
// This is a simplified implementation
|
|
566
|
+
// In reality, you'd need to handle decompression, etc.
|
|
567
|
+
const restoredResources = [];
|
|
568
|
+
|
|
569
|
+
// For now, assume the backup is a JSON file with resource data
|
|
570
|
+
// In production, handle compressed archives properly
|
|
571
|
+
|
|
572
|
+
return restoredResources;
|
|
573
|
+
}
|
|
574
|
+
|
|
575
|
+
/**
|
|
576
|
+
* List available backups
|
|
577
|
+
* @param {Object} options - List options
|
|
578
|
+
* @returns {Array} List of backups
|
|
579
|
+
*/
|
|
580
|
+
async listBackups(options = {}) {
|
|
581
|
+
try {
|
|
582
|
+
// Get backups from driver
|
|
583
|
+
const driverBackups = await this.driver.list(options);
|
|
584
|
+
|
|
585
|
+
// Merge with metadata from database
|
|
586
|
+
const [metaOk, , metadataRecords] = await tryFn(() =>
|
|
587
|
+
this.database.resource(this.config.backupMetadataResource).list({
|
|
588
|
+
limit: options.limit || 50,
|
|
589
|
+
sort: { timestamp: -1 }
|
|
590
|
+
})
|
|
591
|
+
);
|
|
592
|
+
|
|
593
|
+
const metadataMap = new Map();
|
|
594
|
+
if (metaOk) {
|
|
595
|
+
metadataRecords.forEach(record => metadataMap.set(record.id, record));
|
|
596
|
+
}
|
|
597
|
+
|
|
598
|
+
// Combine driver data with metadata
|
|
599
|
+
const combinedBackups = driverBackups.map(backup => ({
|
|
600
|
+
...backup,
|
|
601
|
+
...(metadataMap.get(backup.id) || {})
|
|
602
|
+
}));
|
|
603
|
+
|
|
604
|
+
return combinedBackups;
|
|
605
|
+
|
|
606
|
+
} catch (error) {
|
|
607
|
+
if (this.config.verbose) {
|
|
608
|
+
console.log(`[BackupPlugin] Error listing backups: ${error.message}`);
|
|
609
|
+
}
|
|
610
|
+
return [];
|
|
611
|
+
}
|
|
612
|
+
}
|
|
613
|
+
|
|
614
|
+
/**
|
|
615
|
+
* Get backup status
|
|
616
|
+
* @param {string} backupId - Backup identifier
|
|
617
|
+
* @returns {Object|null} Backup status
|
|
618
|
+
*/
|
|
619
|
+
async getBackupStatus(backupId) {
|
|
620
|
+
const [ok, , backup] = await tryFn(() =>
|
|
621
|
+
this.database.resource(this.config.backupMetadataResource).get(backupId)
|
|
622
|
+
);
|
|
623
|
+
|
|
624
|
+
return ok ? backup : null;
|
|
625
|
+
}
|
|
626
|
+
|
|
627
|
+
async _cleanupOldBackups() {
|
|
628
|
+
// Implementation of retention policy
|
|
629
|
+
// This is simplified - implement GFS rotation properly
|
|
630
|
+
}
|
|
631
|
+
|
|
632
|
+
async _executeHook(hook, ...args) {
|
|
633
|
+
if (typeof hook === 'function') {
|
|
634
|
+
return await hook(...args);
|
|
635
|
+
}
|
|
636
|
+
}
|
|
637
|
+
|
|
638
|
+
async start() {
|
|
639
|
+
if (this.config.verbose) {
|
|
640
|
+
const storageInfo = this.driver.getStorageInfo();
|
|
641
|
+
console.log(`[BackupPlugin] Started with driver: ${storageInfo.type}`);
|
|
642
|
+
}
|
|
643
|
+
}
|
|
644
|
+
|
|
645
|
+
async stop() {
|
|
646
|
+
// Cancel any active backups
|
|
647
|
+
for (const backupId of this.activeBackups) {
|
|
648
|
+
this.emit('backup_cancelled', { id: backupId });
|
|
649
|
+
}
|
|
650
|
+
this.activeBackups.clear();
|
|
651
|
+
|
|
652
|
+
// Cleanup driver
|
|
653
|
+
if (this.driver) {
|
|
654
|
+
await this.driver.cleanup();
|
|
655
|
+
}
|
|
656
|
+
}
|
|
657
|
+
|
|
658
|
+
/**
|
|
659
|
+
* Cleanup plugin resources (alias for stop for backward compatibility)
|
|
660
|
+
*/
|
|
661
|
+
async cleanup() {
|
|
662
|
+
await this.stop();
|
|
663
|
+
}
|
|
664
|
+
}
|