s3db.js 9.3.0 → 10.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +72 -13
- package/dist/s3db.cjs.js +2342 -540
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.es.js +2341 -541
- package/dist/s3db.es.js.map +1 -1
- package/package.json +1 -1
- package/src/client.class.js +8 -7
- package/src/concerns/high-performance-inserter.js +285 -0
- package/src/concerns/partition-queue.js +171 -0
- package/src/errors.js +10 -2
- package/src/partition-drivers/base-partition-driver.js +96 -0
- package/src/partition-drivers/index.js +60 -0
- package/src/partition-drivers/memory-partition-driver.js +274 -0
- package/src/partition-drivers/sqs-partition-driver.js +332 -0
- package/src/partition-drivers/sync-partition-driver.js +38 -0
- package/src/plugins/audit.plugin.js +4 -4
- package/src/plugins/backup.plugin.js +380 -105
- package/src/plugins/backup.plugin.js.backup +1 -1
- package/src/plugins/cache.plugin.js +203 -150
- package/src/plugins/eventual-consistency.plugin.js +1012 -0
- package/src/plugins/fulltext.plugin.js +6 -6
- package/src/plugins/index.js +2 -0
- package/src/plugins/metrics.plugin.js +13 -13
- package/src/plugins/replicator.plugin.js +108 -70
- package/src/plugins/replicators/s3db-replicator.class.js +7 -3
- package/src/plugins/replicators/sqs-replicator.class.js +11 -3
- package/src/plugins/s3-queue.plugin.js +776 -0
- package/src/plugins/scheduler.plugin.js +226 -164
- package/src/plugins/state-machine.plugin.js +109 -81
- package/src/resource.class.js +205 -0
- package/PLUGINS.md +0 -5036
The hunks below are from package/src/plugins/backup.plugin.js; the final @@ -98,7 +98,7 @@ hunk appears to correspond to the one-line change in package/src/plugins/backup.plugin.js.backup.
@@ -7,6 +7,7 @@ import { pipeline } from 'stream/promises';
 import { mkdir, writeFile, readFile, unlink, stat, readdir } from 'fs/promises';
 import path from 'path';
 import crypto from 'crypto';
+import os from 'os';
 
 /**
  * BackupPlugin - Automated Database Backup System
@@ -72,27 +73,24 @@ import crypto from 'crypto';
 export class BackupPlugin extends Plugin {
   constructor(options = {}) {
     super();
-
-    // Extract driver configuration
-    this.driverName = options.driver || 'filesystem';
-    this.driverConfig = options.config || {};
-
+
     this.config = {
-      //
-
-
+      // Driver configuration
+      driver: options.driver || 'filesystem',
+      driverConfig: options.config || {},
+
       // Scheduling configuration
       schedule: options.schedule || {},
-
+
       // Retention policy (Grandfather-Father-Son)
       retention: {
         daily: 7,
-        weekly: 4,
+        weekly: 4,
         monthly: 12,
         yearly: 3,
         ...options.retention
       },
-
+
       // Backup options
       compression: options.compression || 'gzip',
       encryption: options.encryption || null,
@@ -100,10 +98,10 @@ export class BackupPlugin extends Plugin {
       parallelism: options.parallelism || 4,
       include: options.include || null,
       exclude: options.exclude || [],
-      backupMetadataResource: options.backupMetadataResource || '
-      tempDir: options.tempDir || '
+      backupMetadataResource: options.backupMetadataResource || 'plg_backup_metadata',
+      tempDir: options.tempDir || path.join(os.tmpdir(), 's3db', 'backups'),
       verbose: options.verbose || false,
-
+
       // Hooks
       onBackupStart: options.onBackupStart || null,
       onBackupComplete: options.onBackupComplete || null,
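
For orientation, a minimal sketch of how these options are passed after this change; only the top-level option names come from the constructor above, while the filesystem driver's config keys and the import path are assumptions:

    // Assumed import path; adjust to how your project consumes s3db.js.
    import { BackupPlugin } from 's3db.js';

    const backupPlugin = new BackupPlugin({
      driver: 'filesystem',                      // stored as this.config.driver
      config: { path: '/var/backups/s3db' },     // driver-specific; 'path' is a hypothetical key
      retention: { daily: 7, weekly: 4 },        // merged over the GFS defaults above
      compression: 'gzip',                       // or 'none'
      tempDir: '/tmp/my-backups',                // defaults to path.join(os.tmpdir(), 's3db', 'backups')
      verbose: true
    });
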
@@ -115,41 +113,11 @@ export class BackupPlugin extends Plugin {
 
     this.driver = null;
     this.activeBackups = new Set();
-
-    // Handle legacy destinations format
-    this._handleLegacyDestinations();
-
-    // Validate driver configuration (after legacy conversion)
-    validateBackupConfig(this.driverName, this.driverConfig);
-
-    this._validateConfiguration();
-  }
 
-
-
-
-
-    if (this.config.destinations && Array.isArray(this.config.destinations)) {
-      // Convert legacy format to multi driver
-      this.driverName = 'multi';
-      this.driverConfig = {
-        strategy: 'all',
-        destinations: this.config.destinations.map(dest => {
-          const { type, ...config } = dest; // Extract type and get the rest as config
-          return {
-            driver: type,
-            config
-          };
-        })
-      };
-
-      // Clear legacy destinations
-      this.config.destinations = null;
-
-      if (this.config.verbose) {
-        console.log('[BackupPlugin] Converted legacy destinations format to multi driver');
-      }
-    }
+    // Validate driver configuration
+    validateBackupConfig(this.config.driver, this.config.driverConfig);
+
+    this._validateConfiguration();
   }
 
   _validateConfiguration() {
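
Since _handleLegacyDestinations() is removed, a config that previously relied on the legacy destinations array now has to spell out the multi-driver form the converter used to build. A sketch; the 'filesystem' driver and its path key are illustrative, while the strategy/destinations shape comes from the deleted conversion code:

    // 9.x style, previously auto-converted (exact legacy key placement assumed):
    //   new BackupPlugin({ destinations: [{ type: 'filesystem', path: '/backups' }] })
    //
    // 10.x equivalent, written explicitly:
    const backupPlugin = new BackupPlugin({
      driver: 'multi',
      config: {
        strategy: 'all',
        destinations: [
          { driver: 'filesystem', config: { path: '/backups' } }
        ]
      }
    });
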
@@ -166,21 +134,21 @@ export class BackupPlugin extends Plugin {
 
   async onSetup() {
     // Create backup driver instance
-    this.driver = createBackupDriver(this.
+    this.driver = createBackupDriver(this.config.driver, this.config.driverConfig);
     await this.driver.setup(this.database);
-
+
     // Create temporary directory
     await mkdir(this.config.tempDir, { recursive: true });
-
+
     // Create backup metadata resource
     await this._createBackupMetadataResource();
-
+
     if (this.config.verbose) {
       const storageInfo = this.driver.getStorageInfo();
       console.log(`[BackupPlugin] Initialized with driver: ${storageInfo.type}`);
     }
-
-    this.emit('initialized', {
+
+    this.emit('initialized', {
       driver: this.driver.getType(),
       config: this.driver.getStorageInfo()
     });
@@ -222,7 +190,12 @@ export class BackupPlugin extends Plugin {
   async backup(type = 'full', options = {}) {
     const backupId = this._generateBackupId(type);
     const startTime = Date.now();
-
+
+    // Check for race condition
+    if (this.activeBackups.has(backupId)) {
+      throw new Error(`Backup '${backupId}' is already in progress`);
+    }
+
     try {
       this.activeBackups.add(backupId);
 
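
A small usage sketch of the guarded call; the method name and the 'full'/'incremental' types are from this hunk, and the error-message check is just one way to react to the new guard:

    // Two overlapping calls that resolve to the same backupId now fail fast
    // instead of racing; callers can catch and skip.
    try {
      await backupPlugin.backup('full');
    } catch (err) {
      if (/already in progress/.test(err.message)) {
        console.warn('A backup with this id is still running, skipping');
      } else {
        throw err;
      }
    }
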
@@ -252,18 +225,10 @@ export class BackupPlugin extends Plugin {
         throw new Error('No resources were exported for backup');
       }
 
-      // Create archive
-
-
-
-      if (this.config.compression !== 'none') {
-        finalPath = path.join(tempBackupDir, `${backupId}.tar.gz`);
-        totalSize = await this._createCompressedArchive(exportedFiles, finalPath);
-      } else {
-        finalPath = exportedFiles[0]; // For single file backups
-        const [statOk, , stats] = await tryFn(() => stat(finalPath));
-        totalSize = statOk ? stats.size : 0;
-      }
+      // Create archive
+      const archiveExtension = this.config.compression !== 'none' ? '.tar.gz' : '.json';
+      const finalPath = path.join(tempBackupDir, `${backupId}${archiveExtension}`);
+      const totalSize = await this._createArchive(exportedFiles, finalPath, this.config.compression);
 
       // Generate checksum
       const checksum = await this._generateChecksum(finalPath);
@@ -409,7 +374,9 @@ export class BackupPlugin extends Plugin {
     for (const resourceName of resourceNames) {
       const resource = this.database.resources[resourceName];
       if (!resource) {
-
+        if (this.config.verbose) {
+          console.warn(`[BackupPlugin] Resource '${resourceName}' not found, skipping`);
+        }
         continue;
       }
 
@@ -418,11 +385,33 @@ export class BackupPlugin extends Plugin {
       // Export resource data
       let records;
       if (type === 'incremental') {
-        // For incremental, only export
-
-
-
-
+        // For incremental, only export records changed since last successful backup
+        const [lastBackupOk, , lastBackups] = await tryFn(() =>
+          this.database.resource(this.config.backupMetadataResource).list({
+            filter: {
+              status: 'completed',
+              type: { $in: ['full', 'incremental'] }
+            },
+            sort: { timestamp: -1 },
+            limit: 1
+          })
+        );
+
+        let sinceTimestamp;
+        if (lastBackupOk && lastBackups && lastBackups.length > 0) {
+          sinceTimestamp = new Date(lastBackups[0].timestamp);
+        } else {
+          // No previous backup found, use last 24 hours as fallback
+          sinceTimestamp = new Date(Date.now() - 24 * 60 * 60 * 1000);
+        }
+
+        if (this.config.verbose) {
+          console.log(`[BackupPlugin] Incremental backup for '${resourceName}' since ${sinceTimestamp.toISOString()}`);
+        }
+
+        // Get records updated since last backup
+        records = await resource.list({
+          filter: { updatedAt: { '>': sinceTimestamp.toISOString() } }
         });
       } else {
         records = await resource.list();
@@ -447,36 +436,75 @@ export class BackupPlugin extends Plugin {
     return exportedFiles;
   }
 
-  async
-  //
-
-
-
-
+  async _createArchive(files, targetPath, compressionType) {
+    // Create a JSON-based archive with file metadata and contents
+    const archive = {
+      version: '1.0',
+      created: new Date().toISOString(),
+      files: []
+    };
+
     let totalSize = 0;
-
-
-
-
-
-
-
+
+    // Read all files and add to archive
+    for (const filePath of files) {
+      const [readOk, readErr, content] = await tryFn(() => readFile(filePath, 'utf8'));
+
+      if (!readOk) {
+        if (this.config.verbose) {
+          console.warn(`[BackupPlugin] Failed to read ${filePath}: ${readErr?.message}`);
         }
-
-
-
-
-
+        continue;
+      }
+
+      const fileName = path.basename(filePath);
+      totalSize += content.length;
+
+      archive.files.push({
+        name: fileName,
+        size: content.length,
+        content
+      });
+    }
+
+    // Write archive (compressed or uncompressed)
+    const archiveJson = JSON.stringify(archive);
+
+    if (compressionType === 'none') {
+      // Write uncompressed JSON
+      await writeFile(targetPath, archiveJson, 'utf8');
+    } else {
+      // Write compressed JSON
+      const output = createWriteStream(targetPath);
+      const gzip = zlib.createGzip({ level: 6 });
+
+      await pipeline(
+        async function* () {
+          yield Buffer.from(archiveJson, 'utf8');
+        },
+        gzip,
+        output
+      );
+    }
+
     const [statOk, , stats] = await tryFn(() => stat(targetPath));
     return statOk ? stats.size : totalSize;
   }
 
   async _generateChecksum(filePath) {
-    const
-
-
-
-
+    const [ok, err, result] = await tryFn(async () => {
+      const hash = crypto.createHash('sha256');
+      const stream = createReadStream(filePath);
+
+      await pipeline(stream, hash);
+      return hash.digest('hex');
+    });
+
+    if (!ok) {
+      throw new Error(`Failed to generate checksum for ${filePath}: ${err?.message}`);
+    }
+
+    return result;
   }
 
   async _cleanupTempFiles(tempDir) {
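
The archive produced by _createArchive is a single JSON document, gzip-compressed when compression is not 'none' (despite the .tar.gz extension chosen earlier, it is gzipped JSON rather than a tar stream). A sketch of its layout with illustrative values; the inner content shape (resourceName/definition/records) is what the restore path in the next hunk expects:

    // Illustrative archive layout (values are examples, not real output):
    const exampleArchive = {
      version: '1.0',
      created: '2025-01-01T00:00:00.000Z',
      files: [
        {
          name: 'users.json',     // path.basename() of the exported temp file
          size: 123,              // content.length in characters
          content: '{"resourceName":"users","definition":{/* ... */},"records":[/* ... */]}'
        }
      ]
    };
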
@@ -562,14 +590,151 @@ export class BackupPlugin extends Plugin {
   }
 
   async _restoreFromBackup(backupPath, options) {
-    // This is a simplified implementation
-    // In reality, you'd need to handle decompression, etc.
     const restoredResources = [];
-
-
-
-
-
+
+    try {
+      // Read and decompress the archive
+      let archiveData = '';
+
+      if (this.config.compression !== 'none') {
+        // Decompress the archive
+        const input = createReadStream(backupPath);
+        const gunzip = zlib.createGunzip();
+        const chunks = [];
+
+        // Use pipeline with proper stream handling
+        await new Promise((resolve, reject) => {
+          input.pipe(gunzip)
+            .on('data', chunk => chunks.push(chunk))
+            .on('end', resolve)
+            .on('error', reject);
+        });
+
+        archiveData = Buffer.concat(chunks).toString('utf8');
+      } else {
+        // Read uncompressed archive
+        archiveData = await readFile(backupPath, 'utf8');
+      }
+
+      // Parse the archive
+      let archive;
+      try {
+        archive = JSON.parse(archiveData);
+      } catch (parseError) {
+        throw new Error(`Failed to parse backup archive: ${parseError.message}`);
+      }
+
+      if (!archive || typeof archive !== 'object') {
+        throw new Error('Invalid backup archive: not a valid JSON object');
+      }
+
+      if (!archive.version || !archive.files) {
+        throw new Error('Invalid backup archive format: missing version or files array');
+      }
+
+      if (this.config.verbose) {
+        console.log(`[BackupPlugin] Restoring ${archive.files.length} files from backup`);
+      }
+
+      // Process each file in the archive
+      for (const file of archive.files) {
+        try {
+          const resourceData = JSON.parse(file.content);
+
+          if (!resourceData.resourceName || !resourceData.definition) {
+            if (this.config.verbose) {
+              console.warn(`[BackupPlugin] Skipping invalid file: ${file.name}`);
+            }
+            continue;
+          }
+
+          const resourceName = resourceData.resourceName;
+
+          // Check if we should restore this resource
+          if (options.resources && !options.resources.includes(resourceName)) {
+            continue;
+          }
+
+          // Ensure resource exists or create it
+          let resource = this.database.resources[resourceName];
+
+          if (!resource) {
+            if (this.config.verbose) {
+              console.log(`[BackupPlugin] Creating resource '${resourceName}'`);
+            }
+
+            const [createOk, createErr] = await tryFn(() =>
+              this.database.createResource(resourceData.definition)
+            );
+
+            if (!createOk) {
+              if (this.config.verbose) {
+                console.warn(`[BackupPlugin] Failed to create resource '${resourceName}': ${createErr?.message}`);
+              }
+              continue;
+            }
+
+            resource = this.database.resources[resourceName];
+          }
+
+          // Restore records
+          if (resourceData.records && Array.isArray(resourceData.records)) {
+            const mode = options.mode || 'merge'; // 'merge', 'replace', 'skip'
+
+            if (mode === 'replace') {
+              // Clear existing data
+              const ids = await resource.listIds();
+              for (const id of ids) {
+                await resource.delete(id);
+              }
+            }
+
+            // Insert records
+            let insertedCount = 0;
+            for (const record of resourceData.records) {
+              const [insertOk] = await tryFn(async () => {
+                if (mode === 'skip') {
+                  // Check if record exists
+                  const existing = await resource.get(record.id);
+                  if (existing) {
+                    return false;
+                  }
+                }
+                await resource.insert(record);
+                return true;
+              });
+
+              if (insertOk) {
+                insertedCount++;
+              }
+            }
+
+            restoredResources.push({
+              name: resourceName,
+              recordsRestored: insertedCount,
+              totalRecords: resourceData.records.length
+            });
+
+            if (this.config.verbose) {
+              console.log(`[BackupPlugin] Restored ${insertedCount}/${resourceData.records.length} records to '${resourceName}'`);
+            }
+          }
+
+        } catch (fileError) {
+          if (this.config.verbose) {
+            console.warn(`[BackupPlugin] Error processing file ${file.name}: ${fileError.message}`);
+          }
+        }
+      }
+
+      return restoredResources;
+
+    } catch (error) {
+      if (this.config.verbose) {
+        console.error(`[BackupPlugin] Error restoring backup: ${error.message}`);
+      }
+      throw new Error(`Failed to restore backup: ${error.message}`);
+    }
   }
 
   /**
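
The options object consumed by _restoreFromBackup recognizes two fields, per the hunk above; how it is reached from a public restore entry point is not part of this diff, so treat any surrounding call as hypothetical:

    // Field names come from the restore code above; values are examples.
    const restoreOptions = {
      resources: ['users', 'orders'],  // optional allow-list of resource names to restore
      mode: 'merge'                    // 'merge' (default) | 'replace' | 'skip'
    };
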
@@ -625,8 +790,118 @@ export class BackupPlugin extends Plugin {
   }
 
   async _cleanupOldBackups() {
-
-
+    try {
+      // Get all completed backups sorted by timestamp
+      const [listOk, , allBackups] = await tryFn(() =>
+        this.database.resource(this.config.backupMetadataResource).list({
+          filter: { status: 'completed' },
+          sort: { timestamp: -1 }
+        })
+      );
+
+      if (!listOk || !allBackups || allBackups.length === 0) {
+        return;
+      }
+
+      const now = Date.now();
+      const msPerDay = 24 * 60 * 60 * 1000;
+      const msPerWeek = 7 * msPerDay;
+      const msPerMonth = 30 * msPerDay;
+      const msPerYear = 365 * msPerDay;
+
+      // Categorize backups by retention period
+      const categorized = {
+        daily: [],
+        weekly: [],
+        monthly: [],
+        yearly: []
+      };
+
+      for (const backup of allBackups) {
+        const age = now - backup.timestamp;
+
+        if (age <= msPerDay * this.config.retention.daily) {
+          categorized.daily.push(backup);
+        } else if (age <= msPerWeek * this.config.retention.weekly) {
+          categorized.weekly.push(backup);
+        } else if (age <= msPerMonth * this.config.retention.monthly) {
+          categorized.monthly.push(backup);
+        } else if (age <= msPerYear * this.config.retention.yearly) {
+          categorized.yearly.push(backup);
+        }
+      }
+
+      // Apply GFS retention: keep one backup per period
+      const toKeep = new Set();
+
+      // Keep all daily backups within retention
+      categorized.daily.forEach(b => toKeep.add(b.id));
+
+      // Keep one backup per week
+      const weeklyByWeek = new Map();
+      for (const backup of categorized.weekly) {
+        const weekNum = Math.floor((now - backup.timestamp) / msPerWeek);
+        if (!weeklyByWeek.has(weekNum)) {
+          weeklyByWeek.set(weekNum, backup);
+          toKeep.add(backup.id);
+        }
+      }
+
+      // Keep one backup per month
+      const monthlyByMonth = new Map();
+      for (const backup of categorized.monthly) {
+        const monthNum = Math.floor((now - backup.timestamp) / msPerMonth);
+        if (!monthlyByMonth.has(monthNum)) {
+          monthlyByMonth.set(monthNum, backup);
+          toKeep.add(backup.id);
+        }
+      }
+
+      // Keep one backup per year
+      const yearlyByYear = new Map();
+      for (const backup of categorized.yearly) {
+        const yearNum = Math.floor((now - backup.timestamp) / msPerYear);
+        if (!yearlyByYear.has(yearNum)) {
+          yearlyByYear.set(yearNum, backup);
+          toKeep.add(backup.id);
+        }
+      }
+
+      // Delete backups not in the keep set
+      const backupsToDelete = allBackups.filter(b => !toKeep.has(b.id));
+
+      if (backupsToDelete.length === 0) {
+        return;
+      }
+
+      if (this.config.verbose) {
+        console.log(`[BackupPlugin] Cleaning up ${backupsToDelete.length} old backups (keeping ${toKeep.size})`);
+      }
+
+      // Delete old backups
+      for (const backup of backupsToDelete) {
+        try {
+          // Delete from driver
+          await this.driver.delete(backup.id, backup.driverInfo);
+
+          // Delete metadata
+          await this.database.resource(this.config.backupMetadataResource).delete(backup.id);
+
+          if (this.config.verbose) {
+            console.log(`[BackupPlugin] Deleted old backup: ${backup.id}`);
+          }
+        } catch (deleteError) {
+          if (this.config.verbose) {
+            console.warn(`[BackupPlugin] Failed to delete backup ${backup.id}: ${deleteError.message}`);
+          }
+        }
+      }
+
+    } catch (error) {
+      if (this.config.verbose) {
+        console.warn(`[BackupPlugin] Error during cleanup: ${error.message}`);
+      }
+    }
   }
 
   async _executeHook(hook, ...args) {
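
For a concrete feel of the retention windows, a worked sketch using the default retention { daily: 7, weekly: 4, monthly: 12, yearly: 3 }; months and years are fixed at 30 and 365 days by the code above:

    const msPerDay = 24 * 60 * 60 * 1000;
    const windows = {
      daily:   7 * msPerDay,        // <= 7 days old: every completed backup is kept
      weekly:  4 * 7 * msPerDay,    // <= 28 days: the first (newest, given the timestamp: -1 sort) backup per 7-day slot is kept
      monthly: 12 * 30 * msPerDay,  // <= 360 days: first backup per 30-day slot is kept
      yearly:  3 * 365 * msPerDay   // <= 1095 days: first backup per 365-day slot is kept
    };
    // A completed backup older than windows.yearly, or not the first in its slot,
    // is deleted from the driver and from the backup metadata resource.
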
@@ -98,7 +98,7 @@ export class BackupPlugin extends Plugin {
       include: options.include || null,
       exclude: options.exclude || [],
       backupMetadataResource: options.backupMetadataResource || 'backup_metadata',
-      tempDir: options.tempDir || '
+      tempDir: options.tempDir || '/tmp/s3db/backups',
       verbose: options.verbose || false,
       onBackupStart: options.onBackupStart || null,
       onBackupComplete: options.onBackupComplete || null,