sehawq.db 4.0.3 → 4.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/npm-publish.yml +30 -30
- package/LICENSE +21 -21
- package/index.js +1 -1
- package/package.json +36 -36
- package/readme.md +413 -413
- package/src/core/Database.js +294 -294
- package/src/core/Events.js +285 -285
- package/src/core/IndexManager.js +813 -813
- package/src/core/Persistence.js +375 -375
- package/src/core/QueryEngine.js +447 -447
- package/src/core/Storage.js +321 -321
- package/src/core/Validator.js +324 -324
- package/src/index.js +115 -115
- package/src/performance/Cache.js +338 -338
- package/src/performance/LazyLoader.js +354 -354
- package/src/performance/MemoryManager.js +495 -495
- package/src/server/api.js +687 -687
- package/src/server/websocket.js +527 -527
- package/src/utils/benchmark.js +51 -51
- package/src/utils/dot-notation.js +247 -247
- package/src/utils/helpers.js +275 -275
- package/src/utils/profiler.js +70 -70
- package/src/version.js +37 -37
package/src/core/Storage.js
CHANGED
|
@@ -1,322 +1,322 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Storage Layer - Handles all file I/O with performance optimizations
|
|
3
|
-
*
|
|
4
|
-
* Because reading/writing files should be fast, not frustrating
|
|
5
|
-
* Added some tricks I learned the hard way 🎯
|
|
6
|
-
*/
|
|
7
|
-
|
|
8
|
-
const fs = require('fs').promises;
|
|
9
|
-
const path = require('path');
|
|
10
|
-
const { performance } = require('perf_hooks');
|
|
11
|
-
|
|
12
|
-
class Storage {
|
|
13
|
-
constructor(filePath, options = {}) {
|
|
14
|
-
this.filePath = filePath;
|
|
15
|
-
this.options = {
|
|
16
|
-
compression: false,
|
|
17
|
-
backupOnWrite: true,
|
|
18
|
-
backupRetention: 5, // Keep last 5 backups
|
|
19
|
-
maxFileSize: 50 * 1024 * 1024, // 50MB limit
|
|
20
|
-
...options
|
|
21
|
-
};
|
|
22
|
-
|
|
23
|
-
this.writeQueue = [];
|
|
24
|
-
this.isWriting = false;
|
|
25
|
-
this.stats = {
|
|
26
|
-
reads: 0,
|
|
27
|
-
writes: 0,
|
|
28
|
-
backups: 0,
|
|
29
|
-
errors: 0,
|
|
30
|
-
totalReadTime: 0,
|
|
31
|
-
totalWriteTime: 0
|
|
32
|
-
};
|
|
33
|
-
|
|
34
|
-
this._ensureDirectory();
|
|
35
|
-
}
|
|
36
|
-
|
|
37
|
-
/**
|
|
38
|
-
* Make sure the directory exists
|
|
39
|
-
* Learned this the hard way - files don't create their own folders! 😅
|
|
40
|
-
*/
|
|
41
|
-
async _ensureDirectory() {
|
|
42
|
-
const dir = path.dirname(this.filePath);
|
|
43
|
-
try {
|
|
44
|
-
await fs.access(dir);
|
|
45
|
-
} catch (error) {
|
|
46
|
-
await fs.mkdir(dir, { recursive: true });
|
|
47
|
-
}
|
|
48
|
-
}
|
|
49
|
-
|
|
50
|
-
/**
|
|
51
|
-
* Read data with performance tracking and caching
|
|
52
|
-
*/
|
|
53
|
-
async read() {
|
|
54
|
-
const startTime = performance.now();
|
|
55
|
-
|
|
56
|
-
try {
|
|
57
|
-
// Check if file exists first
|
|
58
|
-
try {
|
|
59
|
-
await fs.access(this.filePath);
|
|
60
|
-
} catch (error) {
|
|
61
|
-
// File doesn't exist - return empty data
|
|
62
|
-
return {};
|
|
63
|
-
}
|
|
64
|
-
|
|
65
|
-
const data = await fs.readFile(this.filePath, 'utf8');
|
|
66
|
-
|
|
67
|
-
// Performance tracking
|
|
68
|
-
const readTime = performance.now() - startTime;
|
|
69
|
-
this.stats.reads++;
|
|
70
|
-
this.stats.totalReadTime += readTime;
|
|
71
|
-
|
|
72
|
-
if (this.options.debug) {
|
|
73
|
-
console.log(`📖 Read ${data.length} bytes in ${readTime.toFixed(2)}ms`);
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
return JSON.parse(data);
|
|
77
|
-
} catch (error) {
|
|
78
|
-
this.stats.errors++;
|
|
79
|
-
console.error('🚨 Storage read error:', error);
|
|
80
|
-
|
|
81
|
-
// Try to recover from backup if main file is corrupted
|
|
82
|
-
return await this._recoverFromBackup();
|
|
83
|
-
}
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
/**
|
|
87
|
-
* Write data with queuing and atomic operations
|
|
88
|
-
* Prevents corruption and handles concurrent writes
|
|
89
|
-
*/
|
|
90
|
-
async write(data) {
|
|
91
|
-
return new Promise((resolve, reject) => {
|
|
92
|
-
// Queue the write operation
|
|
93
|
-
this.writeQueue.push({ data, resolve, reject });
|
|
94
|
-
|
|
95
|
-
if (!this.isWriting) {
|
|
96
|
-
this._processWriteQueue();
|
|
97
|
-
}
|
|
98
|
-
});
|
|
99
|
-
}
|
|
100
|
-
|
|
101
|
-
/**
|
|
102
|
-
* Process write queue one by one
|
|
103
|
-
* Prevents race conditions and file corruption
|
|
104
|
-
*/
|
|
105
|
-
async _processWriteQueue() {
|
|
106
|
-
if (this.writeQueue.length === 0 || this.isWriting) {
|
|
107
|
-
return;
|
|
108
|
-
}
|
|
109
|
-
|
|
110
|
-
this.isWriting = true;
|
|
111
|
-
const startTime = performance.now();
|
|
112
|
-
|
|
113
|
-
try {
|
|
114
|
-
const { data, resolve, reject } = this.writeQueue.shift();
|
|
115
|
-
|
|
116
|
-
// Check file size limit
|
|
117
|
-
const dataSize = Buffer.byteLength(JSON.stringify(data), 'utf8');
|
|
118
|
-
if (dataSize > this.options.maxFileSize) {
|
|
119
|
-
throw new Error(`File size limit exceeded: ${dataSize} > ${this.options.maxFileSize}`);
|
|
120
|
-
}
|
|
121
|
-
|
|
122
|
-
// Create backup before writing
|
|
123
|
-
if (this.options.backupOnWrite) {
|
|
124
|
-
await this._createBackup();
|
|
125
|
-
}
|
|
126
|
-
|
|
127
|
-
// Atomic write - write to temp file then rename
|
|
128
|
-
const tempPath = this.filePath + '.tmp';
|
|
129
|
-
const serializedData = JSON.stringify(data, null, 2);
|
|
130
|
-
|
|
131
|
-
await fs.writeFile(tempPath, serializedData, 'utf8');
|
|
132
|
-
await fs.rename(tempPath, this.filePath);
|
|
133
|
-
|
|
134
|
-
// Performance tracking
|
|
135
|
-
const writeTime = performance.now() - startTime;
|
|
136
|
-
this.stats.writes++;
|
|
137
|
-
this.stats.totalWriteTime += writeTime;
|
|
138
|
-
|
|
139
|
-
if (this.options.debug) {
|
|
140
|
-
console.log(`💾 Written ${serializedData.length} bytes in ${writeTime.toFixed(2)}ms`);
|
|
141
|
-
}
|
|
142
|
-
|
|
143
|
-
resolve();
|
|
144
|
-
} catch (error) {
|
|
145
|
-
this.stats.errors++;
|
|
146
|
-
console.error('🚨 Storage write error:', error);
|
|
147
|
-
this.writeQueue[0]?.reject(error);
|
|
148
|
-
} finally {
|
|
149
|
-
this.isWriting = false;
|
|
150
|
-
|
|
151
|
-
// Process next item in queue
|
|
152
|
-
if (this.writeQueue.length > 0) {
|
|
153
|
-
setImmediate(() => this._processWriteQueue());
|
|
154
|
-
}
|
|
155
|
-
}
|
|
156
|
-
}
|
|
157
|
-
|
|
158
|
-
/**
|
|
159
|
-
* Create backup of current data file
|
|
160
|
-
* Saved my data more times than I can count! 💾
|
|
161
|
-
*/
|
|
162
|
-
async _createBackup() {
|
|
163
|
-
try {
|
|
164
|
-
// Check if source file exists
|
|
165
|
-
try {
|
|
166
|
-
await fs.access(this.filePath);
|
|
167
|
-
} catch (error) {
|
|
168
|
-
// No file to backup - that's fine
|
|
169
|
-
return;
|
|
170
|
-
}
|
|
171
|
-
|
|
172
|
-
const timestamp = new Date().toISOString()
|
|
173
|
-
.replace(/[:.]/g, '-')
|
|
174
|
-
.replace('T', '_')
|
|
175
|
-
.split('.')[0];
|
|
176
|
-
|
|
177
|
-
const backupPath = `${this.filePath}.backup_${timestamp}`;
|
|
178
|
-
|
|
179
|
-
await fs.copyFile(this.filePath, backupPath);
|
|
180
|
-
this.stats.backups++;
|
|
181
|
-
|
|
182
|
-
// Clean up old backups
|
|
183
|
-
await this._cleanupOldBackups();
|
|
184
|
-
|
|
185
|
-
if (this.options.debug) {
|
|
186
|
-
console.log(`🔐 Backup created: ${backupPath}`);
|
|
187
|
-
}
|
|
188
|
-
} catch (error) {
|
|
189
|
-
console.error('🚨 Backup creation failed:', error);
|
|
190
|
-
// Don't throw - backup failure shouldn't block main write
|
|
191
|
-
}
|
|
192
|
-
}
|
|
193
|
-
|
|
194
|
-
/**
|
|
195
|
-
* Keep only the most recent backups
|
|
196
|
-
*/
|
|
197
|
-
async _cleanupOldBackups() {
|
|
198
|
-
try {
|
|
199
|
-
const dir = path.dirname(this.filePath);
|
|
200
|
-
const fileName = path.basename(this.filePath);
|
|
201
|
-
|
|
202
|
-
const files = await fs.readdir(dir);
|
|
203
|
-
const backupFiles = files
|
|
204
|
-
.filter(file => file.startsWith(fileName + '.backup_'))
|
|
205
|
-
.sort()
|
|
206
|
-
.reverse();
|
|
207
|
-
|
|
208
|
-
// Remove old backups beyond retention limit
|
|
209
|
-
for (const file of backupFiles.slice(this.options.backupRetention)) {
|
|
210
|
-
await fs.unlink(path.join(dir, file));
|
|
211
|
-
|
|
212
|
-
if (this.options.debug) {
|
|
213
|
-
console.log(`🗑️ Cleaned up old backup: ${file}`);
|
|
214
|
-
}
|
|
215
|
-
}
|
|
216
|
-
} catch (error) {
|
|
217
|
-
console.error('🚨 Backup cleanup failed:', error);
|
|
218
|
-
}
|
|
219
|
-
}
|
|
220
|
-
|
|
221
|
-
/**
|
|
222
|
-
* Try to recover data from backup if main file is corrupted
|
|
223
|
-
*/
|
|
224
|
-
async _recoverFromBackup() {
|
|
225
|
-
try {
|
|
226
|
-
const dir = path.dirname(this.filePath);
|
|
227
|
-
const fileName = path.basename(this.filePath);
|
|
228
|
-
|
|
229
|
-
const files = await fs.readdir(dir);
|
|
230
|
-
const backupFiles = files
|
|
231
|
-
.filter(file => file.startsWith(fileName + '.backup_'))
|
|
232
|
-
.sort()
|
|
233
|
-
.reverse();
|
|
234
|
-
|
|
235
|
-
for (const backupFile of backupFiles) {
|
|
236
|
-
try {
|
|
237
|
-
const backupPath = path.join(dir, backupFile);
|
|
238
|
-
const data = await fs.readFile(backupPath, 'utf8');
|
|
239
|
-
const parsed = JSON.parse(data);
|
|
240
|
-
|
|
241
|
-
console.log(`🔧 Recovered data from backup: ${backupFile}`);
|
|
242
|
-
|
|
243
|
-
// Restore the backup to main file
|
|
244
|
-
await this.write(parsed);
|
|
245
|
-
|
|
246
|
-
return parsed;
|
|
247
|
-
} catch (error) {
|
|
248
|
-
// This backup is also corrupted, try next one
|
|
249
|
-
continue;
|
|
250
|
-
}
|
|
251
|
-
}
|
|
252
|
-
|
|
253
|
-
throw new Error('No valid backup found for recovery');
|
|
254
|
-
} catch (error) {
|
|
255
|
-
console.error('🚨 Recovery from backup failed:', error);
|
|
256
|
-
return {}; // Return empty data as last resort
|
|
257
|
-
}
|
|
258
|
-
}
|
|
259
|
-
|
|
260
|
-
/**
|
|
261
|
-
* Get storage statistics
|
|
262
|
-
*/
|
|
263
|
-
getStats() {
|
|
264
|
-
return {
|
|
265
|
-
...this.stats,
|
|
266
|
-
avgReadTime: this.stats.reads > 0
|
|
267
|
-
? (this.stats.totalReadTime / this.stats.reads).toFixed(2) + 'ms'
|
|
268
|
-
: '0ms',
|
|
269
|
-
avgWriteTime: this.stats.writes > 0
|
|
270
|
-
? (this.stats.totalWriteTime / this.stats.writes).toFixed(2) + 'ms'
|
|
271
|
-
: '0ms',
|
|
272
|
-
queueLength: this.writeQueue.length,
|
|
273
|
-
isWriting: this.isWriting
|
|
274
|
-
};
|
|
275
|
-
}
|
|
276
|
-
|
|
277
|
-
/**
|
|
278
|
-
* Manual backup creation
|
|
279
|
-
*/
|
|
280
|
-
async createBackup() {
|
|
281
|
-
return await this._createBackup();
|
|
282
|
-
}
|
|
283
|
-
|
|
284
|
-
/**
|
|
285
|
-
* Get list of available backups
|
|
286
|
-
*/
|
|
287
|
-
async listBackups() {
|
|
288
|
-
try {
|
|
289
|
-
const dir = path.dirname(this.filePath);
|
|
290
|
-
const fileName = path.basename(this.filePath);
|
|
291
|
-
|
|
292
|
-
const files = await fs.readdir(dir);
|
|
293
|
-
return files
|
|
294
|
-
.filter(file => file.startsWith(fileName + '.backup_'))
|
|
295
|
-
.sort()
|
|
296
|
-
.reverse();
|
|
297
|
-
} catch (error) {
|
|
298
|
-
return [];
|
|
299
|
-
}
|
|
300
|
-
}
|
|
301
|
-
|
|
302
|
-
/**
|
|
303
|
-
* Restore from specific backup
|
|
304
|
-
*/
|
|
305
|
-
async restoreBackup(backupName) {
|
|
306
|
-
const backupPath = path.join(path.dirname(this.filePath), backupName);
|
|
307
|
-
|
|
308
|
-
try {
|
|
309
|
-
const data = await fs.readFile(backupPath, 'utf8');
|
|
310
|
-
const parsed = JSON.parse(data);
|
|
311
|
-
|
|
312
|
-
await this.write(parsed);
|
|
313
|
-
console.log(`✅ Restored from backup: ${backupName}`);
|
|
314
|
-
|
|
315
|
-
return parsed;
|
|
316
|
-
} catch (error) {
|
|
317
|
-
throw new Error(`Failed to restore backup ${backupName}: ${error.message}`);
|
|
318
|
-
}
|
|
319
|
-
}
|
|
320
|
-
}
|
|
321
|
-
|
|
1
|
+
/**
|
|
2
|
+
* Storage Layer - Handles all file I/O with performance optimizations
|
|
3
|
+
*
|
|
4
|
+
* Because reading/writing files should be fast, not frustrating
|
|
5
|
+
* Added some tricks I learned the hard way 🎯
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
const fs = require('fs').promises;
|
|
9
|
+
const path = require('path');
|
|
10
|
+
const { performance } = require('perf_hooks');
|
|
11
|
+
|
|
12
|
+
class Storage {
  /**
   * File-backed JSON storage with queued atomic writes and rolling backups.
   *
   * @param {string} filePath - Path of the JSON data file this instance manages.
   * @param {object} [options]
   * @param {boolean} [options.compression=false] - Reserved; never read by the code in this class.
   * @param {boolean} [options.backupOnWrite=true] - Create a timestamped backup before each write.
   * @param {number} [options.backupRetention=5] - Number of most-recent backups to keep.
   * @param {number} [options.maxFileSize=52428800] - Reject writes whose serialized size exceeds this (bytes).
   * @param {boolean} [options.debug] - When truthy, log read/write/backup activity to the console.
   */
  constructor(filePath, options = {}) {
    this.filePath = filePath;
    this.options = {
      compression: false,
      backupOnWrite: true,
      backupRetention: 5, // Keep last 5 backups
      maxFileSize: 50 * 1024 * 1024, // 50MB limit
      ...options
    };

    // Pending { data, resolve, reject } entries, drained one at a time.
    this.writeQueue = [];
    this.isWriting = false;
    this.stats = {
      reads: 0,
      writes: 0,
      backups: 0,
      errors: 0,
      totalReadTime: 0,
      totalWriteTime: 0
    };

    // The constructor cannot await. Attach a catch so a directory-creation
    // failure surfaces as a logged error instead of an unhandled rejection.
    this._ensureDirectory().catch((error) => {
      console.error('🚨 Failed to ensure storage directory:', error);
    });
  }

  /**
   * Create the parent directory of `filePath` if it does not exist yet.
   */
  async _ensureDirectory() {
    const dir = path.dirname(this.filePath);
    try {
      await fs.access(dir);
    } catch (error) {
      await fs.mkdir(dir, { recursive: true });
    }
  }

  /**
   * Read and parse the data file.
   *
   * @returns {Promise<object>} Parsed contents; `{}` when the file does not
   *   exist; on read/parse failure, whatever `_recoverFromBackup()` yields
   *   (newest valid backup, or `{}` as a last resort).
   */
  async read() {
    const startTime = performance.now();

    try {
      // Check if file exists first
      try {
        await fs.access(this.filePath);
      } catch (error) {
        // File doesn't exist - return empty data
        return {};
      }

      const data = await fs.readFile(this.filePath, 'utf8');

      // Performance tracking
      const readTime = performance.now() - startTime;
      this.stats.reads++;
      this.stats.totalReadTime += readTime;

      if (this.options.debug) {
        console.log(`📖 Read ${data.length} bytes in ${readTime.toFixed(2)}ms`);
      }

      return JSON.parse(data);
    } catch (error) {
      this.stats.errors++;
      console.error('🚨 Storage read error:', error);

      // Try to recover from backup if main file is corrupted
      return await this._recoverFromBackup();
    }
  }

  /**
   * Queue `data` to be written. Writes are serialized through the queue to
   * avoid concurrent writers corrupting the file.
   *
   * @param {object} data - JSON-serializable payload.
   * @returns {Promise<void>} Settles when THIS write completes or fails.
   */
  async write(data) {
    return new Promise((resolve, reject) => {
      // Queue the write operation
      this.writeQueue.push({ data, resolve, reject });

      if (!this.isWriting) {
        this._processWriteQueue();
      }
    });
  }

  /**
   * Drain one entry from the write queue, then re-schedule itself while
   * entries remain. Uses a temp-file + rename so the main file is replaced
   * atomically and never left half-written.
   */
  async _processWriteQueue() {
    if (this.writeQueue.length === 0 || this.isWriting) {
      return;
    }

    this.isWriting = true;
    const startTime = performance.now();
    // Take the entry NOW so both the success and failure paths settle the
    // promise that belongs to this write. (Array.prototype.shift cannot throw.)
    const { data, resolve, reject } = this.writeQueue.shift();

    try {
      // Check file size limit
      const dataSize = Buffer.byteLength(JSON.stringify(data), 'utf8');
      if (dataSize > this.options.maxFileSize) {
        throw new Error(`File size limit exceeded: ${dataSize} > ${this.options.maxFileSize}`);
      }

      // Create backup before writing
      if (this.options.backupOnWrite) {
        await this._createBackup();
      }

      // Atomic write - write to temp file then rename
      const tempPath = this.filePath + '.tmp';
      const serializedData = JSON.stringify(data, null, 2);

      await fs.writeFile(tempPath, serializedData, 'utf8');
      await fs.rename(tempPath, this.filePath);

      // Performance tracking
      const writeTime = performance.now() - startTime;
      this.stats.writes++;
      this.stats.totalWriteTime += writeTime;

      if (this.options.debug) {
        console.log(`💾 Written ${serializedData.length} bytes in ${writeTime.toFixed(2)}ms`);
      }

      resolve();
    } catch (error) {
      this.stats.errors++;
      console.error('🚨 Storage write error:', error);
      // BUG FIX: reject the promise of the entry that actually failed.
      // The previous code called `this.writeQueue[0]?.reject(error)`, which
      // rejected the NEXT pending write (or nothing at all) and left the
      // failed write's promise pending forever.
      reject(error);
    } finally {
      this.isWriting = false;

      // Process next item in queue
      if (this.writeQueue.length > 0) {
        setImmediate(() => this._processWriteQueue());
      }
    }
  }

  /**
   * Copy the current data file to `<filePath>.backup_<timestamp>` and prune
   * old backups. Failures are logged but never thrown, so a backup problem
   * cannot block the main write.
   */
  async _createBackup() {
    try {
      // Check if source file exists
      try {
        await fs.access(this.filePath);
      } catch (error) {
        // No file to backup - that's fine
        return;
      }

      // Filesystem-safe timestamp, e.g. 2024-01-02_03-04-05-678Z.
      // (The old `.split('.')[0]` step was dead code: the regex above has
      // already replaced every '.' with '-'.)
      const timestamp = new Date().toISOString()
        .replace(/[:.]/g, '-')
        .replace('T', '_');

      const backupPath = `${this.filePath}.backup_${timestamp}`;

      await fs.copyFile(this.filePath, backupPath);
      this.stats.backups++;

      // Clean up old backups
      await this._cleanupOldBackups();

      if (this.options.debug) {
        console.log(`🔐 Backup created: ${backupPath}`);
      }
    } catch (error) {
      console.error('🚨 Backup creation failed:', error);
      // Don't throw - backup failure shouldn't block main write
    }
  }

  /**
   * Delete every backup beyond `options.backupRetention`, newest-first
   * (the timestamped names sort lexicographically by age).
   */
  async _cleanupOldBackups() {
    try {
      const dir = path.dirname(this.filePath);
      const fileName = path.basename(this.filePath);

      const files = await fs.readdir(dir);
      const backupFiles = files
        .filter(file => file.startsWith(fileName + '.backup_'))
        .sort()
        .reverse();

      // Remove old backups beyond retention limit
      for (const file of backupFiles.slice(this.options.backupRetention)) {
        await fs.unlink(path.join(dir, file));

        if (this.options.debug) {
          console.log(`🗑️ Cleaned up old backup: ${file}`);
        }
      }
    } catch (error) {
      console.error('🚨 Backup cleanup failed:', error);
    }
  }

  /**
   * Walk the backups newest-first, restore the first one that parses, and
   * return its data. Returns `{}` when no valid backup exists.
   *
   * @returns {Promise<object>}
   */
  async _recoverFromBackup() {
    try {
      const dir = path.dirname(this.filePath);
      const fileName = path.basename(this.filePath);

      const files = await fs.readdir(dir);
      const backupFiles = files
        .filter(file => file.startsWith(fileName + '.backup_'))
        .sort()
        .reverse();

      for (const backupFile of backupFiles) {
        try {
          const backupPath = path.join(dir, backupFile);
          const data = await fs.readFile(backupPath, 'utf8');
          const parsed = JSON.parse(data);

          console.log(`🔧 Recovered data from backup: ${backupFile}`);

          // Restore the backup to main file
          await this.write(parsed);

          return parsed;
        } catch (error) {
          // This backup is also corrupted, try next one
          continue;
        }
      }

      throw new Error('No valid backup found for recovery');
    } catch (error) {
      console.error('🚨 Recovery from backup failed:', error);
      return {}; // Return empty data as last resort
    }
  }

  /**
   * Snapshot of counters plus derived averages and queue state.
   *
   * @returns {object} Stats with `avgReadTime`/`avgWriteTime` as "N.NNms"
   *   strings, current `queueLength`, and the `isWriting` flag.
   */
  getStats() {
    return {
      ...this.stats,
      avgReadTime: this.stats.reads > 0
        ? (this.stats.totalReadTime / this.stats.reads).toFixed(2) + 'ms'
        : '0ms',
      avgWriteTime: this.stats.writes > 0
        ? (this.stats.totalWriteTime / this.stats.writes).toFixed(2) + 'ms'
        : '0ms',
      queueLength: this.writeQueue.length,
      isWriting: this.isWriting
    };
  }

  /**
   * Public wrapper for manual backup creation. Never throws (see _createBackup).
   */
  async createBackup() {
    return await this._createBackup();
  }

  /**
   * List available backup file names, newest first. Returns `[]` when the
   * directory cannot be read.
   *
   * @returns {Promise<string[]>}
   */
  async listBackups() {
    try {
      const dir = path.dirname(this.filePath);
      const fileName = path.basename(this.filePath);

      const files = await fs.readdir(dir);
      return files
        .filter(file => file.startsWith(fileName + '.backup_'))
        .sort()
        .reverse();
    } catch (error) {
      return [];
    }
  }

  /**
   * Restore the data file from a specific backup (name as returned by
   * `listBackups()`), writing its contents back through the normal queue.
   *
   * @param {string} backupName - Backup file name within the data directory.
   * @returns {Promise<object>} The restored data.
   * @throws {Error} When the backup cannot be read, parsed, or written.
   */
  async restoreBackup(backupName) {
    const backupPath = path.join(path.dirname(this.filePath), backupName);

    try {
      const data = await fs.readFile(backupPath, 'utf8');
      const parsed = JSON.parse(data);

      await this.write(parsed);
      console.log(`✅ Restored from backup: ${backupName}`);

      return parsed;
    } catch (error) {
      throw new Error(`Failed to restore backup ${backupName}: ${error.message}`);
    }
  }
}
|
|
321
|
+
|
|
322
322
|
// CommonJS export: consumers get the Storage class itself.
module.exports = Storage;
|