s3db.js 7.2.1 → 7.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/s3db.cjs.js +149 -1489
- package/dist/s3db.cjs.min.js +1 -1
- package/dist/s3db.es.js +150 -1490
- package/dist/s3db.es.min.js +1 -1
- package/dist/s3db.iife.js +149 -1489
- package/dist/s3db.iife.min.js +1 -1
- package/package.json +15 -8
- package/src/behaviors/body-only.js +2 -2
- package/src/behaviors/truncate-data.js +2 -2
- package/src/client.class.js +1 -1
- package/src/database.class.js +1 -1
- package/src/errors.js +1 -1
- package/src/plugins/audit.plugin.js +5 -5
- package/src/plugins/cache/filesystem-cache.class.js +661 -0
- package/src/plugins/cache/index.js +4 -0
- package/src/plugins/cache/partition-aware-filesystem-cache.class.js +480 -0
- package/src/plugins/cache.plugin.js +159 -9
- package/src/plugins/consumers/index.js +3 -3
- package/src/plugins/consumers/sqs-consumer.js +2 -2
- package/src/plugins/fulltext.plugin.js +5 -5
- package/src/plugins/metrics.plugin.js +2 -2
- package/src/plugins/queue-consumer.plugin.js +3 -3
- package/src/plugins/replicator.plugin.js +259 -362
- package/src/plugins/replicators/s3db-replicator.class.js +35 -19
- package/src/plugins/replicators/sqs-replicator.class.js +17 -5
- package/src/resource.class.js +14 -14
- package/src/schema.class.js +3 -3
|
@@ -0,0 +1,661 @@
|
|
|
1
|
+
/**
 * Filesystem Cache Configuration Documentation
 *
 * This cache implementation stores data in the local filesystem, providing persistent storage
 * that survives process restarts and is suitable for single-instance applications.
 * It's faster than S3 cache for local operations and doesn't require network connectivity.
 *
 * @typedef {Object} FilesystemCacheConfig
 * @property {string} directory - The directory path to store cache files (required)
 * @property {string} [prefix='cache'] - Prefix for cache filenames
 * @property {number} [ttl=3600000] - Time to live in milliseconds (1 hour default)
 * @property {boolean} [enableCompression=true] - Whether to compress cache values using gzip
 * @property {number} [compressionThreshold=1024] - Minimum size in bytes to trigger compression
 * @property {boolean} [createDirectory=true] - Whether to create the directory if it doesn't exist
 * @property {string} [fileExtension='.cache'] - File extension for cache files
 * @property {boolean} [enableMetadata=true] - Whether to store metadata alongside cache data
 * @property {number} [maxFileSize=10485760] - Maximum file size in bytes (10MB default)
 * @property {boolean} [enableStats=false] - Whether to track cache statistics
 * @property {boolean} [enableCleanup=true] - Whether to automatically clean up expired files
 * @property {number} [cleanupInterval=300000] - Interval in milliseconds to run cleanup (5 minutes default)
 * @property {string} [encoding='utf8'] - File encoding to use
 * @property {number} [fileMode=0o644] - File permissions in octal notation
 * @property {boolean} [enableBackup=false] - Whether to create backup files before overwriting
 * @property {string} [backupSuffix='.bak'] - Suffix for backup files
 * @property {boolean} [enableLocking=false] - Whether to use file locking to prevent concurrent access
 * @property {number} [lockTimeout=5000] - Lock timeout in milliseconds
 * @property {boolean} [enableJournal=false] - Whether to maintain a journal of operations
 * @property {string} [journalFile='cache.journal'] - Journal filename
 *
 * @example
 * // Basic configuration
 * {
 *   directory: './cache',
 *   prefix: 'app-cache',
 *   ttl: 7200000, // 2 hours
 *   enableCompression: true
 * }
 *
 * @example
 * // Configuration with cleanup and metadata
 * {
 *   directory: '/tmp/s3db-cache',
 *   prefix: 'db-cache',
 *   ttl: 1800000, // 30 minutes
 *   enableCompression: true,
 *   compressionThreshold: 512,
 *   enableCleanup: true,
 *   cleanupInterval: 600000, // 10 minutes
 *   enableMetadata: true,
 *   maxFileSize: 5242880 // 5MB
 * }
 *
 * @example
 * // Configuration with backup and locking
 * {
 *   directory: './data/cache',
 *   ttl: 86400000, // 24 hours
 *   enableBackup: true,
 *   enableLocking: true,
 *   lockTimeout: 3000,
 *   enableJournal: true
 * }
 *
 * @example
 * // Minimal configuration
 * {
 *   directory: './cache'
 * }
 *
 * @notes
 * - Requires filesystem write permissions to the specified directory
 * - File storage is faster than S3 but limited to a single instance
 * - Compression reduces disk usage but increases CPU overhead
 * - TTL is enforced via stored metadata, falling back to file modification time
 * - Cleanup interval helps prevent disk space issues
 * - File locking prevents corruption during concurrent access within one process
 * - Journal provides an audit trail of cache operations
 * - Backup files help recover from write failures
 * - Metadata includes creation time, compression info, and custom properties
 */
|
|
81
|
+
import fs from 'fs';
|
|
82
|
+
import path from 'path';
|
|
83
|
+
import zlib from 'zlib';
|
|
84
|
+
import { promisify } from 'util';
|
|
85
|
+
import { Cache } from './cache.class.js';
|
|
86
|
+
import tryFn from '../../concerns/try-fn.js';
|
|
87
|
+
|
|
88
|
+
const readFile = promisify(fs.readFile);
|
|
89
|
+
const writeFile = promisify(fs.writeFile);
|
|
90
|
+
const unlink = promisify(fs.unlink);
|
|
91
|
+
const readdir = promisify(fs.readdir);
|
|
92
|
+
const stat = promisify(fs.stat);
|
|
93
|
+
const mkdir = promisify(fs.mkdir);
|
|
94
|
+
|
|
95
|
+
export class FilesystemCache extends Cache {
|
|
96
|
+
constructor({
|
|
97
|
+
directory,
|
|
98
|
+
prefix = 'cache',
|
|
99
|
+
ttl = 3600000,
|
|
100
|
+
enableCompression = true,
|
|
101
|
+
compressionThreshold = 1024,
|
|
102
|
+
createDirectory = true,
|
|
103
|
+
fileExtension = '.cache',
|
|
104
|
+
enableMetadata = true,
|
|
105
|
+
maxFileSize = 10485760, // 10MB
|
|
106
|
+
enableStats = false,
|
|
107
|
+
enableCleanup = true,
|
|
108
|
+
cleanupInterval = 300000, // 5 minutes
|
|
109
|
+
encoding = 'utf8',
|
|
110
|
+
fileMode = 0o644,
|
|
111
|
+
enableBackup = false,
|
|
112
|
+
backupSuffix = '.bak',
|
|
113
|
+
enableLocking = false,
|
|
114
|
+
lockTimeout = 5000,
|
|
115
|
+
enableJournal = false,
|
|
116
|
+
journalFile = 'cache.journal',
|
|
117
|
+
...config
|
|
118
|
+
}) {
|
|
119
|
+
super(config);
|
|
120
|
+
|
|
121
|
+
if (!directory) {
|
|
122
|
+
throw new Error('FilesystemCache: directory parameter is required');
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
this.directory = path.resolve(directory);
|
|
126
|
+
this.prefix = prefix;
|
|
127
|
+
this.ttl = ttl;
|
|
128
|
+
this.enableCompression = enableCompression;
|
|
129
|
+
this.compressionThreshold = compressionThreshold;
|
|
130
|
+
this.createDirectory = createDirectory;
|
|
131
|
+
this.fileExtension = fileExtension;
|
|
132
|
+
this.enableMetadata = enableMetadata;
|
|
133
|
+
this.maxFileSize = maxFileSize;
|
|
134
|
+
this.enableStats = enableStats;
|
|
135
|
+
this.enableCleanup = enableCleanup;
|
|
136
|
+
this.cleanupInterval = cleanupInterval;
|
|
137
|
+
this.encoding = encoding;
|
|
138
|
+
this.fileMode = fileMode;
|
|
139
|
+
this.enableBackup = enableBackup;
|
|
140
|
+
this.backupSuffix = backupSuffix;
|
|
141
|
+
this.enableLocking = enableLocking;
|
|
142
|
+
this.lockTimeout = lockTimeout;
|
|
143
|
+
this.enableJournal = enableJournal;
|
|
144
|
+
this.journalFile = path.join(this.directory, journalFile);
|
|
145
|
+
|
|
146
|
+
this.stats = {
|
|
147
|
+
hits: 0,
|
|
148
|
+
misses: 0,
|
|
149
|
+
sets: 0,
|
|
150
|
+
deletes: 0,
|
|
151
|
+
clears: 0,
|
|
152
|
+
errors: 0
|
|
153
|
+
};
|
|
154
|
+
|
|
155
|
+
this.locks = new Map(); // For file locking
|
|
156
|
+
this.cleanupTimer = null;
|
|
157
|
+
|
|
158
|
+
this._init();
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
async _init() {
|
|
162
|
+
// Create cache directory if needed
|
|
163
|
+
if (this.createDirectory) {
|
|
164
|
+
await this._ensureDirectory(this.directory);
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
// Start cleanup timer if enabled
|
|
168
|
+
if (this.enableCleanup && this.cleanupInterval > 0) {
|
|
169
|
+
this.cleanupTimer = setInterval(() => {
|
|
170
|
+
this._cleanup().catch(err => {
|
|
171
|
+
console.warn('FilesystemCache cleanup error:', err.message);
|
|
172
|
+
});
|
|
173
|
+
}, this.cleanupInterval);
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
async _ensureDirectory(dir) {
|
|
178
|
+
const [ok, err] = await tryFn(async () => {
|
|
179
|
+
await mkdir(dir, { recursive: true });
|
|
180
|
+
});
|
|
181
|
+
|
|
182
|
+
if (!ok && err.code !== 'EEXIST') {
|
|
183
|
+
throw new Error(`Failed to create cache directory: ${err.message}`);
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
_getFilePath(key) {
|
|
188
|
+
// Sanitize key for filesystem
|
|
189
|
+
const sanitizedKey = key.replace(/[<>:"/\\|?*]/g, '_');
|
|
190
|
+
const filename = `${this.prefix}_${sanitizedKey}${this.fileExtension}`;
|
|
191
|
+
return path.join(this.directory, filename);
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
_getMetadataPath(filePath) {
|
|
195
|
+
return filePath + '.meta';
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
async _set(key, data) {
|
|
199
|
+
const filePath = this._getFilePath(key);
|
|
200
|
+
|
|
201
|
+
try {
|
|
202
|
+
// Prepare data
|
|
203
|
+
let serialized = JSON.stringify(data);
|
|
204
|
+
const originalSize = Buffer.byteLength(serialized, this.encoding);
|
|
205
|
+
|
|
206
|
+
// Check size limit
|
|
207
|
+
if (originalSize > this.maxFileSize) {
|
|
208
|
+
throw new Error(`Cache data exceeds maximum file size: ${originalSize} > ${this.maxFileSize}`);
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
let compressed = false;
|
|
212
|
+
let finalData = serialized;
|
|
213
|
+
|
|
214
|
+
// Compress if enabled and over threshold
|
|
215
|
+
if (this.enableCompression && originalSize >= this.compressionThreshold) {
|
|
216
|
+
const compressedBuffer = zlib.gzipSync(Buffer.from(serialized, this.encoding));
|
|
217
|
+
finalData = compressedBuffer.toString('base64');
|
|
218
|
+
compressed = true;
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
// Create backup if enabled
|
|
222
|
+
if (this.enableBackup && await this._fileExists(filePath)) {
|
|
223
|
+
const backupPath = filePath + this.backupSuffix;
|
|
224
|
+
await this._copyFile(filePath, backupPath);
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
// Acquire lock if enabled
|
|
228
|
+
if (this.enableLocking) {
|
|
229
|
+
await this._acquireLock(filePath);
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
try {
|
|
233
|
+
// Write data
|
|
234
|
+
await writeFile(filePath, finalData, {
|
|
235
|
+
encoding: compressed ? 'utf8' : this.encoding,
|
|
236
|
+
mode: this.fileMode
|
|
237
|
+
});
|
|
238
|
+
|
|
239
|
+
// Write metadata if enabled
|
|
240
|
+
if (this.enableMetadata) {
|
|
241
|
+
const metadata = {
|
|
242
|
+
key,
|
|
243
|
+
timestamp: Date.now(),
|
|
244
|
+
ttl: this.ttl,
|
|
245
|
+
compressed,
|
|
246
|
+
originalSize,
|
|
247
|
+
compressedSize: compressed ? Buffer.byteLength(finalData, 'utf8') : originalSize,
|
|
248
|
+
compressionRatio: compressed ? (Buffer.byteLength(finalData, 'utf8') / originalSize).toFixed(2) : 1.0
|
|
249
|
+
};
|
|
250
|
+
|
|
251
|
+
await writeFile(this._getMetadataPath(filePath), JSON.stringify(metadata), {
|
|
252
|
+
encoding: this.encoding,
|
|
253
|
+
mode: this.fileMode
|
|
254
|
+
});
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
// Update stats
|
|
258
|
+
if (this.enableStats) {
|
|
259
|
+
this.stats.sets++;
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
// Journal operation
|
|
263
|
+
if (this.enableJournal) {
|
|
264
|
+
await this._journalOperation('set', key, { size: originalSize, compressed });
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
} finally {
|
|
268
|
+
// Release lock
|
|
269
|
+
if (this.enableLocking) {
|
|
270
|
+
this._releaseLock(filePath);
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
return data;
|
|
275
|
+
|
|
276
|
+
} catch (error) {
|
|
277
|
+
if (this.enableStats) {
|
|
278
|
+
this.stats.errors++;
|
|
279
|
+
}
|
|
280
|
+
throw new Error(`Failed to set cache key '${key}': ${error.message}`);
|
|
281
|
+
}
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
async _get(key) {
|
|
285
|
+
const filePath = this._getFilePath(key);
|
|
286
|
+
|
|
287
|
+
try {
|
|
288
|
+
// Check if file exists
|
|
289
|
+
if (!await this._fileExists(filePath)) {
|
|
290
|
+
if (this.enableStats) {
|
|
291
|
+
this.stats.misses++;
|
|
292
|
+
}
|
|
293
|
+
return null;
|
|
294
|
+
}
|
|
295
|
+
|
|
296
|
+
// Check TTL using metadata or file modification time
|
|
297
|
+
let isExpired = false;
|
|
298
|
+
|
|
299
|
+
if (this.enableMetadata) {
|
|
300
|
+
const metadataPath = this._getMetadataPath(filePath);
|
|
301
|
+
if (await this._fileExists(metadataPath)) {
|
|
302
|
+
const [ok, err, metadata] = await tryFn(async () => {
|
|
303
|
+
const metaContent = await readFile(metadataPath, this.encoding);
|
|
304
|
+
return JSON.parse(metaContent);
|
|
305
|
+
});
|
|
306
|
+
|
|
307
|
+
if (ok && metadata.ttl > 0) {
|
|
308
|
+
const age = Date.now() - metadata.timestamp;
|
|
309
|
+
isExpired = age > metadata.ttl;
|
|
310
|
+
}
|
|
311
|
+
}
|
|
312
|
+
} else if (this.ttl > 0) {
|
|
313
|
+
// Fallback to file modification time
|
|
314
|
+
const stats = await stat(filePath);
|
|
315
|
+
const age = Date.now() - stats.mtime.getTime();
|
|
316
|
+
isExpired = age > this.ttl;
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
// Remove expired files
|
|
320
|
+
if (isExpired) {
|
|
321
|
+
await this._del(key);
|
|
322
|
+
if (this.enableStats) {
|
|
323
|
+
this.stats.misses++;
|
|
324
|
+
}
|
|
325
|
+
return null;
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
// Acquire lock if enabled
|
|
329
|
+
if (this.enableLocking) {
|
|
330
|
+
await this._acquireLock(filePath);
|
|
331
|
+
}
|
|
332
|
+
|
|
333
|
+
try {
|
|
334
|
+
// Read file content
|
|
335
|
+
const content = await readFile(filePath, this.encoding);
|
|
336
|
+
|
|
337
|
+
// Check if compressed using metadata
|
|
338
|
+
let isCompressed = false;
|
|
339
|
+
if (this.enableMetadata) {
|
|
340
|
+
const metadataPath = this._getMetadataPath(filePath);
|
|
341
|
+
if (await this._fileExists(metadataPath)) {
|
|
342
|
+
const [ok, err, metadata] = await tryFn(async () => {
|
|
343
|
+
const metaContent = await readFile(metadataPath, this.encoding);
|
|
344
|
+
return JSON.parse(metaContent);
|
|
345
|
+
});
|
|
346
|
+
if (ok) {
|
|
347
|
+
isCompressed = metadata.compressed;
|
|
348
|
+
}
|
|
349
|
+
}
|
|
350
|
+
}
|
|
351
|
+
|
|
352
|
+
// Decompress if needed
|
|
353
|
+
let finalContent = content;
|
|
354
|
+
if (isCompressed || (this.enableCompression && content.match(/^[A-Za-z0-9+/=]+$/))) {
|
|
355
|
+
try {
|
|
356
|
+
const compressedBuffer = Buffer.from(content, 'base64');
|
|
357
|
+
finalContent = zlib.gunzipSync(compressedBuffer).toString(this.encoding);
|
|
358
|
+
} catch (decompressError) {
|
|
359
|
+
// If decompression fails, assume it's not compressed
|
|
360
|
+
finalContent = content;
|
|
361
|
+
}
|
|
362
|
+
}
|
|
363
|
+
|
|
364
|
+
// Parse JSON
|
|
365
|
+
const data = JSON.parse(finalContent);
|
|
366
|
+
|
|
367
|
+
// Update stats
|
|
368
|
+
if (this.enableStats) {
|
|
369
|
+
this.stats.hits++;
|
|
370
|
+
}
|
|
371
|
+
|
|
372
|
+
return data;
|
|
373
|
+
|
|
374
|
+
} finally {
|
|
375
|
+
// Release lock
|
|
376
|
+
if (this.enableLocking) {
|
|
377
|
+
this._releaseLock(filePath);
|
|
378
|
+
}
|
|
379
|
+
}
|
|
380
|
+
|
|
381
|
+
} catch (error) {
|
|
382
|
+
if (this.enableStats) {
|
|
383
|
+
this.stats.errors++;
|
|
384
|
+
}
|
|
385
|
+
// If file is corrupted or unreadable, delete it and return null
|
|
386
|
+
await this._del(key);
|
|
387
|
+
return null;
|
|
388
|
+
}
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
async _del(key) {
|
|
392
|
+
const filePath = this._getFilePath(key);
|
|
393
|
+
|
|
394
|
+
try {
|
|
395
|
+
// Delete main file
|
|
396
|
+
if (await this._fileExists(filePath)) {
|
|
397
|
+
await unlink(filePath);
|
|
398
|
+
}
|
|
399
|
+
|
|
400
|
+
// Delete metadata file
|
|
401
|
+
if (this.enableMetadata) {
|
|
402
|
+
const metadataPath = this._getMetadataPath(filePath);
|
|
403
|
+
if (await this._fileExists(metadataPath)) {
|
|
404
|
+
await unlink(metadataPath);
|
|
405
|
+
}
|
|
406
|
+
}
|
|
407
|
+
|
|
408
|
+
// Delete backup file
|
|
409
|
+
if (this.enableBackup) {
|
|
410
|
+
const backupPath = filePath + this.backupSuffix;
|
|
411
|
+
if (await this._fileExists(backupPath)) {
|
|
412
|
+
await unlink(backupPath);
|
|
413
|
+
}
|
|
414
|
+
}
|
|
415
|
+
|
|
416
|
+
// Update stats
|
|
417
|
+
if (this.enableStats) {
|
|
418
|
+
this.stats.deletes++;
|
|
419
|
+
}
|
|
420
|
+
|
|
421
|
+
// Journal operation
|
|
422
|
+
if (this.enableJournal) {
|
|
423
|
+
await this._journalOperation('delete', key);
|
|
424
|
+
}
|
|
425
|
+
|
|
426
|
+
return true;
|
|
427
|
+
|
|
428
|
+
} catch (error) {
|
|
429
|
+
if (this.enableStats) {
|
|
430
|
+
this.stats.errors++;
|
|
431
|
+
}
|
|
432
|
+
throw new Error(`Failed to delete cache key '${key}': ${error.message}`);
|
|
433
|
+
}
|
|
434
|
+
}
|
|
435
|
+
|
|
436
|
+
async _clear(prefix) {
|
|
437
|
+
try {
|
|
438
|
+
const files = await readdir(this.directory);
|
|
439
|
+
const cacheFiles = files.filter(file => {
|
|
440
|
+
if (!file.startsWith(this.prefix)) return false;
|
|
441
|
+
if (!file.endsWith(this.fileExtension)) return false;
|
|
442
|
+
|
|
443
|
+
if (prefix) {
|
|
444
|
+
// Extract key from filename
|
|
445
|
+
const keyPart = file.slice(this.prefix.length + 1, -this.fileExtension.length);
|
|
446
|
+
return keyPart.startsWith(prefix);
|
|
447
|
+
}
|
|
448
|
+
|
|
449
|
+
return true;
|
|
450
|
+
});
|
|
451
|
+
|
|
452
|
+
// Delete matching files and their metadata
|
|
453
|
+
for (const file of cacheFiles) {
|
|
454
|
+
const filePath = path.join(this.directory, file);
|
|
455
|
+
|
|
456
|
+
// Delete main file
|
|
457
|
+
if (await this._fileExists(filePath)) {
|
|
458
|
+
await unlink(filePath);
|
|
459
|
+
}
|
|
460
|
+
|
|
461
|
+
// Delete metadata file
|
|
462
|
+
if (this.enableMetadata) {
|
|
463
|
+
const metadataPath = this._getMetadataPath(filePath);
|
|
464
|
+
if (await this._fileExists(metadataPath)) {
|
|
465
|
+
await unlink(metadataPath);
|
|
466
|
+
}
|
|
467
|
+
}
|
|
468
|
+
|
|
469
|
+
// Delete backup file
|
|
470
|
+
if (this.enableBackup) {
|
|
471
|
+
const backupPath = filePath + this.backupSuffix;
|
|
472
|
+
if (await this._fileExists(backupPath)) {
|
|
473
|
+
await unlink(backupPath);
|
|
474
|
+
}
|
|
475
|
+
}
|
|
476
|
+
}
|
|
477
|
+
|
|
478
|
+
// Update stats
|
|
479
|
+
if (this.enableStats) {
|
|
480
|
+
this.stats.clears++;
|
|
481
|
+
}
|
|
482
|
+
|
|
483
|
+
// Journal operation
|
|
484
|
+
if (this.enableJournal) {
|
|
485
|
+
await this._journalOperation('clear', prefix || 'all', { count: cacheFiles.length });
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
return true;
|
|
489
|
+
|
|
490
|
+
} catch (error) {
|
|
491
|
+
if (this.enableStats) {
|
|
492
|
+
this.stats.errors++;
|
|
493
|
+
}
|
|
494
|
+
throw new Error(`Failed to clear cache: ${error.message}`);
|
|
495
|
+
}
|
|
496
|
+
}
|
|
497
|
+
|
|
498
|
+
async size() {
|
|
499
|
+
const keys = await this.keys();
|
|
500
|
+
return keys.length;
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
async keys() {
|
|
504
|
+
try {
|
|
505
|
+
const files = await readdir(this.directory);
|
|
506
|
+
const cacheFiles = files.filter(file =>
|
|
507
|
+
file.startsWith(this.prefix) &&
|
|
508
|
+
file.endsWith(this.fileExtension)
|
|
509
|
+
);
|
|
510
|
+
|
|
511
|
+
// Extract keys from filenames
|
|
512
|
+
const keys = cacheFiles.map(file => {
|
|
513
|
+
const keyPart = file.slice(this.prefix.length + 1, -this.fileExtension.length);
|
|
514
|
+
return keyPart;
|
|
515
|
+
});
|
|
516
|
+
|
|
517
|
+
return keys;
|
|
518
|
+
|
|
519
|
+
} catch (error) {
|
|
520
|
+
console.warn('FilesystemCache: Failed to list keys:', error.message);
|
|
521
|
+
return [];
|
|
522
|
+
}
|
|
523
|
+
}
|
|
524
|
+
|
|
525
|
+
// Helper methods
|
|
526
|
+
|
|
527
|
+
async _fileExists(filePath) {
|
|
528
|
+
const [ok] = await tryFn(async () => {
|
|
529
|
+
await stat(filePath);
|
|
530
|
+
});
|
|
531
|
+
return ok;
|
|
532
|
+
}
|
|
533
|
+
|
|
534
|
+
async _copyFile(src, dest) {
|
|
535
|
+
const [ok, err] = await tryFn(async () => {
|
|
536
|
+
const content = await readFile(src);
|
|
537
|
+
await writeFile(dest, content);
|
|
538
|
+
});
|
|
539
|
+
if (!ok) {
|
|
540
|
+
console.warn('FilesystemCache: Failed to create backup:', err.message);
|
|
541
|
+
}
|
|
542
|
+
}
|
|
543
|
+
|
|
544
|
+
async _cleanup() {
|
|
545
|
+
if (!this.ttl || this.ttl <= 0) return;
|
|
546
|
+
|
|
547
|
+
try {
|
|
548
|
+
const files = await readdir(this.directory);
|
|
549
|
+
const now = Date.now();
|
|
550
|
+
|
|
551
|
+
for (const file of files) {
|
|
552
|
+
if (!file.startsWith(this.prefix) || !file.endsWith(this.fileExtension)) {
|
|
553
|
+
continue;
|
|
554
|
+
}
|
|
555
|
+
|
|
556
|
+
const filePath = path.join(this.directory, file);
|
|
557
|
+
|
|
558
|
+
let shouldDelete = false;
|
|
559
|
+
|
|
560
|
+
if (this.enableMetadata) {
|
|
561
|
+
// Use metadata for TTL check
|
|
562
|
+
const metadataPath = this._getMetadataPath(filePath);
|
|
563
|
+
if (await this._fileExists(metadataPath)) {
|
|
564
|
+
const [ok, err, metadata] = await tryFn(async () => {
|
|
565
|
+
const metaContent = await readFile(metadataPath, this.encoding);
|
|
566
|
+
return JSON.parse(metaContent);
|
|
567
|
+
});
|
|
568
|
+
|
|
569
|
+
if (ok && metadata.ttl > 0) {
|
|
570
|
+
const age = now - metadata.timestamp;
|
|
571
|
+
shouldDelete = age > metadata.ttl;
|
|
572
|
+
}
|
|
573
|
+
}
|
|
574
|
+
} else {
|
|
575
|
+
// Use file modification time
|
|
576
|
+
const [ok, err, stats] = await tryFn(async () => {
|
|
577
|
+
return await stat(filePath);
|
|
578
|
+
});
|
|
579
|
+
|
|
580
|
+
if (ok) {
|
|
581
|
+
const age = now - stats.mtime.getTime();
|
|
582
|
+
shouldDelete = age > this.ttl;
|
|
583
|
+
}
|
|
584
|
+
}
|
|
585
|
+
|
|
586
|
+
if (shouldDelete) {
|
|
587
|
+
const keyPart = file.slice(this.prefix.length + 1, -this.fileExtension.length);
|
|
588
|
+
await this._del(keyPart);
|
|
589
|
+
}
|
|
590
|
+
}
|
|
591
|
+
|
|
592
|
+
} catch (error) {
|
|
593
|
+
console.warn('FilesystemCache cleanup error:', error.message);
|
|
594
|
+
}
|
|
595
|
+
}
|
|
596
|
+
|
|
597
|
+
async _acquireLock(filePath) {
|
|
598
|
+
if (!this.enableLocking) return;
|
|
599
|
+
|
|
600
|
+
const lockKey = filePath;
|
|
601
|
+
const startTime = Date.now();
|
|
602
|
+
|
|
603
|
+
while (this.locks.has(lockKey)) {
|
|
604
|
+
if (Date.now() - startTime > this.lockTimeout) {
|
|
605
|
+
throw new Error(`Lock timeout for file: ${filePath}`);
|
|
606
|
+
}
|
|
607
|
+
await new Promise(resolve => setTimeout(resolve, 10));
|
|
608
|
+
}
|
|
609
|
+
|
|
610
|
+
this.locks.set(lockKey, Date.now());
|
|
611
|
+
}
|
|
612
|
+
|
|
613
|
+
_releaseLock(filePath) {
|
|
614
|
+
if (!this.enableLocking) return;
|
|
615
|
+
this.locks.delete(filePath);
|
|
616
|
+
}
|
|
617
|
+
|
|
618
|
+
async _journalOperation(operation, key, metadata = {}) {
|
|
619
|
+
if (!this.enableJournal) return;
|
|
620
|
+
|
|
621
|
+
const entry = {
|
|
622
|
+
timestamp: new Date().toISOString(),
|
|
623
|
+
operation,
|
|
624
|
+
key,
|
|
625
|
+
metadata
|
|
626
|
+
};
|
|
627
|
+
|
|
628
|
+
const [ok, err] = await tryFn(async () => {
|
|
629
|
+
const line = JSON.stringify(entry) + '\n';
|
|
630
|
+
await fs.promises.appendFile(this.journalFile, line, this.encoding);
|
|
631
|
+
});
|
|
632
|
+
|
|
633
|
+
if (!ok) {
|
|
634
|
+
console.warn('FilesystemCache journal error:', err.message);
|
|
635
|
+
}
|
|
636
|
+
}
|
|
637
|
+
|
|
638
|
+
// Cleanup on process exit
|
|
639
|
+
destroy() {
|
|
640
|
+
if (this.cleanupTimer) {
|
|
641
|
+
clearInterval(this.cleanupTimer);
|
|
642
|
+
this.cleanupTimer = null;
|
|
643
|
+
}
|
|
644
|
+
}
|
|
645
|
+
|
|
646
|
+
// Get cache statistics
|
|
647
|
+
getStats() {
|
|
648
|
+
return {
|
|
649
|
+
...this.stats,
|
|
650
|
+
directory: this.directory,
|
|
651
|
+
ttl: this.ttl,
|
|
652
|
+
compression: this.enableCompression,
|
|
653
|
+
metadata: this.enableMetadata,
|
|
654
|
+
cleanup: this.enableCleanup,
|
|
655
|
+
locking: this.enableLocking,
|
|
656
|
+
journal: this.enableJournal
|
|
657
|
+
};
|
|
658
|
+
}
|
|
659
|
+
}
|
|
660
|
+
|
|
661
|
+
export default FilesystemCache;
|
|
@@ -1,6 +1,10 @@
|
|
|
1
1
|
export * from "./cache.class.js"
|
|
2
2
|
export * from "./memory-cache.class.js"
|
|
3
3
|
export * from "./s3-cache.class.js"
|
|
4
|
+
export * from "./filesystem-cache.class.js"
|
|
5
|
+
export * from "./partition-aware-filesystem-cache.class.js"
|
|
4
6
|
|
|
5
7
|
export { default as S3Cache } from './s3-cache.class.js';
|
|
6
8
|
export { default as MemoryCache } from './memory-cache.class.js';
|
|
9
|
+
export { default as FilesystemCache } from './filesystem-cache.class.js';
|
|
10
|
+
export { PartitionAwareFilesystemCache } from './partition-aware-filesystem-cache.class.js';
|