cacheable 1.9.0 → 1.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +149 -5
- package/dist/index.cjs +184 -124
- package/dist/index.d.cts +62 -33
- package/dist/index.d.ts +62 -33
- package/dist/index.js +185 -125
- package/package.json +1 -1
package/README.md
CHANGED
@@ -37,6 +37,9 @@
 * [Cacheable Statistics (Instance Only)](#cacheable-statistics-instance-only)
 * [Cacheable - API](#cacheable---api)
 * [CacheableMemory - In-Memory Cache](#cacheablememory---in-memory-cache)
+* [CacheableMemory Store Hashing](#cacheablememory-store-hashing)
+* [CacheableMemory LRU Feature](#cacheablememory-lru-feature)
+* [CacheableMemory Performance](#cacheablememory-performance)
 * [CacheableMemory Options](#cacheablememory-options)
 * [CacheableMemory - API](#cacheablememory---api)
 * [Keyv Storage Adapter - KeyvCacheableMemory](#keyv-storage-adapter---keyvcacheablememory)
@@ -252,6 +255,41 @@ raws.forEach((entry, idx) => {
 
 If you want your layer 2 (secondary) store to be non-blocking you can set the `nonBlocking` property to `true` in the options. This will make the secondary store non-blocking and will not wait for the secondary store to respond on `setting data`, `deleting data`, or `clearing data`. This is useful if you want to have a faster response time and not wait for the secondary store to respond.
 
+# GetOrSet
+
+The `getOrSet` method provides a convenient way to implement the cache-aside pattern. It attempts to retrieve a value
+from cache, and if not found, calls the provided function to compute the value and store it in cache before returning
+it.
+
+```typescript
+import { Cacheable } from 'cacheable';
+
+// Create a new Cacheable instance
+const cache = new Cacheable();
+
+// Use getOrSet to fetch user data
+async function getUserData(userId: string) {
+  return await cache.getOrSet(
+    `user:${userId}`,
+    async () => {
+      // This function only runs if the data isn't in the cache
+      console.log('Fetching user from database...');
+      // Simulate database fetch
+      return { id: userId, name: 'John Doe', email: 'john@example.com' };
+    },
+    { ttl: '30m' } // Cache for 30 minutes
+  );
+}
+
+// First call - will fetch from "database"
+const user1 = await getUserData('123');
+console.log(user1); // { id: '123', name: 'John Doe', email: 'john@example.com' }
+
+// Second call - will retrieve from cache
+const user2 = await getUserData('123');
+console.log(user2); // Same data, but retrieved from cache
+```
+
 ```javascript
 import { Cacheable } from 'cacheable';
 import {KeyvRedis} from '@keyv/redis';
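The non-blocking behaviour described in the context line above pairs the in-memory layer 1 cache with a slower layer 2 store. A minimal sketch, assuming the `secondary` and `nonBlocking` constructor options and an illustrative Redis connection string (the README's own example continues in the context lines of this hunk):

```javascript
import { Cacheable } from 'cacheable';
import { KeyvRedis } from '@keyv/redis';

// Layer 2 (secondary) store; with nonBlocking: true, writes, deletes,
// and clears against this store are not awaited by the caller.
const secondary = new KeyvRedis('redis://localhost:6379');
const cache = new Cacheable({ secondary, nonBlocking: true });

await cache.set('session:1', { userId: '123' }, '1h'); // resolves without waiting on Redis
```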
@@ -317,6 +355,7 @@ _This does not enable statistics for your layer 2 cache as that is a distributed
 * `deleteMany([keys])`: Deletes multiple values from the cache.
 * `clear()`: Clears the cache stores. Be careful with this as it will clear both layer 1 and layer 2.
 * `wrap(function, WrapOptions)`: Wraps an `async` function in a cache.
+* `getOrSet(key, valueFunction, ttl?)`: Gets a value from cache or sets it if not found using the provided function.
 * `disconnect()`: Disconnects from the cache stores.
 * `onHook(hook, callback)`: Sets a hook.
 * `removeHook(hook)`: Removes a hook.
@@ -351,12 +390,111 @@ This simple in-memory cache uses multiple Map objects and a with `expiration` an
 
 By default we use lazy expiration deletion which means on `get` and `getMany` type functions we look if it is expired and then delete it. If you want to have a more aggressive expiration policy you can set the `checkInterval` property to a value greater than `0` which will check for expired keys at the interval you set.
 
+Here are some of the main features of `CacheableMemory`:
+* High performance in-memory cache with a robust API and feature set. 🚀
+* Can scale past the `16,777,216 (2^24) keys` limit of a single `Map` via `storeHashSize`. Default is `16` Map objects.
+* LRU (Least Recently Used) cache feature to limit the number of keys in the cache via `lruSize`. Limited to `16,777,216 (2^24) keys` total.
+* Expiration policy to delete expired keys with lazy deletion or aggressive deletion via `checkInterval`.
+* `Wrap` feature to memoize `sync` and `async` functions with stampede protection.
+* Ability to do many operations at once such as `setMany`, `getMany`, `deleteMany`, and `takeMany`.
+* Supports `raw` data retrieval with `getRaw` and `getManyRaw` methods to get the full metadata of the cache entry.
+
+## CacheableMemory Store Hashing
+
+`CacheableMemory` uses `Map` objects to store the keys and values. To scale past the `16,777,216 (2^24) keys` limit of a single `Map` we use a hash to balance the data across multiple `Map` objects. This is done by hashing the key and using the hash to determine which `Map` object to use. The default hashing algorithm is `djb2Hash` but you can change it by setting the `storeHashAlgorithm` property in the options. By default we set the number of `Map` objects to `16`.
+
+NOTE: if you are using the LRU cache feature, no matter how many `Map` objects you have the `lruSize` is limited to the `16,777,216 (2^24) keys` limit of a single `Map` object. This is because we use a doubly linked list to manage the LRU cache and it is not possible to have more than `16,777,216 (2^24) keys` in a single `Map` object.
+
+Here is an example of how to set the number of `Map` objects and the hashing algorithm:
+
+```javascript
+import { CacheableMemory } from 'cacheable';
+const cache = new CacheableMemory({
+  storeHashSize: 32, // set the number of Map objects to 32
+});
+cache.set('key', 'value');
+const value = cache.get('key'); // value
+```
+
+Here is an example of how to use the `storeHashAlgorithm` property:
+
+```javascript
+import { CacheableMemory } from 'cacheable';
+const cache = new CacheableMemory({ storeHashAlgorithm: 'sha256' });
+cache.set('key', 'value');
+const value = cache.get('key'); // value
+```
+
+If you want to provide your own hashing function you can set the `storeHashAlgorithm` property to a function that takes a key and the store size and returns a `number` in the range of the number of `Map` stores you have.
+
+```javascript
+import { CacheableMemory } from 'cacheable';
+/**
+ * Custom hash function that takes a key and the size of the store
+ * and returns a number between 0 and storeHashSize - 1.
+ * @param {string} key - The key to hash.
+ * @param {number} storeHashSize - The size of the store (number of Map objects).
+ * @returns {number} - A number between 0 and storeHashSize - 1.
+ */
+const customHash = (key, storeHashSize) => {
+  // custom hashing logic
+  return key.length % storeHashSize; // returns a number between 0 and 31 for 32 Map objects
+};
+const cache = new CacheableMemory({ storeHashAlgorithm: customHash, storeHashSize: 32 });
+cache.set('key', 'value');
+const value = cache.get('key'); // value
+```
+
+## CacheableMemory LRU Feature
+
+You can enable the LRU (Least Recently Used) feature in `CacheableMemory` by setting the `lruSize` property in the options. This will limit the number of keys in the cache to the size you set. When the cache reaches the limit it will remove the least recently used keys from the cache. This is useful if you want to limit the memory usage of the cache.
+
+When you set the `lruSize` we use a doubly linked list to manage the LRU cache and also set the `storeHashSize` to `1`, which means we will only use a single `Map` object for the LRU cache. This is because the LRU cache is managed by the doubly linked list and it is not possible to have more than `16,777,216 (2^24) keys` in a single `Map` object.
+
+```javascript
+import { CacheableMemory } from 'cacheable';
+const cache = new CacheableMemory({ lruSize: 1 }); // sets the LRU cache size to 1 key and storeHashSize to 1
+cache.set('key1', 'value1');
+cache.set('key2', 'value2');
+const value1 = cache.get('key1');
+console.log(value1); // undefined if the cache is full and key1 is the least recently used
+const value2 = cache.get('key2');
+console.log(value2); // value2 if key2 is still in the cache
+console.log(cache.size); // 1
+```
+
+NOTE: if you set the `lruSize` property to `0` after it was enabled it will disable the LRU cache feature and will not limit the number of keys in the cache. This removes the `16,777,216 (2^24) keys` limit of a single `Map` object and allows you to store more keys in the cache.
+
+## CacheableMemory Performance
+
+Our goal with `cacheable` and `CacheableMemory` is to provide a high performance caching engine that is simple to use and has a robust API. We test it against other caching engines, including less feature-rich ones, to make sure there is little difference. Here are some of the benchmarks we have run:
+
+*Memory Benchmark Results:*
+| name                                      | summary   | ops/sec   | time/op   | margin   | samples   |
+|-------------------------------------------|:---------:|----------:|----------:|:--------:|----------:|
+| Map (v22) - set / get                     | 🥇        | 117K      | 9µs       | ±1.29%   | 110K      |
+| Cacheable Memory (v1.10.0) - set / get    | -1.3%     | 116K      | 9µs       | ±0.77%   | 110K      |
+| Node Cache - set / get                    | -4.1%     | 112K      | 9µs       | ±1.34%   | 107K      |
+| bentocache (v1.4.0) - set / get           | -45%      | 65K       | 17µs      | ±1.10%   | 100K      |
+
+*Memory LRU Benchmark Results:*
+| name                                      | summary   | ops/sec   | time/op   | margin   | samples   |
+|-------------------------------------------|:---------:|----------:|----------:|:--------:|----------:|
+| quick-lru (v7.0.1) - set / get            | 🥇        | 118K      | 9µs       | ±0.85%   | 112K      |
+| Map (v22) - set / get                     | -0.56%    | 117K      | 9µs       | ±1.35%   | 110K      |
+| lru.min (v1.1.2) - set / get              | -1.7%     | 116K      | 9µs       | ±0.90%   | 110K      |
+| Cacheable Memory (v1.10.0) - set / get    | -3.3%     | 114K      | 9µs       | ±1.16%   | 108K      |
+
+As you can see from the benchmarks `CacheableMemory` is on par with other caching engines such as `Map`, `Node Cache`, and `bentocache`. We have also tested it against other LRU caching engines such as `quick-lru` and `lru.min` and it performs well against them too.
+
 ## CacheableMemory Options
 
 * `ttl`: The time to live for the cache in milliseconds. Default is `undefined` which means indefinitely.
 * `useClones`: If the cache should use clones for the values. Default is `true`.
 * `lruSize`: The size of the LRU cache. Default is `0` which is unlimited.
 * `checkInterval`: The interval to check for expired keys in milliseconds. Default is `0` which is disabled.
+* `storeHashSize`: The number of `Map` objects to use for the cache. Default is `16`.
+* `storeHashAlgorithm`: The hashing algorithm to use for the cache. Default is `djb2Hash`.
 
 ## CacheableMemory - API
 
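To make the store-hashing paragraph above concrete, here is a standalone sketch of how a key could be mapped to one of the `Map` stores with the djb2 scheme; it mirrors the `djb2Hash` helper that appears later in this diff's `dist/index.cjs`, and the function name is illustrative only:

```javascript
// Illustrative only: map a key to a store index in [0, storeCount - 1]
// using the djb2 hashing scheme shown in dist/index.cjs.
function djb2ToStoreIndex(key, storeCount) {
  let hash = 5381;
  for (let i = 0; i < key.length; i++) {
    hash = (hash * 33) ^ key.charCodeAt(i);
  }
  return Math.abs(hash) % storeCount;
}

console.log(djb2ToStoreIndex('user:123', 16)); // index between 0 and 15 identifying which Map holds this key
```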
@@ -374,13 +512,19 @@ By default we use lazy expiration deletion which means on `get` and `getMany` ty
 * `takeMany([keys])`: Takes multiple values from the cache and deletes them.
 * `wrap(function, WrapSyncOptions)`: Wraps a `sync` function in a cache.
 * `clear()`: Clears the cache.
-* `
-* `
-* `
+* `ttl`: The default time to live for the cache in milliseconds. Default is `undefined` which is disabled.
+* `useClones`: If the cache should use clones for the values. Default is `true`.
+* `lruSize`: The size of the LRU cache. Default is `0` which is unlimited.
+* `size`: The number of keys in the cache.
+* `checkInterval`: The interval to check for expired keys in milliseconds. Default is `0` which is disabled.
+* `storeHashSize`: The number of `Map` objects to use for the cache. Default is `16`.
+* `storeHashAlgorithm`: The hashing algorithm to use for the cache. Default is `djb2Hash`.
+* `keys`: Get the keys in the cache. Not able to be set.
+* `items`: Get the items in the cache as `CacheableStoreItem` example `{ key, value, expires? }`.
+* `store`: The hash store for the cache which is an array of `Map` objects.
 * `checkExpired()`: Checks for expired keys in the cache. This is used by the `checkInterval` property.
 * `startIntervalCheck()`: Starts the interval check for expired keys if `checkInterval` is above 0 ms.
 * `stopIntervalCheck()`: Stops the interval check for expired keys.
-* `hash(object: any, algorithm = 'sha256'): string`: Hashes an object with the algorithm. Default is `sha256`.
 
 # Keyv Storage Adapter - KeyvCacheableMemory
 
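A short sketch of reading the properties listed above (`size`, `keys`, and `items` are getters per the `dist/index.cjs` changes in this diff; the printed values are illustrative):

```javascript
import { CacheableMemory } from 'cacheable';

const cache = new CacheableMemory({ ttl: '5m' });
cache.set('foo', 'bar');

console.log(cache.size);          // 1
console.log([...cache.keys]);     // ['foo']
console.log([...cache.items]);    // [{ key: 'foo', value: 'bar', expires: <timestamp> }]
console.log(cache.storeHashSize); // 16 (default number of Map stores)
```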
@@ -475,4 +619,4 @@ console.log(wrappedFunction()); // error from cache
 You can contribute by forking the repo and submitting a pull request. Please make sure to add tests and update the documentation. To learn more about how to contribute go to our main README [https://github.com/jaredwray/cacheable](https://github.com/jaredwray/cacheable). This will talk about how to `Open a Pull Request`, `Ask a Question`, or `Post an Issue`.
 
 # License and Copyright
-[MIT © Jared Wray](./LICENSE)
+[MIT © Jared Wray](./LICENSE)
package/dist/index.cjs
CHANGED
@@ -132,6 +132,26 @@ function hash(object, algorithm = "sha256") {
   hasher.update(objectString);
   return hasher.digest("hex");
 }
+function hashToNumber(object, min = 0, max = 10, algorithm = "sha256") {
+  const objectString = JSON.stringify(object);
+  if (!crypto.getHashes().includes(algorithm)) {
+    throw new Error(`Unsupported hash algorithm: '${algorithm}'`);
+  }
+  const hasher = crypto.createHash(algorithm);
+  hasher.update(objectString);
+  const hashHex = hasher.digest("hex");
+  const hashNumber = Number.parseInt(hashHex, 16);
+  const range = max - min + 1;
+  return min + hashNumber % range;
+}
+function djb2Hash(string_, min = 0, max = 10) {
+  let hash2 = 5381;
+  for (let i = 0; i < string_.length; i++) {
+    hash2 = hash2 * 33 ^ string_.charCodeAt(i);
+  }
+  const range = max - min + 1;
+  return min + Math.abs(hash2) % range;
+}
 
 // src/coalesce-async.ts
 var callbacks = /* @__PURE__ */ new Map();
@@ -208,29 +228,31 @@ function wrapSync(function_, options) {
     return value;
   };
 }
+async function getOrSet(key, function_, options) {
+  let value = await options.cache.get(key);
+  if (value === void 0) {
+    const cacheId = options.cacheId ?? "default";
+    const coalesceKey = `${cacheId}::${key}`;
+    value = await coalesceAsync(coalesceKey, async () => {
+      try {
+        const result = await function_();
+        await options.cache.set(key, result, options.ttl);
+        return result;
+      } catch (error) {
+        options.cache.emit("error", error);
+        if (options.cacheErrors) {
+          await options.cache.set(key, error, options.ttl);
+        }
+      }
+    });
+  }
+  return value;
+}
 function wrap(function_, options) {
-  const {
+  const { keyPrefix, cache } = options;
   return async function(...arguments_) {
-    let value;
     const cacheKey = createWrapKey(function_, arguments_, keyPrefix);
-
-    if (value === void 0) {
-      const cacheId = options.cacheId ?? "default";
-      const coalesceKey = `${cacheId}::${cacheKey}`;
-      value = await coalesceAsync(coalesceKey, async () => {
-        try {
-          const result = await function_(...arguments_);
-          await cache.set(cacheKey, result, ttl);
-          return result;
-        } catch (error) {
-          cache.emit("error", error);
-          if (options.cacheErrors) {
-            await cache.set(cacheKey, error, ttl);
-          }
-        }
-      });
-    }
-    return value;
+    return cache.getOrSet(cacheKey, async () => function_(...arguments_), options);
   };
 }
 function createWrapKey(function_, arguments_, keyPrefix) {
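The refactor above routes `wrap` through `getOrSet`, so concurrent misses for the same key are coalesced (the stampede protection the README mentions). A minimal usage sketch; the wrapped function and the option values shown are illustrative:

```javascript
import { Cacheable } from 'cacheable';

const cache = new Cacheable();

// Illustrative async function; with wrap, concurrent calls that miss the cache
// are coalesced so fetchConfig runs only once per distinct set of arguments.
const fetchConfig = async (env) => ({ env, loadedAt: Date.now() });
const cachedFetchConfig = cache.wrap(fetchConfig, { ttl: '10m', keyPrefix: 'config' });

const [a, b] = await Promise.all([cachedFetchConfig('prod'), cachedFetchConfig('prod')]);
console.log(a, b); // identical cached value; fetchConfig executed once
```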
@@ -314,19 +336,14 @@ var DoublyLinkedList = class {
 };
 
 // src/memory.ts
+var defaultStoreHashSize = 16;
+var maximumMapSize = 16777216;
 var CacheableMemory = class extends import_hookified.Hookified {
   _lru = new DoublyLinkedList();
-
-
-
-
-  _hash3 = /* @__PURE__ */ new Map();
-  _hash4 = /* @__PURE__ */ new Map();
-  _hash5 = /* @__PURE__ */ new Map();
-  _hash6 = /* @__PURE__ */ new Map();
-  _hash7 = /* @__PURE__ */ new Map();
-  _hash8 = /* @__PURE__ */ new Map();
-  _hash9 = /* @__PURE__ */ new Map();
+  _storeHashSize = defaultStoreHashSize;
+  _storeHashAlgorithm = "djb2Hash" /* djb2Hash */;
+  // Default is djb2Hash
+  _store = Array.from({ length: this._storeHashSize }, () => /* @__PURE__ */ new Map());
   _ttl;
   // Turned off by default
   _useClone = true;
@@ -349,12 +366,23 @@ var CacheableMemory = class extends import_hookified.Hookified {
     if (options?.useClone !== void 0) {
       this._useClone = options.useClone;
     }
+    if (options?.storeHashSize && options.storeHashSize > 0) {
+      this._storeHashSize = options.storeHashSize;
+    }
     if (options?.lruSize) {
-
+      if (options.lruSize > maximumMapSize) {
+        this.emit("error", new Error(`LRU size cannot be larger than ${maximumMapSize} due to Map limitations.`));
+      } else {
+        this._lruSize = options.lruSize;
+      }
     }
     if (options?.checkInterval) {
       this._checkInterval = options.checkInterval;
     }
+    if (options?.storeHashAlgorithm) {
+      this._storeHashAlgorithm = options.storeHashAlgorithm;
+    }
+    this._store = Array.from({ length: this._storeHashSize }, () => /* @__PURE__ */ new Map());
     this.startIntervalCheck();
   }
   /**
@@ -387,17 +415,25 @@ var CacheableMemory = class extends import_hookified.Hookified {
   }
   /**
    * Gets the size of the LRU cache
-   * @returns {number} - The size of the LRU cache. If set to 0, it will not use LRU cache. Default is 0.
+   * @returns {number} - The size of the LRU cache. If set to 0, it will not use LRU cache. Default is 0. If you are using LRU then the limit is based on Map() size 17mm.
    */
   get lruSize() {
     return this._lruSize;
   }
   /**
    * Sets the size of the LRU cache
-   * @param {number} value - The size of the LRU cache. If set to 0, it will not use LRU cache. Default is 0.
+   * @param {number} value - The size of the LRU cache. If set to 0, it will not use LRU cache. Default is 0. If you are using LRU then the limit is based on Map() size 17mm.
    */
   set lruSize(value) {
+    if (value > maximumMapSize) {
+      this.emit("error", new Error(`LRU size cannot be larger than ${maximumMapSize} due to Map limitations.`));
+      return;
+    }
     this._lruSize = value;
+    if (this._lruSize === 0) {
+      this._lru = new DoublyLinkedList();
+      return;
+    }
     this.lruResize();
   }
   /**
@@ -419,21 +455,85 @@ var CacheableMemory = class extends import_hookified.Hookified {
    * @returns {number} - The size of the cache
    */
   get size() {
-
+    let size = 0;
+    for (const store of this._store) {
+      size += store.size;
+    }
+    return size;
+  }
+  /**
+   * Gets the number of hash stores
+   * @returns {number} - The number of hash stores
+   */
+  get storeHashSize() {
+    return this._storeHashSize;
+  }
+  /**
+   * Sets the number of hash stores. This will recreate the store and all data will be cleared
+   * @param {number} value - The number of hash stores
+   */
+  set storeHashSize(value) {
+    if (value === this._storeHashSize) {
+      return;
+    }
+    this._storeHashSize = value;
+    this._store = Array.from({ length: this._storeHashSize }, () => /* @__PURE__ */ new Map());
+  }
+  /**
+   * Gets the store hash algorithm
+   * @returns {StoreHashAlgorithm | StoreHashAlgorithmFunction} - The store hash algorithm
+   */
+  get storeHashAlgorithm() {
+    return this._storeHashAlgorithm;
+  }
+  /**
+   * Sets the store hash algorithm. This will recreate the store and all data will be cleared
+   * @param {StoreHashAlgorithm | StoreHashAlgorithmFunction} value - The store hash algorithm
+   */
+  set storeHashAlgorithm(value) {
+    this._storeHashAlgorithm = value;
   }
   /**
    * Gets the keys
    * @returns {IterableIterator<string>} - The keys
    */
   get keys() {
-
+    const keys = new Array();
+    for (const store of this._store) {
+      for (const key of store.keys()) {
+        const item = store.get(key);
+        if (item && this.hasExpired(item)) {
+          store.delete(key);
+          continue;
+        }
+        keys.push(key);
+      }
+    }
+    return keys.values();
   }
   /**
    * Gets the items
    * @returns {IterableIterator<CacheableStoreItem>} - The items
    */
   get items() {
-
+    const items = new Array();
+    for (const store of this._store) {
+      for (const item of store.values()) {
+        if (this.hasExpired(item)) {
+          store.delete(item.key);
+          continue;
+        }
+        items.push(item);
+      }
+    }
+    return items.values();
+  }
+  /**
+   * Gets the store
+   * @returns {Array<Map<string, CacheableStoreItem>>} - The store
+   */
+  get store() {
+    return this._store;
   }
   /**
    * Gets the value of the key
@@ -446,7 +546,7 @@ var CacheableMemory = class extends import_hookified.Hookified {
     if (!item) {
       return void 0;
     }
-    if (item.expires &&
+    if (item.expires && Date.now() > item.expires) {
       store.delete(key);
       return void 0;
     }
@@ -613,7 +713,6 @@ var CacheableMemory = class extends import_hookified.Hookified {
   delete(key) {
     const store = this.getStore(key);
     store.delete(key);
-    this._hashCache.delete(key);
   }
   /**
    * Delete the keys
@@ -630,17 +729,7 @@ var CacheableMemory = class extends import_hookified.Hookified {
    * @returns {void}
    */
   clear() {
-    this.
-    this._hash1.clear();
-    this._hash2.clear();
-    this._hash3.clear();
-    this._hash4.clear();
-    this._hash5.clear();
-    this._hash6.clear();
-    this._hash7.clear();
-    this._hash8.clear();
-    this._hash9.clear();
-    this._hashCache.clear();
+    this._store = Array.from({ length: this._storeHashSize }, () => /* @__PURE__ */ new Map());
     this._lru = new DoublyLinkedList();
   }
   /**
@@ -649,66 +738,27 @@ var CacheableMemory = class extends import_hookified.Hookified {
    * @returns {CacheableHashStore} - The store
    */
   getStore(key) {
-    const hash2 = this.
-
+    const hash2 = this.getKeyStoreHash(key);
+    this._store[hash2] ||= /* @__PURE__ */ new Map();
+    return this._store[hash2];
   }
   /**
-   *
-   * @param {
-   *
+   * Hash the key for which store to go to (internal use)
+   * @param {string} key - The key to hash
+   * Available algorithms are: SHA256, SHA1, MD5, and djb2Hash.
+   * @returns {number} - The hashed key as a number
    */
-
-
-
-        return this._hash1;
-      }
-      case 2: {
-        return this._hash2;
-      }
-      case 3: {
-        return this._hash3;
-      }
-      case 4: {
-        return this._hash4;
-      }
-      case 5: {
-        return this._hash5;
-      }
-      case 6: {
-        return this._hash6;
-      }
-      case 7: {
-        return this._hash7;
-      }
-      case 8: {
-        return this._hash8;
-      }
-      case 9: {
-        return this._hash9;
-      }
-      default: {
-        return this._hash0;
-      }
+  getKeyStoreHash(key) {
+    if (this._store.length === 1) {
+      return 0;
     }
-
-
-   * Hash the key (internal use)
-   * @param key
-   * @returns {number} from 0 to 9
-   */
-  hashKey(key) {
-    const cacheHashNumber = this._hashCache.get(key);
-    if (typeof cacheHashNumber === "number") {
-      return cacheHashNumber;
+    if (this._storeHashAlgorithm === "djb2Hash" /* djb2Hash */) {
+      return djb2Hash(key, 0, this._storeHashSize);
     }
-
-
-    for (let i = 0; i < key.length; i++) {
-      hash2 = hash2 * primeMultiplier + key.charCodeAt(i);
+    if (typeof this._storeHashAlgorithm === "function") {
+      return this._storeHashAlgorithm(key, this._storeHashSize);
     }
-
-    this._hashCache.set(key, result);
-    return result;
+    return hashToNumber(key, 0, this._storeHashSize, this._storeHashAlgorithm);
   }
   /**
    * Clone the value. This is for internal use
@@ -744,13 +794,10 @@ var CacheableMemory = class extends import_hookified.Hookified {
     this._lru.moveToFront(key);
   }
   /**
-   * Resize the LRU cache. This is for internal use
+   * Resize the LRU cache. This is for internal use.
    * @returns {void}
    */
   lruResize() {
-    if (this._lruSize === 0) {
-      return;
-    }
     while (this._lru.size > this._lruSize) {
       const oldestKey = this._lru.getOldest();
       if (oldestKey) {
@@ -764,10 +811,11 @@ var CacheableMemory = class extends import_hookified.Hookified {
    * @returns {void}
    */
   checkExpiration() {
-    const
-
-
-
+    for (const store of this._store) {
+      for (const item of store.values()) {
+        if (item.expires && Date.now() > item.expires) {
+          store.delete(item.key);
+        }
       }
     }
   }
@@ -796,15 +844,6 @@ var CacheableMemory = class extends import_hookified.Hookified {
     this._interval = 0;
     this._checkInterval = 0;
   }
-  /**
-   * Hash the object. This is for internal use
-   * @param {any} object - The object to hash
-   * @param {string} [algorithm='sha256'] - The algorithm to hash
-   * @returns {string} - The hashed string
-   */
-  hash(object, algorithm = "sha256") {
-    return hash(object, algorithm);
-  }
   /**
    * Wrap the function for caching
    * @param {Function} function_ - The function to wrap
@@ -829,9 +868,6 @@ var CacheableMemory = class extends import_hookified.Hookified {
     }
     return result;
   }
-  concatStores() {
-    return new Map([...this._hash0, ...this._hash1, ...this._hash2, ...this._hash3, ...this._hash4, ...this._hash5, ...this._hash6, ...this._hash7, ...this._hash8, ...this._hash9]);
-  }
   setTtl(ttl) {
     if (typeof ttl === "string" || ttl === void 0) {
       this._ttl = ttl;
@@ -841,6 +877,12 @@ var CacheableMemory = class extends import_hookified.Hookified {
       this._ttl = void 0;
     }
   }
+  hasExpired(item) {
+    if (item.expires && Date.now() > item.expires) {
+      return true;
+    }
+    return false;
+  }
 };
 
 // src/keyv-memory.ts
@@ -1700,6 +1742,24 @@ var Cacheable = class extends import_hookified2.Hookified {
     };
     return wrap(function_, wrapOptions);
   }
+  /**
+   * Retrieves the value associated with the given key from the cache. If the key is not found,
+   * invokes the provided function to calculate the value, stores it in the cache, and then returns it.
+   *
+   * @param {string} key - The key to retrieve or set in the cache.
+   * @param {() => Promise<T>} function_ - The asynchronous function that computes the value to be cached if the key does not exist.
+   * @param {WrapFunctionOptions} [options] - Optional settings for caching, such as the time to live (TTL) or whether to cache errors.
+   * @return {Promise<T | undefined>} - A promise that resolves to the cached or newly computed value, or undefined if an error occurs and caching is not configured for errors.
+   */
+  async getOrSet(key, function_, options) {
+    const getOrSetOptions = {
+      cache: this,
+      cacheId: this._cacheId,
+      ttl: options?.ttl ?? this._ttl,
+      cacheErrors: options?.cacheErrors
+    };
+    return getOrSet(key, function_, getOrSetOptions);
+  }
   /**
    * Will hash an object using the specified algorithm. The default algorithm is 'sha256'.
    * @param {any} object the object to hash