cachimbo 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +89 -0
- package/dist/index.cjs +1125 -0
- package/dist/index.d.cts +762 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.ts +762 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +1108 -0
- package/dist/index.js.map +1 -0
- package/package.json +76 -0
package/dist/index.cjs
ADDED
@@ -0,0 +1,1125 @@
let lru_cache = require("lru-cache");
let __isaacs_ttlcache = require("@isaacs/ttlcache");
//#region src/base/index.ts
/**
 * The base implementation of a cache.
 *
 * This class only requires subclasses to implement {@link ICache#get}, {@link ICache#set} and {@link ICache#delete}.
 * All other methods fall back to these three.
 */
var BaseCache = class {
  name;
  logger;
  constructor(options) {
    this.name = options.name;
    this.logger = options.logger;
  }
  async getOrLoad(key, load, options) {
    let data = await this.get(key);
    if (data !== null) {
      this.logger?.debug(this.name, "[getOrLoad] Returning from cache.", "key =", key);
      return data;
    }
    this.logger?.debug(this.name, "[getOrLoad] Refreshing the cache...", "key =", key);
    data = await load();
    await this.set(key, data, options);
    return data;
  }
  async getMany(keys) {
    this.logger?.debug(this.name, "[getMany] Reading all keys in parallel...", "keys =", keys);
    return Object.fromEntries(await Promise.all(keys.map(async (key) => [key, await this.get(key)])));
  }
  async setMany(data, options) {
    this.logger?.debug(this.name, "[setMany] Writing all keys in parallel...", "data =", data);
    await Promise.all(Object.entries(data).map(([key, value]) => this.set(key, value, options)));
  }
  async deleteMany(keys) {
    this.logger?.debug(this.name, "[deleteMany] Deleting all keys in parallel...", "keys =", keys);
    await Promise.all(keys.map((key) => this.delete(key)));
  }
};

//#endregion
//#region src/local/lru/index.ts
/**
 * An in-memory cache implementation of a Least-Recently-Used cache eviction algorithm.
 *
 * It allows setting an expiration time and a limit of cached items.
 *
 * Once the limit of items is reached, the least recently used items will be purged.
 */
var LocalLRUCache = class extends BaseCache {
  cache;
  shouldUseFetch;
  constructor(options = {}) {
    super(options);
    if ("cache" in options) {
      this.cache = options.cache;
      this.shouldUseFetch = options.shouldUseFetch;
    } else {
      this.cache = new lru_cache.LRUCache({
        ttl: options.ttl ? options.ttl * 1e3 : void 0,
        max: options.max || 1e4,
        ttlAutopurge: false,
        fetchMethod: (_key, _staleValue, options$1) => options$1.context()
      });
      this.shouldUseFetch = true;
    }
  }
  async get(key) {
    this.logger?.debug(this.name, "[get]", "key =", key);
    const data = this.cache.get(key);
    return data === void 0 ? null : data;
  }
  getOrLoad(key, load, options) {
    if (!this.shouldUseFetch) return super.getOrLoad(key, load, options);
    this.logger?.debug(this.name, "[getOrLoad] Running LRUCache's fetch...", "key =", key);
    const ttl = options?.ttl;
    return this.cache.fetch(key, {
      context: load,
      ttl: ttl ? ttl * 1e3 : void 0
    });
  }
  async set(key, value, options) {
    this.logger?.debug(this.name, "[set]", "key =", key);
    const ttl = options?.ttl;
    this.cache.set(key, value, { ttl: ttl ? ttl * 1e3 : void 0 });
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete]", "key =", key);
    this.cache.delete(key);
  }
};

//#endregion
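Since the constructor multiplies ttl by 1e3 before handing it to lru-cache, TTLs throughout this file are in seconds. A usage sketch, assuming an async context (fetchUser is a hypothetical loader):

const users = new LocalLRUCache({ name: "users", max: 500, ttl: 60 });

// Delegates to LRUCache#fetch, so the loader runs at most once per key
// while a fetch for that key is in flight.
const user = await users.getOrLoad("user:42", () => fetchUser(42), { ttl: 120 });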
//#region src/local/ttl/index.ts
/**
 * An in-memory cache implementation that allows setting an expiration time and a limit of cached items.
 *
 * Once the limit of items is reached, the soonest expiring items will be purged.
 */
var LocalTTLCache = class extends BaseCache {
  cache;
  constructor(options = {}) {
    super(options);
    if ("cache" in options) this.cache = options.cache;
    else this.cache = new __isaacs_ttlcache.TTLCache({
      max: options.max,
      ttl: options.ttl ? options.ttl * 1e3 : void 0
    });
  }
  async get(key) {
    this.logger?.debug(this.name, "[get]", "key =", key);
    const data = this.cache.get(key);
    return data === void 0 ? null : data;
  }
  async set(key, value, options) {
    this.logger?.debug(this.name, "[set]", "key =", key);
    const ttl = options?.ttl;
    this.cache.set(key, value, { ttl: ttl ? ttl * 1e3 : void 0 });
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete]", "key =", key);
    this.cache.delete(key);
  }
};

//#endregion
//#region src/local/map/index.ts
/**
 * A simple in-memory cache implementation based on {@link Map}.
 *
 * It ignores expiration times, but a limit of cached items can be set.
 *
 * Once the limit of items is reached, the first inserted keys will be purged.
 */
var LocalMapCache = class extends BaseCache {
  cache;
  max;
  constructor(options = {}) {
    super(options);
    this.cache = options.cache ?? /* @__PURE__ */ new Map();
    this.max = options.max ?? Infinity;
  }
  async get(key) {
    this.logger?.debug(this.name, "[get]", "key =", key);
    const data = this.cache.get(key);
    return data === void 0 ? null : data;
  }
  async set(key, value, options) {
    this.logger?.debug(this.name, "[set]", "key =", key);
    if (this.cache.size >= this.max && !this.cache.has(key)) this.evict(1);
    this.cache.set(key, value);
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete]", "key =", key);
    this.cache.delete(key);
  }
  async setMany(data, options) {
    this.logger?.debug(this.name, "[setMany]", "data =", data);
    const entries = Object.entries(data);
    const newEntries = entries.filter(([key]) => !this.cache.has(key)).length;
    if (this.cache.size + newEntries > this.max) this.evict(this.cache.size + newEntries - this.max);
    for (const [key, value] of entries) this.cache.set(key, value);
  }
  clear() {
    this.logger?.debug(this.name, "[clear]");
    this.cache.clear();
  }
  evict(length) {
    const keys = this.cache.keys();
    for (let i = 0; i < length; i++) {
      const key = keys.next();
      if (key.done) break;
      this.logger?.debug(this.name, "[evict]", "key =", key.value);
      this.cache.delete(key.value);
    }
  }
};

//#endregion
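Because evict walks the Map in insertion order, LocalMapCache behaves as a FIFO cache once max is reached; TTL options are accepted but ignored. A quick sketch of the eviction behavior:

const cache = new LocalMapCache({ max: 2 });
await cache.set("a", 1);
await cache.set("b", 2);
await cache.set("c", 3); // evicts "a", the first inserted key
console.log(await cache.get("a")); // null
console.log(await cache.get("c")); // 3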
//#region src/local/noop/index.ts
/**
 * A cache implementation that does nothing.
 * It's useful for disabling the cache.
 *
 * @example
 * ```ts
 * const cache = isCacheEnabled ? new LocalTTLCache() : new NoOpCache();
 * ```
 */
var NoOpCache = class {
  constructor() {}
  async get(key) {
    return null;
  }
  async getMany(keys) {
    return {};
  }
  getOrLoad(key, load, options) {
    return load();
  }
  async set(key, value, options) {}
  async setMany(data, options) {}
  async delete(key) {}
  async deleteMany(keys) {}
};

//#endregion
//#region src/remote/ioredis/index.ts
/**
 * A Redis cache implementation using ioredis
 */
var IORedisCache = class extends BaseCache {
  client;
  defaultTTL;
  isMSETEXSupported;
  constructor(options) {
    super(options);
    this.client = options.client;
    this.defaultTTL = options.defaultTTL;
    this.isMSETEXSupported = options.isMSETEXSupported;
  }
  async get(key) {
    this.logger?.debug(this.name, "[get] Running \"GET\" command...", "key =", key);
    const raw = await this.client.get(key);
    return raw ? JSON.parse(raw) : null;
  }
  async set(key, value, options) {
    this.logger?.debug(this.name, "[set] Running \"SET\" command...", "key =", key);
    const ttl = options?.ttl ?? this.defaultTTL;
    const raw = JSON.stringify(value);
    if (ttl) await this.client.set(key, raw, "EX", ttl);
    else await this.client.set(key, raw);
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete] Running \"DEL\" command...", "key =", key);
    await this.client.del(key);
  }
  async getMany(keys) {
    this.logger?.debug(this.name, "[getMany] Running \"MGET\" command...", "keys =", keys);
    const values = await this.client.mget(keys);
    const data = {};
    for (let i = 0; i < keys.length; i++) {
      const value = values[i];
      data[keys[i]] = value ? JSON.parse(value) : null;
    }
    return data;
  }
  async setMany(data, options) {
    if (!this.isMSETEXSupported) return super.setMany(data, options);
    this.logger?.debug(this.name, "[setMany] Running \"MSETEX\" command...", "data =", data);
    const entries = Object.entries(data);
    const ttl = options?.ttl ?? this.defaultTTL;
    const raw = entries.flatMap(([key, value]) => [key, JSON.stringify(value)]);
    await this.client.call("MSETEX", entries.length, ...raw, ...ttl ? ["EX", ttl] : []);
  }
  async deleteMany(keys) {
    this.logger?.debug(this.name, "[deleteMany] Running \"DEL\" command...", "keys =", keys);
    await this.client.del(keys);
  }
};

//#endregion
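A wiring sketch for the ioredis wrapper. Values are stored as JSON strings and TTLs are sent as EX seconds; defaultTTL fills in when a call passes no ttl. The connection URL is illustrative, and isMSETEXSupported should stay unset unless the server actually provides the non-standard MSETEX command used by setMany:

const Redis = require("ioredis");

const cache = new IORedisCache({
  name: "redis",
  client: new Redis("redis://localhost:6379"),
  defaultTTL: 300
});

await cache.set("greeting", { hello: "world" }); // SET greeting '{"hello":"world"}' EX 300
console.log(await cache.get("greeting"));        // { hello: "world" }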
//#region src/remote/redis/index.ts
/**
 * A Redis cache implementation using node-redis
 */
var RedisCache = class extends BaseCache {
  client;
  defaultTTL;
  isMSETEXSupported;
  constructor(options) {
    super(options);
    this.client = options.client;
    this.defaultTTL = options.defaultTTL;
    this.isMSETEXSupported = options.isMSETEXSupported;
  }
  async get(key) {
    this.logger?.debug(this.name, "[get] Running \"GET\" command...", "key =", key);
    const raw = await this.client.get(key);
    return raw ? JSON.parse(raw) : null;
  }
  async set(key, value, options) {
    this.logger?.debug(this.name, "[set] Running \"SET\" command...", "key =", key);
    const ttl = options?.ttl ?? this.defaultTTL;
    await this.client.set(key, JSON.stringify(value), {
      expiration: ttl ? { type: "EX", value: ttl } : void 0
    });
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete] Running \"DEL\" command...", "key =", key);
    await this.client.del(key);
  }
  async getMany(keys) {
    this.logger?.debug(this.name, "[getMany] Running \"MGET\" command...", "keys =", keys);
    const values = await this.client.mGet(keys);
    const data = {};
    for (let i = 0; i < keys.length; i++) {
      const value = values[i];
      data[keys[i]] = value ? JSON.parse(value) : null;
    }
    return data;
  }
  async setMany(data, options) {
    if (!this.isMSETEXSupported) return super.setMany(data, options);
    this.logger?.debug(this.name, "[setMany] Running \"MSETEX\" command...", "data =", data);
    const raw = [];
    const ttl = options?.ttl ?? this.defaultTTL;
    for (const [key, value] of Object.entries(data)) raw.push([key, JSON.stringify(value)]);
    await this.client.mSetEx(raw, {
      expiration: ttl ? { type: "EX", value: ttl } : void 0
    });
  }
  async deleteMany(keys) {
    this.logger?.debug(this.name, "[deleteMany] Running \"DEL\" command...", "keys =", keys);
    await this.client.del(keys);
  }
};

//#endregion
//#region src/remote/valkey-glide/index.ts
/**
 * A Valkey cache implementation using @valkey/valkey-glide
 */
var ValkeyGlideCache = class extends BaseCache {
  client;
  defaultTTL;
  constructor(options) {
    super(options);
    this.client = options.client;
    this.defaultTTL = options.defaultTTL;
  }
  async get(key) {
    this.logger?.debug(this.name, "[get] Running \"GET\" command...", "key =", key);
    const raw = await this.client.get(key);
    return raw ? JSON.parse(raw.toString()) : null;
  }
  async set(key, value, options) {
    this.logger?.debug(this.name, "[set] Running \"SET\" command...", "key =", key);
    const ttl = options?.ttl ?? this.defaultTTL;
    await this.client.set(key, JSON.stringify(value), {
      expiry: ttl ? { type: "EX", count: ttl } : void 0
    });
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete] Running \"DEL\" command...", "key =", key);
    await this.client.del([key]);
  }
  async getMany(keys) {
    this.logger?.debug(this.name, "[getMany] Running \"MGET\" command...", "keys =", keys);
    const values = await this.client.mget(keys);
    const data = {};
    for (let i = 0; i < keys.length; i++) {
      const value = values[i];
      data[keys[i]] = value ? JSON.parse(value.toString()) : null;
    }
    return data;
  }
  async deleteMany(keys) {
    this.logger?.debug(this.name, "[deleteMany] Running \"DEL\" command...", "keys =", keys);
    await this.client.del(keys);
  }
};

//#endregion
//#region src/remote/memcache/index.ts
/**
 * A Memcached cache implementation using Memcache
 */
var MemcacheCache = class extends BaseCache {
  client;
  defaultTTL;
  constructor(options) {
    super(options);
    this.client = options.client;
    this.defaultTTL = options.defaultTTL;
  }
  async get(key) {
    this.logger?.debug(this.name, "[get] Running \"get\" command...", "key =", key);
    const raw = await this.client.get(key);
    return raw ? JSON.parse(raw) : null;
  }
  async set(key, value, options) {
    this.logger?.debug(this.name, "[set] Running \"set\" command...", "key =", key);
    await this.client.set(key, JSON.stringify(value), options?.ttl ?? this.defaultTTL);
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete] Running \"delete\" command...", "key =", key);
    await this.client.delete(key);
  }
  async getMany(keys) {
    this.logger?.debug(this.name, "[getMany] Running \"get\" command...", "keys =", keys);
    const raw = await this.client.gets(keys);
    const data = {};
    raw.forEach((value, key) => {
      data[key] = value ? JSON.parse(value) : null;
    });
    return data;
  }
};

//#endregion
//#region src/remote/memjs/index.ts
/**
 * A Memcached cache implementation using MemJS
 */
var MemJSCache = class extends BaseCache {
  client;
  defaultTTL;
  constructor(options) {
    super(options);
    this.client = options.client;
    this.defaultTTL = options.defaultTTL;
  }
  async get(key) {
    this.logger?.debug(this.name, "[get] Running \"get\" command...", "key =", key);
    const { value } = await this.client.get(key);
    return value ? JSON.parse(value.toString("utf8")) : null;
  }
  async set(key, value, options) {
    this.logger?.debug(this.name, "[set] Running \"set\" command...", "key =", key);
    await this.client.set(key, JSON.stringify(value), { expires: options?.ttl ?? this.defaultTTL });
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete] Running \"delete\" command...", "key =", key);
    await this.client.delete(key);
  }
};

//#endregion
//#region src/remote/workers-kv/index.ts
/**
 * A Cloudflare Workers KV cache implementation.
 */
var WorkersKVCache = class extends BaseCache {
  kv;
  edgeCacheTTL;
  constructor(options) {
    super(options);
    this.kv = options.kv;
    this.edgeCacheTTL = options.edgeCacheTTL;
  }
  get(key) {
    return this.kv.get(key, {
      type: "json",
      cacheTtl: this.edgeCacheTTL
    });
  }
  set(key, value, options = {}) {
    return this.kv.put(key, JSON.stringify(value), { expirationTtl: options.ttl });
  }
  delete(key) {
    return this.kv.delete(key);
  }
  async getMany(keys) {
    const data = await this.kv.get(keys, { type: "json" });
    return Object.fromEntries(data);
  }
};

//#endregion
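A sketch of the Workers KV wrapper inside a Worker's fetch handler (ESM Worker syntax; the CACHE_KV binding name and loadSettings loader are illustrative). edgeCacheTTL is forwarded to kv.get as cacheTtl, so hot keys can be served from Cloudflare's edge cache:

export default {
  async fetch(request, env) {
    const cache = new WorkersKVCache({ kv: env.CACHE_KV, edgeCacheTTL: 60 });
    const settings = await cache.getOrLoad("settings", () => loadSettings(env), { ttl: 300 });
    return Response.json(settings);
  }
};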
//#region src/remote/keyv/index.ts
var KeyvCache = class extends BaseCache {
  keyv;
  constructor(options) {
    super(options);
    this.keyv = options.keyv;
  }
  async get(key) {
    const result = await this.keyv.get(key);
    return result === void 0 ? null : result;
  }
  async set(key, value, options) {
    await this.keyv.set(key, value, options?.ttl);
  }
  async delete(key) {
    await this.keyv.delete(key);
  }
  async getMany(keys) {
    const data = await this.keyv.getMany(keys);
    const result = {};
    for (let i = 0; i < keys.length; i++) result[keys[i]] = data[i] === void 0 ? null : data[i];
    return result;
  }
  async setMany(data, options) {
    const ttl = options?.ttl;
    await this.keyv.setMany(Object.entries(data).map(([key, value]) => ({
      key,
      value,
      ttl
    })));
  }
  async deleteMany(keys) {
    await this.keyv.deleteMany(keys);
  }
};

//#endregion
//#region src/layers/async-lazy/index.ts
/**
 * A cache layer that initializes the underlying cache asynchronously.
 *
 * This layer can be used to connect to an external cache with the cache methods already available.
 *
 * This layer can also be used to lazily initialize the cache only when it's actually needed.
 *
 * @example
 * ```ts
 * const cache = new AsyncLazyCache({
 *   factory: async () => {
 *     const redisClient = await createClient({
 *       url: "redis://user:password@localhost:6380",
 *     });
 *
 *     return new RedisCache({ client: redisClient });
 *   },
 *   lazy: true,
 * });
 *
 * cache.get("key")
 *   .then(result => console.log('redis was connected and read the key:', result));
 * ```
 */
var AsyncLazyCache = class {
  factory;
  cache = null;
  constructor(options) {
    this.factory = options.factory;
    if (!options.lazy) this.cache = Promise.resolve(this.factory());
  }
  async get(key) {
    return (await this.resolveCache()).get(key);
  }
  async getOrLoad(key, load, options) {
    return (await this.resolveCache()).getOrLoad(key, load, options);
  }
  async set(key, value, options) {
    return (await this.resolveCache()).set(key, value, options);
  }
  async delete(key) {
    return (await this.resolveCache()).delete(key);
  }
  async getMany(keys) {
    return (await this.resolveCache()).getMany(keys);
  }
  async setMany(data, options) {
    return (await this.resolveCache()).setMany(data, options);
  }
  async deleteMany(keys) {
    return (await this.resolveCache()).deleteMany(keys);
  }
  /**
   * Gets the underlying cache, initializing it if not already initialized.
   */
  resolveCache() {
    if (!this.cache) this.cache = Promise.resolve(this.factory());
    return this.cache;
  }
};

//#endregion
//#region src/layers/coalescing/index.ts
/**
 * A cache strategy layer that deduplicates parallel requests into a single request.
 *
 * This strategy can prevent the Thundering Herd problem as all parallel requests will be coalesced into one.
 */
var CoalescingCache = class {
  ongoingRequests = /* @__PURE__ */ new Map();
  cache;
  name;
  logger;
  constructor(options) {
    this.cache = options.cache;
    this.name = options.name;
    this.logger = options.logger;
  }
  get(key) {
    const ongoingRequest = this.ongoingRequests.get(key);
    if (ongoingRequest) {
      this.logger?.debug(this.name, "[get] Returning ongoing request...", "key =", key);
      return ongoingRequest.promise;
    }
    this.logger?.debug(this.name, "[get] Reading from underlying cache...", "key =", key);
    const promise = this.cache.get(key);
    this.ongoingRequests.set(key, {
      promise,
      type: "get"
    });
    return promise.finally(() => this.ongoingRequests.delete(key));
  }
  async getOrLoad(key, load, options) {
    const ongoingRequest = this.ongoingRequests.get(key);
    if (!ongoingRequest) {
      this.logger?.debug(this.name, "[getOrLoad] Reading from the underlying cache...", "key =", key);
      const promise$1 = this.cache.getOrLoad(key, load, options);
      this.ongoingRequests.set(key, {
        promise: promise$1,
        type: "getOrLoad"
      });
      return promise$1.finally(() => this.ongoingRequests.delete(key));
    }
    let request = await ongoingRequest.promise;
    if (request !== null || ongoingRequest.type === "getOrLoad") {
      this.logger?.debug(this.name, "[getOrLoad] Read from an ongoing request.", "key =", key);
      return request;
    }
    this.logger?.debug(this.name, "[getOrLoad] Refreshing the cache...", "key =", key);
    const promise = load();
    this.ongoingRequests.set(key, {
      promise,
      type: "getOrLoad"
    });
    try {
      request = await promise;
      if (request !== null) await this.cache.set(key, request, options);
    } finally {
      this.ongoingRequests.delete(key);
    }
    return request;
  }
  set(key, value, options) {
    this.logger?.debug(this.name, "[set]", "key =", key);
    const promise = this.cache.set(key, value, options);
    this.ongoingRequests.set(key, {
      promise: promise.then(() => value),
      type: "getOrLoad"
    });
    return promise.finally(() => this.ongoingRequests.delete(key));
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete]", "key =", key);
    try {
      this.ongoingRequests.set(key, {
        type: "get",
        promise: Promise.resolve(null)
      });
      await this.cache.delete(key);
    } finally {
      this.ongoingRequests.delete(key);
    }
  }
  async getMany(keys) {
    const items = [];
    const remainingKeys = [];
    for (const key of keys) {
      const ongoingRequest = this.ongoingRequests.get(key);
      if (ongoingRequest) items.push([key, ongoingRequest.promise]);
      else remainingKeys.push(key);
    }
    this.logger?.debug(this.name, "[getMany]", items.length, "ongoing requests found, reading", remainingKeys.length, "resources.", "keys =", keys);
    if (remainingKeys.length > 0) {
      const promise = this.cache.getMany(remainingKeys);
      for (const key of remainingKeys) {
        const itemPromise = promise.then((data) => data[key]).finally(() => this.ongoingRequests.delete(key));
        this.ongoingRequests.set(key, {
          promise: itemPromise,
          type: "get"
        });
        items.push([key, itemPromise]);
      }
    }
    return Object.fromEntries(await Promise.all(items.map(async ([key, promise]) => [key, await promise])));
  }
  setMany(data, options) {
    this.logger?.debug(this.name, "[setMany]", "data =", data);
    const promise = this.cache.setMany(data, options);
    for (const [key, value] of Object.entries(data)) this.ongoingRequests.set(key, {
      promise: promise.then(() => value).finally(() => this.ongoingRequests.delete(key)),
      type: "getOrLoad"
    });
    return promise;
  }
  async deleteMany(keys) {
    this.logger?.debug(this.name, "[deleteMany]", "keys =", keys);
    try {
      for (const key of keys) this.ongoingRequests.set(key, {
        type: "get",
        promise: Promise.resolve(null)
      });
      await this.cache.deleteMany(keys);
    } finally {
      for (const key of keys) this.ongoingRequests.delete(key);
    }
  }
};

//#endregion
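The ongoingRequests map is updated synchronously before the first await, so concurrent getOrLoad calls for one key share a single load. A sketch of the deduplication:

const coalesced = new CoalescingCache({
  name: "coalesced",
  cache: new LocalTTLCache({ ttl: 60 })
});

let loads = 0;
const load = async () => { loads++; return "value"; };

// All three calls resolve to "value"; the loader runs once.
await Promise.all([
  coalesced.getOrLoad("key", load),
  coalesced.getOrLoad("key", load),
  coalesced.getOrLoad("key", load)
]);
console.log(loads); // 1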
//#region src/layers/jittering/index.ts
/**
 * A cache layer that adds a random jitter to the TTL of cached items to prevent cache stampedes.
 *
 * This layer is useful in scenarios where many cached items expire simultaneously, causing a sudden surge of requests to the underlying data source.
 */
var JitteringCache = class {
  cache;
  defaultTTL;
  maxJitterTTL;
  constructor(options) {
    this.cache = options.cache;
    this.defaultTTL = options.defaultTTL;
    this.maxJitterTTL = options.maxJitterTTL;
  }
  get(key) {
    return this.cache.get(key);
  }
  getOrLoad(key, load, options = {}) {
    return this.cache.getOrLoad(key, load, this.jitterTTL(options));
  }
  set(key, value, options) {
    return this.cache.set(key, value, this.jitterTTL(options));
  }
  delete(key) {
    return this.cache.delete(key);
  }
  getMany(keys) {
    return this.cache.getMany(keys);
  }
  setMany(data, options) {
    return this.cache.setMany(data, this.jitterTTL(options));
  }
  deleteMany(keys) {
    return this.cache.deleteMany(keys);
  }
  jitterTTL(options = {}) {
    const ttl = options.ttl ?? this.defaultTTL;
    const jitter = Math.random() * this.maxJitterTTL;
    return {
      ...options,
      ttl: ttl + jitter
    };
  }
};

//#endregion
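With defaultTTL and maxJitterTTL in seconds, every write gets a TTL drawn uniformly from [ttl, ttl + maxJitterTTL), so entries written together do not all expire together. A sketch:

const jittered = new JitteringCache({
  cache: new LocalTTLCache({ max: 1e4 }),
  defaultTTL: 600,
  maxJitterTTL: 60
});

// Stored with a TTL somewhere in [600, 660) seconds.
await jittered.set("report:today", { rows: [] });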
//#region src/layers/key-transforming/index.ts
/**
 * A cache layer that changes keys before passing them to the underlying cache.
 *
 * This layer can be used to:
 * - Create namespaced caches, avoiding conflicts with shared cache servers.
 * - Add a version number, allowing schema changes without causing incompatibility.
 * - Implement any other key transformations, such as normalizing or hashing.
 */
var KeyTransformingCache = class {
  cache;
  transform;
  constructor(options) {
    this.cache = options.cache;
    if ("transform" in options) this.transform = options.transform;
    else {
      const prefix = options.prefix || "";
      const suffix = options.suffix || "";
      this.transform = (key) => `${prefix}${key}${suffix}`;
    }
  }
  get(key) {
    return this.cache.get(this.transform(key));
  }
  getOrLoad(key, load, options) {
    return this.cache.getOrLoad(this.transform(key), load, options);
  }
  set(key, value, options) {
    return this.cache.set(this.transform(key), value, options);
  }
  delete(key) {
    return this.cache.delete(this.transform(key));
  }
  async getMany(keys) {
    const transformedKeys = keys.map((key) => this.transform(key));
    const data = await this.cache.getMany(transformedKeys);
    return Object.fromEntries(Object.entries(data).map(([transformedKey, value]) => [keys[transformedKeys.indexOf(transformedKey)], value]));
  }
  setMany(data, options) {
    return this.cache.setMany(Object.fromEntries(Object.entries(data).map(([key, value]) => [this.transform(key), value])), options);
  }
  deleteMany(keys) {
    return this.cache.deleteMany(keys.map((key) => this.transform(key)));
  }
};

//#endregion
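A namespacing sketch: with a prefix, "user:42" is stored under "myapp:v2:user:42", so several apps or schema versions can share one cache server without collisions; getMany maps the transformed keys back to the caller's original keys:

const namespaced = new KeyTransformingCache({
  cache: new LocalMapCache(), // any ICache works here, e.g. a RedisCache
  prefix: "myapp:v2:"
});

await namespaced.set("user:42", { id: 42 });  // stored as "myapp:v2:user:42"
console.log(await namespaced.get("user:42")); // { id: 42 }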
//#region src/layers/swr/index.ts
/**
 * A cache strategy that returns stale resources immediately while it refreshes the cache in the background.
 *
 * This is an implementation of the Stale-While-Revalidate algorithm.
 *
 * This strategy is only effective when calling {@link ICache#getOrLoad}.
 */
var SWRCache = class {
  revalidating = /* @__PURE__ */ new Map();
  cache;
  name;
  logger;
  defaultTTL;
  staleTTL;
  constructor(options) {
    this.cache = options.cache;
    this.name = options.name;
    this.logger = options.logger;
    this.defaultTTL = options.defaultTTL;
    this.staleTTL = options.staleTTL;
  }
  async get(key) {
    this.logger?.debug(this.name, "[get]", "key =", key);
    const item = await this.cache.get(key);
    return item ? item.data : null;
  }
  async getOrLoad(key, load, options = {}) {
    this.logger?.debug(this.name, "[getOrLoad]", "key =", key);
    const ttl = options.ttl || this.defaultTTL;
    const cacheOptions = {
      ...options,
      ttl: ttl + this.staleTTL
    };
    const loadItem = async () => ({
      data: await load(),
      expiresAt: Date.now() + ttl * 1e3
    });
    const item = await this.cache.getOrLoad(key, loadItem, cacheOptions);
    if (item && item.expiresAt < Date.now() && !this.revalidating.has(key)) {
      this.logger?.debug(this.name, "[getOrLoad] Refreshing stale resource in background...", "key =", key);
      const promise = loadItem().then((newItem) => this.cache.set(key, newItem, cacheOptions)).finally(() => this.revalidating.delete(key));
      this.revalidating.set(key, promise);
    }
    return item.data;
  }
  set(key, value, options = {}) {
    this.logger?.debug(this.name, "[set]", "key =", key);
    const ttl = options.ttl || this.defaultTTL;
    const item = {
      data: value,
      expiresAt: Date.now() + ttl * 1e3
    };
    const cacheOptions = {
      ...options,
      ttl: ttl + this.staleTTL
    };
    return this.cache.set(key, item, cacheOptions);
  }
  delete(key) {
    this.logger?.debug(this.name, "[delete]", "key =", key);
    return this.cache.delete(key);
  }
  async getMany(keys) {
    this.logger?.debug(this.name, "[getMany]", "keys =", keys);
    const data = await this.cache.getMany(keys);
    const items = {};
    for (const [key, value] of Object.entries(data)) items[key] = value ? value.data : null;
    return items;
  }
  setMany(data, options = {}) {
    this.logger?.debug(this.name, "[setMany]", "data =", data);
    const ttl = options.ttl || this.defaultTTL;
    const items = {};
    for (const [key, value] of Object.entries(data)) items[key] = {
      data: value,
      expiresAt: Date.now() + ttl * 1e3
    };
    return this.cache.setMany(items, {
      ...options,
      ttl: ttl + this.staleTTL
    });
  }
  deleteMany(keys) {
    this.logger?.debug(this.name, "[deleteMany]", "keys =", keys);
    return this.cache.deleteMany(keys);
  }
};

//#endregion
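The layer wraps each value in a { data, expiresAt } envelope and stores it with ttl + staleTTL, so a value is fresh for ttl seconds and then served stale for up to staleTTL more seconds while a single background refresh runs. A sketch (fetchFeed is a hypothetical loader):

const swr = new SWRCache({
  name: "swr",
  cache: new LocalTTLCache({ max: 1e4 }),
  defaultTTL: 30, // fresh window, seconds
  staleTTL: 300   // extra stale window, seconds
});

// Served from cache immediately once populated; refreshed in the
// background after the 30s fresh window has passed.
const feed = await swr.getOrLoad("feed", () => fetchFeed());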
//#region src/layers/tiered/index.ts
/**
 * A cache strategy layer that implements multi-level caching.
 *
 * The objective of a tiered cache is to minimize latency while still having the benefits of a larger, shared cache.
 * This is done by having the first tier be an in-memory cache (such as {@link LocalTTLCache}) that stores a small amount of items with a short TTL,
 * and the second tier be an external cache (such as {@link RedisCache}) that stores a lot more items and may have a longer TTL.
 *
 * This strategy is also known as Cache Hierarchy, as in CPU L1/L2/L3 caches.
 */
var TieredCache = class extends BaseCache {
  tiers;
  constructor(options) {
    super(options);
    this.tiers = options.tiers;
  }
  async get(key) {
    const next = (i) => {
      this.logger?.debug(this.name, "[get] Reading from tier =", i, "key =", key);
      const tier = this.tiers[i];
      if (i === this.tiers.length - 1) return tier.cache.get(key);
      return tier.cache.getOrLoad(key, () => next(i + 1), tier.options);
    };
    return next(0);
  }
  async getOrLoad(key, load, options) {
    const next = (i) => {
      this.logger?.debug(this.name, "[getOrLoad] Reading from tier =", i, "key =", key);
      const tier = this.tiers[i];
      if (i === this.tiers.length - 1) return tier.cache.getOrLoad(key, load, options || tier.options);
      return tier.cache.getOrLoad(key, () => next(i + 1), tier.options);
    };
    return next(0);
  }
  async set(key, value, options) {
    this.logger?.debug(this.name, "[set] Writing to all tiers in parallel...", "key =", key);
    await Promise.all(this.tiers.map((tier, i) => {
      const isLastTier = i === this.tiers.length - 1;
      return tier.cache.set(key, value, isLastTier ? options || tier.options : tier.options);
    }));
  }
  async delete(key) {
    this.logger?.debug(this.name, "[delete] Deleting from all tiers in parallel...", "key =", key);
    await Promise.all(this.tiers.map((tier) => tier.cache.delete(key)));
  }
  async getMany(keys) {
    const next = async (i, remainingKeys) => {
      this.logger?.debug(this.name, "[getMany] Reading from tier =", i, "keys =", keys);
      const tier = this.tiers[i];
      const isLastTier = i === this.tiers.length - 1;
      const items = await tier.cache.getMany(remainingKeys);
      remainingKeys = Object.entries(items).filter(([, value]) => value === null || value === void 0).map(([key]) => key);
      if (isLastTier || remainingKeys.length === 0) return items;
      const nextItems = await next(i + 1, remainingKeys);
      const backfillItems = Object.entries(nextItems).filter(([, value]) => value !== null && value !== void 0);
      if (backfillItems.length > 0) await tier.cache.setMany(Object.fromEntries(backfillItems), tier.options);
      return {
        ...items,
        ...nextItems
      };
    };
    return next(0, keys);
  }
  async setMany(data, options) {
    this.logger?.debug(this.name, "[setMany] Writing to all tiers in parallel...", "data =", data);
    await Promise.all(this.tiers.map((tier, i) => {
      const isLastTier = i === this.tiers.length - 1;
      return tier.cache.setMany(data, isLastTier ? options || tier.options : tier.options);
    }));
  }
  async deleteMany(keys) {
    this.logger?.debug(this.name, "[deleteMany] Deleting from all tiers in parallel...", "keys =", keys);
    await Promise.all(this.tiers.map((tier) => tier.cache.deleteMany(keys)));
  }
};

//#endregion
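A two-tier sketch: a small in-memory tier with a short TTL in front of a shared remote tier with a longer TTL. Reads that miss tier 0 fall through to tier 1 (and ultimately to the loader), and getMany backfills upper tiers on the way back out; the redisCache instance and loadProduct loader are illustrative:

const tiered = new TieredCache({
  name: "tiered",
  tiers: [
    { cache: new LocalTTLCache({ max: 1000 }), options: { ttl: 10 } },
    { cache: redisCache, options: { ttl: 600 } }
  ]
});

const product = await tiered.getOrLoad("product:7", () => loadProduct(7));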
//#region src/layers/metrics/index.ts
/**
 * A cache layer that collects metrics from each cache call.
 *
 * This can be useful to measure the cache's effectiveness.
 */
var MetricsCollectingCache = class {
  cache;
  logger;
  name;
  countMetrics = {
    missCount: 0,
    hitCount: 0,
    loadCount: 0,
    setCount: 0,
    deleteCount: 0
  };
  totalTimeMetrics = {
    missTime: 0,
    hitTime: 0,
    loadTime: 0,
    setTime: 0,
    deleteTime: 0
  };
  constructor(options) {
    this.cache = options.cache;
    this.logger = options.logger;
    this.name = options.name;
  }
  async get(key) {
    const startAt = performance.now();
    const data = await this.cache.get(key);
    const time = performance.now() - startAt;
    if (data === null) {
      this.countMetrics.missCount++;
      this.totalTimeMetrics.missTime += time;
      this.logger?.debug(this.name, "[get] Cache miss.", "key =", key, "timeMS =", time);
    } else {
      this.countMetrics.hitCount++;
      this.totalTimeMetrics.hitTime += time;
      this.logger?.debug(this.name, "[get] Cache hit.", "key =", key, "timeMS =", time);
    }
    return data;
  }
  async getOrLoad(key, load, options) {
    let didTriggerLoad = false;
    let loadFinishAt = 0;
    const loadMiddleware = () => {
      const missFinishAt = performance.now();
      this.countMetrics.missCount++;
      this.totalTimeMetrics.missTime += missFinishAt - startAt;
      didTriggerLoad = true;
      this.logger?.debug(this.name, "[getOrLoad] Cache refresh.", "key =", key);
      const loadStartAt = performance.now();
      return load().finally(() => {
        loadFinishAt = performance.now();
        this.countMetrics.loadCount++;
        this.totalTimeMetrics.loadTime += loadFinishAt - loadStartAt;
      });
    };
    let startAt = performance.now();
    const data = await this.cache.getOrLoad(key, loadMiddleware, options);
    if (!didTriggerLoad) {
      const hitFinishedAt = performance.now();
      this.countMetrics.hitCount++;
      this.totalTimeMetrics.hitTime += hitFinishedAt - startAt;
      this.logger?.debug(this.name, "[getOrLoad] Cache hit.", "key =", key);
    } else {
      const setFinishAt = performance.now();
      this.countMetrics.setCount++;
      this.totalTimeMetrics.setTime += setFinishAt - loadFinishAt;
    }
    return data;
  }
  async set(key, value, options) {
    const startAt = performance.now();
    await this.cache.set(key, value, options);
    const time = performance.now() - startAt;
    this.countMetrics.setCount++;
    this.totalTimeMetrics.setTime += time;
    this.logger?.debug(this.name, "[set] Cache set.", "key =", key, "timeMS =", time);
  }
  async delete(key) {
    const startAt = performance.now();
    await this.cache.delete(key);
    const time = performance.now() - startAt;
    this.countMetrics.deleteCount++;
    this.totalTimeMetrics.deleteTime += time;
    this.logger?.debug(this.name, "[delete] Cache delete.", "key =", key, "timeMS =", time);
  }
  async getMany(keys) {
    const startAt = performance.now();
    const data = await this.cache.getMany(keys);
    const time = performance.now() - startAt;
    const timePerKey = time / keys.length;
    const miss = keys.filter((key) => data[key] === void 0 || data[key] === null).length;
    const hits = keys.length - miss;
    this.countMetrics.missCount += miss;
    this.countMetrics.hitCount += hits;
    this.totalTimeMetrics.missTime += miss * timePerKey;
    this.totalTimeMetrics.hitTime += hits * timePerKey;
    this.logger?.debug(this.name, "[getMany]", "hits =", hits, "misses =", miss, "timeMS =", time);
    return data;
  }
  async setMany(data, options) {
    const startAt = performance.now();
    await this.cache.setMany(data, options);
    const time = performance.now() - startAt;
    const sets = Object.keys(data).length;
    this.countMetrics.setCount += sets;
    this.totalTimeMetrics.setTime += time;
    this.logger?.debug(this.name, "[setMany]", "sets =", sets, "timeMS =", time);
  }
  async deleteMany(keys) {
    const startAt = performance.now();
    await this.cache.deleteMany(keys);
    const time = performance.now() - startAt;
    this.countMetrics.deleteCount += keys.length;
    this.totalTimeMetrics.deleteTime += time;
    this.logger?.debug(this.name, "[deleteMany]", "deletes =", keys.length, "timeMS =", time);
  }
  getMetrics() {
    const count = this.countMetrics;
    const time = this.totalTimeMetrics;
    return {
      ...count,
      missTime: count.missCount === 0 ? 0 : time.missTime / count.missCount,
      hitTime: count.hitCount === 0 ? 0 : time.hitTime / count.hitCount,
      loadTime: count.loadCount === 0 ? 0 : time.loadTime / count.loadCount,
      setTime: count.setCount === 0 ? 0 : time.setTime / count.setCount,
      deleteTime: count.deleteCount === 0 ? 0 : time.deleteTime / count.deleteCount
    };
  }
  resetMetrics() {
    this.countMetrics = {
      missCount: 0,
      hitCount: 0,
      loadCount: 0,
      setCount: 0,
      deleteCount: 0
    };
    this.totalTimeMetrics = {
      missTime: 0,
      hitTime: 0,
      loadTime: 0,
      setTime: 0,
      deleteTime: 0
    };
    this.logger?.debug(this.name, "[resetMetrics]");
  }
};

//#endregion
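A measurement sketch: wrap any cache, exercise it, then read the aggregates. getMetrics returns the raw counts plus per-call average times in milliseconds (total time divided by count):

const measured = new MetricsCollectingCache({
  name: "measured",
  cache: new LocalTTLCache({ ttl: 60 })
});

await measured.getOrLoad("a", async () => 1); // miss + load + set
await measured.getOrLoad("a", async () => 1); // hit

const metrics = measured.getMetrics();
console.log(metrics.hitCount, metrics.missCount); // 1 1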
exports.AsyncLazyCache = AsyncLazyCache;
exports.BaseCache = BaseCache;
exports.CoalescingCache = CoalescingCache;
exports.IORedisCache = IORedisCache;
exports.JitteringCache = JitteringCache;
exports.KeyTransformingCache = KeyTransformingCache;
exports.KeyvCache = KeyvCache;
exports.LocalLRUCache = LocalLRUCache;
exports.LocalMapCache = LocalMapCache;
exports.LocalTTLCache = LocalTTLCache;
exports.MemJSCache = MemJSCache;
exports.MemcacheCache = MemcacheCache;
exports.MetricsCollectingCache = MetricsCollectingCache;
exports.NoOpCache = NoOpCache;
exports.RedisCache = RedisCache;
exports.SWRCache = SWRCache;
exports.TieredCache = TieredCache;
exports.ValkeyGlideCache = ValkeyGlideCache;
exports.WorkersKVCache = WorkersKVCache;