cachimbo 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,762 @@
+ import { LRUCache } from "lru-cache";
+ import { TTLCache } from "@isaacs/ttlcache";
+ import { Redis } from "ioredis";
+ import { Redis as Redis$1 } from "iovalkey";
+ import { RedisClientPoolType, RedisClientType, RedisClusterType, RedisSentinelType } from "@redis/client";
+ import { BaseClient, GlideClient, GlideClusterClient } from "@valkey/valkey-glide";
+ import Memcache from "memcache";
+ import { Client } from "memjs";
+ import { KVNamespace } from "@cloudflare/workers-types";
+ import Keyv from "keyv";
+
+ //#region src/types/logger.d.ts
+ interface Logger {
+ /**
+ * Sends a debug log
+ * @param name The cache name
+ * @param message The log message
+ */
+ debug: (name?: string, ...message: any[]) => void;
+ }
+ //#endregion
+ //#region src/types/cache.d.ts
+ interface SetCacheOptions {
+ /**
+ * Time to Live in seconds
+ */
+ ttl?: number;
+ }
+ interface BaseCacheOptions {
+ /**
+ * The name of this strategy. Used for logging
+ */
+ name?: string;
+
+ /**
+ * A logger that is useful for debugging the cache chain
+ */
+ logger?: Logger;
+ }
+ interface ICache {
+ /**
+ * Reads the cached resource from a key.
+ * Returns `null` when the resource is not found.
+ *
+ * @param key The cache key
+ */
+ get<T>(key: string): Promise<T | null>;
+
+ /**
+ * Reads the cached resource from a key.
+ * If the cached resource is not found, fetches it by calling the `load` function and then saves it into cache.
+ *
+ * @param key The cache key
+ * @param load The function which should fetch the fresh data from origin
+ * @param options The options used to save the cache
+ */
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+
+ /**
+ * Writes a resource into cache.
+ *
+ * The value **must** be JSON stringifiable.
+ *
+ * @param key The cache key
+ * @param value The resource value
+ * @param options The options to save the cache
+ */
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+
+ /**
+ * Deletes a cached resource by a key.
+ *
+ * @param key The cache key to delete
+ */
+ delete(key: string): Promise<void>;
+
+ /**
+ * Reads cached resources in batch.
+ *
+ * @param keys The list of cache keys.
+ */
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+
+ /**
+ * Writes cache resources in batch.
+ *
+ * The values **must** be JSON stringifiable.
+ *
+ * @param data The cache keys and values
+ * @param options The options to save the cache
+ */
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+
+ /**
+ * Deletes cached resources by their keys in batch.
+ *
+ * @param keys The list of keys to delete
+ */
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
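The `ICache` contract above is what every implementation and layer in this file shares. A minimal consumption sketch, assuming the exports are imported from the published `cachimbo` package and using the `LocalTTLCache` declared later in this file; `fetchUser` is a hypothetical loader:

```ts
import { LocalTTLCache } from "cachimbo";

// Hypothetical loader; any function returning a JSON-stringifiable value works.
async function fetchUser(id: string): Promise<{ id: string; name: string }> {
  return { id, name: "Ada" };
}

const cache = new LocalTTLCache({ name: "users", ttl: 60, max: 1_000 });

// Read-through: return the cached value, or call the loader and store the result for 30s.
const user = await cache.getOrLoad("user:1", () => fetchUser("1"), { ttl: 30 });

// The lower-level methods follow the same contract.
await cache.set("user:2", { id: "2", name: "Grace" }, { ttl: 30 });
const maybeUser = await cache.get<{ id: string; name: string }>("user:2"); // null when absent
```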
+ //#region src/base/index.d.ts
+ /**
+ * The base implementation of a cache.
+ *
+ * This class only requires subclasses to implement {@link ICache#get}, {@link ICache#set} and {@link ICache#delete}.
+ * All other methods fall back to these three.
+ */
+ declare abstract class BaseCache implements ICache {
+ protected readonly name?: string;
+ protected readonly logger?: Logger;
+ protected constructor(options: BaseCacheOptions);
+ abstract get<T>(key: string): Promise<T | null>;
+ abstract set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ abstract delete(key: string): Promise<void>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
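Since `BaseCache` derives `getOrLoad`, `getMany`, `setMany` and `deleteMany` from the three abstract methods, a custom backend only needs to supply those. A sketch of a trivial subclass (illustrative only; TTL handling is omitted):

```ts
import { BaseCache, SetCacheOptions } from "cachimbo";

class InMemoryCache extends BaseCache {
  private readonly store = new Map<string, unknown>();

  constructor() {
    super({ name: "in-memory" });
  }

  async get<T>(key: string): Promise<T | null> {
    return this.store.has(key) ? (this.store.get(key) as T) : null;
  }

  async set<T>(key: string, value: T, _options?: SetCacheOptions): Promise<void> {
    this.store.set(key, value);
  }

  async delete(key: string): Promise<void> {
    this.store.delete(key);
  }
}
// getOrLoad, getMany, setMany and deleteMany are inherited from BaseCache.
```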
+ //#region src/local/lru/index.d.ts
+ interface ExistingLRUCacheOptions extends BaseCacheOptions {
+ /**
+ * The existing instance of an LRUCache.
+ */
+ cache: LRUCache<string, any, () => Promise<any>>;
+ /**
+ * Whether it should call {@link LRUCache#fetch} when `getOrLoad` is called.
+ *
+ * For that, {@link LRUCache#fetchMethod} needs to call the context function:
+ * ```ts
+ * new LRUCache<string, any, () => Promise<any>>({
+ * fetchMethod: (_key, _staleValue, options) => options.context(),
+ * });
+ * ```
+ */
+ shouldUseFetch?: boolean;
+ }
+ interface LocalLRUCacheOptions extends BaseCacheOptions {
+ /**
+ * The default Time to Live (in seconds)
+ */
+ ttl?: number;
+ /**
+ * The maximum amount of items stored
+ */
+ max?: number;
+ }
+ /**
+ * An in-memory cache implementation that uses a Least-Recently-Used (LRU) eviction algorithm.
+ *
+ * It allows setting an expiration time and a limit of cached items.
+ *
+ * Once the limit of items is reached, the least recently used items will be purged.
+ */
+ declare class LocalLRUCache extends BaseCache {
+ protected readonly cache: LRUCache<string, any, () => Promise<any>>;
+ protected shouldUseFetch?: boolean;
+ constructor(options?: LocalLRUCacheOptions | ExistingLRUCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ }
+ //#endregion
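Two ways to construct `LocalLRUCache`, sketched under the assumption that the package is consumed as `cachimbo`: either let it create its own store, or hand it an existing `lru-cache` instance and route `getOrLoad` through `LRUCache#fetch` as described in the JSDoc above:

```ts
import { LRUCache } from "lru-cache";
import { LocalLRUCache } from "cachimbo";

// Simple form: the cache manages its own LRU store.
const lru = new LocalLRUCache({ name: "lru", ttl: 120, max: 500 });

// Existing-instance form: mirror the fetchMethod shown above so getOrLoad can delegate to LRUCache#fetch.
const existing = new LRUCache<string, any, () => Promise<any>>({
  max: 500,
  fetchMethod: (_key, _staleValue, options) => options.context(),
});
const wrapped = new LocalLRUCache({ cache: existing, shouldUseFetch: true });
```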
+ //#region src/local/ttl/index.d.ts
+ interface ExistingTTLCacheOptions extends BaseCacheOptions {
+ /**
+ * Existing instance of a TTLCache
+ */
+ cache: TTLCache<string, any>;
+ }
+ interface LocalTTLCacheOptions extends BaseCacheOptions {
+ /**
+ * The default Time to Live (in seconds)
+ */
+ ttl?: number;
+ /**
+ * The maximum amount of items stored
+ */
+ max?: number;
+ }
+ /**
+ * An in-memory cache implementation that allows setting an expiration time and a limit of cached items.
+ *
+ * Once the limit of items is reached, the soonest expiring items will be purged.
+ */
+ declare class LocalTTLCache extends BaseCache {
+ protected readonly cache: TTLCache<string, any>;
+ constructor(options?: LocalTTLCacheOptions | ExistingTTLCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ }
+ //#endregion
+ //#region src/local/map/index.d.ts
+ interface LocalMapCacheOptions extends BaseCacheOptions {
+ /**
+ * The underlying map.
+ */
+ cache?: MapLike<string, any>;
+ /**
+ * The maximum size of the cache.
+ * When not set, the cache can grow indefinitely.
+ */
+ max?: number;
+ }
+ interface MapLike<K, V> {
+ get(key: K): V | undefined;
+ set(key: K, value: V): void;
+ delete(key: K): void;
+ has(key: K): boolean;
+ keys(): IterableIterator<K>;
+ size: number;
+ clear(): void;
+ }
+ /**
+ * A simple in-memory cache implementation based on {@link Map}.
+ *
+ * It ignores expiration times, but a limit of cached items can be set.
+ *
+ * Once the limit of items is reached, the first inserted keys will be purged.
+ */
+ declare class LocalMapCache extends BaseCache {
+ protected readonly cache: MapLike<string, any>;
+ protected max: number;
+ constructor(options?: LocalMapCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ setMany(data: Record<string, any>, options?: SetCacheOptions): Promise<void>;
+ clear(): void;
+ protected evict(length: number): void;
+ }
+ //#endregion
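A short sketch of the Map-backed cache, assuming the `cachimbo` import: TTLs passed to `set` are ignored, and once `max` is exceeded the earliest inserted keys are evicted:

```ts
import { LocalMapCache } from "cachimbo";

const flags = new LocalMapCache({ name: "feature-flags", max: 100 });

await flags.set("beta-dashboard", true);
const enabled = await flags.get<boolean>("beta-dashboard"); // true, or null once evicted
```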
+ //#region src/local/noop/index.d.ts
+ /**
+ * A cache implementation that does nothing.
+ * It's useful for disabling caching.
+ *
+ * @example
+ * ```ts
+ * const cache = isCacheEnabled ? new LocalTTLCache() : new NoOpCache();
+ * ```
+ */
+ declare class NoOpCache implements ICache {
+ constructor();
+ get<T>(key: string): Promise<T | null>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
+ //#region src/remote/ioredis/index.d.ts
+ interface IORedisCacheOptions extends BaseCacheOptions {
+ /**
+ * The ioredis or iovalkey client
+ */
+ client: Redis | Redis$1;
+ /**
+ * The default Time To Live in seconds
+ */
+ defaultTTL?: number;
+ /**
+ * Indicates whether the Redis server supports the MSETEX command.
+ *
+ * {@link IORedisCache#setMany} will use MSETEX if this option is set to `true`.
+ *
+ * This option should be set to `true` if the server runs Redis OSS 8.4.0 or above.
+ * Valkey does not support this yet. (see https://github.com/valkey-io/valkey/issues/2592)
+ */
+ isMSETEXSupported?: boolean;
+ }
+ /**
+ * A Redis cache implementation using ioredis
+ */
+ declare class IORedisCache extends BaseCache {
+ protected readonly client: Redis | Redis$1;
+ protected defaultTTL?: number;
+ protected isMSETEXSupported?: boolean;
+ constructor(options: IORedisCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany(data: Record<string, any>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
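A construction sketch using an ioredis client; the connection URL is a placeholder, and `isMSETEXSupported` should only be enabled when the server is known to support MSETEX:

```ts
import { Redis } from "ioredis";
import { IORedisCache } from "cachimbo";

// ioredis connects lazily; adjust the URL and credentials for your environment.
const client = new Redis("redis://localhost:6379");

const cache = new IORedisCache({ client, defaultTTL: 300, isMSETEXSupported: false });

await cache.setMany({ "price:BTC": 64000, "price:ETH": 3200 }, { ttl: 60 });
const prices = await cache.getMany<number>(["price:BTC", "price:ETH"]);
```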
+ //#region src/remote/redis/index.d.ts
+ type Redis$2 = RedisClientType | RedisClientPoolType | RedisClusterType | RedisSentinelType;
+ interface RedisCacheOptions extends BaseCacheOptions {
+ /**
+ * The Redis client
+ */
+ client: RedisClientType | RedisClientPoolType | RedisClusterType | RedisSentinelType;
+ /**
+ * The default Time To Live in seconds
+ */
+ defaultTTL?: number;
+ /**
+ * Indicates whether the Redis server supports the MSETEX command.
+ *
+ * {@link RedisCache#setMany} will use MSETEX if this option is set to `true`.
+ *
+ * This option should be set to `true` if the server runs Redis OSS 8.4.0 or above.
+ * Valkey does not support this yet. (see https://github.com/valkey-io/valkey/issues/2592)
+ */
+ isMSETEXSupported?: boolean;
+ }
+ /**
+ * A Redis cache implementation using node-redis
+ */
+ declare class RedisCache extends BaseCache {
+ protected readonly client: Redis$2;
+ protected defaultTTL?: number;
+ protected isMSETEXSupported?: boolean;
+ constructor(options: RedisCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany(data: Record<string, any>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
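The node-redis variant is constructed the same way, except the client must be connected first. A sketch with a placeholder URL:

```ts
import { createClient } from "redis";
import { RedisCache } from "cachimbo";

// node-redis clients must be connected before use.
const client = createClient({ url: "redis://localhost:6379" });
await client.connect();

const cache = new RedisCache({ name: "sessions", client, defaultTTL: 300 });
await cache.set("session:abc", { userId: 42 }, { ttl: 900 });
```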
+ //#region src/remote/valkey-glide/index.d.ts
+ interface ValkeyGlideCacheOptions extends BaseCacheOptions {
+ /**
+ * The GLIDE client instance
+ */
+ client: GlideClient | GlideClusterClient | BaseClient;
+ /**
+ * The default Time To Live in seconds
+ */
+ defaultTTL?: number;
+ }
+ /**
+ * A Valkey cache implementation using @valkey/valkey-glide
+ */
+ declare class ValkeyGlideCache extends BaseCache {
+ protected readonly client: BaseClient;
+ protected defaultTTL?: number;
+ constructor(options: ValkeyGlideCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
+ //#region src/remote/memcache/index.d.ts
+ interface MemcacheCacheOptions extends BaseCacheOptions {
+ /**
+ * The Memcache client
+ */
+ client: Memcache;
+ /**
+ * The default Time To Live in seconds
+ */
+ defaultTTL?: number;
+ }
+ /**
+ * A Memcached cache implementation using Memcache
+ */
+ declare class MemcacheCache extends BaseCache {
+ protected readonly client: Memcache;
+ protected defaultTTL?: number;
+ constructor(options: MemcacheCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ }
+ //#endregion
+ //#region src/remote/memjs/index.d.ts
+ interface MemJSCacheOptions extends BaseCacheOptions {
+ /**
+ * The memjs client
+ */
+ client: Client;
+ /**
+ * The default Time To Live in seconds
+ */
+ defaultTTL?: number;
+ }
+ /**
+ * A Memcached cache implementation using MemJS
+ */
+ declare class MemJSCache extends BaseCache {
+ protected readonly client: Client;
+ protected defaultTTL?: number;
+ constructor(options: MemJSCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ }
+ //#endregion
+ //#region src/remote/workers-kv/index.d.ts
+ interface WorkersKVCacheOptions extends BaseCacheOptions {
+ /**
+ * The KV namespace (`env.NAMESPACE`)
+ */
+ kv: KVNamespace;
+ /**
+ * The amount of time in seconds that a KV result is cached in the global network location it is accessed from.
+ *
+ * Increasing this value can improve read performance at the cost of data being stale.
+ *
+ * @see https://developers.cloudflare.com/kv/api/read-key-value-pairs/#cachettl-parameter
+ */
+ edgeCacheTTL?: number;
+ }
+ /**
+ * A Cloudflare Workers KV cache implementation.
+ */
+ declare class WorkersKVCache extends BaseCache {
+ protected readonly kv: KVNamespace;
+ protected edgeCacheTTL?: number;
+ constructor(options: WorkersKVCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ }
+ //#endregion
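A Workers sketch; `CACHE_KV` is a hypothetical binding name that would have to be configured for the Worker, and the loaded payload is a stand-in:

```ts
import type { KVNamespace } from "@cloudflare/workers-types";
import { WorkersKVCache } from "cachimbo";

interface Env {
  CACHE_KV: KVNamespace; // hypothetical binding name
}

export default {
  async fetch(_request: Request, env: Env): Promise<Response> {
    const cache = new WorkersKVCache({ kv: env.CACHE_KV, edgeCacheTTL: 60 });
    // Stand-in loader; a real Worker would fetch this from an origin or another binding.
    const config = await cache.getOrLoad("config", async () => ({ theme: "dark" }), { ttl: 300 });
    return Response.json(config);
  },
};
```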
+ //#region src/remote/keyv/index.d.ts
+ interface KeyvCacheOptions extends BaseCacheOptions {
+ /**
+ * The underlying key-value store
+ */
+ keyv: Keyv;
+ }
+ declare class KeyvCache extends BaseCache {
+ protected readonly keyv: Keyv;
+ constructor(options: KeyvCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
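`KeyvCache` adapts anything Keyv can talk to. A minimal sketch with Keyv's default in-memory store:

```ts
import Keyv from "keyv";
import { KeyvCache } from "cachimbo";

// Keyv defaults to an in-memory Map; pass an adapter or connection string for a real store.
const cache = new KeyvCache({ name: "keyv", keyv: new Keyv() });

await cache.set("greeting", "hello", { ttl: 120 });
const greeting = await cache.get<string>("greeting");
```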
+ //#region src/layers/async-lazy/index.d.ts
+ interface AsyncLazyCacheOptions {
+ /**
+ * A factory function that will be called to create the underlying cache when needed.
+ */
+ factory: () => Promise<ICache> | ICache;
+ /**
+ * Whether it should initialize only when needed.
+ *
+ * - When set to `true`, it will initialize the cache only when a method is called.
+ * - When set to `false`, it will initialize the cache upon construction.
+ *
+ * @default `false`
+ */
+ lazy?: boolean;
+ }
+ /**
+ * A cache layer that initializes the underlying cache asynchronously.
+ *
+ * This layer can be used to connect to an external cache while keeping the cache methods immediately available.
+ *
+ * This layer can also be used to lazily initialize the cache only when it's actually needed.
+ *
+ * @example
+ * ```ts
+ * const cache = new AsyncLazyCache({
+ * factory: async () => {
+ * const redisClient = createClient({
+ * url: "redis://user:password@localhost:6380",
+ * });
+ * await redisClient.connect();
+ *
+ * return new RedisCache({ client: redisClient });
+ * },
+ * lazy: true,
+ * });
+ *
+ * cache.get("key")
+ * .then(result => console.log('redis was connected and read the key:', result));
+ * ```
+ */
+ declare class AsyncLazyCache implements ICache {
+ protected readonly factory: () => Promise<ICache> | ICache;
+ protected cache: Promise<ICache> | null;
+ constructor(options: AsyncLazyCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ /**
+ * Gets the underlying cache, initializing it if not already initialized.
+ */
+ resolveCache(): Promise<ICache>;
+ }
+ //#endregion
+ //#region src/layers/coalescing/index.d.ts
+ /**
+ * The options to construct the {@link CoalescingCache}
+ */
+ interface CoalescingCacheOptions extends BaseCacheOptions {
+ /**
+ * The underlying cache
+ */
+ cache: ICache;
+ }
+ interface OngoingRequest {
+ promise: Promise<any | null>;
+ type: 'get' | 'getOrLoad';
+ }
+ /**
+ * A cache strategy layer that deduplicates parallel requests into a single request.
+ *
+ * This strategy can prevent the Thundering Herd problem as all parallel requests will be coalesced into one.
+ */
+ declare class CoalescingCache implements ICache {
+ protected readonly ongoingRequests: Map<string, OngoingRequest>;
+ protected readonly cache: ICache;
+ protected readonly name?: string;
+ protected readonly logger?: Logger;
+ constructor(options: CoalescingCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
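A sketch of the deduplication behaviour, assuming the `cachimbo` imports; `loadReport` is a hypothetical slow loader that should only run once for the three parallel calls:

```ts
import { CoalescingCache, LocalTTLCache } from "cachimbo";

const cache = new CoalescingCache({ name: "reports", cache: new LocalTTLCache({ ttl: 60 }) });

// Hypothetical expensive loader.
const loadReport = async () => ({ generatedAt: Date.now() });

// Parallel calls for the same key are coalesced into a single load.
const [a, b, c] = await Promise.all([
  cache.getOrLoad("report", loadReport),
  cache.getOrLoad("report", loadReport),
  cache.getOrLoad("report", loadReport),
]);
```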
+ //#region src/layers/jittering/index.d.ts
+ interface JitteringCacheOptions {
+ /**
+ * The underlying cache
+ */
+ cache: ICache;
+ /**
+ * The default Time To Live in seconds
+ */
+ defaultTTL: number;
+ /**
+ * The maximum jitter (in seconds) to add to the TTL of cached items.
+ */
+ maxJitterTTL: number;
+ }
+ /**
+ * A cache layer that adds a random jitter to the TTL of cached items to prevent cache stampedes.
+ *
+ * This layer is useful in scenarios where many cached items expire simultaneously, causing a sudden surge of requests to the underlying data source.
+ */
+ declare class JitteringCache implements ICache {
+ protected readonly cache: ICache;
+ protected defaultTTL: number;
+ protected maxJitterTTL: number;
+ constructor(options: JitteringCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ protected jitterTTL(options?: SetCacheOptions): SetCacheOptions;
+ }
+ //#endregion
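A configuration sketch: every write receives the 600-second default TTL plus up to 60 seconds of random jitter, so keys written together do not all expire at the same instant:

```ts
import { JitteringCache, LocalTTLCache } from "cachimbo";

const cache = new JitteringCache({
  cache: new LocalTTLCache({ max: 10_000 }),
  defaultTTL: 600,
  maxJitterTTL: 60,
});

await cache.set("price:BTC", 64000); // stored with a TTL between 600s and roughly 660s
```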
+ //#region src/layers/key-transforming/index.d.ts
+ interface BaseKeyTransformingCacheOptions {
+ /**
+ * The underlying cache
+ */
+ cache: ICache;
+ }
+ interface KeyTransformerFnCacheOptions extends BaseKeyTransformingCacheOptions {
+ /**
+ * The function that will be called with each key to transform it.
+ * @param key The original key
+ * @return The transformed key
+ */
+ transform: (key: string) => string;
+ prefix?: never;
+ suffix?: never;
+ }
+ interface KeyAffixCacheOptions extends BaseKeyTransformingCacheOptions {
+ /**
+ * The prefix to add to keys
+ */
+ prefix?: string;
+ /**
+ * The suffix to add to keys
+ */
+ suffix?: string;
+ }
+ type KeyTransformingCacheOptions = KeyAffixCacheOptions | KeyTransformerFnCacheOptions;
+ /**
+ * A cache layer that changes keys before passing them to the underlying cache.
+ *
+ * This layer can be used to:
+ * - Create namespaced caches, avoiding conflicts with shared cache servers.
+ * - Add a version number, allowing schema changes without causing incompatibility.
+ * - Implement any other key transformations, such as normalizing or hashing.
+ */
+ declare class KeyTransformingCache implements ICache {
+ protected readonly cache: ICache;
+ protected transform: (key: string) => string;
+ constructor(options: KeyTransformingCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
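Both option shapes sketched against a shared in-memory store; the prefix value and transform are illustrative:

```ts
import { KeyTransformingCache, LocalMapCache } from "cachimbo";

const shared = new LocalMapCache({ max: 1_000 });

// Affix form: namespace and version every key before it reaches the shared store.
const billing = new KeyTransformingCache({ cache: shared, prefix: "billing:v2:" });

// Function form: arbitrary transformations, e.g. normalizing the key.
const normalized = new KeyTransformingCache({
  cache: shared,
  transform: (key) => key.trim().toLowerCase(),
});

await billing.set("invoice:123", { total: 99 }); // stored under the prefixed key
```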
+ //#region src/layers/swr/index.d.ts
+ /**
+ * The options to create the {@link SWRCache}
+ */
+ interface SWRCacheOptions extends BaseCacheOptions {
+ /**
+ * The underlying cache
+ */
+ cache: ICache;
+ /**
+ * The default Time To Live in seconds
+ */
+ defaultTTL: number;
+ /**
+ * The additional time in seconds to keep the resource stored, but considered stale
+ */
+ staleTTL: number;
+ }
+ /**
+ * A cache strategy that returns stale resources immediately while it refreshes the cache in the background.
+ *
+ * This is an implementation of the Stale-While-Revalidate algorithm.
+ *
+ * This strategy is only effective when calling {@link ICache#getOrLoad}.
+ */
+ declare class SWRCache implements ICache {
+ protected readonly revalidating: Map<string, Promise<any>>;
+ protected readonly cache: ICache;
+ protected readonly name?: string;
+ protected readonly logger?: Logger;
+ protected defaultTTL: number;
+ protected staleTTL: number;
+ constructor(options: SWRCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
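A sketch of an SWR setup: values are fresh for 60 seconds, then served stale for up to a further 300 seconds while `getOrLoad` revalidates in the background; `fetchHeadlines` is a hypothetical loader:

```ts
import { SWRCache, LocalTTLCache } from "cachimbo";

// Hypothetical loader standing in for a real origin fetch.
const fetchHeadlines = async (): Promise<string[]> => ["headline one", "headline two"];

const cache = new SWRCache({
  name: "news",
  cache: new LocalTTLCache({ max: 5_000 }),
  defaultTTL: 60,
  staleTTL: 300,
});

const headlines = await cache.getOrLoad("headlines", fetchHeadlines);
```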
+ //#region src/layers/tiered/index.d.ts
+ interface TieredCacheOptions extends BaseCacheOptions {
+ /**
+ * The list of tiers in order of priority.
+ *
+ * The first item represents the "hot" cache while the last one represents a "cold" cache.
+ */
+ tiers: CacheTier[];
+ }
+ interface CacheTier {
+ /**
+ * The underlying cache
+ */
+ cache: ICache;
+ /**
+ * The options that will be passed to {@link ICache#getOrLoad}, {@link ICache#set} and {@link ICache#setMany}.
+ */
+ options?: SetCacheOptions;
+ }
+ /**
+ * A cache strategy layer that implements multi-level caching.
+ *
+ * The objective of a tiered cache is to minimize latency while still having the benefits of a larger, shared cache.
+ * This is done by making the first tier an in-memory cache (such as {@link LocalTTLCache}) that stores a small amount of items with a short TTL,
+ * and the second tier an external cache (such as {@link RedisCache}) that stores a lot more items and may have a longer TTL.
+ *
+ * This strategy is also known as a cache hierarchy, analogous to CPU L1/L2/L3 caches.
+ */
+ declare class TieredCache extends BaseCache {
+ protected readonly tiers: CacheTier[];
+ constructor(options: TieredCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ }
+ //#endregion
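A two-tier sketch combining a small in-memory hot tier with a shared Redis cold tier; the Redis URL and `loadProduct` loader are placeholders:

```ts
import { createClient } from "redis";
import { LocalTTLCache, RedisCache, TieredCache } from "cachimbo";

const client = createClient({ url: "redis://localhost:6379" });
await client.connect();

const cache = new TieredCache({
  name: "catalog",
  tiers: [
    { cache: new LocalTTLCache({ max: 1_000 }), options: { ttl: 30 } }, // hot tier
    { cache: new RedisCache({ client }), options: { ttl: 600 } },       // cold tier
  ],
});

// Hypothetical origin loader.
const loadProduct = async (id: string) => ({ id, price: 10 });
const product = await cache.getOrLoad("product:42", () => loadProduct("42"));
```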
+ //#region src/layers/metrics/index.d.ts
+ interface MetricsCollectingCacheOptions extends BaseCacheOptions {
+ /**
+ * The underlying cache
+ */
+ cache: ICache;
+ }
+ interface CacheCountMetrics {
+ /** The amount of times the cache didn't have the requested resource */
+ missCount: number;
+ /** The amount of times the cache returned the requested resource */
+ hitCount: number;
+ /** The amount of times the cache was refreshed (the `load` function was called in {@link ICache#getOrLoad}) */
+ loadCount: number;
+ /** The amount of times the cache was updated */
+ setCount: number;
+ /** The amount of times a cached resource was invalidated */
+ deleteCount: number;
+ }
+ interface CacheTimeMetrics {
+ /** Average time taken to verify that the cache didn't have a single requested resource (in milliseconds) */
+ missTime: number;
+ /** Average time taken to return a single requested resource from cache (in milliseconds) */
+ hitTime: number;
+ /** Average time taken to load a single resource from source (in milliseconds) */
+ loadTime: number;
+ /** Average time taken to update a single resource in cache (in milliseconds) */
+ setTime: number;
+ /** Average time taken to invalidate a single resource from cache (in milliseconds) */
+ deleteTime: number;
+ }
+ type CacheMetrics = CacheCountMetrics & CacheTimeMetrics;
+ /**
+ * A cache layer that collects metrics from each cache call.
+ *
+ * This can be useful to measure the cache's effectiveness.
+ */
+ declare class MetricsCollectingCache implements ICache {
+ protected readonly cache: ICache;
+ protected readonly logger?: Logger;
+ protected name?: string;
+ protected countMetrics: CacheCountMetrics;
+ protected totalTimeMetrics: CacheTimeMetrics;
+ constructor(options: MetricsCollectingCacheOptions);
+ get<T>(key: string): Promise<T | null>;
+ getOrLoad<T>(key: string, load: () => Promise<T>, options?: SetCacheOptions): Promise<T>;
+ set<T>(key: string, value: T, options?: SetCacheOptions): Promise<void>;
+ delete(key: string): Promise<void>;
+ getMany<T>(keys: string[]): Promise<Record<string, T | null>>;
+ setMany<T>(data: Record<string, T>, options?: SetCacheOptions): Promise<void>;
+ deleteMany(keys: string[]): Promise<void>;
+ getMetrics(): CacheMetrics;
+ resetMetrics(): void;
+ }
+ //#endregion
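A sketch showing how the collected counters and timings might be read back, assuming the `cachimbo` imports:

```ts
import { MetricsCollectingCache, LocalTTLCache } from "cachimbo";

const cache = new MetricsCollectingCache({
  name: "api",
  cache: new LocalTTLCache({ ttl: 60, max: 1_000 }),
});

await cache.get("missing-key"); // counted as a miss
await cache.set("key", "value"); // counted as a set
await cache.get("key"); // counted as a hit

const metrics = cache.getMetrics();
console.log(metrics.hitCount, metrics.missCount, metrics.hitTime);
cache.resetMetrics();
```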
+ export { AsyncLazyCache, AsyncLazyCacheOptions, BaseCache, BaseCacheOptions, CacheMetrics, CacheTier, CoalescingCache, CoalescingCacheOptions, ExistingLRUCacheOptions, ExistingTTLCacheOptions, ICache, IORedisCache, IORedisCacheOptions, JitteringCache, JitteringCacheOptions, KeyTransformingCache, KeyTransformingCacheOptions, KeyvCache, KeyvCacheOptions, LocalLRUCache, LocalLRUCacheOptions, LocalMapCache, LocalMapCacheOptions, LocalTTLCache, LocalTTLCacheOptions, Logger, MapLike, MemJSCache, MemJSCacheOptions, MemcacheCache, MemcacheCacheOptions, MetricsCollectingCache, MetricsCollectingCacheOptions, NoOpCache, RedisCache, RedisCacheOptions, SWRCache, SWRCacheOptions, SetCacheOptions, TieredCache, TieredCacheOptions, ValkeyGlideCache, ValkeyGlideCacheOptions, WorkersKVCache, WorkersKVCacheOptions };
+ //# sourceMappingURL=index.d.cts.map