layercache 1.0.2 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -20,9 +20,12 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
20
20
  // src/index.ts
21
21
  var index_exports = {};
22
22
  __export(index_exports, {
23
+ CacheMissError: () => CacheMissError,
23
24
  CacheNamespace: () => CacheNamespace,
24
25
  CacheStack: () => CacheStack,
26
+ DiskLayer: () => DiskLayer,
25
27
  JsonSerializer: () => JsonSerializer,
28
+ MemcachedLayer: () => MemcachedLayer,
26
29
  MemoryLayer: () => MemoryLayer,
27
30
  MsgpackSerializer: () => MsgpackSerializer,
28
31
  PatternMatcher: () => PatternMatcher,
@@ -35,15 +38,284 @@ __export(index_exports, {
35
38
  cacheGraphqlResolver: () => cacheGraphqlResolver,
36
39
  createCacheStatsHandler: () => createCacheStatsHandler,
37
40
  createCachedMethodDecorator: () => createCachedMethodDecorator,
41
+ createExpressCacheMiddleware: () => createExpressCacheMiddleware,
38
42
  createFastifyLayercachePlugin: () => createFastifyLayercachePlugin,
43
+ createPrometheusMetricsExporter: () => createPrometheusMetricsExporter,
39
44
  createTrpcCacheMiddleware: () => createTrpcCacheMiddleware
40
45
  });
41
46
  module.exports = __toCommonJS(index_exports);
42
47
 
43
48
  // src/CacheStack.ts
44
49
  var import_node_crypto = require("crypto");
45
- var import_node_fs = require("fs");
46
50
  var import_node_events = require("events");
51
+ var import_node_fs = require("fs");
52
+
53
+ // src/CacheNamespace.ts
54
+ var CacheNamespace = class _CacheNamespace {
55
+ constructor(cache, prefix) {
56
+ this.cache = cache;
57
+ this.prefix = prefix;
58
+ }
59
+ cache;
60
+ prefix;
61
+ async get(key, fetcher, options) {
62
+ return this.cache.get(this.qualify(key), fetcher, options);
63
+ }
64
+ async getOrSet(key, fetcher, options) {
65
+ return this.cache.getOrSet(this.qualify(key), fetcher, options);
66
+ }
67
+ /**
68
+ * Like `get()`, but throws `CacheMissError` instead of returning `null`.
69
+ */
70
+ async getOrThrow(key, fetcher, options) {
71
+ return this.cache.getOrThrow(this.qualify(key), fetcher, options);
72
+ }
73
+ async has(key) {
74
+ return this.cache.has(this.qualify(key));
75
+ }
76
+ async ttl(key) {
77
+ return this.cache.ttl(this.qualify(key));
78
+ }
79
+ async set(key, value, options) {
80
+ await this.cache.set(this.qualify(key), value, options);
81
+ }
82
+ async delete(key) {
83
+ await this.cache.delete(this.qualify(key));
84
+ }
85
+ async mdelete(keys) {
86
+ await this.cache.mdelete(keys.map((k) => this.qualify(k)));
87
+ }
88
+ async clear() {
89
+ await this.cache.invalidateByPattern(`${this.prefix}:*`);
90
+ }
91
+ async mget(entries) {
92
+ return this.cache.mget(
93
+ entries.map((entry) => ({
94
+ ...entry,
95
+ key: this.qualify(entry.key)
96
+ }))
97
+ );
98
+ }
99
+ async mset(entries) {
100
+ await this.cache.mset(
101
+ entries.map((entry) => ({
102
+ ...entry,
103
+ key: this.qualify(entry.key)
104
+ }))
105
+ );
106
+ }
107
+ async invalidateByTag(tag) {
108
+ await this.cache.invalidateByTag(tag);
109
+ }
110
+ async invalidateByPattern(pattern) {
111
+ await this.cache.invalidateByPattern(this.qualify(pattern));
112
+ }
113
+ /**
114
+ * Returns detailed metadata about a single cache key within this namespace.
115
+ */
116
+ async inspect(key) {
117
+ return this.cache.inspect(this.qualify(key));
118
+ }
119
+ wrap(keyPrefix, fetcher, options) {
120
+ return this.cache.wrap(`${this.prefix}:${keyPrefix}`, fetcher, options);
121
+ }
122
+ warm(entries, options) {
123
+ return this.cache.warm(
124
+ entries.map((entry) => ({
125
+ ...entry,
126
+ key: this.qualify(entry.key)
127
+ })),
128
+ options
129
+ );
130
+ }
131
+ getMetrics() {
132
+ return this.cache.getMetrics();
133
+ }
134
+ getHitRate() {
135
+ return this.cache.getHitRate();
136
+ }
137
+ /**
138
+ * Creates a nested namespace. Keys are prefixed with `parentPrefix:childPrefix:`.
139
+ *
140
+ * ```ts
141
+ * const tenant = cache.namespace('tenant:abc')
142
+ * const posts = tenant.namespace('posts')
143
+ * // keys become: "tenant:abc:posts:mykey"
144
+ * ```
145
+ */
146
+ namespace(childPrefix) {
147
+ return new _CacheNamespace(this.cache, `${this.prefix}:${childPrefix}`);
148
+ }
149
+ qualify(key) {
150
+ return `${this.prefix}:${key}`;
151
+ }
152
+ };
153
+
154
+ // src/internal/CircuitBreakerManager.ts
155
+ var CircuitBreakerManager = class {
156
+ breakers = /* @__PURE__ */ new Map();
157
+ maxEntries;
158
+ constructor(options) {
159
+ this.maxEntries = options.maxEntries;
160
+ }
161
+ /**
162
+ * Throws if the circuit is open for the given key.
163
+ * Automatically resets if the cooldown has elapsed.
164
+ */
165
+ assertClosed(key, options) {
166
+ const state = this.breakers.get(key);
167
+ if (!state?.openUntil) {
168
+ return;
169
+ }
170
+ const now = Date.now();
171
+ if (state.openUntil <= now) {
172
+ state.openUntil = null;
173
+ state.failures = 0;
174
+ this.breakers.set(key, state);
175
+ return;
176
+ }
177
+ const remainingMs = state.openUntil - now;
178
+ const remainingSecs = Math.ceil(remainingMs / 1e3);
179
+ throw new Error(`Circuit breaker is open for key "${key}" (resets in ${remainingSecs}s).`);
180
+ }
181
+ recordFailure(key, options) {
182
+ if (!options) {
183
+ return;
184
+ }
185
+ const failureThreshold = options.failureThreshold ?? 3;
186
+ const cooldownMs = options.cooldownMs ?? 3e4;
187
+ const state = this.breakers.get(key) ?? { failures: 0, openUntil: null };
188
+ state.failures += 1;
189
+ if (state.failures >= failureThreshold) {
190
+ state.openUntil = Date.now() + cooldownMs;
191
+ }
192
+ this.breakers.set(key, state);
193
+ this.pruneIfNeeded();
194
+ }
195
+ recordSuccess(key) {
196
+ this.breakers.delete(key);
197
+ }
198
+ isOpen(key) {
199
+ const state = this.breakers.get(key);
200
+ if (!state?.openUntil) {
201
+ return false;
202
+ }
203
+ if (state.openUntil <= Date.now()) {
204
+ state.openUntil = null;
205
+ state.failures = 0;
206
+ return false;
207
+ }
208
+ return true;
209
+ }
210
+ delete(key) {
211
+ this.breakers.delete(key);
212
+ }
213
+ clear() {
214
+ this.breakers.clear();
215
+ }
216
+ tripCount() {
217
+ let count = 0;
218
+ for (const state of this.breakers.values()) {
219
+ if (state.openUntil !== null) {
220
+ count += 1;
221
+ }
222
+ }
223
+ return count;
224
+ }
225
+ pruneIfNeeded() {
226
+ if (this.breakers.size <= this.maxEntries) {
227
+ return;
228
+ }
229
+ for (const [key, state] of this.breakers.entries()) {
230
+ if (this.breakers.size <= this.maxEntries) {
231
+ break;
232
+ }
233
+ if (!state.openUntil || state.openUntil <= Date.now()) {
234
+ this.breakers.delete(key);
235
+ }
236
+ }
237
+ for (const key of this.breakers.keys()) {
238
+ if (this.breakers.size <= this.maxEntries) {
239
+ break;
240
+ }
241
+ this.breakers.delete(key);
242
+ }
243
+ }
244
+ };
245
+
246
+ // src/internal/MetricsCollector.ts
247
+ var MetricsCollector = class {
248
+ data = this.empty();
249
+ get snapshot() {
250
+ return {
251
+ ...this.data,
252
+ hitsByLayer: { ...this.data.hitsByLayer },
253
+ missesByLayer: { ...this.data.missesByLayer },
254
+ latencyByLayer: Object.fromEntries(Object.entries(this.data.latencyByLayer).map(([k, v]) => [k, { ...v }]))
255
+ };
256
+ }
257
+ increment(field, amount = 1) {
258
+ ;
259
+ this.data[field] += amount;
260
+ }
261
+ incrementLayer(map, layerName) {
262
+ this.data[map][layerName] = (this.data[map][layerName] ?? 0) + 1;
263
+ }
264
+ /**
265
+ * Records a read latency sample for the given layer.
266
+ * Maintains a rolling average and max using Welford's online algorithm.
267
+ */
268
+ recordLatency(layerName, durationMs) {
269
+ const existing = this.data.latencyByLayer[layerName];
270
+ if (!existing) {
271
+ this.data.latencyByLayer[layerName] = { avgMs: durationMs, maxMs: durationMs, count: 1 };
272
+ return;
273
+ }
274
+ existing.count += 1;
275
+ existing.avgMs += (durationMs - existing.avgMs) / existing.count;
276
+ if (durationMs > existing.maxMs) {
277
+ existing.maxMs = durationMs;
278
+ }
279
+ }
280
+ reset() {
281
+ this.data = this.empty();
282
+ }
283
+ hitRate() {
284
+ const total = this.data.hits + this.data.misses;
285
+ const overall = total === 0 ? 0 : this.data.hits / total;
286
+ const byLayer = {};
287
+ const allLayers = /* @__PURE__ */ new Set([...Object.keys(this.data.hitsByLayer), ...Object.keys(this.data.missesByLayer)]);
288
+ for (const layer of allLayers) {
289
+ const h = this.data.hitsByLayer[layer] ?? 0;
290
+ const m = this.data.missesByLayer[layer] ?? 0;
291
+ byLayer[layer] = h + m === 0 ? 0 : h / (h + m);
292
+ }
293
+ return { overall, byLayer };
294
+ }
295
+ empty() {
296
+ return {
297
+ hits: 0,
298
+ misses: 0,
299
+ fetches: 0,
300
+ sets: 0,
301
+ deletes: 0,
302
+ backfills: 0,
303
+ invalidations: 0,
304
+ staleHits: 0,
305
+ refreshes: 0,
306
+ refreshErrors: 0,
307
+ writeFailures: 0,
308
+ singleFlightWaits: 0,
309
+ negativeCacheHits: 0,
310
+ circuitBreakerTrips: 0,
311
+ degradedOperations: 0,
312
+ hitsByLayer: {},
313
+ missesByLayer: {},
314
+ latencyByLayer: {},
315
+ resetAt: Date.now()
316
+ };
317
+ }
318
+ };
47
319
 
48
320
  // src/internal/StoredValue.ts
49
321
  function isStoredValueEnvelope(value) {
@@ -146,67 +418,129 @@ function normalizePositiveSeconds(value) {
146
418
  return value;
147
419
  }
148
420
 
149
- // src/CacheNamespace.ts
150
- var CacheNamespace = class {
151
- constructor(cache, prefix) {
152
- this.cache = cache;
153
- this.prefix = prefix;
154
- }
155
- cache;
156
- prefix;
157
- async get(key, fetcher, options) {
158
- return this.cache.get(this.qualify(key), fetcher, options);
159
- }
160
- async set(key, value, options) {
161
- await this.cache.set(this.qualify(key), value, options);
162
- }
163
- async delete(key) {
164
- await this.cache.delete(this.qualify(key));
421
+ // src/internal/TtlResolver.ts
422
+ var DEFAULT_NEGATIVE_TTL_SECONDS = 60;
423
+ var TtlResolver = class {
424
+ accessProfiles = /* @__PURE__ */ new Map();
425
+ maxProfileEntries;
426
+ constructor(options) {
427
+ this.maxProfileEntries = options.maxProfileEntries;
165
428
  }
166
- async clear() {
167
- await this.cache.invalidateByPattern(`${this.prefix}:*`);
429
+ recordAccess(key) {
430
+ const profile = this.accessProfiles.get(key) ?? { hits: 0, lastAccessAt: Date.now() };
431
+ profile.hits += 1;
432
+ profile.lastAccessAt = Date.now();
433
+ this.accessProfiles.set(key, profile);
434
+ this.pruneIfNeeded();
168
435
  }
169
- async mget(entries) {
170
- return this.cache.mget(entries.map((entry) => ({
171
- ...entry,
172
- key: this.qualify(entry.key)
173
- })));
436
+ deleteProfile(key) {
437
+ this.accessProfiles.delete(key);
174
438
  }
175
- async mset(entries) {
176
- await this.cache.mset(entries.map((entry) => ({
177
- ...entry,
178
- key: this.qualify(entry.key)
179
- })));
439
+ clearProfiles() {
440
+ this.accessProfiles.clear();
180
441
  }
181
- async invalidateByTag(tag) {
182
- await this.cache.invalidateByTag(tag);
442
+ resolveFreshTtl(key, layerName, kind, options, fallbackTtl, globalNegativeTtl, globalTtl) {
443
+ const baseTtl = kind === "empty" ? this.resolveLayerSeconds(
444
+ layerName,
445
+ options?.negativeTtl,
446
+ globalNegativeTtl,
447
+ this.resolveLayerSeconds(layerName, options?.ttl, globalTtl, fallbackTtl) ?? DEFAULT_NEGATIVE_TTL_SECONDS
448
+ ) : this.resolveLayerSeconds(layerName, options?.ttl, globalTtl, fallbackTtl);
449
+ const adaptiveTtl = this.applyAdaptiveTtl(key, layerName, baseTtl, options?.adaptiveTtl);
450
+ const jitter = this.resolveLayerSeconds(layerName, options?.ttlJitter, void 0);
451
+ return this.applyJitter(adaptiveTtl, jitter);
183
452
  }
184
- async invalidateByPattern(pattern) {
185
- await this.cache.invalidateByPattern(this.qualify(pattern));
453
+ resolveLayerSeconds(layerName, override, globalDefault, fallback) {
454
+ if (override !== void 0) {
455
+ return this.readLayerNumber(layerName, override) ?? fallback;
456
+ }
457
+ if (globalDefault !== void 0) {
458
+ return this.readLayerNumber(layerName, globalDefault) ?? fallback;
459
+ }
460
+ return fallback;
186
461
  }
187
- wrap(keyPrefix, fetcher, options) {
188
- return this.cache.wrap(`${this.prefix}:${keyPrefix}`, fetcher, options);
462
+ applyAdaptiveTtl(key, layerName, ttl, adaptiveTtl) {
463
+ if (!ttl || !adaptiveTtl) {
464
+ return ttl;
465
+ }
466
+ const profile = this.accessProfiles.get(key);
467
+ if (!profile) {
468
+ return ttl;
469
+ }
470
+ const config = adaptiveTtl === true ? {} : adaptiveTtl;
471
+ const hotAfter = config.hotAfter ?? 3;
472
+ if (profile.hits < hotAfter) {
473
+ return ttl;
474
+ }
475
+ const step = this.resolveLayerSeconds(layerName, config.step, void 0, Math.max(1, Math.round(ttl / 2))) ?? 0;
476
+ const maxTtl = this.resolveLayerSeconds(layerName, config.maxTtl, void 0, ttl + step * 4) ?? ttl;
477
+ const multiplier = Math.floor(profile.hits / hotAfter);
478
+ return Math.min(maxTtl, ttl + step * multiplier);
189
479
  }
190
- warm(entries, options) {
191
- return this.cache.warm(entries.map((entry) => ({
192
- ...entry,
193
- key: this.qualify(entry.key)
194
- })), options);
480
+ applyJitter(ttl, jitter) {
481
+ if (!ttl || ttl <= 0 || !jitter || jitter <= 0) {
482
+ return ttl;
483
+ }
484
+ const delta = (Math.random() * 2 - 1) * jitter;
485
+ return Math.max(1, Math.round(ttl + delta));
195
486
  }
196
- getMetrics() {
197
- return this.cache.getMetrics();
487
+ readLayerNumber(layerName, value) {
488
+ if (typeof value === "number") {
489
+ return value;
490
+ }
491
+ return value[layerName];
198
492
  }
199
- qualify(key) {
200
- return `${this.prefix}:${key}`;
493
+ pruneIfNeeded() {
494
+ if (this.accessProfiles.size <= this.maxProfileEntries) {
495
+ return;
496
+ }
497
+ const toRemove = Math.ceil(this.maxProfileEntries * 0.1);
498
+ let removed = 0;
499
+ for (const key of this.accessProfiles.keys()) {
500
+ if (removed >= toRemove) {
501
+ break;
502
+ }
503
+ this.accessProfiles.delete(key);
504
+ removed += 1;
505
+ }
201
506
  }
202
507
  };
203
508
 
204
509
  // src/invalidation/PatternMatcher.ts
205
- var PatternMatcher = class {
510
+ var PatternMatcher = class _PatternMatcher {
511
+ /**
512
+ * Tests whether a glob-style pattern matches a value.
513
+ * Supports `*` (any sequence of characters) and `?` (any single character).
514
+ * Uses a linear-time algorithm to avoid ReDoS vulnerabilities.
515
+ */
206
516
  static matches(pattern, value) {
207
- const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&");
208
- const regex = new RegExp(`^${escaped.replace(/\*/g, ".*").replace(/\?/g, ".")}$`);
209
- return regex.test(value);
517
+ return _PatternMatcher.matchLinear(pattern, value);
518
+ }
519
+ /**
520
+ * Linear-time glob matching using dynamic programming.
521
+ * Avoids catastrophic backtracking that RegExp-based glob matching can cause.
522
+ */
523
+ static matchLinear(pattern, value) {
524
+ const m = pattern.length;
525
+ const n = value.length;
526
+ const dp = Array.from({ length: m + 1 }, () => new Array(n + 1).fill(false));
527
+ dp[0][0] = true;
528
+ for (let i = 1; i <= m; i++) {
529
+ if (pattern[i - 1] === "*") {
530
+ dp[i][0] = dp[i - 1]?.[0];
531
+ }
532
+ }
533
+ for (let i = 1; i <= m; i++) {
534
+ for (let j = 1; j <= n; j++) {
535
+ const pc = pattern[i - 1];
536
+ if (pc === "*") {
537
+ dp[i][j] = dp[i - 1]?.[j] || dp[i]?.[j - 1];
538
+ } else if (pc === "?" || pc === value[j - 1]) {
539
+ dp[i][j] = dp[i - 1]?.[j - 1];
540
+ }
541
+ }
542
+ }
543
+ return dp[m]?.[n];
210
544
  }
211
545
  };
212
546
 
@@ -215,11 +549,17 @@ var TagIndex = class {
215
549
  tagToKeys = /* @__PURE__ */ new Map();
216
550
  keyToTags = /* @__PURE__ */ new Map();
217
551
  knownKeys = /* @__PURE__ */ new Set();
552
+ maxKnownKeys;
553
+ constructor(options = {}) {
554
+ this.maxKnownKeys = options.maxKnownKeys;
555
+ }
218
556
  async touch(key) {
219
557
  this.knownKeys.add(key);
558
+ this.pruneKnownKeysIfNeeded();
220
559
  }
221
560
  async track(key, tags) {
222
561
  this.knownKeys.add(key);
562
+ this.pruneKnownKeysIfNeeded();
223
563
  if (tags.length === 0) {
224
564
  return;
225
565
  }
@@ -258,6 +598,9 @@ var TagIndex = class {
258
598
  async keysForTag(tag) {
259
599
  return [...this.tagToKeys.get(tag) ?? /* @__PURE__ */ new Set()];
260
600
  }
601
+ async tagsForKey(key) {
602
+ return [...this.keyToTags.get(key) ?? /* @__PURE__ */ new Set()];
603
+ }
261
604
  async matchPattern(pattern) {
262
605
  return [...this.knownKeys].filter((key) => PatternMatcher.matches(pattern, key));
263
606
  }
@@ -266,6 +609,21 @@ var TagIndex = class {
266
609
  this.keyToTags.clear();
267
610
  this.knownKeys.clear();
268
611
  }
612
+ pruneKnownKeysIfNeeded() {
613
+ if (this.maxKnownKeys === void 0 || this.knownKeys.size <= this.maxKnownKeys) {
614
+ return;
615
+ }
616
+ const toRemove = Math.ceil(this.maxKnownKeys * 0.1);
617
+ let removed = 0;
618
+ for (const key of this.knownKeys) {
619
+ if (removed >= toRemove) {
620
+ break;
621
+ }
622
+ this.knownKeys.delete(key);
623
+ this.keyToTags.delete(key);
624
+ removed += 1;
625
+ }
626
+ }
269
627
  };
270
628
 
271
629
  // src/stampede/StampedeGuard.ts
@@ -294,31 +652,22 @@ var StampedeGuard = class {
294
652
  }
295
653
  };
296
654
 
655
+ // src/types.ts
656
+ var CacheMissError = class extends Error {
657
+ key;
658
+ constructor(key) {
659
+ super(`Cache miss for key "${key}".`);
660
+ this.name = "CacheMissError";
661
+ this.key = key;
662
+ }
663
+ };
664
+
297
665
  // src/CacheStack.ts
298
- var DEFAULT_NEGATIVE_TTL_SECONDS = 60;
299
666
  var DEFAULT_SINGLE_FLIGHT_LEASE_MS = 3e4;
300
667
  var DEFAULT_SINGLE_FLIGHT_TIMEOUT_MS = 5e3;
301
668
  var DEFAULT_SINGLE_FLIGHT_POLL_MS = 50;
302
669
  var MAX_CACHE_KEY_LENGTH = 1024;
303
- var EMPTY_METRICS = () => ({
304
- hits: 0,
305
- misses: 0,
306
- fetches: 0,
307
- sets: 0,
308
- deletes: 0,
309
- backfills: 0,
310
- invalidations: 0,
311
- staleHits: 0,
312
- refreshes: 0,
313
- refreshErrors: 0,
314
- writeFailures: 0,
315
- singleFlightWaits: 0,
316
- negativeCacheHits: 0,
317
- circuitBreakerTrips: 0,
318
- degradedOperations: 0,
319
- hitsByLayer: {},
320
- missesByLayer: {}
321
- });
670
+ var DEFAULT_MAX_PROFILE_ENTRIES = 1e5;
322
671
  var DebugLogger = class {
323
672
  enabled;
324
673
  constructor(enabled) {
@@ -353,6 +702,14 @@ var CacheStack = class extends import_node_events.EventEmitter {
353
702
  throw new Error("CacheStack requires at least one cache layer.");
354
703
  }
355
704
  this.validateConfiguration();
705
+ const maxProfileEntries = options.maxProfileEntries ?? DEFAULT_MAX_PROFILE_ENTRIES;
706
+ this.ttlResolver = new TtlResolver({ maxProfileEntries });
707
+ this.circuitBreakerManager = new CircuitBreakerManager({ maxEntries: maxProfileEntries });
708
+ if (options.publishSetInvalidation !== void 0) {
709
+ console.warn(
710
+ "[layercache] CacheStackOptions.publishSetInvalidation is deprecated. Use broadcastL1Invalidation instead."
711
+ );
712
+ }
356
713
  const debugEnv = process.env.DEBUG?.split(",").includes("layercache:debug") ?? false;
357
714
  this.logger = typeof options.logger === "object" ? options.logger : new DebugLogger(Boolean(options.logger) || debugEnv);
358
715
  this.tagIndex = options.tagIndex ?? new TagIndex();
@@ -361,36 +718,42 @@ var CacheStack = class extends import_node_events.EventEmitter {
361
718
  layers;
362
719
  options;
363
720
  stampedeGuard = new StampedeGuard();
364
- metrics = EMPTY_METRICS();
721
+ metricsCollector = new MetricsCollector();
365
722
  instanceId = (0, import_node_crypto.randomUUID)();
366
723
  startup;
367
724
  unsubscribeInvalidation;
368
725
  logger;
369
726
  tagIndex;
370
727
  backgroundRefreshes = /* @__PURE__ */ new Map();
371
- accessProfiles = /* @__PURE__ */ new Map();
372
728
  layerDegradedUntil = /* @__PURE__ */ new Map();
373
- circuitBreakers = /* @__PURE__ */ new Map();
729
+ ttlResolver;
730
+ circuitBreakerManager;
374
731
  isDisconnecting = false;
375
732
  disconnectPromise;
733
+ /**
734
+ * Read-through cache get.
735
+ * Returns the cached value if present and fresh, or invokes `fetcher` on a miss
736
+ * and stores the result across all layers. Returns `null` if the key is not found
737
+ * and no `fetcher` is provided.
738
+ */
376
739
  async get(key, fetcher, options) {
377
740
  const normalizedKey = this.validateCacheKey(key);
378
741
  this.validateWriteOptions(options);
379
742
  await this.startup;
380
743
  const hit = await this.readFromLayers(normalizedKey, options, "allow-stale");
381
744
  if (hit.found) {
382
- this.recordAccess(normalizedKey);
745
+ this.ttlResolver.recordAccess(normalizedKey);
383
746
  if (this.isNegativeStoredValue(hit.stored)) {
384
- this.metrics.negativeCacheHits += 1;
747
+ this.metricsCollector.increment("negativeCacheHits");
385
748
  }
386
749
  if (hit.state === "fresh") {
387
- this.metrics.hits += 1;
750
+ this.metricsCollector.increment("hits");
388
751
  await this.applyFreshReadPolicies(normalizedKey, hit, options, fetcher);
389
752
  return hit.value;
390
753
  }
391
754
  if (hit.state === "stale-while-revalidate") {
392
- this.metrics.hits += 1;
393
- this.metrics.staleHits += 1;
755
+ this.metricsCollector.increment("hits");
756
+ this.metricsCollector.increment("staleHits");
394
757
  this.emit("stale-serve", { key: normalizedKey, state: hit.state, layer: hit.layerName });
395
758
  if (fetcher) {
396
759
  this.scheduleBackgroundRefresh(normalizedKey, fetcher, options);
@@ -398,47 +761,148 @@ var CacheStack = class extends import_node_events.EventEmitter {
398
761
  return hit.value;
399
762
  }
400
763
  if (!fetcher) {
401
- this.metrics.hits += 1;
402
- this.metrics.staleHits += 1;
764
+ this.metricsCollector.increment("hits");
765
+ this.metricsCollector.increment("staleHits");
403
766
  this.emit("stale-serve", { key: normalizedKey, state: hit.state, layer: hit.layerName });
404
767
  return hit.value;
405
768
  }
406
769
  try {
407
770
  return await this.fetchWithGuards(normalizedKey, fetcher, options);
408
771
  } catch (error) {
409
- this.metrics.staleHits += 1;
410
- this.metrics.refreshErrors += 1;
772
+ this.metricsCollector.increment("staleHits");
773
+ this.metricsCollector.increment("refreshErrors");
411
774
  this.logger.debug?.("stale-if-error", { key: normalizedKey, error: this.formatError(error) });
412
775
  return hit.value;
413
776
  }
414
777
  }
415
- this.metrics.misses += 1;
778
+ this.metricsCollector.increment("misses");
416
779
  if (!fetcher) {
417
780
  return null;
418
781
  }
419
782
  return this.fetchWithGuards(normalizedKey, fetcher, options);
420
783
  }
784
+ /**
785
+ * Alias for `get(key, fetcher, options)` — explicit get-or-set pattern.
786
+ * Fetches and caches the value if not already present.
787
+ */
788
+ async getOrSet(key, fetcher, options) {
789
+ return this.get(key, fetcher, options);
790
+ }
791
+ /**
792
+ * Like `get()`, but throws `CacheMissError` instead of returning `null`.
793
+ * Useful when the value is expected to exist or the fetcher is expected to
794
+ * return non-null.
795
+ */
796
+ async getOrThrow(key, fetcher, options) {
797
+ const value = await this.get(key, fetcher, options);
798
+ if (value === null) {
799
+ throw new CacheMissError(key);
800
+ }
801
+ return value;
802
+ }
803
+ /**
804
+ * Returns true if the given key exists and is not expired in any layer.
805
+ */
806
+ async has(key) {
807
+ const normalizedKey = this.validateCacheKey(key);
808
+ await this.startup;
809
+ for (const layer of this.layers) {
810
+ if (this.shouldSkipLayer(layer)) {
811
+ continue;
812
+ }
813
+ if (layer.has) {
814
+ try {
815
+ const exists = await layer.has(normalizedKey);
816
+ if (exists) {
817
+ return true;
818
+ }
819
+ } catch {
820
+ }
821
+ } else {
822
+ try {
823
+ const value = await layer.get(normalizedKey);
824
+ if (value !== null) {
825
+ return true;
826
+ }
827
+ } catch {
828
+ }
829
+ }
830
+ }
831
+ return false;
832
+ }
833
+ /**
834
+ * Returns the remaining TTL in seconds for the key in the fastest layer
835
+ * that has it, or null if the key is not found / has no TTL.
836
+ */
837
+ async ttl(key) {
838
+ const normalizedKey = this.validateCacheKey(key);
839
+ await this.startup;
840
+ for (const layer of this.layers) {
841
+ if (this.shouldSkipLayer(layer)) {
842
+ continue;
843
+ }
844
+ if (layer.ttl) {
845
+ try {
846
+ const remaining = await layer.ttl(normalizedKey);
847
+ if (remaining !== null) {
848
+ return remaining;
849
+ }
850
+ } catch {
851
+ }
852
+ }
853
+ }
854
+ return null;
855
+ }
856
+ /**
857
+ * Stores a value in all cache layers. Overwrites any existing value.
858
+ */
421
859
  async set(key, value, options) {
422
860
  const normalizedKey = this.validateCacheKey(key);
423
861
  this.validateWriteOptions(options);
424
862
  await this.startup;
425
863
  await this.storeEntry(normalizedKey, "value", value, options);
426
864
  }
865
+ /**
866
+ * Deletes the key from all layers and publishes an invalidation message.
867
+ */
427
868
  async delete(key) {
428
869
  const normalizedKey = this.validateCacheKey(key);
429
870
  await this.startup;
430
871
  await this.deleteKeys([normalizedKey]);
431
- await this.publishInvalidation({ scope: "key", keys: [normalizedKey], sourceId: this.instanceId, operation: "delete" });
872
+ await this.publishInvalidation({
873
+ scope: "key",
874
+ keys: [normalizedKey],
875
+ sourceId: this.instanceId,
876
+ operation: "delete"
877
+ });
432
878
  }
433
879
  async clear() {
434
880
  await this.startup;
435
881
  await Promise.all(this.layers.map((layer) => layer.clear()));
436
882
  await this.tagIndex.clear();
437
- this.accessProfiles.clear();
438
- this.metrics.invalidations += 1;
883
+ this.ttlResolver.clearProfiles();
884
+ this.circuitBreakerManager.clear();
885
+ this.metricsCollector.increment("invalidations");
439
886
  this.logger.debug?.("clear");
440
887
  await this.publishInvalidation({ scope: "clear", sourceId: this.instanceId, operation: "clear" });
441
888
  }
889
+ /**
890
+ * Deletes multiple keys at once. More efficient than calling `delete()` in a loop.
891
+ */
892
+ async mdelete(keys) {
893
+ if (keys.length === 0) {
894
+ return;
895
+ }
896
+ await this.startup;
897
+ const normalizedKeys = keys.map((k) => this.validateCacheKey(k));
898
+ await this.deleteKeys(normalizedKeys);
899
+ await this.publishInvalidation({
900
+ scope: "keys",
901
+ keys: normalizedKeys,
902
+ sourceId: this.instanceId,
903
+ operation: "delete"
904
+ });
905
+ }
442
906
  async mget(entries) {
443
907
  if (entries.length === 0) {
444
908
  return [];
@@ -476,7 +940,9 @@ var CacheStack = class extends import_node_events.EventEmitter {
476
940
  const indexesByKey = /* @__PURE__ */ new Map();
477
941
  const resultsByKey = /* @__PURE__ */ new Map();
478
942
  for (let index = 0; index < normalizedEntries.length; index += 1) {
479
- const key = normalizedEntries[index].key;
943
+ const entry = normalizedEntries[index];
944
+ if (!entry) continue;
945
+ const key = entry.key;
480
946
  const indexes = indexesByKey.get(key) ?? [];
481
947
  indexes.push(index);
482
948
  indexesByKey.set(key, indexes);
@@ -484,6 +950,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
484
950
  }
485
951
  for (let layerIndex = 0; layerIndex < this.layers.length; layerIndex += 1) {
486
952
  const layer = this.layers[layerIndex];
953
+ if (!layer) continue;
487
954
  const keys = [...pending];
488
955
  if (keys.length === 0) {
489
956
  break;
@@ -492,7 +959,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
492
959
  for (let offset = 0; offset < values.length; offset += 1) {
493
960
  const key = keys[offset];
494
961
  const stored = values[offset];
495
- if (stored === null) {
962
+ if (!key || stored === null) {
496
963
  continue;
497
964
  }
498
965
  const resolved = resolveStoredValue(stored);
@@ -504,13 +971,13 @@ var CacheStack = class extends import_node_events.EventEmitter {
504
971
  await this.backfill(key, stored, layerIndex - 1);
505
972
  resultsByKey.set(key, resolved.value);
506
973
  pending.delete(key);
507
- this.metrics.hits += indexesByKey.get(key)?.length ?? 1;
974
+ this.metricsCollector.increment("hits", indexesByKey.get(key)?.length ?? 1);
508
975
  }
509
976
  }
510
977
  if (pending.size > 0) {
511
978
  for (const key of pending) {
512
979
  await this.tagIndex.remove(key);
513
- this.metrics.misses += indexesByKey.get(key)?.length ?? 1;
980
+ this.metricsCollector.increment("misses", indexesByKey.get(key)?.length ?? 1);
514
981
  }
515
982
  }
516
983
  return normalizedEntries.map((entry) => resultsByKey.get(entry.key) ?? null);
@@ -525,26 +992,38 @@ var CacheStack = class extends import_node_events.EventEmitter {
525
992
  }
526
993
  async warm(entries, options = {}) {
527
994
  const concurrency = Math.max(1, options.concurrency ?? 4);
995
+ const total = entries.length;
996
+ let completed = 0;
528
997
  const queue = [...entries].sort((left, right) => (right.priority ?? 0) - (left.priority ?? 0));
529
- const workers = Array.from({ length: Math.min(concurrency, queue.length) }, async () => {
998
+ const workers = Array.from({ length: Math.min(concurrency, queue.length || 1) }, async () => {
530
999
  while (queue.length > 0) {
531
1000
  const entry = queue.shift();
532
1001
  if (!entry) {
533
1002
  return;
534
1003
  }
1004
+ let success = false;
535
1005
  try {
536
1006
  await this.get(entry.key, entry.fetcher, entry.options);
537
1007
  this.emit("warm", { key: entry.key });
1008
+ success = true;
538
1009
  } catch (error) {
539
1010
  this.emitError("warm", { key: entry.key, error: this.formatError(error) });
540
1011
  if (!options.continueOnError) {
541
1012
  throw error;
542
1013
  }
1014
+ } finally {
1015
+ completed += 1;
1016
+ const progress = { completed, total, key: entry.key, success };
1017
+ options.onProgress?.(progress);
543
1018
  }
544
1019
  }
545
1020
  });
546
1021
  await Promise.all(workers);
547
1022
  }
1023
+ /**
1024
+ * Returns a cached version of `fetcher`. The cache key is derived from
1025
+ * `prefix` plus the serialized arguments unless a `keyResolver` is provided.
1026
+ */
548
1027
  wrap(prefix, fetcher, options = {}) {
549
1028
  return (...args) => {
550
1029
  const suffix = options.keyResolver ? options.keyResolver(...args) : args.map((argument) => this.serializeKeyPart(argument)).join(":");
@@ -552,6 +1031,10 @@ var CacheStack = class extends import_node_events.EventEmitter {
552
1031
  return this.get(key, () => fetcher(...args), options);
553
1032
  };
554
1033
  }
1034
+ /**
1035
+ * Creates a `CacheNamespace` that automatically prefixes all keys with
1036
+ * `prefix:`. Useful for multi-tenant or module-level isolation.
1037
+ */
555
1038
  namespace(prefix) {
556
1039
  return new CacheNamespace(this, prefix);
557
1040
  }
@@ -568,7 +1051,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
568
1051
  await this.publishInvalidation({ scope: "keys", keys, sourceId: this.instanceId, operation: "invalidate" });
569
1052
  }
570
1053
  getMetrics() {
571
- return { ...this.metrics };
1054
+ return this.metricsCollector.snapshot;
572
1055
  }
573
1056
  getStats() {
574
1057
  return {
@@ -582,7 +1065,53 @@ var CacheStack = class extends import_node_events.EventEmitter {
582
1065
  };
583
1066
  }
584
1067
  resetMetrics() {
585
- Object.assign(this.metrics, EMPTY_METRICS());
1068
+ this.metricsCollector.reset();
1069
+ }
1070
+ /**
1071
+ * Returns computed hit-rate statistics (overall and per-layer).
1072
+ */
1073
+ getHitRate() {
1074
+ return this.metricsCollector.hitRate();
1075
+ }
1076
+ /**
1077
+ * Returns detailed metadata about a single cache key: which layers contain it,
1078
+ * remaining fresh/stale/error TTLs, and associated tags.
1079
+ * Returns `null` if the key does not exist in any layer.
1080
+ */
1081
+ async inspect(key) {
1082
+ const normalizedKey = this.validateCacheKey(key);
1083
+ await this.startup;
1084
+ const foundInLayers = [];
1085
+ let freshTtlSeconds = null;
1086
+ let staleTtlSeconds = null;
1087
+ let errorTtlSeconds = null;
1088
+ let isStale = false;
1089
+ for (const layer of this.layers) {
1090
+ if (this.shouldSkipLayer(layer)) {
1091
+ continue;
1092
+ }
1093
+ const stored = await this.readLayerEntry(layer, normalizedKey);
1094
+ if (stored === null) {
1095
+ continue;
1096
+ }
1097
+ const resolved = resolveStoredValue(stored);
1098
+ if (resolved.state === "expired") {
1099
+ continue;
1100
+ }
1101
+ foundInLayers.push(layer.name);
1102
+ if (foundInLayers.length === 1 && resolved.envelope) {
1103
+ const now = Date.now();
1104
+ freshTtlSeconds = resolved.envelope.freshUntil !== null ? Math.max(0, Math.ceil((resolved.envelope.freshUntil - now) / 1e3)) : null;
1105
+ staleTtlSeconds = resolved.envelope.staleUntil !== null ? Math.max(0, Math.ceil((resolved.envelope.staleUntil - now) / 1e3)) : null;
1106
+ errorTtlSeconds = resolved.envelope.errorUntil !== null ? Math.max(0, Math.ceil((resolved.envelope.errorUntil - now) / 1e3)) : null;
1107
+ isStale = resolved.state === "stale-while-revalidate" || resolved.state === "stale-if-error";
1108
+ }
1109
+ }
1110
+ if (foundInLayers.length === 0) {
1111
+ return null;
1112
+ }
1113
+ const tags = await this.getTagsForKey(normalizedKey);
1114
+ return { key: normalizedKey, foundInLayers, freshTtlSeconds, staleTtlSeconds, errorTtlSeconds, isStale, tags };
586
1115
  }
587
1116
  async exportState() {
588
1117
  await this.startup;
@@ -611,10 +1140,12 @@ var CacheStack = class extends import_node_events.EventEmitter {
611
1140
  }
612
1141
  async importState(entries) {
613
1142
  await this.startup;
614
- await Promise.all(entries.map(async (entry) => {
615
- await Promise.all(this.layers.map((layer) => layer.set(entry.key, entry.value, entry.ttl)));
616
- await this.tagIndex.touch(entry.key);
617
- }));
1143
+ await Promise.all(
1144
+ entries.map(async (entry) => {
1145
+ await Promise.all(this.layers.map((layer) => layer.set(entry.key, entry.value, entry.ttl)));
1146
+ await this.tagIndex.touch(entry.key);
1147
+ })
1148
+ );
618
1149
  }
619
1150
  async persistToFile(filePath) {
620
1151
  const snapshot = await this.exportState();
@@ -622,11 +1153,21 @@ var CacheStack = class extends import_node_events.EventEmitter {
622
1153
  }
623
1154
  async restoreFromFile(filePath) {
624
1155
  const raw = await import_node_fs.promises.readFile(filePath, "utf8");
625
- const snapshot = JSON.parse(raw);
626
- if (!this.isCacheSnapshotEntries(snapshot)) {
627
- throw new Error("Invalid snapshot file: expected CacheSnapshotEntry[]");
1156
+ let parsed;
1157
+ try {
1158
+ parsed = JSON.parse(raw, (_key, value) => {
1159
+ if (value !== null && typeof value === "object" && !Array.isArray(value)) {
1160
+ return Object.assign(/* @__PURE__ */ Object.create(null), value);
1161
+ }
1162
+ return value;
1163
+ });
1164
+ } catch (cause) {
1165
+ throw new Error(`Invalid snapshot file: could not parse JSON (${this.formatError(cause)})`);
628
1166
  }
629
- await this.importState(snapshot);
1167
+ if (!this.isCacheSnapshotEntries(parsed)) {
1168
+ throw new Error("Invalid snapshot file: expected an array of { key: string, value, ttl? } entries");
1169
+ }
1170
+ await this.importState(parsed);
630
1171
  }
631
1172
  async disconnect() {
632
1173
  if (!this.disconnectPromise) {
@@ -651,7 +1192,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
651
1192
  const fetchTask = async () => {
652
1193
  const secondHit = await this.readFromLayers(key, options, "fresh-only");
653
1194
  if (secondHit.found) {
654
- this.metrics.hits += 1;
1195
+ this.metricsCollector.increment("hits");
655
1196
  return secondHit.value;
656
1197
  }
657
1198
  return this.fetchAndPopulate(key, fetcher, options);
@@ -676,12 +1217,12 @@ var CacheStack = class extends import_node_events.EventEmitter {
676
1217
  const timeoutMs = this.options.singleFlightTimeoutMs ?? DEFAULT_SINGLE_FLIGHT_TIMEOUT_MS;
677
1218
  const pollIntervalMs = this.options.singleFlightPollMs ?? DEFAULT_SINGLE_FLIGHT_POLL_MS;
678
1219
  const deadline = Date.now() + timeoutMs;
679
- this.metrics.singleFlightWaits += 1;
1220
+ this.metricsCollector.increment("singleFlightWaits");
680
1221
  this.emit("stampede-dedupe", { key });
681
1222
  while (Date.now() < deadline) {
682
1223
  const hit = await this.readFromLayers(key, options, "fresh-only");
683
1224
  if (hit.found) {
684
- this.metrics.hits += 1;
1225
+ this.metricsCollector.increment("hits");
685
1226
  return hit.value;
686
1227
  }
687
1228
  await this.sleep(pollIntervalMs);
@@ -689,12 +1230,14 @@ var CacheStack = class extends import_node_events.EventEmitter {
689
1230
  return this.fetchAndPopulate(key, fetcher, options);
690
1231
  }
691
1232
  async fetchAndPopulate(key, fetcher, options) {
692
- this.assertCircuitClosed(key, options?.circuitBreaker ?? this.options.circuitBreaker);
693
- this.metrics.fetches += 1;
1233
+ this.circuitBreakerManager.assertClosed(key, options?.circuitBreaker ?? this.options.circuitBreaker);
1234
+ this.metricsCollector.increment("fetches");
1235
+ const fetchStart = Date.now();
694
1236
  let fetched;
695
1237
  try {
696
1238
  fetched = await fetcher();
697
- this.resetCircuitBreaker(key);
1239
+ this.circuitBreakerManager.recordSuccess(key);
1240
+ this.logger.debug?.("fetch", { key, durationMs: Date.now() - fetchStart });
698
1241
  } catch (error) {
699
1242
  this.recordCircuitFailure(key, options?.circuitBreaker ?? this.options.circuitBreaker, error);
700
1243
  throw error;
@@ -706,6 +1249,9 @@ var CacheStack = class extends import_node_events.EventEmitter {
706
1249
  await this.storeEntry(key, "empty", null, options);
707
1250
  return null;
708
1251
  }
1252
+ if (options?.shouldCache && !options.shouldCache(fetched)) {
1253
+ return fetched;
1254
+ }
709
1255
  await this.storeEntry(key, "value", fetched, options);
710
1256
  return fetched;
711
1257
  }
@@ -716,7 +1262,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
716
1262
  } else {
717
1263
  await this.tagIndex.touch(key);
718
1264
  }
719
- this.metrics.sets += 1;
1265
+ this.metricsCollector.increment("sets");
720
1266
  this.logger.debug?.("set", { key, kind, tags: options?.tags });
721
1267
  this.emit("set", { key, kind, tags: options?.tags });
722
1268
  if (this.shouldBroadcastL1Invalidation()) {
@@ -727,9 +1273,13 @@ var CacheStack = class extends import_node_events.EventEmitter {
727
1273
  let sawRetainableValue = false;
728
1274
  for (let index = 0; index < this.layers.length; index += 1) {
729
1275
  const layer = this.layers[index];
1276
+ if (!layer) continue;
1277
+ const readStart = performance.now();
730
1278
  const stored = await this.readLayerEntry(layer, key);
1279
+ const readDuration = performance.now() - readStart;
1280
+ this.metricsCollector.recordLatency(layer.name, readDuration);
731
1281
  if (stored === null) {
732
- this.incrementMetricMap(this.metrics.missesByLayer, layer.name);
1282
+ this.metricsCollector.incrementLayer("missesByLayer", layer.name);
733
1283
  continue;
734
1284
  }
735
1285
  const resolved = resolveStoredValue(stored);
@@ -743,10 +1293,17 @@ var CacheStack = class extends import_node_events.EventEmitter {
743
1293
  }
744
1294
  await this.tagIndex.touch(key);
745
1295
  await this.backfill(key, stored, index - 1, options);
746
- this.incrementMetricMap(this.metrics.hitsByLayer, layer.name);
1296
+ this.metricsCollector.incrementLayer("hitsByLayer", layer.name);
747
1297
  this.logger.debug?.("hit", { key, layer: layer.name, state: resolved.state });
748
1298
  this.emit("hit", { key, layer: layer.name, state: resolved.state });
749
- return { found: true, value: resolved.value, stored, state: resolved.state, layerIndex: index, layerName: layer.name };
1299
+ return {
1300
+ found: true,
1301
+ value: resolved.value,
1302
+ stored,
1303
+ state: resolved.state,
1304
+ layerIndex: index,
1305
+ layerName: layer.name
1306
+ };
750
1307
  }
751
1308
  if (!sawRetainableValue) {
752
1309
  await this.tagIndex.remove(key);
@@ -778,7 +1335,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
778
1335
  }
779
1336
  for (let index = 0; index <= upToIndex; index += 1) {
780
1337
  const layer = this.layers[index];
781
- if (this.shouldSkipLayer(layer)) {
1338
+ if (!layer || this.shouldSkipLayer(layer)) {
782
1339
  continue;
783
1340
  }
784
1341
  const ttl = remainingStoredTtlSeconds(stored) ?? this.resolveLayerSeconds(layer.name, options?.ttl, void 0, layer.defaultTtl);
@@ -788,7 +1345,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
788
1345
  await this.handleLayerFailure(layer, "backfill", error);
789
1346
  continue;
790
1347
  }
791
- this.metrics.backfills += 1;
1348
+ this.metricsCollector.increment("backfills");
792
1349
  this.logger.debug?.("backfill", { key, layer: layer.name });
793
1350
  this.emit("backfill", { key, layer: layer.name });
794
1351
  }
@@ -805,11 +1362,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
805
1362
  options?.staleWhileRevalidate,
806
1363
  this.options.staleWhileRevalidate
807
1364
  );
808
- const staleIfError = this.resolveLayerSeconds(
809
- layer.name,
810
- options?.staleIfError,
811
- this.options.staleIfError
812
- );
1365
+ const staleIfError = this.resolveLayerSeconds(layer.name, options?.staleIfError, this.options.staleIfError);
813
1366
  const payload = createStoredValueEnvelope({
814
1367
  kind,
815
1368
  value,
@@ -837,7 +1390,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
837
1390
  if (failures.length === 0) {
838
1391
  return;
839
1392
  }
840
- this.metrics.writeFailures += failures.length;
1393
+ this.metricsCollector.increment("writeFailures", failures.length);
841
1394
  this.logger.debug?.("write-failure", {
842
1395
  ...context,
843
1396
  failures: failures.map((failure) => this.formatError(failure.reason))
@@ -850,42 +1403,10 @@ var CacheStack = class extends import_node_events.EventEmitter {
850
1403
  }
851
1404
  }
852
1405
  resolveFreshTtl(key, layerName, kind, options, fallbackTtl) {
853
- const baseTtl = kind === "empty" ? this.resolveLayerSeconds(
854
- layerName,
855
- options?.negativeTtl,
856
- this.options.negativeTtl,
857
- this.resolveLayerSeconds(layerName, options?.ttl, void 0, fallbackTtl) ?? DEFAULT_NEGATIVE_TTL_SECONDS
858
- ) : this.resolveLayerSeconds(layerName, options?.ttl, void 0, fallbackTtl);
859
- const adaptiveTtl = this.applyAdaptiveTtl(
860
- key,
861
- layerName,
862
- baseTtl,
863
- options?.adaptiveTtl ?? this.options.adaptiveTtl
864
- );
865
- const jitter = this.resolveLayerSeconds(layerName, options?.ttlJitter, this.options.ttlJitter);
866
- return this.applyJitter(adaptiveTtl, jitter);
1406
+ return this.ttlResolver.resolveFreshTtl(key, layerName, kind, options, fallbackTtl, this.options.negativeTtl);
867
1407
  }
868
1408
  resolveLayerSeconds(layerName, override, globalDefault, fallback) {
869
- if (override !== void 0) {
870
- return this.readLayerNumber(layerName, override) ?? fallback;
871
- }
872
- if (globalDefault !== void 0) {
873
- return this.readLayerNumber(layerName, globalDefault) ?? fallback;
874
- }
875
- return fallback;
876
- }
877
- readLayerNumber(layerName, value) {
878
- if (typeof value === "number") {
879
- return value;
880
- }
881
- return value[layerName];
882
- }
883
- applyJitter(ttl, jitter) {
884
- if (!ttl || ttl <= 0 || !jitter || jitter <= 0) {
885
- return ttl;
886
- }
887
- const delta = (Math.random() * 2 - 1) * jitter;
888
- return Math.max(1, Math.round(ttl + delta));
1409
+ return this.ttlResolver.resolveLayerSeconds(layerName, override, globalDefault, fallback);
889
1410
  }
890
1411
  shouldNegativeCache(options) {
891
1412
  return options?.negativeCache ?? this.options.negativeCaching ?? false;
@@ -895,11 +1416,11 @@ var CacheStack = class extends import_node_events.EventEmitter {
895
1416
  return;
896
1417
  }
897
1418
  const refresh = (async () => {
898
- this.metrics.refreshes += 1;
1419
+ this.metricsCollector.increment("refreshes");
899
1420
  try {
900
1421
  await this.fetchWithGuards(key, fetcher, options);
901
1422
  } catch (error) {
902
- this.metrics.refreshErrors += 1;
1423
+ this.metricsCollector.increment("refreshErrors");
903
1424
  this.logger.debug?.("refresh-error", { key, error: this.formatError(error) });
904
1425
  } finally {
905
1426
  this.backgroundRefreshes.delete(key);
@@ -921,10 +1442,11 @@ var CacheStack = class extends import_node_events.EventEmitter {
921
1442
  await this.deleteKeysFromLayers(this.layers, keys);
922
1443
  for (const key of keys) {
923
1444
  await this.tagIndex.remove(key);
924
- this.accessProfiles.delete(key);
1445
+ this.ttlResolver.deleteProfile(key);
1446
+ this.circuitBreakerManager.delete(key);
925
1447
  }
926
- this.metrics.deletes += keys.length;
927
- this.metrics.invalidations += 1;
1448
+ this.metricsCollector.increment("deletes", keys.length);
1449
+ this.metricsCollector.increment("invalidations");
928
1450
  this.logger.debug?.("delete", { keys });
929
1451
  this.emit("delete", { keys });
930
1452
  }
@@ -945,7 +1467,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
945
1467
  if (message.scope === "clear") {
946
1468
  await Promise.all(localLayers.map((layer) => layer.clear()));
947
1469
  await this.tagIndex.clear();
948
- this.accessProfiles.clear();
1470
+ this.ttlResolver.clearProfiles();
949
1471
  return;
950
1472
  }
951
1473
  const keys = message.keys ?? [];
@@ -953,10 +1475,16 @@ var CacheStack = class extends import_node_events.EventEmitter {
953
1475
  if (message.operation !== "write") {
954
1476
  for (const key of keys) {
955
1477
  await this.tagIndex.remove(key);
956
- this.accessProfiles.delete(key);
1478
+ this.ttlResolver.deleteProfile(key);
957
1479
  }
958
1480
  }
959
1481
  }
1482
+ async getTagsForKey(key) {
1483
+ if (this.tagIndex.tagsForKey) {
1484
+ return this.tagIndex.tagsForKey(key);
1485
+ }
1486
+ return [];
1487
+ }
960
1488
  formatError(error) {
961
1489
  if (error instanceof Error) {
962
1490
  return error.message;
@@ -983,13 +1511,15 @@ var CacheStack = class extends import_node_events.EventEmitter {
983
1511
  }
984
1512
  return;
985
1513
  }
986
- await Promise.all(keys.map(async (key) => {
987
- try {
988
- await layer.delete(key);
989
- } catch (error) {
990
- await this.handleLayerFailure(layer, "delete", error);
991
- }
992
- }));
1514
+ await Promise.all(
1515
+ keys.map(async (key) => {
1516
+ try {
1517
+ await layer.delete(key);
1518
+ } catch (error) {
1519
+ await this.handleLayerFailure(layer, "delete", error);
1520
+ }
1521
+ })
1522
+ );
993
1523
  })
994
1524
  );
995
1525
  }
@@ -1090,46 +1620,19 @@ var CacheStack = class extends import_node_events.EventEmitter {
1090
1620
  const ttl = remainingStoredTtlSeconds(refreshed);
1091
1621
  for (let index = 0; index <= hit.layerIndex; index += 1) {
1092
1622
  const layer = this.layers[index];
1093
- if (this.shouldSkipLayer(layer)) {
1623
+ if (!layer || this.shouldSkipLayer(layer)) {
1094
1624
  continue;
1095
1625
  }
1096
1626
  try {
1097
1627
  await layer.set(key, refreshed, ttl);
1098
1628
  } catch (error) {
1099
1629
  await this.handleLayerFailure(layer, "sliding-ttl", error);
1100
- }
1101
- }
1102
- }
1103
- if (fetcher && refreshAhead > 0 && remainingFreshTtl > 0 && remainingFreshTtl <= refreshAhead) {
1104
- this.scheduleBackgroundRefresh(key, fetcher, options);
1105
- }
1106
- }
1107
- applyAdaptiveTtl(key, layerName, ttl, adaptiveTtl) {
1108
- if (!ttl || !adaptiveTtl) {
1109
- return ttl;
1110
- }
1111
- const profile = this.accessProfiles.get(key);
1112
- if (!profile) {
1113
- return ttl;
1630
+ }
1631
+ }
1114
1632
  }
1115
- const config = adaptiveTtl === true ? {} : adaptiveTtl;
1116
- const hotAfter = config.hotAfter ?? 3;
1117
- if (profile.hits < hotAfter) {
1118
- return ttl;
1633
+ if (fetcher && refreshAhead > 0 && remainingFreshTtl > 0 && remainingFreshTtl <= refreshAhead) {
1634
+ this.scheduleBackgroundRefresh(key, fetcher, options);
1119
1635
  }
1120
- const step = this.resolveLayerSeconds(layerName, config.step, void 0, Math.max(1, Math.round(ttl / 2))) ?? 0;
1121
- const maxTtl = this.resolveLayerSeconds(layerName, config.maxTtl, void 0, ttl + step * 4) ?? ttl;
1122
- const multiplier = Math.floor(profile.hits / hotAfter);
1123
- return Math.min(maxTtl, ttl + step * multiplier);
1124
- }
1125
- recordAccess(key) {
1126
- const profile = this.accessProfiles.get(key) ?? { hits: 0, lastAccessAt: Date.now() };
1127
- profile.hits += 1;
1128
- profile.lastAccessAt = Date.now();
1129
- this.accessProfiles.set(key, profile);
1130
- }
1131
- incrementMetricMap(target, key) {
1132
- target[key] = (target[key] ?? 0) + 1;
1133
1636
  }
1134
1637
  shouldSkipLayer(layer) {
1135
1638
  const degradedUntil = this.layerDegradedUntil.get(layer.name);
@@ -1141,7 +1644,7 @@ var CacheStack = class extends import_node_events.EventEmitter {
1141
1644
  }
1142
1645
  const retryAfterMs = typeof this.options.gracefulDegradation === "object" ? this.options.gracefulDegradation.retryAfterMs ?? 1e4 : 1e4;
1143
1646
  this.layerDegradedUntil.set(layer.name, Date.now() + retryAfterMs);
1144
- this.metrics.degradedOperations += 1;
1647
+ this.metricsCollector.increment("degradedOperations");
1145
1648
  this.logger.warn?.("layer-degraded", { layer: layer.name, operation, error: this.formatError(error) });
1146
1649
  this.emitError(operation, { layer: layer.name, degraded: true, error: this.formatError(error) });
1147
1650
  return null;
@@ -1149,37 +1652,15 @@ var CacheStack = class extends import_node_events.EventEmitter {
1149
1652
  isGracefulDegradationEnabled() {
1150
1653
  return Boolean(this.options.gracefulDegradation);
1151
1654
  }
1152
- assertCircuitClosed(key, options) {
1153
- const state = this.circuitBreakers.get(key);
1154
- if (!state?.openUntil) {
1155
- return;
1156
- }
1157
- if (state.openUntil <= Date.now()) {
1158
- state.openUntil = null;
1159
- state.failures = 0;
1160
- this.circuitBreakers.set(key, state);
1161
- return;
1162
- }
1163
- this.emitError("circuit-breaker-open", { key, openUntil: state.openUntil });
1164
- throw new Error(`Circuit breaker is open for key "${key}".`);
1165
- }
1166
1655
  recordCircuitFailure(key, options, error) {
1167
1656
  if (!options) {
1168
1657
  return;
1169
1658
  }
1170
- const failureThreshold = options.failureThreshold ?? 3;
1171
- const cooldownMs = options.cooldownMs ?? 3e4;
1172
- const state = this.circuitBreakers.get(key) ?? { failures: 0, openUntil: null };
1173
- state.failures += 1;
1174
- if (state.failures >= failureThreshold) {
1175
- state.openUntil = Date.now() + cooldownMs;
1176
- this.metrics.circuitBreakerTrips += 1;
1659
+ this.circuitBreakerManager.recordFailure(key, options);
1660
+ if (this.circuitBreakerManager.isOpen(key)) {
1661
+ this.metricsCollector.increment("circuitBreakerTrips");
1177
1662
  }
1178
- this.circuitBreakers.set(key, state);
1179
- this.emitError("fetch", { key, error: this.formatError(error), failures: state.failures });
1180
- }
1181
- resetCircuitBreaker(key) {
1182
- this.circuitBreakers.delete(key);
1663
+ this.emitError("fetch", { key, error: this.formatError(error) });
1183
1664
  }
1184
1665
  isNegativeStoredValue(stored) {
1185
1666
  return isStoredValueEnvelope(stored) && stored.kind === "empty";
@@ -1224,35 +1705,36 @@ var RedisInvalidationBus = class {
1224
1705
  channel;
1225
1706
  publisher;
1226
1707
  subscriber;
1227
- activeListener;
1708
+ handlers = /* @__PURE__ */ new Set();
1709
+ sharedListener;
1228
1710
  constructor(options) {
1229
1711
  this.publisher = options.publisher;
1230
1712
  this.subscriber = options.subscriber ?? options.publisher.duplicate();
1231
1713
  this.channel = options.channel ?? "layercache:invalidation";
1232
1714
  }
1233
1715
  async subscribe(handler) {
1234
- if (this.activeListener) {
1235
- throw new Error("RedisInvalidationBus already has an active subscription.");
1236
- }
1237
- const listener = (_channel, payload) => {
1238
- void this.handleMessage(payload, handler);
1239
- };
1240
- this.activeListener = listener;
1241
- this.subscriber.on("message", listener);
1242
- await this.subscriber.subscribe(this.channel);
1716
+ if (this.handlers.size === 0) {
1717
+ const listener = (_channel, payload) => {
1718
+ void this.dispatchToHandlers(payload);
1719
+ };
1720
+ this.sharedListener = listener;
1721
+ this.subscriber.on("message", listener);
1722
+ await this.subscriber.subscribe(this.channel);
1723
+ }
1724
+ this.handlers.add(handler);
1243
1725
  return async () => {
1244
- if (this.activeListener !== listener) {
1245
- return;
1726
+ this.handlers.delete(handler);
1727
+ if (this.handlers.size === 0 && this.sharedListener) {
1728
+ this.subscriber.off("message", this.sharedListener);
1729
+ this.sharedListener = void 0;
1730
+ await this.subscriber.unsubscribe(this.channel);
1246
1731
  }
1247
- this.activeListener = void 0;
1248
- this.subscriber.off("message", listener);
1249
- await this.subscriber.unsubscribe(this.channel);
1250
1732
  };
1251
1733
  }
1252
1734
  async publish(message) {
1253
1735
  await this.publisher.publish(this.channel, JSON.stringify(message));
1254
1736
  }
1255
- async handleMessage(payload, handler) {
1737
+ async dispatchToHandlers(payload) {
1256
1738
  let message;
1257
1739
  try {
1258
1740
  const parsed = JSON.parse(payload);
@@ -1264,11 +1746,15 @@ var RedisInvalidationBus = class {
1264
1746
  this.reportError("invalid invalidation payload", error);
1265
1747
  return;
1266
1748
  }
1267
- try {
1268
- await handler(message);
1269
- } catch (error) {
1270
- this.reportError("invalidation handler failed", error);
1271
- }
1749
+ await Promise.all(
1750
+ [...this.handlers].map(async (handler) => {
1751
+ try {
1752
+ await handler(message);
1753
+ } catch (error) {
1754
+ this.reportError("invalidation handler failed", error);
1755
+ }
1756
+ })
1757
+ );
1272
1758
  }
1273
1759
  isInvalidationMessage(value) {
1274
1760
  if (!value || typeof value !== "object") {
@@ -1329,6 +1815,9 @@ var RedisTagIndex = class {
1329
1815
  async keysForTag(tag) {
1330
1816
  return this.client.smembers(this.tagKeysKey(tag));
1331
1817
  }
1818
+ async tagsForKey(key) {
1819
+ return this.client.smembers(this.keyTagsKey(key));
1820
+ }
1332
1821
  async matchPattern(pattern) {
1333
1822
  const matches = [];
1334
1823
  let cursor = "0";
@@ -1419,6 +1908,39 @@ function createFastifyLayercachePlugin(cache, options = {}) {
1419
1908
  };
1420
1909
  }
1421
1910
 
1911
+ // src/integrations/express.ts
1912
+ function createExpressCacheMiddleware(cache, options = {}) {
1913
+ const allowedMethods = new Set((options.methods ?? ["GET"]).map((m) => m.toUpperCase()));
1914
+ return async (req, res, next) => {
1915
+ const method = (req.method ?? "GET").toUpperCase();
1916
+ if (!allowedMethods.has(method)) {
1917
+ next();
1918
+ return;
1919
+ }
1920
+ const key = options.keyResolver ? options.keyResolver(req) : `${method}:${req.originalUrl ?? req.url ?? "/"}`;
1921
+ const cached = await cache.get(key, void 0, options);
1922
+ if (cached !== null) {
1923
+ res.setHeader?.("content-type", "application/json; charset=utf-8");
1924
+ res.setHeader?.("x-cache", "HIT");
1925
+ if (res.json) {
1926
+ res.json(cached);
1927
+ } else {
1928
+ res.end?.(JSON.stringify(cached));
1929
+ }
1930
+ return;
1931
+ }
1932
+ const originalJson = res.json?.bind(res);
1933
+ if (originalJson) {
1934
+ res.json = (body) => {
1935
+ res.setHeader?.("x-cache", "MISS");
1936
+ void cache.set(key, body, options);
1937
+ return originalJson(body);
1938
+ };
1939
+ }
1940
+ next();
1941
+ };
1942
+ }
1943
+
1422
1944
  // src/integrations/graphql.ts
1423
1945
  function cacheGraphqlResolver(cache, prefix, resolver, options = {}) {
1424
1946
  const wrapped = cache.wrap(prefix, resolver, {
@@ -1459,11 +1981,13 @@ var MemoryLayer = class {
1459
1981
  defaultTtl;
1460
1982
  isLocal = true;
1461
1983
  maxSize;
1984
+ evictionPolicy;
1462
1985
  entries = /* @__PURE__ */ new Map();
1463
1986
  constructor(options = {}) {
1464
1987
  this.name = options.name ?? "memory";
1465
1988
  this.defaultTtl = options.ttl;
1466
1989
  this.maxSize = options.maxSize ?? 1e3;
1990
+ this.evictionPolicy = options.evictionPolicy ?? "lru";
1467
1991
  }
1468
1992
  async get(key) {
1469
1993
  const value = await this.getEntry(key);
@@ -1478,8 +2002,13 @@ var MemoryLayer = class {
1478
2002
  this.entries.delete(key);
1479
2003
  return null;
1480
2004
  }
1481
- this.entries.delete(key);
1482
- this.entries.set(key, entry);
2005
+ if (this.evictionPolicy === "lru") {
2006
+ this.entries.delete(key);
2007
+ entry.accessCount += 1;
2008
+ this.entries.set(key, entry);
2009
+ } else if (this.evictionPolicy === "lfu") {
2010
+ entry.accessCount += 1;
2011
+ }
1483
2012
  return entry.value;
1484
2013
  }
1485
2014
  async getMany(keys) {
@@ -1493,16 +2022,43 @@ var MemoryLayer = class {
1493
2022
  this.entries.delete(key);
1494
2023
  this.entries.set(key, {
1495
2024
  value,
1496
- expiresAt: ttl && ttl > 0 ? Date.now() + ttl * 1e3 : null
2025
+ expiresAt: ttl && ttl > 0 ? Date.now() + ttl * 1e3 : null,
2026
+ accessCount: 0,
2027
+ insertedAt: Date.now()
1497
2028
  });
1498
2029
  while (this.entries.size > this.maxSize) {
1499
- const oldestKey = this.entries.keys().next().value;
1500
- if (!oldestKey) {
1501
- break;
1502
- }
1503
- this.entries.delete(oldestKey);
2030
+ this.evict();
1504
2031
  }
1505
2032
  }
2033
+ async has(key) {
2034
+ const entry = this.entries.get(key);
2035
+ if (!entry) {
2036
+ return false;
2037
+ }
2038
+ if (this.isExpired(entry)) {
2039
+ this.entries.delete(key);
2040
+ return false;
2041
+ }
2042
+ return true;
2043
+ }
2044
+ async ttl(key) {
2045
+ const entry = this.entries.get(key);
2046
+ if (!entry) {
2047
+ return null;
2048
+ }
2049
+ if (this.isExpired(entry)) {
2050
+ this.entries.delete(key);
2051
+ return null;
2052
+ }
2053
+ if (entry.expiresAt === null) {
2054
+ return null;
2055
+ }
2056
+ return Math.max(0, Math.ceil((entry.expiresAt - Date.now()) / 1e3));
2057
+ }
2058
+ async size() {
2059
+ this.pruneExpired();
2060
+ return this.entries.size;
2061
+ }
1506
2062
  async delete(key) {
1507
2063
  this.entries.delete(key);
1508
2064
  }
@@ -1533,15 +2089,35 @@ var MemoryLayer = class {
1533
2089
  }
1534
2090
  this.entries.set(entry.key, {
1535
2091
  value: entry.value,
1536
- expiresAt: entry.expiresAt
2092
+ expiresAt: entry.expiresAt,
2093
+ accessCount: 0,
2094
+ insertedAt: Date.now()
1537
2095
  });
1538
2096
  }
1539
2097
  while (this.entries.size > this.maxSize) {
2098
+ this.evict();
2099
+ }
2100
+ }
2101
+ evict() {
2102
+ if (this.evictionPolicy === "lru" || this.evictionPolicy === "fifo") {
1540
2103
  const oldestKey = this.entries.keys().next().value;
1541
- if (!oldestKey) {
1542
- break;
2104
+ if (oldestKey !== void 0) {
2105
+ this.entries.delete(oldestKey);
2106
+ }
2107
+ return;
2108
+ }
2109
+ let victimKey;
2110
+ let minCount = Number.POSITIVE_INFINITY;
2111
+ let minInsertedAt = Number.POSITIVE_INFINITY;
2112
+ for (const [key, entry] of this.entries.entries()) {
2113
+ if (entry.accessCount < minCount || entry.accessCount === minCount && entry.insertedAt < minInsertedAt) {
2114
+ minCount = entry.accessCount;
2115
+ minInsertedAt = entry.insertedAt;
2116
+ victimKey = key;
1543
2117
  }
1544
- this.entries.delete(oldestKey);
2118
+ }
2119
+ if (victimKey !== void 0) {
2120
+ this.entries.delete(victimKey);
1545
2121
  }
1546
2122
  }
1547
2123
  pruneExpired() {
@@ -1557,6 +2133,7 @@ var MemoryLayer = class {
1557
2133
  };
1558
2134
 
1559
2135
  // src/layers/RedisLayer.ts
2136
+ var import_node_util = require("util");
1560
2137
  var import_node_zlib = require("zlib");
1561
2138
 
1562
2139
  // src/serialization/JsonSerializer.ts
@@ -1571,6 +2148,11 @@ var JsonSerializer = class {
1571
2148
  };
1572
2149
 
1573
2150
  // src/layers/RedisLayer.ts
2151
+ var BATCH_DELETE_SIZE = 500;
2152
+ var gzipAsync = (0, import_node_util.promisify)(import_node_zlib.gzip);
2153
+ var gunzipAsync = (0, import_node_util.promisify)(import_node_zlib.gunzip);
2154
+ var brotliCompressAsync = (0, import_node_util.promisify)(import_node_zlib.brotliCompress);
2155
+ var brotliDecompressAsync = (0, import_node_util.promisify)(import_node_zlib.brotliDecompress);
1574
2156
  var RedisLayer = class {
1575
2157
  name;
1576
2158
  defaultTtl;
@@ -1622,12 +2204,13 @@ var RedisLayer = class {
1622
2204
  if (error || payload === null || !this.isSerializablePayload(payload)) {
1623
2205
  return null;
1624
2206
  }
1625
- return this.deserializeOrDelete(keys[index], payload);
2207
+ return this.deserializeOrDelete(keys[index] ?? "", payload);
1626
2208
  })
1627
2209
  );
1628
2210
  }
1629
2211
  async set(key, value, ttl = this.defaultTtl) {
1630
- const payload = this.encodePayload(this.serializer.serialize(value));
2212
+ const serialized = this.serializer.serialize(value);
2213
+ const payload = await this.encodePayload(serialized);
1631
2214
  const normalizedKey = this.withPrefix(key);
1632
2215
  if (ttl && ttl > 0) {
1633
2216
  await this.client.set(normalizedKey, payload, "EX", ttl);
@@ -1644,14 +2227,44 @@ var RedisLayer = class {
1644
2227
  }
1645
2228
  await this.client.del(...keys.map((key) => this.withPrefix(key)));
1646
2229
  }
1647
- async clear() {
1648
- if (!this.prefix && !this.allowUnprefixedClear) {
1649
- throw new Error("RedisLayer.clear() requires a prefix or allowUnprefixedClear=true to avoid deleting unrelated keys.");
2230
+ async has(key) {
2231
+ const exists = await this.client.exists(this.withPrefix(key));
2232
+ return exists > 0;
2233
+ }
2234
+ async ttl(key) {
2235
+ const remaining = await this.client.ttl(this.withPrefix(key));
2236
+ if (remaining < 0) {
2237
+ return null;
1650
2238
  }
2239
+ return remaining;
2240
+ }
2241
+ async size() {
1651
2242
  const keys = await this.keys();
1652
- if (keys.length > 0) {
1653
- await this.deleteMany(keys);
2243
+ return keys.length;
2244
+ }
2245
+ /**
2246
+ * Deletes all keys matching the layer's prefix in batches to avoid
2247
+ * loading millions of keys into memory at once.
2248
+ */
2249
+ async clear() {
2250
+ if (!this.prefix && !this.allowUnprefixedClear) {
2251
+ throw new Error(
2252
+ "RedisLayer.clear() requires a prefix or allowUnprefixedClear=true to avoid deleting unrelated keys."
2253
+ );
1654
2254
  }
2255
+ const pattern = `${this.prefix}*`;
2256
+ let cursor = "0";
2257
+ do {
2258
+ const [nextCursor, keys] = await this.client.scan(cursor, "MATCH", pattern, "COUNT", this.scanCount);
2259
+ cursor = nextCursor;
2260
+ if (keys.length === 0) {
2261
+ continue;
2262
+ }
2263
+ for (let i = 0; i < keys.length; i += BATCH_DELETE_SIZE) {
2264
+ const batch = keys.slice(i, i + BATCH_DELETE_SIZE);
2265
+ await this.client.del(...batch);
2266
+ }
2267
+ } while (cursor !== "0");
1655
2268
  }
1656
2269
  async keys() {
1657
2270
  const pattern = `${this.prefix}*`;
@@ -1676,7 +2289,7 @@ var RedisLayer = class {
1676
2289
  }
1677
2290
  async deserializeOrDelete(key, payload) {
1678
2291
  try {
1679
- return this.serializer.deserialize(this.decodePayload(payload));
2292
+ return this.serializer.deserialize(await this.decodePayload(payload));
1680
2293
  } catch {
1681
2294
  await this.client.del(this.withPrefix(key)).catch(() => void 0);
1682
2295
  return null;
@@ -1685,7 +2298,11 @@ var RedisLayer = class {
1685
2298
  isSerializablePayload(payload) {
1686
2299
  return typeof payload === "string" || Buffer.isBuffer(payload);
1687
2300
  }
1688
- encodePayload(payload) {
2301
+ /**
2302
+ * Compresses the payload asynchronously if compression is enabled and the
2303
+ * payload exceeds the threshold. This avoids blocking the event loop.
2304
+ */
2305
+ async encodePayload(payload) {
1689
2306
  if (!this.compression) {
1690
2307
  return payload;
1691
2308
  }
@@ -1694,23 +2311,269 @@ var RedisLayer = class {
1694
2311
  return payload;
1695
2312
  }
1696
2313
  const header = Buffer.from(`LCZ1:${this.compression}:`);
1697
- const compressed = this.compression === "gzip" ? (0, import_node_zlib.gzipSync)(source) : (0, import_node_zlib.brotliCompressSync)(source);
2314
+ const compressed = this.compression === "gzip" ? await gzipAsync(source) : await brotliCompressAsync(source);
1698
2315
  return Buffer.concat([header, compressed]);
1699
2316
  }
1700
- decodePayload(payload) {
2317
+ /**
2318
+ * Decompresses the payload asynchronously if a compression header is present.
2319
+ */
2320
+ async decodePayload(payload) {
1701
2321
  if (!Buffer.isBuffer(payload)) {
1702
2322
  return payload;
1703
2323
  }
1704
2324
  if (payload.subarray(0, 10).toString() === "LCZ1:gzip:") {
1705
- return (0, import_node_zlib.gunzipSync)(payload.subarray(10));
2325
+ return gunzipAsync(payload.subarray(10));
1706
2326
  }
1707
2327
  if (payload.subarray(0, 12).toString() === "LCZ1:brotli:") {
1708
- return (0, import_node_zlib.brotliDecompressSync)(payload.subarray(12));
2328
+ return brotliDecompressAsync(payload.subarray(12));
1709
2329
  }
1710
2330
  return payload;
1711
2331
  }
1712
2332
  };
1713
2333
 
2334
+ // src/layers/DiskLayer.ts
2335
+ var import_node_crypto2 = require("crypto");
2336
+ var import_node_fs2 = require("fs");
2337
+ var import_node_path = require("path");
2338
var DiskLayer = class {
  name;
  defaultTtl;
  // Entries live on the local filesystem, so this layer is node-local.
  isLocal = true;
  directory;
  serializer;
  maxFiles;
  /**
   * File-backed cache layer. Each entry is stored as `<sha256(key)>.lc`
   * inside `options.directory`.
   *
   * @param options.directory  Directory that holds the cache files.
   * @param options.ttl        Default TTL in seconds (optional; falsy = no expiry).
   * @param options.name       Layer name, defaults to "disk".
   * @param options.serializer Entry serializer, defaults to JsonSerializer.
   * @param options.maxFiles   Soft cap on stored files; oldest (by mtime) are evicted.
   */
  constructor(options) {
    this.directory = options.directory;
    this.defaultTtl = options.ttl;
    this.name = options.name ?? "disk";
    this.serializer = options.serializer ?? new JsonSerializer();
    this.maxFiles = options.maxFiles;
  }
  async get(key) {
    return unwrapStoredValue(await this.getEntry(key));
  }
  /**
   * Reads the stored value for a key. Expired entries are deleted and
   * reported as null; corrupt files are cleaned up by readEntryFile.
   */
  async getEntry(key) {
    const filePath = this.keyToPath(key);
    const entry = await this.readEntryFile(filePath);
    if (entry === null) {
      return null;
    }
    if (entry.expiresAt !== null && entry.expiresAt <= Date.now()) {
      await this.safeDelete(filePath);
      return null;
    }
    return entry.value;
  }
  async set(key, value, ttl = this.defaultTtl) {
    await import_node_fs2.promises.mkdir(this.directory, { recursive: true });
    const entry = {
      key,
      value,
      // Absolute epoch-ms deadline; null means "never expires".
      expiresAt: ttl && ttl > 0 ? Date.now() + ttl * 1e3 : null
    };
    const payload = this.serializer.serialize(entry);
    await import_node_fs2.promises.writeFile(this.keyToPath(key), payload);
    if (this.maxFiles !== void 0) {
      await this.enforceMaxFiles();
    }
  }
  async has(key) {
    const value = await this.getEntry(key);
    return value !== null;
  }
  /**
   * Remaining TTL in whole seconds, or null when the entry is missing,
   * unreadable, persistent, or already expired.
   */
  async ttl(key) {
    const entry = await this.readEntryFile(this.keyToPath(key));
    if (entry === null || entry.expiresAt === null) {
      return null;
    }
    const remaining = Math.ceil((entry.expiresAt - Date.now()) / 1e3);
    return remaining > 0 ? remaining : null;
  }
  async delete(key) {
    await this.safeDelete(this.keyToPath(key));
  }
  async deleteMany(keys) {
    await Promise.all(keys.map((key) => this.delete(key)));
  }
  async clear() {
    let entries;
    try {
      entries = await import_node_fs2.promises.readdir(this.directory);
    } catch {
      // Directory missing or unreadable: nothing to clear.
      return;
    }
    await Promise.all(
      entries.filter((name) => name.endsWith(".lc")).map((name) => this.safeDelete((0, import_node_path.join)(this.directory, name)))
    );
  }
  /**
   * Returns the original cache key strings stored on disk.
   * Expired entries are skipped and cleaned up during the scan.
   */
  async keys() {
    let entries;
    try {
      entries = await import_node_fs2.promises.readdir(this.directory);
    } catch {
      return [];
    }
    const keys = [];
    await Promise.all(
      entries.filter((name) => name.endsWith(".lc")).map(async (name) => {
        const filePath = (0, import_node_path.join)(this.directory, name);
        const entry = await this.readEntryFile(filePath);
        if (entry === null) {
          return;
        }
        if (entry.expiresAt !== null && entry.expiresAt <= Date.now()) {
          await this.safeDelete(filePath);
          return;
        }
        keys.push(entry.key);
      })
    );
    return keys;
  }
  async size() {
    const keys = await this.keys();
    return keys.length;
  }
  /**
   * Reads and deserializes a single cache file. Returns null when the file
   * cannot be read (e.g. missing); files that fail to deserialize are deleted
   * before returning null so corruption cannot poison later reads.
   */
  async readEntryFile(filePath) {
    let raw;
    try {
      raw = await import_node_fs2.promises.readFile(filePath);
    } catch {
      return null;
    }
    try {
      return this.serializer.deserialize(raw);
    } catch {
      await this.safeDelete(filePath);
      return null;
    }
  }
  keyToPath(key) {
    // Hashing keeps arbitrary key strings filesystem-safe and length-bounded.
    const hash = (0, import_node_crypto2.createHash)("sha256").update(key).digest("hex");
    return (0, import_node_path.join)(this.directory, `${hash}.lc`);
  }
  /** Best-effort unlink; a missing file is not an error. */
  async safeDelete(filePath) {
    try {
      await import_node_fs2.promises.unlink(filePath);
    } catch {
    }
  }
  /**
   * Removes the oldest files (by mtime) when the directory exceeds maxFiles.
   */
  async enforceMaxFiles() {
    if (this.maxFiles === void 0) {
      return;
    }
    let entries;
    try {
      entries = await import_node_fs2.promises.readdir(this.directory);
    } catch {
      return;
    }
    const lcFiles = entries.filter((name) => name.endsWith(".lc"));
    if (lcFiles.length <= this.maxFiles) {
      return;
    }
    const withStats = await Promise.all(
      lcFiles.map(async (name) => {
        const filePath = (0, import_node_path.join)(this.directory, name);
        try {
          const stat = await import_node_fs2.promises.stat(filePath);
          return { filePath, mtimeMs: stat.mtimeMs };
        } catch {
          // Un-stat-able files sort first and are evicted eagerly.
          return { filePath, mtimeMs: 0 };
        }
      })
    );
    withStats.sort((a, b) => a.mtimeMs - b.mtimeMs);
    const toEvict = withStats.slice(0, lcFiles.length - this.maxFiles);
    await Promise.all(toEvict.map(({ filePath }) => this.safeDelete(filePath)));
  }
};
2518
+
2519
// src/layers/MemcachedLayer.ts
var MemcachedLayer = class {
  name;
  defaultTtl;
  // Remote layer shared across processes, so never treated as node-local.
  isLocal = false;
  client;
  keyPrefix;
  serializer;
  /**
   * Cache layer backed by a memcached client exposing async
   * get/set/delete (get resolving to an object with a `value` field).
   *
   * @param options.client     Memcached client instance.
   * @param options.ttl        Default TTL in seconds (optional).
   * @param options.name       Layer name, defaults to "memcached".
   * @param options.keyPrefix  Prefix prepended to every key, defaults to "".
   * @param options.serializer Value serializer, defaults to JsonSerializer.
   */
  constructor(options) {
    this.client = options.client;
    this.defaultTtl = options.ttl;
    this.name = options.name ?? "memcached";
    this.keyPrefix = options.keyPrefix ?? "";
    this.serializer = options.serializer ?? new JsonSerializer();
  }
  async get(key) {
    return unwrapStoredValue(await this.getEntry(key));
  }
  /** Returns the deserialized entry, or null on miss or undecodable payload. */
  async getEntry(key) {
    const result = await this.client.get(this.withPrefix(key));
    if (!result || result.value === null) {
      return null;
    }
    try {
      return this.serializer.deserialize(result.value);
    } catch {
      return null;
    }
  }
  async getMany(keys) {
    return Promise.all(keys.map((key) => this.getEntry(key)));
  }
  async set(key, value, ttl = this.defaultTtl) {
    const payload = this.serializer.serialize(value);
    await this.client.set(this.withPrefix(key), payload, {
      // Falsy/non-positive TTLs store without an expiry.
      expires: ttl && ttl > 0 ? ttl : void 0
    });
  }
  async has(key) {
    const result = await this.client.get(this.withPrefix(key));
    // Guard with a falsy check (not `result !== null`): some memcached clients
    // resolve to undefined on a miss, and the strict null comparison would let
    // the `.value` access throw. Mirrors the guard in getEntry().
    return Boolean(result) && result.value !== null;
  }
  async delete(key) {
    await this.client.delete(this.withPrefix(key));
  }
  async deleteMany(keys) {
    await Promise.all(keys.map((key) => this.delete(key)));
  }
  /** Memcached has no key enumeration, so a scoped flush is impossible. */
  async clear() {
    throw new Error(
      "MemcachedLayer.clear() is not supported. Use a key prefix and rotate it to effectively invalidate all keys."
    );
  }
  withPrefix(key) {
    return `${this.keyPrefix}${key}`;
  }
};
2576
+
1714
2577
  // src/serialization/MsgpackSerializer.ts
1715
2578
  var import_msgpack = require("@msgpack/msgpack");
1716
2579
  var MsgpackSerializer = class {
@@ -1724,7 +2587,7 @@ var MsgpackSerializer = class {
1724
2587
  };
1725
2588
 
1726
2589
  // src/singleflight/RedisSingleFlightCoordinator.ts
1727
- var import_node_crypto2 = require("crypto");
2590
+ var import_node_crypto3 = require("crypto");
1728
2591
  var RELEASE_SCRIPT = `
1729
2592
  if redis.call("get", KEYS[1]) == ARGV[1] then
1730
2593
  return redis.call("del", KEYS[1])
@@ -1740,7 +2603,7 @@ var RedisSingleFlightCoordinator = class {
1740
2603
  }
1741
2604
  async execute(key, options, worker, waiter) {
1742
2605
  const lockKey = `${this.prefix}:${encodeURIComponent(key)}`;
1743
- const token = (0, import_node_crypto2.randomUUID)();
2606
+ const token = (0, import_node_crypto3.randomUUID)();
1744
2607
  const acquired = await this.client.set(lockKey, token, "PX", options.leaseMs, "NX");
1745
2608
  if (acquired === "OK") {
1746
2609
  try {
@@ -1752,11 +2615,93 @@ var RedisSingleFlightCoordinator = class {
1752
2615
  return waiter();
1753
2616
  }
1754
2617
  };
2618
+
2619
// src/metrics/PrometheusExporter.ts
// Metric metadata in emission order: [name, type, help text].
var PROM_METRIC_HEADERS = [
  ["layercache_hits_total", "counter", "Total number of cache hits"],
  ["layercache_misses_total", "counter", "Total number of cache misses"],
  ["layercache_fetches_total", "counter", "Total fetcher invocations (full misses)"],
  ["layercache_sets_total", "counter", "Total number of cache sets"],
  ["layercache_deletes_total", "counter", "Total number of cache deletes"],
  ["layercache_backfills_total", "counter", "Total number of backfill operations"],
  ["layercache_stale_hits_total", "counter", "Total number of stale hits served"],
  ["layercache_refreshes_total", "counter", "Background refreshes triggered"],
  ["layercache_refresh_errors_total", "counter", "Background refresh errors"],
  ["layercache_negative_cache_hits_total", "counter", "Negative cache hits"],
  ["layercache_circuit_breaker_trips_total", "counter", "Circuit breaker trips"],
  ["layercache_degraded_operations_total", "counter", "Operations run in degraded mode"],
  ["layercache_hit_rate", "gauge", "Overall cache hit rate (0-1)"],
  ["layercache_hits_by_layer_total", "counter", "Hits broken down by layer"],
  ["layercache_misses_by_layer_total", "counter", "Misses broken down by layer"],
  ["layercache_layer_latency_avg_ms", "gauge", "Average read latency per layer in milliseconds"],
  ["layercache_layer_latency_max_ms", "gauge", "Maximum read latency per layer in milliseconds"],
  ["layercache_layer_latency_count", "counter", "Number of read latency samples per layer"]
];
/**
 * Builds a render function that serializes the metrics of one or more cache
 * stacks into the Prometheus text exposition format. Accepts either a single
 * stack (labeled cache="default") or an array of { stack, name } entries.
 */
function createPrometheusMetricsExporter(stacks) {
  return () => {
    const targets = Array.isArray(stacks) ? stacks : [{ stack: stacks, name: "default" }];
    const out = [];
    for (const [metric, type, help] of PROM_METRIC_HEADERS) {
      out.push(`# HELP ${metric} ${help}`);
      out.push(`# TYPE ${metric} ${type}`);
    }
    for (const { stack, name } of targets) {
      const metrics = stack.getMetrics();
      const hitRate = stack.getHitRate();
      const cacheLabel = `cache="${sanitizeLabel(name)}"`;
      const scalars = [
        ["layercache_hits_total", metrics.hits],
        ["layercache_misses_total", metrics.misses],
        ["layercache_fetches_total", metrics.fetches],
        ["layercache_sets_total", metrics.sets],
        ["layercache_deletes_total", metrics.deletes],
        ["layercache_backfills_total", metrics.backfills],
        ["layercache_stale_hits_total", metrics.staleHits],
        ["layercache_refreshes_total", metrics.refreshes],
        ["layercache_refresh_errors_total", metrics.refreshErrors],
        ["layercache_negative_cache_hits_total", metrics.negativeCacheHits],
        ["layercache_circuit_breaker_trips_total", metrics.circuitBreakerTrips],
        ["layercache_degraded_operations_total", metrics.degradedOperations],
        ["layercache_hit_rate", hitRate.overall.toFixed(6)]
      ];
      for (const [metric, value] of scalars) {
        out.push(`${metric}{${cacheLabel}} ${value}`);
      }
      for (const [layerName, count] of Object.entries(metrics.hitsByLayer)) {
        out.push(`layercache_hits_by_layer_total{${cacheLabel},layer="${sanitizeLabel(layerName)}"} ${count}`);
      }
      for (const [layerName, count] of Object.entries(metrics.missesByLayer)) {
        out.push(`layercache_misses_by_layer_total{${cacheLabel},layer="${sanitizeLabel(layerName)}"} ${count}`);
      }
      for (const [layerName, latency] of Object.entries(metrics.latencyByLayer)) {
        const fullLabel = `${cacheLabel},layer="${sanitizeLabel(layerName)}"`;
        out.push(`layercache_layer_latency_avg_ms{${fullLabel}} ${latency.avgMs.toFixed(4)}`);
        out.push(`layercache_layer_latency_max_ms{${fullLabel}} ${latency.maxMs.toFixed(4)}`);
        out.push(`layercache_layer_latency_count{${fullLabel}} ${latency.count}`);
      }
    }
    // Trailing empty element yields the newline Prometheus scrapes expect.
    out.push("");
    return out.join("\n");
  };
}
/** Replaces characters that would break a Prometheus label value. */
function sanitizeLabel(value) {
  return value.replace(/["\\\n]/g, "_");
}
1755
2697
  // Annotate the CommonJS export names for ESM import in node:
1756
2698
  0 && (module.exports = {
2699
+ CacheMissError,
1757
2700
  CacheNamespace,
1758
2701
  CacheStack,
2702
+ DiskLayer,
1759
2703
  JsonSerializer,
2704
+ MemcachedLayer,
1760
2705
  MemoryLayer,
1761
2706
  MsgpackSerializer,
1762
2707
  PatternMatcher,
@@ -1769,6 +2714,8 @@ var RedisSingleFlightCoordinator = class {
1769
2714
  cacheGraphqlResolver,
1770
2715
  createCacheStatsHandler,
1771
2716
  createCachedMethodDecorator,
2717
+ createExpressCacheMiddleware,
1772
2718
  createFastifyLayercachePlugin,
2719
+ createPrometheusMetricsExporter,
1773
2720
  createTrpcCacheMiddleware
1774
2721
  });