layercache 1.1.0 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -6,7 +6,7 @@
6
6
  [![npm downloads](https://img.shields.io/npm/dw/layercache)](https://www.npmjs.com/package/layercache)
7
7
  [![license](https://img.shields.io/npm/l/layercache)](LICENSE)
8
8
  [![TypeScript](https://img.shields.io/badge/TypeScript-first-blue)](https://www.typescriptlang.org/)
9
- [![test coverage](https://img.shields.io/badge/tests-49%20passing-brightgreen)](https://github.com/flyingsquirrel0419/layercache)
9
+ [![test coverage](https://img.shields.io/badge/tests-132%20passing-brightgreen)](https://github.com/flyingsquirrel0419/layercache)
10
10
 
11
11
  ```
12
12
  L1 hit ~0.01 ms ← served from memory, zero network
@@ -58,13 +58,17 @@ On a hit, the value is returned from the fastest layer that has it, and automati
58
58
  - **Event hooks** — `EventEmitter`-based events for hits, misses, stale serves, errors, and more
59
59
  - **Graceful degradation** — skip a failing layer for a configurable retry window
60
60
  - **Circuit breaker** — per-key or global; opens after N failures, recovers after cooldown
61
- - **Compression** — transparent gzip/brotli in `RedisLayer` with a byte threshold
62
- - **Metrics & stats** — per-layer hit/miss counters, circuit-breaker trips, degraded operations; HTTP stats handler
61
+ - **Compression** — transparent async gzip/brotli in `RedisLayer` (non-blocking) with a byte threshold
62
+ - **Metrics & stats** — per-layer hit/miss counters, **per-layer latency tracking**, circuit-breaker trips, degraded operations; HTTP stats handler
63
63
  - **Persistence** — `exportState` / `importState` for in-process snapshots; `persistToFile` / `restoreFromFile` for disk
64
64
  - **Admin CLI** — `layercache stats | keys | invalidate` against any Redis URL
65
- - **Framework integrations** — Fastify plugin, tRPC middleware, GraphQL resolver wrapper
65
+ - **Framework integrations** — Express middleware, Fastify plugin, tRPC middleware, GraphQL resolver wrapper
66
66
  - **MessagePack serializer** — drop-in replacement for lower Redis memory usage
67
- - **NestJS module** — `CacheStackModule.forRoot(...)` with `@InjectCacheStack()`
67
+ - **NestJS module** — `CacheStackModule.forRoot(...)` and `forRootAsync(...)` with `@InjectCacheStack()`
68
+ - **`getOrThrow()`** — throws `CacheMissError` instead of returning `null`, for strict use cases
69
+ - **`inspect()`** — debug a key: see which layers hold it, remaining TTLs, tags, and staleness state
70
+ - **Conditional caching** — `shouldCache` predicate to skip caching specific fetcher results
71
+ - **Nested namespaces** — `namespace('a').namespace('b')` for composable key prefixes
68
72
  - **Custom layers** — implement the 5-method `CacheLayer` interface to plug in Memcached, DynamoDB, or anything else
69
73
  - **ESM + CJS** — works with both module systems, Node.js ≥ 18
70
74
 
@@ -250,6 +254,55 @@ const posts = cache.namespace('posts')
250
254
 
251
255
  await users.set('123', userData) // stored as "users:123"
252
256
  await users.clear() // only deletes "users:*"
257
+
258
+ // Nested namespaces
259
+ const tenant = cache.namespace('tenant:abc')
260
+ const tenantPosts = tenant.namespace('posts')
261
+ await tenantPosts.set('1', postData) // stored as "tenant:abc:posts:1"
262
+ ```
263
+
264
+ ### `cache.getOrThrow<T>(key, fetcher?, options?): Promise<T>`
265
+
266
+ Like `get()`, but throws `CacheMissError` instead of returning `null`. Useful when you know the value must exist (e.g. after a warm-up).
267
+
268
+ ```ts
269
+ import { CacheMissError } from 'layercache'
270
+
271
+ try {
272
+ const config = await cache.getOrThrow<Config>('app:config')
273
+ } catch (err) {
274
+ if (err instanceof CacheMissError) {
275
+ console.error(`Missing key: ${err.key}`)
276
+ }
277
+ }
278
+ ```
279
+
280
+ ### `cache.inspect(key): Promise<CacheInspectResult | null>`
281
+
282
+ Returns detailed metadata about a cache key for debugging. Returns `null` if the key is not in any layer.
283
+
284
+ ```ts
285
+ const info = await cache.inspect('user:123')
286
+ // {
287
+ // key: 'user:123',
288
+ // foundInLayers: ['memory', 'redis'],
289
+ // freshTtlSeconds: 45,
290
+ // staleTtlSeconds: 75,
291
+ // errorTtlSeconds: 345,
292
+ // isStale: false,
293
+ // tags: ['user', 'user:123']
294
+ // }
295
+ ```
296
+
297
+ ### Conditional caching with `shouldCache`
298
+
299
+ Skip caching specific results without affecting the return value:
300
+
301
+ ```ts
302
+ const data = await cache.get('api:response', fetchFromApi, {
303
+ shouldCache: (value) => (value as any).status === 200
304
+ })
305
+ // If fetchFromApi returns { status: 500 }, the value is returned but NOT cached
253
306
  ```
254
307
 
255
308
  ---
@@ -591,6 +644,26 @@ cache.on('error', ({ event, context }) => logger.error(event, context))
591
644
 
592
645
  ## Framework integrations
593
646
 
647
+ ### Express
648
+
649
+ ```ts
650
+ import { CacheStack, MemoryLayer, createExpressCacheMiddleware } from 'layercache'
651
+
652
+ const cache = new CacheStack([new MemoryLayer({ ttl: 60 })])
653
+
654
+ // Automatically caches GET responses as JSON
655
+ app.get('/api/users', createExpressCacheMiddleware(cache, { ttl: 30 }), async (req, res) => {
656
+ res.json(await db.getUsers())
657
+ })
658
+
659
+ // Custom key resolver + tag support
660
+ app.get('/api/user/:id', createExpressCacheMiddleware(cache, {
661
+ keyResolver: (req) => `user:${req.url}`,
662
+ tags: ['users'],
663
+ ttl: 60
664
+ }), handler)
665
+ ```
666
+
594
667
  ### tRPC
595
668
 
596
669
  ```ts
@@ -699,6 +772,25 @@ import { CacheStackModule } from '@cachestack/nestjs'
699
772
  export class AppModule {}
700
773
  ```
701
774
 
775
+ Async configuration (resolve dependencies from DI):
776
+
777
+ ```ts
778
+ @Module({
779
+ imports: [
780
+ CacheStackModule.forRootAsync({
781
+ inject: [ConfigService],
782
+ useFactory: (config: ConfigService) => ({
783
+ layers: [
784
+ new MemoryLayer({ ttl: 20 }),
785
+ new RedisLayer({ client: new Redis(config.get('REDIS_URL')), ttl: 300 })
786
+ ]
787
+ })
788
+ })
789
+ ]
790
+ })
791
+ export class AppModule {}
792
+ ```
793
+
702
794
  ```ts
703
795
  // your.service.ts
704
796
  import { InjectCacheStack } from '@cachestack/nestjs'
@@ -80,6 +80,9 @@ var RedisTagIndex = class {
80
80
  async keysForTag(tag) {
81
81
  return this.client.smembers(this.tagKeysKey(tag));
82
82
  }
83
+ async tagsForKey(key) {
84
+ return this.client.smembers(this.keyTagsKey(key));
85
+ }
83
86
  async matchPattern(pattern) {
84
87
  const matches = [];
85
88
  let cursor = "0";
package/dist/cli.cjs CHANGED
@@ -118,6 +118,9 @@ var RedisTagIndex = class {
118
118
  async keysForTag(tag) {
119
119
  return this.client.smembers(this.tagKeysKey(tag));
120
120
  }
121
+ async tagsForKey(key) {
122
+ return this.client.smembers(this.keyTagsKey(key));
123
+ }
121
124
  async matchPattern(pattern) {
122
125
  const matches = [];
123
126
  let cursor = "0";
@@ -211,7 +214,7 @@ async function main(argv = process.argv.slice(2)) {
211
214
  const tagIndex = new RedisTagIndex({ client: redis, prefix: args.tagIndexPrefix ?? "layercache:tag-index" });
212
215
  const keys2 = await tagIndex.keysForTag(args.tag);
213
216
  if (keys2.length > 0) {
214
- await redis.del(...keys2);
217
+ await batchDelete(redis, keys2);
215
218
  }
216
219
  process.stdout.write(`${JSON.stringify({ deletedKeys: keys2.length, tag: args.tag }, null, 2)}
217
220
  `);
@@ -219,7 +222,7 @@ async function main(argv = process.argv.slice(2)) {
219
222
  }
220
223
  const keys = await scanKeys(redis, args.pattern ?? "*");
221
224
  if (keys.length > 0) {
222
- await redis.del(...keys);
225
+ await batchDelete(redis, keys);
223
226
  }
224
227
  process.stdout.write(`${JSON.stringify({ deletedKeys: keys.length, pattern: args.pattern ?? "*" }, null, 2)}
225
228
  `);
@@ -272,6 +275,13 @@ function parseArgs(argv) {
272
275
  }
273
276
  return parsed;
274
277
  }
278
+ var BATCH_DELETE_SIZE = 500;
279
+ async function batchDelete(redis, keys) {
280
+ for (let i = 0; i < keys.length; i += BATCH_DELETE_SIZE) {
281
+ const batch = keys.slice(i, i + BATCH_DELETE_SIZE);
282
+ await redis.del(...batch);
283
+ }
284
+ }
275
285
  async function scanKeys(redis, pattern) {
276
286
  const keys = [];
277
287
  let cursor = "0";
package/dist/cli.js CHANGED
@@ -1,7 +1,7 @@
1
1
  #!/usr/bin/env node
2
2
  import {
3
3
  RedisTagIndex
4
- } from "./chunk-QUB5VZFZ.js";
4
+ } from "./chunk-BWM4MU2X.js";
5
5
 
6
6
  // src/cli.ts
7
7
  import Redis from "ioredis";
@@ -51,7 +51,7 @@ async function main(argv = process.argv.slice(2)) {
51
51
  const tagIndex = new RedisTagIndex({ client: redis, prefix: args.tagIndexPrefix ?? "layercache:tag-index" });
52
52
  const keys2 = await tagIndex.keysForTag(args.tag);
53
53
  if (keys2.length > 0) {
54
- await redis.del(...keys2);
54
+ await batchDelete(redis, keys2);
55
55
  }
56
56
  process.stdout.write(`${JSON.stringify({ deletedKeys: keys2.length, tag: args.tag }, null, 2)}
57
57
  `);
@@ -59,7 +59,7 @@ async function main(argv = process.argv.slice(2)) {
59
59
  }
60
60
  const keys = await scanKeys(redis, args.pattern ?? "*");
61
61
  if (keys.length > 0) {
62
- await redis.del(...keys);
62
+ await batchDelete(redis, keys);
63
63
  }
64
64
  process.stdout.write(`${JSON.stringify({ deletedKeys: keys.length, pattern: args.pattern ?? "*" }, null, 2)}
65
65
  `);
@@ -112,6 +112,13 @@ function parseArgs(argv) {
112
112
  }
113
113
  return parsed;
114
114
  }
115
+ var BATCH_DELETE_SIZE = 500;
116
+ async function batchDelete(redis, keys) {
117
+ for (let i = 0; i < keys.length; i += BATCH_DELETE_SIZE) {
118
+ const batch = keys.slice(i, i + BATCH_DELETE_SIZE);
119
+ await redis.del(...batch);
120
+ }
121
+ }
115
122
  async function scanKeys(redis, pattern) {
116
123
  const keys = [];
117
124
  let cursor = "0";