@md-oss/cache 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +5 -0
- package/README.md +471 -0
- package/dist/async.cjs +2 -0
- package/dist/async.cjs.map +1 -0
- package/dist/async.d.cts +92 -0
- package/dist/async.d.mts +92 -0
- package/dist/async.mjs +2 -0
- package/dist/async.mjs.map +1 -0
- package/dist/env.cjs +2 -0
- package/dist/env.cjs.map +1 -0
- package/dist/env.d.cts +5 -0
- package/dist/env.d.mts +5 -0
- package/dist/env.mjs +2 -0
- package/dist/env.mjs.map +1 -0
- package/dist/index.cjs +2 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +13 -0
- package/dist/index.d.mts +13 -0
- package/dist/index.mjs +2 -0
- package/dist/index.mjs.map +1 -0
- package/dist/lru.cjs +2 -0
- package/dist/lru.cjs.map +1 -0
- package/dist/lru.d.cts +9 -0
- package/dist/lru.d.mts +9 -0
- package/dist/lru.mjs +2 -0
- package/dist/lru.mjs.map +1 -0
- package/dist/manager.cjs +2 -0
- package/dist/manager.cjs.map +1 -0
- package/dist/manager.d.cts +69 -0
- package/dist/manager.d.mts +69 -0
- package/dist/manager.mjs +2 -0
- package/dist/manager.mjs.map +1 -0
- package/dist/promise.cjs +2 -0
- package/dist/promise.cjs.map +1 -0
- package/dist/promise.d.cts +12 -0
- package/dist/promise.d.mts +12 -0
- package/dist/promise.mjs +2 -0
- package/dist/promise.mjs.map +1 -0
- package/dist/redis-client.cjs +2 -0
- package/dist/redis-client.cjs.map +1 -0
- package/dist/redis-client.d.cts +27 -0
- package/dist/redis-client.d.mts +27 -0
- package/dist/redis-client.mjs +2 -0
- package/dist/redis-client.mjs.map +1 -0
- package/dist/types.d.cts +60 -0
- package/dist/types.d.mts +60 -0
- package/package.json +127 -0
package/LICENSE
ADDED
@@ -0,0 +1,5 @@
Copyright 2026 Mirasaki Development

Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,471 @@
# @md-oss/cache

Multi-layer caching with LRU, Redis, and async data fetching support built on cache-manager and Keyv.

## Features

- **Multi-Store Support** - Use LRU cache, Redis, or combine multiple stores
- **Async Cache Manager** - Automatic data fetching with cache-aside pattern
- **Promise Caching** - Cache in-flight promises to prevent duplicate requests
- **Metadata Tracking** - Built-in statistics for hits, misses, and performance
- **TTL Support** - Configurable time-to-live for all cache entries
- **Event System** - Listen to cache operations (set, delete, clear, refresh)
- **Type-Safe** - Full TypeScript support with generics

## Installation

```bash
pnpm add @md-oss/cache
```

## Basic Usage

### LRU Cache Manager

```typescript
import { CacheManager, LRUCache } from '@md-oss/cache';
import Keyv from 'keyv';

// Create an in-memory LRU cache
const cache = CacheManager.fromStore<string>(
  new LRUCache({
    max: 1000, // Maximum 1000 items
    ttl: 60000 // 60 seconds TTL
  })
);

// Set a value
await cache.set('user:123', 'John Doe', 30000); // 30 second TTL

// Get a value
const user = await cache.get('user:123'); // 'John Doe'

// Delete a value
await cache.del('user:123');

// Clear all
await cache.clear();
```

### Async Cache Manager

Automatically fetch and cache data when not found:

```typescript
import { AsyncCacheManager } from '@md-oss/cache';
import Keyv from 'keyv';

const userCache = new AsyncCacheManager({
  stores: [new Keyv()],
  ttl: 60000, // 60 seconds
  dataFunction: async (userId: string) => {
    // This function runs only on cache miss
    const user = await db.users.findOne({ id: userId });
    return user;
  }
});

// First call - fetches from database and caches
const user1 = await userCache.get('user:123');

// Second call - returns from cache
const user2 = await userCache.get('user:123');

console.log(userCache.metadata);
// { hits: 1, misses: 1, added: 1, deleted: 0, ... }
```

### Promise Cache

Prevent duplicate in-flight requests:

```typescript
import { PromiseCache } from '@md-oss/cache';

const apiCache = new PromiseCache<User>(5000); // 5 second cache

async function getUser(id: string) {
  return apiCache.get(async () => {
    // This function won't run if a request is already in-flight
    return await fetch(`/api/users/${id}`).then(r => r.json());
  });
}

// These 3 calls will only make 1 API request
const [user1, user2, user3] = await Promise.all([
  getUser('123'),
  getUser('123'),
  getUser('123')
]);
```

## Redis Cache

### Setup

Set the Redis URL in your environment:

```env
REDIS_URL=redis://localhost:6379
```

### Usage

```typescript
import { CacheManager, initializeRedis, getRedisClient } from '@md-oss/cache';
import Keyv from 'keyv';
import KeyvRedis from '@keyv/redis';

// Initialize Redis connection
await initializeRedis();

// Get Redis client
const redisClient = getRedisClient();

// Use Redis as cache store
const cache = CacheManager.fromStore<User>(
  new Keyv({
    store: new KeyvRedis(redisClient)
  })
);

await cache.set('user:123', userData);
```

## Advanced Usage

### Multi-Store Caching

Layer multiple caches for optimal performance:

```typescript
import { CacheManager, LRUCache } from '@md-oss/cache';
import Keyv from 'keyv';
import KeyvRedis from '@keyv/redis';

// Create a multi-layer cache: LRU (L1) → Redis (L2)
const cache = new CacheManager<User>({
  stores: [
    // L1: Fast in-memory cache
    new Keyv({
      store: new LRUCache({ max: 500, ttl: 60000 })
    }),
    // L2: Shared Redis cache
    new Keyv({
      store: new KeyvRedis(redisClient)
    })
  ],
  ttl: 300000 // 5 minutes default TTL
});

// Get checks L1 first, then L2, populates higher levels on hit
const user = await cache.get('user:123');
```

### Async Cache with Callbacks

Track cache performance and errors:

```typescript
import { AsyncCacheManager } from '@md-oss/cache';
import Keyv from 'keyv';

const cache = new AsyncCacheManager({
  stores: [new Keyv()],
  ttl: 60000,
  dataFunction: async (key: string) => {
    const data = await fetchExpensiveData(key);
    return data;
  },
  callbacks: {
    onStart: (key) => {
      console.log(`Fetching data for ${key}`);
    },
    onEnd: (key, duration) => {
      console.log(`Fetch took ${duration}ms for ${key}`);
    },
    onSuccess: (key, value) => {
      console.log(`Successfully cached ${key}`);
    },
    onError: (key, error) => {
      console.error(`Error fetching ${key}:`, error);
    }
  }
});

// Check async metadata
console.log(cache.metadata.async);
// { last: 123, total: 456, average: 152, longest: 234, shortest: 89 }
```

### Cache Events

Listen to cache operations:

```typescript
import { CacheManager, LRUCache } from '@md-oss/cache';

const cache = CacheManager.fromStore<string>(new LRUCache({ max: 100 }));

cache.on('set', ({ key, value, error }) => {
  if (!error) {
    console.log(`Cached: ${key} = ${value}`);
  }
});

cache.on('del', ({ key, error }) => {
  if (!error) {
    console.log(`Deleted: ${key}`);
  }
});

cache.on('clear', () => {
  console.log('Cache cleared');
});

cache.on('refresh', ({ key, error }) => {
  if (!error) {
    console.log(`Refreshed: ${key}`);
  }
});
```

### Batch Operations

```typescript
// Get multiple keys
const users = await cache.mget(['user:1', 'user:2', 'user:3']);

// Set multiple keys
await cache.mset([
  { key: 'user:1', value: user1, ttl: 60000 },
  { key: 'user:2', value: user2, ttl: 60000 },
  { key: 'user:3', value: user3, ttl: 60000 }
]);

// Delete multiple keys
await cache.mdel(['user:1', 'user:2', 'user:3']);
```

### Cache Wrapping

Wrap expensive operations with automatic caching:

```typescript
const result = await cache.wrap(
  'expensive-computation',
  async () => {
    // Only runs on cache miss
    return await performExpensiveComputation();
  },
  60000 // TTL in milliseconds
);
```

## Use Cases

### 1. API Response Caching

```typescript
import { AsyncCacheManager } from '@md-oss/cache';

const apiCache = new AsyncCacheManager<ApiResponse>({
  stores: [new Keyv()],
  ttl: 300000, // 5 minutes
  dataFunction: async (endpoint: string) => {
    const response = await fetch(endpoint);
    return response.json();
  }
});

app.get('/api/proxy/:path', async (req, res) => {
  const data = await apiCache.get(req.params.path);
  res.json(data);
});
```

### 2. Database Query Caching

```typescript
const userCache = new AsyncCacheManager<User>({
  stores: [
    new Keyv({ store: new LRUCache({ max: 1000, ttl: 60000 }) }),
    new Keyv({ store: new KeyvRedis(redisClient) })
  ],
  ttl: 600000, // 10 minutes
  dataFunction: async (userId: string) => {
    return await db.users.findUnique({ where: { id: userId } });
  }
});

async function getUser(id: string) {
  return userCache.get(id);
}
```

### 3. Session Management

```typescript
const sessionCache = CacheManager.fromStore<Session>(
  new Keyv({ store: new KeyvRedis(redisClient) })
);

async function getSession(sessionId: string) {
  return sessionCache.get(`session:${sessionId}`);
}

async function createSession(sessionId: string, data: Session) {
  await sessionCache.set(`session:${sessionId}`, data, 3600000); // 1 hour
}
```

### 4. Rate Limiting

```typescript
const rateLimitCache = CacheManager.fromStore<number>(
  new LRUCache({ max: 10000, ttl: 60000 })
);

async function checkRateLimit(userId: string): Promise<boolean> {
  const key = `rate:${userId}`;
  const count = (await rateLimitCache.get(key)) ?? 0;

  if (count >= 100) {
    return false; // Rate limit exceeded
  }

  await rateLimitCache.set(key, count + 1);
  return true;
}
```

### 5. Computed Value Caching

```typescript
const computeCache = new AsyncCacheManager<number>({
  stores: [new Keyv()],
  ttl: 3600000, // 1 hour
  dataFunction: async (key: string) => {
    const [start, end] = key.split(':');
    return await calculateComplexMetrics(Number(start), Number(end));
  }
});

async function getMetrics(startDate: Date, endDate: Date) {
  const key = `${startDate.getTime()}:${endDate.getTime()}`;
  return computeCache.get(key);
}
```

## Cache Metadata

Track cache performance:

```typescript
const cache = new AsyncCacheManager({ /* ... */ });

// Basic metadata
console.log(cache.metadata);
// {
//   hits: 150,
//   misses: 50,
//   added: 50,
//   deleted: 10,
//   updated: 20,
//   cleared: 0,
//   errors: 0,
//   async: {
//     last: 123,
//     total: 6150,
//     average: 123,
//     longest: 456,
//     shortest: 45
//   }
// }

// Calculate hit rate
const hitRate = cache.metadata.hits / (cache.metadata.hits + cache.metadata.misses);
console.log(`Cache hit rate: ${(hitRate * 100).toFixed(2)}%`);
```

## Performance Considerations

- **LRU Cache**: O(1) operations, best for hot data
- **Redis**: Network overhead, best for shared/persistent cache
- **Multi-Store**: Checks stores in order, populates higher levels on hit
- **Promise Cache**: Prevents thundering herd for in-flight requests

### Best Practices

```typescript
// Use appropriate TTLs
const shortTTL = 60000; // 1 minute for volatile data
const mediumTTL = 600000; // 10 minutes for regular data
const longTTL = 3600000; // 1 hour for stable data

// Layer caches appropriately
const cache = new CacheManager({
  stores: [
    new Keyv({ store: new LRUCache({ max: 100, ttl: shortTTL }) }), // Hot cache
    new Keyv({ store: new KeyvRedis(redis), ttl: longTTL }) // Warm cache
  ]
});

// Use promise caching for expensive operations
const promiseCache = new PromiseCache(5000);
const result = await promiseCache.get(() => expensiveOperation());
```

## API Reference

### CacheManager

- `get(key)` - Get a value from cache
- `mget(keys)` - Get multiple values
- `set(key, value, ttl?)` - Set a value
- `mset(entries)` - Set multiple values
- `del(key)` - Delete a value
- `mdel(keys)` - Delete multiple values
- `clear()` - Clear all cache entries
- `wrap(key, fn, ttl?)` - Wrap function with caching
- `ttl(key)` - Get remaining TTL for a key
- `keys()` - Get all cached keys
- `on(event, listener)` - Listen to cache events
- `extend(options)` - Create extended cache manager
- `disconnect()` - Disconnect all stores
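
Of these, `ttl`, `keys`, `extend`, and `disconnect` are not demonstrated elsewhere in this README. A minimal sketch of how they fit together, assuming `ttl` resolves to the remaining lifetime in milliseconds, `keys` to the list of cached keys, and `extend` accepting a partial options override (`reportData` is illustrative):

```typescript
// Derive a manager that shares the same stores but uses a different default TTL
const reports = cache.extend({ ttl: 10000 });

await reports.set('report:today', reportData);

console.log(await reports.ttl('report:today')); // assumed: remaining TTL in ms
console.log(await reports.keys());              // assumed: currently cached keys

// Close the underlying stores (e.g. Redis connections) on shutdown
await cache.disconnect();
```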

### AsyncCacheManager

Extends `CacheManager` with:
- `dataFunction` - Automatic data fetching on cache miss
- `callbacks` - Lifecycle callbacks (onStart, onEnd, onSuccess, onError)
- Additional metadata tracking for async operations
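
The bundled type declarations (`dist/async.d.cts`, shown later in this diff) also expose a `getWithDetails(key)` method that resolves to a `[value, details]` tuple. A sketch based on those typings:

```typescript
// Based on the getWithDetails signature in dist/async.d.cts
const [user, details] = await userCache.getWithDetails('user:123');

// details describes where the value came from, roughly:
// { source: 'cache' | 'no-cache', cached: boolean, cachedFor: number | null }
console.log(details.source, details.cachedFor);
```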

### PromiseCache

- `get(generator)` - Get or generate cached promise
- `clear()` - Clear cached promise
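
`clear()` is not shown in the examples above; a minimal sketch (`Config` and `loadConfig` are illustrative):

```typescript
const configCache = new PromiseCache<Config>(10000);

const config = await configCache.get(() => loadConfig());

// Drop the cached promise so the next get() runs the generator again
configCache.clear();
const fresh = await configCache.get(() => loadConfig());
```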

### Redis

- `initializeRedis()` - Initialize Redis connection
- `getRedisClient()` - Get Redis client instance
- `redisSetKv(key, value, ttl)` - Set key-value with TTL
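
`redisSetKv` is not demonstrated above; a minimal sketch, assuming the TTL is in milliseconds like the rest of the API and that the value is stored as a string:

```typescript
import { initializeRedis, redisSetKv } from '@md-oss/cache';

await initializeRedis();

// Write a key/value pair with a TTL directly to Redis, bypassing the cache managers
await redisSetKv('feature-flags', JSON.stringify({ beta: true }), 60000);
```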

## Types

```typescript
import type {
  AbstractCache,
  CacheManagerMetadata,
  AsyncCacheManagerMetadata,
  WithCacheDetails,
  SetCacheArguments,
  LRUArgs
} from '@md-oss/cache';
```

## Environment Variables

```env
REDIS_URL=redis://localhost:6379 # Required for Redis support
```
package/dist/async.cjs
ADDED
@@ -0,0 +1,2 @@
"use strict";var f=Object.defineProperty;var o=(u,a)=>f(u,"name",{value:a,configurable:!0});var p=require("debug"),d=require("keyv"),l=require("./lru.cjs"),m=require("./manager.cjs");require("lru-cache"),require("cache-manager");class n extends m.CacheManager{static{o(this,"AsyncCacheManager")}debug=p("md-oss:cache:async-manager");cacheOptions;metadata={hits:0,misses:0,added:0,deleted:0,updated:0,cleared:0,errors:0,async:{last:0,total:0,average:0,longest:0,shortest:0}};dataFunction;constructor(a){super(a),this.cacheOptions=a,this.dataFunction=a.dataFunction}extend(a){return new n({...this.cacheOptions,...a})}static fromStore(a){return new n({dataFunction:a.dataFunction,stores:[a instanceof d?a:new d({...a,store:a instanceof l.LRUCache?a:new l.LRUCache(a)})]})}async get(a){const r=await super.get(a);if(typeof r<"u")return r;const t=await this.dataWrapper(a);return typeof t<"u"&&await this.set(a,t),t}async getWithDetails(a){const r=await super.get(a);if(typeof r<"u")return[r,{source:"cache",cached:!1,cachedFor:null}];const t=await this.dataWrapper(a);let e;return typeof t<"u"?(await this.set(a,t),e={source:"no-cache",cached:!0,cachedFor:await this.ttl(a)}):e={source:"no-cache",cached:!1,cachedFor:null},[t,e]}dataWrapper=o(async a=>{const r=process.hrtime();this.cacheOptions.callbacks?.onStart&&this.cacheOptions.callbacks.onStart(a);let t,e,h=!1;try{t=this.dataFunction(a),e=t instanceof Promise?await t:t,typeof e<"u"&&this.cacheOptions.callbacks?.onSuccess&&(h=!0,this.cacheOptions.callbacks.onSuccess(a,e))}catch(s){const c=h?"Error in onSuccess callback for key":"Failed to fetch/process data for key",i=typeof s=="object"&&s&&"message"in s?new Error(`${c} "${a}": ${s.message}`):new Error(`${c} "${a}": ${s}`);throw this.metadata.errors++,delete i.stack,this.cacheOptions.callbacks?.onError&&this.cacheOptions.callbacks.onError(a,i),i}finally{const s=process.hrtime(r),c=s[0]*1e3+s[1]/1e6;this.metadata.async.last=c,this.metadata.async.total+=c,this.metadata.async.average=this.metadata.async.total/this.metadata.hits,this.metadata.async.longest=Math.max(this.metadata.async.longest,c),this.metadata.async.shortest=Math.min(this.metadata.async.shortest,c),this.cacheOptions.callbacks?.onEnd&&this.cacheOptions.callbacks.onEnd(a,c)}return e},"dataWrapper")}exports.AsyncCacheManager=n;
//# sourceMappingURL=async.cjs.map
package/dist/async.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"async.cjs","sources":["../src/async.ts"],"sourcesContent":["import debug from 'debug';\nimport Keyv from 'keyv';\nimport { type LRUArgs, LRUCache } from './lru';\nimport { CacheManager } from './manager';\nimport type {\n\tCacheManagerMetadata,\n\tResolvedCreateCacheOptions,\n\tWithCacheDataFn,\n\tWithCacheDetails,\n\tWithCacheReturnType,\n} from './types';\n\nexport type AsyncCreateCacheOptions<T extends NonNullable<unknown>> =\n\tResolvedCreateCacheOptions<T> & {\n\t\t/**\n\t\t * The function that fetches/processed data for a key. The function is ran\n\t\t * when the key is not found in the cache, and the data is stored in the cache.\n\t\t * - Values of type `null` are treated as cache, and will be stored as `null`.\n\t\t * - Values of type `undefined` are treated as no-cache, and will not be stored.\n\t\t * - __You are responsible for error handling in this function.__\n\t\t * - __Any unhandled errors will throw, and terminate your process.__\n\t\t * @param key The key to fetch/process data for\n\t\t * @returns The data for the key\n\t\t */\n\t\tdataFunction: WithCacheDataFn<string, T>;\n\t\t/**\n\t\t * Callbacks to run at various stages of the asynchronous caching process.\n\t\t */\n\t\tcallbacks?: AsyncCacheManagerCallbacks<T>;\n\t};\n\nexport type AsyncCacheManagerCallbacks<T extends NonNullable<unknown>> = {\n\t/**\n\t * Callback to execute when `dataFunction` starts. This callback is always executed,\n\t * and ran just before the function is called.\n\t * @param key The key passed to `dataFunction`\n\t */\n\tonStart?: (key: string) => void;\n\t/**\n\t * Callback to execute when `dataFunction` ends. This callback is always executed,\n\t * and ran just after the function finishes/resolves.\n\t * @param key The key passed to `dataFunction`\n\t * @param duration The duration of `dataFunction` in milliseconds\n\t * @returns\n\t */\n\tonEnd?: (key: string, duration: number) => void;\n\t/**\n\t * Callback to execute when `dataFunction` is successful. 
This callback is only\n\t * executed if the function resolves successfully, without erroring/throwing.\n\t * @param key The key passed to `dataFunction`\n\t * @param value The value that was returned/resolved by `dataFunction`\n\t * @returns\n\t */\n\tonSuccess?: (key: string, value: T | null) => void;\n\t/**\n\t * Callback to execute when an error occurs while running `dataFunction`.\n\t * - __Please note that this callback does not affect error handling.__\n\t * - __If your `dataFunction` errors and this callback is provided, an error is still thrown.__\n\t * @param key The key passed to `dataFunction`\n\t * @param error The error that occurred while running `dataFunction`\n\t */\n\tonError?: (key: string, error: Error) => void;\n};\n\nexport type AsyncCacheManagerMetadata = CacheManagerMetadata & {\n\t/** The number/amount of errors that occurred while processing data */\n\terrors: number;\n\t/** Metadata specific to async operations */\n\tasync: {\n\t\t/** The duration of the last `dataFunction` in milliseconds */\n\t\tlast: number;\n\t\t/** The total duration the `dataFunction` has taken in milliseconds */\n\t\ttotal: number;\n\t\t/** The average duration of all `dataFunction` runs in milliseconds */\n\t\taverage: number;\n\t\t/** The longest duration of all `dataFunction` runs in milliseconds */\n\t\tlongest: number;\n\t\t/** The shortest duration of all `dataFunction` runs in milliseconds */\n\t\tshortest: number;\n\t};\n};\n\nexport class AsyncCacheManager<\n\tT extends NonNullable<unknown>,\n> extends CacheManager<T> {\n\tprotected override readonly debug: debug.Debugger = debug(\n\t\t'md-oss:cache:async-manager'\n\t);\n\toverride readonly cacheOptions: AsyncCreateCacheOptions<T>;\n\toverride readonly metadata: AsyncCacheManagerMetadata = {\n\t\thits: 0,\n\t\tmisses: 0,\n\t\tadded: 0,\n\t\tdeleted: 0,\n\t\tupdated: 0,\n\t\tcleared: 0,\n\t\terrors: 0,\n\t\tasync: {\n\t\t\tlast: 0,\n\t\t\ttotal: 0,\n\t\t\taverage: 0,\n\t\t\tlongest: 0,\n\t\t\tshortest: 0,\n\t\t},\n\t};\n\n\tprivate readonly dataFunction: WithCacheDataFn<string, T>;\n\n\t//\n\t// Class instantiation\n\t//\n\n\tconstructor(options: AsyncCreateCacheOptions<T>) {\n\t\tsuper(options);\n\t\tthis.cacheOptions = options;\n\t\tthis.dataFunction = options.dataFunction;\n\t}\n\n\toverride extend(options?: AsyncCreateCacheOptions<T>): AsyncCacheManager<T> {\n\t\treturn new AsyncCacheManager({ ...this.cacheOptions, ...options });\n\t}\n\n\tstatic override fromStore<O extends NonNullable<unknown>>(\n\t\tstore: (Keyv<O> | LRUCache<O> | LRUArgs<O>) & {\n\t\t\tdataFunction: WithCacheDataFn<string, O>;\n\t\t}\n\t): AsyncCacheManager<O> {\n\t\treturn new AsyncCacheManager<O>({\n\t\t\tdataFunction: store.dataFunction,\n\t\t\tstores: [\n\t\t\t\tstore instanceof Keyv\n\t\t\t\t\t? store\n\t\t\t\t\t: new Keyv<O>({\n\t\t\t\t\t\t\t...store,\n\t\t\t\t\t\t\tstore: store instanceof LRUCache ? 
store : new LRUCache(store),\n\t\t\t\t\t\t}),\n\t\t\t],\n\t\t});\n\t}\n\n\t//\n\t// Standard Cache (Manager) operations\n\t//\n\n\toverride async get(key: string): Promise<T | null | undefined> {\n\t\tconst fromCache = await super.get(key);\n\n\t\tif (typeof fromCache !== 'undefined') {\n\t\t\treturn fromCache;\n\t\t}\n\n\t\tconst wrapperResult = await this.dataWrapper(key);\n\n\t\tif (typeof wrapperResult !== 'undefined') {\n\t\t\tawait this.set(key, wrapperResult);\n\t\t}\n\n\t\treturn wrapperResult;\n\t}\n\n\tasync getWithDetails(key: string): Promise<WithCacheReturnType<T>> {\n\t\tconst fromCache = await super.get(key);\n\n\t\tif (typeof fromCache !== 'undefined') {\n\t\t\treturn [fromCache, { source: 'cache', cached: false, cachedFor: null }];\n\t\t}\n\n\t\tconst wrapperResult = await this.dataWrapper(key);\n\n\t\tlet details: WithCacheDetails;\n\n\t\tif (typeof wrapperResult !== 'undefined') {\n\t\t\tawait this.set(key, wrapperResult);\n\t\t\tdetails = {\n\t\t\t\tsource: 'no-cache',\n\t\t\t\tcached: true,\n\t\t\t\tcachedFor: await this.ttl(key),\n\t\t\t};\n\t\t} else {\n\t\t\tdetails = { source: 'no-cache', cached: false, cachedFor: null };\n\t\t}\n\n\t\treturn [wrapperResult, details];\n\t}\n\n\t//\n\t// Wrappers\n\t//\n\n\tprivate readonly dataWrapper = async (\n\t\tkey: string\n\t): Promise<T | null | undefined> => {\n\t\tconst start = process.hrtime();\n\n\t\tif (this.cacheOptions.callbacks?.onStart) {\n\t\t\tthis.cacheOptions.callbacks.onStart(key);\n\t\t}\n\n\t\tlet result: ReturnType<typeof this.dataFunction>;\n\t\tlet resolvedResult: T | null | undefined;\n\t\tlet isUserCallbackError = false;\n\n\t\ttry {\n\t\t\tresult = this.dataFunction(key);\n\t\t\tresolvedResult = result instanceof Promise ? await result : result;\n\t\t\tif (\n\t\t\t\ttypeof resolvedResult !== 'undefined' &&\n\t\t\t\tthis.cacheOptions.callbacks?.onSuccess\n\t\t\t) {\n\t\t\t\tisUserCallbackError = true;\n\t\t\t\tthis.cacheOptions.callbacks.onSuccess(key, resolvedResult);\n\t\t\t}\n\t\t} catch (err) {\n\t\t\tconst reason = isUserCallbackError\n\t\t\t\t? 'Error in onSuccess callback for key'\n\t\t\t\t: 'Failed to fetch/process data for key';\n\t\t\tconst cleanError =\n\t\t\t\ttypeof err === 'object' && err && 'message' in err\n\t\t\t\t\t? 
new Error(`${reason} \"${key}\": ${err.message}`)\n\t\t\t\t\t: new Error(`${reason} \"${key}\": ${err}`);\n\n\t\t\tthis.metadata.errors++;\n\t\t\tdelete cleanError.stack;\n\n\t\t\tif (this.cacheOptions.callbacks?.onError) {\n\t\t\t\tthis.cacheOptions.callbacks.onError(key, cleanError);\n\t\t\t}\n\n\t\t\tthrow cleanError;\n\t\t} finally {\n\t\t\tconst end = process.hrtime(start);\n\t\t\tconst duration = end[0] * 1e3 + end[1] / 1e6;\n\n\t\t\tthis.metadata.async.last = duration;\n\t\t\tthis.metadata.async.total += duration;\n\t\t\tthis.metadata.async.average =\n\t\t\t\tthis.metadata.async.total / this.metadata.hits;\n\t\t\tthis.metadata.async.longest = Math.max(\n\t\t\t\tthis.metadata.async.longest,\n\t\t\t\tduration\n\t\t\t);\n\t\t\tthis.metadata.async.shortest = Math.min(\n\t\t\t\tthis.metadata.async.shortest,\n\t\t\t\tduration\n\t\t\t);\n\n\t\t\tif (this.cacheOptions.callbacks?.onEnd) {\n\t\t\t\tthis.cacheOptions.callbacks.onEnd(key, duration);\n\t\t\t}\n\t\t}\n\n\t\treturn resolvedResult;\n\t};\n}\n"],"names":["AsyncCacheManager","CacheManager","__name","debug","options","store","Keyv","LRUCache","key","fromCache","wrapperResult","details","start","result","resolvedResult","isUserCallbackError","err","reason","cleanError","end","duration"],"mappings":"qOAkFO,MAAMA,UAEHC,EAAAA,YAAgB,OAAA,CAAAC,EAAA,0BACG,MAAwBC,EACnD,4BAAA,EAEiB,aACA,SAAsC,CACvD,KAAM,EACN,OAAQ,EACR,MAAO,EACP,QAAS,EACT,QAAS,EACT,QAAS,EACT,OAAQ,EACR,MAAO,CACN,KAAM,EACN,MAAO,EACP,QAAS,EACT,QAAS,EACT,SAAU,CAAA,CACX,EAGgB,aAMjB,YAAYC,EAAqC,CAChD,MAAMA,CAAO,EACb,KAAK,aAAeA,EACpB,KAAK,aAAeA,EAAQ,YAC7B,CAES,OAAOA,EAA4D,CAC3E,OAAO,IAAIJ,EAAkB,CAAE,GAAG,KAAK,aAAc,GAAGI,EAAS,CAClE,CAEA,OAAgB,UACfC,EAGuB,CACvB,OAAO,IAAIL,EAAqB,CAC/B,aAAcK,EAAM,aACpB,OAAQ,CACPA,aAAiBC,EACdD,EACA,IAAIC,EAAQ,CACZ,GAAGD,EACH,MAAOA,aAAiBE,EAAAA,SAAWF,EAAQ,IAAIE,EAAAA,SAASF,CAAK,CAAA,CAC7D,CAAA,CACJ,CACA,CACF,CAMA,MAAe,IAAIG,EAA4C,CAC9D,MAAMC,EAAY,MAAM,MAAM,IAAID,CAAG,EAErC,GAAI,OAAOC,EAAc,IACxB,OAAOA,EAGR,MAAMC,EAAgB,MAAM,KAAK,YAAYF,CAAG,EAEhD,OAAI,OAAOE,EAAkB,KAC5B,MAAM,KAAK,IAAIF,EAAKE,CAAa,EAG3BA,CACR,CAEA,MAAM,eAAeF,EAA8C,CAClE,MAAMC,EAAY,MAAM,MAAM,IAAID,CAAG,EAErC,GAAI,OAAOC,EAAc,IACxB,MAAO,CAACA,EAAW,CAAE,OAAQ,QAAS,OAAQ,GAAO,UAAW,KAAM,EAGvE,MAAMC,EAAgB,MAAM,KAAK,YAAYF,CAAG,EAEhD,IAAIG,EAEJ,OAAI,OAAOD,EAAkB,KAC5B,MAAM,KAAK,IAAIF,EAAKE,CAAa,EACjCC,EAAU,CACT,OAAQ,WACR,OAAQ,GACR,UAAW,MAAM,KAAK,IAAIH,CAAG,CAAA,GAG9BG,EAAU,CAAE,OAAQ,WAAY,OAAQ,GAAO,UAAW,IAAA,EAGpD,CAACD,EAAeC,CAAO,CAC/B,CAMiB,YAAcT,EAAA,MAC9BM,GACmC,CACnC,MAAMI,EAAQ,QAAQ,OAAA,EAElB,KAAK,aAAa,WAAW,SAChC,KAAK,aAAa,UAAU,QAAQJ,CAAG,EAGxC,IAAIK,EACAC,EACAC,EAAsB,GAE1B,GAAI,CACHF,EAAS,KAAK,aAAaL,CAAG,EAC9BM,EAAiBD,aAAkB,QAAU,MAAMA,EAASA,EAE3D,OAAOC,EAAmB,KAC1B,KAAK,aAAa,WAAW,YAE7BC,EAAsB,GACtB,KAAK,aAAa,UAAU,UAAUP,EAAKM,CAAc,EAE3D,OAASE,EAAK,CACb,MAAMC,EAASF,EACZ,sCACA,uCACGG,EACL,OAAOF,GAAQ,UAAYA,GAAO,YAAaA,EAC5C,IAAI,MAAM,GAAGC,CAAM,KAAKT,CAAG,MAAMQ,EAAI,OAAO,EAAE,EAC9C,IAAI,MAAM,GAAGC,CAAM,KAAKT,CAAG,MAAMQ,CAAG,EAAE,EAE1C,WAAK,SAAS,SACd,OAAOE,EAAW,MAEd,KAAK,aAAa,WAAW,SAChC,KAAK,aAAa,UAAU,QAAQV,EAAKU,CAAU,EAG9CA,CACP,QAAA,CACC,MAAMC,EAAM,QAAQ,OAAOP,CAAK,EAC1BQ,EAAWD,EAAI,CAAC,EAAI,IAAMA,EAAI,CAAC,EAAI,IAEzC,KAAK,SAAS,MAAM,KAAOC,EAC3B,KAAK,SAAS,MAAM,OAASA,EAC7B,KAAK,SAAS,MAAM,QACnB,KAAK,SAAS,MAAM,MAAQ,KAAK,SAAS,KAC3C,KAAK,SAAS,MAAM,QAAU,KAAK,IAClC,KAAK,SAAS,MAAM,QACpBA,CAAA,EAED,KAAK,SAAS,MAAM,SAAW,KAAK,IACnC,KAAK,SAAS,MAAM,SACpBA,CAAA,EAGG,KAAK,aAAa,WAAW,OAChC,KAAK,aAAa,UAAU,MAAMZ,EAAKY,CAAQ,CAEjD,CAEA,OAAON,CACR,EA/D+B,cAgEhC"}
package/dist/async.d.cts
ADDED
@@ -0,0 +1,92 @@
import debug from 'debug';
import Keyv from 'keyv';
import { LRUCache, LRUArgs } from './lru.cjs';
import { CacheManager } from './manager.cjs';
import { ResolvedCreateCacheOptions, WithCacheDataFn, CacheManagerMetadata, WithCacheReturnType } from './types.cjs';
import 'lru-cache';
import 'node:events';
import 'cache-manager';

type AsyncCreateCacheOptions<T extends NonNullable<unknown>> = ResolvedCreateCacheOptions<T> & {
    /**
     * The function that fetches/processed data for a key. The function is ran
     * when the key is not found in the cache, and the data is stored in the cache.
     * - Values of type `null` are treated as cache, and will be stored as `null`.
     * - Values of type `undefined` are treated as no-cache, and will not be stored.
     * - __You are responsible for error handling in this function.__
     * - __Any unhandled errors will throw, and terminate your process.__
     * @param key The key to fetch/process data for
     * @returns The data for the key
     */
    dataFunction: WithCacheDataFn<string, T>;
    /**
     * Callbacks to run at various stages of the asynchronous caching process.
     */
    callbacks?: AsyncCacheManagerCallbacks<T>;
};
type AsyncCacheManagerCallbacks<T extends NonNullable<unknown>> = {
    /**
     * Callback to execute when `dataFunction` starts. This callback is always executed,
     * and ran just before the function is called.
     * @param key The key passed to `dataFunction`
     */
    onStart?: (key: string) => void;
    /**
     * Callback to execute when `dataFunction` ends. This callback is always executed,
     * and ran just after the function finishes/resolves.
     * @param key The key passed to `dataFunction`
     * @param duration The duration of `dataFunction` in milliseconds
     * @returns
     */
    onEnd?: (key: string, duration: number) => void;
    /**
     * Callback to execute when `dataFunction` is successful. This callback is only
     * executed if the function resolves successfully, without erroring/throwing.
     * @param key The key passed to `dataFunction`
     * @param value The value that was returned/resolved by `dataFunction`
     * @returns
     */
    onSuccess?: (key: string, value: T | null) => void;
    /**
     * Callback to execute when an error occurs while running `dataFunction`.
     * - __Please note that this callback does not affect error handling.__
     * - __If your `dataFunction` errors and this callback is provided, an error is still thrown.__
     * @param key The key passed to `dataFunction`
     * @param error The error that occurred while running `dataFunction`
     */
    onError?: (key: string, error: Error) => void;
};
type AsyncCacheManagerMetadata = CacheManagerMetadata & {
    /** The number/amount of errors that occurred while processing data */
    errors: number;
    /** Metadata specific to async operations */
    async: {
        /** The duration of the last `dataFunction` in milliseconds */
        last: number;
        /** The total duration the `dataFunction` has taken in milliseconds */
        total: number;
        /** The average duration of all `dataFunction` runs in milliseconds */
        average: number;
        /** The longest duration of all `dataFunction` runs in milliseconds */
        longest: number;
        /** The shortest duration of all `dataFunction` runs in milliseconds */
        shortest: number;
    };
};
declare class AsyncCacheManager<T extends NonNullable<unknown>> extends CacheManager<T> {
    protected readonly debug: debug.Debugger;
    readonly cacheOptions: AsyncCreateCacheOptions<T>;
    readonly metadata: AsyncCacheManagerMetadata;
    private readonly dataFunction;
    constructor(options: AsyncCreateCacheOptions<T>);
    extend(options?: AsyncCreateCacheOptions<T>): AsyncCacheManager<T>;
    static fromStore<O extends NonNullable<unknown>>(store: (Keyv<O> | LRUCache<O> | LRUArgs<O>) & {
        dataFunction: WithCacheDataFn<string, O>;
    }): AsyncCacheManager<O>;
    get(key: string): Promise<T | null | undefined>;
    getWithDetails(key: string): Promise<WithCacheReturnType<T>>;
    private readonly dataWrapper;
}

export { AsyncCacheManager };
export type { AsyncCacheManagerCallbacks, AsyncCacheManagerMetadata, AsyncCreateCacheOptions };
package/dist/async.d.mts
ADDED
@@ -0,0 +1,92 @@
import debug from 'debug';
import Keyv from 'keyv';
import { LRUCache, LRUArgs } from './lru.mjs';
import { CacheManager } from './manager.mjs';
import { ResolvedCreateCacheOptions, WithCacheDataFn, CacheManagerMetadata, WithCacheReturnType } from './types.mjs';
import 'lru-cache';
import 'node:events';
import 'cache-manager';

type AsyncCreateCacheOptions<T extends NonNullable<unknown>> = ResolvedCreateCacheOptions<T> & {
    /**
     * The function that fetches/processed data for a key. The function is ran
     * when the key is not found in the cache, and the data is stored in the cache.
     * - Values of type `null` are treated as cache, and will be stored as `null`.
     * - Values of type `undefined` are treated as no-cache, and will not be stored.
     * - __You are responsible for error handling in this function.__
     * - __Any unhandled errors will throw, and terminate your process.__
     * @param key The key to fetch/process data for
     * @returns The data for the key
     */
    dataFunction: WithCacheDataFn<string, T>;
    /**
     * Callbacks to run at various stages of the asynchronous caching process.
     */
    callbacks?: AsyncCacheManagerCallbacks<T>;
};
type AsyncCacheManagerCallbacks<T extends NonNullable<unknown>> = {
    /**
     * Callback to execute when `dataFunction` starts. This callback is always executed,
     * and ran just before the function is called.
     * @param key The key passed to `dataFunction`
     */
    onStart?: (key: string) => void;
    /**
     * Callback to execute when `dataFunction` ends. This callback is always executed,
     * and ran just after the function finishes/resolves.
     * @param key The key passed to `dataFunction`
     * @param duration The duration of `dataFunction` in milliseconds
     * @returns
     */
    onEnd?: (key: string, duration: number) => void;
    /**
     * Callback to execute when `dataFunction` is successful. This callback is only
     * executed if the function resolves successfully, without erroring/throwing.
     * @param key The key passed to `dataFunction`
     * @param value The value that was returned/resolved by `dataFunction`
     * @returns
     */
    onSuccess?: (key: string, value: T | null) => void;
    /**
     * Callback to execute when an error occurs while running `dataFunction`.
     * - __Please note that this callback does not affect error handling.__
     * - __If your `dataFunction` errors and this callback is provided, an error is still thrown.__
     * @param key The key passed to `dataFunction`
     * @param error The error that occurred while running `dataFunction`
     */
    onError?: (key: string, error: Error) => void;
};
type AsyncCacheManagerMetadata = CacheManagerMetadata & {
    /** The number/amount of errors that occurred while processing data */
    errors: number;
    /** Metadata specific to async operations */
    async: {
        /** The duration of the last `dataFunction` in milliseconds */
        last: number;
        /** The total duration the `dataFunction` has taken in milliseconds */
        total: number;
        /** The average duration of all `dataFunction` runs in milliseconds */
        average: number;
        /** The longest duration of all `dataFunction` runs in milliseconds */
        longest: number;
        /** The shortest duration of all `dataFunction` runs in milliseconds */
        shortest: number;
    };
};
declare class AsyncCacheManager<T extends NonNullable<unknown>> extends CacheManager<T> {
    protected readonly debug: debug.Debugger;
    readonly cacheOptions: AsyncCreateCacheOptions<T>;
    readonly metadata: AsyncCacheManagerMetadata;
    private readonly dataFunction;
    constructor(options: AsyncCreateCacheOptions<T>);
    extend(options?: AsyncCreateCacheOptions<T>): AsyncCacheManager<T>;
    static fromStore<O extends NonNullable<unknown>>(store: (Keyv<O> | LRUCache<O> | LRUArgs<O>) & {
        dataFunction: WithCacheDataFn<string, O>;
    }): AsyncCacheManager<O>;
    get(key: string): Promise<T | null | undefined>;
    getWithDetails(key: string): Promise<WithCacheReturnType<T>>;
    private readonly dataWrapper;
}

export { AsyncCacheManager };
export type { AsyncCacheManagerCallbacks, AsyncCacheManagerMetadata, AsyncCreateCacheOptions };