@donkeylabs/server 2.0.18 → 2.0.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/docs/caching-strategies.md +677 -0
- package/docs/dev-experience.md +656 -0
- package/docs/hot-reload-limitations.md +166 -0
- package/docs/load-testing.md +974 -0
- package/docs/plugin-registry-design.md +1064 -0
- package/docs/production.md +1229 -0
- package/docs/workflows.md +90 -3
- package/package.json +18 -2
- package/src/admin/routes.ts +153 -0
- package/src/core/cron.ts +184 -15
- package/src/core/index.ts +25 -0
- package/src/core/job-adapter-kysely.ts +176 -73
- package/src/core/job-adapter-sqlite.ts +10 -0
- package/src/core/jobs.ts +112 -17
- package/src/core/migrations/workflows/002_add_metadata_column.ts +28 -0
- package/src/core/process-adapter-kysely.ts +62 -21
- package/src/core/storage-adapter-local.test.ts +199 -0
- package/src/core/storage.test.ts +197 -0
- package/src/core/workflow-adapter-kysely.ts +66 -19
- package/src/core/workflow-executor.ts +469 -0
- package/src/core/workflow-proxy.ts +238 -0
- package/src/core/workflow-socket.ts +447 -0
- package/src/core/workflows.test.ts +415 -0
- package/src/core/workflows.ts +782 -9
- package/src/core.ts +17 -6
- package/src/index.ts +14 -0
- package/src/server.ts +40 -26
- package/src/testing/database.test.ts +263 -0
- package/src/testing/database.ts +173 -0
- package/src/testing/e2e.test.ts +189 -0
- package/src/testing/e2e.ts +272 -0
- package/src/testing/index.ts +18 -0
|
@@ -0,0 +1,677 @@
|
|
|
1
|
+
# Advanced Caching Strategies
|
|
2
|
+
|
|
3
|
+
Advanced patterns for distributed systems, high-load scenarios, and cache consistency.
|
|
4
|
+
|
|
5
|
+
## Table of Contents
|
|
6
|
+
|
|
7
|
+
- [Caching Patterns](#caching-patterns)
|
|
8
|
+
- [Cache Stampede Prevention](#cache-stampede-prevention)
|
|
9
|
+
- [Distributed Cache Coordination](#distributed-cache-coordination)
|
|
10
|
+
- [Cache Warming](#cache-warming)
|
|
11
|
+
- [Cache Versioning](#cache-versioning)
|
|
12
|
+
- [Monitoring & Metrics](#monitoring--metrics)
|
|
13
|
+
- [Multi-Layer Caching](#multi-layer-caching)
|
|
14
|
+
|
|
15
|
+
---
|
|
16
|
+
|
|
17
|
+
## Caching Patterns
|
|
18
|
+
|
|
19
|
+
### 1. Cache-Aside (Lazy Loading)
|
|
20
|
+
|
|
21
|
+
Most common pattern - application manages cache.
|
|
22
|
+
|
|
23
|
+
```ts
|
|
24
|
+
// Application checks cache first
|
|
25
|
+
async function getUser(id: string) {
|
|
26
|
+
const cacheKey = `user:${id}`;
|
|
27
|
+
|
|
28
|
+
// 1. Check cache
|
|
29
|
+
let user = await cache.get(cacheKey);
|
|
30
|
+
if (user) return user;
|
|
31
|
+
|
|
32
|
+
// 2. Cache miss - load from DB
|
|
33
|
+
user = await db.selectFrom("users")
|
|
34
|
+
.where("id", "=", id)
|
|
35
|
+
.executeTakeFirst();
|
|
36
|
+
|
|
37
|
+
// 3. Store in cache
|
|
38
|
+
if (user) {
|
|
39
|
+
await cache.set(cacheKey, user, 60000);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
return user;
|
|
43
|
+
}
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
**Pros:** Simple, flexible
|
|
47
|
+
**Cons:** Cache misses are expensive
|
|
48
|
+
|
|
49
|
+
### 2. Read-Through
|
|
50
|
+
|
|
51
|
+
Cache automatically loads from source on miss.
|
|
52
|
+
|
|
53
|
+
```ts
|
|
54
|
+
class ReadThroughCache {
|
|
55
|
+
constructor(
|
|
56
|
+
private cache: Cache,
|
|
57
|
+
private loader: (key: string) => Promise<any>
|
|
58
|
+
) {}
|
|
59
|
+
|
|
60
|
+
async get(key: string) {
|
|
61
|
+
let value = await this.cache.get(key);
|
|
62
|
+
|
|
63
|
+
if (!value) {
|
|
64
|
+
value = await this.loader(key);
|
|
65
|
+
await this.cache.set(key, value);
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
return value;
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
// Usage
|
|
73
|
+
const userCache = new ReadThroughCache(
|
|
74
|
+
cache,
|
|
75
|
+
async (key) => {
|
|
76
|
+
const id = key.replace("user:", "");
|
|
77
|
+
return db.selectFrom("users").where("id", "=", id).executeTakeFirst();
|
|
78
|
+
}
|
|
79
|
+
);
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
**Pros:** Consistent loading logic
|
|
83
|
+
**Cons:** Less control over loading
|
|
84
|
+
|
|
85
|
+
### 3. Write-Through
|
|
86
|
+
|
|
87
|
+
Data written to cache and DB simultaneously.
|
|
88
|
+
|
|
89
|
+
```ts
|
|
90
|
+
class WriteThroughCache {
|
|
91
|
+
async set(key: string, value: any, ttl?: number) {
|
|
92
|
+
// Write to both
|
|
93
|
+
await Promise.all([
|
|
94
|
+
this.db.set(key, value),
|
|
95
|
+
this.cache.set(key, value, ttl),
|
|
96
|
+
]);
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
async delete(key: string) {
|
|
100
|
+
await Promise.all([
|
|
101
|
+
this.db.delete(key),
|
|
102
|
+
this.cache.delete(key),
|
|
103
|
+
]);
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
**Pros:** Strong consistency, no stale data
|
|
109
|
+
**Cons:** Slower writes
|
|
110
|
+
|
|
111
|
+
### 4. Write-Behind (Write-Back)
|
|
112
|
+
|
|
113
|
+
Write to cache immediately, async write to DB.
|
|
114
|
+
|
|
115
|
+
```ts
|
|
116
|
+
class WriteBehindCache {
|
|
117
|
+
private pendingWrites = new Map<string, any>();
|
|
118
|
+
|
|
119
|
+
async set(key: string, value: any) {
|
|
120
|
+
// 1. Write to cache immediately
|
|
121
|
+
await this.cache.set(key, value);
|
|
122
|
+
|
|
123
|
+
// 2. Queue for async DB write
|
|
124
|
+
this.pendingWrites.set(key, value);
|
|
125
|
+
this.scheduleFlush();
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
private flushTimer: Timer | null = null;
|
|
129
|
+
|
|
130
|
+
private scheduleFlush() {
|
|
131
|
+
if (this.flushTimer) return;
|
|
132
|
+
|
|
133
|
+
this.flushTimer = setTimeout(async () => {
|
|
134
|
+
const batch = new Map(this.pendingWrites);
|
|
135
|
+
this.pendingWrites.clear();
|
|
136
|
+
|
|
137
|
+
// Batch write to DB
|
|
138
|
+
await this.db.batchSet(batch);
|
|
139
|
+
|
|
140
|
+
this.flushTimer = null;
|
|
141
|
+
}, 1000); // Flush every second
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
```
|
|
145
|
+
|
|
146
|
+
**Pros:** Fast writes, batch DB operations
|
|
147
|
+
**Cons:** Risk of data loss, eventual consistency
|
|
148
|
+
|
|
149
|
+
### 5. Refresh-Ahead
|
|
150
|
+
|
|
151
|
+
Proactively refresh cache before expiration.
|
|
152
|
+
|
|
153
|
+
```ts
|
|
154
|
+
class RefreshAheadCache {
|
|
155
|
+
async get(key: string, loader: () => Promise<any>, ttl: number) {
|
|
156
|
+
const entry = await this.cache.getWithMetadata(key);
|
|
157
|
+
|
|
158
|
+
if (!entry) {
|
|
159
|
+
// Cache miss - load and cache
|
|
160
|
+
const value = await loader();
|
|
161
|
+
await this.cache.set(key, value, ttl);
|
|
162
|
+
return value;
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
// Check if nearing expiration (e.g., < 20% of TTL remaining)
|
|
166
|
+
const remainingRatio = entry.ttlRemaining / ttl;
|
|
167
|
+
if (remainingRatio < 0.2) {
|
|
168
|
+
// Refresh in background
|
|
169
|
+
this.refreshAsync(key, loader, ttl);
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
return entry.value;
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
private async refreshAsync(key: string, loader: () => Promise<any>, ttl: number) {
|
|
176
|
+
try {
|
|
177
|
+
const value = await loader();
|
|
178
|
+
await this.cache.set(key, value, ttl);
|
|
179
|
+
} catch (err) {
|
|
180
|
+
console.error("Refresh failed:", err);
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
```
|
|
185
|
+
|
|
186
|
+
**Pros:** No stale cache hits
|
|
187
|
+
**Cons:** Extra load on refresh
|
|
188
|
+
|
|
189
|
+
---
|
|
190
|
+
|
|
191
|
+
## Cache Stampede Prevention
|
|
192
|
+
|
|
193
|
+
When cache expires, multiple requests hit the DB simultaneously.
|
|
194
|
+
|
|
195
|
+
### Problem
|
|
196
|
+
|
|
197
|
+
```
|
|
198
|
+
T0: Cache expires
|
|
199
|
+
T1: Request A - cache miss, queries DB (5s)
|
|
200
|
+
T2: Request B - cache miss, queries DB (5s)
|
|
201
|
+
T3: Request C - cache miss, queries DB (5s)
|
|
202
|
+
...
|
|
203
|
+
Result: 100s of DB queries for same data
|
|
204
|
+
```
|
|
205
|
+
|
|
206
|
+
### Solutions
|
|
207
|
+
|
|
208
|
+
#### 1. Lock/Lease Pattern
|
|
209
|
+
|
|
210
|
+
Only one request regenerates cache.
|
|
211
|
+
|
|
212
|
+
```ts
|
|
213
|
+
class StampedeProtectedCache {
|
|
214
|
+
private locks = new Map<string, Promise<any>>();
|
|
215
|
+
|
|
216
|
+
async getOrSet(key: string, factory: () => Promise<any>, ttl: number) {
|
|
217
|
+
// Check cache first
|
|
218
|
+
const cached = await this.cache.get(key);
|
|
219
|
+
if (cached) return cached;
|
|
220
|
+
|
|
221
|
+
// Check if another request is already generating
|
|
222
|
+
let lock = this.locks.get(key);
|
|
223
|
+
|
|
224
|
+
if (!lock) {
|
|
225
|
+
// We're the first - create lock
|
|
226
|
+
lock = this.generateAndCache(key, factory, ttl);
|
|
227
|
+
this.locks.set(key, lock);
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
try {
|
|
231
|
+
return await lock;
|
|
232
|
+
} finally {
|
|
233
|
+
// Clean up lock
|
|
234
|
+
this.locks.delete(key);
|
|
235
|
+
}
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
private async generateAndCache(key: string, factory: () => Promise<any>, ttl: number) {
|
|
239
|
+
const value = await factory();
|
|
240
|
+
await this.cache.set(key, value, ttl);
|
|
241
|
+
return value;
|
|
242
|
+
}
|
|
243
|
+
}
|
|
244
|
+
```
|
|
245
|
+
|
|
246
|
+
#### 2. Early Expiration (Probabilistic)
|
|
247
|
+
|
|
248
|
+
Expire cache early for some requests to spread load.
|
|
249
|
+
|
|
250
|
+
```ts
|
|
251
|
+
class ProbabilisticEarlyExpiration {
|
|
252
|
+
async get(key: string, loader: () => Promise<any>, ttl: number) {
|
|
253
|
+
const entry = await this.cache.getWithMetadata(key);
|
|
254
|
+
|
|
255
|
+
if (!entry) {
|
|
256
|
+
return this.loadAndCache(key, loader, ttl);
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
// Calculate probability of early expiration
|
|
260
|
+
const age = Date.now() - entry.createdAt;
|
|
261
|
+
const ttlRemaining = ttl - age;
|
|
262
|
+
|
|
263
|
+
// Higher probability as we near expiration
|
|
264
|
+
// At 75% of TTL, 50% chance to refresh
|
|
265
|
+
const refreshProbability = Math.max(0, (age / ttl - 0.5) * 2);
|
|
266
|
+
|
|
267
|
+
if (Math.random() < refreshProbability) {
|
|
268
|
+
// Refresh in background
|
|
269
|
+
this.refreshAsync(key, loader, ttl);
|
|
270
|
+
}
|
|
271
|
+
|
|
272
|
+
return entry.value;
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
```
|
|
276
|
+
|
|
277
|
+
#### 3. Circuit Breaker + Fallback
|
|
278
|
+
|
|
279
|
+
Use stale cache while refreshing.
|
|
280
|
+
|
|
281
|
+
```ts
|
|
282
|
+
class CircuitBreakerCache {
|
|
283
|
+
async getWithStaleFallback(key: string, loader: () => Promise<any>, ttl: number) {
|
|
284
|
+
const entry = await this.cache.getWithMetadata(key);
|
|
285
|
+
|
|
286
|
+
if (!entry) {
|
|
287
|
+
// Complete miss
|
|
288
|
+
return this.loadAndCache(key, loader, ttl);
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
const age = Date.now() - entry.createdAt;
|
|
292
|
+
const isExpired = age > ttl;
|
|
293
|
+
|
|
294
|
+
if (!isExpired) {
|
|
295
|
+
return entry.value; // Fresh cache
|
|
296
|
+
}
|
|
297
|
+
|
|
298
|
+
// Stale - return immediately but refresh
|
|
299
|
+
this.refreshAsync(key, loader, ttl).catch(console.error);
|
|
300
|
+
return entry.value;
|
|
301
|
+
}
|
|
302
|
+
}
|
|
303
|
+
```
|
|
304
|
+
|
|
305
|
+
---
|
|
306
|
+
|
|
307
|
+
## Distributed Cache Coordination
|
|
308
|
+
|
|
309
|
+
For multi-instance deployments.
|
|
310
|
+
|
|
311
|
+
### 1. Cache Invalidation Broadcast
|
|
312
|
+
|
|
313
|
+
When one instance updates, notify others.
|
|
314
|
+
|
|
315
|
+
```ts
|
|
316
|
+
class DistributedCache {
|
|
317
|
+
constructor(
|
|
318
|
+
private localCache: Cache,
|
|
319
|
+
private eventBus: EventBus
|
|
320
|
+
) {
|
|
321
|
+
// Listen for invalidation events
|
|
322
|
+
this.eventBus.on("cache:invalidate", ({ keys }) => {
|
|
323
|
+
for (const key of keys) {
|
|
324
|
+
this.localCache.delete(key);
|
|
325
|
+
}
|
|
326
|
+
});
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
async set(key: string, value: any, ttl?: number) {
|
|
330
|
+
await this.localCache.set(key, value, ttl);
|
|
331
|
+
}
|
|
332
|
+
|
|
333
|
+
async delete(key: string) {
|
|
334
|
+
await this.localCache.delete(key);
|
|
335
|
+
// Broadcast to other instances
|
|
336
|
+
this.eventBus.emit("cache:invalidate", { keys: [key] });
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
async deletePattern(pattern: string) {
|
|
340
|
+
const keys = await this.localCache.keys(pattern);
|
|
341
|
+
for (const key of keys) {
|
|
342
|
+
await this.localCache.delete(key);
|
|
343
|
+
}
|
|
344
|
+
// Broadcast
|
|
345
|
+
this.eventBus.emit("cache:invalidate", { keys });
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
```
|
|
349
|
+
|
|
350
|
+
### 2. Hash-Based Sharding
|
|
351
|
+
|
|
352
|
+
Distribute keys across cache instances.
|
|
353
|
+
|
|
354
|
+
```ts
|
|
355
|
+
class ShardedCache {
|
|
356
|
+
private shards: Cache[];
|
|
357
|
+
|
|
358
|
+
constructor(shards: Cache[]) {
|
|
359
|
+
this.shards = shards;
|
|
360
|
+
}
|
|
361
|
+
|
|
362
|
+
private getShard(key: string): Cache {
|
|
363
|
+
const hash = this.hashKey(key);
|
|
364
|
+
const index = hash % this.shards.length;
|
|
365
|
+
return this.shards[index];
|
|
366
|
+
}
|
|
367
|
+
|
|
368
|
+
private hashKey(key: string): number {
|
|
369
|
+
let hash = 0;
|
|
370
|
+
for (let i = 0; i < key.length; i++) {
|
|
371
|
+
const char = key.charCodeAt(i);
|
|
372
|
+
hash = ((hash << 5) - hash) + char;
|
|
373
|
+
hash = hash & hash; // Convert to 32bit integer
|
|
374
|
+
}
|
|
375
|
+
return Math.abs(hash);
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
async get(key: string) {
|
|
379
|
+
return this.getShard(key).get(key);
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
async set(key: string, value: any, ttl?: number) {
|
|
383
|
+
return this.getShard(key).set(key, value, ttl);
|
|
384
|
+
}
|
|
385
|
+
}
|
|
386
|
+
```
|
|
387
|
+
|
|
388
|
+
---
|
|
389
|
+
|
|
390
|
+
## Cache Warming
|
|
391
|
+
|
|
392
|
+
Pre-populate cache before high-traffic events.
|
|
393
|
+
|
|
394
|
+
### 1. Scheduled Warming
|
|
395
|
+
|
|
396
|
+
```ts
|
|
397
|
+
class CacheWarmer {
|
|
398
|
+
constructor(private cache: Cache, private ctx: PluginContext) {
|
|
399
|
+
// Warm cache every hour
|
|
400
|
+
ctx.core.cron.schedule("0 * * * *", () => this.warmCache());
|
|
401
|
+
}
|
|
402
|
+
|
|
403
|
+
async warmCache() {
|
|
404
|
+
console.log("Warming cache...");
|
|
405
|
+
|
|
406
|
+
// Warm popular users
|
|
407
|
+
const popularUsers = await this.ctx.db
|
|
408
|
+
.selectFrom("users")
|
|
409
|
+
.orderBy("lastLogin", "desc")
|
|
410
|
+
.limit(100)
|
|
411
|
+
.selectAll()
|
|
412
|
+
.execute();
|
|
413
|
+
|
|
414
|
+
for (const user of popularUsers) {
|
|
415
|
+
await this.cache.set(`user:${user.id}`, user, 3600000);
|
|
416
|
+
}
|
|
417
|
+
|
|
418
|
+
// Warm reference data
|
|
419
|
+
const countries = await this.ctx.db
|
|
420
|
+
.selectFrom("countries")
|
|
421
|
+
.selectAll()
|
|
422
|
+
.execute();
|
|
423
|
+
|
|
424
|
+
await this.cache.set("countries:list", countries, 86400000);
|
|
425
|
+
|
|
426
|
+
console.log(`Warmed ${popularUsers.length} users, ${countries.length} countries`);
|
|
427
|
+
}
|
|
428
|
+
}
|
|
429
|
+
```
|
|
430
|
+
|
|
431
|
+
### 2. Event-Driven Warming
|
|
432
|
+
|
|
433
|
+
```ts
|
|
434
|
+
// Warm cache when new data is likely to be accessed
|
|
435
|
+
ctx.core.events.on("user.registered", async ({ userId }) => {
|
|
436
|
+
// Pre-generate welcome email content
|
|
437
|
+
const welcomeContent = await generateWelcomeEmail();
|
|
438
|
+
await cache.set(`email:welcome:${userId}`, welcomeContent, 300000);
|
|
439
|
+
});
|
|
440
|
+
|
|
441
|
+
ctx.core.events.on("order.created", async ({ orderId, userId }) => {
|
|
442
|
+
// Pre-load order for confirmation page
|
|
443
|
+
const order = await ctx.plugins.orders.getById(orderId);
|
|
444
|
+
await cache.set(`order:${orderId}`, order, 300000);
|
|
445
|
+
await cache.set(`user:${userId}:latestOrder`, order, 300000);
|
|
446
|
+
});
|
|
447
|
+
```
|
|
448
|
+
|
|
449
|
+
### 3. Predictive Warming
|
|
450
|
+
|
|
451
|
+
```ts
|
|
452
|
+
class PredictiveCacheWarmer {
|
|
453
|
+
async warmBasedOnTrafficPatterns() {
|
|
454
|
+
const hour = new Date().getHours();
|
|
455
|
+
|
|
456
|
+
// Morning - warm dashboard data
|
|
457
|
+
if (hour === 8) {
|
|
458
|
+
await this.warmDashboardData();
|
|
459
|
+
}
|
|
460
|
+
|
|
461
|
+
// Lunch - warm social feeds
|
|
462
|
+
if (hour === 12) {
|
|
463
|
+
await this.warmSocialFeeds();
|
|
464
|
+
}
|
|
465
|
+
|
|
466
|
+
// Evening - warm entertainment content
|
|
467
|
+
if (hour === 19) {
|
|
468
|
+
await this.warmEntertainmentContent();
|
|
469
|
+
}
|
|
470
|
+
}
|
|
471
|
+
}
|
|
472
|
+
```
|
|
473
|
+
|
|
474
|
+
---
|
|
475
|
+
|
|
476
|
+
## Cache Versioning
|
|
477
|
+
|
|
478
|
+
Handle schema changes gracefully.
|
|
479
|
+
|
|
480
|
+
### Versioned Keys
|
|
481
|
+
|
|
482
|
+
```ts
|
|
483
|
+
const CACHE_VERSION = "v2"; // Bump when schema changes
|
|
484
|
+
|
|
485
|
+
function getCacheKey(baseKey: string): string {
|
|
486
|
+
return `${CACHE_VERSION}:${baseKey}`;
|
|
487
|
+
}
|
|
488
|
+
|
|
489
|
+
// Usage
|
|
490
|
+
await cache.set(getCacheKey("user:123"), userData);
|
|
491
|
+
const user = await cache.get(getCacheKey("user:123"));
|
|
492
|
+
```
|
|
493
|
+
|
|
494
|
+
### Schema Migration
|
|
495
|
+
|
|
496
|
+
```ts
|
|
497
|
+
class CacheVersionManager {
|
|
498
|
+
private currentVersion = 2;
|
|
499
|
+
|
|
500
|
+
async migrateIfNeeded() {
|
|
501
|
+
const cachedVersion = await cache.get("cache:version");
|
|
502
|
+
|
|
503
|
+
if (cachedVersion !== this.currentVersion) {
|
|
504
|
+
console.log(`Migrating cache from ${cachedVersion} to ${this.currentVersion}`);
|
|
505
|
+
|
|
506
|
+
// Clear old versioned keys
|
|
507
|
+
await this.clearOldVersions();
|
|
508
|
+
|
|
509
|
+
// Set new version
|
|
510
|
+
await cache.set("cache:version", this.currentVersion);
|
|
511
|
+
}
|
|
512
|
+
}
|
|
513
|
+
|
|
514
|
+
private async clearOldVersions() {
|
|
515
|
+
// Clear all v1: keys when upgrading to v2
|
|
516
|
+
const oldKeys = await cache.keys("v1:*");
|
|
517
|
+
for (const key of oldKeys) {
|
|
518
|
+
await cache.delete(key);
|
|
519
|
+
}
|
|
520
|
+
}
|
|
521
|
+
}
|
|
522
|
+
```
|
|
523
|
+
|
|
524
|
+
---
|
|
525
|
+
|
|
526
|
+
## Monitoring & Metrics
|
|
527
|
+
|
|
528
|
+
Track cache effectiveness.
|
|
529
|
+
|
|
530
|
+
### Cache Stats
|
|
531
|
+
|
|
532
|
+
```ts
|
|
533
|
+
class MonitoredCache {
|
|
534
|
+
private hits = 0;
|
|
535
|
+
private misses = 0;
|
|
536
|
+
private evictions = 0;
|
|
537
|
+
|
|
538
|
+
async get(key: string) {
|
|
539
|
+
const value = await this.cache.get(key);
|
|
540
|
+
|
|
541
|
+
if (value) {
|
|
542
|
+
this.hits++;
|
|
543
|
+
this.recordMetric("cache.hit", key);
|
|
544
|
+
} else {
|
|
545
|
+
this.misses++;
|
|
546
|
+
this.recordMetric("cache.miss", key);
|
|
547
|
+
}
|
|
548
|
+
|
|
549
|
+
return value;
|
|
550
|
+
}
|
|
551
|
+
|
|
552
|
+
getStats() {
|
|
553
|
+
const total = this.hits + this.misses;
|
|
554
|
+
return {
|
|
555
|
+
hits: this.hits,
|
|
556
|
+
misses: this.misses,
|
|
557
|
+
hitRate: total > 0 ? this.hits / total : 0,
|
|
558
|
+
evictions: this.evictions,
|
|
559
|
+
size: this.cache.size(),
|
|
560
|
+
};
|
|
561
|
+
}
|
|
562
|
+
|
|
563
|
+
private recordMetric(type: string, key: string) {
|
|
564
|
+
// Send to monitoring (e.g., DataDog, Prometheus)
|
|
565
|
+
ctx.core.events.emit("metric", {
|
|
566
|
+
name: `cache.${type}`,
|
|
567
|
+
tags: { key: this.sanitizeKey(key) },
|
|
568
|
+
value: 1,
|
|
569
|
+
});
|
|
570
|
+
}
|
|
571
|
+
}
|
|
572
|
+
```
|
|
573
|
+
|
|
574
|
+
### Health Checks
|
|
575
|
+
|
|
576
|
+
```ts
|
|
577
|
+
// In your health check endpoint
|
|
578
|
+
router.route("health").typed({
|
|
579
|
+
handle: async (_, ctx) => {
|
|
580
|
+
const cacheStats = ctx.core.cache.getStats?.() || {};
|
|
581
|
+
|
|
582
|
+
return {
|
|
583
|
+
status: "healthy",
|
|
584
|
+
cache: {
|
|
585
|
+
hitRate: cacheStats.hitRate,
|
|
586
|
+
size: cacheStats.size,
|
|
587
|
+
evictions: cacheStats.evictions,
|
|
588
|
+
},
|
|
589
|
+
};
|
|
590
|
+
},
|
|
591
|
+
});
|
|
592
|
+
```
|
|
593
|
+
|
|
594
|
+
---
|
|
595
|
+
|
|
596
|
+
## Multi-Layer Caching
|
|
597
|
+
|
|
598
|
+
Combine multiple cache layers.
|
|
599
|
+
|
|
600
|
+
### L1 (In-Memory) → L2 (Redis) → L3 (DB)
|
|
601
|
+
|
|
602
|
+
```ts
|
|
603
|
+
class MultiLayerCache {
|
|
604
|
+
constructor(
|
|
605
|
+
private l1Cache: Cache, // In-process, ultra-fast
|
|
606
|
+
private l2Cache: Cache, // Redis, shared
|
|
607
|
+
private loader: (key: string) => Promise<any>
|
|
608
|
+
) {}
|
|
609
|
+
|
|
610
|
+
async get(key: string) {
|
|
611
|
+
// 1. Try L1 (local)
|
|
612
|
+
const l1Value = await this.l1Cache.get(key);
|
|
613
|
+
if (l1Value) return l1Value;
|
|
614
|
+
|
|
615
|
+
// 2. Try L2 (distributed)
|
|
616
|
+
const l2Value = await this.l2Cache.get(key);
|
|
617
|
+
if (l2Value) {
|
|
618
|
+
// Populate L1 for next time
|
|
619
|
+
await this.l1Cache.set(key, l2Value);
|
|
620
|
+
return l2Value;
|
|
621
|
+
}
|
|
622
|
+
|
|
623
|
+
// 3. Load from source
|
|
624
|
+
const value = await this.loader(key);
|
|
625
|
+
|
|
626
|
+
// 4. Populate both caches
|
|
627
|
+
await Promise.all([
|
|
628
|
+
this.l1Cache.set(key, value, 60000), // 1 min in L1
|
|
629
|
+
this.l2Cache.set(key, value, 300000), // 5 min in L2
|
|
630
|
+
]);
|
|
631
|
+
|
|
632
|
+
return value;
|
|
633
|
+
}
|
|
634
|
+
|
|
635
|
+
async delete(key: string) {
|
|
636
|
+
// Invalidate all layers
|
|
637
|
+
await Promise.all([
|
|
638
|
+
this.l1Cache.delete(key),
|
|
639
|
+
this.l2Cache.delete(key),
|
|
640
|
+
]);
|
|
641
|
+
}
|
|
642
|
+
}
|
|
643
|
+
```
|
|
644
|
+
|
|
645
|
+
### Layer Characteristics
|
|
646
|
+
|
|
647
|
+
| Layer | Speed | Scope | TTL | Use Case |
|
|
648
|
+
|-------|-------|-------|-----|----------|
|
|
649
|
+
| L1 (In-Memory) | ~1μs | Instance | Short (1-5 min) | Hot data, request-scoped |
|
|
650
|
+
| L2 (Redis) | ~1ms | Cluster | Medium (5-60 min) | Shared data, sessions |
|
|
651
|
+
| L3 (DB) | ~10ms | Persistent | Permanent | Source of truth |
|
|
652
|
+
|
|
653
|
+
---
|
|
654
|
+
|
|
655
|
+
## Best Practices Summary
|
|
656
|
+
|
|
657
|
+
1. **Choose the right pattern** for your consistency needs
|
|
658
|
+
2. **Prevent stampedes** with locking or early expiration
|
|
659
|
+
3. **Warm caches** proactively for predictable traffic
|
|
660
|
+
4. **Version your keys** to handle schema migrations
|
|
661
|
+
5. **Monitor hit rates** and adjust TTLs
|
|
662
|
+
6. **Use multi-layer** for high-scale applications
|
|
663
|
+
7. **Invalidate proactively** on data changes
|
|
664
|
+
8. **Use meaningful key prefixes** for organization
|
|
665
|
+
|
|
666
|
+
---
|
|
667
|
+
|
|
668
|
+
## Implementation Checklist
|
|
669
|
+
|
|
670
|
+
- [ ] Identify hot data and appropriate TTLs
|
|
671
|
+
- [ ] Implement stampede protection for high-traffic keys
|
|
672
|
+
- [ ] Set up cache warming for predictable patterns
|
|
673
|
+
- [ ] Add cache versioning strategy
|
|
674
|
+
- [ ] Monitor hit/miss rates
|
|
675
|
+
- [ ] Document cache key patterns for your domain
|
|
676
|
+
- [ ] Plan cache invalidation strategy
|
|
677
|
+
- [ ] Consider multi-layer for scale
|