@vizamodo/edge-cache-core 0.3.30 → 0.3.31
This diff shows the content changes between publicly released versions of this package on a supported registry. It is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
- package/dist/runtime/cache.js +11 -6
- package/package.json +1 -1
package/dist/runtime/cache.js
CHANGED
|
@@ -18,27 +18,30 @@ const l1 = new DedupePromiseCache();
|
|
|
18
18
|
// ── Public API ─────────────────────────────────────────────────────────────
|
|
19
19
|
export async function getCachedOrFetch(key, fetcher, options) {
|
|
20
20
|
const { ttlSec, forceRefresh = false } = options;
|
|
21
|
-
// ── L1: memory (skip on forceRefresh) ───────────────────────────────────
|
|
22
|
-
if (!forceRefresh) {
|
|
23
|
-
// DedupePromiseCache handles: in-flight dedupe + TTL expiry + sentinel cleanup.
|
|
24
|
-
// We wrap the inner fetch so L1 only misses once even under concurrency.
|
|
25
|
-
// The factory is only called when L1 truly misses.
|
|
26
|
-
}
|
|
27
21
|
// Use DedupePromiseCache as L1. On forceRefresh we bypass by deleting first.
|
|
28
22
|
if (forceRefresh)
|
|
29
23
|
l1.delete(key);
|
|
24
|
+
if (forceRefresh) {
|
|
25
|
+
console.debug("[cache] forceRefresh → bypass L1");
|
|
26
|
+
}
|
|
30
27
|
return l1.getOrCreate(key, async () => {
|
|
28
|
+
console.debug("[cache] L1 MISS → entering factory", { key });
|
|
31
29
|
// ── L2: edge cache (skip on forceRefresh) ─────────────────────────────
|
|
32
30
|
if (!forceRefresh) {
|
|
31
|
+
console.debug("[cache] checking L2 (edge cache)", { key });
|
|
33
32
|
const edgeValue = await getEdgeCache(key).catch(() => null);
|
|
34
33
|
if (edgeValue !== null && edgeValue !== undefined) {
|
|
34
|
+
console.debug("[cache] L2 HIT", { key });
|
|
35
35
|
// Populate L1 for next request; edge TTL is not observable,
|
|
36
36
|
// so use caller-supplied ttlSec as an upper bound (may slightly over-cache).
|
|
37
37
|
return { value: edgeValue, ttlMs: ttlSec * 1000 };
|
|
38
38
|
}
|
|
39
39
|
}
|
|
40
|
+
console.debug("[cache] L2 MISS → calling fetcher", { key });
|
|
40
41
|
// ── L3: fetcher ───────────────────────────────────────────────────────
|
|
42
|
+
console.debug("[cache] L3 FETCH start", { key });
|
|
41
43
|
const raw = await fetcher();
|
|
44
|
+
console.debug("[cache] L3 FETCH done", { key });
|
|
42
45
|
let unwrapped;
|
|
43
46
|
let edgeTtlSec = ttlSec;
|
|
44
47
|
if (isWrapped(raw)) {
|
|
@@ -60,10 +63,12 @@ export async function getCachedOrFetch(key, fetcher, options) {
|
|
|
60
63
|
}
|
|
61
64
|
// Write to edge cache (fire-and-forget — failure must not affect return value).
|
|
62
65
|
if (edgeTtlSec > 0) {
|
|
66
|
+
console.debug("[cache] writing to L2", { key, ttl: edgeTtlSec });
|
|
63
67
|
setEdgeCache(key, unwrapped, edgeTtlSec).catch(() => { });
|
|
64
68
|
}
|
|
65
69
|
// ttlMs for L1 = same window as edge TTL so both layers expire together.
|
|
66
70
|
const l1TtlMs = edgeTtlSec * 1000;
|
|
71
|
+
console.debug("[cache] writing to L1", { key, ttlMs: l1TtlMs });
|
|
67
72
|
return { value: unwrapped, ttlMs: l1TtlMs };
|
|
68
73
|
});
|
|
69
74
|
}
|