@vizamodo/edge-cache-core 0.3.36 → 0.3.38

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,10 +1,36 @@
1
+ /**
2
+ * Fetcher can return a plain value T, or a WrappedResult<T> to communicate
3
+ * a server-side expiry time back to the cache layer.
4
+ */
1
5
  export type WrappedResult<T> = {
2
6
  readonly __wrapped: true;
3
7
  readonly value: T;
8
+ /**
9
+ * ISO-8601 timestamp. When present, the cache derives its TTL from this
10
+ * value (minus EDGE_BUFFER_SEC) instead of using the caller-supplied ttlSec.
11
+ */
4
12
  readonly expiresAt: string | undefined;
5
13
  };
6
14
  export declare function wrapResult<T>(value: T, expiresAt?: string): WrappedResult<T>;
7
- export declare function getCachedOrFetch<T>(key: string, fetcher: () => Promise<T | WrappedResult<T>>, options: {
15
+ export type GetCachedOrFetchOptions = {
16
+ /**
17
+ * Default TTL (seconds) used when the fetcher does not supply an expiresAt.
18
+ */
8
19
  ttlSec: number;
20
+ /**
21
+ * When true, bypass L1 and L2 and re-populate them with a fresh value.
22
+ *
23
+ * Two concurrent force-refresh calls for the same key will both hit the
24
+ * fetcher independently — this is intentional. If you need exactly-once
25
+ * semantics on force-refresh, that belongs inside DedupePromiseCache.
26
+ */
9
27
  forceRefresh?: boolean;
10
- }): Promise<T>;
28
+ };
29
+ /**
30
+ * Three-layer cache:
31
+ *
32
+ * L1 — in-process DedupePromiseCache (per-isolate, concurrent-safe)
33
+ * L2 — edge / distributed cache (getEdgeCache / setEdgeCache)
34
+ * L3 — fetcher callback
35
+ */
36
+ export declare function getCachedOrFetch<T>(key: string, fetcher: () => Promise<T | WrappedResult<T>>, options: GetCachedOrFetchOptions): Promise<T>;
@@ -9,60 +9,89 @@ function isWrapped(v) {
9
9
  v["__wrapped"] === true);
10
10
  }
11
11
  // ── Constants ──────────────────────────────────────────────────────────────
12
- const EDGE_BUFFER_SEC = 300; // 5-min safety buffer before expiry
12
+ /** Subtracted from the server-reported expiry as a safety buffer. */
13
+ const EDGE_BUFFER_SEC = 300; // 5 min
14
+ /** Floor for the computed edge TTL to avoid near-zero useless writes. */
13
15
  const MIN_TTL_SEC = 60;
14
16
  // ── Module-level L1 cache ──────────────────────────────────────────────────
15
- // One shared instance per isolate; generic over `unknown` because different
16
- // callers store different value types under different keys.
17
+ // One shared instance per isolate. The map is keyed by string and stores
18
+ // heterogeneous value types, so the outer type parameter is `unknown`.
19
+ // Type-safety is restored at the call-site via the generic on getCachedOrFetch.
17
20
  const l1 = new DedupePromiseCache();
21
+ // ── Helpers ────────────────────────────────────────────────────────────────
22
+ /**
23
+ * Derive the edge TTL (seconds) from an optional ISO-8601 expiry string.
24
+ *
25
+ * Returns:
26
+ * - A positive number (at least MIN_TTL_SEC) when a valid future expiry is given; expiries inside the buffer window are clamped up to MIN_TTL_SEC.
27
+ * - fallbackSec when expiresAt is absent; 0 when it is invalid or already in the past.
28
+ */
29
+ function deriveEdgeTtlSec(expiresAt, fallbackSec) {
30
+ if (!expiresAt)
31
+ return fallbackSec;
32
+ const expiresMs = Date.parse(expiresAt);
33
+ if (!Number.isFinite(expiresMs)) {
34
+ console.warn("[cache] expiresAt is not a valid ISO-8601 date", { expiresAt });
35
+ return 0;
36
+ }
37
+ const remainingSec = Math.floor((expiresMs - Date.now()) / 1000);
38
+ if (remainingSec <= 0)
39
+ return 0; // already expired
40
+ return Math.max(remainingSec - EDGE_BUFFER_SEC, MIN_TTL_SEC);
41
+ }
42
+ /**
43
+ * Unwrap the fetcher result and compute the effective edge TTL in one pass.
44
+ */
45
+ function resolveResult(raw, fallbackTtlSec) {
46
+ if (!isWrapped(raw)) {
47
+ return { value: raw, edgeTtlSec: fallbackTtlSec };
48
+ }
49
+ return {
50
+ value: raw.value,
51
+ edgeTtlSec: deriveEdgeTtlSec(raw.expiresAt, fallbackTtlSec),
52
+ };
53
+ }
18
54
  // ── Public API ─────────────────────────────────────────────────────────────
55
+ /**
56
+ * Three-layer cache:
57
+ *
58
+ * L1 — in-process DedupePromiseCache (per-isolate, concurrent-safe)
59
+ * L2 — edge / distributed cache (getEdgeCache / setEdgeCache)
60
+ * L3 — fetcher callback
61
+ */
19
62
  export async function getCachedOrFetch(key, fetcher, options) {
20
63
  const { ttlSec, forceRefresh = false } = options;
21
- // Use DedupePromiseCache as L1. On forceRefresh we bypass by deleting first.
22
64
  if (forceRefresh)
23
65
  l1.delete(key);
24
- return l1.getOrCreate(key, async () => {
25
- // ── L2: edge cache (skip on forceRefresh) ─────────────────────────────
66
+ // DedupePromiseCache is typed as `unknown`; the single `as T` below is the
67
+ // only unsafe boundary in this module. It is safe because every write path
68
+ // inside the factory produces a value of type T.
69
+ const value = await l1.getOrCreate(key, async () => {
70
+ // ── L2: edge cache ──────────────────────────────────────────────────
26
71
  if (!forceRefresh) {
27
- const edgeValue = await getEdgeCache(key).catch(() => null);
28
- if (edgeValue !== null && edgeValue !== undefined) {
29
- // Populate L1 for next request; edge TTL is not observable,
30
- // so use caller-supplied ttlSec as an upper bound (may slightly over-cache).
72
+ const edgeValue = await getEdgeCache(key).catch((err) => {
73
+ console.error("[cache][edge-get-failed]", { key, err });
74
+ return null;
75
+ });
76
+ if (edgeValue != null) {
77
+ // Edge TTL is not externally observable. We use the caller-supplied
78
+ // ttlSec as an upper bound for L1. In the worst case L1 serves data
79
+ // slightly past the edge expiry — an accepted trade-off for skipping
80
+ // a second edge round-trip. To close this gap, store the expiry
81
+ // timestamp alongside the value in the edge cache and thread it back.
31
82
  return { value: edgeValue, ttlMs: ttlSec * 1000 };
32
83
  }
33
84
  }
34
- // ── L3: fetcher ───────────────────────────────────────────────────────
85
+ // ── L3: fetcher ─────────────────────────────────────────────────────
35
86
  const raw = await fetcher();
36
- let unwrapped;
37
- let edgeTtlSec = ttlSec;
38
- if (isWrapped(raw)) {
39
- unwrapped = raw.value;
40
- if (raw.expiresAt) {
41
- const expiresMs = Date.parse(raw.expiresAt);
42
- if (Number.isFinite(expiresMs) && expiresMs > Date.now()) {
43
- const remainingSec = Math.floor((expiresMs - Date.now()) / 1000);
44
- edgeTtlSec = Math.max(remainingSec - EDGE_BUFFER_SEC, MIN_TTL_SEC);
45
- }
46
- else {
47
- // expired or invalid → do not cache at edge
48
- edgeTtlSec = 0;
49
- }
50
- }
51
- }
52
- else {
53
- unwrapped = raw;
54
- }
55
- // Write to edge cache (must await to ensure commit before worker exits).
87
+ const { value: unwrapped, edgeTtlSec } = resolveResult(raw, ttlSec);
56
88
  if (edgeTtlSec > 0) {
57
- try {
58
- await setEdgeCache(key, unwrapped, edgeTtlSec);
59
- }
60
- catch {
61
- // best-effort: do not break response on cache failure
62
- }
89
+ await setEdgeCache(key, unwrapped, edgeTtlSec).catch((err) => {
90
+ console.error("[cache][edge-set-failed]", { key, err });
91
+ });
63
92
  }
64
- // ttlMs for L1 = same window as edge TTL so both layers expire together.
65
- const l1TtlMs = edgeTtlSec * 1000;
66
- return { value: unwrapped, ttlMs: l1TtlMs };
93
+ // Mirror L1 TTL to edge TTL so both layers expire around the same time.
94
+ return { value: unwrapped, ttlMs: edgeTtlSec * 1000 };
67
95
  });
96
+ return value;
68
97
  }
@@ -6,4 +6,10 @@ export declare class DedupePromiseCache<T> {
6
6
  }>): Promise<T>;
7
7
  delete(key: string): void;
8
8
  clear(): void;
9
+ /**
10
+ * Remove all expired entries (in-flight sentinels are kept).
11
+ * Falls back to a full clear when the map is still over the limit after
12
+ * eviction — this is a last resort to avoid unbounded memory growth.
13
+ */
14
+ private evict;
9
15
  }
@@ -1,53 +1,84 @@
1
1
  // src/runtime/dedupe-promise-cache.ts
2
+ const MAX_ENTRIES = 1_000;
2
3
  export class DedupePromiseCache {
3
4
  map = new Map();
4
5
  getOrCreate(key, factory) {
6
+ const now = Date.now();
5
7
  const existing = this.map.get(key);
6
- // 1. In-flight → dedupe.
8
+ // 1. In-flight → dedupe on the sentinel promise.
7
9
  if (existing?.expiresAt === 0)
8
10
  return existing.promise;
9
- // 2. Valid cache entry → reuse.
10
- if (existing && existing.expiresAt > Date.now())
11
+ // 2. Valid (non-expired) entry → reuse.
12
+ if (existing && existing.expiresAt > now)
11
13
  return existing.promise;
12
- // 3. Miss / expired → evict and run factory.
14
+ // 3. Stale entry → evict before running factory.
13
15
  if (existing)
14
16
  this.map.delete(key);
17
+ // Evict expired entries when the map is getting large.
18
+ // Runs before the new key is inserted so the size check is accurate.
19
+ if (this.map.size >= MAX_ENTRIES)
20
+ this.evict();
21
+ // Build the public promise separately from the resolution handles so we
22
+ // can suppress the unhandled-rejection warning on the sentinel itself
23
+ // while still propagating errors to callers who await the returned promise.
15
24
  let resolveFn;
16
25
  let rejectFn;
17
26
  const promise = new Promise((resolve, reject) => {
18
27
  resolveFn = resolve;
19
28
  rejectFn = reject;
20
29
  });
21
- // Suppress unhandled rejection on the sentinel promise itself.
22
- // Callers that attach after a failure will still receive the rejection
23
- // via their own .catch / try-await — this only prevents the global warning
24
- // for the case where no consumer is attached at the moment of rejection.
30
+ // Prevent "unhandled rejection" warnings for the case where rejection
31
+ // occurs before any external .catch / await is attached.
25
32
  promise.catch(() => { });
26
33
  // 🔥 Set sentinel BEFORE factory() so any concurrent getOrCreate()
27
- // arriving during the async gap will dedupe on this promise.
34
+ // call during the async gap dedupes on this promise.
28
35
  this.map.set(key, { promise, expiresAt: 0 });
36
+ // Run factory asynchronously so the sentinel is visible to all callers
37
+ // synchronously queued after this point.
29
38
  Promise.resolve()
30
39
  .then(factory)
31
40
  .then(({ value, ttlMs }) => {
32
41
  resolveFn(value);
33
- if (typeof ttlMs === "number" && ttlMs > 0) {
34
- const resolved = Promise.resolve(value);
42
+ if (ttlMs != null && Number.isFinite(ttlMs) && ttlMs > 0) {
43
+ // Capture Date.now() AFTER factory resolves for an accurate expiry.
35
44
  this.map.set(key, {
36
- promise: resolved,
45
+ promise: Promise.resolve(value),
37
46
  expiresAt: Date.now() + ttlMs,
38
47
  });
39
48
  }
40
49
  else {
50
+ // ttlMs absent, non-finite, or <= 0 → do not cache the resolved value.
41
51
  this.map.delete(key);
42
52
  }
43
53
  })
44
54
  .catch((err) => {
45
55
  rejectFn(err);
46
- // Remove poisoned sentinel so the next caller retries.
56
+ // Remove the poisoned sentinel so the next caller retries the factory.
47
57
  this.map.delete(key);
48
58
  });
49
59
  return promise;
50
60
  }
51
- delete(key) { this.map.delete(key); }
52
- clear() { this.map.clear(); }
61
+ delete(key) {
62
+ this.map.delete(key);
63
+ }
64
+ clear() {
65
+ this.map.clear();
66
+ }
67
+ // ── Private helpers ────────────────────────────────────────────────────
68
+ /**
69
+ * Remove all expired entries (in-flight sentinels are kept).
70
+ * Falls back to a full clear when the map is still over the limit after
71
+ * eviction — this is a last resort to avoid unbounded memory growth.
72
+ */
73
+ evict() {
74
+ const now = Date.now();
75
+ for (const [k, v] of this.map) {
76
+ if (v.expiresAt !== 0 && v.expiresAt <= now) {
77
+ this.map.delete(k);
78
+ }
79
+ }
80
+ if (this.map.size >= MAX_ENTRIES) {
81
+ this.map.clear();
82
+ }
83
+ }
53
84
  }
@@ -1,8 +1,14 @@
1
1
  /**
2
- * Generic edge cache GET helper
2
+ * Reads a value from the edge cache.
3
+ * Returns `null` on cache miss, parse failure, or any runtime error.
3
4
  */
4
5
  export declare function getEdgeCache<T>(key: string): Promise<T | null>;
5
6
  /**
6
- * Generic edge cache SET helper
7
+ * Writes a value to the edge cache with the given TTL.
8
+ * Does not require an ExecutionContext: the cache write is awaited directly instead of being deferred via ctx.waitUntil.
9
+ *
10
+ * @param key - Cache key (arbitrary string).
11
+ * @param value - Any JSON-serialisable value.
12
+ * @param ttlSec - Time-to-live in seconds. Values <= 0 are ignored.
7
13
  */
8
14
  export declare function setEdgeCache(key: string, value: unknown, ttlSec: number): Promise<void>;
@@ -1,101 +1,88 @@
1
1
  const CACHE_KEY_PREFIX = "https://edge-cache.internal/";
2
+ /**
3
+ * Encodes a cache key to a safe URL-compatible base64 string.
4
+ * Uses a loop instead of spread to avoid stack overflow on long keys.
5
+ */
2
6
  function normalizeKey(key) {
3
- return btoa(unescape(encodeURIComponent(key)));
7
+ const bytes = new TextEncoder().encode(key);
8
+ let binary = "";
9
+ for (const b of bytes)
10
+ binary += String.fromCharCode(b);
11
+ return btoa(binary);
12
+ }
13
+ /**
14
+ * Wraps primitive values so the stored JSON always has a stable object shape.
15
+ * Objects are stored as-is; primitives get wrapped with a sentinel flag.
16
+ */
17
+ function wrap(value) {
18
+ return value !== null && typeof value === "object"
19
+ ? value
20
+ : { __primitive: true, value };
21
+ }
22
+ function unwrap(parsed) {
23
+ if (parsed !== null &&
24
+ typeof parsed === "object" &&
25
+ "__primitive" in parsed &&
26
+ parsed.__primitive === true) {
27
+ return parsed.value;
28
+ }
29
+ return parsed;
4
30
  }
5
31
  /**
6
- * Generic edge cache GET helper
32
+ * Reads a value from the edge cache.
33
+ * Returns `null` on cache miss, parse failure, or any runtime error.
7
34
  */
8
35
  export async function getEdgeCache(key) {
36
+ const cacheKey = CACHE_KEY_PREFIX + normalizeKey(key);
37
+ let res;
9
38
  try {
10
- const cache = caches.default;
11
- const req = new Request(CACHE_KEY_PREFIX + normalizeKey(key));
12
- console.debug("[edge-cache] GET", { key, url: req.url });
13
- const res = await cache.match(req);
14
- console.debug("[edge-cache] MATCH RESULT", {
15
- key,
16
- hasResponse: !!res,
17
- status: res?.status,
18
- headers: res ? Object.fromEntries(res.headers.entries()) : null,
19
- });
20
- if (!res) {
21
- console.debug("[edge-cache] MISS", { key });
22
- return null;
23
- }
24
- console.debug("[edge-cache] HIT", {
25
- key,
26
- status: res.status,
27
- headers: Object.fromEntries(res.headers.entries()),
28
- });
29
- try {
30
- const data = await res.clone().text();
31
- console.debug("[edge-cache] HIT RAW", {
32
- key,
33
- size: data.length,
34
- preview: data.slice(0, 200), // avoid huge logs
35
- });
36
- let parsed;
37
- try {
38
- parsed = JSON.parse(data);
39
- // Normalize back: unwrap primitive values
40
- if (parsed &&
41
- typeof parsed === "object" &&
42
- parsed.__primitive === true) {
43
- parsed = parsed.value;
44
- }
45
- }
46
- catch (err) {
47
- console.error("[edge-cache] JSON PARSE FAILED", { key, err, dataPreview: data.slice(0, 200) });
48
- return null;
49
- }
50
- console.debug("[edge-cache] PARSED OK", {
51
- key,
52
- type: typeof parsed,
53
- isPrimitiveWrapped: typeof parsed !== "object"
54
- ? true
55
- : parsed?.__primitive === true,
56
- });
57
- return parsed;
58
- }
59
- catch {
60
- // corrupted cache entry → ignore
61
- return null;
62
- }
39
+ res = await caches.default.match(new Request(cacheKey));
40
+ }
41
+ catch (err) {
42
+ console.error("[edge-cache] GET match failed", { key, err });
43
+ return null;
44
+ }
45
+ if (!res)
46
+ return null;
47
+ let raw;
48
+ try {
49
+ raw = await res.text();
50
+ }
51
+ catch (err) {
52
+ console.error("[edge-cache] GET read body failed", { key, err });
53
+ return null;
54
+ }
55
+ try {
56
+ const parsed = JSON.parse(raw);
57
+ return unwrap(parsed);
63
58
  }
64
59
  catch (err) {
65
- console.error("[edge-cache] GET FAILED", { key, err });
60
+ console.error("[edge-cache] GET JSON parse failed", {
61
+ key,
62
+ err,
63
+ preview: raw.slice(0, 200),
64
+ });
66
65
  return null;
67
66
  }
68
67
  }
69
68
  /**
70
- * Generic edge cache SET helper
69
+ * Writes a value to the edge cache with the given TTL.
70
+ * Does not require an ExecutionContext: the cache write is awaited directly instead of being deferred via ctx.waitUntil.
71
+ *
72
+ * @param key - Cache key (arbitrary string).
73
+ * @param value - Any JSON-serialisable value.
74
+ * @param ttlSec - Time-to-live in seconds. Values <= 0 are ignored.
71
75
  */
72
76
  export async function setEdgeCache(key, value, ttlSec) {
73
77
  if (ttlSec <= 0)
74
78
  return;
75
- const cache = caches.default;
76
- const req = new Request(CACHE_KEY_PREFIX + normalizeKey(key));
77
- console.debug("[edge-cache] SET", { key, url: req.url, ttlSec });
78
- // Normalize value: always store as object to ensure stable JSON shape
79
- const normalizedValue = value !== null && typeof value === "object"
80
- ? value
81
- : { __primitive: true, value };
82
- const body = JSON.stringify(normalizedValue);
79
+ const cacheKey = CACHE_KEY_PREFIX + normalizeKey(key);
80
+ const body = JSON.stringify(wrap(value));
83
81
  const res = new Response(body, {
84
82
  headers: {
85
83
  "Content-Type": "application/json",
86
84
  "Cache-Control": `max-age=${ttlSec}`,
87
85
  },
88
86
  });
89
- try {
90
- console.debug("[edge-cache] SET BODY SIZE", { key, size: body.length });
91
- const t0 = Date.now();
92
- await cache.put(req, res);
93
- // Force event loop flush to ensure write completes before worker exits
94
- await new Promise((r) => setTimeout(r, 0));
95
- const t1 = Date.now();
96
- console.debug("[edge-cache] SET OK", { key, durationMs: t1 - t0 });
97
- }
98
- catch (err) {
99
- console.error("[edge-cache] SET FAILED", { key, err });
100
- }
87
+ await caches.default.put(new Request(cacheKey), res);
101
88
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vizamodo/edge-cache-core",
3
- "version": "0.3.36",
3
+ "version": "0.3.38",
4
4
  "description": "Edge cache primitives for Cloudflare Workers (L1 memory + L2 edge cache)",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",