@vizamodo/edge-cache-core 0.3.28 → 0.3.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,31 +1,10 @@
1
- export type CacheOptions = {
2
- ttlSec: number;
3
- forceRefresh?: boolean;
4
- };
5
- /**
6
- * Wrap a fetcher result to override the TTL via an absolute expiry timestamp.
7
- * Use this when the upstream API tells you exactly when the data expires
8
- * (e.g. OAuth token expiresAt, CDN Cache-Control, etc.).
9
- *
10
- * @example
11
- * fetcher: async () => {
12
- * const token = await getToken();
13
- * return wrapResult(token.accessToken, token.expiresAt); // ISO string
14
- * }
15
- */
16
1
  export type WrappedResult<T> = {
17
2
  readonly __wrapped: true;
18
3
  readonly value: T;
19
- readonly expiresAt?: string;
4
+ readonly expiresAt: string | undefined;
20
5
  };
21
6
  export declare function wrapResult<T>(value: T, expiresAt?: string): WrappedResult<T>;
22
- /**
23
- * Two-level cache: L1 in-memory → L2 edge → L3 fetcher.
24
- *
25
- * - ttlSec: edge cache TTL in seconds (REQUIRED). Pass 0 to skip edge write.
26
- * - forceRefresh: skips L1 + L2, always calls the fetcher and writes back.
27
- * - WrappedResult: return wrapResult(value, expiresAt) from fetcher to override TTL
28
- * with an absolute expiry from the upstream source.
29
- * - Edge and memory write failures are non-fatal and will not throw.
30
- */
31
- export declare function getCachedOrFetch<T>(key: string, fetcher: () => Promise<T | WrappedResult<T>>, options: CacheOptions): Promise<T>;
7
+ export declare function getCachedOrFetch<T>(key: string, fetcher: () => Promise<T | WrappedResult<T>>, options: {
8
+ ttlSec: number;
9
+ forceRefresh?: boolean;
10
+ }): Promise<T>;
@@ -1,61 +1,69 @@
1
1
  import { getEdgeCache, setEdgeCache } from "./edge-cache";
2
- const memory = new Map();
2
+ import { DedupePromiseCache } from "./dedupe-promise-cache";
3
3
  export function wrapResult(value, expiresAt) {
4
4
  return { __wrapped: true, value, expiresAt };
5
5
  }
6
- // ─── Internals ─────────────────────────────────────────────────────────────
7
- const TTL_BUFFER_SEC = 300; // 5-minute safety buffer
8
- const TTL_MIN_SEC = 60; // never cache for less than 1 minute
9
- function isWrapped(result) {
10
- return (typeof result === "object" &&
11
- result !== null &&
12
- result.__wrapped === true);
6
+ function isWrapped(v) {
7
+ return (typeof v === "object" &&
8
+ v !== null &&
9
+ v["__wrapped"] === true);
13
10
  }
14
- function ttlFromExpiresAt(expiresAt) {
15
- const ms = new Date(expiresAt).getTime();
16
- if (Number.isNaN(ms)) {
17
- return TTL_MIN_SEC; // safe fallback
18
- }
19
- const ttl = Math.floor((ms - Date.now()) / 1000) - TTL_BUFFER_SEC;
20
- return Math.max(ttl, TTL_MIN_SEC);
21
- }
22
- // ─── Main ──────────────────────────────────────────────────────────────────
23
- /**
24
- * Two-level cache: L1 in-memory → L2 edge → L3 fetcher.
25
- *
26
- * - ttlSec: edge cache TTL in seconds (REQUIRED). Pass 0 to skip edge write.
27
- * - forceRefresh: skips L1 + L2, always calls the fetcher and writes back.
28
- * - WrappedResult: return wrapResult(value, expiresAt) from fetcher to override TTL
29
- * with an absolute expiry from the upstream source.
30
- * - Edge and memory write failures are non-fatal and will not throw.
31
- */
11
+ // ── Constants ──────────────────────────────────────────────────────────────
12
+ const EDGE_BUFFER_SEC = 300; // 5-min safety buffer before expiry
13
+ const MIN_TTL_SEC = 60;
14
+ // ── Module-level L1 cache ──────────────────────────────────────────────────
15
+ // One shared instance per isolate; generic over `unknown` because different
16
+ // callers store different value types under different keys.
17
+ const l1 = new DedupePromiseCache();
18
+ // ── Public API ─────────────────────────────────────────────────────────────
32
19
  export async function getCachedOrFetch(key, fetcher, options) {
33
- const { forceRefresh = false, ttlSec } = options;
20
+ const { ttlSec, forceRefresh = false } = options;
21
+ // ── L1: memory (skip on forceRefresh) ───────────────────────────────────
34
22
  if (!forceRefresh) {
35
- // L1: memory
36
- if (memory.has(key)) {
37
- return memory.get(key);
23
+ // DedupePromiseCache handles: in-flight dedupe + TTL expiry + sentinel cleanup.
24
+ // We wrap the inner fetch so L1 only misses once even under concurrency.
25
+ // The factory is only called when L1 truly misses.
26
+ }
27
+ // Use DedupePromiseCache as L1. On forceRefresh we bypass by deleting first.
28
+ if (forceRefresh)
29
+ l1.delete(key);
30
+ return l1.getOrCreate(key, async () => {
31
+ // ── L2: edge cache (skip on forceRefresh) ─────────────────────────────
32
+ if (!forceRefresh) {
33
+ const edgeValue = await getEdgeCache(key).catch(() => null);
34
+ if (edgeValue !== null && edgeValue !== undefined) {
35
+ // Populate L1 for next request; edge TTL is not observable,
36
+ // so use caller-supplied ttlSec as an upper bound (may slightly over-cache).
37
+ return { value: edgeValue, ttlMs: ttlSec * 1000 };
38
+ }
38
39
  }
39
- // L2: edge cache
40
- const edge = await getEdgeCache(key);
41
- if (edge !== null) {
42
- memory.set(key, edge);
43
- return edge;
40
+ // ── L3: fetcher ───────────────────────────────────────────────────────
41
+ const raw = await fetcher();
42
+ let unwrapped;
43
+ let edgeTtlSec = ttlSec;
44
+ if (isWrapped(raw)) {
45
+ unwrapped = raw.value;
46
+ if (raw.expiresAt) {
47
+ const expiresMs = Date.parse(raw.expiresAt);
48
+ if (Number.isFinite(expiresMs) && expiresMs > Date.now()) {
49
+ const remainingSec = Math.floor((expiresMs - Date.now()) / 1000);
50
+ edgeTtlSec = Math.max(remainingSec - EDGE_BUFFER_SEC, MIN_TTL_SEC);
51
+ }
52
+ else {
53
+ // expired or invalid → do not cache at edge
54
+ edgeTtlSec = 0;
55
+ }
56
+ }
44
57
  }
45
- }
46
- // L3: fetch from source
47
- const result = await fetcher();
48
- const value = isWrapped(result) ? result.value : result;
49
- const finalTtl = isWrapped(result) && result.expiresAt
50
- ? ttlFromExpiresAt(result.expiresAt)
51
- : ttlSec;
52
- // Write back to both layers; edge write is best-effort
53
- memory.set(key, value);
54
- try {
55
- await setEdgeCache(key, value, finalTtl);
56
- }
57
- catch {
58
- // non-fatal
59
- }
60
- return value;
58
+ else {
59
+ unwrapped = raw;
60
+ }
61
+ // Write to edge cache (fire-and-forget failure must not affect return value).
62
+ if (edgeTtlSec > 0) {
63
+ setEdgeCache(key, unwrapped, edgeTtlSec).catch(() => { });
64
+ }
65
+ // ttlMs for L1 = same window as edge TTL so both layers expire together.
66
+ const l1TtlMs = edgeTtlSec * 1000;
67
+ return { value: unwrapped, ttlMs: l1TtlMs };
68
+ });
61
69
  }
@@ -0,0 +1,9 @@
1
+ export declare class DedupePromiseCache<T> {
2
+ private readonly map;
3
+ getOrCreate(key: string, factory: () => Promise<{
4
+ value: T;
5
+ ttlMs?: number;
6
+ }>): Promise<T>;
7
+ delete(key: string): void;
8
+ clear(): void;
9
+ }
@@ -0,0 +1,53 @@
1
+ // src/runtime/dedupe-promise-cache.ts
2
+ export class DedupePromiseCache {
3
+ map = new Map();
4
+ getOrCreate(key, factory) {
5
+ const existing = this.map.get(key);
6
+ // 1. In-flight → dedupe.
7
+ if (existing?.expiresAt === 0)
8
+ return existing.promise;
9
+ // 2. Valid cache entry → reuse.
10
+ if (existing && existing.expiresAt > Date.now())
11
+ return existing.promise;
12
+ // 3. Miss / expired → evict and run factory.
13
+ if (existing)
14
+ this.map.delete(key);
15
+ let resolveFn;
16
+ let rejectFn;
17
+ const promise = new Promise((resolve, reject) => {
18
+ resolveFn = resolve;
19
+ rejectFn = reject;
20
+ });
21
+ // Suppress unhandled rejection on the sentinel promise itself.
22
+ // Callers that attach after a failure will still receive the rejection
23
+ // via their own .catch / try-await — this only prevents the global warning
24
+ // for the case where no consumer is attached at the moment of rejection.
25
+ promise.catch(() => { });
26
+ // 🔥 Set sentinel BEFORE factory() so any concurrent getOrCreate()
27
+ // arriving during the async gap will dedupe on this promise.
28
+ this.map.set(key, { promise, expiresAt: 0 });
29
+ Promise.resolve()
30
+ .then(factory)
31
+ .then(({ value, ttlMs }) => {
32
+ resolveFn(value);
33
+ if (typeof ttlMs === "number" && ttlMs > 0) {
34
+ const resolved = Promise.resolve(value);
35
+ this.map.set(key, {
36
+ promise: resolved,
37
+ expiresAt: Date.now() + ttlMs,
38
+ });
39
+ }
40
+ else {
41
+ this.map.delete(key);
42
+ }
43
+ })
44
+ .catch((err) => {
45
+ rejectFn(err);
46
+ // Remove poisoned sentinel so the next caller retries.
47
+ this.map.delete(key);
48
+ });
49
+ return promise;
50
+ }
51
+ delete(key) { this.map.delete(key); }
52
+ clear() { this.map.clear(); }
53
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vizamodo/edge-cache-core",
3
- "version": "0.3.28",
3
+ "version": "0.3.30",
4
4
  "description": "Edge cache primitives for Cloudflare Workers (L1 memory + L2 edge cache)",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",