@b9g/cache 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,13 +1,13 @@
1
1
  # @b9g/cache
2
2
 
3
- Universal Cache API implementation providing CacheStorage and Cache interfaces across all JavaScript runtimes.
3
+ **Universal Cache API for ServiceWorker applications. Provides standard CacheStorage and Cache interfaces across all JavaScript runtimes.**
4
4
 
5
5
  ## Features
6
6
 
7
- - **Standard APIs**: Implements the Cache and CacheStorage APIs from service workers
8
- - **Multiple Backends**: Memory, filesystem, Redis, KV store implementations
7
+ - **ServiceWorker Cache API**: Standard `caches` global and Cache interface from ServiceWorker spec
8
+ - **Multiple Backends**: Memory, filesystem, Redis, KV store implementations
9
9
  - **Universal**: Same API works in browsers, Node.js, Bun, and edge platforms
10
- - **Request/Response**: HTTP-semantic caching with full Request/Response support
10
+ - **Request/Response Caching**: Full HTTP semantics with Request/Response objects
11
11
  - **Registry Pattern**: Named cache management with factory registration
12
12
 
13
13
  ## Installation
@@ -96,7 +96,23 @@ import { Router } from '@b9g/router';
96
96
  const router = new Router({ caches });
97
97
  ```
98
98
 
99
- ## Cache API Reference
99
+ ## Exports
100
+
101
+ ### Classes
102
+
103
+ - `Cache` - Abstract base class for cache implementations
104
+ - `CustomCacheStorage` - CacheStorage implementation with factory registration
105
+
106
+ ### Functions
107
+
108
+ - `generateCacheKey(request, options?)` - Generate a cache key from a Request
109
+
110
+ ### Types
111
+
112
+ - `CacheQueryOptions` - Options for cache query operations (ignoreSearch, ignoreMethod, ignoreVary)
113
+ - `CacheFactory` - Factory function type `(name: string) => Cache | Promise<Cache>`
114
+
115
+ ## API Reference
100
116
 
101
117
  ### Standard Cache Methods
102
118
 
@@ -186,13 +202,13 @@ caches.register('api', () => new MemoryCache('api'));
186
202
  const router = new Router({ caches });
187
203
 
188
204
  // Cache-aware middleware
189
- router.use(async (request, context, next) => {
205
+ router.use(async function* (request, context) {
190
206
  if (request.method === 'GET' && context.cache) {
191
207
  const cached = await context.cache.match(request);
192
208
  if (cached) return cached;
193
209
  }
194
210
 
195
- const response = await next();
211
+ const response = yield request;
196
212
 
197
213
  if (request.method === 'GET' && context.cache && response.ok) {
198
214
  await context.cache.put(request, response.clone());
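As context for the "Exports" section above, here is a minimal sketch of wiring the listed pieces together. It assumes the root entry point and the `./memory` subpath declared in the updated `package.json` below, and relies only on the constructors and method signatures shown in the type declarations further down.

```ts
import { CustomCacheStorage } from "@b9g/cache";
import { MemoryCache } from "@b9g/cache/memory";

// The factory receives the cache name and returns any Cache implementation.
const caches = new CustomCacheStorage(
  (name) => new MemoryCache(name, { maxEntries: 500 }),
);

// open() memoizes one instance per name; match() searches every opened cache.
const pages = await caches.open("pages");
await pages.put(
  new Request("https://example.com/"),
  new Response("<h1>hello</h1>", { headers: { "content-type": "text/html" } }),
);
const hit = await caches.match(new Request("https://example.com/"));
console.log(hit && (await hit.text())); // "<h1>hello</h1>"
```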
package/package.json CHANGED
@@ -1,16 +1,21 @@
1
1
  {
2
2
  "name": "@b9g/cache",
3
- "version": "0.1.2",
4
- "description": "Universal Cache API implementation",
3
+ "version": "0.1.4",
4
+ "description": "Universal Cache API for ServiceWorker applications. Provides standard CacheStorage and Cache interfaces across all JavaScript runtimes.",
5
5
  "keywords": [
6
6
  "cache",
7
7
  "storage",
8
+ "serviceworker",
9
+ "cachestorage",
8
10
  "web-standards",
9
- "universal"
11
+ "universal",
12
+ "memory",
13
+ "filesystem",
14
+ "shovel"
10
15
  ],
11
16
  "dependencies": {},
12
17
  "devDependencies": {
13
- "@b9g/libuild": "^0.1.10",
18
+ "@b9g/libuild": "^0.1.11",
14
19
  "bun-types": "latest"
15
20
  },
16
21
  "type": "module",
@@ -21,10 +26,6 @@
21
26
  "types": "./src/index.d.ts",
22
27
  "import": "./src/index.js"
23
28
  },
24
- "./cache": {
25
- "types": "./src/cache.d.ts",
26
- "import": "./src/cache.js"
27
- },
28
29
  "./memory": {
29
30
  "types": "./src/memory.d.ts",
30
31
  "import": "./src/memory.js"
@@ -34,18 +35,6 @@
34
35
  "import": "./src/postmessage.js"
35
36
  },
36
37
  "./package.json": "./package.json",
37
- "./cache.js": {
38
- "types": "./src/cache.d.ts",
39
- "import": "./src/cache.js"
40
- },
41
- "./cache-storage": {
42
- "types": "./src/cache-storage.d.ts",
43
- "import": "./src/cache-storage.js"
44
- },
45
- "./cache-storage.js": {
46
- "types": "./src/cache-storage.d.ts",
47
- "import": "./src/cache-storage.js"
48
- },
49
38
  "./index": {
50
39
  "types": "./src/index.d.ts",
51
40
  "import": "./src/index.js"
package/src/index.d.ts CHANGED
@@ -3,15 +3,109 @@
3
3
  *
4
4
  * Provides HTTP-aware caching with PostMessage coordination for worker environments
5
5
  */
6
- export { Cache, type CacheQueryOptions, generateCacheKey, cloneResponse } from "./cache.js";
7
- export { CustomCacheStorage, type CacheFactory } from "./cache-storage.js";
8
- export { MemoryCache, MemoryCacheManager, type MemoryCacheOptions } from "./memory.js";
9
- export { PostMessageCache, type PostMessageCacheOptions } from "./postmessage.js";
10
- import { MemoryCache, type MemoryCacheOptions } from "./memory.js";
11
6
  /**
12
- * Platform adapter factory function
13
- * Creates a MemoryCache instance with the given configuration
7
+ * Cache query options for matching requests
8
+ * Based on the Cache API specification
14
9
  */
15
- export declare function createCache(config?: MemoryCacheOptions & {
16
- name?: string;
17
- }): MemoryCache;
10
+ export interface CacheQueryOptions {
11
+ /** Ignore the search portion of the request URL */
12
+ ignoreSearch?: boolean;
13
+ /** Ignore the request method */
14
+ ignoreMethod?: boolean;
15
+ /** Ignore the Vary header */
16
+ ignoreVary?: boolean;
17
+ /** Custom cache name for scoped operations */
18
+ cacheName?: string;
19
+ }
20
+ /**
21
+ * Abstract Cache class implementing the Cache API interface
22
+ * Provides shared implementations for add() and addAll() while requiring
23
+ * concrete implementations to handle the core storage operations
24
+ */
25
+ export declare abstract class Cache {
26
+ /**
27
+ * Returns a Promise that resolves to the response associated with the first matching request
28
+ */
29
+ abstract match(request: Request, options?: CacheQueryOptions): Promise<Response | undefined>;
30
+ /**
31
+ * Puts a request/response pair into the cache
32
+ */
33
+ abstract put(request: Request, response: Response): Promise<void>;
34
+ /**
35
+ * Finds the cache entry whose key is the request, and if found, deletes it and returns true
36
+ */
37
+ abstract delete(request: Request, options?: CacheQueryOptions): Promise<boolean>;
38
+ /**
39
+ * Returns a Promise that resolves to an array of cache keys (Request objects)
40
+ */
41
+ abstract keys(request?: Request, options?: CacheQueryOptions): Promise<readonly Request[]>;
42
+ /**
43
+ * Takes a URL, retrieves it and adds the resulting response object to the cache
44
+ * Shared implementation using fetch() and put()
45
+ */
46
+ add(request: Request): Promise<void>;
47
+ /**
48
+ * Takes an array of URLs, retrieves them, and adds the resulting response objects to the cache
49
+ * Shared implementation using add()
50
+ */
51
+ addAll(requests: Request[]): Promise<void>;
52
+ /**
53
+ * Returns a Promise that resolves to an array of all matching responses
54
+ * Default implementation using keys() and match() - can be overridden for efficiency
55
+ */
56
+ matchAll(request?: Request, options?: CacheQueryOptions): Promise<readonly Response[]>;
57
+ }
58
+ /**
59
+ * Generate a cache key from a Request object
60
+ * Normalizes the request for consistent cache key generation
61
+ */
62
+ export declare function generateCacheKey(request: Request, options?: CacheQueryOptions): string;
63
+ /**
64
+ * Factory function for creating Cache instances based on cache name
65
+ */
66
+ export type CacheFactory = (name: string) => Cache | Promise<Cache>;
67
+ /**
68
+ * CustomCacheStorage implements CacheStorage interface with a configurable factory
69
+ * The factory function receives the cache name and can return different cache types
70
+ */
71
+ export declare class CustomCacheStorage {
72
+ #private;
73
+ constructor(factory: CacheFactory);
74
+ /**
75
+ * Matches a request across all caches
76
+ */
77
+ match(request: Request, options?: CacheQueryOptions): Promise<Response | undefined>;
78
+ /**
79
+ * Opens a cache with the given name
80
+ * Returns existing instance if already opened, otherwise creates a new one
81
+ */
82
+ open(name: string): Promise<Cache>;
83
+ /**
84
+ * Returns true if a cache with the given name exists (has been opened)
85
+ */
86
+ has(name: string): Promise<boolean>;
87
+ /**
88
+ * Deletes a cache with the given name
89
+ */
90
+ delete(name: string): Promise<boolean>;
91
+ /**
92
+ * Returns a list of all opened cache names
93
+ */
94
+ keys(): Promise<string[]>;
95
+ /**
96
+ * Get statistics about the cache storage
97
+ */
98
+ getStats(): {
99
+ openInstances: number;
100
+ cacheNames: string[];
101
+ };
102
+ /**
103
+ * Dispose of all cache instances
104
+ * Calls dispose() on each cache if it exists (e.g., RedisCache needs to close connections)
105
+ */
106
+ dispose(): Promise<void>;
107
+ /**
108
+ * Handle cache messages from worker threads (PostMessageCache coordination)
109
+ */
110
+ handleMessage(worker: any, message: any): Promise<void>;
111
+ }
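The newly declared `handleMessage()` above is the main-thread half of the PostMessageCache coordination implemented in `index.js` below. A hedged wiring sketch, assuming a Web-Worker-style worker (browser or Bun) whose "message" events carry the payload on `event.data`; the worker script path is illustrative:

```ts
import { CustomCacheStorage } from "@b9g/cache";
import { MemoryCache } from "@b9g/cache/memory";

const caches = new CustomCacheStorage((name) => new MemoryCache(name));
const worker = new Worker(new URL("./worker.js", import.meta.url), { type: "module" });

worker.addEventListener("message", (event: MessageEvent) => {
  const message = event.data;
  // Forward cache:* requests; handleMessage() replies with cache:response
  // or cache:error, keyed by the message's requestID.
  if (typeof message?.type === "string" && message.type.startsWith("cache:")) {
    void caches.handleMessage(worker, message);
  }
});
```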
package/src/index.js CHANGED
@@ -1,21 +1,187 @@
1
1
  /// <reference types="./index.d.ts" />
2
2
  // src/index.ts
3
- import { Cache, generateCacheKey, cloneResponse } from "./cache.js";
4
- import { CustomCacheStorage } from "./cache-storage.js";
5
- import { MemoryCache, MemoryCacheManager } from "./memory.js";
6
- import { PostMessageCache } from "./postmessage.js";
7
- import { MemoryCache as MemoryCache2 } from "./memory.js";
8
- function createCache(config = {}) {
9
- const name = config.name || "default";
10
- return new MemoryCache2(name, config);
3
+ var Cache = class {
4
+ /**
5
+ * Takes a URL, retrieves it and adds the resulting response object to the cache
6
+ * Shared implementation using fetch() and put()
7
+ */
8
+ async add(request) {
9
+ const response = await fetch(request);
10
+ if (!response.ok) {
11
+ throw new TypeError(
12
+ `Failed to fetch ${request.url}: ${response.status} ${response.statusText}`
13
+ );
14
+ }
15
+ await this.put(request, response);
16
+ }
17
+ /**
18
+ * Takes an array of URLs, retrieves them, and adds the resulting response objects to the cache
19
+ * Shared implementation using add()
20
+ */
21
+ async addAll(requests) {
22
+ await Promise.all(requests.map((request) => this.add(request)));
23
+ }
24
+ /**
25
+ * Returns a Promise that resolves to an array of all matching responses
26
+ * Default implementation using keys() and match() - can be overridden for efficiency
27
+ */
28
+ async matchAll(request, options) {
29
+ const keys = await this.keys(request, options);
30
+ const responses = [];
31
+ for (const key of keys) {
32
+ const response = await this.match(key, options);
33
+ if (response) {
34
+ responses.push(response);
35
+ }
36
+ }
37
+ return responses;
38
+ }
39
+ };
40
+ function generateCacheKey(request, options) {
41
+ const url = new URL(request.url);
42
+ if (options?.ignoreSearch) {
43
+ url.search = "";
44
+ }
45
+ const method = options?.ignoreMethod ? "GET" : request.method;
46
+ return `${method}:${url.href}`;
11
47
  }
48
+ var CustomCacheStorage = class {
49
+ #instances;
50
+ #factory;
51
+ constructor(factory) {
52
+ this.#instances = /* @__PURE__ */ new Map();
53
+ this.#factory = factory;
54
+ }
55
+ /**
56
+ * Matches a request across all caches
57
+ */
58
+ async match(request, options) {
59
+ for (const cache of this.#instances.values()) {
60
+ const response = await cache.match(request, options);
61
+ if (response) {
62
+ return response;
63
+ }
64
+ }
65
+ return void 0;
66
+ }
67
+ /**
68
+ * Opens a cache with the given name
69
+ * Returns existing instance if already opened, otherwise creates a new one
70
+ */
71
+ async open(name) {
72
+ const existingInstance = this.#instances.get(name);
73
+ if (existingInstance) {
74
+ return existingInstance;
75
+ }
76
+ const cache = await this.#factory(name);
77
+ this.#instances.set(name, cache);
78
+ return cache;
79
+ }
80
+ /**
81
+ * Returns true if a cache with the given name exists (has been opened)
82
+ */
83
+ async has(name) {
84
+ return this.#instances.has(name);
85
+ }
86
+ /**
87
+ * Deletes a cache with the given name
88
+ */
89
+ async delete(name) {
90
+ const instance = this.#instances.get(name);
91
+ if (instance) {
92
+ this.#instances.delete(name);
93
+ return true;
94
+ }
95
+ return false;
96
+ }
97
+ /**
98
+ * Returns a list of all opened cache names
99
+ */
100
+ async keys() {
101
+ return Array.from(this.#instances.keys());
102
+ }
103
+ /**
104
+ * Get statistics about the cache storage
105
+ */
106
+ getStats() {
107
+ return {
108
+ openInstances: this.#instances.size,
109
+ cacheNames: Array.from(this.#instances.keys())
110
+ };
111
+ }
112
+ /**
113
+ * Dispose of all cache instances
114
+ * Calls dispose() on each cache if it exists (e.g., RedisCache needs to close connections)
115
+ */
116
+ async dispose() {
117
+ const disposePromises = [];
118
+ for (const cache of this.#instances.values()) {
119
+ if (typeof cache.dispose === "function") {
120
+ disposePromises.push(cache.dispose());
121
+ }
122
+ }
123
+ await Promise.allSettled(disposePromises);
124
+ this.#instances.clear();
125
+ }
126
+ /**
127
+ * Handle cache messages from worker threads (PostMessageCache coordination)
128
+ */
129
+ async handleMessage(worker, message) {
130
+ const { type, requestID, cacheName } = message;
131
+ try {
132
+ const cache = await this.open(cacheName);
133
+ let result;
134
+ switch (type) {
135
+ case "cache:match": {
136
+ const req = new Request(message.request.url, message.request);
137
+ const response = await cache.match(req, message.options);
138
+ result = response ? {
139
+ status: response.status,
140
+ statusText: response.statusText,
141
+ headers: Object.fromEntries(response.headers),
142
+ body: await response.text()
143
+ } : void 0;
144
+ break;
145
+ }
146
+ case "cache:put": {
147
+ const req = new Request(message.request.url, message.request);
148
+ const res = new Response(message.response.body, message.response);
149
+ await cache.put(req, res);
150
+ result = true;
151
+ break;
152
+ }
153
+ case "cache:delete": {
154
+ const req = new Request(message.request.url, message.request);
155
+ result = await cache.delete(req, message.options);
156
+ break;
157
+ }
158
+ case "cache:keys": {
159
+ const req = message.request ? new Request(message.request.url, message.request) : void 0;
160
+ const keys = await cache.keys(req, message.options);
161
+ result = keys.map((r) => ({
162
+ url: r.url,
163
+ method: r.method,
164
+ headers: Object.fromEntries(r.headers.entries())
165
+ }));
166
+ break;
167
+ }
168
+ case "cache:clear":
169
+ await cache.clear?.();
170
+ result = true;
171
+ break;
172
+ }
173
+ worker.postMessage({ type: "cache:response", requestID, result });
174
+ } catch (error) {
175
+ worker.postMessage({
176
+ type: "cache:error",
177
+ requestID,
178
+ error: error.message
179
+ });
180
+ }
181
+ }
182
+ };
12
183
  export {
13
184
  Cache,
14
185
  CustomCacheStorage,
15
- MemoryCache,
16
- MemoryCacheManager,
17
- PostMessageCache,
18
- cloneResponse,
19
- createCache,
20
186
  generateCacheKey
21
187
  };
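Because the root module now inlines what `cache.js` and `cache-storage.js` used to provide, and its export list no longer includes the backends or the `createCache`/`cloneResponse` helpers, imports written against 0.1.2 need a small adjustment. A hypothetical before/after, assuming the subpath exports declared in `package.json`:

```ts
// 0.1.2 (old): everything was reachable from the root, or from the since
// removed "./cache" and "./cache-storage" subpaths.
// import { createCache, cloneResponse, MemoryCache } from "@b9g/cache";

// 0.1.4 (new): the root exports only the core primitives...
import { Cache, CustomCacheStorage, generateCacheKey } from "@b9g/cache";
// ...and each backend comes from its own subpath.
import { MemoryCache } from "@b9g/cache/memory";
import { PostMessageCache } from "@b9g/cache/postmessage";

// createCache(config) has no direct replacement; constructing the backend
// directly is the closest equivalent. cloneResponse(r) is covered by r.clone().
const cache = new MemoryCache("default", { maxEntries: 1000 });
```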
package/src/memory.d.ts CHANGED
@@ -1,23 +1,17 @@
1
- import { Cache, type CacheQueryOptions } from "./cache.js";
1
+ import { Cache, type CacheQueryOptions } from "./index.js";
2
2
  /**
3
3
  * Configuration options for MemoryCache
4
4
  */
5
5
  export interface MemoryCacheOptions {
6
6
  /** Maximum number of entries to store */
7
7
  maxEntries?: number;
8
- /** Maximum age of entries in milliseconds */
9
- maxAge?: number;
10
8
  }
11
9
  /**
12
10
  * In-memory cache implementation using Map for storage
13
11
  * Supports LRU eviction and TTL expiration
14
12
  */
15
13
  export declare class MemoryCache extends Cache {
16
- private name;
17
- private options;
18
- private storage;
19
- private accessOrder;
20
- private accessCounter;
14
+ #private;
21
15
  constructor(name: string, options?: MemoryCacheOptions);
22
16
  /**
23
17
  * Find a cached response for the given request
@@ -34,7 +28,7 @@ export declare class MemoryCache extends Cache {
34
28
  /**
35
29
  * Get all stored requests, optionally filtered by a request pattern
36
30
  */
37
- keys(request?: Request, options?: CacheQueryOptions): Promise<Request[]>;
31
+ keys(request?: Request, options?: CacheQueryOptions): Promise<readonly Request[]>;
38
32
  /**
39
33
  * Clear all entries from the cache
40
34
  */
@@ -46,77 +40,6 @@ export declare class MemoryCache extends Cache {
46
40
  name: string;
47
41
  size: number;
48
42
  maxEntries: number;
49
- maxAge: number;
50
43
  hitRate: number;
51
44
  };
52
- /**
53
- * Dispose of the cache and clean up resources
54
- */
55
- dispose(): Promise<void>;
56
- /**
57
- * Check if a cache entry has expired
58
- */
59
- private isExpired;
60
- /**
61
- * Check if a response should be cached
62
- */
63
- private isCacheable;
64
- /**
65
- * Enforce maximum entry limits using LRU eviction
66
- */
67
- private enforceMaxEntries;
68
- }
69
- /**
70
- * Memory Cache Manager for Main Thread
71
- *
72
- * Coordinates MemoryCache operations across Worker threads by managing
73
- * shared MemoryCache instances and handling postMessage requests.
74
- *
75
- * Only MemoryCache needs coordination since it stores data in process memory.
76
- * Other cache types can be used directly by workers without coordination.
77
- */
78
- interface WorkerLike {
79
- postMessage(value: any): void;
80
- on(event: string, listener: (data: any) => void): void;
81
- }
82
- interface CacheMessage {
83
- type: string;
84
- requestId: string;
85
- cacheName: string;
86
- request?: SerializedRequest;
87
- response?: SerializedResponse;
88
- options?: any;
89
- }
90
- interface SerializedRequest {
91
- url: string;
92
- method: string;
93
- headers: Record<string, string>;
94
- body?: string;
95
- }
96
- interface SerializedResponse {
97
- status: number;
98
- statusText: string;
99
- headers: Record<string, string>;
100
- body: string;
101
- }
102
- export declare class MemoryCacheManager {
103
- private memoryCaches;
104
- /**
105
- * Handle memory cache-related message from a Worker
106
- */
107
- handleMessage(worker: WorkerLike, message: CacheMessage): Promise<void>;
108
- /**
109
- * Get or create a MemoryCache instance
110
- */
111
- private getMemoryCache;
112
- private handleMatch;
113
- private handlePut;
114
- private handleDelete;
115
- private handleKeys;
116
- private handleClear;
117
- /**
118
- * Dispose of all memory caches
119
- */
120
- dispose(): Promise<void>;
121
45
  }
122
- export {};
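`MemoryCache` above extends the abstract `Cache` from the root module, which only requires the four abstract members (`match`, `put`, `delete`, `keys`); `add`, `addAll`, and `matchAll` are inherited. A minimal illustrative backend against that contract (not part of the package):

```ts
import { Cache, generateCacheKey, type CacheQueryOptions } from "@b9g/cache";

// Toy backend: keeps entries in a plain Map, keyed the same way MemoryCache
// does, with no eviction or expiration.
class MapCache extends Cache {
  #entries = new Map<string, { request: Request; response: Response }>();

  async match(request: Request, options?: CacheQueryOptions) {
    return this.#entries.get(generateCacheKey(request, options))?.response.clone();
  }

  async put(request: Request, response: Response) {
    this.#entries.set(generateCacheKey(request), {
      request: request.clone(),
      response: response.clone(),
    });
  }

  async delete(request: Request, options?: CacheQueryOptions) {
    return this.#entries.delete(generateCacheKey(request, options));
  }

  async keys(_request?: Request, _options?: CacheQueryOptions): Promise<readonly Request[]> {
    return Array.from(this.#entries.values(), (entry) => entry.request);
  }
}
```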
package/src/memory.js CHANGED
@@ -1,64 +1,64 @@
1
1
  /// <reference types="./memory.d.ts" />
2
2
  // src/memory.ts
3
- import {
4
- Cache,
5
- generateCacheKey,
6
- cloneResponse
7
- } from "./cache.js";
3
+ import { Cache, generateCacheKey } from "./index.js";
8
4
  var MemoryCache = class extends Cache {
5
+ #storage;
6
+ #accessOrder;
7
+ #accessCounter;
8
+ #name;
9
+ #options;
9
10
  constructor(name, options = {}) {
10
11
  super();
11
- this.name = name;
12
- this.options = options;
12
+ this.#storage = /* @__PURE__ */ new Map();
13
+ this.#accessOrder = /* @__PURE__ */ new Map();
14
+ this.#accessCounter = 0;
15
+ this.#name = name;
16
+ this.#options = options;
13
17
  }
14
- storage = /* @__PURE__ */ new Map();
15
- accessOrder = /* @__PURE__ */ new Map();
16
- // For LRU tracking
17
- accessCounter = 0;
18
18
  /**
19
19
  * Find a cached response for the given request
20
20
  */
21
21
  async match(request, options) {
22
22
  const key = generateCacheKey(request, options);
23
- const entry = this.storage.get(key);
23
+ const entry = this.#storage.get(key);
24
24
  if (!entry) {
25
25
  return void 0;
26
26
  }
27
- if (this.isExpired(entry)) {
28
- this.storage.delete(key);
29
- this.accessOrder.delete(key);
27
+ if (this.#isExpired(entry)) {
28
+ this.#storage.delete(key);
29
+ this.#accessOrder.delete(key);
30
30
  return void 0;
31
31
  }
32
- this.accessOrder.set(key, ++this.accessCounter);
33
- return cloneResponse(entry.response);
32
+ this.#accessOrder.set(key, ++this.#accessCounter);
33
+ return entry.response.clone();
34
34
  }
35
35
  /**
36
36
  * Store a request/response pair in the cache
37
37
  */
38
38
  async put(request, response) {
39
39
  const key = generateCacheKey(request);
40
- if (!this.isCacheable(response)) {
40
+ if (!this.#isCacheable(response)) {
41
41
  return;
42
42
  }
43
43
  const clonedRequest = request.clone();
44
- const clonedResponse = await cloneResponse(response);
44
+ const clonedResponse = response.clone();
45
45
  const entry = {
46
46
  request: clonedRequest,
47
47
  response: clonedResponse,
48
48
  timestamp: Date.now()
49
49
  };
50
- this.storage.set(key, entry);
51
- this.accessOrder.set(key, ++this.accessCounter);
52
- this.enforceMaxEntries();
50
+ this.#storage.set(key, entry);
51
+ this.#accessOrder.set(key, ++this.#accessCounter);
52
+ this.#enforceMaxEntries();
53
53
  }
54
54
  /**
55
55
  * Delete matching entries from the cache
56
56
  */
57
57
  async delete(request, options) {
58
58
  const key = generateCacheKey(request, options);
59
- const deleted = this.storage.delete(key);
59
+ const deleted = this.#storage.delete(key);
60
60
  if (deleted) {
61
- this.accessOrder.delete(key);
61
+ this.#accessOrder.delete(key);
62
62
  }
63
63
  return deleted;
64
64
  }
@@ -67,8 +67,8 @@ var MemoryCache = class extends Cache {
67
67
  */
68
68
  async keys(request, options) {
69
69
  const keys = [];
70
- for (const [_, entry] of this.storage) {
71
- if (this.isExpired(entry)) {
70
+ for (const [_, entry] of this.#storage) {
71
+ if (this.#isExpired(entry)) {
72
72
  continue;
73
73
  }
74
74
  if (!request) {
@@ -87,42 +87,41 @@ var MemoryCache = class extends Cache {
87
87
  * Clear all entries from the cache
88
88
  */
89
89
  async clear() {
90
- this.storage.clear();
91
- this.accessOrder.clear();
92
- this.accessCounter = 0;
90
+ this.#storage.clear();
91
+ this.#accessOrder.clear();
92
+ this.#accessCounter = 0;
93
93
  }
94
94
  /**
95
95
  * Get cache statistics
96
96
  */
97
97
  getStats() {
98
98
  return {
99
- name: this.name,
100
- size: this.storage.size,
101
- maxEntries: this.options.maxEntries,
102
- maxAge: this.options.maxAge,
99
+ name: this.#name,
100
+ size: this.#storage.size,
101
+ maxEntries: this.#options.maxEntries,
103
102
  hitRate: 0
104
103
  // Could be implemented with additional tracking
105
104
  };
106
105
  }
107
106
  /**
108
- * Dispose of the cache and clean up resources
107
+ * Check if a cache entry has expired based on Cache-Control header
109
108
  */
110
- async dispose() {
111
- await this.clear();
112
- }
113
- /**
114
- * Check if a cache entry has expired
115
- */
116
- isExpired(entry) {
117
- if (!this.options.maxAge) {
109
+ #isExpired(entry) {
110
+ const cacheControl = entry.response.headers.get("cache-control");
111
+ if (!cacheControl) {
112
+ return false;
113
+ }
114
+ const maxAgeMatch = cacheControl.match(/max-age=(\d+)/);
115
+ if (!maxAgeMatch) {
118
116
  return false;
119
117
  }
120
- return Date.now() - entry.timestamp > this.options.maxAge;
118
+ const maxAge = parseInt(maxAgeMatch[1], 10) * 1e3;
119
+ return Date.now() - entry.timestamp > maxAge;
121
120
  }
122
121
  /**
123
122
  * Check if a response should be cached
124
123
  */
125
- isCacheable(response) {
124
+ #isCacheable(response) {
126
125
  if (!response.ok) {
127
126
  return false;
128
127
  }
@@ -137,158 +136,21 @@ var MemoryCache = class extends Cache {
137
136
  /**
138
137
  * Enforce maximum entry limits using LRU eviction
139
138
  */
140
- enforceMaxEntries() {
141
- if (!this.options.maxEntries || this.storage.size <= this.options.maxEntries) {
139
+ #enforceMaxEntries() {
140
+ if (!this.#options.maxEntries || this.#storage.size <= this.#options.maxEntries) {
142
141
  return;
143
142
  }
144
- const entries = Array.from(this.accessOrder.entries()).sort((a, b) => a[1] - b[1]);
145
- const toRemove = this.storage.size - this.options.maxEntries;
143
+ const entries = Array.from(this.#accessOrder.entries()).sort(
144
+ (a, b) => a[1] - b[1]
145
+ );
146
+ const toRemove = this.#storage.size - this.#options.maxEntries;
146
147
  for (let i = 0; i < toRemove; i++) {
147
148
  const [key] = entries[i];
148
- this.storage.delete(key);
149
- this.accessOrder.delete(key);
150
- }
151
- }
152
- };
153
- var MemoryCacheManager = class {
154
- memoryCaches = /* @__PURE__ */ new Map();
155
- /**
156
- * Handle memory cache-related message from a Worker
157
- */
158
- async handleMessage(worker, message) {
159
- const { type, requestId } = message;
160
- try {
161
- let result;
162
- switch (type) {
163
- case "cache:match":
164
- result = await this.handleMatch(message);
165
- break;
166
- case "cache:put":
167
- result = await this.handlePut(message);
168
- break;
169
- case "cache:delete":
170
- result = await this.handleDelete(message);
171
- break;
172
- case "cache:keys":
173
- result = await this.handleKeys(message);
174
- break;
175
- case "cache:clear":
176
- result = await this.handleClear(message);
177
- break;
178
- default:
179
- throw new Error(`Unknown cache operation: ${type}`);
180
- }
181
- worker.postMessage({
182
- type: "cache:response",
183
- requestId,
184
- result
185
- });
186
- } catch (error) {
187
- worker.postMessage({
188
- type: "cache:error",
189
- requestId,
190
- error: error.message
191
- });
192
- }
193
- }
194
- /**
195
- * Get or create a MemoryCache instance
196
- */
197
- getMemoryCache(name, options) {
198
- if (!this.memoryCaches.has(name)) {
199
- this.memoryCaches.set(name, new MemoryCache(name, options));
149
+ this.#storage.delete(key);
150
+ this.#accessOrder.delete(key);
200
151
  }
201
- return this.memoryCaches.get(name);
202
- }
203
- async handleMatch(message) {
204
- const { cacheName, request, options } = message;
205
- if (!request)
206
- throw new Error("Request is required for match operation");
207
- const cache = this.getMemoryCache(cacheName);
208
- const req = new Request(request.url, {
209
- method: request.method,
210
- headers: request.headers,
211
- body: request.body
212
- });
213
- const response = await cache.match(req, options);
214
- if (!response) {
215
- return void 0;
216
- }
217
- return {
218
- status: response.status,
219
- statusText: response.statusText,
220
- headers: Object.fromEntries(response.headers),
221
- body: await response.text()
222
- };
223
- }
224
- async handlePut(message) {
225
- const { cacheName, request, response } = message;
226
- if (!request || !response)
227
- throw new Error("Request and response are required for put operation");
228
- const cache = this.getMemoryCache(cacheName);
229
- const req = new Request(request.url, {
230
- method: request.method,
231
- headers: request.headers,
232
- body: request.body
233
- });
234
- const res = new Response(response.body, {
235
- status: response.status,
236
- statusText: response.statusText,
237
- headers: response.headers
238
- });
239
- await cache.put(req, res);
240
- return true;
241
- }
242
- async handleDelete(message) {
243
- const { cacheName, request, options } = message;
244
- if (!request)
245
- throw new Error("Request is required for delete operation");
246
- const cache = this.getMemoryCache(cacheName);
247
- const req = new Request(request.url, {
248
- method: request.method,
249
- headers: request.headers,
250
- body: request.body
251
- });
252
- return await cache.delete(req, options);
253
- }
254
- async handleKeys(message) {
255
- const { cacheName, request, options } = message;
256
- const cache = this.getMemoryCache(cacheName);
257
- let req;
258
- if (request) {
259
- req = new Request(request.url, {
260
- method: request.method,
261
- headers: request.headers,
262
- body: request.body
263
- });
264
- }
265
- const keys = await cache.keys(req, options);
266
- return keys.map((r) => ({
267
- url: r.url,
268
- method: r.method,
269
- headers: Object.fromEntries(r.headers),
270
- body: void 0
271
- // Keys typically don't need body
272
- }));
273
- }
274
- async handleClear(message) {
275
- const { cacheName } = message;
276
- const cache = this.getMemoryCache(cacheName);
277
- await cache.clear();
278
- return true;
279
- }
280
- /**
281
- * Dispose of all memory caches
282
- */
283
- async dispose() {
284
- const disposePromises = Array.from(this.memoryCaches.values()).map(
285
- (cache) => cache.dispose()
286
- );
287
- await Promise.all(disposePromises);
288
- this.memoryCaches.clear();
289
152
  }
290
153
  };
291
154
  export {
292
- MemoryCache,
293
- MemoryCacheManager
155
+ MemoryCache
294
156
  };
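A short sketch of the behavior the rewritten implementation above implies: freshness now comes from each cached response's own `Cache-Control: max-age` directive instead of a constructor-level `maxAge`, while `maxEntries` still evicts the least recently used entry. URLs and numbers are illustrative.

```ts
import { MemoryCache } from "@b9g/cache/memory";

const cache = new MemoryCache("pages", { maxEntries: 2 });

// This entry carries its own freshness lifetime and expires after ~60s.
await cache.put(
  new Request("https://example.com/a"),
  new Response("a", { headers: { "cache-control": "max-age=60" } }),
);
console.log(await cache.match(new Request("https://example.com/a"))); // Response (still fresh)

// Entries without Cache-Control never expire by age, but they count toward
// maxEntries, so the least recently used entry ("a" here) gets evicted.
await cache.put(new Request("https://example.com/b"), new Response("b"));
await cache.put(new Request("https://example.com/c"), new Response("c"));
console.log(await cache.match(new Request("https://example.com/a"))); // undefined (evicted)
```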
package/src/postmessage.d.ts CHANGED
@@ -1,29 +1,23 @@
1
- import { Cache, type CacheQueryOptions } from "./cache.js";
1
+ import { Cache, type CacheQueryOptions } from "./index.js";
2
2
  /**
3
3
  * Configuration options for PostMessageCache
4
4
  */
5
5
  export interface PostMessageCacheOptions {
6
6
  /** Maximum number of entries to store */
7
7
  maxEntries?: number;
8
- /** Maximum age of entries in milliseconds */
9
- maxAge?: number;
8
+ /** Timeout for cache operations in milliseconds (default: 30000) */
9
+ timeout?: number;
10
10
  }
11
11
  /**
12
12
  * Worker-side cache that forwards operations to main thread via postMessage
13
13
  * Only used for MemoryCache in multi-worker environments
14
14
  */
15
15
  export declare class PostMessageCache extends Cache {
16
- private name;
17
- private options;
18
- private requestId;
19
- private pendingRequests;
20
- constructor(name: string, options?: PostMessageCacheOptions);
21
- private handleResponse;
22
- private sendRequest;
16
+ #private;
17
+ constructor(name: string, _options?: PostMessageCacheOptions);
23
18
  match(request: Request, options?: CacheQueryOptions): Promise<Response | undefined>;
24
19
  put(request: Request, response: Response): Promise<void>;
25
20
  delete(request: Request, options?: CacheQueryOptions): Promise<boolean>;
26
- keys(request?: Request, options?: CacheQueryOptions): Promise<Request[]>;
21
+ keys(request?: Request, options?: CacheQueryOptions): Promise<readonly Request[]>;
27
22
  clear(): Promise<void>;
28
- dispose(): Promise<void>;
29
23
  }
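For orientation, a sketch of how the worker-side class declared above is used, assuming a `./postmessage` subpath export analogous to the `./memory` one in `package.json` and a Web-Worker-style environment where `self` exists. The main thread must route the posted `cache:*` messages into `CustomCacheStorage.handleMessage()` (see the wiring sketch after the index.d.ts section), otherwise every operation rejects with the built-in 30 second timeout.

```ts
// worker.ts: all operations are forwarded to the main thread over
// postMessage and resolve when the matching cache:response arrives.
import { PostMessageCache } from "@b9g/cache/postmessage";

const assets = new PostMessageCache("assets");

export async function cachedFetch(url: string): Promise<Response> {
  const request = new Request(url);
  const hit = await assets.match(request);
  if (hit) {
    return hit;
  }

  const response = await fetch(request);
  if (response.ok) {
    await assets.put(request, response.clone());
  }

  return response;
}
```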
package/src/postmessage.js CHANGED
@@ -1,57 +1,61 @@
1
1
  /// <reference types="./postmessage.d.ts" />
2
2
  // src/postmessage.ts
3
- import { Cache } from "./cache.js";
4
- var isMainThread = typeof self === "undefined";
5
- var parentPort = typeof self !== "undefined" ? self : null;
6
- var PostMessageCache = class extends Cache {
7
- constructor(name, options = {}) {
8
- super();
9
- this.name = name;
10
- this.options = options;
11
- if (isMainThread) {
12
- throw new Error(
13
- "PostMessageCache should only be used in worker threads"
14
- );
15
- }
16
- if (parentPort) {
17
- parentPort.on("message", (message) => {
18
- if (message.type === "cache:response" || message.type === "cache:error") {
19
- this.handleResponse(message);
20
- }
21
- });
22
- }
23
- }
24
- requestId = 0;
25
- pendingRequests = /* @__PURE__ */ new Map();
26
- handleResponse(message) {
27
- const pending = this.pendingRequests.get(message.requestId);
28
- if (pending) {
29
- this.pendingRequests.delete(message.requestId);
30
- if (message.type === "cache:error") {
31
- pending.reject(new Error(message.error));
32
- } else {
33
- pending.resolve(message.result);
3
+ import { Cache } from "./index.js";
4
+ function getParentPort() {
5
+ return typeof self !== "undefined" ? self : null;
6
+ }
7
+ var messageHandlerSetup = false;
8
+ var pendingRequestsRegistry = /* @__PURE__ */ new Map();
9
+ function setupMessageHandler() {
10
+ if (messageHandlerSetup)
11
+ return;
12
+ messageHandlerSetup = true;
13
+ const parentPort = getParentPort();
14
+ if (parentPort && parentPort.addEventListener) {
15
+ parentPort.addEventListener("message", (event) => {
16
+ const message = event.data;
17
+ if (message.type === "cache:response" || message.type === "cache:error") {
18
+ handleCacheResponse(message);
34
19
  }
20
+ });
21
+ }
22
+ }
23
+ function handleCacheResponse(message) {
24
+ const pending = pendingRequestsRegistry.get(message.requestID);
25
+ if (pending) {
26
+ pendingRequestsRegistry.delete(message.requestID);
27
+ if (message.type === "cache:error") {
28
+ pending.reject(new Error(message.error));
29
+ } else {
30
+ pending.resolve(message.result);
35
31
  }
36
32
  }
37
- async sendRequest(type, data) {
33
+ }
34
+ var globalRequestID = 0;
35
+ var PostMessageCache = class extends Cache {
36
+ #name;
37
+ constructor(name, _options = {}) {
38
+ super();
39
+ this.#name = name;
40
+ setupMessageHandler();
41
+ }
42
+ async #sendRequest(type, data) {
43
+ const parentPort = getParentPort();
38
44
  if (!parentPort) {
39
- throw new Error(
40
- "PostMessageCache can only be used in worker threads"
41
- );
45
+ throw new Error("PostMessageCache can only be used in worker threads");
42
46
  }
43
- const requestId = ++this.requestId;
47
+ const requestID = ++globalRequestID;
44
48
  return new Promise((resolve, reject) => {
45
- this.pendingRequests.set(requestId, { resolve, reject });
49
+ pendingRequestsRegistry.set(requestID, { resolve, reject });
46
50
  parentPort.postMessage({
47
51
  type,
48
- requestId,
49
- cacheName: this.name,
52
+ requestID,
53
+ cacheName: this.#name,
50
54
  ...data
51
55
  });
52
56
  setTimeout(() => {
53
- if (this.pendingRequests.has(requestId)) {
54
- this.pendingRequests.delete(requestId);
57
+ if (pendingRequestsRegistry.has(requestID)) {
58
+ pendingRequestsRegistry.delete(requestID);
55
59
  reject(new Error("Cache operation timeout"));
56
60
  }
57
61
  }, 3e4);
@@ -64,7 +68,7 @@ var PostMessageCache = class extends Cache {
64
68
  headers: Object.fromEntries(request.headers),
65
69
  body: request.method !== "GET" && request.method !== "HEAD" ? await request.text() : void 0
66
70
  };
67
- const response = await this.sendRequest("cache:match", {
71
+ const response = await this.#sendRequest("cache:match", {
68
72
  request: serializedRequest,
69
73
  options
70
74
  });
@@ -90,7 +94,7 @@ var PostMessageCache = class extends Cache {
90
94
  headers: Object.fromEntries(response.headers),
91
95
  body: await response.clone().text()
92
96
  };
93
- await this.sendRequest("cache:put", {
97
+ await this.#sendRequest("cache:put", {
94
98
  request: serializedRequest,
95
99
  response: serializedResponse
96
100
  });
@@ -102,7 +106,7 @@ var PostMessageCache = class extends Cache {
102
106
  headers: Object.fromEntries(request.headers),
103
107
  body: request.method !== "GET" && request.method !== "HEAD" ? await request.text() : void 0
104
108
  };
105
- return await this.sendRequest("cache:delete", {
109
+ return await this.#sendRequest("cache:delete", {
106
110
  request: serializedRequest,
107
111
  options
108
112
  });
@@ -117,7 +121,7 @@ var PostMessageCache = class extends Cache {
117
121
  body: request.method !== "GET" && request.method !== "HEAD" ? await request.text() : void 0
118
122
  };
119
123
  }
120
- const keys = await this.sendRequest("cache:keys", {
124
+ const keys = await this.#sendRequest("cache:keys", {
121
125
  request: serializedRequest,
122
126
  options
123
127
  });
@@ -130,11 +134,7 @@ var PostMessageCache = class extends Cache {
130
134
  );
131
135
  }
132
136
  async clear() {
133
- await this.sendRequest("cache:clear", {});
134
- }
135
- async dispose() {
136
- await this.clear();
137
- this.pendingRequests.clear();
137
+ await this.#sendRequest("cache:clear", {});
138
138
  }
139
139
  };
140
140
  export {
package/src/cache-storage.d.ts DELETED
@@ -1,44 +0,0 @@
1
- import type { Cache } from "./cache.js";
2
- /**
3
- * Factory function for creating Cache instances based on cache name
4
- */
5
- export type CacheFactory = (name: string) => Cache | Promise<Cache>;
6
- /**
7
- * CustomCacheStorage implements CacheStorage interface with a configurable factory
8
- * The factory function receives the cache name and can return different cache types
9
- */
10
- export declare class CustomCacheStorage {
11
- private factory;
12
- private instances;
13
- constructor(factory: CacheFactory);
14
- /**
15
- * Opens a cache with the given name
16
- * Returns existing instance if already opened, otherwise creates a new one
17
- */
18
- open(name: string): Promise<Cache>;
19
- /**
20
- * Returns true if a cache with the given name exists (has been opened)
21
- */
22
- has(name: string): Promise<boolean>;
23
- /**
24
- * Deletes a cache with the given name
25
- * Disposes of the instance if it exists
26
- */
27
- delete(name: string): Promise<boolean>;
28
- /**
29
- * Returns a list of all opened cache names
30
- */
31
- keys(): Promise<string[]>;
32
- /**
33
- * Get statistics about the cache storage
34
- */
35
- getStats(): {
36
- openInstances: number;
37
- cacheNames: string[];
38
- };
39
- /**
40
- * Dispose of all open cache instances
41
- * Useful for cleanup during shutdown
42
- */
43
- dispose(): Promise<void>;
44
- }
package/src/cache-storage.js DELETED
@@ -1,74 +0,0 @@
1
- /// <reference types="./cache-storage.d.ts" />
2
- // src/cache-storage.ts
3
- var CustomCacheStorage = class {
4
- constructor(factory) {
5
- this.factory = factory;
6
- }
7
- instances = /* @__PURE__ */ new Map();
8
- /**
9
- * Opens a cache with the given name
10
- * Returns existing instance if already opened, otherwise creates a new one
11
- */
12
- async open(name) {
13
- const existingInstance = this.instances.get(name);
14
- if (existingInstance) {
15
- return existingInstance;
16
- }
17
- const cache = await this.factory(name);
18
- this.instances.set(name, cache);
19
- return cache;
20
- }
21
- /**
22
- * Returns true if a cache with the given name exists (has been opened)
23
- */
24
- async has(name) {
25
- return this.instances.has(name);
26
- }
27
- /**
28
- * Deletes a cache with the given name
29
- * Disposes of the instance if it exists
30
- */
31
- async delete(name) {
32
- const instance = this.instances.get(name);
33
- if (instance) {
34
- if (instance.dispose) {
35
- await instance.dispose();
36
- }
37
- this.instances.delete(name);
38
- return true;
39
- }
40
- return false;
41
- }
42
- /**
43
- * Returns a list of all opened cache names
44
- */
45
- async keys() {
46
- return Array.from(this.instances.keys());
47
- }
48
- /**
49
- * Get statistics about the cache storage
50
- */
51
- getStats() {
52
- return {
53
- openInstances: this.instances.size,
54
- cacheNames: Array.from(this.instances.keys())
55
- };
56
- }
57
- /**
58
- * Dispose of all open cache instances
59
- * Useful for cleanup during shutdown
60
- */
61
- async dispose() {
62
- const disposePromises = [];
63
- for (const [_name, instance] of this.instances) {
64
- if (instance.dispose) {
65
- disposePromises.push(instance.dispose());
66
- }
67
- }
68
- await Promise.all(disposePromises);
69
- this.instances.clear();
70
- }
71
- };
72
- export {
73
- CustomCacheStorage
74
- };
package/src/cache.d.ts DELETED
@@ -1,66 +0,0 @@
1
- /**
2
- * Cache query options for matching requests
3
- * Based on the Cache API specification
4
- */
5
- export interface CacheQueryOptions {
6
- /** Ignore the search portion of the request URL */
7
- ignoreSearch?: boolean;
8
- /** Ignore the request method */
9
- ignoreMethod?: boolean;
10
- /** Ignore the Vary header */
11
- ignoreVary?: boolean;
12
- /** Custom cache name for scoped operations */
13
- cacheName?: string;
14
- }
15
- /**
16
- * Abstract Cache class implementing the Cache API interface
17
- * Provides shared implementations for add() and addAll() while requiring
18
- * concrete implementations to handle the core storage operations
19
- */
20
- export declare abstract class Cache {
21
- /**
22
- * Returns a Promise that resolves to the response associated with the first matching request
23
- */
24
- abstract match(request: Request, options?: CacheQueryOptions): Promise<Response | undefined>;
25
- /**
26
- * Puts a request/response pair into the cache
27
- */
28
- abstract put(request: Request, response: Response): Promise<void>;
29
- /**
30
- * Finds the cache entry whose key is the request, and if found, deletes it and returns true
31
- */
32
- abstract delete(request: Request, options?: CacheQueryOptions): Promise<boolean>;
33
- /**
34
- * Returns a Promise that resolves to an array of cache keys (Request objects)
35
- */
36
- abstract keys(request?: Request, options?: CacheQueryOptions): Promise<Request[]>;
37
- /**
38
- * Takes a URL, retrieves it and adds the resulting response object to the cache
39
- * Shared implementation using fetch() and put()
40
- */
41
- add(request: Request): Promise<void>;
42
- /**
43
- * Takes an array of URLs, retrieves them, and adds the resulting response objects to the cache
44
- * Shared implementation using add()
45
- */
46
- addAll(requests: Request[]): Promise<void>;
47
- /**
48
- * Returns a Promise that resolves to an array of all matching responses
49
- * Default implementation using keys() and match() - can be overridden for efficiency
50
- */
51
- matchAll(request?: Request, options?: CacheQueryOptions): Promise<Response[]>;
52
- /**
53
- * Optional cleanup method for implementations that need resource disposal
54
- */
55
- dispose?(): Promise<void>;
56
- }
57
- /**
58
- * Generate a cache key from a Request object
59
- * Normalizes the request for consistent cache key generation
60
- */
61
- export declare function generateCacheKey(request: Request, options?: CacheQueryOptions): string;
62
- /**
63
- * Clone a Response object for storage
64
- * Responses can only be consumed once, so we need to clone them for caching
65
- */
66
- export declare function cloneResponse(response: Response): Promise<Response>;
package/src/cache.js DELETED
@@ -1,55 +0,0 @@
1
- /// <reference types="./cache.d.ts" />
2
- // src/cache.ts
3
- var Cache = class {
4
- /**
5
- * Takes a URL, retrieves it and adds the resulting response object to the cache
6
- * Shared implementation using fetch() and put()
7
- */
8
- async add(request) {
9
- const response = await fetch(request);
10
- if (!response.ok) {
11
- throw new TypeError(
12
- `Failed to fetch ${request.url}: ${response.status} ${response.statusText}`
13
- );
14
- }
15
- await this.put(request, response);
16
- }
17
- /**
18
- * Takes an array of URLs, retrieves them, and adds the resulting response objects to the cache
19
- * Shared implementation using add()
20
- */
21
- async addAll(requests) {
22
- await Promise.all(requests.map((request) => this.add(request)));
23
- }
24
- /**
25
- * Returns a Promise that resolves to an array of all matching responses
26
- * Default implementation using keys() and match() - can be overridden for efficiency
27
- */
28
- async matchAll(request, options) {
29
- const keys = await this.keys(request, options);
30
- const responses = [];
31
- for (const key of keys) {
32
- const response = await this.match(key, options);
33
- if (response) {
34
- responses.push(response);
35
- }
36
- }
37
- return responses;
38
- }
39
- };
40
- function generateCacheKey(request, options) {
41
- const url = new URL(request.url);
42
- if (options?.ignoreSearch) {
43
- url.search = "";
44
- }
45
- const method = options?.ignoreMethod ? "GET" : request.method;
46
- return `${method}:${url.href}`;
47
- }
48
- async function cloneResponse(response) {
49
- return response.clone();
50
- }
51
- export {
52
- Cache,
53
- cloneResponse,
54
- generateCacheKey
55
- };