@b9g/cache 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json ADDED
@@ -0,0 +1,66 @@
+ {
+   "name": "@b9g/cache",
+   "version": "0.1.0",
+   "description": "Universal Cache API implementation",
+   "keywords": [
+     "cache",
+     "storage",
+     "web-standards",
+     "universal"
+   ],
+   "dependencies": {},
+   "devDependencies": {
+     "@b9g/libuild": "^0.1.10",
+     "bun-types": "latest"
+   },
+   "type": "module",
+   "types": "src/index.d.ts",
+   "module": "src/index.js",
+   "exports": {
+     ".": {
+       "types": "./src/index.d.ts",
+       "import": "./src/index.js"
+     },
+     "./cache": {
+       "types": "./src/cache.d.ts",
+       "import": "./src/cache.js"
+     },
+     "./memory": {
+       "types": "./src/memory.d.ts",
+       "import": "./src/memory.js"
+     },
+     "./postmessage": {
+       "types": "./src/postmessage.d.ts",
+       "import": "./src/postmessage.js"
+     },
+     "./package.json": "./package.json",
+     "./cache.js": {
+       "types": "./src/cache.d.ts",
+       "import": "./src/cache.js"
+     },
+     "./cache-storage": {
+       "types": "./src/cache-storage.d.ts",
+       "import": "./src/cache-storage.js"
+     },
+     "./cache-storage.js": {
+       "types": "./src/cache-storage.d.ts",
+       "import": "./src/cache-storage.js"
+     },
+     "./index": {
+       "types": "./src/index.d.ts",
+       "import": "./src/index.js"
+     },
+     "./index.js": {
+       "types": "./src/index.d.ts",
+       "import": "./src/index.js"
+     },
+     "./memory.js": {
+       "types": "./src/memory.d.ts",
+       "import": "./src/memory.js"
+     },
+     "./postmessage.js": {
+       "types": "./src/postmessage.d.ts",
+       "import": "./src/postmessage.js"
+     }
+   }
+ }
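The exports map above exposes the root entry plus per-module subpaths ("./cache", "./cache-storage", "./memory", "./postmessage") and their ".js" aliases, all resolving to the files under src/. A minimal import sketch, assuming the package is installed in an ESM project on a runtime with WHATWG Request/Response globals (Node 18+, Bun, or a worker runtime):

import { Cache, CustomCacheStorage, MemoryCache, PostMessageCache } from "@b9g/cache";
// Deep imports resolve through the subpath entries shown above:
import { MemoryCacheManager } from "@b9g/cache/memory";
import { generateCacheKey } from "@b9g/cache/cache";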
package/src/cache-storage.d.ts ADDED
@@ -0,0 +1,44 @@
+ import type { Cache } from "./cache.js";
+ /**
+  * Factory function for creating Cache instances based on cache name
+  */
+ export type CacheFactory = (name: string) => Cache | Promise<Cache>;
+ /**
+  * CustomCacheStorage implements CacheStorage interface with a configurable factory
+  * The factory function receives the cache name and can return different cache types
+  */
+ export declare class CustomCacheStorage {
+     private factory;
+     private instances;
+     constructor(factory: CacheFactory);
+     /**
+      * Opens a cache with the given name
+      * Returns existing instance if already opened, otherwise creates a new one
+      */
+     open(name: string): Promise<Cache>;
+     /**
+      * Returns true if a cache with the given name exists (has been opened)
+      */
+     has(name: string): Promise<boolean>;
+     /**
+      * Deletes a cache with the given name
+      * Disposes of the instance if it exists
+      */
+     delete(name: string): Promise<boolean>;
+     /**
+      * Returns a list of all opened cache names
+      */
+     keys(): Promise<string[]>;
+     /**
+      * Get statistics about the cache storage
+      */
+     getStats(): {
+         openInstances: number;
+         cacheNames: string[];
+     };
+     /**
+      * Dispose of all open cache instances
+      * Useful for cleanup during shutdown
+      */
+     dispose(): Promise<void>;
+ }
package/src/cache-storage.js ADDED
@@ -0,0 +1,74 @@
+ /// <reference types="./cache-storage.d.ts" />
+ // src/cache-storage.ts
+ var CustomCacheStorage = class {
+   constructor(factory) {
+     this.factory = factory;
+   }
+   instances = /* @__PURE__ */ new Map();
+   /**
+    * Opens a cache with the given name
+    * Returns existing instance if already opened, otherwise creates a new one
+    */
+   async open(name) {
+     const existingInstance = this.instances.get(name);
+     if (existingInstance) {
+       return existingInstance;
+     }
+     const cache = await this.factory(name);
+     this.instances.set(name, cache);
+     return cache;
+   }
+   /**
+    * Returns true if a cache with the given name exists (has been opened)
+    */
+   async has(name) {
+     return this.instances.has(name);
+   }
+   /**
+    * Deletes a cache with the given name
+    * Disposes of the instance if it exists
+    */
+   async delete(name) {
+     const instance = this.instances.get(name);
+     if (instance) {
+       if (instance.dispose) {
+         await instance.dispose();
+       }
+       this.instances.delete(name);
+       return true;
+     }
+     return false;
+   }
+   /**
+    * Returns a list of all opened cache names
+    */
+   async keys() {
+     return Array.from(this.instances.keys());
+   }
+   /**
+    * Get statistics about the cache storage
+    */
+   getStats() {
+     return {
+       openInstances: this.instances.size,
+       cacheNames: Array.from(this.instances.keys())
+     };
+   }
+   /**
+    * Dispose of all open cache instances
+    * Useful for cleanup during shutdown
+    */
+   async dispose() {
+     const disposePromises = [];
+     for (const [_name, instance] of this.instances) {
+       if (instance.dispose) {
+         disposePromises.push(instance.dispose());
+       }
+     }
+     await Promise.all(disposePromises);
+     this.instances.clear();
+   }
+ };
+ export {
+   CustomCacheStorage
+ };
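A short usage sketch for CustomCacheStorage. The cache name "pages" and the MemoryCache-based factory are illustrative; the factory may return any Cache subclass, synchronously or as a promise:

import { CustomCacheStorage, MemoryCache } from "@b9g/cache";

// The factory receives the cache name and decides which Cache implementation to build.
const storage = new CustomCacheStorage((name) => new MemoryCache(name, { maxEntries: 500 }));

const pages = await storage.open("pages");  // creates a new instance via the factory
const again = await storage.open("pages");  // returns the memoized instance (again === pages)
console.log(await storage.keys());          // ["pages"]
await storage.delete("pages");              // disposes the instance if it defines dispose()
await storage.dispose();                    // dispose everything, e.g. during shutdown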
package/src/cache.d.ts ADDED
@@ -0,0 +1,66 @@
+ /**
+  * Cache query options for matching requests
+  * Based on the Cache API specification
+  */
+ export interface CacheQueryOptions {
+     /** Ignore the search portion of the request URL */
+     ignoreSearch?: boolean;
+     /** Ignore the request method */
+     ignoreMethod?: boolean;
+     /** Ignore the Vary header */
+     ignoreVary?: boolean;
+     /** Custom cache name for scoped operations */
+     cacheName?: string;
+ }
+ /**
+  * Abstract Cache class implementing the Cache API interface
+  * Provides shared implementations for add() and addAll() while requiring
+  * concrete implementations to handle the core storage operations
+  */
+ export declare abstract class Cache {
+     /**
+      * Returns a Promise that resolves to the response associated with the first matching request
+      */
+     abstract match(request: Request, options?: CacheQueryOptions): Promise<Response | undefined>;
+     /**
+      * Puts a request/response pair into the cache
+      */
+     abstract put(request: Request, response: Response): Promise<void>;
+     /**
+      * Finds the cache entry whose key is the request, and if found, deletes it and returns true
+      */
+     abstract delete(request: Request, options?: CacheQueryOptions): Promise<boolean>;
+     /**
+      * Returns a Promise that resolves to an array of cache keys (Request objects)
+      */
+     abstract keys(request?: Request, options?: CacheQueryOptions): Promise<Request[]>;
+     /**
+      * Takes a URL, retrieves it and adds the resulting response object to the cache
+      * Shared implementation using fetch() and put()
+      */
+     add(request: Request): Promise<void>;
+     /**
+      * Takes an array of URLs, retrieves them, and adds the resulting response objects to the cache
+      * Shared implementation using add()
+      */
+     addAll(requests: Request[]): Promise<void>;
+     /**
+      * Returns a Promise that resolves to an array of all matching responses
+      * Default implementation using keys() and match() - can be overridden for efficiency
+      */
+     matchAll(request?: Request, options?: CacheQueryOptions): Promise<Response[]>;
+     /**
+      * Optional cleanup method for implementations that need resource disposal
+      */
+     dispose?(): Promise<void>;
+ }
+ /**
+  * Generate a cache key from a Request object
+  * Normalizes the request for consistent cache key generation
+  */
+ export declare function generateCacheKey(request: Request, options?: CacheQueryOptions): string;
+ /**
+  * Clone a Response object for storage
+  * Responses can only be consumed once, so we need to clone them for caching
+  */
+ export declare function cloneResponse(response: Response): Promise<Response>;
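To illustrate the contract, here is a minimal, hypothetical subclass (not part of the package) that supplies only the four abstract methods and inherits add(), addAll(), and matchAll() from the shared implementations:

import { Cache, type CacheQueryOptions, generateCacheKey, cloneResponse } from "@b9g/cache";

class MapCache extends Cache {
  private store = new Map<string, { request: Request; response: Response }>();

  async match(request: Request, options?: CacheQueryOptions) {
    const entry = this.store.get(generateCacheKey(request, options));
    return entry ? cloneResponse(entry.response) : undefined;
  }

  async put(request: Request, response: Response) {
    this.store.set(generateCacheKey(request), {
      request: request.clone(),
      response: await cloneResponse(response)
    });
  }

  async delete(request: Request, options?: CacheQueryOptions) {
    return this.store.delete(generateCacheKey(request, options));
  }

  async keys() {
    return Array.from(this.store.values(), (entry) => entry.request.clone());
  }
}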
package/src/cache.js ADDED
@@ -0,0 +1,55 @@
+ /// <reference types="./cache.d.ts" />
+ // src/cache.ts
+ var Cache = class {
+   /**
+    * Takes a URL, retrieves it and adds the resulting response object to the cache
+    * Shared implementation using fetch() and put()
+    */
+   async add(request) {
+     const response = await fetch(request);
+     if (!response.ok) {
+       throw new TypeError(
+         `Failed to fetch ${request.url}: ${response.status} ${response.statusText}`
+       );
+     }
+     await this.put(request, response);
+   }
+   /**
+    * Takes an array of URLs, retrieves them, and adds the resulting response objects to the cache
+    * Shared implementation using add()
+    */
+   async addAll(requests) {
+     await Promise.all(requests.map((request) => this.add(request)));
+   }
+   /**
+    * Returns a Promise that resolves to an array of all matching responses
+    * Default implementation using keys() and match() - can be overridden for efficiency
+    */
+   async matchAll(request, options) {
+     const keys = await this.keys(request, options);
+     const responses = [];
+     for (const key of keys) {
+       const response = await this.match(key, options);
+       if (response) {
+         responses.push(response);
+       }
+     }
+     return responses;
+   }
+ };
+ function generateCacheKey(request, options) {
+   const url = new URL(request.url);
+   if (options?.ignoreSearch) {
+     url.search = "";
+   }
+   const method = options?.ignoreMethod ? "GET" : request.method;
+   return `${method}:${url.href}`;
+ }
+ async function cloneResponse(response) {
+   return response.clone();
+ }
+ export {
+   Cache,
+   cloneResponse,
+   generateCacheKey
+ };
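As a concrete example of the key normalization implemented above (outputs derived directly from generateCacheKey):

import { generateCacheKey } from "@b9g/cache";

generateCacheKey(new Request("https://example.com/a?x=1"));
// => "GET:https://example.com/a?x=1"

generateCacheKey(new Request("https://example.com/a?x=1"), { ignoreSearch: true });
// => "GET:https://example.com/a"

generateCacheKey(new Request("https://example.com/a", { method: "POST" }), { ignoreMethod: true });
// => "GET:https://example.com/a"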
package/src/index.d.ts ADDED
@@ -0,0 +1,9 @@
+ /**
+  * @b9g/cache - Universal Cache API implementation
+  *
+  * Provides HTTP-aware caching with PostMessage coordination for worker environments
+  */
+ export { Cache, type CacheQueryOptions, generateCacheKey, cloneResponse } from "./cache.js";
+ export { CustomCacheStorage, type CacheFactory } from "./cache-storage.js";
+ export { MemoryCache, MemoryCacheManager, type MemoryCacheOptions } from "./memory.js";
+ export { PostMessageCache, type PostMessageCacheOptions } from "./postmessage.js";
package/src/index.js ADDED
@@ -0,0 +1,15 @@
+ /// <reference types="./index.d.ts" />
+ // src/index.ts
+ import { Cache, generateCacheKey, cloneResponse } from "./cache.js";
+ import { CustomCacheStorage } from "./cache-storage.js";
+ import { MemoryCache, MemoryCacheManager } from "./memory.js";
+ import { PostMessageCache } from "./postmessage.js";
+ export {
+   Cache,
+   CustomCacheStorage,
+   MemoryCache,
+   MemoryCacheManager,
+   PostMessageCache,
+   cloneResponse,
+   generateCacheKey
+ };
package/src/memory.d.ts ADDED
@@ -0,0 +1,119 @@
+ import { Cache, type CacheQueryOptions } from "./cache.js";
+ /**
+  * Configuration options for MemoryCache
+  */
+ export interface MemoryCacheOptions {
+     /** Maximum number of entries to store */
+     maxEntries?: number;
+     /** Maximum age of entries in milliseconds */
+     maxAge?: number;
+ }
+ /**
+  * In-memory cache implementation using Map for storage
+  * Supports LRU eviction and TTL expiration
+  */
+ export declare class MemoryCache extends Cache {
+     private name;
+     private options;
+     private storage;
+     private accessOrder;
+     private accessCounter;
+     constructor(name: string, options?: MemoryCacheOptions);
+     /**
+      * Find a cached response for the given request
+      */
+     match(request: Request, options?: CacheQueryOptions): Promise<Response | undefined>;
+     /**
+      * Store a request/response pair in the cache
+      */
+     put(request: Request, response: Response): Promise<void>;
+     /**
+      * Delete matching entries from the cache
+      */
+     delete(request: Request, options?: CacheQueryOptions): Promise<boolean>;
+     /**
+      * Get all stored requests, optionally filtered by a request pattern
+      */
+     keys(request?: Request, options?: CacheQueryOptions): Promise<Request[]>;
+     /**
+      * Clear all entries from the cache
+      */
+     clear(): Promise<void>;
+     /**
+      * Get cache statistics
+      */
+     getStats(): {
+         name: string;
+         size: number;
+         maxEntries: number;
+         maxAge: number;
+         hitRate: number;
+     };
+     /**
+      * Dispose of the cache and clean up resources
+      */
+     dispose(): Promise<void>;
+     /**
+      * Check if a cache entry has expired
+      */
+     private isExpired;
+     /**
+      * Check if a response should be cached
+      */
+     private isCacheable;
+     /**
+      * Enforce maximum entry limits using LRU eviction
+      */
+     private enforceMaxEntries;
+ }
+ /**
+  * Memory Cache Manager for Main Thread
+  *
+  * Coordinates MemoryCache operations across Worker threads by managing
+  * shared MemoryCache instances and handling postMessage requests.
+  *
+  * Only MemoryCache needs coordination since it stores data in process memory.
+  * Other cache types can be used directly by workers without coordination.
+  */
+ import type { Worker } from "worker_threads";
+ interface CacheMessage {
+     type: string;
+     requestId: string;
+     cacheName: string;
+     request?: SerializedRequest;
+     response?: SerializedResponse;
+     options?: any;
+ }
+ interface SerializedRequest {
+     url: string;
+     method: string;
+     headers: Record<string, string>;
+     body?: string;
+ }
+ interface SerializedResponse {
+     status: number;
+     statusText: string;
+     headers: Record<string, string>;
+     body: string;
+ }
+ export declare class MemoryCacheManager {
+     private memoryCaches;
+     /**
+      * Handle memory cache-related message from a Worker
+      */
+     handleMessage(worker: Worker, message: CacheMessage): Promise<void>;
+     /**
+      * Get or create a MemoryCache instance
+      */
+     private getMemoryCache;
+     private handleMatch;
+     private handlePut;
+     private handleDelete;
+     private handleKeys;
+     private handleClear;
+     /**
+      * Dispose of all memory caches
+      */
+     dispose(): Promise<void>;
+ }
+ export {};
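A brief sketch of the options declared above (the cache name and values are illustrative):

import { MemoryCache } from "@b9g/cache/memory";

// Keep at most 200 responses; entries older than 5 minutes are treated as expired.
const cache = new MemoryCache("api", { maxEntries: 200, maxAge: 5 * 60 * 1000 });

await cache.put(
  new Request("https://example.com/data"),
  new Response("{}", { headers: { "content-type": "application/json" } })
);
const hit = await cache.match(new Request("https://example.com/data")); // cloned Response
console.log(cache.getStats()); // { name: "api", size: 1, maxEntries: 200, maxAge: 300000, hitRate: 0 }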
package/src/memory.js ADDED
@@ -0,0 +1,294 @@
+ /// <reference types="./memory.d.ts" />
+ // src/memory.ts
+ import {
+   Cache,
+   generateCacheKey,
+   cloneResponse
+ } from "./cache.js";
+ var MemoryCache = class extends Cache {
+   constructor(name, options = {}) {
+     super();
+     this.name = name;
+     this.options = options;
+   }
+   storage = /* @__PURE__ */ new Map();
+   accessOrder = /* @__PURE__ */ new Map();
+   // For LRU tracking
+   accessCounter = 0;
+   /**
+    * Find a cached response for the given request
+    */
+   async match(request, options) {
+     const key = generateCacheKey(request, options);
+     const entry = this.storage.get(key);
+     if (!entry) {
+       return void 0;
+     }
+     if (this.isExpired(entry)) {
+       this.storage.delete(key);
+       this.accessOrder.delete(key);
+       return void 0;
+     }
+     this.accessOrder.set(key, ++this.accessCounter);
+     return cloneResponse(entry.response);
+   }
+   /**
+    * Store a request/response pair in the cache
+    */
+   async put(request, response) {
+     const key = generateCacheKey(request);
+     if (!this.isCacheable(response)) {
+       return;
+     }
+     const clonedRequest = request.clone();
+     const clonedResponse = await cloneResponse(response);
+     const entry = {
+       request: clonedRequest,
+       response: clonedResponse,
+       timestamp: Date.now()
+     };
+     this.storage.set(key, entry);
+     this.accessOrder.set(key, ++this.accessCounter);
+     this.enforceMaxEntries();
+   }
+   /**
+    * Delete matching entries from the cache
+    */
+   async delete(request, options) {
+     const key = generateCacheKey(request, options);
+     const deleted = this.storage.delete(key);
+     if (deleted) {
+       this.accessOrder.delete(key);
+     }
+     return deleted;
+   }
+   /**
+    * Get all stored requests, optionally filtered by a request pattern
+    */
+   async keys(request, options) {
+     const keys = [];
+     for (const [_, entry] of this.storage) {
+       if (this.isExpired(entry)) {
+         continue;
+       }
+       if (!request) {
+         keys.push(entry.request.clone());
+         continue;
+       }
+       const entryKey = generateCacheKey(entry.request, options);
+       const filterKey = generateCacheKey(request, options);
+       if (entryKey === filterKey) {
+         keys.push(entry.request.clone());
+       }
+     }
+     return keys;
+   }
+   /**
+    * Clear all entries from the cache
+    */
+   async clear() {
+     this.storage.clear();
+     this.accessOrder.clear();
+     this.accessCounter = 0;
+   }
+   /**
+    * Get cache statistics
+    */
+   getStats() {
+     return {
+       name: this.name,
+       size: this.storage.size,
+       maxEntries: this.options.maxEntries,
+       maxAge: this.options.maxAge,
+       hitRate: 0
+       // Could be implemented with additional tracking
+     };
+   }
+   /**
+    * Dispose of the cache and clean up resources
+    */
+   async dispose() {
+     await this.clear();
+   }
+   /**
+    * Check if a cache entry has expired
+    */
+   isExpired(entry) {
+     if (!this.options.maxAge) {
+       return false;
+     }
+     return Date.now() - entry.timestamp > this.options.maxAge;
+   }
+   /**
+    * Check if a response should be cached
+    */
+   isCacheable(response) {
+     if (!response.ok) {
+       return false;
+     }
+     const cacheControl = response.headers.get("cache-control");
+     if (cacheControl) {
+       if (cacheControl.includes("no-cache") || cacheControl.includes("no-store")) {
+         return false;
+       }
+     }
+     return true;
+   }
+   /**
+    * Enforce maximum entry limits using LRU eviction
+    */
+   enforceMaxEntries() {
+     if (!this.options.maxEntries || this.storage.size <= this.options.maxEntries) {
+       return;
+     }
+     const entries = Array.from(this.accessOrder.entries()).sort((a, b) => a[1] - b[1]);
+     const toRemove = this.storage.size - this.options.maxEntries;
+     for (let i = 0; i < toRemove; i++) {
+       const [key] = entries[i];
+       this.storage.delete(key);
+       this.accessOrder.delete(key);
+     }
+   }
+ };
+ var MemoryCacheManager = class {
+   memoryCaches = /* @__PURE__ */ new Map();
+   /**
+    * Handle memory cache-related message from a Worker
+    */
+   async handleMessage(worker, message) {
+     const { type, requestId } = message;
+     try {
+       let result;
+       switch (type) {
+         case "cache:match":
+           result = await this.handleMatch(message);
+           break;
+         case "cache:put":
+           result = await this.handlePut(message);
+           break;
+         case "cache:delete":
+           result = await this.handleDelete(message);
+           break;
+         case "cache:keys":
+           result = await this.handleKeys(message);
+           break;
+         case "cache:clear":
+           result = await this.handleClear(message);
+           break;
+         default:
+           throw new Error(`Unknown cache operation: ${type}`);
+       }
+       worker.postMessage({
+         type: "cache:response",
+         requestId,
+         result
+       });
+     } catch (error) {
+       worker.postMessage({
+         type: "cache:error",
+         requestId,
+         error: error.message
+       });
+     }
+   }
+   /**
+    * Get or create a MemoryCache instance
+    */
+   getMemoryCache(name, options) {
+     if (!this.memoryCaches.has(name)) {
+       this.memoryCaches.set(name, new MemoryCache(name, options));
+     }
+     return this.memoryCaches.get(name);
+   }
+   async handleMatch(message) {
+     const { cacheName, request, options } = message;
+     if (!request)
+       throw new Error("Request is required for match operation");
+     const cache = this.getMemoryCache(cacheName);
+     const req = new Request(request.url, {
+       method: request.method,
+       headers: request.headers,
+       body: request.body
+     });
+     const response = await cache.match(req, options);
+     if (!response) {
+       return void 0;
+     }
+     return {
+       status: response.status,
+       statusText: response.statusText,
+       headers: Object.fromEntries(response.headers.entries()),
+       body: await response.text()
+     };
+   }
+   async handlePut(message) {
+     const { cacheName, request, response } = message;
+     if (!request || !response)
+       throw new Error("Request and response are required for put operation");
+     const cache = this.getMemoryCache(cacheName);
+     const req = new Request(request.url, {
+       method: request.method,
+       headers: request.headers,
+       body: request.body
+     });
+     const res = new Response(response.body, {
+       status: response.status,
+       statusText: response.statusText,
+       headers: response.headers
+     });
+     await cache.put(req, res);
+     return true;
+   }
+   async handleDelete(message) {
+     const { cacheName, request, options } = message;
+     if (!request)
+       throw new Error("Request is required for delete operation");
+     const cache = this.getMemoryCache(cacheName);
+     const req = new Request(request.url, {
+       method: request.method,
+       headers: request.headers,
+       body: request.body
+     });
+     return await cache.delete(req, options);
+   }
+   async handleKeys(message) {
+     const { cacheName, request, options } = message;
+     const cache = this.getMemoryCache(cacheName);
+     let req;
+     if (request) {
+       req = new Request(request.url, {
+         method: request.method,
+         headers: request.headers,
+         body: request.body
+       });
+     }
+     const keys = await cache.keys(req, options);
+     return keys.map((r) => ({
+       url: r.url,
+       method: r.method,
+       headers: Object.fromEntries(r.headers.entries()),
+       body: void 0
+       // Keys typically don't need body
+     }));
+   }
+   async handleClear(message) {
+     const { cacheName } = message;
+     const cache = this.getMemoryCache(cacheName);
+     await cache.clear();
+     return true;
+   }
+   /**
+    * Dispose of all memory caches
+    */
+   async dispose() {
+     const disposePromises = Array.from(this.memoryCaches.values()).map(
+       (cache) => cache.dispose()
+     );
+     await Promise.all(disposePromises);
+     this.memoryCaches.clear();
+   }
+ };
+ export {
+   MemoryCache,
+   MemoryCacheManager
+ };
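A sketch of the main-thread side of this protocol, assuming a worker_threads Worker whose script creates a PostMessageCache; the "./worker.js" path and the message-routing predicate are hypothetical, not part of the package:

import { Worker } from "worker_threads";
import { MemoryCacheManager } from "@b9g/cache/memory";

const manager = new MemoryCacheManager();
const worker = new Worker(new URL("./worker.js", import.meta.url));

// Forward cache:* requests from the worker to the shared MemoryCache instances;
// the manager replies with cache:response / cache:error keyed by requestId.
worker.on("message", (message) => {
  if (message && typeof message.type === "string" && message.type.startsWith("cache:")) {
    manager.handleMessage(worker, message);
  }
});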
package/src/postmessage.d.ts ADDED
@@ -0,0 +1,29 @@
+ import { Cache, type CacheQueryOptions } from "./cache.js";
+ /**
+  * Configuration options for PostMessageCache
+  */
+ export interface PostMessageCacheOptions {
+     /** Maximum number of entries to store */
+     maxEntries?: number;
+     /** Maximum age of entries in milliseconds */
+     maxAge?: number;
+ }
+ /**
+  * Worker-side cache that forwards operations to main thread via postMessage
+  * Only used for MemoryCache in multi-worker environments
+  */
+ export declare class PostMessageCache extends Cache {
+     private name;
+     private options;
+     private requestId;
+     private pendingRequests;
+     constructor(name: string, options?: PostMessageCacheOptions);
+     private handleResponse;
+     private sendRequest;
+     match(request: Request, options?: CacheQueryOptions): Promise<Response | undefined>;
+     put(request: Request, response: Response): Promise<void>;
+     delete(request: Request, options?: CacheQueryOptions): Promise<boolean>;
+     keys(request?: Request, options?: CacheQueryOptions): Promise<Request[]>;
+     clear(): Promise<void>;
+     dispose(): Promise<void>;
+ }
package/src/postmessage.js ADDED
@@ -0,0 +1,141 @@
+ /// <reference types="./postmessage.d.ts" />
+ // src/postmessage.ts
+ import { Cache } from "./cache.js";
+ import { parentPort, isMainThread } from "worker_threads";
+ var PostMessageCache = class extends Cache {
+   constructor(name, options = {}) {
+     super();
+     this.name = name;
+     this.options = options;
+     if (isMainThread) {
+       throw new Error(
+         "PostMessageCache should only be used in worker threads"
+       );
+     }
+     if (parentPort) {
+       parentPort.on("message", (message) => {
+         if (message.type === "cache:response" || message.type === "cache:error") {
+           this.handleResponse(message);
+         }
+       });
+     }
+   }
+   requestId = 0;
+   pendingRequests = /* @__PURE__ */ new Map();
+   handleResponse(message) {
+     const pending = this.pendingRequests.get(message.requestId);
+     if (pending) {
+       this.pendingRequests.delete(message.requestId);
+       if (message.type === "cache:error") {
+         pending.reject(new Error(message.error));
+       } else {
+         pending.resolve(message.result);
+       }
+     }
+   }
+   async sendRequest(type, data) {
+     if (!parentPort) {
+       throw new Error(
+         "PostMessageCache can only be used in worker threads"
+       );
+     }
+     const requestId = ++this.requestId;
+     return new Promise((resolve, reject) => {
+       this.pendingRequests.set(requestId, { resolve, reject });
+       parentPort.postMessage({
+         type,
+         requestId,
+         cacheName: this.name,
+         ...data
+       });
+       setTimeout(() => {
+         if (this.pendingRequests.has(requestId)) {
+           this.pendingRequests.delete(requestId);
+           reject(new Error("Cache operation timeout"));
+         }
+       }, 3e4);
+     });
+   }
+   async match(request, options) {
+     const serializedRequest = {
+       url: request.url,
+       method: request.method,
+       headers: Object.fromEntries(request.headers.entries()),
+       body: request.method !== "GET" && request.method !== "HEAD" ? await request.text() : void 0
+     };
+     const response = await this.sendRequest("cache:match", {
+       request: serializedRequest,
+       options
+     });
+     if (!response) {
+       return void 0;
+     }
+     return new Response(response.body, {
+       status: response.status,
+       statusText: response.statusText,
+       headers: response.headers
+     });
+   }
+   async put(request, response) {
+     const serializedRequest = {
+       url: request.url,
+       method: request.method,
+       headers: Object.fromEntries(request.headers.entries()),
+       body: request.method !== "GET" && request.method !== "HEAD" ? await request.clone().text() : void 0
+     };
+     const serializedResponse = {
+       status: response.status,
+       statusText: response.statusText,
+       headers: Object.fromEntries(response.headers.entries()),
+       body: await response.clone().text()
+     };
+     await this.sendRequest("cache:put", {
+       request: serializedRequest,
+       response: serializedResponse
+     });
+   }
+   async delete(request, options) {
+     const serializedRequest = {
+       url: request.url,
+       method: request.method,
+       headers: Object.fromEntries(request.headers.entries()),
+       body: request.method !== "GET" && request.method !== "HEAD" ? await request.text() : void 0
+     };
+     return await this.sendRequest("cache:delete", {
+       request: serializedRequest,
+       options
+     });
+   }
+   async keys(request, options) {
+     let serializedRequest;
+     if (request) {
+       serializedRequest = {
+         url: request.url,
+         method: request.method,
+         headers: Object.fromEntries(request.headers.entries()),
+         body: request.method !== "GET" && request.method !== "HEAD" ? await request.text() : void 0
+       };
+     }
+     const keys = await this.sendRequest("cache:keys", {
+       request: serializedRequest,
+       options
+     });
+     return keys.map(
+       (req) => new Request(req.url, {
+         method: req.method,
+         headers: req.headers,
+         body: req.body
+       })
+     );
+   }
+   async clear() {
+     await this.sendRequest("cache:clear", {});
+   }
+   async dispose() {
+     await this.clear();
+     this.pendingRequests.clear();
+   }
+ };
+ export {
+   PostMessageCache
+ };
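And the corresponding worker-side sketch (this code must run inside the Worker script; the constructor throws on the main thread by design, and the cache name "api" is illustrative):

import { PostMessageCache } from "@b9g/cache/postmessage";

// Every operation is serialized and forwarded to the main thread's MemoryCacheManager.
const cache = new PostMessageCache("api");

await cache.put(
  new Request("https://example.com/data"),
  new Response("{}", { headers: { "content-type": "application/json" } })
);
const cached = await cache.match(new Request("https://example.com/data"));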