@warp-drive-mirror/experiments 0.2.6-beta.0 → 0.2.6-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. package/README.md +14 -17
  2. package/dist/data-worker.js +1 -0
  3. package/dist/unpkg/dev/data-worker.js +380 -0
  4. package/dist/unpkg/dev/document-storage.js +349 -0
  5. package/dist/unpkg/dev/image-fetch.js +74 -0
  6. package/dist/unpkg/dev/image-worker.js +99 -0
  7. package/dist/unpkg/dev/worker-fetch.js +158 -0
  8. package/dist/unpkg/dev-deprecated/data-worker.js +380 -0
  9. package/dist/unpkg/dev-deprecated/document-storage.js +349 -0
  10. package/dist/unpkg/dev-deprecated/image-fetch.js +74 -0
  11. package/dist/unpkg/dev-deprecated/image-worker.js +99 -0
  12. package/dist/unpkg/dev-deprecated/worker-fetch.js +158 -0
  13. package/dist/unpkg/prod/data-worker.js +366 -0
  14. package/dist/unpkg/prod/document-storage.js +339 -0
  15. package/dist/unpkg/prod/image-fetch.js +74 -0
  16. package/dist/unpkg/prod/image-worker.js +99 -0
  17. package/dist/unpkg/prod/worker-fetch.js +158 -0
  18. package/dist/unpkg/prod-deprecated/data-worker.js +366 -0
  19. package/dist/unpkg/prod-deprecated/document-storage.js +339 -0
  20. package/dist/unpkg/prod-deprecated/image-fetch.js +74 -0
  21. package/dist/unpkg/prod-deprecated/image-worker.js +99 -0
  22. package/dist/unpkg/prod-deprecated/worker-fetch.js +158 -0
  23. package/logos/README.md +2 -2
  24. package/logos/logo-yellow-slab.svg +1 -0
  25. package/logos/word-mark-black.svg +1 -0
  26. package/logos/word-mark-white.svg +1 -0
  27. package/package.json +28 -8
  28. package/logos/NCC-1701-a-blue.svg +0 -4
  29. package/logos/NCC-1701-a-gold.svg +0 -4
  30. package/logos/NCC-1701-a-gold_100.svg +0 -1
  31. package/logos/NCC-1701-a-gold_base-64.txt +0 -1
  32. package/logos/NCC-1701-a.svg +0 -4
  33. package/logos/docs-badge.svg +0 -2
  34. package/logos/ember-data-logo-dark.svg +0 -12
  35. package/logos/ember-data-logo-light.svg +0 -12
  36. package/logos/social1.png +0 -0
  37. package/logos/social2.png +0 -0
  38. package/logos/warp-drive-logo-dark.svg +0 -4
  39. package/logos/warp-drive-logo-gold.svg +0 -4
@@ -0,0 +1,366 @@
1
+ import { DocumentStorage } from './document-storage.js';
2
+ import { assertPrivateStore } from '@warp-drive-mirror/core/store/-private';
3
+ import { SkipCache } from '@warp-drive-mirror/core/types/request';
4
+
5
const WorkerScope = globalThis.SharedWorkerGlobalScope;

/**
 * Runs a user-provided Store inside a (Shared)Worker and services
 * request/abort/close messages from connected "threads" (ports).
 *
 * Wire protocol (all via `event.data.type`):
 *  - 'connect':  register a thread and its MessagePort
 *  - 'request':  run `store.request(data)` and post back a
 *                'success-response' / 'error-response' message
 *  - 'abort':    abort an in-flight request by id
 *  - 'close':    release the thread's port and pending-request map
 */
class DataWorker {
  /**
   * @param {Function} UserStore - Store class to instantiate inside the worker.
   * @param {{ persisted?: boolean, scope?: string }} [options]
   */
  constructor(UserStore, options) {
    // disable if running on main thread
    if (typeof window !== 'undefined') {
      return;
    }
    this.store = new UserStore();
    // thread id -> MessagePort
    this.threads = new Map();
    // thread id -> Map<request id, in-flight future>
    this.pending = new Map();
    this.options = Object.assign({
      persisted: false,
      scope: ''
    }, options);
    this.isSharedWorker = WorkerScope && globalThis instanceof WorkerScope;
    this.initialize();
  }
  /** Wire up storage (when persisted) and the connect handshake for this worker kind. */
  initialize() {
    // enable the CacheHandler to access the worker
    this.store._worker = this;
    if (this.options.persisted) {
      // will be accessed by the worker's CacheHandler off of store
      this.storage = new DocumentStorage({
        scope: this.options.scope
      });
    }
    if (this.isSharedWorker) {
      // SharedWorker: each tab connects with its own port.
      globalThis.onconnect = e => {
        const port = e.ports[0];
        port.onmessage = event => {
          const {
            type
          } = event.data;
          switch (type) {
            case 'connect':
              this.setupThread(event.data.thread, port);
              break;
          }
        };
        port.start();
      };
    } else {
      // Dedicated worker: the connect message carries the port to use.
      globalThis.onmessage = event => {
        const {
          type
        } = event.data;
        switch (type) {
          case 'connect':
            this.setupThread(event.data.thread, event.ports[0]);
            break;
        }
      };
    }
  }
  /** Register a thread's port and start routing its messages. */
  setupThread(thread, port) {
    this.threads.set(thread, port);
    this.pending.set(thread, new Map());
    port.onmessage = event => {
      const {
        type
      } = event.data;
      switch (type) {
        case 'close':
          // FIX: previously compared `event.type === 'close'`, but a
          // MessagePort handler only ever receives events whose `type` is
          // 'message', so threads were never released. The close signal
          // arrives in `event.data.type` like every other message; also
          // drop the pending map so entries cannot leak.
          this.threads.delete(thread);
          this.pending.delete(thread);
          break;
        case 'abort':
          this.abortRequest(event.data);
          break;
        case 'request':
          void this.request(prepareRequest(event.data));
          break;
      }
    };
  }
  /** Abort the in-flight request `id` for `thread`, if it is still pending. */
  abortRequest(event) {
    const {
      thread,
      id
    } = event;
    // guard: thread may already have been closed/removed
    const inflight = this.pending.get(thread);
    const future = inflight?.get(id);
    if (future) {
      future.abort();
      inflight.delete(id);
    }
  }
  /**
   * Execute a request against the store and post the outcome back to the
   * originating thread. Aborted requests are dropped silently.
   */
  async request(event) {
    const {
      thread,
      id,
      data
    } = event;
    try {
      const future = this.store.request(data);
      this.pending.get(thread)?.set(id, future);
      const result = await future;
      this.threads.get(thread)?.postMessage({
        type: 'success-response',
        id,
        thread,
        data: prepareResponse(result)
      });
    } catch (error) {
      // the requesting thread asked for the abort; no reply needed
      if (isAbortError(error)) return;
      this.threads.get(thread)?.postMessage({
        type: 'error-response',
        id,
        thread,
        data: error
      });
    } finally {
      // thread may have closed while the request was in flight
      this.pending.get(thread)?.delete(id);
    }
  }
}
120
/**
 * Produce a plain, structured-cloneable snapshot of a Response-like object.
 * Returns null for a nullish input; headers become an entries array.
 */
function softCloneResponse(response) {
  if (!response) return null;
  const snapshot = {};
  if (response.headers) {
    snapshot.headers = Array.from(response.headers);
  }
  // copy the cloneable scalar fields one by one
  for (const field of ['ok', 'redirected', 'status', 'statusText', 'type', 'url']) {
    snapshot[field] = response[field];
  }
  return snapshot;
}
134
/** True only for real Error instances carrying the DOM 'AbortError' name. */
function isAbortError(error) {
  if (!(error instanceof Error)) {
    return false;
  }
  return error.name === 'AbortError';
}
137
/**
 * Shape a request result for postMessage: the live Response is reduced to a
 * cloneable snapshot while the content passes through untouched.
 */
function prepareResponse(result) {
  return {
    response: softCloneResponse(result.response),
    content: result.content
  };
}
144
/**
 * Rehydrate a transferred request in place: serialized header entries are
 * turned back into a Headers instance. Returns the same event object.
 */
function prepareRequest(event) {
  const { headers } = event.data;
  if (headers) {
    event.data.headers = new Headers(headers);
  }
  return event;
}
150
+
151
// request ops that write through to the server
const MUTATION_OPS = new Set(['createRecord', 'updateRecord', 'deleteRecord']);

/**
 * In a Worker, any time we are asked to make a request, data needs to be returned.
 * background requests are ergo no different than foreground requests.
 * @internal
 */
function calcShouldFetch(store, request, hasCachedValue, identifier) {
  const { cacheOptions } = request;
  // mutations always hit the network
  if (request.op && MUTATION_OPS.has(request.op)) {
    return true;
  }
  // explicit reload / backgroundReload requests bypass the cache
  if (cacheOptions?.reload || cacheOptions?.backgroundReload) {
    return true;
  }
  // nothing cached: we must fetch
  if (!hasCachedValue) {
    return true;
  }
  // otherwise fetch only when lifetimes report the cached value expired
  if (store.lifetimes && identifier) {
    return store.lifetimes.isHardExpired(identifier, store) || store.lifetimes.isSoftExpired(identifier, store);
  }
  return false;
}
164
/** Whether the request's op is one of the write operations (create/update/delete). */
function isMutation(request) {
  const { op } = request;
  return op ? MUTATION_OPS.has(op) : false;
}
167
/**
 * Whether a completed request document should be written into the cache.
 * Reads (non-mutations) always affect the cache.
 */
function isCacheAffecting(document) {
  if (!isMutation(document.request)) {
    return true;
  }
  const status = document.response?.status;
  // a createRecord with a 201 and an empty response payload (and no known
  // records) has no cache impact
  if (document.request.op === 'createRecord' && status === 201) {
    if (!document.content) {
      return false;
    }
    return Object.keys(document.content).length > 0;
  }
  // a mutation combined with a 204 has no cache impact when no known records
  // were involved
  return status !== 204;
}
180
/** True for real AggregateErrors or duck-typed ones (matching name + errors array). */
function isAggregateError(error) {
  if (error instanceof AggregateError) {
    return true;
  }
  return error.name === 'AggregateError' && Array.isArray(error.errors);
}

// TODO @runspired, consider if we should deep freeze errors (potentially only in debug) vs cloning them
/**
 * Build a detached copy of an error: fresh (Aggregate)Error carrying the same
 * message/stack plus every own enumerable property of the original.
 */
function cloneError(error) {
  const cloned = isAggregateError(error)
    ? new AggregateError(structuredClone(error.errors), error.message)
    : new Error(error.message);
  cloned.stack = error.stack;
  cloned.error = error.error;

  // copy over enumerable properties
  Object.assign(cloned, error);
  return cloned;
}
194
+
195
/**
 * A simplified CacheHandler that hydrates ResourceDataDocuments from the cache
 * with their referenced resources.
 *
 * Resolution order: skip caching entirely when requested, otherwise try the
 * in-memory cache, then (when a persisted DocumentStorage is attached to the
 * worker) attempt to warm the in-memory cache from disk before deciding
 * whether to fetch.
 */
const CacheHandler = {
  request(context, next) {
    // if we have no cache or no cache-key skip cache handling
    if (!context.request.store || context.request.cacheOptions?.[SkipCache]) {
      return next(context.request);
    }
    const {
      store
    } = context.request;
    const identifier = store.cacheKeyManager.getOrCreateDocumentIdentifier(context.request);
    // peekRequest tells us whether the in-memory cache already holds this document
    const peeked = identifier ? store.cache.peekRequest(identifier) : null;
    if (identifier && !peeked) {
      // if we are using persisted cache, we should attempt to populate the in-memory cache now
      const worker = store._worker;
      if (worker?.storage) {
        return worker.storage.getDocument(identifier).then(document => {
          if (document) {
            store.cache.put(document);
          }
          return completeRequest(identifier, store, context, next);
        }).catch(e => {
          // persisted-cache read is best-effort: on any storage failure we
          // deliberately ignore the error and fall through to a normal fetch
          return completeRequest(identifier, store, context, next);
        });
      }
    }
    return completeRequest(identifier, store, context, next);
  }
};
228
/**
 * Resolve a request either from the network (when required) or from the
 * cached document. Cached error documents are re-thrown as-is.
 */
function completeRequest(identifier, store, context, next) {
  const cached = identifier ? store.cache.peekRequest(identifier) : null;
  // In a Worker, any time we are asked to make a request, data needs to be returned.
  // background requests are ergo no different than foreground requests.
  const mustFetch = calcShouldFetch(store, context.request, Boolean(cached), identifier);
  if (mustFetch) {
    return fetchContentAndHydrate(next, context, identifier);
  }
  context.setResponse(cached.response);
  if ('error' in cached) {
    throw cached;
  }
  return maybeUpdateObjects(store, cached.content);
}
241
/**
 * Replace identifier references in a document's `data` with the actual cached
 * resource values, returning a shallow copy (input document untouched).
 */
function maybeUpdateObjects(store, document) {
  if (!document) {
    return document;
  }
  const { data } = document;
  if (Array.isArray(data)) {
    const records = data.map(identifier => store.cache.peek(identifier));
    return { ...document, data: records };
  }
  // @ts-expect-error FIXME investigate why document.data won't accept the signature
  const record = data ? store.cache.peek(data) : null;
  return { ...document, data: record };
}
260
/**
 * Mirror a cache update into persisted storage when the worker has one.
 * Writes are fire-and-forget (promises intentionally discarded).
 */
function maybeUpdatePersistedCache(store, document, resourceDocument) {
  const worker = store._worker;
  if (!worker?.storage) {
    return;
  }
  const collect = resourceIdentifier => store.cache.peek(resourceIdentifier);
  if (!document && resourceDocument) {
    // we have resources to update but not a full request to cache
    void worker.storage.putResources(resourceDocument, collect);
  } else if (document) {
    void worker.storage.putDocument(document, collect);
  }
}
276
/**
 * Fold a successful response into the cache (didCommit for mutations with a
 * known record, put otherwise), mirror to persisted storage where applicable,
 * and return the hydrated result.
 */
function updateCacheForSuccess(store, request, document) {
  let response = null;
  if (!isMutation(request)) {
    response = store.cache.put(document);
    if (response.lid) {
      const identifier = store.cacheKeyManager.getOrCreateDocumentIdentifier(request);
      const full = store.cache.peekRequest(identifier);
      maybeUpdatePersistedCache(store, full);
    }
    return maybeUpdateObjects(store, response);
  }
  const record = request.data?.record || request.records?.[0];
  if (record) {
    // @ts-expect-error while this is valid, we should update the CacheHandler for transactional saves
    response = store.cache.didCommit(record, document);

    // a mutation combined with a 204 has no cache impact when no known records were involved
    // a createRecord with a 201 with an empty response and no known records should similarly
    // have no cache impact
  } else if (isCacheAffecting(document)) {
    response = store.cache.put(document);
    maybeUpdatePersistedCache(store, null, response);
  }
  return maybeUpdateObjects(store, response);
}
301
/**
 * Success path for a fetched document: update the cache inside a store join,
 * notify lifetimes, and return the hydrated response.
 */
function handleFetchSuccess(store, request, identifier, document) {
  assertPrivateStore(store);
  let response;
  store._join(() => {
    response = updateCacheForSuccess(store, request, document);
  });
  if (store.lifetimes?.didRequest) {
    store.lifetimes.didRequest(request, document.response, identifier, store);
  }
  return response;
}
312
/**
 * Fold a failed response into the cache. Mutations reject the record's
 * in-flight commit; reads store the error document (and persist it).
 */
function updateCacheForError(store, request, error) {
  if (isMutation(request)) {
    // TODO similar to didCommit we should spec this to be similar to cache.put for handling full response
    // currently we let the response remain undefined.
    const content = error && error.content;
    const hasErrorsArray = content && typeof content === 'object' && 'errors' in content && Array.isArray(content.errors);
    const errors = hasErrorsArray ? content.errors : undefined;
    const record = request.data?.record || request.records?.[0];
    store.cache.commitWasRejected(record, errors);
    return;
  }
  const identifier = store.cacheKeyManager.getOrCreateDocumentIdentifier(request);
  if (identifier) {
    maybeUpdatePersistedCache(store, error);
  }
  return store.cache.put(error);
}
327
/**
 * Error path for a fetched document: record the failure in the cache, notify
 * lifetimes, and rethrow — mutations propagate the original error, reads
 * throw a clone enriched with the cached error content.
 */
function handleFetchError(store, request, identifier, error) {
  if (request.signal?.aborted) {
    // caller-initiated abort: propagate unchanged, no cache work
    throw error;
  }
  assertPrivateStore(store);
  let response;
  store._join(() => {
    response = updateCacheForError(store, request, error);
  });
  if (identifier && store.lifetimes?.didRequest) {
    store.lifetimes.didRequest(request, error.response, identifier, store);
  }
  if (isMutation(request)) {
    throw error;
  }
  const enriched = cloneError(error);
  enriched.content = response;
  throw enriched;
}
346
/**
 * Hand the request to the next handler, bracketing it with willCommit (for
 * mutations with a known record) and lifetimes.willRequest, then route the
 * outcome through the success/error cache handlers.
 */
function fetchContentAndHydrate(next, context, identifier) {
  const { request } = context;
  const { store } = request;
  if (isMutation(request)) {
    // TODO should we handle multiple records in request.records by iteratively calling willCommit for each
    const record = request.data?.record || request.records?.[0];
    if (record) {
      store.cache.willCommit(record, context);
    }
  }
  if (store.lifetimes?.willRequest) {
    store.lifetimes.willRequest(request, identifier, store);
  }
  return next(request).then(
    document => handleFetchSuccess(store, request, identifier, document),
    error => handleFetchError(store, request, identifier, error)
  );
}
365
+
366
+ export { CacheHandler, DataWorker };
@@ -0,0 +1,339 @@
1
// Base name for the persisted cache file kept in origin-scoped storage.
const WARP_DRIVE_STORAGE_FILE_NAME = 'warp-drive_document-storage';
// Embedded in the storage file name (see DocumentStorage), so distinct
// versions resolve to distinct files on disk.
const WARP_DRIVE_STORAGE_VERSION = 1;
3
+
4
+ /**
5
+ * DocumentStorage is specifically designed around WarpDrive Cache and Request concepts.
6
+ *
7
+ * CacheFileDocument is a StructuredDocument (request response) whose `content` is
8
+ * the ResourceDocument returned by inserting the request into a Store's Cache.
9
+ */
10
+
11
+ /**
12
+ * A CacheDocument is a reconstructed request response that rehydrates ResourceDocument
13
+ * with the associated resources based on their identifiers.
14
+ */
15
+
16
/**
 * Backend for DocumentStorage.
 *
 * Persists a single JSON file in the origin's private file system
 * (`navigator.storage.getDirectory()`) holding two entry lists:
 * `documents` (keyed request documents) and `resources` (keyed resource data).
 * An in-memory Map-based mirror (`_cache`) avoids re-reading the file on
 * every call; a BroadcastChannel named after the scope invalidates that
 * mirror whenever another tab/worker writes the file.
 */
class InternalDocumentStorage {
  constructor(options) {
    this.options = options;
    // lastModified timestamp of the file snapshot mirrored by `_cache` (0 = never read).
    this._lastModified = 0;
    // when true, `_read` must consult the file before trusting `_cache`.
    this._invalidated = true;
    // Promise<FileSystemFileHandle>; opened eagerly, awaited at each use site.
    this._fileHandle = this._open(options.scope);
    // peers post on this channel after every write; any message marks us stale.
    this._channel = Object.assign(new BroadcastChannel(options.scope), {
      onmessage: this._onMessage.bind(this)
    });
  }
  // Another context wrote the file — drop trust in the in-memory mirror.
  _onMessage(_event) {
    this._invalidated = true;
  }
  // Open (creating on first use) the storage file named `scope`.
  async _open(scope) {
    const directoryHandle = await navigator.storage.getDirectory();
    const fileHandle = await directoryHandle.getFileHandle(scope, {
      create: true
    });
    return fileHandle;
  }
  /**
   * Return the Map-based mirror of the file, refreshing from disk when
   * invalidated. Concurrent callers share a single in-flight refresh via
   * `_filePromise`.
   */
  async _read() {
    if (this._filePromise) {
      return this._filePromise;
    }
    if (this._invalidated) {
      const updateFile = async () => {
        const fileHandle = await this._fileHandle;
        const file = await fileHandle.getFile();
        const lastModified = file.lastModified;
        // file unchanged since our last parse — keep the existing maps
        if (lastModified === this._lastModified && this._cache) {
          return this._cache;
        }
        const contents = await file.text();
        // an empty / newly created file deserializes to an empty cache
        const cache = contents ? JSON.parse(contents) : {
          documents: [],
          resources: []
        };
        const documents = new Map(cache.documents);
        const resources = new Map(cache.resources);
        const cacheMap = {
          documents,
          resources
        };
        this._cache = cacheMap;
        this._invalidated = false;
        this._lastModified = lastModified;
        return cacheMap;
      };
      this._filePromise = updateFile();
      await this._filePromise;
      this._filePromise = null;
    }
    return this._cache;
  }
  /**
   * Merge one document plus its updated resources (a Map of lid -> resource)
   * into the cache, rewrite the whole file, and notify peer contexts.
   */
  async _patch(documentKey, document, updatedResources) {
    const fileHandle = await this._fileHandle;
    // secure a lock before getting latest state
    const writable = await fileHandle.createWritable();
    const cache = await this._read();
    cache.documents.set(documentKey, document);
    updatedResources.forEach((resource, key) => {
      cache.resources.set(key, resource);
    });
    // serialize the full cache back into entry lists for JSON storage
    const documents = [...cache.documents.entries()];
    const resources = [...cache.resources.entries()];
    const cacheFile = {
      documents,
      resources
    };
    await writable.write(JSON.stringify(cacheFile));
    await writable.close();
    // tell peers exactly which document and resources changed
    this._channel.postMessage({
      type: 'patch',
      key: documentKey,
      resources: [...updatedResources.keys()]
    });
  }
  /**
   * Look up a stored document by cache key and rebuild it: identifier
   * references in `content.data` / `content.included` are replaced with
   * cloned copies of the stored resources. Returns null on a cache miss;
   * throws when a referenced resource is absent from the resource table.
   * NOTE(review): a miss passes undefined into safeDocumentHydrate before
   * the `!document` check — verify hydrate tolerates nullish input.
   */
  async getDocument(key) {
    const cache = await this._read();
    // clone the document to avoid leaking the internal cache
    const document = safeDocumentHydrate(cache.documents.get(key.lid));
    if (!document) {
      return null;
    }

    // expand the document with the resources
    if (document.content) {
      if (docHasData(document.content)) {
        let data = null;
        if (Array.isArray(document.content.data)) {
          data = document.content.data.map(resourceIdentifier => {
            const resource = cache.resources.get(resourceIdentifier.lid);
            if (!resource) {
              throw new Error(`Resource not found for ${resourceIdentifier.lid}`);
            }

            // clone the resource to avoid leaking the internal cache
            return structuredClone(resource);
          });
        } else if (document.content.data) {
          const resource = cache.resources.get(document.content.data.lid);
          if (!resource) {
            throw new Error(`Resource not found for ${document.content.data.lid}`);
          }

          // clone the resource to avoid leaking the internal cache
          data = structuredClone(resource);
        }
        if (document.content.included) {
          const included = document.content.included.map(resourceIdentifier => {
            const resource = cache.resources.get(resourceIdentifier.lid);
            if (!resource) {
              throw new Error(`Resource not found for ${resourceIdentifier.lid}`);
            }

            // clone the resource to avoid leaking the internal cache
            return structuredClone(resource);
          });
          document.content.included = included;
        }
        document.content.data = data;
      }
    }
    return document;
  }
  /**
   * Persist a finalized StructuredDocument keyed by `content.lid`. Resources
   * referenced by the content are gathered via `resourceCollector` and stored
   * separately from the document itself.
   * @throws when `document.content` or `document.content.lid` is missing.
   */
  async putDocument(document, resourceCollector) {
    const resources = new Map();
    if (!document.content) {
      throw new Error(`Document content is missing, only finalized documents can be stored`);
    }
    if (!document.content.lid) {
      throw new Error(`Document content is missing a lid, only documents with a cache-key can be stored`);
    }
    if (docHasData(document.content)) {
      this._getResources(document.content, resourceCollector, resources);
    }
    await this._patch(document.content.lid, safeDocumentSerialize(document), resources);
  }
  /**
   * Collect clones of every resource referenced by `document.data` and
   * `document.included` into `resources` (lid -> cloned resource value
   * produced by `resourceCollector`). Returns the populated map.
   */
  _getResources(document, resourceCollector, resources = new Map()) {
    if (Array.isArray(document.data)) {
      document.data.forEach(resourceIdentifier => {
        const resource = resourceCollector(resourceIdentifier);
        resources.set(resourceIdentifier.lid, structuredClone(resource));
      });
    } else if (document.data) {
      const resource = resourceCollector(document.data);
      resources.set(document.data.lid, structuredClone(resource));
    }
    if (document.included) {
      document.included.forEach(resourceIdentifier => {
        const resource = resourceCollector(resourceIdentifier);
        resources.set(resourceIdentifier.lid, structuredClone(resource));
      });
    }
    return resources;
  }
  /**
   * Persist only the resources referenced by a ResourceDocument — no document
   * entry is written. Rewrites the file and notifies peers with `key: null`.
   */
  async putResources(document, resourceCollector) {
    const fileHandle = await this._fileHandle;
    // secure a lock before getting latest state
    const writable = await fileHandle.createWritable();
    const cache = await this._read();
    const updatedResources = this._getResources(document, resourceCollector);
    updatedResources.forEach((resource, key) => {
      cache.resources.set(key, resource);
    });
    const documents = [...cache.documents.entries()];
    const resources = [...cache.resources.entries()];
    const cacheFile = {
      documents,
      resources
    };
    await writable.write(JSON.stringify(cacheFile));
    await writable.close();
    this._channel.postMessage({
      type: 'patch',
      key: null,
      resources: [...updatedResources.keys()]
    });
  }
  /**
   * Empty the storage file and reset the in-memory mirror. When `reset` is
   * falsy the instance is retired: the BroadcastChannel is closed and (for
   * non-isolated instances) the shared-registry entry is removed.
   */
  async clear(reset) {
    const fileHandle = await this._fileHandle;
    const writable = await fileHandle.createWritable();
    await writable.write('');
    await writable.close();
    this._invalidated = true;
    this._lastModified = 0;
    this._cache = null;
    this._filePromise = null;
    this._channel.postMessage({
      type: 'clear'
    });
    if (!reset) {
      this._channel.close();
      this._channel = null;
      if (!this.options.isolated) {
        Storages.delete(this.options.scope);
      }
    }
  }
}
216
/**
 * Reduce a StructuredDocument to a JSON-storable shape: the request loses its
 * live store/signal, the response becomes a plain snapshot, and content is
 * deep-cloned so the stored value cannot alias live cache state.
 */
function safeDocumentSerialize(document) {
  const serialized = {};
  if ('request' in document) {
    serialized.request = prepareRequest(document.request);
  }
  if ('response' in document) {
    serialized.response = prepareResponse(document.response);
  }
  if ('content' in document) {
    serialized.content = structuredClone(document.content);
  }
  return serialized;
}
230
/**
 * Shallow-copy a request into a serializable form: the live store reference
 * is always dropped, an AbortSignal is dropped, and Headers are flattened to
 * an entries array. The caller's request object is never mutated.
 */
function prepareRequest(request) {
  const copy = { ...request };
  delete copy.store;
  if (request.signal instanceof AbortSignal) {
    delete copy.signal;
  }
  if (request.headers instanceof Headers) {
    copy.headers = Array.from(request.headers);
  }
  return copy;
}
245
/**
 * Produce a plain, JSON-storable snapshot of a Response-like object.
 * Returns null for a nullish input; headers become an entries array.
 */
function prepareResponse(response) {
  if (!response) return null;
  const snapshot = {};
  if (response.headers) {
    snapshot.headers = Array.from(response.headers);
  }
  // copy the serializable scalar fields
  for (const field of ['ok', 'redirected', 'status', 'statusText', 'type', 'url']) {
    snapshot[field] = response[field];
  }
  return snapshot;
}
259
/**
 * Rebuild a stored document's request/response into live Request/Response
 * objects (re-inflating Headers) and deep-clone its content so callers never
 * alias the persisted cache.
 *
 * @param document - the stored document, or null/undefined on a cache miss
 * @returns the hydrated document, or null when `document` is nullish
 */
function safeDocumentHydrate(document) {
  // FIX: getDocument passes `cache.documents.get(key.lid)` straight in, so a
  // cache miss arrived here as undefined and `'request' in undefined` threw a
  // TypeError before getDocument's own `!document` null-check could run.
  if (!document) {
    return null;
  }
  const doc = document;
  const newDoc = {};
  if ('request' in doc) {
    const headers = new Headers(doc.request.headers);
    const req = Object.assign({}, doc.request, {
      headers
    });
    newDoc.request = new Request(doc.request.url ?? '', req);
  }
  if ('response' in doc) {
    const headers = new Headers(doc.response.headers);
    const resp = Object.assign({}, doc.response, {
      headers
    });
    newDoc.response = new Response(null, resp);
  }
  if ('content' in doc) {
    newDoc.content = structuredClone(doc.content);
  }
  return newDoc;
}
281
/** True when the document carries a `data` member (own or inherited), even if nullish. */
function docHasData(doc) {
  return Reflect.has(doc, 'data');
}
284
// Module-level registry of InternalDocumentStorage backends keyed by file
// name, letting non-isolated DocumentStorage wrappers share one backend.
// Values are WeakRefs (see the DocumentStorage constructor) so unused
// backends remain collectible.
const Storages = new Map();
285
+
286
+ /**
287
+ * DocumentStorage is a wrapper around the StorageManager API that provides
288
+ * a simple interface for reading and updating documents and requests.
289
+ *
290
+ * Some goals for this experiment:
291
+ *
292
+ * - optimize for storing requests/documents
293
+ * - optimize for storing resources
294
+ * - optimize for looking up resources associated to a document
295
+ * - optimize for notifying cross-tab when data is updated
296
+ *
297
+ * optional features:
298
+ *
299
+ * - support for offline mode
300
+ * - ?? support for relationship based cache traversal
301
+ * - a way to index records by type + another field (e.g updatedAt/createAt/name)
302
+ * such that simple queries can be done without having to scan all records
303
+ */
304
class DocumentStorage {
  /**
   * @param {{ isolated?: boolean, scope?: string }} [options]
   *   isolated — when false (default), instances with the same scope share one
   *   InternalDocumentStorage backend through the module registry;
   *   scope — logical cache name, defaults to 'default'.
   */
  constructor(options = {}) {
    // read into locals instead of mutating the caller's options object
    const isolated = options.isolated ?? false;
    const scope = options.scope ?? 'default';
    const fileName = `${WARP_DRIVE_STORAGE_FILE_NAME}@version_${WARP_DRIVE_STORAGE_VERSION}:${scope}`;
    if (!isolated) {
      const existing = Storages.get(fileName)?.deref();
      if (existing) {
        this._storage = existing;
        return;
      }
      // FIX: the original returned early whenever a registry entry existed,
      // even when its WeakRef target had already been collected — leaving
      // `this._storage` undefined. A dead ref now falls through so a fresh
      // backend is created and re-registered below.
    }
    const storage = new InternalDocumentStorage({
      scope: fileName,
      isolated
    });
    this._storage = storage;
    if (!isolated) {
      Storages.set(fileName, new WeakRef(storage));
    }
  }
  /** Read a document (with hydrated resources) by its cache key. */
  getDocument(key) {
    return this._storage.getDocument(key);
  }
  /** Persist a finalized document plus the resources it references. */
  putDocument(document, resourceCollector) {
    return this._storage.putDocument(document, resourceCollector);
  }
  /** Persist only the resources referenced by a ResourceDocument. */
  putResources(document, resourceCollector) {
    return this._storage.putResources(document, resourceCollector);
  }
  /** Empty the persisted file; a falsy `reset` also retires the backend. */
  clear(reset) {
    return this._storage.clear(reset);
  }
}
338
+
339
+ export { DocumentStorage };