@khanacademy/wonder-blocks-data 3.1.3 → 5.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +41 -0
- package/dist/es/index.js +408 -349
- package/dist/index.js +599 -494
- package/docs.md +17 -35
- package/package.json +1 -1
- package/src/__tests__/__snapshots__/generated-snapshot.test.js.snap +7 -46
- package/src/__tests__/generated-snapshot.test.js +60 -126
- package/src/components/__tests__/data.test.js +373 -313
- package/src/components/__tests__/intercept-requests.test.js +58 -0
- package/src/components/data.js +139 -21
- package/src/components/data.md +38 -69
- package/src/components/intercept-context.js +6 -3
- package/src/components/intercept-requests.js +69 -0
- package/src/components/intercept-requests.md +54 -0
- package/src/components/track-data.md +9 -23
- package/src/hooks/__tests__/__snapshots__/use-shared-cache.test.js.snap +17 -0
- package/src/hooks/__tests__/use-gql.test.js +1 -0
- package/src/hooks/__tests__/use-request-interception.test.js +255 -0
- package/src/hooks/__tests__/use-server-effect.test.js +217 -0
- package/src/hooks/__tests__/use-shared-cache.test.js +307 -0
- package/src/hooks/use-gql.js +36 -23
- package/src/hooks/use-request-interception.js +54 -0
- package/src/hooks/use-server-effect.js +45 -0
- package/src/hooks/use-shared-cache.js +106 -0
- package/src/index.js +18 -20
- package/src/util/__tests__/__snapshots__/scoped-in-memory-cache.test.js.snap +19 -0
- package/src/util/__tests__/request-fulfillment.test.js +42 -85
- package/src/util/__tests__/request-tracking.test.js +72 -191
- package/src/util/__tests__/{result-from-cache-entry.test.js → result-from-cache-response.test.js} +9 -10
- package/src/util/__tests__/scoped-in-memory-cache.test.js +396 -0
- package/src/util/__tests__/ssr-cache.test.js +639 -0
- package/src/util/request-fulfillment.js +36 -44
- package/src/util/request-tracking.js +62 -75
- package/src/util/{result-from-cache-entry.js → result-from-cache-response.js} +10 -13
- package/src/util/scoped-in-memory-cache.js +149 -0
- package/src/util/ssr-cache.js +206 -0
- package/src/util/types.js +43 -108
- package/src/components/__tests__/intercept-data.test.js +0 -87
- package/src/components/intercept-data.js +0 -77
- package/src/components/intercept-data.md +0 -65
- package/src/hooks/__tests__/use-data.test.js +0 -826
- package/src/hooks/use-data.js +0 -143
- package/src/util/__tests__/memory-cache.test.js +0 -446
- package/src/util/__tests__/request-handler.test.js +0 -121
- package/src/util/__tests__/response-cache.test.js +0 -879
- package/src/util/memory-cache.js +0 -187
- package/src/util/request-handler.js +0 -42
- package/src/util/request-handler.md +0 -51
- package/src/util/response-cache.js +0 -213
package/dist/es/index.js
CHANGED
```diff
@@ -1,141 +1,101 @@
 import { Server } from '@khanacademy/wonder-blocks-core';
+import { KindError, Errors, clone } from '@khanacademy/wonder-stuff-core';
 import * as React from 'react';
-import {
+import { useContext, useMemo } from 'react';
 import _extends from '@babel/runtime/helpers/extends';
-import { Errors, KindError } from '@khanacademy/wonder-stuff-core';
 
-function deepClone(source) {
-/**
-* We want to deep clone the source cache to dodge mutations by external
-* references. So we serialize the source cache to JSON and parse it
-* back into a new object.
-*
-* NOTE: This doesn't work for get/set property accessors.
-*/
-const serializedInitCache = JSON.stringify(source);
-const cloneInitCache = JSON.parse(serializedInitCache);
-return Object.freeze(cloneInitCache);
-}
 /**
-*
-*
-* Special case cache implementation for the memory cache.
-*
-* This is only used within our framework for SSR (see ./response-cache.js).
+* Describe an in-memory cache.
 */
+class ScopedInMemoryCache {
+constructor(initialCache = Object.freeze({})) {
+this.set = (scope, id, value) => {
+var _this$_cache$scope;
 
+if (!id || typeof id !== "string") {
+throw new KindError("id must be non-empty string", Errors.InvalidInput);
+}
 
-
-
-
-const requestType = handler.type;
-const frozenEntry = Object.freeze(entry); // Ensure we have a cache location for this handler type.
+if (!scope || typeof scope !== "string") {
+throw new KindError("scope must be non-empty string", Errors.InvalidInput);
+}
 
-
+if (typeof value === "function") {
+throw new KindError("value must be a non-function value", Errors.InvalidInput);
+}
 
-
-this._cache[
+this._cache[scope] = (_this$_cache$scope = this._cache[scope]) != null ? _this$_cache$scope : {};
+this._cache[scope][id] = Object.freeze(clone(value));
 };
 
-this.
-
+this.get = (scope, id) => {
+var _this$_cache$scope$id, _this$_cache$scope2;
 
-
+return (_this$_cache$scope$id = (_this$_cache$scope2 = this._cache[scope]) == null ? void 0 : _this$_cache$scope2[id]) != null ? _this$_cache$scope$id : null;
+};
 
-
-
-} // Get the response.
+this.purge = (scope, id) => {
+var _this$_cache$scope3;
 
+if (!((_this$_cache$scope3 = this._cache[scope]) != null && _this$_cache$scope3[id])) {
+return;
+}
 
-
-const internalEntry = handlerCache[key];
+delete this._cache[scope][id];
 
-if (
-
+if (Object.keys(this._cache[scope]).length === 0) {
+delete this._cache[scope];
 }
-
-return internalEntry;
 };
 
-this.
-
-
-
-// Get the internal subcache for the handler.
-
-const handlerCache = this._cache[requestType];
-
-if (!handlerCache) {
-return false;
-} // Get the entry.
-
-
-const key = handler.getKey(options);
-const internalEntry = handlerCache[key];
+this.purgeScope = (scope, predicate) => {
+if (!this._cache[scope]) {
+return;
+}
 
-if (
-
-
+if (predicate == null) {
+delete this._cache[scope];
+return;
+}
 
+for (const key of Object.keys(this._cache[scope])) {
+if (predicate(key, this._cache[scope][key])) {
+delete this._cache[scope][key];
+}
+}
 
-
-
+if (Object.keys(this._cache[scope]).length === 0) {
+delete this._cache[scope];
+}
 };
 
-this.
-
-
-
-
-if (!handlerCache) {
-return 0;
+this.purgeAll = predicate => {
+if (predicate == null) {
+this._cache = {};
+return;
 }
 
-
-
-if (typeof predicate === "function") {
-// Apply the predicate to what we have cached.
-for (const [key, entry] of Object.entries(handlerCache)) {
-if (predicate(key, entry)) {
-removedCount++;
-delete handlerCache[key];
-}
-}
-} else {
-// We're removing everything so delete the entire subcache.
-removedCount = Object.keys(handlerCache).length;
-delete this._cache[requestType];
+for (const scope of Object.keys(this._cache)) {
+this.purgeScope(scope, (id, value) => predicate(scope, id, value));
 }
-
-return removedCount;
 };
 
-this.
+this.clone = () => {
 try {
-return
+return clone(this._cache);
 } catch (e) {
 throw new Error(`An error occurred while trying to clone the cache: ${e}`);
 }
 };
 
-
-
-
-
-/**
-* Object.assign only performs a shallow clone.
-* So we deep clone it and then assign the clone values to our
-* internal cache.
-*/
-const cloneInitCache = deepClone(source);
-Object.assign(this._cache, cloneInitCache);
-} catch (e) {
-throw new Error(`An error occurred trying to initialize from a response cache snapshot: ${e}`);
-}
+try {
+this._cache = clone(initialCache);
+} catch (e) {
+throw new KindError(`An error occurred trying to initialize from a response cache snapshot: ${e}`, Errors.InvalidInput);
 }
 }
 /**
-* Indicate if this cache is being used or
+* Indicate if this cache is being used or not.
 *
 * When the cache has entries, returns `true`; otherwise, returns `false`.
 */
```
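The new `ScopedInMemoryCache` introduced in this hunk replaces the old `deepClone`/memory-cache machinery and is part of the public export surface at the end of this diff. A minimal usage sketch, assuming the package version in the header; the scope and id strings here are illustrative only:

```js
import {ScopedInMemoryCache} from "@khanacademy/wonder-blocks-data";

const cache = new ScopedInMemoryCache();

// Values are cloned and frozen on write; scopes and ids must be non-empty
// strings and values cannot be functions.
cache.set("courses", "cs101", {title: "Intro to CS"});

cache.get("courses", "cs101"); // -> {title: "Intro to CS"}
cache.get("courses", "nope");  // -> null

// Purge one entry, a whole scope (optionally filtered by a predicate),
// or everything.
cache.purge("courses", "cs101");
cache.purgeScope("courses", (id, value) => id.startsWith("cs"));
cache.purgeAll();
```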
```diff
@@ -144,13 +104,19 @@ class MemoryCache {
 get inUse() {
 return Object.keys(this._cache).length > 0;
 }
+/**
+* Set a value in the cache.
+*/
+
 
 }
 
+const DefaultScope = "default";
 /**
 * The default instance is stored here.
 * It's created below in the Default() static property.
 */
+
 let _default$2;
 /**
 * Implements the response cache.
```
```diff
@@ -159,10 +125,10 @@ let _default$2;
 */
 
 
-class
+class SsrCache {
 static get Default() {
 if (!_default$2) {
-_default$2 = new
+_default$2 = new SsrCache();
 }
 
 return _default$2;
```
```diff
@@ -174,31 +140,29 @@ class ResponseCache {
 throw new Error("Cannot initialize data response cache more than once");
 }
 
-
-
-
-
-}
+this._hydrationCache = new ScopedInMemoryCache({
+// $FlowIgnore[incompatible-call]
+[DefaultScope]: source
+});
 };
 
-this.cacheData = (
+this.cacheData = (id, data, hydrate) => this._setCachedResponse(id, {
 data
-});
+}, hydrate);
 
-this.cacheError = (
+this.cacheError = (id, error, hydrate) => {
 const errorMessage = typeof error === "string" ? error : error.message;
-return this.
+return this._setCachedResponse(id, {
 error: errorMessage
-});
+}, hydrate);
 };
 
-this.getEntry =
+this.getEntry = id => {
+var _this$_ssrOnlyCache$g, _this$_ssrOnlyCache;
+
 // Get the cached entry for this value.
-//
-
-// we use our SSR-only cache. Otherwise, there's no entry to return.
-const cache = handler.hydrate ? this._hydrationCache : Server.isServerSide() ? this._ssrOnlyCache : undefined;
-const internalEntry = cache == null ? void 0 : cache.retrieve(handler, options); // If we are not server-side and we hydrated something, let's clear
+// We first look in the ssr cache and then the hydration cache.
+const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope, id); // If we are not server-side and we hydrated something, let's clear
 // that from the hydration cache to save memory.
 
 if (this._ssrOnlyCache == null && internalEntry != null) {
```
```diff
@@ -208,48 +172,71 @@ class ResponseCache {
 // that's not an expected use-case. If two different places use the
 // same handler and options (i.e. the same request), then the
 // handler should cater to that to ensure they share the result.
-this._hydrationCache.
-}
+this._hydrationCache.purge(DefaultScope, id);
+} // Getting the typing right between the in-memory cache and this
+// is hard. Just telling flow it's OK.
+// $FlowIgnore[incompatible-return]
+
 
 return internalEntry;
 };
 
-this.remove =
-var _this$_ssrOnlyCache$
+this.remove = id => {
+var _this$_ssrOnlyCache$p, _this$_ssrOnlyCache2;
 
 // NOTE(somewhatabstract): We could invoke removeAll with a predicate
 // to match the key of the entry we're removing, but that's an
 // inefficient way to remove a single item, so let's not do that.
 // Delete the entry from the appropriate cache.
-return
+return this._hydrationCache.purge(DefaultScope, id) || ((_this$_ssrOnlyCache$p = (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purge(DefaultScope, id)) != null ? _this$_ssrOnlyCache$p : false);
 };
 
-this.removeAll =
-var _this$
+this.removeAll = predicate => {
+var _this$_ssrOnlyCache3;
+
+const realPredicate = predicate ? // We know what we're putting into the cache so let's assume it
+// conforms.
+// $FlowIgnore[incompatible-call]
+(_, key, cachedEntry) => predicate(key, cachedEntry) : undefined; // Apply the predicate to what we have in our caches.
 
-
-
+this._hydrationCache.purgeAll(realPredicate);
+
+(_this$_ssrOnlyCache3 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache3.purgeAll(realPredicate);
 };
 
 this.cloneHydratableData = () => {
+var _cache$DefaultScope;
+
 // We return our hydration cache only.
-
+const cache = this._hydrationCache.clone(); // If we're empty, we still want to return an object, so we default
+// to an empty object.
+// We only need the default scope out of our scoped in-memory cache.
+// We know that it conforms to our expectations.
+// $FlowIgnore[incompatible-return]
+
+
+return (_cache$DefaultScope = cache[DefaultScope]) != null ? _cache$DefaultScope : {};
 };
 
-this._ssrOnlyCache = Server.isServerSide() ? ssrOnlyCache || new
-this._hydrationCache = hydrationCache || new
+this._ssrOnlyCache = Server.isServerSide() ? ssrOnlyCache || new ScopedInMemoryCache() : undefined;
+this._hydrationCache = hydrationCache || new ScopedInMemoryCache();
 }
 
-
+_setCachedResponse(id, entry, hydrate) {
 const frozenEntry = Object.freeze(entry);
 
-if (
+if (Server.isServerSide()) {
 // We are server-side.
 // We need to store this value.
-if (
-this._hydrationCache.
+if (hydrate) {
+this._hydrationCache.set(DefaultScope, id, frozenEntry);
 } else {
-
+var _this$_ssrOnlyCache4;
+
+// Usually, when server-side, this cache will always be present.
+// We do fake server-side in our doc example though, when it
+// won't be.
+(_this$_ssrOnlyCache4 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache4.set(DefaultScope, id, frozenEntry);
 }
 }
 
```
```diff
@@ -278,23 +265,14 @@ class RequestFulfillment {
 constructor(responseCache = undefined) {
 this._requests = {};
 
-this.
-
-
-
-
-return this._requests[handler.type];
-};
-
-this.fulfill = (handler, options) => {
-const handlerRequests = this._getHandlerSubcache(handler);
-
-const key = handler.getKey(options);
+this.fulfill = (id, {
+handler,
+hydrate: _hydrate = true
+}) => {
 /**
 * If we have an inflight request, we'll provide that.
 */
-
-const inflight = handlerRequests[key];
+const inflight = this._requests[id];
 
 if (inflight) {
 return inflight;
```
```diff
@@ -310,38 +288,51 @@ class RequestFulfillment {
 } = this._responseCache;
 
 try {
-const request = handler
-delete
+const request = handler().then(data => {
+delete this._requests[id];
+
+if (data == null) {
+// Request aborted. We won't cache this.
+return null;
+}
 /**
 * Let's cache the data!
 *
 * NOTE: This only caches when we're server side.
 */
 
-
+
+return cacheData(id, data, _hydrate);
 }).catch(error => {
-delete
+delete this._requests[id];
 /**
 * Let's cache the error!
 *
 * NOTE: This only caches when we're server side.
 */
 
-return cacheError(
+return cacheError(id, error, _hydrate);
 });
-
+this._requests[id] = request;
 return request;
 } catch (e) {
 /**
 * In this case, we don't cache an inflight request, because there
 * really isn't one.
 */
-return Promise.resolve(cacheError(
+return Promise.resolve(cacheError(id, e, _hydrate));
 }
 };
 
-this._responseCache = responseCache ||
+this._responseCache = responseCache || SsrCache.Default;
 }
+/**
+* Get a promise of a request for a given handler and options.
+*
+* This will return an inflight request if one exists, otherwise it will
+* make a new request. Inflight requests are deleted once they resolve.
+*/
+
 
 
 }
```
```diff
@@ -378,48 +369,31 @@ class RequestTracker {
 
 
 constructor(responseCache = undefined) {
-this._trackedHandlers = {};
 this._trackedRequests = {};
 
-this.trackDataRequest = (handler,
-const key = handler.getKey(options);
-const type = handler.type;
-/**
-* Make sure we have stored the handler for use when fulfilling requests.
-*/
-
-if (this._trackedHandlers[type] == null) {
-this._trackedHandlers[type] = handler;
-this._trackedRequests[type] = {};
-}
+this.trackDataRequest = (id, handler, hydrate) => {
 /**
 * If we don't already have this tracked, then let's track it.
 */
-
-
-
-
+if (this._trackedRequests[id] == null) {
+this._trackedRequests[id] = {
+handler,
+hydrate
+};
 }
 };
 
 this.reset = () => {
-this._trackedHandlers = {};
 this._trackedRequests = {};
 };
 
 this.fulfillTrackedRequests = () => {
 const promises = [];
 
-for (const
-const
-
-const requests = this._trackedRequests[handlerType];
-
-for (const requestKey of Object.keys(requests)) {
-const promise = this._requestFulfillment.fulfill(handler, requests[requestKey]);
+for (const requestKey of Object.keys(this._trackedRequests)) {
+const promise = this._requestFulfillment.fulfill(requestKey, this._trackedRequests[requestKey]);
 
-
-}
+promises.push(promise);
 }
 /**
 * Clear out our tracked info.
```
```diff
@@ -448,7 +422,7 @@ class RequestTracker {
 return Promise.all(promises).then(() => this._responseCache.cloneHydratableData());
 };
 
-this._responseCache = responseCache ||
+this._responseCache = responseCache || SsrCache.Default;
 this._requestFulfillment = new RequestFulfillment(responseCache);
 }
 /**
```
```diff
@@ -475,47 +449,13 @@ class RequestTracker {
 * Calling this method marks tracked requests as fulfilled; requests are
 * removed from the list of tracked requests by calling this method.
 *
-* @returns {Promise<
-* as a result of fulfilling the tracked requests.
+* @returns {Promise<ResponseCache>} The promise of the data that was
+* cached as a result of fulfilling the tracked requests.
 */
 
 
 }
 
-/**
-* Base implementation for creating a request handler.
-*
-* Provides a base implementation of the `IRequestHandler` base class for
-* use with the Wonder Blocks Data framework.
-*/
-class RequestHandler {
-constructor(type, hydrate = true) {
-this._type = type;
-this._hydrate = !!hydrate;
-}
-
-get type() {
-return this._type;
-}
-
-get hydrate() {
-return this._hydrate;
-}
-
-getKey(options) {
-try {
-return options === undefined ? "undefined" : JSON.stringify(options);
-} catch (e) {
-throw new Error(`Failed to auto-generate key: ${e}`);
-}
-}
-
-fulfillRequest(options) {
-throw new Error("Not implemented");
-}
-
-}
-
 /**
 * Component to enable data request tracking when server-side rendering.
 */
```
```diff
@@ -533,16 +473,89 @@ class TrackData extends React.Component {
 }
 
 /**
-*
+* Hook to perform an asynchronous action during server-side rendering.
+*
+* This hook registers an asynchronous action to be performed during
+* server-side rendering. The action is performed only once, and the result
+* is cached against the given identifier so that subsequent calls return that
+* cached result allowing components to render more of the component.
+*
+* This hook requires the Wonder Blocks Data functionality for resolving
+* pending requests, as well as support for the hydration cache to be
+* embedded into a page so that the result can by hydrated (if that is a
+* requirement).
+*
+* The asynchronous action is never invoked on the client-side.
+*/
+const useServerEffect = (requestId, handler, hydrate = true) => {
+// If we're server-side or hydrating, we'll have a cached entry to use.
+// So we get that and use it to initialize our state.
+// This works in both hydration and SSR because the very first call to
+// this will have cached data in those cases as it will be present on the
+// initial render - and subsequent renders on the client it will be null.
+const cachedResult = SsrCache.Default.getEntry(requestId); // We only track data requests when we are server-side and we don't
+// already have a result, as given by the cachedData (which is also the
+// initial value for the result state).
+
+const maybeTrack = useContext(TrackerContext);
+
+if (cachedResult == null && Server.isServerSide()) {
+maybeTrack == null ? void 0 : maybeTrack(requestId, handler, hydrate);
+}
+
+return cachedResult;
+};
+
+/**
+* InterceptContext defines a map from request ID to interception methods.
 *
 * INTERNAL USE ONLY
 */
-const InterceptContext = /*#__PURE__*/React.createContext(
+const InterceptContext = /*#__PURE__*/React.createContext([]);
+
+/**
+* Allow request handling to be intercepted.
+*
+* Hook to take a uniquely identified request handler and return a
+* method that will support request interception from the InterceptRequest
+* component.
+*
+* If you want request interception to be supported with `useServerEffect` or
+* any client-side effect that uses the handler, call this first to generate
+* an intercepted handler, and then invoke `useServerEffect` (or other things)
+* with that intercepted handler.
+*/
+const useRequestInterception = (requestId, handler) => {
+// Get the interceptors that have been registered.
+const interceptors = React.useContext(InterceptContext); // Now, we need to create a new handler that will check if the
+// request is intercepted before ultimately calling the original handler
+// if nothing intercepted it.
+// We memoize this so that it only changes if something related to it
+// changes.
+
+const interceptedHandler = React.useMemo(() => () => {
+// Call the interceptors from closest to furthest.
+// If one returns a non-null result, then we keep that.
+const interceptResponse = interceptors.reduceRight((prev, interceptor) => {
+if (prev != null) {
+return prev;
+}
+
+return interceptor(requestId);
+}, null); // If nothing intercepted this request, invoke the original handler.
+// NOTE: We can't guarantee all interceptors return the same type
+// as our handler, so how can flow know? Let's just suppress that.
+// $FlowFixMe[incompatible-return]
+
+return interceptResponse != null ? interceptResponse : handler();
+}, [handler, interceptors, requestId]);
+return interceptedHandler;
+};
 
 /**
 * Turns a cache entry into a stateful result.
 */
-const
+const resultFromCachedResponse = cacheEntry => {
 // No cache entry means we didn't load one yet.
 if (cacheEntry == null) {
 return {
```
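The `useServerEffect` and `useRequestInterception` hooks added in this hunk are both exported from the package (see the final export hunk). A hedged sketch of how they might be combined; the request id and endpoint are illustrative, and `resultFromCachedResponse` (whose definition begins just above) converts a cached entry into a status/data result:

```js
import * as React from "react";
import {
    useRequestInterception,
    useServerEffect,
    resultFromCachedResponse,
} from "@khanacademy/wonder-blocks-data";

const CourseSummary = ({courseId}) => {
    // Wrap the handler so <InterceptRequests> can intercept it in tests.
    const handler = useRequestInterception(`course:${courseId}`, () =>
        fetch(`/api/course/${courseId}`).then((resp) => resp.json()),
    );

    // During SSR this tracks the request; during hydration it returns the
    // cached entry; on later client-side renders it returns null.
    const cacheEntry = useServerEffect(`course:${courseId}`, handler);
    const result = resultFromCachedResponse(cacheEntry);

    if (result.status === "success") {
        return <div>{result.data.title}</div>;
    }
    if (result.status === "error") {
        return <div>Something went wrong: {result.error}</div>;
    }
    return <div>Loading…</div>;
};
```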
```diff
@@ -555,113 +568,82 @@ const resultFromCacheEntry = cacheEntry => {
 error
 } = cacheEntry;
 
-if (
+if (error != null) {
 return {
-status: "
-
+status: "error",
+error
 };
 }
 
-if (
-// We should never get here ever.
+if (data != null) {
 return {
-status: "
-
+status: "success",
+data
 };
 }
 
 return {
-status: "
-error
+status: "aborted"
 };
 };
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-//
-
-
-
-
-
-
-const fulfillRequestFn = options => {
-var _interceptor$fulfillR;
-
-return (_interceptor$fulfillR = interceptor.fulfillRequest(options)) != null ? _interceptor$fulfillR : handler.fulfillRequest(options);
-};
-
-return {
-fulfillRequest: fulfillRequestFn,
-getKey: options => handler.getKey(options),
-type: handler.type,
-hydrate: handler.hydrate
-};
-}; // We only track data requests when we are server-side and we don't
-// already have a result, as given by the cachedData (which is also the
-// initial value for the result state).
-
-
-const maybeTrack = useContext(TrackerContext);
-
-if (result == null && Server.isServerSide()) {
-maybeTrack == null ? void 0 : maybeTrack(getMaybeInterceptedHandler(), options);
-} // We need to update our request when the handler changes or the key
-// to the options change, so we keep track of those.
-// However, even if we are hydrating from cache, we still need to make the
-// request at least once, so we do not initialize these references.
-
-
-const handlerRef = useRef();
-const keyRef = useRef();
-const interceptorRef = useRef(); // This effect will ensure that we fulfill the request as desired.
-
-useEffect(() => {
-// If we are server-side, then just skip the effect. We track requests
-// during SSR and fulfill them outside of the React render cycle.
-// NOTE: This shouldn't happen since effects would not run on the server
-// but let's be defensive - I think it makes the code clearer.
-
-/* istanbul ignore next */
+/**
+* This component is the main component of Wonder Blocks Data. With this, data
+* requirements can be placed in a React application in a manner that will
+* support server-side rendering and efficient caching.
+*/
+const Data = ({
+requestId,
+handler,
+children,
+hydrate,
+showOldDataWhileLoading,
+alwaysRequestOnHydration
+}) => {
+const interceptedHandler = useRequestInterception(requestId, handler);
+const hydrateResult = useServerEffect(requestId, interceptedHandler, hydrate);
+const [currentResult, setResult] = React.useState(hydrateResult); // Here we make sure the request still occurs client-side as needed.
+// This is for legacy usage that expects this. Eventually we will want
+// to deprecate.
+
+React.useEffect(() => {
+// This is here until I can do a better documentation example for
+// the TrackData docs.
+// istanbul ignore next
 if (Server.isServerSide()) {
 return;
-} //
+} // We don't bother with this if we have hydration data and we're not
+// forcing a request on hydration.
+// We don't care if these things change after the first render,
+// so we don't want them in the inputs array.
 
 
-
-
-
+if (!alwaysRequestOnHydration && (hydrateResult == null ? void 0 : hydrateResult.data) != null) {
+return;
+} // If we're not hydrating a result and we're not going to render
+// with old data until we're loaded, we want to make sure we set our
 // result to null so that we're in the loading state.
 
-
+
+if (!showOldDataWhileLoading) {
 // Mark ourselves as loading.
 setResult(null);
 } // We aren't server-side, so let's make the request.
-//
-//
+// We don't need to use our built-in request fulfillment here if we
+// don't want, but it does mean we'll share inflight requests for the
+// same ID and the result will be in the same format as the
+// hydrated value.
 
 
 let cancel = false;
-RequestFulfillment.Default.fulfill(
+RequestFulfillment.Default.fulfill(requestId, {
+handler: interceptedHandler
+}).then(result => {
 if (cancel) {
 return;
 }
 
-setResult(
+setResult(result);
 return;
 }).catch(e => {
 if (cancel) {
```
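The rewritten result mapping above now distinguishes error, success, and aborted cache entries; the "no entry yet" loading case is handled by unchanged lines not shown in this hunk. A small sketch of the mapping using the exported `resultFromCachedResponse`; the entry values are illustrative:

```js
import {resultFromCachedResponse} from "@khanacademy/wonder-blocks-data";

resultFromCachedResponse({error: "Boom!"});   // -> {status: "error", error: "Boom!"}
resultFromCachedResponse({data: {id: "42"}}); // -> {status: "success", data: {id: "42"}}
resultFromCachedResponse({});                 // -> {status: "aborted"}
```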
```diff
@@ -676,68 +658,129 @@ const useData = (handler, options) => {
 
 console.error(`Unexpected error occurred during data fulfillment: ${e}`);
 setResult({
-data: null,
 error: typeof e === "string" ? e : e.message
 });
 return;
 });
 return () => {
 cancel = true;
-}; //
-//
-//
-//
+}; // If the handler changes, we don't care. The ID is what indicates
+// the request that should be made and folks shouldn't be changing the
+// handler without changing the ID as well.
+// In addition, we don't want to include hydrateResult nor
+// alwaysRequestOnHydration as them changinng after the first pass
+// is irrelevant.
+// Finally, we don't want to include showOldDataWhileLoading as that
+// changing on its own is also not relevant. It only matters if the
+// request itself changes. All of which is to say that we only
+// run this effect for the ID changing.
 // eslint-disable-next-line react-hooks/exhaustive-deps
-}, [
-return
+}, [requestId]);
+return children(resultFromCachedResponse(currentResult));
 };
 
 /**
-* This component
-*
-* support server-side rendering and efficient caching.
-*/
-const Data = props => {
-const data = useData(props.handler, props.options);
-return props.children(data);
-};
-
-/**
-* This component provides a mechanism to intercept the data requests for the
-* type of a given handler and provide alternative results. This is mostly
-* useful for testing.
+* This component provides a mechanism to intercept data requests.
+* This is for use in testing.
 *
 * This component is not recommended for use in production code as it
 * can prevent predictable functioning of the Wonder Blocks Data framework.
 * One possible side-effect is that inflight requests from the interceptor could
-* be picked up by `Data` component requests
-*
+* be picked up by `Data` component requests from outside the children of this
+* component.
 *
-*
-*
-*
-* will be replaced.
+* Interceptions within the same component tree are chained such that the
+* interceptor closest to the intercepted request is called first, and the
+* furthest interceptor is called last.
 */
-
-
-
-
+const InterceptRequests = ({
+interceptor,
+children
+}) => {
+const interceptors = React.useContext(InterceptContext);
+const updatedInterceptors = React.useMemo( // We could build this in reverse order so that our hook that does
+// the interception didn't have to use reduceRight, but I think it
+// is easier to think about if we do this in component tree order.
+() => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
+return /*#__PURE__*/React.createElement(InterceptContext.Provider, {
+value: updatedInterceptors
+}, children);
+};
 
-
-
-
+/**
+* This is the cache.
+* It's incredibly complex.
+* Very in-memory. So cache. Such complex. Wow.
+*/
+const cache = new ScopedInMemoryCache();
+/**
+* Clear the in-memory cache or a single scope within it.
+*/
 
-
-
-
+const clearSharedCache = (scope = "") => {
+// If we have a valid scope (empty string is falsy), then clear that scope.
+if (scope && typeof scope === "string") {
+cache.purgeScope(scope);
+} else {
+// Just reset the object. This should be sufficient.
+cache.purgeAll();
+}
+};
+/**
+* Hook to retrieve data from and store data in an in-memory cache.
+*
+* @returns {[?ReadOnlyCacheValue, CacheValueFn]}
+* Returns an array containing the current cache entry (or undefined), a
+* function to set the cache entry (passing null or undefined to this function
+* will delete the entry).
+*
+* To clear a single scope within the cache or the entire cache,
+* the `clearScopedCache` export is available.
+*
+* NOTE: Unlike useState or useReducer, we don't automatically update folks
+* if the value they reference changes. We might add it later (if we need to),
+* but the likelihood here is that things won't be changing in this cache in a
+* way where we would need that. If we do (and likely only in specific
+* circumstances), we should consider adding a simple boolean useState that can
+* be toggled to cause a rerender whenever the referenced cached data changes
+* so that callers can re-render on cache changes. However, we should make
+* sure this toggling is optional - or we could use a callback argument, to
+* achieve this on an as-needed basis.
+*/
 
-
-
-
-
+const useSharedCache = (id, scope, initialValue) => {
+// Verify arguments.
+if (!id || typeof id !== "string") {
+throw new KindError("id must be a non-empty string", Errors.InvalidInput);
 }
 
-
+if (!scope || typeof scope !== "string") {
+throw new KindError("scope must be a non-empty string", Errors.InvalidInput);
+} // Memoize our APIs.
+// This one allows callers to set or replace the cached value.
+
+
+const cacheValue = React.useMemo(() => value => value == null ? cache.purge(scope, id) : cache.set(scope, id, value), [id, scope]); // We don't memo-ize the current value, just in case the cache was updated
+// since our last run through. Also, our cache does not know what type it
+// stores, so we have to cast it to the type we're exporting. This is a
+// dev time courtesy, rather than a runtime thing.
+// $FlowIgnore[incompatible-type]
+
+let currentValue = cache.get(scope, id); // If we have an initial value, we need to add it to the cache
+// and use it as our current value.
+
+if (currentValue == null && initialValue !== undefined) {
+// Get the initial value.
+const value = typeof initialValue === "function" ? initialValue() : initialValue; // Update the cache.
+
+cacheValue(value); // Make sure we return this value as our current value.
+
+currentValue = value;
+} // Now we have everything, let's return it.
+
+
+return [currentValue, cacheValue];
+};
 
 const GqlRouterContext = /*#__PURE__*/React.createContext(null);
 
```
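The `Data` component now takes a `requestId` and a plain async `handler` in place of the removed `IRequestHandler`/`options` pair, and `InterceptRequests` replaces the deleted intercept-data implementation listed at the top of this diff. A hedged usage sketch; the id, endpoint, and intercepted payload are illustrative only:

```js
import * as React from "react";
import {Data, InterceptRequests} from "@khanacademy/wonder-blocks-data";

const CourseTitle = ({courseId}) => (
    <Data
        requestId={`course:${courseId}`}
        handler={() => fetch(`/api/course/${courseId}`).then((r) => r.json())}
    >
        {(result) => {
            switch (result.status) {
                case "success":
                    return result.data.title;
                case "error":
                    return `Error: ${result.error}`;
                case "aborted":
                    return "Request aborted";
                default:
                    return "Loading…";
            }
        }}
    </Data>
);

// In tests, an interceptor receives the requestId and may return a promise
// of data, or null to fall through to the next interceptor / real handler.
const WithFakeCourse = () => (
    <InterceptRequests
        interceptor={(requestId) =>
            requestId === "course:cs101"
                ? Promise.resolve({title: "Intercepted course"})
                : null
        }
    >
        <CourseTitle courseId="cs101" />
    </InterceptRequests>
);
```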
```diff
@@ -865,6 +908,10 @@ const getGqlDataFromResponse = async response => {
 *
 * The fetch function will resolve null if the request was aborted, otherwise
 * it will resolve the data returned by the GraphQL server.
+*
+* Context is merged with the default context provided to the GqlRouter.
+* Values in the partial context given to the returned fetch function will
+* only be included if they have a value other than undefined.
 */
 const useGql = () => {
 // This hook only works if the `GqlRouter` has been used to setup context.
```
```diff
@@ -886,10 +933,22 @@ const useGql = () => {
 const gqlFetch = useMemo(() => (operation, options = Object.freeze({})) => {
 const {
 variables,
-context
-} = options; //
+context = {}
+} = options; // Let's merge the partial context of the fetch with the
+// default context. We deliberately don't spread because
+// spreading would overwrite default context values with
+// undefined if the partial context includes a value explicitly
+// set to undefined. Instead, we use a map/reduce of keys.
+
+const mergedContext = Object.keys(context).reduce((acc, key) => {
+if (context[key] !== undefined) {
+acc[key] = context[key];
+}
+
+return acc;
+}, _extends({}, defaultContext)); // Invoke the fetch and extract the data.
 
-return fetch(operation, variables,
+return fetch(operation, variables, mergedContext).then(getGqlDataFromResponse, error => {
 // Return null if the request was aborted.
 // The only way to detect this reliably, it seems, is to
 // check the error name and see if it's "AbortError" (this
```
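The `useGql` change above merges a per-call partial context over the `GqlRouter` default context, skipping keys whose value is `undefined`. A hedged sketch; the operation, variables, and context fields are illustrative and assume a surrounding `GqlRouter` as before:

```js
import * as React from "react";
import {useGql} from "@khanacademy/wonder-blocks-data";

const MutateButton = ({operation}) => {
    const gqlFetch = useGql();

    const onClick = () => {
        gqlFetch(operation, {
            variables: {id: "42"},
            // Only defined values override the GqlRouter defaultContext;
            // e.g. {locale: undefined} would leave the default locale intact.
            context: {locale: "es-ES"},
        }).then((data) => {
            if (data == null) {
                return; // the request was aborted
            }
            // ...use data
        });
    };

    return <button onClick={onClick}>Go</button>;
};
```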
```diff
@@ -906,7 +965,7 @@ const useGql = () => {
 return gqlFetch;
 };
 
-const initializeCache = source =>
+const initializeCache = source => SsrCache.Default.initialize(source);
 const fulfillAllDataRequests = () => {
 if (!Server.isServerSide()) {
 return Promise.reject(new Error("Data requests are not tracked when client-side"));
```
```diff
@@ -921,7 +980,7 @@ const hasUnfulfilledRequests = () => {
 
 return RequestTracker.Default.hasUnfulfilledRequests;
 };
-const removeFromCache =
-const removeAllFromCache =
+const removeFromCache = id => SsrCache.Default.remove(id);
+const removeAllFromCache = predicate => SsrCache.Default.removeAll(predicate);
 
-export { Data, GqlError, GqlErrors, GqlRouter,
+export { Data, GqlError, GqlErrors, GqlRouter, InterceptRequests, ScopedInMemoryCache, TrackData, clearSharedCache, fulfillAllDataRequests, hasUnfulfilledRequests, initializeCache, removeAllFromCache, removeFromCache, resultFromCachedResponse, useGql, useRequestInterception, useServerEffect, useSharedCache };
```
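Taken together, the exports at the end of this bundle support the same server-side rendering flow as before, but keyed by request ids: track requests during a render pass, fulfill them, embed the hydratable cache in the page, then seed the client with it. A rough, hedged sketch of that flow; `renderToString`, `App`, and the embedding/hydration glue are illustrative stand-ins and not part of this package:

```js
import * as React from "react";
import {renderToString} from "react-dom/server";
import {
    TrackData,
    fulfillAllDataRequests,
    initializeCache,
} from "@khanacademy/wonder-blocks-data";

// Server side (illustrative): render once inside <TrackData> so that
// useServerEffect/Data requests get tracked, resolve them all, then render
// again with the now-cached results.
async function renderApp(App) {
    renderToString(
        <TrackData>
            <App />
        </TrackData>,
    );
    const cache = await fulfillAllDataRequests();
    const html = renderToString(<App />);
    return {html, cache};
}

// Client side (illustrative): seed the hydration cache from the embedded
// snapshot before hydrating the React tree.
function bootstrap(cacheSnapshotFromPage) {
    initializeCache(cacheSnapshotFromPage);
    // ...then hydrate the app as usual.
}
```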