@khanacademy/wonder-blocks-data 5.0.0 → 6.0.1
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +31 -0
- package/dist/es/index.js +778 -372
- package/dist/index.js +1203 -551
- package/legacy-docs.md +3 -0
- package/package.json +2 -2
- package/src/__docs__/_overview_.stories.mdx +18 -0
- package/src/__docs__/_overview_graphql.stories.mdx +35 -0
- package/src/__docs__/_overview_ssr_.stories.mdx +185 -0
- package/src/__docs__/_overview_testing_.stories.mdx +123 -0
- package/src/__docs__/exports.clear-shared-cache.stories.mdx +20 -0
- package/src/__docs__/exports.data-error.stories.mdx +23 -0
- package/src/__docs__/exports.data-errors.stories.mdx +23 -0
- package/src/{components/data.md → __docs__/exports.data.stories.mdx} +15 -18
- package/src/__docs__/exports.fulfill-all-data-requests.stories.mdx +24 -0
- package/src/__docs__/exports.gql-error.stories.mdx +23 -0
- package/src/__docs__/exports.gql-errors.stories.mdx +20 -0
- package/src/__docs__/exports.gql-router.stories.mdx +29 -0
- package/src/__docs__/exports.has-unfulfilled-requests.stories.mdx +20 -0
- package/src/{components/intercept-requests.md → __docs__/exports.intercept-requests.stories.mdx} +16 -1
- package/src/__docs__/exports.intialize-cache.stories.mdx +29 -0
- package/src/__docs__/exports.remove-all-from-cache.stories.mdx +24 -0
- package/src/__docs__/exports.remove-from-cache.stories.mdx +25 -0
- package/src/__docs__/exports.request-fulfillment.stories.mdx +36 -0
- package/src/__docs__/exports.scoped-in-memory-cache.stories.mdx +92 -0
- package/src/__docs__/exports.serializable-in-memory-cache.stories.mdx +112 -0
- package/src/__docs__/exports.status.stories.mdx +31 -0
- package/src/{components/track-data.md → __docs__/exports.track-data.stories.mdx} +15 -0
- package/src/__docs__/exports.use-cached-effect.stories.mdx +41 -0
- package/src/__docs__/exports.use-gql.stories.mdx +73 -0
- package/src/__docs__/exports.use-hydratable-effect.stories.mdx +43 -0
- package/src/__docs__/exports.use-server-effect.stories.mdx +38 -0
- package/src/__docs__/exports.use-shared-cache.stories.mdx +30 -0
- package/src/__docs__/exports.when-client-side.stories.mdx +33 -0
- package/src/__docs__/types.cached-response.stories.mdx +29 -0
- package/src/__docs__/types.error-options.stories.mdx +21 -0
- package/src/__docs__/types.gql-context.stories.mdx +20 -0
- package/src/__docs__/types.gql-fetch-fn.stories.mdx +24 -0
- package/src/__docs__/types.gql-fetch-options.stories.mdx +24 -0
- package/src/__docs__/types.gql-operation-type.stories.mdx +24 -0
- package/src/__docs__/types.gql-operation.stories.mdx +67 -0
- package/src/__docs__/types.response-cache.stories.mdx +33 -0
- package/src/__docs__/types.result.stories.mdx +39 -0
- package/src/__docs__/types.scoped-cache.stories.mdx +27 -0
- package/src/__docs__/types.valid-cache-data.stories.mdx +23 -0
- package/src/__tests__/__snapshots__/generated-snapshot.test.js.snap +0 -80
- package/src/__tests__/generated-snapshot.test.js +0 -24
- package/src/components/__tests__/data.test.js +149 -128
- package/src/components/data.js +22 -112
- package/src/components/intercept-requests.js +1 -1
- package/src/hooks/__tests__/__snapshots__/use-shared-cache.test.js.snap +8 -8
- package/src/hooks/__tests__/use-cached-effect.test.js +507 -0
- package/src/hooks/__tests__/use-gql-router-context.test.js +133 -0
- package/src/hooks/__tests__/use-gql.test.js +1 -30
- package/src/hooks/__tests__/use-hydratable-effect.test.js +728 -0
- package/src/hooks/__tests__/use-server-effect.test.js +39 -11
- package/src/hooks/use-cached-effect.js +225 -0
- package/src/hooks/use-gql-router-context.js +50 -0
- package/src/hooks/use-gql.js +22 -52
- package/src/hooks/use-hydratable-effect.js +213 -0
- package/src/hooks/use-request-interception.js +20 -23
- package/src/hooks/use-server-effect.js +12 -5
- package/src/hooks/use-shared-cache.js +13 -11
- package/src/index.js +53 -2
- package/src/util/__tests__/__snapshots__/serializable-in-memory-cache.test.js.snap +19 -0
- package/src/util/__tests__/merge-gql-context.test.js +74 -0
- package/src/util/__tests__/request-fulfillment.test.js +23 -42
- package/src/util/__tests__/request-tracking.test.js +26 -7
- package/src/util/__tests__/result-from-cache-response.test.js +19 -5
- package/src/util/__tests__/scoped-in-memory-cache.test.js +6 -85
- package/src/util/__tests__/serializable-in-memory-cache.test.js +398 -0
- package/src/util/__tests__/ssr-cache.test.js +52 -52
- package/src/util/abort-error.js +15 -0
- package/src/util/data-error.js +58 -0
- package/src/util/get-gql-data-from-response.js +3 -2
- package/src/util/gql-error.js +19 -11
- package/src/util/merge-gql-context.js +34 -0
- package/src/util/request-fulfillment.js +49 -46
- package/src/util/request-tracking.js +69 -15
- package/src/util/result-from-cache-response.js +12 -16
- package/src/util/scoped-in-memory-cache.js +24 -47
- package/src/util/serializable-in-memory-cache.js +49 -0
- package/src/util/ssr-cache.js +9 -8
- package/src/util/status.js +30 -0
- package/src/util/types.js +18 -1
- package/docs.md +0 -122
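
The headline change visible in the `dist/es/index.js` diff below is that the `Data` component is now backed by a new hook-based API (`useServerEffect`, `useCachedEffect`, `useHydratableEffect`) and a `Status` result helper, plus new `DataError`/`DataErrors` types. For orientation, here is a minimal usage sketch of the render-prop API surfaced by this diff; `fetchUser` is a hypothetical request handler, not part of the package:

import * as React from "react";
import {Data} from "@khanacademy/wonder-blocks-data";

// Hypothetical handler; any function returning a promise of data works.
const fetchUser = (userId) =>
    fetch(`/api/users/${userId}`).then((resp) => resp.json());

const UserGreeting = ({userId}) => (
    <Data requestId={`user-${userId}`} handler={() => fetchUser(userId)}>
        {(result) => {
            // `result` is a Result<TData> built by the Status helpers in this diff.
            switch (result.status) {
                case "loading":
                    return "Loading…";
                case "success":
                    return `Hello, ${result.data.name}!`;
                case "error":
                    return `Something went wrong: ${result.error.message}`;
                case "aborted":
                default:
                    return null;
            }
        }}
    </Data>
);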
package/dist/es/index.js
CHANGED
|
@@ -1,117 +1,216 @@
|
|
|
1
|
-
import { Server } from '@khanacademy/wonder-blocks-core';
|
|
2
|
-
import { KindError,
|
|
3
|
-
import * as React from 'react';
|
|
4
|
-
import { useContext, useMemo } from 'react';
|
|
1
|
+
import { Server, useForceUpdate } from '@khanacademy/wonder-blocks-core';
|
|
2
|
+
import { KindError, clone } from '@khanacademy/wonder-stuff-core';
|
|
5
3
|
import _extends from '@babel/runtime/helpers/extends';
|
|
4
|
+
import * as React from 'react';
|
|
5
|
+
import { useContext, useRef, useMemo, useCallback } from 'react';
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Error kinds for DataError.
|
|
9
|
+
*/
|
|
10
|
+
const DataErrors = Object.freeze({
|
|
11
|
+
/**
|
|
12
|
+
* The kind of error is not known.
|
|
13
|
+
*/
|
|
14
|
+
Unknown: "Unknown",
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* The error is internal to the executing code.
|
|
18
|
+
*/
|
|
19
|
+
Internal: "Internal",
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* There was a problem with the provided input.
|
|
23
|
+
*/
|
|
24
|
+
InvalidInput: "InvalidInput",
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* A network error occurred.
|
|
28
|
+
*/
|
|
29
|
+
Network: "Network",
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Response could not be parsed.
|
|
33
|
+
*/
|
|
34
|
+
Parse: "Parse",
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* An error that occurred during SSR and was hydrated from cache
|
|
38
|
+
*/
|
|
39
|
+
Hydrated: "Hydrated"
|
|
40
|
+
});
|
|
41
|
+
/**
|
|
42
|
+
* An error from the Wonder Blocks Data API.
|
|
43
|
+
*
|
|
44
|
+
* Errors of this type will have names of the format:
|
|
45
|
+
* `${kind}DataError`
|
|
46
|
+
*/
|
|
47
|
+
|
|
48
|
+
class DataError extends KindError {
|
|
49
|
+
constructor(message, kind, {
|
|
50
|
+
metadata,
|
|
51
|
+
cause
|
|
52
|
+
} = {}) {
|
|
53
|
+
super(message, kind, {
|
|
54
|
+
metadata,
|
|
55
|
+
cause,
|
|
56
|
+
name: "Data"
|
|
57
|
+
});
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
}
|
|
6
61
|
|
|
7
62
|
/**
|
|
8
63
|
* Describe an in-memory cache.
|
|
9
64
|
*/
|
|
10
65
|
class ScopedInMemoryCache {
|
|
11
|
-
constructor(initialCache =
|
|
12
|
-
this.
|
|
13
|
-
|
|
66
|
+
constructor(initialCache = {}) {
|
|
67
|
+
this._cache = initialCache;
|
|
68
|
+
}
|
|
69
|
+
/**
|
|
70
|
+
* Indicate if this cache is being used or not.
|
|
71
|
+
*
|
|
72
|
+
* When the cache has entries, returns `true`; otherwise, returns `false`.
|
|
73
|
+
*/
|
|
14
74
|
|
|
15
|
-
if (!id || typeof id !== "string") {
|
|
16
|
-
throw new KindError("id must be non-empty string", Errors.InvalidInput);
|
|
17
|
-
}
|
|
18
75
|
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
76
|
+
get inUse() {
|
|
77
|
+
return Object.keys(this._cache).length > 0;
|
|
78
|
+
}
|
|
79
|
+
/**
|
|
80
|
+
* Set a value in the cache.
|
|
81
|
+
*/
|
|
22
82
|
|
|
23
|
-
if (typeof value === "function") {
|
|
24
|
-
throw new KindError("value must be a non-function value", Errors.InvalidInput);
|
|
25
|
-
}
|
|
26
83
|
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
};
|
|
84
|
+
set(scope, id, value) {
|
|
85
|
+
var _this$_cache$scope;
|
|
30
86
|
|
|
31
|
-
|
|
32
|
-
|
|
87
|
+
if (!id || typeof id !== "string") {
|
|
88
|
+
throw new DataError("id must be non-empty string", DataErrors.InvalidInput);
|
|
89
|
+
}
|
|
33
90
|
|
|
34
|
-
|
|
35
|
-
|
|
91
|
+
if (!scope || typeof scope !== "string") {
|
|
92
|
+
throw new DataError("scope must be non-empty string", DataErrors.InvalidInput);
|
|
93
|
+
}
|
|
36
94
|
|
|
37
|
-
|
|
38
|
-
|
|
95
|
+
if (typeof value === "function") {
|
|
96
|
+
throw new DataError("value must be a non-function value", DataErrors.InvalidInput);
|
|
97
|
+
}
|
|
39
98
|
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
99
|
+
this._cache[scope] = (_this$_cache$scope = this._cache[scope]) != null ? _this$_cache$scope : {};
|
|
100
|
+
this._cache[scope][id] = value;
|
|
101
|
+
}
|
|
102
|
+
/**
|
|
103
|
+
* Retrieve a value from the cache.
|
|
104
|
+
*/
|
|
43
105
|
|
|
44
|
-
delete this._cache[scope][id];
|
|
45
106
|
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
}
|
|
49
|
-
};
|
|
107
|
+
get(scope, id) {
|
|
108
|
+
var _this$_cache$scope$id, _this$_cache$scope2;
|
|
50
109
|
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
110
|
+
return (_this$_cache$scope$id = (_this$_cache$scope2 = this._cache[scope]) == null ? void 0 : _this$_cache$scope2[id]) != null ? _this$_cache$scope$id : null;
|
|
111
|
+
}
|
|
112
|
+
/**
|
|
113
|
+
* Purge an item from the cache.
|
|
114
|
+
*/
|
|
55
115
|
|
|
56
|
-
if (predicate == null) {
|
|
57
|
-
delete this._cache[scope];
|
|
58
|
-
return;
|
|
59
|
-
}
|
|
60
116
|
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
delete this._cache[scope][key];
|
|
64
|
-
}
|
|
65
|
-
}
|
|
117
|
+
purge(scope, id) {
|
|
118
|
+
var _this$_cache$scope3;
|
|
66
119
|
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
};
|
|
120
|
+
if (!((_this$_cache$scope3 = this._cache[scope]) != null && _this$_cache$scope3[id])) {
|
|
121
|
+
return;
|
|
122
|
+
}
|
|
71
123
|
|
|
72
|
-
this.
|
|
73
|
-
if (predicate == null) {
|
|
74
|
-
this._cache = {};
|
|
75
|
-
return;
|
|
76
|
-
}
|
|
124
|
+
delete this._cache[scope][id];
|
|
77
125
|
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
126
|
+
if (Object.keys(this._cache[scope]).length === 0) {
|
|
127
|
+
delete this._cache[scope];
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
/**
|
|
131
|
+
* Purge a scope of items that match the given predicate.
|
|
132
|
+
*
|
|
133
|
+
* If the predicate is omitted, then all items in the scope are purged.
|
|
134
|
+
*/
|
|
82
135
|
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
136
|
+
|
|
137
|
+
purgeScope(scope, predicate) {
|
|
138
|
+
if (!this._cache[scope]) {
|
|
139
|
+
return;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
if (predicate == null) {
|
|
143
|
+
delete this._cache[scope];
|
|
144
|
+
return;
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
for (const key of Object.keys(this._cache[scope])) {
|
|
148
|
+
if (predicate(key, this._cache[scope][key])) {
|
|
149
|
+
delete this._cache[scope][key];
|
|
88
150
|
}
|
|
89
|
-
}
|
|
151
|
+
}
|
|
90
152
|
|
|
91
|
-
|
|
92
|
-
this._cache
|
|
93
|
-
} catch (e) {
|
|
94
|
-
throw new KindError(`An error occurred trying to initialize from a response cache snapshot: ${e}`, Errors.InvalidInput);
|
|
153
|
+
if (Object.keys(this._cache[scope]).length === 0) {
|
|
154
|
+
delete this._cache[scope];
|
|
95
155
|
}
|
|
96
156
|
}
|
|
97
157
|
/**
|
|
98
|
-
*
|
|
158
|
+
* Purge all items from the cache that match the given predicate.
|
|
99
159
|
*
|
|
100
|
-
*
|
|
160
|
+
* If the predicate is omitted, then all items in the cache are purged.
|
|
101
161
|
*/
|
|
102
162
|
|
|
103
163
|
|
|
104
|
-
|
|
105
|
-
|
|
164
|
+
purgeAll(predicate) {
|
|
165
|
+
if (predicate == null) {
|
|
166
|
+
this._cache = {};
|
|
167
|
+
return;
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
for (const scope of Object.keys(this._cache)) {
|
|
171
|
+
this.purgeScope(scope, (id, value) => predicate(scope, id, value));
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
/**
|
|
178
|
+
* Describe a serializable in-memory cache.
|
|
179
|
+
*/
|
|
180
|
+
class SerializableInMemoryCache extends ScopedInMemoryCache {
|
|
181
|
+
constructor(initialCache = {}) {
|
|
182
|
+
try {
|
|
183
|
+
super(clone(initialCache));
|
|
184
|
+
} catch (e) {
|
|
185
|
+
throw new DataError(`An error occurred trying to initialize from a response cache snapshot: ${e}`, DataErrors.InvalidInput);
|
|
186
|
+
}
|
|
106
187
|
}
|
|
107
188
|
/**
|
|
108
189
|
* Set a value in the cache.
|
|
109
190
|
*/
|
|
110
191
|
|
|
111
192
|
|
|
193
|
+
set(scope, id, value) {
|
|
194
|
+
super.set(scope, id, Object.freeze(clone(value)));
|
|
195
|
+
}
|
|
196
|
+
/**
|
|
197
|
+
* Clone the cache.
|
|
198
|
+
*/
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
clone() {
|
|
202
|
+
try {
|
|
203
|
+
return clone(this._cache);
|
|
204
|
+
} catch (e) {
|
|
205
|
+
throw new DataError("An error occurred while trying to clone the cache", DataErrors.Internal, {
|
|
206
|
+
cause: e
|
|
207
|
+
});
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
|
|
112
211
|
}
|
|
113
212
|
|
|
114
|
-
const DefaultScope = "default";
|
|
213
|
+
const DefaultScope$2 = "default";
|
|
115
214
|
/**
|
|
116
215
|
* The default instance is stored here.
|
|
117
216
|
* It's created below in the Default() static property.
|
|
@@ -140,9 +239,9 @@ class SsrCache {
|
|
|
140
239
|
throw new Error("Cannot initialize data response cache more than once");
|
|
141
240
|
}
|
|
142
241
|
|
|
143
|
-
this._hydrationCache = new
|
|
242
|
+
this._hydrationCache = new SerializableInMemoryCache({
|
|
144
243
|
// $FlowIgnore[incompatible-call]
|
|
145
|
-
[DefaultScope]: source
|
|
244
|
+
[DefaultScope$2]: source
|
|
146
245
|
});
|
|
147
246
|
};
|
|
148
247
|
|
|
@@ -162,7 +261,7 @@ class SsrCache {
|
|
|
162
261
|
|
|
163
262
|
// Get the cached entry for this value.
|
|
164
263
|
// We first look in the ssr cache and then the hydration cache.
|
|
165
|
-
const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope, id); // If we are not server-side and we hydrated something, let's clear
|
|
264
|
+
const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope$2, id); // If we are not server-side and we hydrated something, let's clear
|
|
166
265
|
// that from the hydration cache to save memory.
|
|
167
266
|
|
|
168
267
|
if (this._ssrOnlyCache == null && internalEntry != null) {
|
|
@@ -172,7 +271,7 @@ class SsrCache {
|
|
|
172
271
|
// that's not an expected use-case. If two different places use the
|
|
173
272
|
// same handler and options (i.e. the same request), then the
|
|
174
273
|
// handler should cater to that to ensure they share the result.
|
|
175
|
-
this._hydrationCache.purge(DefaultScope, id);
|
|
274
|
+
this._hydrationCache.purge(DefaultScope$2, id);
|
|
176
275
|
} // Getting the typing right between the in-memory cache and this
|
|
177
276
|
// is hard. Just telling flow it's OK.
|
|
178
277
|
// $FlowIgnore[incompatible-return]
|
|
@@ -188,7 +287,7 @@ class SsrCache {
|
|
|
188
287
|
// to match the key of the entry we're removing, but that's an
|
|
189
288
|
// inefficient way to remove a single item, so let's not do that.
|
|
190
289
|
// Delete the entry from the appropriate cache.
|
|
191
|
-
return this._hydrationCache.purge(DefaultScope, id) || ((_this$_ssrOnlyCache$p = (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purge(DefaultScope, id)) != null ? _this$_ssrOnlyCache$p : false);
|
|
290
|
+
return this._hydrationCache.purge(DefaultScope$2, id) || ((_this$_ssrOnlyCache$p = (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purge(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$p : false);
|
|
192
291
|
};
|
|
193
292
|
|
|
194
293
|
this.removeAll = predicate => {
|
|
@@ -215,11 +314,11 @@ class SsrCache {
|
|
|
215
314
|
// $FlowIgnore[incompatible-return]
|
|
216
315
|
|
|
217
316
|
|
|
218
|
-
return (_cache$DefaultScope = cache[DefaultScope]) != null ? _cache$DefaultScope : {};
|
|
317
|
+
return (_cache$DefaultScope = cache[DefaultScope$2]) != null ? _cache$DefaultScope : {};
|
|
219
318
|
};
|
|
220
319
|
|
|
221
|
-
this._ssrOnlyCache = Server.isServerSide() ? ssrOnlyCache || new
|
|
222
|
-
this._hydrationCache = hydrationCache || new
|
|
320
|
+
this._ssrOnlyCache = Server.isServerSide() ? ssrOnlyCache || new SerializableInMemoryCache() : undefined;
|
|
321
|
+
this._hydrationCache = hydrationCache || new SerializableInMemoryCache();
|
|
223
322
|
}
|
|
224
323
|
|
|
225
324
|
_setCachedResponse(id, entry, hydrate) {
|
|
@@ -229,14 +328,14 @@ class SsrCache {
|
|
|
229
328
|
// We are server-side.
|
|
230
329
|
// We need to store this value.
|
|
231
330
|
if (hydrate) {
|
|
232
|
-
this._hydrationCache.set(DefaultScope, id, frozenEntry);
|
|
331
|
+
this._hydrationCache.set(DefaultScope$2, id, frozenEntry);
|
|
233
332
|
} else {
|
|
234
333
|
var _this$_ssrOnlyCache4;
|
|
235
334
|
|
|
236
335
|
// Usually, when server-side, this cache will always be present.
|
|
237
336
|
// We do fake server-side in our doc example though, when it
|
|
238
337
|
// won't be.
|
|
239
|
-
(_this$_ssrOnlyCache4 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache4.set(DefaultScope, id, frozenEntry);
|
|
338
|
+
(_this$_ssrOnlyCache4 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache4.set(DefaultScope$2, id, frozenEntry);
|
|
240
339
|
}
|
|
241
340
|
}
|
|
242
341
|
|
|
@@ -252,17 +351,13 @@ class SsrCache {
|
|
|
252
351
|
}
|
|
253
352
|
|
|
254
353
|
let _default$1;
|
|
354
|
+
/**
|
|
355
|
+
* This fulfills a request, making sure that in-flight requests are shared.
|
|
356
|
+
*/
|
|
255
357
|
|
|
256
|
-
class RequestFulfillment {
|
|
257
|
-
static get Default() {
|
|
258
|
-
if (!_default$1) {
|
|
259
|
-
_default$1 = new RequestFulfillment();
|
|
260
|
-
}
|
|
261
|
-
|
|
262
|
-
return _default$1;
|
|
263
|
-
}
|
|
264
358
|
|
|
265
|
-
|
|
359
|
+
class RequestFulfillment {
|
|
360
|
+
constructor() {
|
|
266
361
|
this._requests = {};
|
|
267
362
|
|
|
268
363
|
this.fulfill = (id, {
|
|
@@ -282,57 +377,48 @@ class RequestFulfillment {
|
|
|
282
377
|
*/
|
|
283
378
|
|
|
284
379
|
|
|
285
|
-
const {
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
}
|
|
380
|
+
const request = handler().then(data => ({
|
|
381
|
+
status: "success",
|
|
382
|
+
data
|
|
383
|
+
})).catch(error => {
|
|
384
|
+
const actualError = typeof error === "string" ? new DataError("Request failed", DataErrors.Unknown, {
|
|
385
|
+
metadata: {
|
|
386
|
+
unexpectedError: error
|
|
387
|
+
}
|
|
388
|
+
}) : error; // Return aborted result if the request was aborted.
|
|
389
|
+
// The only way to detect this reliably, it seems, is to
|
|
390
|
+
// check the error name and see if it's "AbortError" (this
|
|
391
|
+
// is also what Apollo does).
|
|
392
|
+
// Even then, it's reliant on the handler supporting aborts.
|
|
393
|
+
// TODO(somewhatabstract, FEI-4276): Add first class abort
|
|
394
|
+
// support to the handler API.
|
|
395
|
+
|
|
396
|
+
if (actualError.name === "AbortError") {
|
|
397
|
+
return {
|
|
398
|
+
status: "aborted"
|
|
399
|
+
};
|
|
400
|
+
}
|
|
289
401
|
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
402
|
+
return {
|
|
403
|
+
status: "error",
|
|
404
|
+
error: actualError
|
|
405
|
+
};
|
|
406
|
+
}).finally(() => {
|
|
407
|
+
delete this._requests[id];
|
|
408
|
+
}); // Store the request in our cache.
|
|
293
409
|
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
return null;
|
|
297
|
-
}
|
|
298
|
-
/**
|
|
299
|
-
* Let's cache the data!
|
|
300
|
-
*
|
|
301
|
-
* NOTE: This only caches when we're server side.
|
|
302
|
-
*/
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
return cacheData(id, data, _hydrate);
|
|
306
|
-
}).catch(error => {
|
|
307
|
-
delete this._requests[id];
|
|
308
|
-
/**
|
|
309
|
-
* Let's cache the error!
|
|
310
|
-
*
|
|
311
|
-
* NOTE: This only caches when we're server side.
|
|
312
|
-
*/
|
|
313
|
-
|
|
314
|
-
return cacheError(id, error, _hydrate);
|
|
315
|
-
});
|
|
316
|
-
this._requests[id] = request;
|
|
317
|
-
return request;
|
|
318
|
-
} catch (e) {
|
|
319
|
-
/**
|
|
320
|
-
* In this case, we don't cache an inflight request, because there
|
|
321
|
-
* really isn't one.
|
|
322
|
-
*/
|
|
323
|
-
return Promise.resolve(cacheError(id, e, _hydrate));
|
|
324
|
-
}
|
|
410
|
+
this._requests[id] = request;
|
|
411
|
+
return request;
|
|
325
412
|
};
|
|
326
|
-
|
|
327
|
-
this._responseCache = responseCache || SsrCache.Default;
|
|
328
413
|
}
|
|
329
|
-
/**
|
|
330
|
-
* Get a promise of a request for a given handler and options.
|
|
331
|
-
*
|
|
332
|
-
* This will return an inflight request if one exists, otherwise it will
|
|
333
|
-
* make a new request. Inflight requests are deleted once they resolve.
|
|
334
|
-
*/
|
|
335
414
|
|
|
415
|
+
static get Default() {
|
|
416
|
+
if (!_default$1) {
|
|
417
|
+
_default$1 = new RequestFulfillment();
|
|
418
|
+
}
|
|
419
|
+
|
|
420
|
+
return _default$1;
|
|
421
|
+
}
|
|
336
422
|
|
|
337
423
|
}
|
|
338
424
|
|
|
@@ -389,11 +475,54 @@ class RequestTracker {
|
|
|
389
475
|
|
|
390
476
|
this.fulfillTrackedRequests = () => {
|
|
391
477
|
const promises = [];
|
|
478
|
+
const {
|
|
479
|
+
cacheData,
|
|
480
|
+
cacheError
|
|
481
|
+
} = this._responseCache;
|
|
392
482
|
|
|
393
483
|
for (const requestKey of Object.keys(this._trackedRequests)) {
|
|
394
|
-
const
|
|
395
|
-
|
|
396
|
-
|
|
484
|
+
const options = this._trackedRequests[requestKey];
|
|
485
|
+
|
|
486
|
+
try {
|
|
487
|
+
promises.push(this._requestFulfillment.fulfill(requestKey, _extends({}, options)).then(result => {
|
|
488
|
+
switch (result.status) {
|
|
489
|
+
case "success":
|
|
490
|
+
/**
|
|
491
|
+
* Let's cache the data!
|
|
492
|
+
*
|
|
493
|
+
* NOTE: This only caches when we're
|
|
494
|
+
* server side.
|
|
495
|
+
*/
|
|
496
|
+
cacheData(requestKey, result.data, options.hydrate);
|
|
497
|
+
break;
|
|
498
|
+
|
|
499
|
+
case "error":
|
|
500
|
+
/**
|
|
501
|
+
* Let's cache the error!
|
|
502
|
+
*
|
|
503
|
+
* NOTE: This only caches when we're
|
|
504
|
+
* server side.
|
|
505
|
+
*/
|
|
506
|
+
cacheError(requestKey, result.error, options.hydrate);
|
|
507
|
+
break;
|
|
508
|
+
} // For status === "loading":
|
|
509
|
+
// Could never get here unless we wrote
|
|
510
|
+
// the code wrong. Rather than bloat
|
|
511
|
+
// code with useless error, just ignore.
|
|
512
|
+
// For status === "aborted":
|
|
513
|
+
// We won't cache this.
|
|
514
|
+
// We don't hydrate aborted requests,
|
|
515
|
+
// so the client would just see them
|
|
516
|
+
// as unfulfilled data.
|
|
517
|
+
|
|
518
|
+
|
|
519
|
+
return;
|
|
520
|
+
}));
|
|
521
|
+
} catch (e) {
|
|
522
|
+
// This captures if there are problems in the code that
|
|
523
|
+
// begins the requests.
|
|
524
|
+
promises.push(Promise.resolve(cacheError(requestKey, e, options.hydrate)));
|
|
525
|
+
}
|
|
397
526
|
}
|
|
398
527
|
/**
|
|
399
528
|
* Clear out our tracked info.
|
|
@@ -401,16 +530,15 @@ class RequestTracker {
|
|
|
401
530
|
* We call this now for a simpler API.
|
|
402
531
|
*
|
|
403
532
|
* If we reset the tracked calls after all promises resolve, any
|
|
404
|
-
*
|
|
533
|
+
* request tracking done while promises are in flight would be lost.
|
|
405
534
|
*
|
|
406
535
|
* If we don't reset at all, then we have to expose the `reset` call
|
|
407
536
|
* for consumers to use, or they'll only ever be able to accumulate
|
|
408
537
|
* more and more tracked requests, having to fulfill them all every
|
|
409
538
|
* time.
|
|
410
539
|
*
|
|
411
|
-
* Calling it here means we can have multiple "track -> request"
|
|
412
|
-
* in a row and in an easy to reason about manner.
|
|
413
|
-
*
|
|
540
|
+
* Calling it here means we can have multiple "track -> request"
|
|
541
|
+
* cycles in a row and in an easy to reason about manner.
|
|
414
542
|
*/
|
|
415
543
|
|
|
416
544
|
|
|
@@ -423,7 +551,7 @@ class RequestTracker {
|
|
|
423
551
|
};
|
|
424
552
|
|
|
425
553
|
this._responseCache = responseCache || SsrCache.Default;
|
|
426
|
-
this._requestFulfillment = new RequestFulfillment(
|
|
554
|
+
this._requestFulfillment = new RequestFulfillment();
|
|
427
555
|
}
|
|
428
556
|
/**
|
|
429
557
|
* Track a request.
|
|
@@ -473,37 +601,70 @@ class TrackData extends React.Component {
|
|
|
473
601
|
}
|
|
474
602
|
|
|
475
603
|
/**
|
|
476
|
-
*
|
|
477
|
-
*
|
|
478
|
-
* This hook registers an asynchronous action to be performed during
|
|
479
|
-
* server-side rendering. The action is performed only once, and the result
|
|
480
|
-
* is cached against the given identifier so that subsequent calls return that
|
|
481
|
-
* cached result allowing components to render more of the component.
|
|
604
|
+
* Simple implementation to represent aborting.
|
|
482
605
|
*
|
|
483
|
-
*
|
|
484
|
-
*
|
|
485
|
-
*
|
|
486
|
-
* requirement).
|
|
487
|
-
*
|
|
488
|
-
* The asynchronous action is never invoked on the client-side.
|
|
606
|
+
* Other frameworks may provide this too, so we won't be sharing this with
|
|
607
|
+
* the outside world. It's just a utility for test and internal use whenever
|
|
608
|
+
* we need to represent the concept of aborted things.
|
|
489
609
|
*/
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
// initial render - and subsequent renders on the client it will be null.
|
|
496
|
-
const cachedResult = SsrCache.Default.getEntry(requestId); // We only track data requests when we are server-side and we don't
|
|
497
|
-
// already have a result, as given by the cachedData (which is also the
|
|
498
|
-
// initial value for the result state).
|
|
610
|
+
class AbortError extends Error {
|
|
611
|
+
constructor(message) {
|
|
612
|
+
super(message);
|
|
613
|
+
this.name = "AbortError";
|
|
614
|
+
}
|
|
499
615
|
|
|
500
|
-
|
|
616
|
+
}
|
|
501
617
|
|
|
502
|
-
|
|
503
|
-
|
|
618
|
+
const loadingStatus = Object.freeze({
|
|
619
|
+
status: "loading"
|
|
620
|
+
});
|
|
621
|
+
const abortedStatus = Object.freeze({
|
|
622
|
+
status: "aborted"
|
|
623
|
+
});
|
|
624
|
+
/**
|
|
625
|
+
* Create Result<TData> instances with specific statuses.
|
|
626
|
+
*/
|
|
627
|
+
|
|
628
|
+
const Status = Object.freeze({
|
|
629
|
+
loading: () => loadingStatus,
|
|
630
|
+
aborted: () => abortedStatus,
|
|
631
|
+
success: data => ({
|
|
632
|
+
status: "success",
|
|
633
|
+
data
|
|
634
|
+
}),
|
|
635
|
+
error: error => ({
|
|
636
|
+
status: "error",
|
|
637
|
+
error
|
|
638
|
+
})
|
|
639
|
+
});
|
|
640
|
+
|
|
641
|
+
/**
|
|
642
|
+
* Turns a cache entry into a stateful result.
|
|
643
|
+
*/
|
|
644
|
+
const resultFromCachedResponse = cacheEntry => {
|
|
645
|
+
// No cache entry means no result to be hydrated.
|
|
646
|
+
if (cacheEntry == null) {
|
|
647
|
+
return null;
|
|
504
648
|
}
|
|
505
649
|
|
|
506
|
-
|
|
650
|
+
const {
|
|
651
|
+
data,
|
|
652
|
+
error
|
|
653
|
+
} = cacheEntry;
|
|
654
|
+
|
|
655
|
+
if (error != null) {
|
|
656
|
+
// Let's hydrate the error. We don't persist everything about the
|
|
657
|
+
// original error on the server, hence why we only superficially
|
|
658
|
+
// hydrate it to a GqlHydratedError.
|
|
659
|
+
return Status.error(new DataError(error, DataErrors.Hydrated));
|
|
660
|
+
}
|
|
661
|
+
|
|
662
|
+
if (data != null) {
|
|
663
|
+
return Status.success(data);
|
|
664
|
+
} // We shouldn't get here since we don't actually cache null data.
|
|
665
|
+
|
|
666
|
+
|
|
667
|
+
return Status.aborted();
|
|
507
668
|
};
|
|
508
669
|
|
|
509
670
|
/**
|
|
@@ -533,7 +694,7 @@ const useRequestInterception = (requestId, handler) => {
|
|
|
533
694
|
// We memoize this so that it only changes if something related to it
|
|
534
695
|
// changes.
|
|
535
696
|
|
|
536
|
-
const interceptedHandler = React.
|
|
697
|
+
const interceptedHandler = React.useCallback(() => {
|
|
537
698
|
// Call the interceptors from closest to furthest.
|
|
538
699
|
// If one returns a non-null result, then we keep that.
|
|
539
700
|
const interceptResponse = interceptors.reduceRight((prev, interceptor) => {
|
|
@@ -553,158 +714,41 @@ const useRequestInterception = (requestId, handler) => {
|
|
|
553
714
|
};
|
|
554
715
|
|
|
555
716
|
/**
|
|
556
|
-
*
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
} = cacheEntry;
|
|
570
|
-
|
|
571
|
-
if (error != null) {
|
|
572
|
-
return {
|
|
573
|
-
status: "error",
|
|
574
|
-
error
|
|
575
|
-
};
|
|
576
|
-
}
|
|
577
|
-
|
|
578
|
-
if (data != null) {
|
|
579
|
-
return {
|
|
580
|
-
status: "success",
|
|
581
|
-
data
|
|
582
|
-
};
|
|
583
|
-
}
|
|
584
|
-
|
|
585
|
-
return {
|
|
586
|
-
status: "aborted"
|
|
587
|
-
};
|
|
588
|
-
};
|
|
589
|
-
|
|
590
|
-
/**
|
|
591
|
-
* This component is the main component of Wonder Blocks Data. With this, data
|
|
592
|
-
* requirements can be placed in a React application in a manner that will
|
|
593
|
-
* support server-side rendering and efficient caching.
|
|
717
|
+
* Hook to perform an asynchronous action during server-side rendering.
|
|
718
|
+
*
|
|
719
|
+
* This hook registers an asynchronous action to be performed during
|
|
720
|
+
* server-side rendering. The action is performed only once, and the result
|
|
721
|
+
* is cached against the given identifier so that subsequent calls return that
|
|
722
|
+
* cached result allowing components to render more of the component.
|
|
723
|
+
*
|
|
724
|
+
* This hook requires the Wonder Blocks Data functionality for resolving
|
|
725
|
+
* pending requests, as well as support for the hydration cache to be
|
|
726
|
+
* embedded into a page so that the result can by hydrated (if that is a
|
|
727
|
+
* requirement).
|
|
728
|
+
*
|
|
729
|
+
* The asynchronous action is never invoked on the client-side.
|
|
594
730
|
*/
|
|
595
|
-
const
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
const interceptedHandler = useRequestInterception(requestId, handler);
|
|
604
|
-
const hydrateResult = useServerEffect(requestId, interceptedHandler, hydrate);
|
|
605
|
-
const [currentResult, setResult] = React.useState(hydrateResult); // Here we make sure the request still occurs client-side as needed.
|
|
606
|
-
// This is for legacy usage that expects this. Eventually we will want
|
|
607
|
-
// to deprecate.
|
|
608
|
-
|
|
609
|
-
React.useEffect(() => {
|
|
610
|
-
// This is here until I can do a better documentation example for
|
|
611
|
-
// the TrackData docs.
|
|
612
|
-
// istanbul ignore next
|
|
613
|
-
if (Server.isServerSide()) {
|
|
614
|
-
return;
|
|
615
|
-
} // We don't bother with this if we have hydration data and we're not
|
|
616
|
-
// forcing a request on hydration.
|
|
617
|
-
// We don't care if these things change after the first render,
|
|
618
|
-
// so we don't want them in the inputs array.
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
if (!alwaysRequestOnHydration && (hydrateResult == null ? void 0 : hydrateResult.data) != null) {
|
|
622
|
-
return;
|
|
623
|
-
} // If we're not hydrating a result and we're not going to render
|
|
624
|
-
// with old data until we're loaded, we want to make sure we set our
|
|
625
|
-
// result to null so that we're in the loading state.
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
if (!showOldDataWhileLoading) {
|
|
629
|
-
// Mark ourselves as loading.
|
|
630
|
-
setResult(null);
|
|
631
|
-
} // We aren't server-side, so let's make the request.
|
|
632
|
-
// We don't need to use our built-in request fulfillment here if we
|
|
633
|
-
// don't want, but it does mean we'll share inflight requests for the
|
|
634
|
-
// same ID and the result will be in the same format as the
|
|
635
|
-
// hydrated value.
|
|
731
|
+
const useServerEffect = (requestId, handler, hydrate = true) => {
|
|
732
|
+
// Plug in to the request interception framework for code that wants
|
|
733
|
+
// to use that.
|
|
734
|
+
const interceptedHandler = useRequestInterception(requestId, handler); // If we're server-side or hydrating, we'll have a cached entry to use.
|
|
735
|
+
// So we get that and use it to initialize our state.
|
|
736
|
+
// This works in both hydration and SSR because the very first call to
|
|
737
|
+
// this will have cached data in those cases as it will be present on the
|
|
738
|
+
// initial render - and subsequent renders on the client it will be null.
|
|
636
739
|
|
|
740
|
+
const cachedResult = SsrCache.Default.getEntry(requestId); // We only track data requests when we are server-side and we don't
|
|
741
|
+
// already have a result, as given by the cachedData (which is also the
|
|
742
|
+
// initial value for the result state).
|
|
637
743
|
|
|
638
|
-
|
|
639
|
-
RequestFulfillment.Default.fulfill(requestId, {
|
|
640
|
-
handler: interceptedHandler
|
|
641
|
-
}).then(result => {
|
|
642
|
-
if (cancel) {
|
|
643
|
-
return;
|
|
644
|
-
}
|
|
744
|
+
const maybeTrack = useContext(TrackerContext);
|
|
645
745
|
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
if (cancel) {
|
|
650
|
-
return;
|
|
651
|
-
}
|
|
652
|
-
/**
|
|
653
|
-
* We should never get here as errors in fulfillment are part
|
|
654
|
-
* of the `then`, but if we do.
|
|
655
|
-
*/
|
|
656
|
-
// eslint-disable-next-line no-console
|
|
746
|
+
if (cachedResult == null && Server.isServerSide()) {
|
|
747
|
+
maybeTrack == null ? void 0 : maybeTrack(requestId, interceptedHandler, hydrate);
|
|
748
|
+
} // A null result means there was no result to hydrate.
|
|
657
749
|
|
|
658
750
|
|
|
659
|
-
|
|
660
|
-
setResult({
|
|
661
|
-
error: typeof e === "string" ? e : e.message
|
|
662
|
-
});
|
|
663
|
-
return;
|
|
664
|
-
});
|
|
665
|
-
return () => {
|
|
666
|
-
cancel = true;
|
|
667
|
-
}; // If the handler changes, we don't care. The ID is what indicates
|
|
668
|
-
// the request that should be made and folks shouldn't be changing the
|
|
669
|
-
// handler without changing the ID as well.
|
|
670
|
-
// In addition, we don't want to include hydrateResult nor
|
|
671
|
-
// alwaysRequestOnHydration as them changinng after the first pass
|
|
672
|
-
// is irrelevant.
|
|
673
|
-
// Finally, we don't want to include showOldDataWhileLoading as that
|
|
674
|
-
// changing on its own is also not relevant. It only matters if the
|
|
675
|
-
// request itself changes. All of which is to say that we only
|
|
676
|
-
// run this effect for the ID changing.
|
|
677
|
-
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
678
|
-
}, [requestId]);
|
|
679
|
-
return children(resultFromCachedResponse(currentResult));
|
|
680
|
-
};
|
|
681
|
-
|
|
682
|
-
/**
|
|
683
|
-
* This component provides a mechanism to intercept data requests.
|
|
684
|
-
* This is for use in testing.
|
|
685
|
-
*
|
|
686
|
-
* This component is not recommended for use in production code as it
|
|
687
|
-
* can prevent predictable functioning of the Wonder Blocks Data framework.
|
|
688
|
-
* One possible side-effect is that inflight requests from the interceptor could
|
|
689
|
-
* be picked up by `Data` component requests from outside the children of this
|
|
690
|
-
* component.
|
|
691
|
-
*
|
|
692
|
-
* Interceptions within the same component tree are chained such that the
|
|
693
|
-
* interceptor closest to the intercepted request is called first, and the
|
|
694
|
-
* furthest interceptor is called last.
|
|
695
|
-
*/
|
|
696
|
-
const InterceptRequests = ({
|
|
697
|
-
interceptor,
|
|
698
|
-
children
|
|
699
|
-
}) => {
|
|
700
|
-
const interceptors = React.useContext(InterceptContext);
|
|
701
|
-
const updatedInterceptors = React.useMemo( // We could build this in reverse order so that our hook that does
|
|
702
|
-
// the interception didn't have to use reduceRight, but I think it
|
|
703
|
-
// is easier to think about if we do this in component tree order.
|
|
704
|
-
() => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
|
|
705
|
-
return /*#__PURE__*/React.createElement(InterceptContext.Provider, {
|
|
706
|
-
value: updatedInterceptors
|
|
707
|
-
}, children);
|
|
751
|
+
return cachedResult == null ? null : resultFromCachedResponse(cachedResult);
|
|
708
752
|
};
|
|
709
753
|
|
|
710
754
|
/**
|
|
@@ -751,16 +795,16 @@ const clearSharedCache = (scope = "") => {
|
|
|
751
795
|
const useSharedCache = (id, scope, initialValue) => {
|
|
752
796
|
// Verify arguments.
|
|
753
797
|
if (!id || typeof id !== "string") {
|
|
754
|
-
throw new
|
|
798
|
+
throw new DataError("id must be a non-empty string", DataErrors.InvalidInput);
|
|
755
799
|
}
|
|
756
800
|
|
|
757
801
|
if (!scope || typeof scope !== "string") {
|
|
758
|
-
throw new
|
|
802
|
+
throw new DataError("scope must be a non-empty string", DataErrors.InvalidInput);
|
|
759
803
|
} // Memoize our APIs.
|
|
760
804
|
// This one allows callers to set or replace the cached value.
|
|
761
805
|
|
|
762
806
|
|
|
763
|
-
const cacheValue = React.
|
|
807
|
+
const cacheValue = React.useCallback(value => value == null ? cache.purge(scope, id) : cache.set(scope, id, value), [id, scope]); // We don't memo-ize the current value, just in case the cache was updated
|
|
764
808
|
// since our last run through. Also, our cache does not know what type it
|
|
765
809
|
// stores, so we have to cast it to the type we're exporting. This is a
|
|
766
810
|
// dev time courtesy, rather than a runtime thing.
|
|
@@ -771,17 +815,290 @@ const useSharedCache = (id, scope, initialValue) => {
|
|
|
771
815
|
|
|
772
816
|
if (currentValue == null && initialValue !== undefined) {
|
|
773
817
|
// Get the initial value.
|
|
774
|
-
const value = typeof initialValue === "function" ? initialValue() : initialValue;
|
|
818
|
+
const value = typeof initialValue === "function" ? initialValue() : initialValue;
|
|
775
819
|
|
|
776
|
-
|
|
820
|
+
if (value != null) {
|
|
821
|
+
// Update the cache.
|
|
822
|
+
cacheValue(value); // Make sure we return this value as our current value.
|
|
777
823
|
|
|
778
|
-
|
|
824
|
+
currentValue = value;
|
|
825
|
+
}
|
|
779
826
|
} // Now we have everything, let's return it.
|
|
780
827
|
|
|
781
828
|
|
|
782
829
|
return [currentValue, cacheValue];
|
|
783
830
|
};
|
|
784
831
|
|
|
832
|
+
const DefaultScope$1 = "useCachedEffect";
|
|
833
|
+
/**
|
|
834
|
+
* Hook to execute and cache an async operation on the client.
|
|
835
|
+
*
|
|
836
|
+
* This hook executes the given handler on the client if there is no
|
|
837
|
+
* cached result to use.
|
|
838
|
+
*
|
|
839
|
+
* Results are cached so they can be shared between equivalent invocations.
|
|
840
|
+
* In-flight requests are also shared, so that concurrent calls will
|
|
841
|
+
* behave as one might exect. Cache updates invoked by one hook instance
|
|
842
|
+
* do not trigger renders in components that use the same requestID; however,
|
|
843
|
+
* that should not matter since concurrent requests will share the same
|
|
844
|
+
* in-flight request, and subsequent renders will grab from the cache.
|
|
845
|
+
*
|
|
846
|
+
* Once the request has been tried once and a non-loading response has been
|
|
847
|
+
* cached, the request will not executed made again.
|
|
848
|
+
*/
|
|
849
|
+
|
|
850
|
+
const useCachedEffect = (requestId, handler, options = {}) => {
|
|
851
|
+
const {
|
|
852
|
+
skip: hardSkip = false,
|
|
853
|
+
retainResultOnChange = false,
|
|
854
|
+
onResultChanged,
|
|
855
|
+
scope = DefaultScope$1
|
|
856
|
+
} = options; // Plug in to the request interception framework for code that wants
|
|
857
|
+
// to use that.
|
|
858
|
+
|
|
859
|
+
const interceptedHandler = useRequestInterception(requestId, handler); // Instead of using state, which would be local to just this hook instance,
|
|
860
|
+
// we use a shared in-memory cache.
|
|
861
|
+
|
|
862
|
+
const [mostRecentResult, setMostRecentResult] = useSharedCache(requestId, // The key of the cached item
|
|
863
|
+
scope // The scope of the cached items
|
|
864
|
+
// No default value. We don't want the loading status there; to ensure
|
|
865
|
+
// that all calls when the request is in-flight will update once that
|
|
866
|
+
// request is done, we want the cache to be empty until that point.
|
|
867
|
+
); // Build a function that will update the cache and either invoke the
|
|
868
|
+
// callback provided in options, or force an update.
|
|
869
|
+
|
|
870
|
+
const forceUpdate = useForceUpdate();
|
|
871
|
+
const setCacheAndNotify = React.useCallback(value => {
|
|
872
|
+
setMostRecentResult(value); // If our caller provided a cacheUpdated callback, we use that.
|
|
873
|
+
// Otherwise, we toggle our little state update.
|
|
874
|
+
|
|
875
|
+
if (onResultChanged != null) {
|
|
876
|
+
onResultChanged(value);
|
|
877
|
+
} else {
|
|
878
|
+
forceUpdate();
|
|
879
|
+
}
|
|
880
|
+
}, [setMostRecentResult, onResultChanged, forceUpdate]); // We need to trigger a re-render when the request ID changes as that
|
|
881
|
+
// indicates its a different request. We don't default the current id as
|
|
882
|
+
// this is a proxy for the first render, where we will make the request
|
|
883
|
+
// if we don't already have a cached value.
|
|
884
|
+
|
|
885
|
+
const requestIdRef = React.useRef();
|
|
886
|
+
const previousRequestId = requestIdRef.current; // Calculate our soft skip state.
|
|
887
|
+
// Soft skip changes are things that should skip the effect if something
|
|
888
|
+
// else triggers the effect to run, but should not itself trigger the effect
|
|
889
|
+
// (which would cancel a previous invocation).
|
|
890
|
+
|
|
891
|
+
const softSkip = React.useMemo(() => {
|
|
892
|
+
if (requestId === previousRequestId) {
|
|
893
|
+
// If the requestId is unchanged, it means we already rendered at
|
|
894
|
+
// least once and so we already made the request at least once. So
|
|
895
|
+
// we can bail out right here.
|
|
896
|
+
return true;
|
|
897
|
+
} // If we already have a cached value, we're going to skip.
|
|
898
|
+
|
|
899
|
+
|
|
900
|
+
if (mostRecentResult != null) {
|
|
901
|
+
return true;
|
|
902
|
+
}
|
|
903
|
+
|
|
904
|
+
return false;
|
|
905
|
+
}, [requestId, previousRequestId, mostRecentResult]); // So now we make sure the client-side request happens per our various
|
|
906
|
+
// options.
|
|
907
|
+
|
|
908
|
+
React.useEffect(() => {
|
|
909
|
+
let cancel = false; // We don't do anything if we've been told to hard skip (a hard skip
|
|
910
|
+
// means we should cancel the previous request and is therefore a
|
|
911
|
+
// dependency on that), or we have determined we have already done
|
|
912
|
+
// enough and can soft skip (a soft skip doesn't trigger the request
|
|
913
|
+
// to re-run; we don't want to cancel the in progress effect if we're
|
|
914
|
+
// soft skipping.
|
|
915
|
+
|
|
916
|
+
if (hardSkip || softSkip) {
|
|
917
|
+
return;
|
|
918
|
+
} // If we got here, we're going to perform the request.
|
|
919
|
+
// Let's make sure our ref is set to the most recent requestId.
|
|
920
|
+
|
|
921
|
+
|
|
922
|
+
requestIdRef.current = requestId; // OK, we've done all our checks and things. It's time to make the
|
|
923
|
+
// request. We use our request fulfillment here so that in-flight
|
|
924
|
+
// requests are shared.
|
|
925
|
+
// NOTE: Our request fulfillment handles the error cases here.
|
|
926
|
+
// Catching shouldn't serve a purpose.
|
|
927
|
+
// eslint-disable-next-line promise/catch-or-return
|
|
928
|
+
|
|
929
|
+
RequestFulfillment.Default.fulfill(requestId, {
|
|
930
|
+
handler: interceptedHandler
|
|
931
|
+
}).then(result => {
|
|
932
|
+
if (cancel) {
|
|
933
|
+
// We don't modify our result if an earlier effect was
|
|
934
|
+
// cancelled as it means that this hook no longer cares about
|
|
935
|
+
// that old request.
|
|
936
|
+
return;
|
|
937
|
+
}
|
|
938
|
+
|
|
939
|
+
setCacheAndNotify(result);
|
|
940
|
+
return; // Shut up eslint always-return rule.
|
|
941
|
+
});
|
|
942
|
+
return () => {
|
|
943
|
+
// TODO(somewhatabstract, FEI-4276): Eventually, we will want to be
|
|
944
|
+
// able abort in-flight requests, but for now, we don't have that.
|
|
945
|
+
// (Of course, we will only want to abort them if no one is waiting
|
|
946
|
+
// on them)
|
|
947
|
+
// For now, we just block cancelled requests from changing our
|
|
948
|
+
// cache.
|
|
949
|
+
cancel = true;
|
|
950
|
+
}; // We only want to run this effect if the requestId, or skip values
|
|
951
|
+
// change. These are the only two things that should affect the
|
|
952
|
+
// cancellation of a pending request. We do not update if the handler
|
|
953
|
+
// changes, in order to simplify the API - otherwise, callers would
|
|
954
|
+
// not be able to use inline functions with this hook.
|
|
955
|
+
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
956
|
+
}, [hardSkip, requestId]); // We track the last result we returned in order to support the
|
|
957
|
+
// "retainResultOnChange" option.
|
|
958
|
+
|
|
959
|
+
const lastResultAgnosticOfIdRef = React.useRef(Status.loading());
|
|
960
|
+
const loadingResult = retainResultOnChange ? lastResultAgnosticOfIdRef.current : Status.loading(); // Loading is a transient state, so we only use it here; it's not something
|
|
961
|
+
// we cache.
|
|
962
|
+
|
|
963
|
+
const result = React.useMemo(() => mostRecentResult != null ? mostRecentResult : loadingResult, [mostRecentResult, loadingResult]);
|
|
964
|
+
lastResultAgnosticOfIdRef.current = result;
|
|
965
|
+
return result;
|
|
966
|
+
};
|
|
967
|
+
|
|
968
|
+
/**
|
|
969
|
+
* Policies to define how a hydratable effect should behave client-side.
|
|
970
|
+
*/
|
|
971
|
+
const WhenClientSide = require("flow-enums-runtime").Mirrored(["DoNotHydrate", "ExecuteWhenNoResult", "ExecuteWhenNoSuccessResult", "AlwaysExecute"]);
|
|
972
|
+
const DefaultScope = "useHydratableEffect";
|
|
973
|
+
/**
|
|
974
|
+
* Hook to execute an async operation on server and client.
|
|
975
|
+
*
|
|
976
|
+
* This hook executes the given handler on the server and on the client,
|
|
977
|
+
* and, depending on the given options, can hydrate the server-side result.
|
|
978
|
+
*
|
|
979
|
+
* Results are cached on the client so they can be shared between equivalent
|
|
980
|
+
* invocations. Cache changes from one hook instance do not trigger renders
|
|
981
|
+
* in components that use the same requestID.
|
|
982
|
+
*/
|
|
983
|
+
|
|
984
|
+
const useHydratableEffect = (requestId, handler, options = {}) => {
|
|
985
|
+
const {
|
|
986
|
+
clientBehavior = WhenClientSide.ExecuteWhenNoSuccessResult,
|
|
987
|
+
skip = false,
|
|
988
|
+
retainResultOnChange = false,
|
|
989
|
+
onResultChanged,
|
|
990
|
+
scope = DefaultScope
|
|
991
|
+
} = options; // Now we instruct the server to perform the operation.
|
|
992
|
+
// When client-side, this will look up any response for hydration; it does
|
|
993
|
+
// not invoke the handler.
|
|
994
|
+
|
|
995
|
+
const serverResult = useServerEffect(requestId, // If we're skipped (unlikely in server worlds, but maybe),
|
|
996
|
+
// just give an aborted response.
|
|
997
|
+
skip ? () => Promise.reject(new AbortError("skipped")) : handler, // Only hydrate if our behavior isn't telling us not to.
|
|
998
|
+
clientBehavior !== WhenClientSide.DoNotHydrate);
|
|
999
|
+
const getDefaultCacheValue = React.useCallback(() => {
|
|
1000
|
+
// If we don't have a requestId, it's our first render, the one
|
|
1001
|
+
// where we hydrated. So defer to our clientBehavior value.
|
|
1002
|
+
switch (clientBehavior) {
|
|
1003
|
+
case WhenClientSide.DoNotHydrate:
|
|
1004
|
+
case WhenClientSide.AlwaysExecute:
|
|
1005
|
+
// Either we weren't hydrating at all, or we don't care
|
|
1006
|
+
// if we hydrated something or not, either way, we're
|
|
1007
|
+
// doing a request.
|
|
1008
|
+
return null;
|
|
1009
|
+
|
|
1010
|
+
case WhenClientSide.ExecuteWhenNoResult:
|
|
1011
|
+
// We only execute if we didn't hydrate something.
|
|
1012
|
+
// So, returning the hydration result as default for our
|
|
1013
|
+
// cache, will then prevent the cached effect running.
|
|
1014
|
+
return serverResult;
|
|
1015
|
+
|
|
1016
|
+
case WhenClientSide.ExecuteWhenNoSuccessResult:
|
|
1017
|
+
// We only execute if we didn't hydrate a success result.
|
|
1018
|
+
if ((serverResult == null ? void 0 : serverResult.status) === "success") {
|
|
1019
|
+
// So, returning the hydration result as default for our
|
|
1020
|
+
// cache, will then prevent the cached effect running.
|
|
1021
|
+
return serverResult;
|
|
1022
|
+
}
|
|
1023
|
+
|
|
1024
|
+
return null;
|
|
1025
|
+
} // There is no reason for this to change after the first render,
|
|
1026
|
+
// you might think, but the function closes around serverResult and if
|
|
1027
|
+
// the requestId changes, it still returns the hydrate result of the
|
|
1028
|
+
// first render of the previous requestId. This then means that the
|
|
1029
|
+
// hydrate result is still the same, and the effect is not re-executed
|
|
1030
|
+
// because the cache gets incorrectly defaulted.
|
|
1031
|
+
// However, we don't want to bother doing anything with this on
|
|
1032
|
+
// client behavior changing since that truly is irrelevant.
|
|
1033
|
+
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
1034
|
+
|
|
1035
|
+
}, [serverResult]); // Instead of using state, which would be local to just this hook instance,
|
|
1036
|
+
// we use a shared in-memory cache.
|
|
1037
|
+
|
|
1038
|
+
useSharedCache(requestId, // The key of the cached item
|
|
1039
|
+
scope, // The scope of the cached items
|
|
1040
|
+
getDefaultCacheValue); // When we're client-side, we ultimately want the result from this call.
|
|
1041
|
+
|
|
1042
|
+
const clientResult = useCachedEffect(requestId, handler, {
|
|
1043
|
+
skip,
|
|
1044
|
+
onResultChanged,
|
|
1045
|
+
retainResultOnChange,
|
|
1046
|
+
scope
|
|
1047
|
+
}); // OK, now which result do we return.
|
|
1048
|
+
// Well, we return the serverResult on our very first call and then
|
|
1049
|
+
// the clientResult thereafter. The great thing is that after the very
|
|
1050
|
+
// first call, the serverResult is going to be `null` anyway.
|
|
1051
|
+
|
|
1052
|
+
return serverResult != null ? serverResult : clientResult;
|
|
1053
|
+
};
|
|
1054
|
+
|
|
1055
|
+
/**
|
|
1056
|
+
* This component is the main component of Wonder Blocks Data. With this, data
|
|
1057
|
+
* requirements can be placed in a React application in a manner that will
|
|
1058
|
+
* support server-side rendering and efficient caching.
|
|
1059
|
+
*/
|
|
1060
|
+
const Data = ({
|
|
1061
|
+
requestId,
|
|
1062
|
+
handler,
|
|
1063
|
+
children,
|
|
1064
|
+
retainResultOnChange: _retainResultOnChange = false,
|
|
1065
|
+
clientBehavior: _clientBehavior = WhenClientSide.ExecuteWhenNoSuccessResult
|
|
1066
|
+
}) => {
|
|
1067
|
+
const result = useHydratableEffect(requestId, handler, {
|
|
1068
|
+
retainResultOnChange: _retainResultOnChange,
|
|
1069
|
+
clientBehavior: _clientBehavior
|
|
1070
|
+
});
|
|
1071
|
+
return children(result);
|
|
1072
|
+
};
|
|
1073
|
+
|
|
1074
|
+
/**
|
|
1075
|
+
* This component provides a mechanism to intercept data requests.
|
|
1076
|
+
* This is for use in testing.
|
|
1077
|
+
*
|
|
1078
|
+
* This component is not recommended for use in production code as it
|
|
1079
|
+
* can prevent predictable functioning of the Wonder Blocks Data framework.
|
|
1080
|
+
* One possible side-effect is that inflight requests from the interceptor could
|
|
1081
|
+
* be picked up by `Data` component requests from outside the children of this
|
|
1082
|
+
* component.
|
|
1083
|
+
*
|
|
1084
|
+
* Interceptions within the same component tree are chained such that the
|
|
1085
|
+
* interceptor closest to the intercepted request is called first, and the
|
|
1086
|
+
* furthest interceptor is called last.
|
|
1087
|
+
*/
|
|
1088
|
+
const InterceptRequests = ({
|
|
1089
|
+
interceptor,
|
|
1090
|
+
children
|
|
1091
|
+
}) => {
|
|
1092
|
+
const interceptors = React.useContext(InterceptContext);
|
|
1093
|
+
const updatedInterceptors = React.useMemo( // We could build this in reverse order so that our hook that does
|
|
1094
|
+
// the interception didn't have to use reduceRight, but I think it
|
|
1095
|
+
// is easier to think about if we do this in component tree order.
|
|
1096
|
+
() => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
|
|
1097
|
+
return /*#__PURE__*/React.createElement(InterceptContext.Provider, {
|
|
1098
|
+
value: updatedInterceptors
|
|
1099
|
+
}, children);
|
|
1100
|
+
};
|
|
1101
|
+
|
|
785
1102
|
const GqlRouterContext = /*#__PURE__*/React.createContext(null);
|
|
786
1103
|
|
|
787
1104
|
/**
|
|
@@ -816,17 +1133,57 @@ const GqlRouter = ({
|
|
|
816
1133
|
}, children);
|
|
817
1134
|
};
|
|
818
1135
|
|
|
1136
|
+
/**
|
|
1137
|
+
* Construct a complete GqlContext from current defaults and a partial context.
|
|
1138
|
+
*
|
|
1139
|
+
* Values in the partial context that are `undefined` will be ignored.
|
|
1140
|
+
* Values in the partial context that are `null` will be deleted.
|
|
1141
|
+
*/
|
|
1142
|
+
const mergeGqlContext = (defaultContext, overrides) => {
|
|
1143
|
+
// Let's merge the partial context default context. We deliberately
|
|
1144
|
+
// don't spread because spreading would overwrite default context
|
|
1145
|
+
// values with undefined or null if the partial context includes a value
|
|
1146
|
+
// explicitly set to undefined or null.
|
|
1147
|
+
return Object.keys(overrides).reduce((acc, key) => {
|
|
1148
|
+
// Undefined values are ignored.
|
|
1149
|
+
if (overrides[key] !== undefined) {
|
|
1150
|
+
if (overrides[key] === null) {
|
|
1151
|
+
// Null indicates we delete this context value.
|
|
1152
|
+
delete acc[key];
|
|
1153
|
+
} else {
|
|
1154
|
+
// Otherwise, we set it.
|
|
1155
|
+
acc[key] = overrides[key];
|
|
1156
|
+
}
|
|
1157
|
+
}
|
|
1158
|
+
|
|
1159
|
+
return acc;
|
|
1160
|
+
}, _extends({}, defaultContext));
|
|
1161
|
+
};
|
|
1162
|
+
|
|
819
1163
|
/**
|
|
820
1164
|
* Error kinds for GqlError.
|
|
821
1165
|
*/
|
|
822
|
-
const GqlErrors = Object.freeze(
|
|
823
|
-
|
|
824
|
-
|
|
1166
|
+
const GqlErrors = Object.freeze({
|
|
1167
|
+
/**
|
|
1168
|
+
* An internal framework error.
|
|
1169
|
+
*/
|
|
1170
|
+
Internal: "Internal",
|
|
1171
|
+
|
|
1172
|
+
/**
|
|
1173
|
+
* Response does not have the correct structure for a GraphQL response.
|
|
1174
|
+
*/
|
|
825
1175
|
BadResponse: "BadResponse",
|
|
1176
|
+
|
|
1177
|
+
/**
|
|
1178
|
+
* A valid GraphQL result with errors field in the payload.
|
|
1179
|
+
*/
|
|
826
1180
|
ErrorResult: "ErrorResult"
|
|
827
|
-
})
|
|
1181
|
+
});
|
|
828
1182
|
/**
|
|
829
1183
|
* An error from the GQL API.
|
|
1184
|
+
*
|
|
1185
|
+
* Errors of this type will have names of the format:
|
|
1186
|
+
* `${kind}GqlError`
|
|
830
1187
|
*/
|
|
831
1188
|
|
|
832
1189
|
class GqlError extends KindError {
|
|
@@ -837,12 +1194,48 @@ class GqlError extends KindError {
     super(message, kind, {
       metadata,
       cause,
-
+      name: "Gql"
     });
   }
 
 }
 
+/**
+ * Construct a GqlRouterContext from the current one and partial context.
+ */
+const useGqlRouterContext = (contextOverrides = {}) => {
+  // This hook only works if the `GqlRouter` has been used to setup context.
+  const gqlRouterContext = useContext(GqlRouterContext);
+
+  if (gqlRouterContext == null) {
+    throw new GqlError("No GqlRouter", GqlErrors.Internal);
+  }
+
+  const {
+    fetch,
+    defaultContext
+  } = gqlRouterContext;
+  const contextRef = useRef(defaultContext);
+  const mergedContext = mergeGqlContext(defaultContext, contextOverrides); // Now, we can see if this represents a new context and if so,
+  // update our ref and return the merged value.
+
+  const refKeys = Object.keys(contextRef.current);
+  const mergedKeys = Object.keys(mergedContext);
+  const shouldWeUpdateRef = refKeys.length !== mergedKeys.length || mergedKeys.every(key => contextRef.current[key] !== mergedContext[key]);
+
+  if (shouldWeUpdateRef) {
+    contextRef.current = mergedContext;
+  } // OK, now we're up-to-date, let's memoize our final result.
+
+
+  const finalContext = contextRef.current;
+  const finalRouterContext = useMemo(() => ({
+    fetch,
+    defaultContext: finalContext
+  }), [fetch, finalContext]);
+  return finalRouterContext;
+};
+
 /**
  * Validate a GQL operation response and extract the data.
  */
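
Per the `GqlError` docblock above, instances are named `${kind}GqlError`, with kinds drawn from `GqlErrors`. A hedged sketch of how a caller might branch on that name; `somePromise` is a placeholder for any promise that can reject with a `GqlError`:

    // Sketch only — distinguishing GqlError kinds via the documented name format.
    somePromise.catch((error) => {
        if (error instanceof GqlError && error.name === "BadResponseGqlError") {
            // The response body was not a well-formed GraphQL response.
        }
        throw error; // rethrow anything we don't handle
    });
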
@@ -856,7 +1249,7 @@ const getGqlDataFromResponse = async response =>
   try {
     result = JSON.parse(bodyText);
   } catch (e) {
-    throw new
+    throw new DataError("Failed to parse response", DataErrors.Parse, {
       metadata: {
         statusCode: response.status,
         bodyText
@@ -867,7 +1260,7 @@ const getGqlDataFromResponse = async response =>
 
 
   if (response.status >= 300) {
-    throw new
+    throw new DataError("Response unsuccessful", DataErrors.Network, {
       metadata: {
         statusCode: response.status,
         result
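
The two hunks above switch `getGqlDataFromResponse` to throwing `DataError` values: `DataErrors.Parse` when the body cannot be parsed and `DataErrors.Network` when the status is 300 or higher, with the status code and raw payload attached as metadata. A hedged sketch of reacting to such an error; `gqlFetchPromise` is a placeholder, and reading `metadata` off the instance is an assumption not shown in this diff:

    // Illustrative only — inspecting a DataError raised during response validation.
    gqlFetchPromise.catch((error) => {
        if (error instanceof DataError) {
            const statusCode = error.metadata?.statusCode; // assumption: constructor metadata is exposed
            console.error(`GQL request failed (${error.name}, status ${statusCode})`);
        }
        throw error;
    });
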
@@ -913,59 +1306,48 @@ const getGqlDataFromResponse = async response =>
  * Values in the partial context given to the returned fetch function will
  * only be included if they have a value other than undefined.
  */
-const useGql = () => {
+const useGql = (context = {}) => {
   // This hook only works if the `GqlRouter` has been used to setup context.
-  const gqlRouterContext =
-
-  if (gqlRouterContext == null) {
-    throw new GqlError("No GqlRouter", GqlErrors.Internal);
-  }
-
-  const {
-    fetch,
-    defaultContext
-  } = gqlRouterContext; // Let's memoize the gqlFetch function we create based off our context.
+  const gqlRouterContext = useGqlRouterContext(context); // Let's memoize the gqlFetch function we create based off our context.
   // That way, even if the context happens to change, if its values don't
   // we give the same function instance back to our callers instead of
   // making a new one. That then means they can safely use the return value
   // in hooks deps without fear of it triggering extra renders.
 
-  const gqlFetch =
+  const gqlFetch = useCallback((operation, options = Object.freeze({})) => {
+    const {
+      fetch,
+      defaultContext
+    } = gqlRouterContext;
     const {
       variables,
       context = {}
-    } = options;
-
-    // spreading would overwrite default context values with
-    // undefined if the partial context includes a value explicitly
-    // set to undefined. Instead, we use a map/reduce of keys.
-
-    const mergedContext = Object.keys(context).reduce((acc, key) => {
-      if (context[key] !== undefined) {
-        acc[key] = context[key];
-      }
-
-      return acc;
-    }, _extends({}, defaultContext)); // Invoke the fetch and extract the data.
-
-    return fetch(operation, variables, mergedContext).then(getGqlDataFromResponse, error => {
-      // Return null if the request was aborted.
-      // The only way to detect this reliably, it seems, is to
-      // check the error name and see if it's "AbortError" (this
-      // is also what Apollo does).
-      // Even then, it's reliant on the fetch supporting aborts.
-      if (error.name === "AbortError") {
-        return null;
-      } // Need to make sure we pass other errors along.
-
+    } = options;
+    const finalContext = mergeGqlContext(defaultContext, context); // Invoke the fetch and extract the data.
 
-
-
-  }, [fetch, defaultContext]);
+    return fetch(operation, variables, finalContext).then(getGqlDataFromResponse);
+  }, [gqlRouterContext]);
   return gqlFetch;
 };
 
+/**
+ * Initialize the hydration cache.
+ *
+ * @param {ResponseCache} source The cache content to use for initializing the
+ * cache.
+ * @throws {Error} If the cache is already initialized.
+ */
 const initializeCache = source => SsrCache.Default.initialize(source);
+/**
+ * Fulfill all tracked data requests.
+ *
+ * This is for use with the `TrackData` component during server-side rendering.
+ *
+ * @throws {Error} If executed outside of server-side rendering.
+ * @returns {Promise<void>} A promise that resolves when all tracked requests
+ * have been fulfilled.
+ */
+
 const fulfillAllDataRequests = () => {
   if (!Server.isServerSide()) {
     return Promise.reject(new Error("Data requests are not tracked when client-side"));
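
The reworked `useGql` above takes an optional partial context, resolves `fetch` and `defaultContext` through `useGqlRouterContext`, and memoizes the returned `gqlFetch` so it is safe to use in dependency arrays. A hedged sketch of how it might be wired up; the `GqlRouter` props mirror the `fetch`/`defaultContext` values read from its context in this diff, and `MyQuery` plus the fetch implementation are placeholders:

    // Assumed fetch implementation: anything returning a fetch-style Response promise.
    const gqlFetchImpl = (operation, variables, context) =>
        fetch("/graphql", {
            method: "POST",
            body: JSON.stringify({operation, variables, context}),
        });

    const MyComponent = () => {
        // Per-call overrides merge into defaultContext (see mergeGqlContext above).
        const gqlFetch = useGql({locale: "es"});
        React.useEffect(() => {
            gqlFetch(MyQuery, {variables: {id: 42}}).then((data) => {
                // ...use the extracted GraphQL data...
            });
        }, [gqlFetch]);
        return null;
    };

    const App = () => React.createElement(
        GqlRouter,
        {fetch: gqlFetchImpl, defaultContext: {locale: "en"}},
        React.createElement(MyComponent),
    );
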
@@ -973,6 +1355,16 @@ const fulfillAllDataRequests = () => {
 
   return RequestTracker.Default.fulfillTrackedRequests();
 };
+/**
+ * Indicate if there are unfulfilled tracked requests.
+ *
+ * This is used in conjunction with `TrackData`.
+ *
+ * @throws {Error} If executed outside of server-side rendering.
+ * @returns {boolean} `true` if there are unfulfilled tracked requests;
+ * otherwise, `false`.
+ */
+
 const hasUnfulfilledRequests = () => {
   if (!Server.isServerSide()) {
     throw new Error("Data requests are not tracked when client-side");
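
`fulfillAllDataRequests` and `hasUnfulfilledRequests` both guard on `Server.isServerSide()`, so they only apply during server-side rendering in combination with `TrackData`. A hedged sketch of the render-and-fulfill loop a server renderer might run; `ReactDOMServer` and the overall loop shape are assumptions, not part of this package:

    // Sketch: render, fulfill any tracked requests, and render again until settled.
    const renderToHtml = async (element) => {
        const renderOnce = () =>
            ReactDOMServer.renderToString(React.createElement(TrackData, null, element));

        let html = renderOnce();
        while (hasUnfulfilledRequests()) {
            await fulfillAllDataRequests(); // resolves once tracked requests are fulfilled
            html = renderOnce();
        }
        return html;
    };
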
@@ -980,7 +1372,21 @@ const hasUnfulfilledRequests = () => {
 
   return RequestTracker.Default.hasUnfulfilledRequests;
 };
+/**
+ * Remove the request identified from the cached hydration responses.
+ *
+ * @param {string} id The request ID of the response to remove from the cache.
+ */
+
 const removeFromCache = id => SsrCache.Default.remove(id);
+/**
+ * Remove all cached hydration responses that match the given predicate.
+ *
+ * @param {(id: string) => boolean} [predicate] The predicate to match against
+ * the cached hydration responses. If no predicate is provided, all cached
+ * hydration responses will be removed.
+ */
+
 const removeAllFromCache = predicate => SsrCache.Default.removeAll(predicate);
 
-export { Data, GqlError, GqlErrors, GqlRouter, InterceptRequests, ScopedInMemoryCache, TrackData, clearSharedCache, fulfillAllDataRequests, hasUnfulfilledRequests, initializeCache, removeAllFromCache, removeFromCache, useGql,
+export { Data, DataError, DataErrors, GqlError, GqlErrors, GqlRouter, InterceptRequests, RequestFulfillment, ScopedInMemoryCache, SerializableInMemoryCache, Status, TrackData, WhenClientSide, clearSharedCache, fulfillAllDataRequests, hasUnfulfilledRequests, initializeCache, removeAllFromCache, removeFromCache, useCachedEffect, useGql, useHydratableEffect, useServerEffect, useSharedCache };
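
The hydration-cache helpers documented above identify responses by request ID. A small hedged example; the IDs are made up:

    // Illustrative only — pruning cached hydration responses.
    removeFromCache("getUser:42");                         // remove one response by its request ID
    removeAllFromCache((id) => id.startsWith("getUser:")); // remove every response matching the predicate
    removeAllFromCache();                                  // no predicate: remove all cached responses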