@khanacademy/wonder-blocks-data 5.0.1 → 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +31 -0
- package/dist/es/index.js +767 -371
- package/dist/index.js +1194 -564
- package/legacy-docs.md +3 -0
- package/package.json +2 -2
- package/src/__docs__/_overview_.stories.mdx +18 -0
- package/src/__docs__/_overview_graphql.stories.mdx +35 -0
- package/src/__docs__/_overview_ssr_.stories.mdx +185 -0
- package/src/__docs__/_overview_testing_.stories.mdx +123 -0
- package/src/__docs__/exports.clear-shared-cache.stories.mdx +20 -0
- package/src/__docs__/exports.data-error.stories.mdx +23 -0
- package/src/__docs__/exports.data-errors.stories.mdx +23 -0
- package/src/{components/data.md → __docs__/exports.data.stories.mdx} +15 -18
- package/src/__docs__/exports.fulfill-all-data-requests.stories.mdx +24 -0
- package/src/__docs__/exports.gql-error.stories.mdx +23 -0
- package/src/__docs__/exports.gql-errors.stories.mdx +20 -0
- package/src/__docs__/exports.gql-router.stories.mdx +29 -0
- package/src/__docs__/exports.has-unfulfilled-requests.stories.mdx +20 -0
- package/src/{components/intercept-requests.md → __docs__/exports.intercept-requests.stories.mdx} +16 -1
- package/src/__docs__/exports.intialize-cache.stories.mdx +29 -0
- package/src/__docs__/exports.remove-all-from-cache.stories.mdx +24 -0
- package/src/__docs__/exports.remove-from-cache.stories.mdx +25 -0
- package/src/__docs__/exports.request-fulfillment.stories.mdx +36 -0
- package/src/__docs__/exports.scoped-in-memory-cache.stories.mdx +92 -0
- package/src/__docs__/exports.serializable-in-memory-cache.stories.mdx +112 -0
- package/src/__docs__/exports.status.stories.mdx +31 -0
- package/src/{components/track-data.md → __docs__/exports.track-data.stories.mdx} +15 -0
- package/src/__docs__/exports.use-cached-effect.stories.mdx +41 -0
- package/src/__docs__/exports.use-gql.stories.mdx +73 -0
- package/src/__docs__/exports.use-hydratable-effect.stories.mdx +43 -0
- package/src/__docs__/exports.use-server-effect.stories.mdx +50 -0
- package/src/__docs__/exports.use-shared-cache.stories.mdx +30 -0
- package/src/__docs__/exports.when-client-side.stories.mdx +33 -0
- package/src/__docs__/types.cached-response.stories.mdx +29 -0
- package/src/__docs__/types.error-options.stories.mdx +21 -0
- package/src/__docs__/types.gql-context.stories.mdx +20 -0
- package/src/__docs__/types.gql-fetch-fn.stories.mdx +24 -0
- package/src/__docs__/types.gql-fetch-options.stories.mdx +24 -0
- package/src/__docs__/types.gql-operation-type.stories.mdx +24 -0
- package/src/__docs__/types.gql-operation.stories.mdx +67 -0
- package/src/__docs__/types.response-cache.stories.mdx +33 -0
- package/src/__docs__/types.result.stories.mdx +39 -0
- package/src/__docs__/types.scoped-cache.stories.mdx +27 -0
- package/src/__docs__/types.valid-cache-data.stories.mdx +23 -0
- package/src/__tests__/__snapshots__/generated-snapshot.test.js.snap +0 -80
- package/src/__tests__/generated-snapshot.test.js +0 -24
- package/src/components/__tests__/data.test.js +149 -128
- package/src/components/data.js +22 -112
- package/src/components/intercept-requests.js +1 -1
- package/src/hooks/__tests__/__snapshots__/use-shared-cache.test.js.snap +8 -8
- package/src/hooks/__tests__/use-cached-effect.test.js +507 -0
- package/src/hooks/__tests__/use-gql-router-context.test.js +133 -0
- package/src/hooks/__tests__/use-gql.test.js +1 -30
- package/src/hooks/__tests__/use-hydratable-effect.test.js +705 -0
- package/src/hooks/__tests__/use-server-effect.test.js +90 -11
- package/src/hooks/use-cached-effect.js +225 -0
- package/src/hooks/use-gql-router-context.js +50 -0
- package/src/hooks/use-gql.js +22 -52
- package/src/hooks/use-hydratable-effect.js +206 -0
- package/src/hooks/use-request-interception.js +20 -23
- package/src/hooks/use-server-effect.js +42 -10
- package/src/hooks/use-shared-cache.js +13 -11
- package/src/index.js +53 -3
- package/src/util/__tests__/__snapshots__/serializable-in-memory-cache.test.js.snap +19 -0
- package/src/util/__tests__/merge-gql-context.test.js +74 -0
- package/src/util/__tests__/request-fulfillment.test.js +23 -42
- package/src/util/__tests__/request-tracking.test.js +26 -7
- package/src/util/__tests__/result-from-cache-response.test.js +19 -5
- package/src/util/__tests__/scoped-in-memory-cache.test.js +6 -85
- package/src/util/__tests__/serializable-in-memory-cache.test.js +398 -0
- package/src/util/__tests__/ssr-cache.test.js +52 -52
- package/src/util/data-error.js +58 -0
- package/src/util/get-gql-data-from-response.js +3 -2
- package/src/util/gql-error.js +19 -11
- package/src/util/merge-gql-context.js +34 -0
- package/src/util/request-fulfillment.js +49 -46
- package/src/util/request-tracking.js +69 -15
- package/src/util/result-from-cache-response.js +12 -16
- package/src/util/scoped-in-memory-cache.js +24 -47
- package/src/util/serializable-in-memory-cache.js +49 -0
- package/src/util/ssr-cache.js +9 -8
- package/src/util/status.js +30 -0
- package/src/util/types.js +18 -1
- package/docs.md +0 -122
package/dist/es/index.js
CHANGED
|
@@ -1,117 +1,216 @@
|
|
|
1
|
-
import { Server } from '@khanacademy/wonder-blocks-core';
|
|
2
|
-
import { KindError,
|
|
3
|
-
import * as React from 'react';
|
|
4
|
-
import { useContext, useMemo } from 'react';
|
|
1
|
+
import { Server, useForceUpdate } from '@khanacademy/wonder-blocks-core';
|
|
2
|
+
import { KindError, clone } from '@khanacademy/wonder-stuff-core';
|
|
5
3
|
import _extends from '@babel/runtime/helpers/extends';
|
|
4
|
+
import * as React from 'react';
|
|
5
|
+
import { useContext, useRef, useMemo, useCallback } from 'react';
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Error kinds for DataError.
|
|
9
|
+
*/
|
|
10
|
+
const DataErrors = Object.freeze({
|
|
11
|
+
/**
|
|
12
|
+
* The kind of error is not known.
|
|
13
|
+
*/
|
|
14
|
+
Unknown: "Unknown",
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* The error is internal to the executing code.
|
|
18
|
+
*/
|
|
19
|
+
Internal: "Internal",
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* There was a problem with the provided input.
|
|
23
|
+
*/
|
|
24
|
+
InvalidInput: "InvalidInput",
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* A network error occurred.
|
|
28
|
+
*/
|
|
29
|
+
Network: "Network",
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Response could not be parsed.
|
|
33
|
+
*/
|
|
34
|
+
Parse: "Parse",
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* An error that occurred during SSR and was hydrated from cache
|
|
38
|
+
*/
|
|
39
|
+
Hydrated: "Hydrated"
|
|
40
|
+
});
|
|
41
|
+
/**
|
|
42
|
+
* An error from the Wonder Blocks Data API.
|
|
43
|
+
*
|
|
44
|
+
* Errors of this type will have names of the format:
|
|
45
|
+
* `${kind}DataError`
|
|
46
|
+
*/
|
|
47
|
+
|
|
48
|
+
class DataError extends KindError {
|
|
49
|
+
constructor(message, kind, {
|
|
50
|
+
metadata,
|
|
51
|
+
cause
|
|
52
|
+
} = {}) {
|
|
53
|
+
super(message, kind, {
|
|
54
|
+
metadata,
|
|
55
|
+
cause,
|
|
56
|
+
name: "Data"
|
|
57
|
+
});
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
}
|
|
6
61
|
|
|
7
62
|
/**
|
|
8
63
|
* Describe an in-memory cache.
|
|
9
64
|
*/
|
|
10
65
|
class ScopedInMemoryCache {
|
|
11
|
-
constructor(initialCache =
|
|
12
|
-
this.
|
|
13
|
-
|
|
66
|
+
constructor(initialCache = {}) {
|
|
67
|
+
this._cache = initialCache;
|
|
68
|
+
}
|
|
69
|
+
/**
|
|
70
|
+
* Indicate if this cache is being used or not.
|
|
71
|
+
*
|
|
72
|
+
* When the cache has entries, returns `true`; otherwise, returns `false`.
|
|
73
|
+
*/
|
|
14
74
|
|
|
15
|
-
if (!id || typeof id !== "string") {
|
|
16
|
-
throw new KindError("id must be non-empty string", Errors.InvalidInput);
|
|
17
|
-
}
|
|
18
75
|
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
76
|
+
get inUse() {
|
|
77
|
+
return Object.keys(this._cache).length > 0;
|
|
78
|
+
}
|
|
79
|
+
/**
|
|
80
|
+
* Set a value in the cache.
|
|
81
|
+
*/
|
|
22
82
|
|
|
23
|
-
if (typeof value === "function") {
|
|
24
|
-
throw new KindError("value must be a non-function value", Errors.InvalidInput);
|
|
25
|
-
}
|
|
26
83
|
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
};
|
|
84
|
+
set(scope, id, value) {
|
|
85
|
+
var _this$_cache$scope;
|
|
30
86
|
|
|
31
|
-
|
|
32
|
-
|
|
87
|
+
if (!id || typeof id !== "string") {
|
|
88
|
+
throw new DataError("id must be non-empty string", DataErrors.InvalidInput);
|
|
89
|
+
}
|
|
33
90
|
|
|
34
|
-
|
|
35
|
-
|
|
91
|
+
if (!scope || typeof scope !== "string") {
|
|
92
|
+
throw new DataError("scope must be non-empty string", DataErrors.InvalidInput);
|
|
93
|
+
}
|
|
36
94
|
|
|
37
|
-
|
|
38
|
-
|
|
95
|
+
if (typeof value === "function") {
|
|
96
|
+
throw new DataError("value must be a non-function value", DataErrors.InvalidInput);
|
|
97
|
+
}
|
|
39
98
|
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
99
|
+
this._cache[scope] = (_this$_cache$scope = this._cache[scope]) != null ? _this$_cache$scope : {};
|
|
100
|
+
this._cache[scope][id] = value;
|
|
101
|
+
}
|
|
102
|
+
/**
|
|
103
|
+
* Retrieve a value from the cache.
|
|
104
|
+
*/
|
|
43
105
|
|
|
44
|
-
delete this._cache[scope][id];
|
|
45
106
|
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
}
|
|
49
|
-
};
|
|
107
|
+
get(scope, id) {
|
|
108
|
+
var _this$_cache$scope$id, _this$_cache$scope2;
|
|
50
109
|
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
110
|
+
return (_this$_cache$scope$id = (_this$_cache$scope2 = this._cache[scope]) == null ? void 0 : _this$_cache$scope2[id]) != null ? _this$_cache$scope$id : null;
|
|
111
|
+
}
|
|
112
|
+
/**
|
|
113
|
+
* Purge an item from the cache.
|
|
114
|
+
*/
|
|
55
115
|
|
|
56
|
-
if (predicate == null) {
|
|
57
|
-
delete this._cache[scope];
|
|
58
|
-
return;
|
|
59
|
-
}
|
|
60
116
|
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
delete this._cache[scope][key];
|
|
64
|
-
}
|
|
65
|
-
}
|
|
117
|
+
purge(scope, id) {
|
|
118
|
+
var _this$_cache$scope3;
|
|
66
119
|
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
};
|
|
120
|
+
if (!((_this$_cache$scope3 = this._cache[scope]) != null && _this$_cache$scope3[id])) {
|
|
121
|
+
return;
|
|
122
|
+
}
|
|
71
123
|
|
|
72
|
-
this.
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
124
|
+
delete this._cache[scope][id];
|
|
125
|
+
|
|
126
|
+
if (Object.keys(this._cache[scope]).length === 0) {
|
|
127
|
+
delete this._cache[scope];
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
/**
|
|
131
|
+
* Purge a scope of items that match the given predicate.
|
|
132
|
+
*
|
|
133
|
+
* If the predicate is omitted, then all items in the scope are purged.
|
|
134
|
+
*/
|
|
77
135
|
|
|
78
|
-
for (const scope of Object.keys(this._cache)) {
|
|
79
|
-
this.purgeScope(scope, (id, value) => predicate(scope, id, value));
|
|
80
|
-
}
|
|
81
|
-
};
|
|
82
136
|
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
137
|
+
purgeScope(scope, predicate) {
|
|
138
|
+
if (!this._cache[scope]) {
|
|
139
|
+
return;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
if (predicate == null) {
|
|
143
|
+
delete this._cache[scope];
|
|
144
|
+
return;
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
for (const key of Object.keys(this._cache[scope])) {
|
|
148
|
+
if (predicate(key, this._cache[scope][key])) {
|
|
149
|
+
delete this._cache[scope][key];
|
|
88
150
|
}
|
|
89
|
-
}
|
|
151
|
+
}
|
|
90
152
|
|
|
91
|
-
|
|
92
|
-
this._cache
|
|
93
|
-
} catch (e) {
|
|
94
|
-
throw new KindError(`An error occurred trying to initialize from a response cache snapshot: ${e}`, Errors.InvalidInput);
|
|
153
|
+
if (Object.keys(this._cache[scope]).length === 0) {
|
|
154
|
+
delete this._cache[scope];
|
|
95
155
|
}
|
|
96
156
|
}
|
|
97
157
|
/**
|
|
98
|
-
*
|
|
158
|
+
* Purge all items from the cache that match the given predicate.
|
|
99
159
|
*
|
|
100
|
-
*
|
|
160
|
+
* If the predicate is omitted, then all items in the cache are purged.
|
|
101
161
|
*/
|
|
102
162
|
|
|
103
163
|
|
|
104
|
-
|
|
105
|
-
|
|
164
|
+
purgeAll(predicate) {
|
|
165
|
+
if (predicate == null) {
|
|
166
|
+
this._cache = {};
|
|
167
|
+
return;
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
for (const scope of Object.keys(this._cache)) {
|
|
171
|
+
this.purgeScope(scope, (id, value) => predicate(scope, id, value));
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
/**
|
|
178
|
+
* Describe a serializable in-memory cache.
|
|
179
|
+
*/
|
|
180
|
+
class SerializableInMemoryCache extends ScopedInMemoryCache {
|
|
181
|
+
constructor(initialCache = {}) {
|
|
182
|
+
try {
|
|
183
|
+
super(clone(initialCache));
|
|
184
|
+
} catch (e) {
|
|
185
|
+
throw new DataError(`An error occurred trying to initialize from a response cache snapshot: ${e}`, DataErrors.InvalidInput);
|
|
186
|
+
}
|
|
106
187
|
}
|
|
107
188
|
/**
|
|
108
189
|
* Set a value in the cache.
|
|
109
190
|
*/
|
|
110
191
|
|
|
111
192
|
|
|
193
|
+
set(scope, id, value) {
|
|
194
|
+
super.set(scope, id, Object.freeze(clone(value)));
|
|
195
|
+
}
|
|
196
|
+
/**
|
|
197
|
+
* Clone the cache.
|
|
198
|
+
*/
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
clone() {
|
|
202
|
+
try {
|
|
203
|
+
return clone(this._cache);
|
|
204
|
+
} catch (e) {
|
|
205
|
+
throw new DataError("An error occurred while trying to clone the cache", DataErrors.Internal, {
|
|
206
|
+
cause: e
|
|
207
|
+
});
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
|
|
112
211
|
}
|
|
113
212
|
|
|
114
|
-
const DefaultScope = "default";
|
|
213
|
+
const DefaultScope$2 = "default";
|
|
115
214
|
/**
|
|
116
215
|
* The default instance is stored here.
|
|
117
216
|
* It's created below in the Default() static property.
|
|
@@ -140,9 +239,9 @@ class SsrCache {
|
|
|
140
239
|
throw new Error("Cannot initialize data response cache more than once");
|
|
141
240
|
}
|
|
142
241
|
|
|
143
|
-
this._hydrationCache = new
|
|
242
|
+
this._hydrationCache = new SerializableInMemoryCache({
|
|
144
243
|
// $FlowIgnore[incompatible-call]
|
|
145
|
-
[DefaultScope]: source
|
|
244
|
+
[DefaultScope$2]: source
|
|
146
245
|
});
|
|
147
246
|
};
|
|
148
247
|
|
|
@@ -162,7 +261,7 @@ class SsrCache {
|
|
|
162
261
|
|
|
163
262
|
// Get the cached entry for this value.
|
|
164
263
|
// We first look in the ssr cache and then the hydration cache.
|
|
165
|
-
const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope, id); // If we are not server-side and we hydrated something, let's clear
|
|
264
|
+
const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope$2, id); // If we are not server-side and we hydrated something, let's clear
|
|
166
265
|
// that from the hydration cache to save memory.
|
|
167
266
|
|
|
168
267
|
if (this._ssrOnlyCache == null && internalEntry != null) {
|
|
@@ -172,7 +271,7 @@ class SsrCache {
|
|
|
172
271
|
// that's not an expected use-case. If two different places use the
|
|
173
272
|
// same handler and options (i.e. the same request), then the
|
|
174
273
|
// handler should cater to that to ensure they share the result.
|
|
175
|
-
this._hydrationCache.purge(DefaultScope, id);
|
|
274
|
+
this._hydrationCache.purge(DefaultScope$2, id);
|
|
176
275
|
} // Getting the typing right between the in-memory cache and this
|
|
177
276
|
// is hard. Just telling flow it's OK.
|
|
178
277
|
// $FlowIgnore[incompatible-return]
|
|
@@ -188,7 +287,7 @@ class SsrCache {
|
|
|
188
287
|
// to match the key of the entry we're removing, but that's an
|
|
189
288
|
// inefficient way to remove a single item, so let's not do that.
|
|
190
289
|
// Delete the entry from the appropriate cache.
|
|
191
|
-
return this._hydrationCache.purge(DefaultScope, id) || ((_this$_ssrOnlyCache$p = (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purge(DefaultScope, id)) != null ? _this$_ssrOnlyCache$p : false);
|
|
290
|
+
return this._hydrationCache.purge(DefaultScope$2, id) || ((_this$_ssrOnlyCache$p = (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purge(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$p : false);
|
|
192
291
|
};
|
|
193
292
|
|
|
194
293
|
this.removeAll = predicate => {
|
|
@@ -215,11 +314,11 @@ class SsrCache {
|
|
|
215
314
|
// $FlowIgnore[incompatible-return]
|
|
216
315
|
|
|
217
316
|
|
|
218
|
-
return (_cache$DefaultScope = cache[DefaultScope]) != null ? _cache$DefaultScope : {};
|
|
317
|
+
return (_cache$DefaultScope = cache[DefaultScope$2]) != null ? _cache$DefaultScope : {};
|
|
219
318
|
};
|
|
220
319
|
|
|
221
|
-
this._ssrOnlyCache = Server.isServerSide() ? ssrOnlyCache || new
|
|
222
|
-
this._hydrationCache = hydrationCache || new
|
|
320
|
+
this._ssrOnlyCache = Server.isServerSide() ? ssrOnlyCache || new SerializableInMemoryCache() : undefined;
|
|
321
|
+
this._hydrationCache = hydrationCache || new SerializableInMemoryCache();
|
|
223
322
|
}
|
|
224
323
|
|
|
225
324
|
_setCachedResponse(id, entry, hydrate) {
|
|
@@ -229,14 +328,14 @@ class SsrCache {
|
|
|
229
328
|
// We are server-side.
|
|
230
329
|
// We need to store this value.
|
|
231
330
|
if (hydrate) {
|
|
232
|
-
this._hydrationCache.set(DefaultScope, id, frozenEntry);
|
|
331
|
+
this._hydrationCache.set(DefaultScope$2, id, frozenEntry);
|
|
233
332
|
} else {
|
|
234
333
|
var _this$_ssrOnlyCache4;
|
|
235
334
|
|
|
236
335
|
// Usually, when server-side, this cache will always be present.
|
|
237
336
|
// We do fake server-side in our doc example though, when it
|
|
238
337
|
// won't be.
|
|
239
|
-
(_this$_ssrOnlyCache4 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache4.set(DefaultScope, id, frozenEntry);
|
|
338
|
+
(_this$_ssrOnlyCache4 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache4.set(DefaultScope$2, id, frozenEntry);
|
|
240
339
|
}
|
|
241
340
|
}
|
|
242
341
|
|
|
@@ -252,17 +351,13 @@ class SsrCache {
|
|
|
252
351
|
}
|
|
253
352
|
|
|
254
353
|
let _default$1;
|
|
354
|
+
/**
|
|
355
|
+
* This fulfills a request, making sure that in-flight requests are shared.
|
|
356
|
+
*/
|
|
255
357
|
|
|
256
|
-
class RequestFulfillment {
|
|
257
|
-
static get Default() {
|
|
258
|
-
if (!_default$1) {
|
|
259
|
-
_default$1 = new RequestFulfillment();
|
|
260
|
-
}
|
|
261
|
-
|
|
262
|
-
return _default$1;
|
|
263
|
-
}
|
|
264
358
|
|
|
265
|
-
|
|
359
|
+
class RequestFulfillment {
|
|
360
|
+
constructor() {
|
|
266
361
|
this._requests = {};
|
|
267
362
|
|
|
268
363
|
this.fulfill = (id, {
|
|
@@ -282,57 +377,48 @@ class RequestFulfillment {
|
|
|
282
377
|
*/
|
|
283
378
|
|
|
284
379
|
|
|
285
|
-
const {
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
}
|
|
380
|
+
const request = handler().then(data => ({
|
|
381
|
+
status: "success",
|
|
382
|
+
data
|
|
383
|
+
})).catch(error => {
|
|
384
|
+
const actualError = typeof error === "string" ? new DataError("Request failed", DataErrors.Unknown, {
|
|
385
|
+
metadata: {
|
|
386
|
+
unexpectedError: error
|
|
387
|
+
}
|
|
388
|
+
}) : error; // Return aborted result if the request was aborted.
|
|
389
|
+
// The only way to detect this reliably, it seems, is to
|
|
390
|
+
// check the error name and see if it's "AbortError" (this
|
|
391
|
+
// is also what Apollo does).
|
|
392
|
+
// Even then, it's reliant on the handler supporting aborts.
|
|
393
|
+
// TODO(somewhatabstract, FEI-4276): Add first class abort
|
|
394
|
+
// support to the handler API.
|
|
395
|
+
|
|
396
|
+
if (actualError.name === "AbortError") {
|
|
397
|
+
return {
|
|
398
|
+
status: "aborted"
|
|
399
|
+
};
|
|
400
|
+
}
|
|
289
401
|
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
402
|
+
return {
|
|
403
|
+
status: "error",
|
|
404
|
+
error: actualError
|
|
405
|
+
};
|
|
406
|
+
}).finally(() => {
|
|
407
|
+
delete this._requests[id];
|
|
408
|
+
}); // Store the request in our cache.
|
|
293
409
|
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
return null;
|
|
297
|
-
}
|
|
298
|
-
/**
|
|
299
|
-
* Let's cache the data!
|
|
300
|
-
*
|
|
301
|
-
* NOTE: This only caches when we're server side.
|
|
302
|
-
*/
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
return cacheData(id, data, _hydrate);
|
|
306
|
-
}).catch(error => {
|
|
307
|
-
delete this._requests[id];
|
|
308
|
-
/**
|
|
309
|
-
* Let's cache the error!
|
|
310
|
-
*
|
|
311
|
-
* NOTE: This only caches when we're server side.
|
|
312
|
-
*/
|
|
313
|
-
|
|
314
|
-
return cacheError(id, error, _hydrate);
|
|
315
|
-
});
|
|
316
|
-
this._requests[id] = request;
|
|
317
|
-
return request;
|
|
318
|
-
} catch (e) {
|
|
319
|
-
/**
|
|
320
|
-
* In this case, we don't cache an inflight request, because there
|
|
321
|
-
* really isn't one.
|
|
322
|
-
*/
|
|
323
|
-
return Promise.resolve(cacheError(id, e, _hydrate));
|
|
324
|
-
}
|
|
410
|
+
this._requests[id] = request;
|
|
411
|
+
return request;
|
|
325
412
|
};
|
|
326
|
-
|
|
327
|
-
this._responseCache = responseCache || SsrCache.Default;
|
|
328
413
|
}
|
|
329
|
-
/**
|
|
330
|
-
* Get a promise of a request for a given handler and options.
|
|
331
|
-
*
|
|
332
|
-
* This will return an inflight request if one exists, otherwise it will
|
|
333
|
-
* make a new request. Inflight requests are deleted once they resolve.
|
|
334
|
-
*/
|
|
335
414
|
|
|
415
|
+
static get Default() {
|
|
416
|
+
if (!_default$1) {
|
|
417
|
+
_default$1 = new RequestFulfillment();
|
|
418
|
+
}
|
|
419
|
+
|
|
420
|
+
return _default$1;
|
|
421
|
+
}
|
|
336
422
|
|
|
337
423
|
}
|
|
338
424
|
|
|
@@ -389,11 +475,54 @@ class RequestTracker {
|
|
|
389
475
|
|
|
390
476
|
this.fulfillTrackedRequests = () => {
|
|
391
477
|
const promises = [];
|
|
478
|
+
const {
|
|
479
|
+
cacheData,
|
|
480
|
+
cacheError
|
|
481
|
+
} = this._responseCache;
|
|
392
482
|
|
|
393
483
|
for (const requestKey of Object.keys(this._trackedRequests)) {
|
|
394
|
-
const
|
|
395
|
-
|
|
396
|
-
|
|
484
|
+
const options = this._trackedRequests[requestKey];
|
|
485
|
+
|
|
486
|
+
try {
|
|
487
|
+
promises.push(this._requestFulfillment.fulfill(requestKey, _extends({}, options)).then(result => {
|
|
488
|
+
switch (result.status) {
|
|
489
|
+
case "success":
|
|
490
|
+
/**
|
|
491
|
+
* Let's cache the data!
|
|
492
|
+
*
|
|
493
|
+
* NOTE: This only caches when we're
|
|
494
|
+
* server side.
|
|
495
|
+
*/
|
|
496
|
+
cacheData(requestKey, result.data, options.hydrate);
|
|
497
|
+
break;
|
|
498
|
+
|
|
499
|
+
case "error":
|
|
500
|
+
/**
|
|
501
|
+
* Let's cache the error!
|
|
502
|
+
*
|
|
503
|
+
* NOTE: This only caches when we're
|
|
504
|
+
* server side.
|
|
505
|
+
*/
|
|
506
|
+
cacheError(requestKey, result.error, options.hydrate);
|
|
507
|
+
break;
|
|
508
|
+
} // For status === "loading":
|
|
509
|
+
// Could never get here unless we wrote
|
|
510
|
+
// the code wrong. Rather than bloat
|
|
511
|
+
// code with useless error, just ignore.
|
|
512
|
+
// For status === "aborted":
|
|
513
|
+
// We won't cache this.
|
|
514
|
+
// We don't hydrate aborted requests,
|
|
515
|
+
// so the client would just see them
|
|
516
|
+
// as unfulfilled data.
|
|
517
|
+
|
|
518
|
+
|
|
519
|
+
return;
|
|
520
|
+
}));
|
|
521
|
+
} catch (e) {
|
|
522
|
+
// This captures if there are problems in the code that
|
|
523
|
+
// begins the requests.
|
|
524
|
+
promises.push(Promise.resolve(cacheError(requestKey, e, options.hydrate)));
|
|
525
|
+
}
|
|
397
526
|
}
|
|
398
527
|
/**
|
|
399
528
|
* Clear out our tracked info.
|
|
@@ -401,16 +530,15 @@ class RequestTracker {
|
|
|
401
530
|
* We call this now for a simpler API.
|
|
402
531
|
*
|
|
403
532
|
* If we reset the tracked calls after all promises resolve, any
|
|
404
|
-
*
|
|
533
|
+
* request tracking done while promises are in flight would be lost.
|
|
405
534
|
*
|
|
406
535
|
* If we don't reset at all, then we have to expose the `reset` call
|
|
407
536
|
* for consumers to use, or they'll only ever be able to accumulate
|
|
408
537
|
* more and more tracked requests, having to fulfill them all every
|
|
409
538
|
* time.
|
|
410
539
|
*
|
|
411
|
-
* Calling it here means we can have multiple "track -> request"
|
|
412
|
-
* in a row and in an easy to reason about manner.
|
|
413
|
-
*
|
|
540
|
+
* Calling it here means we can have multiple "track -> request"
|
|
541
|
+
* cycles in a row and in an easy to reason about manner.
|
|
414
542
|
*/
|
|
415
543
|
|
|
416
544
|
|
|
@@ -423,7 +551,7 @@ class RequestTracker {
|
|
|
423
551
|
};
|
|
424
552
|
|
|
425
553
|
this._responseCache = responseCache || SsrCache.Default;
|
|
426
|
-
this._requestFulfillment = new RequestFulfillment(
|
|
554
|
+
this._requestFulfillment = new RequestFulfillment();
|
|
427
555
|
}
|
|
428
556
|
/**
|
|
429
557
|
* Track a request.
|
|
@@ -472,38 +600,56 @@ class TrackData extends React.Component {
|
|
|
472
600
|
|
|
473
601
|
}
|
|
474
602
|
|
|
603
|
+
const loadingStatus = Object.freeze({
|
|
604
|
+
status: "loading"
|
|
605
|
+
});
|
|
606
|
+
const abortedStatus = Object.freeze({
|
|
607
|
+
status: "aborted"
|
|
608
|
+
});
|
|
475
609
|
/**
|
|
476
|
-
*
|
|
477
|
-
*
|
|
478
|
-
* This hook registers an asynchronous action to be performed during
|
|
479
|
-
* server-side rendering. The action is performed only once, and the result
|
|
480
|
-
* is cached against the given identifier so that subsequent calls return that
|
|
481
|
-
* cached result allowing components to render more of the component.
|
|
482
|
-
*
|
|
483
|
-
* This hook requires the Wonder Blocks Data functionality for resolving
|
|
484
|
-
* pending requests, as well as support for the hydration cache to be
|
|
485
|
-
* embedded into a page so that the result can by hydrated (if that is a
|
|
486
|
-
* requirement).
|
|
487
|
-
*
|
|
488
|
-
* The asynchronous action is never invoked on the client-side.
|
|
610
|
+
* Create Result<TData> instances with specific statuses.
|
|
489
611
|
*/
|
|
490
|
-
const useServerEffect = (requestId, handler, hydrate = true) => {
|
|
491
|
-
// If we're server-side or hydrating, we'll have a cached entry to use.
|
|
492
|
-
// So we get that and use it to initialize our state.
|
|
493
|
-
// This works in both hydration and SSR because the very first call to
|
|
494
|
-
// this will have cached data in those cases as it will be present on the
|
|
495
|
-
// initial render - and subsequent renders on the client it will be null.
|
|
496
|
-
const cachedResult = SsrCache.Default.getEntry(requestId); // We only track data requests when we are server-side and we don't
|
|
497
|
-
// already have a result, as given by the cachedData (which is also the
|
|
498
|
-
// initial value for the result state).
|
|
499
612
|
|
|
500
|
-
|
|
613
|
+
const Status = Object.freeze({
|
|
614
|
+
loading: () => loadingStatus,
|
|
615
|
+
aborted: () => abortedStatus,
|
|
616
|
+
success: data => ({
|
|
617
|
+
status: "success",
|
|
618
|
+
data
|
|
619
|
+
}),
|
|
620
|
+
error: error => ({
|
|
621
|
+
status: "error",
|
|
622
|
+
error
|
|
623
|
+
})
|
|
624
|
+
});
|
|
625
|
+
|
|
626
|
+
/**
|
|
627
|
+
* Turns a cache entry into a stateful result.
|
|
628
|
+
*/
|
|
629
|
+
const resultFromCachedResponse = cacheEntry => {
|
|
630
|
+
// No cache entry means no result to be hydrated.
|
|
631
|
+
if (cacheEntry == null) {
|
|
632
|
+
return null;
|
|
633
|
+
}
|
|
501
634
|
|
|
502
|
-
|
|
503
|
-
|
|
635
|
+
const {
|
|
636
|
+
data,
|
|
637
|
+
error
|
|
638
|
+
} = cacheEntry;
|
|
639
|
+
|
|
640
|
+
if (error != null) {
|
|
641
|
+
// Let's hydrate the error. We don't persist everything about the
|
|
642
|
+
// original error on the server, hence why we only superficially
|
|
643
|
+
// hydrate it to a GqlHydratedError.
|
|
644
|
+
return Status.error(new DataError(error, DataErrors.Hydrated));
|
|
504
645
|
}
|
|
505
646
|
|
|
506
|
-
|
|
647
|
+
if (data != null) {
|
|
648
|
+
return Status.success(data);
|
|
649
|
+
} // We shouldn't get here since we don't actually cache null data.
|
|
650
|
+
|
|
651
|
+
|
|
652
|
+
return Status.aborted();
|
|
507
653
|
};
|
|
508
654
|
|
|
509
655
|
/**
|
|
@@ -533,7 +679,7 @@ const useRequestInterception = (requestId, handler) => {
|
|
|
533
679
|
// We memoize this so that it only changes if something related to it
|
|
534
680
|
// changes.
|
|
535
681
|
|
|
536
|
-
const interceptedHandler = React.
|
|
682
|
+
const interceptedHandler = React.useCallback(() => {
|
|
537
683
|
// Call the interceptors from closest to furthest.
|
|
538
684
|
// If one returns a non-null result, then we keep that.
|
|
539
685
|
const interceptResponse = interceptors.reduceRight((prev, interceptor) => {
|
|
@@ -553,158 +699,45 @@ const useRequestInterception = (requestId, handler) => {
|
|
|
553
699
|
};
|
|
554
700
|
|
|
555
701
|
/**
|
|
556
|
-
*
|
|
702
|
+
* Hook to perform an asynchronous action during server-side rendering.
|
|
703
|
+
*
|
|
704
|
+
* This hook registers an asynchronous action to be performed during
|
|
705
|
+
* server-side rendering. The action is performed only once, and the result
|
|
706
|
+
* is cached against the given identifier so that subsequent calls return that
|
|
707
|
+
* cached result allowing components to render more of the component.
|
|
708
|
+
*
|
|
709
|
+
* This hook requires the Wonder Blocks Data functionality for resolving
|
|
710
|
+
* pending requests, as well as support for the hydration cache to be
|
|
711
|
+
* embedded into a page so that the result can by hydrated (if that is a
|
|
712
|
+
* requirement).
|
|
713
|
+
*
|
|
714
|
+
* The asynchronous action is never invoked on the client-side.
|
|
557
715
|
*/
|
|
558
|
-
const
|
|
559
|
-
// No cache entry means we didn't load one yet.
|
|
560
|
-
if (cacheEntry == null) {
|
|
561
|
-
return {
|
|
562
|
-
status: "loading"
|
|
563
|
-
};
|
|
564
|
-
}
|
|
565
|
-
|
|
716
|
+
const useServerEffect = (requestId, handler, options = {}) => {
|
|
566
717
|
const {
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
} =
|
|
570
|
-
|
|
571
|
-
if (error != null) {
|
|
572
|
-
return {
|
|
573
|
-
status: "error",
|
|
574
|
-
error
|
|
575
|
-
};
|
|
576
|
-
}
|
|
577
|
-
|
|
578
|
-
if (data != null) {
|
|
579
|
-
return {
|
|
580
|
-
status: "success",
|
|
581
|
-
data
|
|
582
|
-
};
|
|
583
|
-
}
|
|
718
|
+
hydrate = true,
|
|
719
|
+
skip = false
|
|
720
|
+
} = options; // Plug in to the request interception framework for code that wants
|
|
721
|
+
// to use that.
|
|
584
722
|
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
/**
|
|
591
|
-
* This component is the main component of Wonder Blocks Data. With this, data
|
|
592
|
-
* requirements can be placed in a React application in a manner that will
|
|
593
|
-
* support server-side rendering and efficient caching.
|
|
594
|
-
*/
|
|
595
|
-
const Data = ({
|
|
596
|
-
requestId,
|
|
597
|
-
handler,
|
|
598
|
-
children,
|
|
599
|
-
hydrate,
|
|
600
|
-
showOldDataWhileLoading,
|
|
601
|
-
alwaysRequestOnHydration
|
|
602
|
-
}) => {
|
|
603
|
-
const interceptedHandler = useRequestInterception(requestId, handler);
|
|
604
|
-
const hydrateResult = useServerEffect(requestId, interceptedHandler, hydrate);
|
|
605
|
-
const [currentResult, setResult] = React.useState(hydrateResult); // Here we make sure the request still occurs client-side as needed.
|
|
606
|
-
// This is for legacy usage that expects this. Eventually we will want
|
|
607
|
-
// to deprecate.
|
|
608
|
-
|
|
609
|
-
React.useEffect(() => {
|
|
610
|
-
// This is here until I can do a better documentation example for
|
|
611
|
-
// the TrackData docs.
|
|
612
|
-
// istanbul ignore next
|
|
613
|
-
if (Server.isServerSide()) {
|
|
614
|
-
return;
|
|
615
|
-
} // We don't bother with this if we have hydration data and we're not
|
|
616
|
-
// forcing a request on hydration.
|
|
617
|
-
// We don't care if these things change after the first render,
|
|
618
|
-
// so we don't want them in the inputs array.
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
if (!alwaysRequestOnHydration && (hydrateResult == null ? void 0 : hydrateResult.data) != null) {
|
|
622
|
-
return;
|
|
623
|
-
} // If we're not hydrating a result and we're not going to render
|
|
624
|
-
// with old data until we're loaded, we want to make sure we set our
|
|
625
|
-
// result to null so that we're in the loading state.
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
if (!showOldDataWhileLoading) {
|
|
629
|
-
// Mark ourselves as loading.
|
|
630
|
-
setResult(null);
|
|
631
|
-
} // We aren't server-side, so let's make the request.
|
|
632
|
-
// We don't need to use our built-in request fulfillment here if we
|
|
633
|
-
// don't want, but it does mean we'll share inflight requests for the
|
|
634
|
-
// same ID and the result will be in the same format as the
|
|
635
|
-
// hydrated value.
|
|
723
|
+
const interceptedHandler = useRequestInterception(requestId, handler); // If we're server-side or hydrating, we'll have a cached entry to use.
|
|
724
|
+
// So we get that and use it to initialize our state.
|
|
725
|
+
// This works in both hydration and SSR because the very first call to
|
|
726
|
+
// this will have cached data in those cases as it will be present on the
|
|
727
|
+
// initial render - and subsequent renders on the client it will be null.
|
|
636
728
|
|
|
729
|
+
const cachedResult = SsrCache.Default.getEntry(requestId); // We only track data requests when we are server-side, we are not skipping
|
|
730
|
+
// the request, and we don't already have a result, as given by the
|
|
731
|
+
// cachedData (which is also the initial value for the result state).
|
|
637
732
|
|
|
638
|
-
|
|
639
|
-
RequestFulfillment.Default.fulfill(requestId, {
|
|
640
|
-
handler: interceptedHandler
|
|
641
|
-
}).then(result => {
|
|
642
|
-
if (cancel) {
|
|
643
|
-
return;
|
|
644
|
-
}
|
|
645
|
-
|
|
646
|
-
setResult(result);
|
|
647
|
-
return;
|
|
648
|
-
}).catch(e => {
|
|
649
|
-
if (cancel) {
|
|
650
|
-
return;
|
|
651
|
-
}
|
|
652
|
-
/**
|
|
653
|
-
* We should never get here as errors in fulfillment are part
|
|
654
|
-
* of the `then`, but if we do.
|
|
655
|
-
*/
|
|
656
|
-
// eslint-disable-next-line no-console
|
|
733
|
+
const maybeTrack = useContext(TrackerContext);
|
|
657
734
|
|
|
735
|
+
if (!skip && cachedResult == null && Server.isServerSide()) {
|
|
736
|
+
maybeTrack == null ? void 0 : maybeTrack(requestId, interceptedHandler, hydrate);
|
|
737
|
+
} // A null result means there was no result to hydrate.
|
|
658
738
|
|
|
659
|
-
console.error(`Unexpected error occurred during data fulfillment: ${e}`);
|
|
660
|
-
setResult({
|
|
661
|
-
error: typeof e === "string" ? e : e.message
|
|
662
|
-
});
|
|
663
|
-
return;
|
|
664
|
-
});
|
|
665
|
-
return () => {
|
|
666
|
-
cancel = true;
|
|
667
|
-
}; // If the handler changes, we don't care. The ID is what indicates
|
|
668
|
-
// the request that should be made and folks shouldn't be changing the
|
|
669
|
-
// handler without changing the ID as well.
|
|
670
|
-
// In addition, we don't want to include hydrateResult nor
|
|
671
|
-
// alwaysRequestOnHydration as them changinng after the first pass
|
|
672
|
-
// is irrelevant.
|
|
673
|
-
// Finally, we don't want to include showOldDataWhileLoading as that
|
|
674
|
-
// changing on its own is also not relevant. It only matters if the
|
|
675
|
-
// request itself changes. All of which is to say that we only
|
|
676
|
-
// run this effect for the ID changing.
|
|
677
|
-
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
678
|
-
}, [requestId]);
|
|
679
|
-
return children(resultFromCachedResponse(currentResult));
|
|
680
|
-
};
|
|
681
739
|
|
|
682
|
-
|
|
683
|
-
* This component provides a mechanism to intercept data requests.
|
|
684
|
-
* This is for use in testing.
|
|
685
|
-
*
|
|
686
|
-
* This component is not recommended for use in production code as it
|
|
687
|
-
* can prevent predictable functioning of the Wonder Blocks Data framework.
|
|
688
|
-
* One possible side-effect is that inflight requests from the interceptor could
|
|
689
|
-
* be picked up by `Data` component requests from outside the children of this
|
|
690
|
-
* component.
|
|
691
|
-
*
|
|
692
|
-
* Interceptions within the same component tree are chained such that the
|
|
693
|
-
* interceptor closest to the intercepted request is called first, and the
|
|
694
|
-
* furthest interceptor is called last.
|
|
695
|
-
*/
|
|
696
|
-
const InterceptRequests = ({
|
|
697
|
-
interceptor,
|
|
698
|
-
children
|
|
699
|
-
}) => {
|
|
700
|
-
const interceptors = React.useContext(InterceptContext);
|
|
701
|
-
const updatedInterceptors = React.useMemo( // We could build this in reverse order so that our hook that does
|
|
702
|
-
// the interception didn't have to use reduceRight, but I think it
|
|
703
|
-
// is easier to think about if we do this in component tree order.
|
|
704
|
-
() => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
|
|
705
|
-
return /*#__PURE__*/React.createElement(InterceptContext.Provider, {
|
|
706
|
-
value: updatedInterceptors
|
|
707
|
-
}, children);
|
|
740
|
+
return cachedResult == null ? null : resultFromCachedResponse(cachedResult);
|
|
708
741
|
};
|
|
709
742
|
|
|
710
743
|
/**
|
|
@@ -751,16 +784,16 @@ const clearSharedCache = (scope = "") => {
|
|
|
751
784
|
const useSharedCache = (id, scope, initialValue) => {
|
|
752
785
|
// Verify arguments.
|
|
753
786
|
if (!id || typeof id !== "string") {
|
|
754
|
-
throw new
|
|
787
|
+
throw new DataError("id must be a non-empty string", DataErrors.InvalidInput);
|
|
755
788
|
}
|
|
756
789
|
|
|
757
790
|
if (!scope || typeof scope !== "string") {
|
|
758
|
-
throw new
|
|
791
|
+
throw new DataError("scope must be a non-empty string", DataErrors.InvalidInput);
|
|
759
792
|
} // Memoize our APIs.
|
|
760
793
|
// This one allows callers to set or replace the cached value.
|
|
761
794
|
|
|
762
795
|
|
|
763
|
-
const cacheValue = React.
|
|
796
|
+
const cacheValue = React.useCallback(value => value == null ? cache.purge(scope, id) : cache.set(scope, id, value), [id, scope]); // We don't memo-ize the current value, just in case the cache was updated
|
|
764
797
|
// since our last run through. Also, our cache does not know what type it
|
|
765
798
|
// stores, so we have to cast it to the type we're exporting. This is a
|
|
766
799
|
// dev time courtesy, rather than a runtime thing.
|
|
@@ -771,17 +804,291 @@ const useSharedCache = (id, scope, initialValue) => {
|
|
|
771
804
|
|
|
772
805
|
if (currentValue == null && initialValue !== undefined) {
|
|
773
806
|
// Get the initial value.
|
|
774
|
-
const value = typeof initialValue === "function" ? initialValue() : initialValue;
|
|
807
|
+
const value = typeof initialValue === "function" ? initialValue() : initialValue;
|
|
775
808
|
|
|
776
|
-
|
|
809
|
+
if (value != null) {
|
|
810
|
+
// Update the cache.
|
|
811
|
+
cacheValue(value); // Make sure we return this value as our current value.
|
|
777
812
|
|
|
778
|
-
|
|
813
|
+
currentValue = value;
|
|
814
|
+
}
|
|
779
815
|
} // Now we have everything, let's return it.
|
|
780
816
|
|
|
781
817
|
|
|
782
818
|
return [currentValue, cacheValue];
|
|
783
819
|
};
|
|
784
820
|
|
|
821
|
+
const DefaultScope$1 = "useCachedEffect";
|
|
822
|
+
/**
|
|
823
|
+
* Hook to execute and cache an async operation on the client.
|
|
824
|
+
*
|
|
825
|
+
* This hook executes the given handler on the client if there is no
|
|
826
|
+
* cached result to use.
|
|
827
|
+
*
|
|
828
|
+
* Results are cached so they can be shared between equivalent invocations.
|
|
829
|
+
* In-flight requests are also shared, so that concurrent calls will
|
|
830
|
+
* behave as one might exect. Cache updates invoked by one hook instance
|
|
831
|
+
* do not trigger renders in components that use the same requestID; however,
|
|
832
|
+
* that should not matter since concurrent requests will share the same
|
|
833
|
+
* in-flight request, and subsequent renders will grab from the cache.
|
|
834
|
+
*
|
|
835
|
+
* Once the request has been tried once and a non-loading response has been
|
|
836
|
+
* cached, the request will not executed made again.
|
|
837
|
+
*/
|
|
838
|
+
|
|
839
|
+
const useCachedEffect = (requestId, handler, options = {}) => {
|
|
840
|
+
const {
|
|
841
|
+
skip: hardSkip = false,
|
|
842
|
+
retainResultOnChange = false,
|
|
843
|
+
onResultChanged,
|
|
844
|
+
scope = DefaultScope$1
|
|
845
|
+
} = options; // Plug in to the request interception framework for code that wants
|
|
846
|
+
// to use that.
|
|
847
|
+
|
|
848
|
+
const interceptedHandler = useRequestInterception(requestId, handler); // Instead of using state, which would be local to just this hook instance,
|
|
849
|
+
// we use a shared in-memory cache.
|
|
850
|
+
|
|
851
|
+
const [mostRecentResult, setMostRecentResult] = useSharedCache(requestId, // The key of the cached item
|
|
852
|
+
scope // The scope of the cached items
|
|
853
|
+
// No default value. We don't want the loading status there; to ensure
|
|
854
|
+
// that all calls when the request is in-flight will update once that
|
|
855
|
+
// request is done, we want the cache to be empty until that point.
|
|
856
|
+
); // Build a function that will update the cache and either invoke the
|
|
857
|
+
// callback provided in options, or force an update.
|
|
858
|
+
|
|
859
|
+
const forceUpdate = useForceUpdate();
|
|
860
|
+
const setCacheAndNotify = React.useCallback(value => {
|
|
861
|
+
setMostRecentResult(value); // If our caller provided a cacheUpdated callback, we use that.
|
|
862
|
+
// Otherwise, we toggle our little state update.
|
|
863
|
+
|
|
864
|
+
if (onResultChanged != null) {
|
|
865
|
+
onResultChanged(value);
|
|
866
|
+
} else {
|
|
867
|
+
forceUpdate();
|
|
868
|
+
}
|
|
869
|
+
}, [setMostRecentResult, onResultChanged, forceUpdate]); // We need to trigger a re-render when the request ID changes as that
|
|
870
|
+
// indicates its a different request. We don't default the current id as
|
|
871
|
+
// this is a proxy for the first render, where we will make the request
|
|
872
|
+
// if we don't already have a cached value.
|
|
873
|
+
|
|
874
|
+
const requestIdRef = React.useRef();
|
|
875
|
+
const previousRequestId = requestIdRef.current; // Calculate our soft skip state.
|
|
876
|
+
// Soft skip changes are things that should skip the effect if something
|
|
877
|
+
// else triggers the effect to run, but should not itself trigger the effect
|
|
878
|
+
// (which would cancel a previous invocation).
|
|
879
|
+
|
|
880
|
+
const softSkip = React.useMemo(() => {
|
|
881
|
+
if (requestId === previousRequestId) {
|
|
882
|
+
// If the requestId is unchanged, it means we already rendered at
|
|
883
|
+
// least once and so we already made the request at least once. So
|
|
884
|
+
// we can bail out right here.
|
|
885
|
+
return true;
|
|
886
|
+
} // If we already have a cached value, we're going to skip.
|
|
887
|
+
|
|
888
|
+
|
|
889
|
+
if (mostRecentResult != null) {
|
|
890
|
+
return true;
|
|
891
|
+
}
|
|
892
|
+
|
|
893
|
+
return false;
|
|
894
|
+
}, [requestId, previousRequestId, mostRecentResult]); // So now we make sure the client-side request happens per our various
|
|
895
|
+
// options.
|
|
896
|
+
|
|
897
|
+
React.useEffect(() => {
|
|
898
|
+
let cancel = false; // We don't do anything if we've been told to hard skip (a hard skip
|
|
899
|
+
// means we should cancel the previous request and is therefore a
|
|
900
|
+
// dependency on that), or we have determined we have already done
|
|
901
|
+
// enough and can soft skip (a soft skip doesn't trigger the request
|
|
902
|
+
// to re-run; we don't want to cancel the in progress effect if we're
|
|
903
|
+
// soft skipping.
|
|
904
|
+
|
|
905
|
+
if (hardSkip || softSkip) {
|
|
906
|
+
return;
|
|
907
|
+
} // If we got here, we're going to perform the request.
|
|
908
|
+
// Let's make sure our ref is set to the most recent requestId.
|
|
909
|
+
|
|
910
|
+
|
|
911
|
+
requestIdRef.current = requestId; // OK, we've done all our checks and things. It's time to make the
|
|
912
|
+
// request. We use our request fulfillment here so that in-flight
|
|
913
|
+
// requests are shared.
|
|
914
|
+
// NOTE: Our request fulfillment handles the error cases here.
|
|
915
|
+
// Catching shouldn't serve a purpose.
|
|
916
|
+
// eslint-disable-next-line promise/catch-or-return
|
|
917
|
+
|
|
918
|
+
RequestFulfillment.Default.fulfill(requestId, {
|
|
919
|
+
handler: interceptedHandler
|
|
920
|
+
}).then(result => {
|
|
921
|
+
if (cancel) {
|
|
922
|
+
// We don't modify our result if an earlier effect was
|
|
923
|
+
// cancelled as it means that this hook no longer cares about
|
|
924
|
+
// that old request.
|
|
925
|
+
return;
|
|
926
|
+
}
|
|
927
|
+
|
|
928
|
+
setCacheAndNotify(result);
|
|
929
|
+
return; // Shut up eslint always-return rule.
|
|
930
|
+
});
|
|
931
|
+
return () => {
|
|
932
|
+
// TODO(somewhatabstract, FEI-4276): Eventually, we will want to be
|
|
933
|
+
// able abort in-flight requests, but for now, we don't have that.
|
|
934
|
+
// (Of course, we will only want to abort them if no one is waiting
|
|
935
|
+
// on them)
|
|
936
|
+
// For now, we just block cancelled requests from changing our
|
|
937
|
+
// cache.
|
|
938
|
+
cancel = true;
|
|
939
|
+
}; // We only want to run this effect if the requestId, or skip values
|
|
940
|
+
// change. These are the only two things that should affect the
|
|
941
|
+
// cancellation of a pending request. We do not update if the handler
|
|
942
|
+
// changes, in order to simplify the API - otherwise, callers would
|
|
943
|
+
// not be able to use inline functions with this hook.
|
|
944
|
+
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
945
|
+
}, [hardSkip, requestId]); // We track the last result we returned in order to support the
|
|
946
|
+
// "retainResultOnChange" option.
|
|
947
|
+
|
|
948
|
+
const lastResultAgnosticOfIdRef = React.useRef(Status.loading());
|
|
949
|
+
const loadingResult = retainResultOnChange ? lastResultAgnosticOfIdRef.current : Status.loading(); // Loading is a transient state, so we only use it here; it's not something
|
|
950
|
+
// we cache.
|
|
951
|
+
|
|
952
|
+
const result = React.useMemo(() => mostRecentResult != null ? mostRecentResult : loadingResult, [mostRecentResult, loadingResult]);
|
|
953
|
+
lastResultAgnosticOfIdRef.current = result;
|
|
954
|
+
return result;
|
|
955
|
+
};
|
|
956
|
+
|
|
957
|
+
/**
|
|
958
|
+
* Policies to define how a hydratable effect should behave client-side.
|
|
959
|
+
*/
|
|
960
|
+
const WhenClientSide = require("flow-enums-runtime").Mirrored(["DoNotHydrate", "ExecuteWhenNoResult", "ExecuteWhenNoSuccessResult", "AlwaysExecute"]);
|
|
961
|
+
const DefaultScope = "useHydratableEffect";
|
|
962
|
+
/**
|
|
963
|
+
* Hook to execute an async operation on server and client.
|
|
964
|
+
*
|
|
965
|
+
* This hook executes the given handler on the server and on the client,
|
|
966
|
+
* and, depending on the given options, can hydrate the server-side result.
|
|
967
|
+
*
|
|
968
|
+
* Results are cached on the client so they can be shared between equivalent
|
|
969
|
+
* invocations. Cache changes from one hook instance do not trigger renders
|
|
970
|
+
* in components that use the same requestID.
|
|
971
|
+
*/
|
|
972
|
+
|
|
973
|
+
const useHydratableEffect = (requestId, handler, options = {}) => {
|
|
974
|
+
const {
|
|
975
|
+
clientBehavior = WhenClientSide.ExecuteWhenNoSuccessResult,
|
|
976
|
+
skip = false,
|
|
977
|
+
retainResultOnChange = false,
|
|
978
|
+
onResultChanged,
|
|
979
|
+
scope = DefaultScope
|
|
980
|
+
} = options; // Now we instruct the server to perform the operation.
|
|
981
|
+
// When client-side, this will look up any response for hydration; it does
|
|
982
|
+
// not invoke the handler.
|
|
983
|
+
|
|
984
|
+
const serverResult = useServerEffect(requestId, handler, {
|
|
985
|
+
// Only hydrate if our behavior isn't telling us not to.
|
|
986
|
+
hydrate: clientBehavior !== WhenClientSide.DoNotHydrate,
|
|
987
|
+
skip
|
|
988
|
+
});
|
|
989
|
+
const getDefaultCacheValue = React.useCallback(() => {
|
|
990
|
+
// If we don't have a requestId, it's our first render, the one
|
|
991
|
+
// where we hydrated. So defer to our clientBehavior value.
|
|
992
|
+
switch (clientBehavior) {
|
|
993
|
+
case WhenClientSide.DoNotHydrate:
|
|
994
|
+
case WhenClientSide.AlwaysExecute:
|
|
995
|
+
// Either we weren't hydrating at all, or we don't care
|
|
996
|
+
// if we hydrated something or not, either way, we're
|
|
997
|
+
// doing a request.
|
|
998
|
+
return null;
|
|
999
|
+
|
|
1000
|
+
case WhenClientSide.ExecuteWhenNoResult:
|
|
1001
|
+
// We only execute if we didn't hydrate something.
|
|
1002
|
+
// So, returning the hydration result as default for our
|
|
1003
|
+
// cache, will then prevent the cached effect running.
|
|
1004
|
+
return serverResult;
|
|
1005
|
+
|
|
1006
|
+
case WhenClientSide.ExecuteWhenNoSuccessResult:
|
|
1007
|
+
// We only execute if we didn't hydrate a success result.
|
|
1008
|
+
if ((serverResult == null ? void 0 : serverResult.status) === "success") {
|
|
1009
|
+
// So, returning the hydration result as default for our
|
|
1010
|
+
// cache, will then prevent the cached effect running.
|
|
1011
|
+
return serverResult;
|
|
1012
|
+
}
|
|
1013
|
+
|
|
1014
|
+
return null;
|
|
1015
|
+
} // There is no reason for this to change after the first render,
|
|
1016
|
+
// you might think, but the function closes around serverResult and if
|
|
1017
|
+
// the requestId changes, it still returns the hydrate result of the
|
|
1018
|
+
// first render of the previous requestId. This then means that the
|
|
1019
|
+
// hydrate result is still the same, and the effect is not re-executed
|
|
1020
|
+
// because the cache gets incorrectly defaulted.
|
|
1021
|
+
// However, we don't want to bother doing anything with this on
|
|
1022
|
+
// client behavior changing since that truly is irrelevant.
|
|
1023
|
+
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
1024
|
+
|
|
1025
|
+
}, [serverResult]); // Instead of using state, which would be local to just this hook instance,
|
|
1026
|
+
// we use a shared in-memory cache.
|
|
1027
|
+
|
|
1028
|
+
useSharedCache(requestId, // The key of the cached item
|
|
1029
|
+
scope, // The scope of the cached items
|
|
1030
|
+
getDefaultCacheValue); // When we're client-side, we ultimately want the result from this call.
|
|
1031
|
+
|
|
1032
|
+
const clientResult = useCachedEffect(requestId, handler, {
|
|
1033
|
+
skip,
|
|
1034
|
+
onResultChanged,
|
|
1035
|
+
retainResultOnChange,
|
|
1036
|
+
scope
|
|
1037
|
+
}); // OK, now which result do we return.
|
|
1038
|
+
// Well, we return the serverResult on our very first call and then
|
|
1039
|
+
// the clientResult thereafter. The great thing is that after the very
|
|
1040
|
+
// first call, the serverResult is going to be `null` anyway.
|
|
1041
|
+
|
|
1042
|
+
return serverResult != null ? serverResult : clientResult;
|
|
1043
|
+
};
|
|
1044
|
+
|
|
1045
|
+
/**
|
|
1046
|
+
* This component is the main component of Wonder Blocks Data. With this, data
|
|
1047
|
+
* requirements can be placed in a React application in a manner that will
|
|
1048
|
+
* support server-side rendering and efficient caching.
|
|
1049
|
+
*/
|
|
1050
|
+
const Data = ({
|
|
1051
|
+
requestId,
|
|
1052
|
+
handler,
|
|
1053
|
+
children,
|
|
1054
|
+
retainResultOnChange: _retainResultOnChange = false,
|
|
1055
|
+
clientBehavior: _clientBehavior = WhenClientSide.ExecuteWhenNoSuccessResult
|
|
1056
|
+
}) => {
|
|
1057
|
+
const result = useHydratableEffect(requestId, handler, {
|
|
1058
|
+
retainResultOnChange: _retainResultOnChange,
|
|
1059
|
+
clientBehavior: _clientBehavior
|
|
1060
|
+
});
|
|
1061
|
+
return children(result);
|
|
1062
|
+
};
|
|
1063
|
+
|
|
1064
|
+
/**
|
|
1065
|
+
* This component provides a mechanism to intercept data requests.
|
|
1066
|
+
* This is for use in testing.
|
|
1067
|
+
*
|
|
1068
|
+
* This component is not recommended for use in production code as it
|
|
1069
|
+
* can prevent predictable functioning of the Wonder Blocks Data framework.
|
|
1070
|
+
* One possible side-effect is that inflight requests from the interceptor could
|
|
1071
|
+
* be picked up by `Data` component requests from outside the children of this
|
|
1072
|
+
* component.
|
|
1073
|
+
*
|
|
1074
|
+
* Interceptions within the same component tree are chained such that the
|
|
1075
|
+
* interceptor closest to the intercepted request is called first, and the
|
|
1076
|
+
* furthest interceptor is called last.
|
|
1077
|
+
*/
|
|
1078
|
+
const InterceptRequests = ({
|
|
1079
|
+
interceptor,
|
|
1080
|
+
children
|
|
1081
|
+
}) => {
|
|
1082
|
+
const interceptors = React.useContext(InterceptContext);
|
|
1083
|
+
const updatedInterceptors = React.useMemo( // We could build this in reverse order so that our hook that does
|
|
1084
|
+
// the interception didn't have to use reduceRight, but I think it
|
|
1085
|
+
// is easier to think about if we do this in component tree order.
|
|
1086
|
+
() => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
|
|
1087
|
+
return /*#__PURE__*/React.createElement(InterceptContext.Provider, {
|
|
1088
|
+
value: updatedInterceptors
|
|
1089
|
+
}, children);
|
|
1090
|
+
};
|
|
1091
|
+
|
|
785
1092
|
const GqlRouterContext = /*#__PURE__*/React.createContext(null);
|
|
786
1093
|
|
|
787
1094
|
/**
|
|
@@ -816,17 +1123,57 @@ const GqlRouter = ({
|
|
|
816
1123
|
}, children);
|
|
817
1124
|
};
|
|
818
1125
|
|
|
1126
|
+
/**
|
|
1127
|
+
* Construct a complete GqlContext from current defaults and a partial context.
|
|
1128
|
+
*
|
|
1129
|
+
* Values in the partial context that are `undefined` will be ignored.
|
|
1130
|
+
* Values in the partial context that are `null` will be deleted.
|
|
1131
|
+
*/
|
|
1132
|
+
const mergeGqlContext = (defaultContext, overrides) => {
|
|
1133
|
+
// Let's merge the partial context default context. We deliberately
|
|
1134
|
+
// don't spread because spreading would overwrite default context
|
|
1135
|
+
// values with undefined or null if the partial context includes a value
|
|
1136
|
+
// explicitly set to undefined or null.
|
|
1137
|
+
return Object.keys(overrides).reduce((acc, key) => {
|
|
1138
|
+
// Undefined values are ignored.
|
|
1139
|
+
if (overrides[key] !== undefined) {
|
|
1140
|
+
if (overrides[key] === null) {
|
|
1141
|
+
// Null indicates we delete this context value.
|
|
1142
|
+
delete acc[key];
|
|
1143
|
+
} else {
|
|
1144
|
+
// Otherwise, we set it.
|
|
1145
|
+
acc[key] = overrides[key];
|
|
1146
|
+
}
|
|
1147
|
+
}
|
|
1148
|
+
|
|
1149
|
+
return acc;
|
|
1150
|
+
}, _extends({}, defaultContext));
|
|
1151
|
+
};
|
|
1152
|
+
|
|
819
1153
|
/**
|
|
820
1154
|
* Error kinds for GqlError.
|
|
821
1155
|
*/
|
|
822
|
-
const GqlErrors = Object.freeze(
|
|
823
|
-
|
|
824
|
-
|
|
1156
|
+
const GqlErrors = Object.freeze({
|
|
1157
|
+
/**
|
|
1158
|
+
* An internal framework error.
|
|
1159
|
+
*/
|
|
1160
|
+
Internal: "Internal",
|
|
1161
|
+
|
|
1162
|
+
/**
|
|
1163
|
+
* Response does not have the correct structure for a GraphQL response.
|
|
1164
|
+
*/
|
|
825
1165
|
BadResponse: "BadResponse",
|
|
1166
|
+
|
|
1167
|
+
/**
|
|
1168
|
+
* A valid GraphQL result with errors field in the payload.
|
|
1169
|
+
*/
|
|
826
1170
|
ErrorResult: "ErrorResult"
|
|
827
|
-
})
|
|
1171
|
+
});
|
|
828
1172
|
/**
|
|
829
1173
|
* An error from the GQL API.
|
|
1174
|
+
*
|
|
1175
|
+
* Errors of this type will have names of the format:
|
|
1176
|
+
* `${kind}GqlError`
|
|
830
1177
|
*/
|
|
831
1178
|
|
|
832
1179
|
class GqlError extends KindError {
|
|
@@ -837,12 +1184,48 @@ class GqlError extends KindError {
     super(message, kind, {
       metadata,
       cause,
-
+      name: "Gql"
     });
   }
 
 }
 
+/**
+ * Construct a GqlRouterContext from the current one and partial context.
+ */
+const useGqlRouterContext = (contextOverrides = {}) => {
+  // This hook only works if the `GqlRouter` has been used to setup context.
+  const gqlRouterContext = useContext(GqlRouterContext);
+
+  if (gqlRouterContext == null) {
+    throw new GqlError("No GqlRouter", GqlErrors.Internal);
+  }
+
+  const {
+    fetch,
+    defaultContext
+  } = gqlRouterContext;
+  const contextRef = useRef(defaultContext);
+  const mergedContext = mergeGqlContext(defaultContext, contextOverrides); // Now, we can see if this represents a new context and if so,
+  // update our ref and return the merged value.
+
+  const refKeys = Object.keys(contextRef.current);
+  const mergedKeys = Object.keys(mergedContext);
+  const shouldWeUpdateRef = refKeys.length !== mergedKeys.length || mergedKeys.every(key => contextRef.current[key] !== mergedContext[key]);
+
+  if (shouldWeUpdateRef) {
+    contextRef.current = mergedContext;
+  } // OK, now we're up-to-date, let's memoize our final result.
+
+
+  const finalContext = contextRef.current;
+  const finalRouterContext = useMemo(() => ({
+    fetch,
+    defaultContext: finalContext
+  }), [fetch, finalContext]);
+  return finalRouterContext;
+};
+
 /**
  * Validate a GQL operation response and extract the data.
  */
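Illustrative example (not part of the diff): with `name: "Gql"` now passed through to `KindError`, the updated doc comment says these errors get names of the form `${kind}GqlError`. A hedged sketch of what that means for callers, assuming the constructor signature `(message, kind, options?)` implied by the `super` call:

    import {GqlError, GqlErrors} from "@khanacademy/wonder-blocks-data";

    const error = new GqlError("Malformed response", GqlErrors.BadResponse);
    // Per the comment in the diff, the composed name would be:
    console.log(error.name); // "BadResponseGqlError"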
@@ -856,7 +1239,7 @@ const getGqlDataFromResponse = async response => {
   try {
     result = JSON.parse(bodyText);
   } catch (e) {
-    throw new
+    throw new DataError("Failed to parse response", DataErrors.Parse, {
       metadata: {
         statusCode: response.status,
         bodyText
@@ -867,7 +1250,7 @@ const getGqlDataFromResponse = async response => {
 
 
   if (response.status >= 300) {
-    throw new
+    throw new DataError("Response unsuccessful", DataErrors.Network, {
       metadata: {
         statusCode: response.status,
         result
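Illustrative example (not part of the diff): the two one-line hunks above replace bare throws with `DataError`s, using `DataErrors.Parse` when the response body is not valid JSON and `DataErrors.Network` for statuses of 300 and above. Since `DataError` is now exported (see the export change at the end of this file), callers can at least branch on the error type; `gqlFetch`, `operation`, and `reportGqlFailure` below are hypothetical:

    import {DataError} from "@khanacademy/wonder-blocks-data";

    const loadData = async () => {
        try {
            return await gqlFetch(operation, {variables: {id: "123"}});
        } catch (error) {
            if (error instanceof DataError) {
                // Covers the Parse and Network kinds thrown above.
                reportGqlFailure(error);
                return null;
            }
            throw error;
        }
    };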
@@ -913,59 +1296,48 @@ const getGqlDataFromResponse = async response => {
  * Values in the partial context given to the returned fetch function will
  * only be included if they have a value other than undefined.
  */
-const useGql = () => {
+const useGql = (context = {}) => {
   // This hook only works if the `GqlRouter` has been used to setup context.
-  const gqlRouterContext =
-
-  if (gqlRouterContext == null) {
-    throw new GqlError("No GqlRouter", GqlErrors.Internal);
-  }
-
-  const {
-    fetch,
-    defaultContext
-  } = gqlRouterContext; // Let's memoize the gqlFetch function we create based off our context.
+  const gqlRouterContext = useGqlRouterContext(context); // Let's memoize the gqlFetch function we create based off our context.
   // That way, even if the context happens to change, if its values don't
   // we give the same function instance back to our callers instead of
   // making a new one. That then means they can safely use the return value
   // in hooks deps without fear of it triggering extra renders.
 
-  const gqlFetch =
+  const gqlFetch = useCallback((operation, options = Object.freeze({})) => {
+    const {
+      fetch,
+      defaultContext
+    } = gqlRouterContext;
     const {
       variables,
       context = {}
-    } = options;
-
-    // spreading would overwrite default context values with
-    // undefined if the partial context includes a value explicitly
-    // set to undefined. Instead, we use a map/reduce of keys.
-
-    const mergedContext = Object.keys(context).reduce((acc, key) => {
-      if (context[key] !== undefined) {
-        acc[key] = context[key];
-      }
-
-      return acc;
-    }, _extends({}, defaultContext)); // Invoke the fetch and extract the data.
+    } = options;
+    const finalContext = mergeGqlContext(defaultContext, context); // Invoke the fetch and extract the data.
 
-    return fetch(operation, variables,
-
-      // The only way to detect this reliably, it seems, is to
-      // check the error name and see if it's "AbortError" (this
-      // is also what Apollo does).
-      // Even then, it's reliant on the fetch supporting aborts.
-      if (error.name === "AbortError") {
-        return null;
-      } // Need to make sure we pass other errors along.
-
-
-      throw error;
-    });
-  }, [fetch, defaultContext]);
+    return fetch(operation, variables, finalContext).then(getGqlDataFromResponse);
+  }, [gqlRouterContext]);
   return gqlFetch;
 };
 
+/**
+ * Initialize the hydration cache.
+ *
+ * @param {ResponseCache} source The cache content to use for initializing the
+ * cache.
+ * @throws {Error} If the cache is already initialized.
+ */
 const initializeCache = source => SsrCache.Default.initialize(source);
+/**
+ * Fulfill all tracked data requests.
+ *
+ * This is for use with the `TrackData` component during server-side rendering.
+ *
+ * @throws {Error} If executed outside of server-side rendering.
+ * @returns {Promise<void>} A promise that resolves when all tracked requests
+ * have been fulfilled.
+ */
+
 const fulfillAllDataRequests = () => {
   if (!Server.isServerSide()) {
     return Promise.reject(new Error("Data requests are not tracked when client-side"));
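Illustrative example (not part of the diff): `useGql` now accepts a partial context that is merged into the `GqlRouter` defaults via `useGqlRouterContext`/`mergeGqlContext`, and the returned `gqlFetch` still takes per-request `variables` and `context` options with the same merge rules (`undefined` ignored, `null` deletes). This sketch assumes a `GqlRouter` ancestor is present; the component, `myQuery`, and the context keys are hypothetical:

    import * as React from "react";
    import {useGql} from "@khanacademy/wonder-blocks-data";

    const CourseLoader = ({id}) => {
        // Hook-level overrides merge into the router's defaultContext.
        const gqlFetch = useGql({locale: "es"});

        const onClick = () => {
            gqlFetch(myQuery, {
                variables: {id},
                // Per-request overrides merge again; null drops the key.
                context: {locale: null},
            }).then((data) => console.log(data));
        };

        return <button onClick={onClick}>Load</button>;
    };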
@@ -973,6 +1345,16 @@ const fulfillAllDataRequests = () => {
 
   return RequestTracker.Default.fulfillTrackedRequests();
 };
+/**
+ * Indicate if there are unfulfilled tracked requests.
+ *
+ * This is used in conjunction with `TrackData`.
+ *
+ * @throws {Error} If executed outside of server-side rendering.
+ * @returns {boolean} `true` if there are unfulfilled tracked requests;
+ * otherwise, `false`.
+ */
+
 const hasUnfulfilledRequests = () => {
   if (!Server.isServerSide()) {
     throw new Error("Data requests are not tracked when client-side");
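Illustrative example (not part of the diff): the new JSDoc for `fulfillAllDataRequests` and `hasUnfulfilledRequests` describes the server-side tracking flow they support alongside `TrackData`. A simplified sketch of that flow, with `App` hypothetical and error handling omitted:

    import * as React from "react";
    import {renderToString} from "react-dom/server";
    import {
        TrackData,
        hasUnfulfilledRequests,
        fulfillAllDataRequests,
    } from "@khanacademy/wonder-blocks-data";

    const renderOnServer = async () => {
        // First pass: render with tracking so requests get recorded.
        let html = renderToString(
            <TrackData>
                <App />
            </TrackData>,
        );

        if (hasUnfulfilledRequests()) {
            // Fulfill what was tracked, then render again with data available.
            await fulfillAllDataRequests();
            html = renderToString(<App />);
        }

        return html;
    };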
@@ -980,7 +1362,21 @@ const hasUnfulfilledRequests = () => {
 
   return RequestTracker.Default.hasUnfulfilledRequests;
 };
+/**
+ * Remove the request identified from the cached hydration responses.
+ *
+ * @param {string} id The request ID of the response to remove from the cache.
+ */
+
 const removeFromCache = id => SsrCache.Default.remove(id);
+/**
+ * Remove all cached hydration responses that match the given predicate.
+ *
+ * @param {(id: string) => boolean} [predicate] The predicate to match against
+ * the cached hydration responses. If no predicate is provided, all cached
+ * hydration responses will be removed.
+ */
+
 const removeAllFromCache = predicate => SsrCache.Default.removeAll(predicate);
 
-export { Data, GqlError, GqlErrors, GqlRouter, InterceptRequests, ScopedInMemoryCache, TrackData, clearSharedCache, fulfillAllDataRequests, hasUnfulfilledRequests, initializeCache, removeAllFromCache, removeFromCache,
+export { Data, DataError, DataErrors, GqlError, GqlErrors, GqlRouter, InterceptRequests, RequestFulfillment, ScopedInMemoryCache, SerializableInMemoryCache, Status, TrackData, WhenClientSide, clearSharedCache, fulfillAllDataRequests, hasUnfulfilledRequests, initializeCache, removeAllFromCache, removeFromCache, useCachedEffect, useGql, useHydratableEffect, useServerEffect, useSharedCache };
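Illustrative example (not part of the diff): the new JSDoc spells out the two cache-eviction helpers. The request IDs below are purely hypothetical:

    import {
        removeFromCache,
        removeAllFromCache,
    } from "@khanacademy/wonder-blocks-data";

    // Evict a single cached hydration response by request ID.
    removeFromCache("course:algebra");

    // Evict every response whose ID matches a predicate...
    removeAllFromCache((id) => id.startsWith("course:"));

    // ...or every cached response when no predicate is given.
    removeAllFromCache();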