@khanacademy/wonder-blocks-data 4.0.0 → 6.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +31 -0
- package/dist/es/index.js +793 -375
- package/dist/index.js +1203 -523
- package/legacy-docs.md +3 -0
- package/package.json +2 -2
- package/src/__docs__/_overview_.stories.mdx +18 -0
- package/src/__docs__/_overview_graphql.stories.mdx +35 -0
- package/src/__docs__/_overview_ssr_.stories.mdx +185 -0
- package/src/__docs__/_overview_testing_.stories.mdx +123 -0
- package/src/__docs__/exports.clear-shared-cache.stories.mdx +20 -0
- package/src/__docs__/exports.data-error.stories.mdx +23 -0
- package/src/__docs__/exports.data-errors.stories.mdx +23 -0
- package/src/{components/data.md → __docs__/exports.data.stories.mdx} +15 -18
- package/src/__docs__/exports.fulfill-all-data-requests.stories.mdx +24 -0
- package/src/__docs__/exports.gql-error.stories.mdx +23 -0
- package/src/__docs__/exports.gql-errors.stories.mdx +20 -0
- package/src/__docs__/exports.gql-router.stories.mdx +29 -0
- package/src/__docs__/exports.has-unfulfilled-requests.stories.mdx +20 -0
- package/src/__docs__/exports.intercept-requests.stories.mdx +69 -0
- package/src/__docs__/exports.intialize-cache.stories.mdx +29 -0
- package/src/__docs__/exports.remove-all-from-cache.stories.mdx +24 -0
- package/src/__docs__/exports.remove-from-cache.stories.mdx +25 -0
- package/src/__docs__/exports.request-fulfillment.stories.mdx +36 -0
- package/src/__docs__/exports.scoped-in-memory-cache.stories.mdx +92 -0
- package/src/__docs__/exports.serializable-in-memory-cache.stories.mdx +112 -0
- package/src/__docs__/exports.status.stories.mdx +31 -0
- package/src/{components/track-data.md → __docs__/exports.track-data.stories.mdx} +15 -0
- package/src/__docs__/exports.use-cached-effect.stories.mdx +41 -0
- package/src/__docs__/exports.use-gql.stories.mdx +73 -0
- package/src/__docs__/exports.use-hydratable-effect.stories.mdx +43 -0
- package/src/__docs__/exports.use-server-effect.stories.mdx +38 -0
- package/src/__docs__/exports.use-shared-cache.stories.mdx +30 -0
- package/src/__docs__/exports.when-client-side.stories.mdx +33 -0
- package/src/__docs__/types.cached-response.stories.mdx +29 -0
- package/src/__docs__/types.error-options.stories.mdx +21 -0
- package/src/__docs__/types.gql-context.stories.mdx +20 -0
- package/src/__docs__/types.gql-fetch-fn.stories.mdx +24 -0
- package/src/__docs__/types.gql-fetch-options.stories.mdx +24 -0
- package/src/__docs__/types.gql-operation-type.stories.mdx +24 -0
- package/src/__docs__/types.gql-operation.stories.mdx +67 -0
- package/src/__docs__/types.response-cache.stories.mdx +33 -0
- package/src/__docs__/types.result.stories.mdx +39 -0
- package/src/__docs__/types.scoped-cache.stories.mdx +27 -0
- package/src/__docs__/types.valid-cache-data.stories.mdx +23 -0
- package/src/__tests__/__snapshots__/generated-snapshot.test.js.snap +0 -80
- package/src/__tests__/generated-snapshot.test.js +7 -31
- package/src/components/__tests__/data.test.js +160 -154
- package/src/components/__tests__/intercept-requests.test.js +58 -0
- package/src/components/data.js +22 -126
- package/src/components/intercept-context.js +4 -5
- package/src/components/intercept-requests.js +69 -0
- package/src/hooks/__tests__/__snapshots__/use-shared-cache.test.js.snap +8 -8
- package/src/hooks/__tests__/use-cached-effect.test.js +507 -0
- package/src/hooks/__tests__/use-gql-router-context.test.js +133 -0
- package/src/hooks/__tests__/use-gql.test.js +1 -30
- package/src/hooks/__tests__/use-hydratable-effect.test.js +708 -0
- package/src/hooks/__tests__/use-request-interception.test.js +255 -0
- package/src/hooks/__tests__/use-server-effect.test.js +39 -11
- package/src/hooks/use-cached-effect.js +225 -0
- package/src/hooks/use-gql-router-context.js +50 -0
- package/src/hooks/use-gql.js +22 -52
- package/src/hooks/use-hydratable-effect.js +206 -0
- package/src/hooks/use-request-interception.js +51 -0
- package/src/hooks/use-server-effect.js +14 -7
- package/src/hooks/use-shared-cache.js +13 -11
- package/src/index.js +54 -2
- package/src/util/__tests__/__snapshots__/serializable-in-memory-cache.test.js.snap +19 -0
- package/src/util/__tests__/merge-gql-context.test.js +74 -0
- package/src/util/__tests__/request-fulfillment.test.js +23 -42
- package/src/util/__tests__/request-tracking.test.js +26 -7
- package/src/util/__tests__/result-from-cache-response.test.js +19 -5
- package/src/util/__tests__/scoped-in-memory-cache.test.js +6 -85
- package/src/util/__tests__/serializable-in-memory-cache.test.js +398 -0
- package/src/util/__tests__/ssr-cache.test.js +52 -52
- package/src/util/abort-error.js +15 -0
- package/src/util/data-error.js +58 -0
- package/src/util/get-gql-data-from-response.js +3 -2
- package/src/util/gql-error.js +19 -11
- package/src/util/merge-gql-context.js +34 -0
- package/src/util/request-fulfillment.js +49 -46
- package/src/util/request-tracking.js +69 -15
- package/src/util/result-from-cache-response.js +12 -16
- package/src/util/scoped-in-memory-cache.js +24 -47
- package/src/util/serializable-in-memory-cache.js +49 -0
- package/src/util/ssr-cache.js +9 -8
- package/src/util/status.js +30 -0
- package/src/util/types.js +18 -1
- package/docs.md +0 -122
- package/src/components/__tests__/intercept-data.test.js +0 -63
- package/src/components/intercept-data.js +0 -66
- package/src/components/intercept-data.md +0 -51
package/dist/es/index.js
CHANGED
@@ -1,117 +1,216 @@
-import { Server } from '@khanacademy/wonder-blocks-core';
-import { KindError,
-import * as React from 'react';
-import { useContext, useMemo } from 'react';
+import { Server, useForceUpdate } from '@khanacademy/wonder-blocks-core';
+import { KindError, clone } from '@khanacademy/wonder-stuff-core';
 import _extends from '@babel/runtime/helpers/extends';
+import * as React from 'react';
+import { useContext, useRef, useMemo, useCallback } from 'react';
+
+/**
+* Error kinds for DataError.
+*/
+const DataErrors = Object.freeze({
+/**
+* The kind of error is not known.
+*/
+Unknown: "Unknown",
+
+/**
+* The error is internal to the executing code.
+*/
+Internal: "Internal",
+
+/**
+* There was a problem with the provided input.
+*/
+InvalidInput: "InvalidInput",
+
+/**
+* A network error occurred.
+*/
+Network: "Network",
+
+/**
+* Response could not be parsed.
+*/
+Parse: "Parse",
+
+/**
+* An error that occurred during SSR and was hydrated from cache
+*/
+Hydrated: "Hydrated"
+});
+/**
+* An error from the Wonder Blocks Data API.
+*
+* Errors of this type will have names of the format:
+* `${kind}DataError`
+*/
+
+class DataError extends KindError {
+constructor(message, kind, {
+metadata,
+cause
+} = {}) {
+super(message, kind, {
+metadata,
+cause,
+name: "Data"
+});
+}
+
+}
 
 /**
 * Describe an in-memory cache.
 */
 class ScopedInMemoryCache {
-constructor(initialCache =
-this.
-
+constructor(initialCache = {}) {
+this._cache = initialCache;
+}
+/**
+* Indicate if this cache is being used or not.
+*
+* When the cache has entries, returns `true`; otherwise, returns `false`.
+*/
 
-if (!id || typeof id !== "string") {
-throw new KindError("id must be non-empty string", Errors.InvalidInput);
-}
 
-
-
-
+get inUse() {
+return Object.keys(this._cache).length > 0;
+}
+/**
+* Set a value in the cache.
+*/
 
-if (typeof value === "function") {
-throw new KindError("value must be a non-function value", Errors.InvalidInput);
-}
 
-
-
-
+set(scope, id, value) {
+var _this$_cache$scope;
+
+if (!id || typeof id !== "string") {
+throw new DataError("id must be non-empty string", DataErrors.InvalidInput);
+}
 
-
-
+if (!scope || typeof scope !== "string") {
+throw new DataError("scope must be non-empty string", DataErrors.InvalidInput);
+}
 
-
-
+if (typeof value === "function") {
+throw new DataError("value must be a non-function value", DataErrors.InvalidInput);
+}
 
-this.
-
+this._cache[scope] = (_this$_cache$scope = this._cache[scope]) != null ? _this$_cache$scope : {};
+this._cache[scope][id] = value;
+}
+/**
+* Retrieve a value from the cache.
+*/
 
-if (!((_this$_cache$scope3 = this._cache[scope]) != null && _this$_cache$scope3[id])) {
-return;
-}
 
-
+get(scope, id) {
+var _this$_cache$scope$id, _this$_cache$scope2;
 
-
-
-
-
+return (_this$_cache$scope$id = (_this$_cache$scope2 = this._cache[scope]) == null ? void 0 : _this$_cache$scope2[id]) != null ? _this$_cache$scope$id : null;
+}
+/**
+* Purge an item from the cache.
+*/
 
-this.purgeScope = (scope, predicate) => {
-if (!this._cache[scope]) {
-return;
-}
 
-
-
-return;
-}
+purge(scope, id) {
+var _this$_cache$scope3;
 
-
-
-
-}
-}
+if (!((_this$_cache$scope3 = this._cache[scope]) != null && _this$_cache$scope3[id])) {
+return;
+}
 
-
-delete this._cache[scope];
-}
-};
+delete this._cache[scope][id];
 
-this.
-
-
-
-
+if (Object.keys(this._cache[scope]).length === 0) {
+delete this._cache[scope];
+}
+}
+/**
+* Purge a scope of items that match the given predicate.
+*
+* If the predicate is omitted, then all items in the scope are purged.
+*/
 
-for (const scope of Object.keys(this._cache)) {
-this.purgeScope(scope, (id, value) => predicate(scope, id, value));
-}
-};
 
-
-
-
-
-
+purgeScope(scope, predicate) {
+if (!this._cache[scope]) {
+return;
+}
+
+if (predicate == null) {
+delete this._cache[scope];
+return;
+}
+
+for (const key of Object.keys(this._cache[scope])) {
+if (predicate(key, this._cache[scope][key])) {
+delete this._cache[scope][key];
 }
-}
+}
 
-
-this._cache
-} catch (e) {
-throw new KindError(`An error occurred trying to initialize from a response cache snapshot: ${e}`, Errors.InvalidInput);
+if (Object.keys(this._cache[scope]).length === 0) {
+delete this._cache[scope];
 }
 }
 /**
-*
+* Purge all items from the cache that match the given predicate.
 *
-*
+* If the predicate is omitted, then all items in the cache are purged.
 */
 
 
-
-
+purgeAll(predicate) {
+if (predicate == null) {
+this._cache = {};
+return;
+}
+
+for (const scope of Object.keys(this._cache)) {
+this.purgeScope(scope, (id, value) => predicate(scope, id, value));
+}
+}
+
+}
+
+/**
+* Describe a serializable in-memory cache.
+*/
+class SerializableInMemoryCache extends ScopedInMemoryCache {
+constructor(initialCache = {}) {
+try {
+super(clone(initialCache));
+} catch (e) {
+throw new DataError(`An error occurred trying to initialize from a response cache snapshot: ${e}`, DataErrors.InvalidInput);
+}
 }
 /**
 * Set a value in the cache.
 */
 
 
+set(scope, id, value) {
+super.set(scope, id, Object.freeze(clone(value)));
+}
+/**
+* Clone the cache.
+*/
+
+
+clone() {
+try {
+return clone(this._cache);
+} catch (e) {
+throw new DataError("An error occurred while trying to clone the cache", DataErrors.Internal, {
+cause: e
+});
+}
+}
+
 }
 
-const DefaultScope = "default";
+const DefaultScope$2 = "default";
 /**
 * The default instance is stored here.
 * It's created below in the Default() static property.
@@ -140,9 +239,9 @@ class SsrCache {
 throw new Error("Cannot initialize data response cache more than once");
 }
 
-this._hydrationCache = new
+this._hydrationCache = new SerializableInMemoryCache({
 // $FlowIgnore[incompatible-call]
-[DefaultScope]: source
+[DefaultScope$2]: source
 });
 };
 
@@ -162,7 +261,7 @@ class SsrCache {
 
 // Get the cached entry for this value.
 // We first look in the ssr cache and then the hydration cache.
-const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope, id); // If we are not server-side and we hydrated something, let's clear
+const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope$2, id); // If we are not server-side and we hydrated something, let's clear
 // that from the hydration cache to save memory.
 
 if (this._ssrOnlyCache == null && internalEntry != null) {
@@ -172,7 +271,7 @@ class SsrCache {
 // that's not an expected use-case. If two different places use the
 // same handler and options (i.e. the same request), then the
 // handler should cater to that to ensure they share the result.
-this._hydrationCache.purge(DefaultScope, id);
+this._hydrationCache.purge(DefaultScope$2, id);
 } // Getting the typing right between the in-memory cache and this
 // is hard. Just telling flow it's OK.
 // $FlowIgnore[incompatible-return]
@@ -188,7 +287,7 @@ class SsrCache {
 // to match the key of the entry we're removing, but that's an
 // inefficient way to remove a single item, so let's not do that.
 // Delete the entry from the appropriate cache.
-return this._hydrationCache.purge(DefaultScope, id) || ((_this$_ssrOnlyCache$p = (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purge(DefaultScope, id)) != null ? _this$_ssrOnlyCache$p : false);
+return this._hydrationCache.purge(DefaultScope$2, id) || ((_this$_ssrOnlyCache$p = (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purge(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$p : false);
 };
 
 this.removeAll = predicate => {
@@ -215,11 +314,11 @@ class SsrCache {
 // $FlowIgnore[incompatible-return]
 
 
-return (_cache$DefaultScope = cache[DefaultScope]) != null ? _cache$DefaultScope : {};
+return (_cache$DefaultScope = cache[DefaultScope$2]) != null ? _cache$DefaultScope : {};
 };
 
-this._ssrOnlyCache = Server.isServerSide() ? ssrOnlyCache || new
-this._hydrationCache = hydrationCache || new
+this._ssrOnlyCache = Server.isServerSide() ? ssrOnlyCache || new SerializableInMemoryCache() : undefined;
+this._hydrationCache = hydrationCache || new SerializableInMemoryCache();
 }
 
 _setCachedResponse(id, entry, hydrate) {
@@ -229,14 +328,14 @@ class SsrCache {
 // We are server-side.
 // We need to store this value.
 if (hydrate) {
-this._hydrationCache.set(DefaultScope, id, frozenEntry);
+this._hydrationCache.set(DefaultScope$2, id, frozenEntry);
 } else {
 var _this$_ssrOnlyCache4;
 
 // Usually, when server-side, this cache will always be present.
 // We do fake server-side in our doc example though, when it
 // won't be.
-(_this$_ssrOnlyCache4 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache4.set(DefaultScope, id, frozenEntry);
+(_this$_ssrOnlyCache4 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache4.set(DefaultScope$2, id, frozenEntry);
 }
 }
 
@@ -252,17 +351,13 @@ class SsrCache {
 }
 
 let _default$1;
+/**
+* This fulfills a request, making sure that in-flight requests are shared.
+*/
 
-class RequestFulfillment {
-static get Default() {
-if (!_default$1) {
-_default$1 = new RequestFulfillment();
-}
-
-return _default$1;
-}
 
-
+class RequestFulfillment {
+constructor() {
 this._requests = {};
 
 this.fulfill = (id, {
@@ -282,57 +377,48 @@ class RequestFulfillment {
 */
 
 
-const {
-
-
-}
+const request = handler().then(data => ({
+status: "success",
+data
+})).catch(error => {
+const actualError = typeof error === "string" ? new DataError("Request failed", DataErrors.Unknown, {
+metadata: {
+unexpectedError: error
+}
+}) : error; // Return aborted result if the request was aborted.
+// The only way to detect this reliably, it seems, is to
+// check the error name and see if it's "AbortError" (this
+// is also what Apollo does).
+// Even then, it's reliant on the handler supporting aborts.
+// TODO(somewhatabstract, FEI-4276): Add first class abort
+// support to the handler API.
+
+if (actualError.name === "AbortError") {
+return {
+status: "aborted"
+};
+}
 
-
-
-
+return {
+status: "error",
+error: actualError
+};
+}).finally(() => {
+delete this._requests[id];
+}); // Store the request in our cache.
 
-
-
-return null;
-}
-/**
-* Let's cache the data!
-*
-* NOTE: This only caches when we're server side.
-*/
-
-
-return cacheData(id, data, _hydrate);
-}).catch(error => {
-delete this._requests[id];
-/**
-* Let's cache the error!
-*
-* NOTE: This only caches when we're server side.
-*/
-
-return cacheError(id, error, _hydrate);
-});
-this._requests[id] = request;
-return request;
-} catch (e) {
-/**
-* In this case, we don't cache an inflight request, because there
-* really isn't one.
-*/
-return Promise.resolve(cacheError(id, e, _hydrate));
-}
+this._requests[id] = request;
+return request;
 };
-
-this._responseCache = responseCache || SsrCache.Default;
 }
-/**
-* Get a promise of a request for a given handler and options.
-*
-* This will return an inflight request if one exists, otherwise it will
-* make a new request. Inflight requests are deleted once they resolve.
-*/
 
+static get Default() {
+if (!_default$1) {
+_default$1 = new RequestFulfillment();
+}
+
+return _default$1;
+}
 
 }
 
@@ -389,11 +475,54 @@ class RequestTracker {
 
 this.fulfillTrackedRequests = () => {
 const promises = [];
+const {
+cacheData,
+cacheError
+} = this._responseCache;
 
 for (const requestKey of Object.keys(this._trackedRequests)) {
-const
-
-
+const options = this._trackedRequests[requestKey];
+
+try {
+promises.push(this._requestFulfillment.fulfill(requestKey, _extends({}, options)).then(result => {
+switch (result.status) {
+case "success":
+/**
+* Let's cache the data!
+*
+* NOTE: This only caches when we're
+* server side.
+*/
+cacheData(requestKey, result.data, options.hydrate);
+break;
+
+case "error":
+/**
+* Let's cache the error!
+*
+* NOTE: This only caches when we're
+* server side.
+*/
+cacheError(requestKey, result.error, options.hydrate);
+break;
+} // For status === "loading":
+// Could never get here unless we wrote
+// the code wrong. Rather than bloat
+// code with useless error, just ignore.
+// For status === "aborted":
+// We won't cache this.
+// We don't hydrate aborted requests,
+// so the client would just see them
+// as unfulfilled data.
+
+
+return;
+}));
+} catch (e) {
+// This captures if there are problems in the code that
+// begins the requests.
+promises.push(Promise.resolve(cacheError(requestKey, e, options.hydrate)));
+}
 }
 /**
 * Clear out our tracked info.
@@ -401,16 +530,15 @@ class RequestTracker {
 * We call this now for a simpler API.
 *
 * If we reset the tracked calls after all promises resolve, any
-*
+* request tracking done while promises are in flight would be lost.
 *
 * If we don't reset at all, then we have to expose the `reset` call
 * for consumers to use, or they'll only ever be able to accumulate
 * more and more tracked requests, having to fulfill them all every
 * time.
 *
-* Calling it here means we can have multiple "track -> request"
-* in a row and in an easy to reason about manner.
-*
+* Calling it here means we can have multiple "track -> request"
+* cycles in a row and in an easy to reason about manner.
 */
 
 
@@ -423,7 +551,7 @@ class RequestTracker {
 };
 
 this._responseCache = responseCache || SsrCache.Default;
-this._requestFulfillment = new RequestFulfillment(
+this._requestFulfillment = new RequestFulfillment();
 }
 /**
 * Track a request.
@@ -473,55 +601,50 @@ class TrackData extends React.Component {
 }
 
 /**
-*
+* Simple implementation to represent aborting.
 *
-*
+* Other frameworks may provide this too, so we won't be sharing this with
+* the outside world. It's just a utility for test and internal use whenever
+* we need to represent the concept of aborted things.
 */
-
+class AbortError extends Error {
+constructor(message) {
+super(message);
+this.name = "AbortError";
+}
+
+}
 
+const loadingStatus = Object.freeze({
+status: "loading"
+});
+const abortedStatus = Object.freeze({
+status: "aborted"
+});
 /**
-*
-*
-* This hook registers an asynchronous action to be performed during
-* server-side rendering. The action is performed only once, and the result
-* is cached against the given identifier so that subsequent calls return that
-* cached result allowing components to render more of the component.
-*
-* This hook requires the Wonder Blocks Data functionality for resolving
-* pending requests, as well as support for the hydration cache to be
-* embedded into a page so that the result can by hydrated (if that is a
-* requirement).
-*
-* The asynchronous action is never invoked on the client-side.
+* Create Result<TData> instances with specific statuses.
 */
-const useServerEffect = (id, handler, hydrate = true) => {
-// If we're server-side or hydrating, we'll have a cached entry to use.
-// So we get that and use it to initialize our state.
-// This works in both hydration and SSR because the very first call to
-// this will have cached data in those cases as it will be present on the
-// initial render - and subsequent renders on the client it will be null.
-const cachedResult = SsrCache.Default.getEntry(id); // We only track data requests when we are server-side and we don't
-// already have a result, as given by the cachedData (which is also the
-// initial value for the result state).
-
-const maybeTrack = useContext(TrackerContext);
-
-if (cachedResult == null && Server.isServerSide()) {
-maybeTrack == null ? void 0 : maybeTrack(id, handler, hydrate);
-}
 
-
-
+const Status = Object.freeze({
+loading: () => loadingStatus,
+aborted: () => abortedStatus,
+success: data => ({
+status: "success",
+data
+}),
+error: error => ({
+status: "error",
+error
+})
+});
 
 /**
 * Turns a cache entry into a stateful result.
 */
 const resultFromCachedResponse = cacheEntry => {
-// No cache entry means
+// No cache entry means no result to be hydrated.
 if (cacheEntry == null) {
-return
-status: "loading"
-};
+return null;
 }
 
 const {
@@ -530,162 +653,102 @@ const resultFromCachedResponse = cacheEntry => {
 } = cacheEntry;
 
 if (error != null) {
-
-
-
-
+// Let's hydrate the error. We don't persist everything about the
+// original error on the server, hence why we only superficially
+// hydrate it to a GqlHydratedError.
+return Status.error(new DataError(error, DataErrors.Hydrated));
 }
 
 if (data != null) {
-return
-
-data
-};
-}
+return Status.success(data);
+} // We shouldn't get here since we don't actually cache null data.
 
-
-
-};
+
+return Status.aborted();
 };
 
 /**
-*
-*
-*
+* InterceptContext defines a map from request ID to interception methods.
+*
+* INTERNAL USE ONLY
 */
-const
-requestId,
-handler,
-children,
-hydrate,
-showOldDataWhileLoading,
-alwaysRequestOnHydration
-}) => {
-// Lookup to see if there's an interceptor for the handler.
-// If we have one, we need to replace the handler with one that
-// uses the interceptor.
-const interceptorMap = React.useContext(InterceptContext); // If we have an interceptor, we need to replace the handler with one
-// that uses the interceptor. This helper function generates a new
-// handler.
-
-const maybeInterceptedHandler = React.useMemo(() => {
-const interceptor = interceptorMap[requestId];
-
-if (interceptor == null) {
-return handler;
-}
+const InterceptContext = /*#__PURE__*/React.createContext([]);
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-// with old data until we're loaded, we want to make sure we set our
-// result to null so that we're in the loading state.
-
-
-if (!showOldDataWhileLoading) {
-// Mark ourselves as loading.
-setResult(null);
-} // We aren't server-side, so let's make the request.
-// We don't need to use our built-in request fulfillment here if we
-// don't want, but it does mean we'll share inflight requests for the
-// same ID and the result will be in the same format as the
-// hydrated value.
-
-
-let cancel = false;
-RequestFulfillment.Default.fulfill(requestId, {
-handler: maybeInterceptedHandler
-}).then(result => {
-if (cancel) {
-return;
-}
-
-setResult(result);
-return;
-}).catch(e => {
-if (cancel) {
-return;
+/**
+* Allow request handling to be intercepted.
+*
+* Hook to take a uniquely identified request handler and return a
+* method that will support request interception from the InterceptRequest
+* component.
+*
+* If you want request interception to be supported with `useServerEffect` or
+* any client-side effect that uses the handler, call this first to generate
+* an intercepted handler, and then invoke `useServerEffect` (or other things)
+* with that intercepted handler.
+*/
+const useRequestInterception = (requestId, handler) => {
+// Get the interceptors that have been registered.
+const interceptors = React.useContext(InterceptContext); // Now, we need to create a new handler that will check if the
+// request is intercepted before ultimately calling the original handler
+// if nothing intercepted it.
+// We memoize this so that it only changes if something related to it
+// changes.
+
+const interceptedHandler = React.useCallback(() => {
+// Call the interceptors from closest to furthest.
+// If one returns a non-null result, then we keep that.
+const interceptResponse = interceptors.reduceRight((prev, interceptor) => {
+if (prev != null) {
+return prev;
 }
-/**
-* We should never get here as errors in fulfillment are part
-* of the `then`, but if we do.
-*/
-// eslint-disable-next-line no-console
 
+return interceptor(requestId);
+}, null); // If nothing intercepted this request, invoke the original handler.
+// NOTE: We can't guarantee all interceptors return the same type
+// as our handler, so how can flow know? Let's just suppress that.
+// $FlowFixMe[incompatible-return]
 
-
-
-
-});
-return;
-});
-return () => {
-cancel = true;
-}; // If the handler changes, we don't care. The ID is what indicates
-// the request that should be made and folks shouldn't be changing the
-// handler without changing the ID as well.
-// In addition, we don't want to include hydrateResult nor
-// alwaysRequestOnHydration as them changinng after the first pass
-// is irrelevant.
-// Finally, we don't want to include showOldDataWhileLoading as that
-// changing on its own is also not relevant. It only matters if the
-// request itself changes. All of which is to say that we only
-// run this effect for the ID changing.
-// eslint-disable-next-line react-hooks/exhaustive-deps
-}, [requestId]);
-return children(resultFromCachedResponse(currentResult));
+return interceptResponse != null ? interceptResponse : handler();
+}, [handler, interceptors, requestId]);
+return interceptedHandler;
 };
 
 /**
-*
-* This is for use in testing.
+* Hook to perform an asynchronous action during server-side rendering.
 *
-* This
-*
-*
-*
-*
+* This hook registers an asynchronous action to be performed during
+* server-side rendering. The action is performed only once, and the result
+* is cached against the given identifier so that subsequent calls return that
+* cached result allowing components to render more of the component.
+*
+* This hook requires the Wonder Blocks Data functionality for resolving
+* pending requests, as well as support for the hydration cache to be
+* embedded into a page so that the result can by hydrated (if that is a
+* requirement).
 *
-*
-* rendered within this one that intercepts the same id, then that
-* new instance will replace this interceptor for its children. All methods
-* will be replaced.
+* The asynchronous action is never invoked on the client-side.
 */
-const
-
-
-
-
-
-
-
-
-
-
-
+const useServerEffect = (requestId, handler, hydrate = true) => {
+// Plug in to the request interception framework for code that wants
+// to use that.
+const interceptedHandler = useRequestInterception(requestId, handler); // If we're server-side or hydrating, we'll have a cached entry to use.
+// So we get that and use it to initialize our state.
+// This works in both hydration and SSR because the very first call to
+// this will have cached data in those cases as it will be present on the
+// initial render - and subsequent renders on the client it will be null.
+
+const cachedResult = SsrCache.Default.getEntry(requestId); // We only track data requests when we are server-side and we don't
+// already have a result, as given by the cachedData (which is also the
+// initial value for the result state).
+
+const maybeTrack = useContext(TrackerContext);
+
+if (cachedResult == null && Server.isServerSide()) {
+maybeTrack == null ? void 0 : maybeTrack(requestId, interceptedHandler, hydrate);
+} // A null result means there was no result to hydrate.
+
+
+return cachedResult == null ? null : resultFromCachedResponse(cachedResult);
 };
 
 /**
@@ -732,16 +795,16 @@ const clearSharedCache = (scope = "") => {
 const useSharedCache = (id, scope, initialValue) => {
 // Verify arguments.
 if (!id || typeof id !== "string") {
-throw new
+throw new DataError("id must be a non-empty string", DataErrors.InvalidInput);
 }
 
 if (!scope || typeof scope !== "string") {
-throw new
+throw new DataError("scope must be a non-empty string", DataErrors.InvalidInput);
 } // Memoize our APIs.
 // This one allows callers to set or replace the cached value.
 
 
-const cacheValue = React.
+const cacheValue = React.useCallback(value => value == null ? cache.purge(scope, id) : cache.set(scope, id, value), [id, scope]); // We don't memo-ize the current value, just in case the cache was updated
 // since our last run through. Also, our cache does not know what type it
 // stores, so we have to cast it to the type we're exporting. This is a
 // dev time courtesy, rather than a runtime thing.
@@ -752,17 +815,283 @@ const useSharedCache = (id, scope, initialValue) => {
 
 if (currentValue == null && initialValue !== undefined) {
 // Get the initial value.
-const value = typeof initialValue === "function" ? initialValue() : initialValue;
+const value = typeof initialValue === "function" ? initialValue() : initialValue;
 
-
+if (value != null) {
+// Update the cache.
+cacheValue(value); // Make sure we return this value as our current value.
 
-
+currentValue = value;
+}
 } // Now we have everything, let's return it.
 
 
 return [currentValue, cacheValue];
 };
 
+const DefaultScope$1 = "useCachedEffect";
+/**
+* Hook to execute and cache an async operation on the client.
+*
+* This hook executes the given handler on the client if there is no
+* cached result to use.
+*
+* Results are cached so they can be shared between equivalent invocations.
+* In-flight requests are also shared, so that concurrent calls will
+* behave as one might exect. Cache updates invoked by one hook instance
+* do not trigger renders in components that use the same requestID; however,
+* that should not matter since concurrent requests will share the same
+* in-flight request, and subsequent renders will grab from the cache.
+*
+* Once the request has been tried once and a non-loading response has been
+* cached, the request will not executed made again.
+*/
+
+const useCachedEffect = (requestId, handler, options = {}) => {
+const {
+skip: hardSkip = false,
+retainResultOnChange = false,
+onResultChanged,
+scope = DefaultScope$1
+} = options; // Plug in to the request interception framework for code that wants
+// to use that.
+
+const interceptedHandler = useRequestInterception(requestId, handler); // Instead of using state, which would be local to just this hook instance,
+// we use a shared in-memory cache.
+
+const [mostRecentResult, setMostRecentResult] = useSharedCache(requestId, // The key of the cached item
+scope // The scope of the cached items
+// No default value. We don't want the loading status there; to ensure
+// that all calls when the request is in-flight will update once that
+// request is done, we want the cache to be empty until that point.
+); // Build a function that will update the cache and either invoke the
+// callback provided in options, or force an update.
+
+const forceUpdate = useForceUpdate();
+const setCacheAndNotify = React.useCallback(value => {
+setMostRecentResult(value); // If our caller provided a cacheUpdated callback, we use that.
+// Otherwise, we toggle our little state update.
+
+if (onResultChanged != null) {
+onResultChanged(value);
+} else {
+forceUpdate();
+}
+}, [setMostRecentResult, onResultChanged, forceUpdate]); // We need to trigger a re-render when the request ID changes as that
+// indicates its a different request. We don't default the current id as
+// this is a proxy for the first render, where we will make the request
+// if we don't already have a cached value.
+
+const requestIdRef = React.useRef();
+const previousRequestId = requestIdRef.current; // Calculate our soft skip state.
+// Soft skip changes are things that should skip the effect if something
+// else triggers the effect to run, but should not itself trigger the effect
+// (which would cancel a previous invocation).
+
+const softSkip = React.useMemo(() => {
+if (requestId === previousRequestId) {
+// If the requestId is unchanged, it means we already rendered at
+// least once and so we already made the request at least once. So
+// we can bail out right here.
+return true;
+} // If we already have a cached value, we're going to skip.
+
+
+if (mostRecentResult != null) {
+return true;
+}
+
+return false;
+}, [requestId, previousRequestId, mostRecentResult]); // So now we make sure the client-side request happens per our various
+// options.
+
+React.useEffect(() => {
+let cancel = false; // We don't do anything if we've been told to hard skip (a hard skip
+// means we should cancel the previous request and is therefore a
+// dependency on that), or we have determined we have already done
+// enough and can soft skip (a soft skip doesn't trigger the request
+// to re-run; we don't want to cancel the in progress effect if we're
+// soft skipping.
+
+if (hardSkip || softSkip) {
+return;
+} // If we got here, we're going to perform the request.
+// Let's make sure our ref is set to the most recent requestId.
+
+
+requestIdRef.current = requestId; // OK, we've done all our checks and things. It's time to make the
+// request. We use our request fulfillment here so that in-flight
+// requests are shared.
+// NOTE: Our request fulfillment handles the error cases here.
+// Catching shouldn't serve a purpose.
+// eslint-disable-next-line promise/catch-or-return
+
+RequestFulfillment.Default.fulfill(requestId, {
+handler: interceptedHandler
+}).then(result => {
+if (cancel) {
+// We don't modify our result if an earlier effect was
+// cancelled as it means that this hook no longer cares about
+// that old request.
+return;
+}
+
+setCacheAndNotify(result);
+return; // Shut up eslint always-return rule.
+});
+return () => {
+// TODO(somewhatabstract, FEI-4276): Eventually, we will want to be
+// able abort in-flight requests, but for now, we don't have that.
+// (Of course, we will only want to abort them if no one is waiting
+// on them)
+// For now, we just block cancelled requests from changing our
+// cache.
+cancel = true;
+}; // We only want to run this effect if the requestId, or skip values
+// change. These are the only two things that should affect the
+// cancellation of a pending request. We do not update if the handler
+// changes, in order to simplify the API - otherwise, callers would
+// not be able to use inline functions with this hook.
+// eslint-disable-next-line react-hooks/exhaustive-deps
+}, [hardSkip, requestId]); // We track the last result we returned in order to support the
+// "retainResultOnChange" option.
+
+const lastResultAgnosticOfIdRef = React.useRef(Status.loading());
+const loadingResult = retainResultOnChange ? lastResultAgnosticOfIdRef.current : Status.loading(); // Loading is a transient state, so we only use it here; it's not something
+// we cache.
+
+const result = React.useMemo(() => mostRecentResult != null ? mostRecentResult : loadingResult, [mostRecentResult, loadingResult]);
+lastResultAgnosticOfIdRef.current = result;
+return result;
+};
+
+/**
+* Policies to define how a hydratable effect should behave client-side.
+*/
+const WhenClientSide = require("flow-enums-runtime").Mirrored(["DoNotHydrate", "ExecuteWhenNoResult", "ExecuteWhenNoSuccessResult", "AlwaysExecute"]);
+const DefaultScope = "useHydratableEffect";
+/**
+* Hook to execute an async operation on server and client.
+*
+* This hook executes the given handler on the server and on the client,
+* and, depending on the given options, can hydrate the server-side result.
+*
+* Results are cached on the client so they can be shared between equivalent
+* invocations. Cache changes from one hook instance do not trigger renders
+* in components that use the same requestID.
+*/
+
+const useHydratableEffect = (requestId, handler, options = {}) => {
+const {
+clientBehavior = WhenClientSide.ExecuteWhenNoSuccessResult,
+skip = false,
+retainResultOnChange = false,
+onResultChanged,
+scope = DefaultScope
+} = options; // Now we instruct the server to perform the operation.
+// When client-side, this will look up any response for hydration; it does
+// not invoke the handler.
+
+const serverResult = useServerEffect(requestId, // If we're skipped (unlikely in server worlds, but maybe),
+// just give an aborted response.
+skip ? () => Promise.reject(new AbortError("skipped")) : handler, // Only hydrate if our behavior isn't telling us not to.
+clientBehavior !== WhenClientSide.DoNotHydrate);
+const getDefaultCacheValue = React.useCallback(() => {
+// If we don't have a requestId, it's our first render, the one
+// where we hydrated. So defer to our clientBehavior value.
+switch (clientBehavior) {
+case WhenClientSide.DoNotHydrate:
+case WhenClientSide.AlwaysExecute:
+// Either we weren't hydrating at all, or we don't care
+// if we hydrated something or not, either way, we're
+// doing a request.
+return null;
+
+case WhenClientSide.ExecuteWhenNoResult:
+// We only execute if we didn't hydrate something.
+// So, returning the hydration result as default for our
+// cache, will then prevent the cached effect running.
+return serverResult;
+
+case WhenClientSide.ExecuteWhenNoSuccessResult:
+// We only execute if we didn't hydrate a success result.
+if ((serverResult == null ? void 0 : serverResult.status) === "success") {
+// So, returning the hydration result as default for our
+// cache, will then prevent the cached effect running.
+return serverResult;
+}
+
+return null;
+} // There is no reason for this to change after the first render.
+// eslint-disable-next-line react-hooks/exhaustive-deps
+
+}, []); // Instead of using state, which would be local to just this hook instance,
+// we use a shared in-memory cache.
+
+useSharedCache(requestId, // The key of the cached item
+scope, // The scope of the cached items
+getDefaultCacheValue); // When we're client-side, we ultimately want the result from this call.
+
+const clientResult = useCachedEffect(requestId, handler, {
+skip,
+onResultChanged,
+retainResultOnChange,
+scope
+}); // OK, now which result do we return.
+// Well, we return the serverResult on our very first call and then
+// the clientResult thereafter. The great thing is that after the very
+// first call, the serverResult is going to be `null` anyway.
+
+return serverResult != null ? serverResult : clientResult;
+};
+
+/**
+* This component is the main component of Wonder Blocks Data. With this, data
+* requirements can be placed in a React application in a manner that will
+* support server-side rendering and efficient caching.
+*/
+const Data = ({
+requestId,
+handler,
+children,
+retainResultOnChange: _retainResultOnChange = false,
+clientBehavior: _clientBehavior = WhenClientSide.ExecuteWhenNoSuccessResult
+}) => {
+const result = useHydratableEffect(requestId, handler, {
+retainResultOnChange: _retainResultOnChange,
+clientBehavior: _clientBehavior
+});
+return children(result);
+};
+
+/**
+* This component provides a mechanism to intercept data requests.
+* This is for use in testing.
+*
+* This component is not recommended for use in production code as it
+* can prevent predictable functioning of the Wonder Blocks Data framework.
+* One possible side-effect is that inflight requests from the interceptor could
+* be picked up by `Data` component requests from outside the children of this
+* component.
+*
+* Interceptions within the same component tree are chained such that the
+* interceptor closest to the intercepted request is called first, and the
+* furthest interceptor is called last.
+*/
+const InterceptRequests = ({
+interceptor,
+children
+}) => {
+const interceptors = React.useContext(InterceptContext);
+const updatedInterceptors = React.useMemo( // We could build this in reverse order so that our hook that does
+// the interception didn't have to use reduceRight, but I think it
+// is easier to think about if we do this in component tree order.
+() => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
+return /*#__PURE__*/React.createElement(InterceptContext.Provider, {
+value: updatedInterceptors
+}, children);
+};
+
 const GqlRouterContext = /*#__PURE__*/React.createContext(null);
 
 /**
@@ -797,17 +1126,57 @@ const GqlRouter = ({
 }, children);
 };
 
+/**
+* Construct a complete GqlContext from current defaults and a partial context.
+*
+* Values in the partial context that are `undefined` will be ignored.
+* Values in the partial context that are `null` will be deleted.
+*/
+const mergeGqlContext = (defaultContext, overrides) => {
+// Let's merge the partial context default context. We deliberately
+// don't spread because spreading would overwrite default context
+// values with undefined or null if the partial context includes a value
+// explicitly set to undefined or null.
+return Object.keys(overrides).reduce((acc, key) => {
+// Undefined values are ignored.
+if (overrides[key] !== undefined) {
+if (overrides[key] === null) {
+// Null indicates we delete this context value.
+delete acc[key];
+} else {
+// Otherwise, we set it.
+acc[key] = overrides[key];
+}
+}
+
+return acc;
+}, _extends({}, defaultContext));
+};
+
 /**
 * Error kinds for GqlError.
 */
-const GqlErrors = Object.freeze(
-
-
+const GqlErrors = Object.freeze({
+/**
+* An internal framework error.
+*/
+Internal: "Internal",
+
+/**
+* Response does not have the correct structure for a GraphQL response.
+*/
 BadResponse: "BadResponse",
+
+/**
+* A valid GraphQL result with errors field in the payload.
+*/
 ErrorResult: "ErrorResult"
-})
+});
 /**
 * An error from the GQL API.
+*
+* Errors of this type will have names of the format:
+* `${kind}GqlError`
 */
 
 class GqlError extends KindError {
@@ -818,12 +1187,48 @@ class GqlError extends KindError {
 super(message, kind, {
 metadata,
 cause,
-
+name: "Gql"
 });
 }
 
 }
 
+/**
+* Construct a GqlRouterContext from the current one and partial context.
+*/
+const useGqlRouterContext = (contextOverrides = {}) => {
+// This hook only works if the `GqlRouter` has been used to setup context.
+const gqlRouterContext = useContext(GqlRouterContext);
+
+if (gqlRouterContext == null) {
+throw new GqlError("No GqlRouter", GqlErrors.Internal);
+}
+
+const {
+fetch,
+defaultContext
+} = gqlRouterContext;
+const contextRef = useRef(defaultContext);
+const mergedContext = mergeGqlContext(defaultContext, contextOverrides); // Now, we can see if this represents a new context and if so,
+// update our ref and return the merged value.
+
+const refKeys = Object.keys(contextRef.current);
+const mergedKeys = Object.keys(mergedContext);
+const shouldWeUpdateRef = refKeys.length !== mergedKeys.length || mergedKeys.every(key => contextRef.current[key] !== mergedContext[key]);
+
+if (shouldWeUpdateRef) {
+contextRef.current = mergedContext;
+} // OK, now we're up-to-date, let's memoize our final result.
+
+
+const finalContext = contextRef.current;
+const finalRouterContext = useMemo(() => ({
+fetch,
+defaultContext: finalContext
+}), [fetch, finalContext]);
+return finalRouterContext;
+};
+
 /**
 * Validate a GQL operation response and extract the data.
 */
@@ -837,7 +1242,7 @@ const getGqlDataFromResponse = async response => {
|
|
|
837
1242
|
try {
|
|
838
1243
|
result = JSON.parse(bodyText);
|
|
839
1244
|
} catch (e) {
|
|
840
|
-
throw new
|
|
1245
|
+
throw new DataError("Failed to parse response", DataErrors.Parse, {
|
|
841
1246
|
metadata: {
|
|
842
1247
|
statusCode: response.status,
|
|
843
1248
|
bodyText
|
|
@@ -848,7 +1253,7 @@ const getGqlDataFromResponse = async response => {
|
|
|
848
1253
|
|
|
849
1254
|
|
|
850
1255
|
if (response.status >= 300) {
|
|
851
|
-
throw new
|
|
1256
|
+
throw new DataError("Response unsuccessful", DataErrors.Network, {
|
|
852
1257
|
metadata: {
|
|
853
1258
|
statusCode: response.status,
|
|
854
1259
|
result
|
|
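A consequence of this change is that transport-level problems now surface as `DataError`s: an unparsable body uses the `Parse` kind and a non-2xx status the `Network` kind, with the status code attached as metadata. A sketch of how a caller of the `useGql` fetch function might react (the `loadSafely` helper is made up):

```js
import {DataError} from "@khanacademy/wonder-blocks-data";

// gqlFetch is the function returned by useGql; operation is a GqlOperation.
const loadSafely = async (gqlFetch, operation) => {
    try {
        return await gqlFetch(operation);
    } catch (e) {
        if (e instanceof DataError) {
            // Parse and Network failures from getGqlDataFromResponse land
            // here; the offending status code travels in the error metadata.
            console.warn(`GQL request failed: ${e.message}`);
            return null;
        }
        throw e;
    }
};
```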
@@ -894,59 +1299,48 @@ const getGqlDataFromResponse = async response => {
  * Values in the partial context given to the returned fetch function will
  * only be included if they have a value other than undefined.
  */
-const useGql = () => {
+const useGql = (context = {}) => {
   // This hook only works if the `GqlRouter` has been used to setup context.
-  const gqlRouterContext =
-
-  if (gqlRouterContext == null) {
-    throw new GqlError("No GqlRouter", GqlErrors.Internal);
-  }
-
-  const {
-    fetch,
-    defaultContext
-  } = gqlRouterContext; // Let's memoize the gqlFetch function we create based off our context.
+  const gqlRouterContext = useGqlRouterContext(context); // Let's memoize the gqlFetch function we create based off our context.
   // That way, even if the context happens to change, if its values don't
   // we give the same function instance back to our callers instead of
   // making a new one. That then means they can safely use the return value
   // in hooks deps without fear of it triggering extra renders.

-  const gqlFetch =
+  const gqlFetch = useCallback((operation, options = Object.freeze({})) => {
+    const {
+      fetch,
+      defaultContext
+    } = gqlRouterContext;
     const {
       variables,
       context = {}
-    } = options;
-
-    // spreading would overwrite default context values with
-    // undefined if the partial context includes a value explicitly
-    // set to undefined. Instead, we use a map/reduce of keys.
-
-    const mergedContext = Object.keys(context).reduce((acc, key) => {
-      if (context[key] !== undefined) {
-        acc[key] = context[key];
-      }
-
-      return acc;
-    }, _extends({}, defaultContext)); // Invoke the fetch and extract the data.
-
-    return fetch(operation, variables, mergedContext).then(getGqlDataFromResponse, error => {
-      // Return null if the request was aborted.
-      // The only way to detect this reliably, it seems, is to
-      // check the error name and see if it's "AbortError" (this
-      // is also what Apollo does).
-      // Even then, it's reliant on the fetch supporting aborts.
-      if (error.name === "AbortError") {
-        return null;
-      } // Need to make sure we pass other errors along.
-
-
-    });
-  }, [fetch, defaultContext]);
+    } = options;
+    const finalContext = mergeGqlContext(defaultContext, context); // Invoke the fetch and extract the data.

+    return fetch(operation, variables, finalContext).then(getGqlDataFromResponse);
+  }, [gqlRouterContext]);
   return gqlFetch;
 };

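`useGql` now accepts an optional partial `GqlContext` that is merged over the `GqlRouter` defaults via `useGqlRouterContext`, and per-request context can still be supplied in the options of the returned `gqlFetch`; the AbortError-to-null handling visible in the 4.0.0 code is no longer part of `gqlFetch` itself. A sketch of the call pattern, with made-up context keys (see `exports.use-gql.stories.mdx` for the real shapes):

```js
import * as React from "react";
import {useGql} from "@khanacademy/wonder-blocks-data";

// `operation` is a GqlOperation (see types.gql-operation.stories.mdx);
// the context keys used here are illustrative only.
const useCourseFetch = (operation, courseId) => {
    // Hook-level overrides merge over the GqlRouter defaultContext;
    // undefined values are ignored and null values remove a default.
    const gqlFetch = useGql({locale: "es"});

    return React.useCallback(
        () =>
            gqlFetch(operation, {
                variables: {courseId},
                // Request-level context merges last, with the same rules.
                context: {preview: null},
            }),
        // gqlFetch keeps a stable identity while the merged context values
        // are unchanged, so it is safe to list in dependency arrays.
        [gqlFetch, operation, courseId],
    );
};
```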
+/**
+ * Initialize the hydration cache.
+ *
+ * @param {ResponseCache} source The cache content to use for initializing the
+ * cache.
+ * @throws {Error} If the cache is already initialized.
+ */
 const initializeCache = source => SsrCache.Default.initialize(source);
+/**
+ * Fulfill all tracked data requests.
+ *
+ * This is for use with the `TrackData` component during server-side rendering.
+ *
+ * @throws {Error} If executed outside of server-side rendering.
+ * @returns {Promise<void>} A promise that resolves when all tracked requests
+ * have been fulfilled.
+ */
+
 const fulfillAllDataRequests = () => {
   if (!Server.isServerSide()) {
     return Promise.reject(new Error("Data requests are not tracked when client-side"));
@@ -954,6 +1348,16 @@ const fulfillAllDataRequests = () => {

   return RequestTracker.Default.fulfillTrackedRequests();
 };
+/**
+ * Indicate if there are unfulfilled tracked requests.
+ *
+ * This is used in conjunction with `TrackData`.
+ *
+ * @throws {Error} If executed outside of server-side rendering.
+ * @returns {boolean} `true` if there are unfulfilled tracked requests;
+ * otherwise, `false`.
+ */
+
 const hasUnfulfilledRequests = () => {
   if (!Server.isServerSide()) {
     throw new Error("Data requests are not tracked when client-side");
@@ -961,7 +1365,21 @@ const hasUnfulfilledRequests = () => {

   return RequestTracker.Default.hasUnfulfilledRequests;
 };
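These helpers back the tracking flow described in `_overview_ssr_.stories.mdx`: render inside `TrackData`, fulfill what was tracked, and repeat until nothing is outstanding. A rough sketch under those assumptions (the `App` component and the single-function render loop are placeholders for a real SSR pipeline):

```js
import * as React from "react";
import {renderToString} from "react-dom/server";
import {
    TrackData,
    fulfillAllDataRequests,
    hasUnfulfilledRequests,
} from "@khanacademy/wonder-blocks-data";

const renderWithData = async (App) => {
    // First pass: requests made during render are tracked, not fulfilled.
    let html = renderToString(
        <TrackData>
            <App />
        </TrackData>,
    );

    // Fulfill and re-render while tracked requests remain, e.g. when
    // fulfilled data reveals further nested requests.
    while (hasUnfulfilledRequests()) {
        await fulfillAllDataRequests();
        html = renderToString(
            <TrackData>
                <App />
            </TrackData>,
        );
    }

    return html;
};
```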
+/**
+ * Remove the request identified from the cached hydration responses.
+ *
+ * @param {string} id The request ID of the response to remove from the cache.
+ */
+
 const removeFromCache = id => SsrCache.Default.remove(id);
+/**
+ * Remove all cached hydration responses that match the given predicate.
+ *
+ * @param {(id: string) => boolean} [predicate] The predicate to match against
+ * the cached hydration responses. If no predicate is provided, all cached
+ * hydration responses will be removed.
+ */
+
 const removeAllFromCache = predicate => SsrCache.Default.removeAll(predicate);

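`removeFromCache` and `removeAllFromCache` allow targeted eviction from the hydration cache. A small sketch with made-up request IDs (real IDs depend on how requests are keyed in your app):

```js
import {
    removeFromCache,
    removeAllFromCache,
} from "@khanacademy/wonder-blocks-data";

// Drop a single hydration response by its request ID.
removeFromCache("course:intro-js");

// Drop every hydration response whose ID matches a predicate; calling
// removeAllFromCache() with no predicate removes them all.
removeAllFromCache((id) => id.startsWith("course:"));
```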
-export { Data, GqlError, GqlErrors, GqlRouter,
+export { Data, DataError, DataErrors, GqlError, GqlErrors, GqlRouter, InterceptRequests, RequestFulfillment, ScopedInMemoryCache, SerializableInMemoryCache, Status, TrackData, WhenClientSide, clearSharedCache, fulfillAllDataRequests, hasUnfulfilledRequests, initializeCache, removeAllFromCache, removeFromCache, useCachedEffect, useGql, useHydratableEffect, useServerEffect, useSharedCache };
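Everything in the 6.0.0 export statement above is importable from the package root; for example, a non-exhaustive selection:

```js
import {
    Data,
    DataError,
    DataErrors,
    GqlRouter,
    InterceptRequests,
    Status,
    initializeCache,
    useGql,
    useHydratableEffect,
    useServerEffect,
} from "@khanacademy/wonder-blocks-data";
```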