@khanacademy/wonder-blocks-data 6.0.1 → 8.0.0

This diff shows the changes between publicly released versions of this package as they appear in the supported public registries. It is provided for informational purposes only.
Files changed (56)
  1. package/CHANGELOG.md +32 -0
  2. package/dist/es/index.js +329 -777
  3. package/dist/index.js +1196 -834
  4. package/package.json +3 -3
  5. package/src/__docs__/_overview_ssr_.stories.mdx +13 -13
  6. package/src/__docs__/exports.abort-inflight-requests.stories.mdx +20 -0
  7. package/src/__docs__/exports.data.stories.mdx +3 -3
  8. package/src/__docs__/{exports.fulfill-all-data-requests.stories.mdx → exports.fetch-tracked-requests.stories.mdx} +5 -5
  9. package/src/__docs__/exports.get-gql-request-id.stories.mdx +24 -0
  10. package/src/__docs__/{exports.has-unfulfilled-requests.stories.mdx → exports.has-tracked-requests-to-be-fetched.stories.mdx} +4 -4
  11. package/src/__docs__/exports.intialize-hydration-cache.stories.mdx +29 -0
  12. package/src/__docs__/exports.purge-caches.stories.mdx +23 -0
  13. package/src/__docs__/{exports.remove-all-from-cache.stories.mdx → exports.purge-hydration-cache.stories.mdx} +4 -4
  14. package/src/__docs__/{exports.clear-shared-cache.stories.mdx → exports.purge-shared-cache.stories.mdx} +4 -4
  15. package/src/__docs__/exports.track-data.stories.mdx +4 -4
  16. package/src/__docs__/exports.use-cached-effect.stories.mdx +7 -4
  17. package/src/__docs__/exports.use-gql.stories.mdx +1 -33
  18. package/src/__docs__/exports.use-server-effect.stories.mdx +14 -2
  19. package/src/__docs__/exports.use-shared-cache.stories.mdx +2 -2
  20. package/src/__docs__/types.fetch-policy.stories.mdx +44 -0
  21. package/src/__docs__/types.response-cache.stories.mdx +1 -1
  22. package/src/__tests__/generated-snapshot.test.js +5 -5
  23. package/src/components/__tests__/data.test.js +2 -6
  24. package/src/hooks/__tests__/use-cached-effect.test.js +341 -100
  25. package/src/hooks/__tests__/use-hydratable-effect.test.js +17 -34
  26. package/src/hooks/__tests__/use-server-effect.test.js +51 -0
  27. package/src/hooks/__tests__/use-shared-cache.test.js +6 -6
  28. package/src/hooks/use-cached-effect.js +169 -93
  29. package/src/hooks/use-hydratable-effect.js +12 -12
  30. package/src/hooks/use-server-effect.js +30 -5
  31. package/src/hooks/use-shared-cache.js +2 -2
  32. package/src/index.js +14 -78
  33. package/src/util/__tests__/get-gql-request-id.test.js +74 -0
  34. package/src/util/__tests__/graphql-document-node-parser.test.js +542 -0
  35. package/src/util/__tests__/hydration-cache-api.test.js +35 -0
  36. package/src/util/__tests__/purge-caches.test.js +29 -0
  37. package/src/util/__tests__/request-api.test.js +188 -0
  38. package/src/util/__tests__/request-fulfillment.test.js +42 -0
  39. package/src/util/__tests__/ssr-cache.test.js +10 -60
  40. package/src/util/__tests__/to-gql-operation.test.js +42 -0
  41. package/src/util/data-error.js +6 -0
  42. package/src/util/get-gql-request-id.js +50 -0
  43. package/src/util/graphql-document-node-parser.js +133 -0
  44. package/src/util/graphql-types.js +30 -0
  45. package/src/util/hydration-cache-api.js +28 -0
  46. package/src/util/purge-caches.js +15 -0
  47. package/src/util/request-api.js +66 -0
  48. package/src/util/request-fulfillment.js +32 -12
  49. package/src/util/request-tracking.js +1 -1
  50. package/src/util/ssr-cache.js +1 -21
  51. package/src/util/to-gql-operation.js +44 -0
  52. package/src/util/types.js +31 -0
  53. package/src/__docs__/exports.intialize-cache.stories.mdx +0 -29
  54. package/src/__docs__/exports.remove-from-cache.stories.mdx +0 -25
  55. package/src/__docs__/exports.request-fulfillment.stories.mdx +0 -36
  56. package/src/util/abort-error.js +0 -15
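
The renamed docs entries above (fulfill-all-data-requests → fetch-tracked-requests, has-unfulfilled-requests → has-tracked-requests-to-be-fetched, remove-all-from-cache → purge-hydration-cache, clear-shared-cache → purge-shared-cache, plus new abort-inflight-requests, purge-caches, and get-gql-request-id stories) track the renamed server-side tracking exports. A minimal sketch of that flow under the new names, assuming the corresponding functions are re-exported from the package root; renderToString, App, and dataFromServer are placeholders, not taken from the package:

import * as React from "react";
import {renderToString} from "react-dom/server";
import {
    TrackData,
    hasTrackedRequestsToBeFetched,
    fetchTrackedRequests,
    initializeHydrationCache,
} from "@khanacademy/wonder-blocks-data";

// Server-side: render inside TrackData so data requests are tracked, fetch
// whatever was tracked, then render again with the responses cached.
async function serverRender(App) {
    const element = React.createElement(TrackData, null, React.createElement(App));
    let html = renderToString(element);
    let data = {};
    if (hasTrackedRequestsToBeFetched()) {
        data = await fetchTrackedRequests();
        html = renderToString(element);
    }
    return {html, data};
}

// Client-side, before the hydrating render, seed the hydration cache:
// initializeHydrationCache(dataFromServer);
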
package/dist/es/index.js CHANGED
@@ -4,47 +4,17 @@ import _extends from '@babel/runtime/helpers/extends';
  import * as React from 'react';
  import { useContext, useRef, useMemo, useCallback } from 'react';
 
- /**
- * Error kinds for DataError.
- */
+ const FetchPolicy = require("flow-enums-runtime").Mirrored(["CacheBeforeNetwork", "CacheAndNetwork", "CacheOnly", "NetworkOnly"]);
+
  const DataErrors = Object.freeze({
- /**
- * The kind of error is not known.
- */
  Unknown: "Unknown",
-
- /**
- * The error is internal to the executing code.
- */
  Internal: "Internal",
-
- /**
- * There was a problem with the provided input.
- */
  InvalidInput: "InvalidInput",
-
- /**
- * A network error occurred.
- */
  Network: "Network",
-
- /**
- * Response could not be parsed.
- */
+ NotAllowed: "NotAllowed",
  Parse: "Parse",
-
- /**
- * An error that occurred during SSR and was hydrated from cache
- */
  Hydrated: "Hydrated"
  });
- /**
- * An error from the Wonder Blocks Data API.
- *
- * Errors of this type will have names of the format:
- * `${kind}DataError`
- */
-
  class DataError extends KindError {
  constructor(message, kind, {
  metadata,
@@ -59,27 +29,14 @@ class DataError extends KindError {
 
  }
 
- /**
- * Describe an in-memory cache.
- */
  class ScopedInMemoryCache {
  constructor(initialCache = {}) {
  this._cache = initialCache;
  }
- /**
- * Indicate if this cache is being used or not.
- *
- * When the cache has entries, returns `true`; otherwise, returns `false`.
- */
-
 
  get inUse() {
  return Object.keys(this._cache).length > 0;
  }
- /**
- * Set a value in the cache.
- */
-
 
  set(scope, id, value) {
  var _this$_cache$scope;
@@ -99,20 +56,12 @@ class ScopedInMemoryCache {
99
56
  this._cache[scope] = (_this$_cache$scope = this._cache[scope]) != null ? _this$_cache$scope : {};
100
57
  this._cache[scope][id] = value;
101
58
  }
102
- /**
103
- * Retrieve a value from the cache.
104
- */
105
-
106
59
 
107
60
  get(scope, id) {
108
61
  var _this$_cache$scope$id, _this$_cache$scope2;
109
62
 
110
63
  return (_this$_cache$scope$id = (_this$_cache$scope2 = this._cache[scope]) == null ? void 0 : _this$_cache$scope2[id]) != null ? _this$_cache$scope$id : null;
111
64
  }
112
- /**
113
- * Purge an item from the cache.
114
- */
115
-
116
65
 
117
66
  purge(scope, id) {
118
67
  var _this$_cache$scope3;
@@ -127,12 +76,6 @@ class ScopedInMemoryCache {
127
76
  delete this._cache[scope];
128
77
  }
129
78
  }
130
- /**
131
- * Purge a scope of items that match the given predicate.
132
- *
133
- * If the predicate is omitted, then all items in the scope are purged.
134
- */
135
-
136
79
 
137
80
  purgeScope(scope, predicate) {
138
81
  if (!this._cache[scope]) {
@@ -154,12 +97,6 @@ class ScopedInMemoryCache {
154
97
  delete this._cache[scope];
155
98
  }
156
99
  }
157
- /**
158
- * Purge all items from the cache that match the given predicate.
159
- *
160
- * If the predicate is omitted, then all items in the cache are purged.
161
- */
162
-
163
100
 
164
101
  purgeAll(predicate) {
165
102
  if (predicate == null) {
@@ -174,9 +111,6 @@ class ScopedInMemoryCache {
174
111
 
175
112
  }
176
113
 
177
- /**
178
- * Describe a serializable in-memory cache.
179
- */
180
114
  class SerializableInMemoryCache extends ScopedInMemoryCache {
181
115
  constructor(initialCache = {}) {
182
116
  try {
@@ -185,18 +119,10 @@ class SerializableInMemoryCache extends ScopedInMemoryCache {
185
119
  throw new DataError(`An error occurred trying to initialize from a response cache snapshot: ${e}`, DataErrors.InvalidInput);
186
120
  }
187
121
  }
188
- /**
189
- * Set a value in the cache.
190
- */
191
-
192
122
 
193
123
  set(scope, id, value) {
194
124
  super.set(scope, id, Object.freeze(clone(value)));
195
125
  }
196
- /**
197
- * Clone the cache.
198
- */
199
-
200
126
 
201
127
  clone() {
202
128
  try {
@@ -211,18 +137,8 @@ class SerializableInMemoryCache extends ScopedInMemoryCache {
211
137
  }
212
138
 
213
139
  const DefaultScope$2 = "default";
214
- /**
215
- * The default instance is stored here.
216
- * It's created below in the Default() static property.
217
- */
218
140
 
219
141
  let _default$2;
220
- /**
221
- * Implements the response cache.
222
- *
223
- * INTERNAL USE ONLY
224
- */
225
-
226
142
 
227
143
  class SsrCache {
228
144
  static get Default() {
@@ -240,7 +156,6 @@ class SsrCache {
240
156
  }
241
157
 
242
158
  this._hydrationCache = new SerializableInMemoryCache({
243
- // $FlowIgnore[incompatible-call]
244
159
  [DefaultScope$2]: source
245
160
  });
246
161
  };
@@ -259,60 +174,29 @@ class SsrCache {
259
174
  this.getEntry = id => {
260
175
  var _this$_ssrOnlyCache$g, _this$_ssrOnlyCache;
261
176
 
262
- // Get the cached entry for this value.
263
- // We first look in the ssr cache and then the hydration cache.
264
- const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope$2, id); // If we are not server-side and we hydrated something, let's clear
265
- // that from the hydration cache to save memory.
177
+ const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope$2, id);
266
178
 
267
179
  if (this._ssrOnlyCache == null && internalEntry != null) {
268
- // We now delete this from our hydration cache as we don't need it.
269
- // This does mean that if another handler of the same type but
270
- // without some sort of linked cache won't get the value, but
271
- // that's not an expected use-case. If two different places use the
272
- // same handler and options (i.e. the same request), then the
273
- // handler should cater to that to ensure they share the result.
274
180
  this._hydrationCache.purge(DefaultScope$2, id);
275
- } // Getting the typing right between the in-memory cache and this
276
- // is hard. Just telling flow it's OK.
277
- // $FlowIgnore[incompatible-return]
278
-
181
+ }
279
182
 
280
183
  return internalEntry;
281
184
  };
282
185
 
283
- this.remove = id => {
284
- var _this$_ssrOnlyCache$p, _this$_ssrOnlyCache2;
285
-
286
- // NOTE(somewhatabstract): We could invoke removeAll with a predicate
287
- // to match the key of the entry we're removing, but that's an
288
- // inefficient way to remove a single item, so let's not do that.
289
- // Delete the entry from the appropriate cache.
290
- return this._hydrationCache.purge(DefaultScope$2, id) || ((_this$_ssrOnlyCache$p = (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purge(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$p : false);
291
- };
292
-
293
- this.removeAll = predicate => {
294
- var _this$_ssrOnlyCache3;
186
+ this.purgeData = predicate => {
187
+ var _this$_ssrOnlyCache2;
295
188
 
296
- const realPredicate = predicate ? // We know what we're putting into the cache so let's assume it
297
- // conforms.
298
- // $FlowIgnore[incompatible-call]
299
- (_, key, cachedEntry) => predicate(key, cachedEntry) : undefined; // Apply the predicate to what we have in our caches.
189
+ const realPredicate = predicate ? (_, key, cachedEntry) => predicate(key, cachedEntry) : undefined;
300
190
 
301
191
  this._hydrationCache.purgeAll(realPredicate);
302
192
 
303
- (_this$_ssrOnlyCache3 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache3.purgeAll(realPredicate);
193
+ (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purgeAll(realPredicate);
304
194
  };
305
195
 
306
196
  this.cloneHydratableData = () => {
307
197
  var _cache$DefaultScope;
308
198
 
309
- // We return our hydration cache only.
310
- const cache = this._hydrationCache.clone(); // If we're empty, we still want to return an object, so we default
311
- // to an empty object.
312
- // We only need the default scope out of our scoped in-memory cache.
313
- // We know that it conforms to our expectations.
314
- // $FlowIgnore[incompatible-return]
315
-
199
+ const cache = this._hydrationCache.clone();
316
200
 
317
201
  return (_cache$DefaultScope = cache[DefaultScope$2]) != null ? _cache$DefaultScope : {};
318
202
  };
@@ -325,36 +209,24 @@ class SsrCache {
325
209
  const frozenEntry = Object.freeze(entry);
326
210
 
327
211
  if (Server.isServerSide()) {
328
- // We are server-side.
329
- // We need to store this value.
330
212
  if (hydrate) {
331
213
  this._hydrationCache.set(DefaultScope$2, id, frozenEntry);
332
214
  } else {
333
- var _this$_ssrOnlyCache4;
215
+ var _this$_ssrOnlyCache3;
334
216
 
335
- // Usually, when server-side, this cache will always be present.
336
- // We do fake server-side in our doc example though, when it
337
- // won't be.
338
- (_this$_ssrOnlyCache4 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache4.set(DefaultScope$2, id, frozenEntry);
217
+ (_this$_ssrOnlyCache3 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache3.set(DefaultScope$2, id, frozenEntry);
339
218
  }
340
219
  }
341
220
 
342
221
  return frozenEntry;
343
222
  }
344
- /**
345
- * Initialize the cache from a given cache state.
346
- *
347
- * This can only be called if the cache is not already in use.
348
- */
349
-
350
223
 
351
224
  }
352
225
 
353
- let _default$1;
354
- /**
355
- * This fulfills a request, making sure that in-flight requests are shared.
356
- */
226
+ const initializeHydrationCache = source => SsrCache.Default.initialize(source);
227
+ const purgeHydrationCache = predicate => SsrCache.Default.purgeData(predicate);
357
228
 
229
+ let _default$1;
358
230
 
359
231
  class RequestFulfillment {
360
232
  constructor() {
@@ -364,18 +236,11 @@ class RequestFulfillment {
364
236
  handler,
365
237
  hydrate: _hydrate = true
366
238
  }) => {
367
- /**
368
- * If we have an inflight request, we'll provide that.
369
- */
370
239
  const inflight = this._requests[id];
371
240
 
372
241
  if (inflight) {
373
242
  return inflight;
374
243
  }
375
- /**
376
- * We don't have an inflight request, so let's set one up.
377
- */
378
-
379
244
 
380
245
  const request = handler().then(data => ({
381
246
  status: "success",
@@ -385,13 +250,7 @@ class RequestFulfillment {
385
250
  metadata: {
386
251
  unexpectedError: error
387
252
  }
388
- }) : error; // Return aborted result if the request was aborted.
389
- // The only way to detect this reliably, it seems, is to
390
- // check the error name and see if it's "AbortError" (this
391
- // is also what Apollo does).
392
- // Even then, it's reliant on the handler supporting aborts.
393
- // TODO(somewhatabstract, FEI-4276): Add first class abort
394
- // support to the handler API.
253
+ }) : error;
395
254
 
396
255
  if (actualError.name === "AbortError") {
397
256
  return {
@@ -405,11 +264,18 @@ class RequestFulfillment {
405
264
  };
406
265
  }).finally(() => {
407
266
  delete this._requests[id];
408
- }); // Store the request in our cache.
409
-
267
+ });
410
268
  this._requests[id] = request;
411
269
  return request;
412
270
  };
271
+
272
+ this.abort = id => {
273
+ delete this._requests[id];
274
+ };
275
+
276
+ this.abortAll = () => {
277
+ Object.keys(this._requests).forEach(id => this.abort(id));
278
+ };
413
279
  }
414
280
 
415
281
  static get Default() {
@@ -422,24 +288,9 @@ class RequestFulfillment {
422
288
 
423
289
  }
424
290
 
425
- /**
426
- * Used to inject our tracking function into the render framework.
427
- *
428
- * INTERNAL USE ONLY
429
- */
430
- const TrackerContext = new React.createContext(null);
431
- /**
432
- * The default instance is stored here.
433
- * It's created below in the Default() static property.
434
- */
291
+ const TrackerContext = React.createContext(null);
435
292
 
436
293
  let _default;
437
- /**
438
- * Implements request tracking and fulfillment.
439
- *
440
- * INTERNAL USE ONLY
441
- */
442
-
443
294
 
444
295
  class RequestTracker {
445
296
  static get Default() {
@@ -449,18 +300,11 @@ class RequestTracker {
449
300
 
450
301
  return _default;
451
302
  }
452
- /**
453
- * These are the caches for tracked requests, their handlers, and responses.
454
- */
455
-
456
303
 
457
304
  constructor(responseCache = undefined) {
458
305
  this._trackedRequests = {};
459
306
 
460
307
  this.trackDataRequest = (id, handler, hydrate) => {
461
- /**
462
- * If we don't already have this tracked, then let's track it.
463
- */
464
308
  if (this._trackedRequests[id] == null) {
465
309
  this._trackedRequests[id] = {
466
310
  handler,
@@ -487,144 +331,125 @@ class RequestTracker {
487
331
  promises.push(this._requestFulfillment.fulfill(requestKey, _extends({}, options)).then(result => {
488
332
  switch (result.status) {
489
333
  case "success":
490
- /**
491
- * Let's cache the data!
492
- *
493
- * NOTE: This only caches when we're
494
- * server side.
495
- */
496
334
  cacheData(requestKey, result.data, options.hydrate);
497
335
  break;
498
336
 
499
337
  case "error":
500
- /**
501
- * Let's cache the error!
502
- *
503
- * NOTE: This only caches when we're
504
- * server side.
505
- */
506
338
  cacheError(requestKey, result.error, options.hydrate);
507
339
  break;
508
- } // For status === "loading":
509
- // Could never get here unless we wrote
510
- // the code wrong. Rather than bloat
511
- // code with useless error, just ignore.
512
- // For status === "aborted":
513
- // We won't cache this.
514
- // We don't hydrate aborted requests,
515
- // so the client would just see them
516
- // as unfulfilled data.
517
-
340
+ }
518
341
 
519
342
  return;
520
343
  }));
521
344
  } catch (e) {
522
- // This captures if there are problems in the code that
523
- // begins the requests.
524
345
  promises.push(Promise.resolve(cacheError(requestKey, e, options.hydrate)));
525
346
  }
526
347
  }
527
- /**
528
- * Clear out our tracked info.
529
- *
530
- * We call this now for a simpler API.
531
- *
532
- * If we reset the tracked calls after all promises resolve, any
533
- * request tracking done while promises are in flight would be lost.
534
- *
535
- * If we don't reset at all, then we have to expose the `reset` call
536
- * for consumers to use, or they'll only ever be able to accumulate
537
- * more and more tracked requests, having to fulfill them all every
538
- * time.
539
- *
540
- * Calling it here means we can have multiple "track -> request"
541
- * cycles in a row and in an easy to reason about manner.
542
- */
543
-
544
348
 
545
349
  this.reset();
546
- /**
547
- * Let's wait for everything to fulfill, and then clone the cached data.
548
- */
549
-
550
350
  return Promise.all(promises).then(() => this._responseCache.cloneHydratableData());
551
351
  };
552
352
 
553
353
  this._responseCache = responseCache || SsrCache.Default;
554
354
  this._requestFulfillment = new RequestFulfillment();
555
355
  }
556
- /**
557
- * Track a request.
558
- *
559
- * This method caches a request and its handler for use during server-side
560
- * rendering to allow us to fulfill requests before producing a final render.
561
- */
562
356
 
563
-
564
- /**
565
- * Indicates if we have requests waiting to be fulfilled.
566
- */
567
357
  get hasUnfulfilledRequests() {
568
358
  return Object.keys(this._trackedRequests).length > 0;
569
359
  }
570
- /**
571
- * Initiate fulfillment of all tracked requests.
572
- *
573
- * This loops over the requests that were tracked using TrackData, and asks
574
- * the respective handlers to fulfill those requests in the order they were
575
- * tracked.
576
- *
577
- * Calling this method marks tracked requests as fulfilled; requests are
578
- * removed from the list of tracked requests by calling this method.
579
- *
580
- * @returns {Promise<ResponseCache>} The promise of the data that was
581
- * cached as a result of fulfilling the tracked requests.
582
- */
583
-
584
360
 
585
361
  }
586
362
 
587
- /**
588
- * Component to enable data request tracking when server-side rendering.
589
- */
363
+ const SSRCheck = () => {
364
+ if (Server.isServerSide()) {
365
+ return null;
366
+ }
367
+
368
+ if (process.env.NODE_ENV === "production") {
369
+ return new DataError("No CSR tracking", DataErrors.NotAllowed);
370
+ } else {
371
+ return new DataError("Data requests are not tracked for fulfillment when when client-side", DataErrors.NotAllowed);
372
+ }
373
+ };
374
+
375
+ const fetchTrackedRequests = () => {
376
+ const ssrCheck = SSRCheck();
377
+
378
+ if (ssrCheck != null) {
379
+ return Promise.reject(ssrCheck);
380
+ }
381
+
382
+ return RequestTracker.Default.fulfillTrackedRequests();
383
+ };
384
+ const hasTrackedRequestsToBeFetched = () => {
385
+ const ssrCheck = SSRCheck();
386
+
387
+ if (ssrCheck != null) {
388
+ throw ssrCheck;
389
+ }
390
+
391
+ return RequestTracker.Default.hasUnfulfilledRequests;
392
+ };
393
+ const abortInflightRequests = () => {
394
+ RequestFulfillment.Default.abortAll();
395
+ };
396
+
397
+ const cache$1 = new ScopedInMemoryCache();
398
+ const purgeSharedCache = (scope = "") => {
399
+ if (scope && typeof scope === "string") {
400
+ cache$1.purgeScope(scope);
401
+ } else {
402
+ cache$1.purgeAll();
403
+ }
404
+ };
405
+ const useSharedCache = (id, scope, initialValue) => {
406
+ if (!id || typeof id !== "string") {
407
+ throw new DataError("id must be a non-empty string", DataErrors.InvalidInput);
408
+ }
409
+
410
+ if (!scope || typeof scope !== "string") {
411
+ throw new DataError("scope must be a non-empty string", DataErrors.InvalidInput);
412
+ }
413
+
414
+ const cacheValue = React.useCallback(value => value == null ? cache$1.purge(scope, id) : cache$1.set(scope, id, value), [id, scope]);
415
+ let currentValue = cache$1.get(scope, id);
416
+
417
+ if (currentValue == null && initialValue !== undefined) {
418
+ const value = typeof initialValue === "function" ? initialValue() : initialValue;
419
+
420
+ if (value != null) {
421
+ cacheValue(value);
422
+ currentValue = value;
423
+ }
424
+ }
425
+
426
+ return [currentValue, cacheValue];
427
+ };
428
+
429
+ const purgeCaches = () => {
430
+ purgeSharedCache();
431
+ purgeHydrationCache();
432
+ };
433
+
590
434
  class TrackData extends React.Component {
591
435
  render() {
592
436
  if (!Server.isServerSide()) {
593
437
  throw new Error("This component is not for use during client-side rendering");
594
438
  }
595
439
 
596
- return /*#__PURE__*/React.createElement(TrackerContext.Provider, {
440
+ return React.createElement(TrackerContext.Provider, {
597
441
  value: RequestTracker.Default.trackDataRequest
598
442
  }, this.props.children);
599
443
  }
600
444
 
601
445
  }
602
446
 
603
- /**
604
- * Simple implementation to represent aborting.
605
- *
606
- * Other frameworks may provide this too, so we won't be sharing this with
607
- * the outside world. It's just a utility for test and internal use whenever
608
- * we need to represent the concept of aborted things.
609
- */
610
- class AbortError extends Error {
611
- constructor(message) {
612
- super(message);
613
- this.name = "AbortError";
614
- }
615
-
616
- }
617
-
618
447
  const loadingStatus = Object.freeze({
619
448
  status: "loading"
620
449
  });
621
450
  const abortedStatus = Object.freeze({
622
451
  status: "aborted"
623
452
  });
624
- /**
625
- * Create Result<TData> instances with specific statuses.
626
- */
627
-
628
453
  const Status = Object.freeze({
629
454
  loading: () => loadingStatus,
630
455
  aborted: () => abortedStatus,
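// --- Illustrative example (editor's sketch, not part of the diff) ------------
// The shared in-memory cache helpers defined in the hunk above: useSharedCache
// returns a [value, setValue] pair backed by a module-level ScopedInMemoryCache,
// and purgeCaches clears both the shared cache and the hydration cache.
// Assumes both are public exports (each gains a docs story in this release);
// the "preferences" scope and Settings component are made up.
import * as React from "react";
import {useSharedCache, purgeCaches} from "@khanacademy/wonder-blocks-data";

function Settings() {
    // Any component using the same id and scope reads the same cached value.
    // The third argument seeds the cache only when there is no value yet.
    const [theme, setTheme] = useSharedCache("theme", "preferences", "light");

    // Note: writing to the cache does not itself trigger a re-render; other
    // readers pick the new value up on their next render.
    return React.createElement(
        "button",
        {onClick: () => setTheme("dark")},
        `Theme: ${theme}`,
    );
}

// For example in test teardown, drop the shared and hydration caches together:
// afterEach(() => purgeCaches());
// -----------------------------------------------------------------------------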
@@ -638,11 +463,7 @@ const Status = Object.freeze({
638
463
  })
639
464
  });
640
465
 
641
- /**
642
- * Turns a cache entry into a stateful result.
643
- */
644
466
  const resultFromCachedResponse = cacheEntry => {
645
- // No cache entry means no result to be hydrated.
646
467
  if (cacheEntry == null) {
647
468
  return null;
648
469
  }
@@ -653,334 +474,163 @@ const resultFromCachedResponse = cacheEntry => {
653
474
  } = cacheEntry;
654
475
 
655
476
  if (error != null) {
656
- // Let's hydrate the error. We don't persist everything about the
657
- // original error on the server, hence why we only superficially
658
- // hydrate it to a GqlHydratedError.
659
477
  return Status.error(new DataError(error, DataErrors.Hydrated));
660
478
  }
661
479
 
662
480
  if (data != null) {
663
481
  return Status.success(data);
664
- } // We shouldn't get here since we don't actually cache null data.
665
-
482
+ }
666
483
 
667
484
  return Status.aborted();
668
485
  };
669
486
 
670
- /**
671
- * InterceptContext defines a map from request ID to interception methods.
672
- *
673
- * INTERNAL USE ONLY
674
- */
675
- const InterceptContext = /*#__PURE__*/React.createContext([]);
676
-
677
- /**
678
- * Allow request handling to be intercepted.
679
- *
680
- * Hook to take a uniquely identified request handler and return a
681
- * method that will support request interception from the InterceptRequest
682
- * component.
683
- *
684
- * If you want request interception to be supported with `useServerEffect` or
685
- * any client-side effect that uses the handler, call this first to generate
686
- * an intercepted handler, and then invoke `useServerEffect` (or other things)
687
- * with that intercepted handler.
688
- */
689
- const useRequestInterception = (requestId, handler) => {
690
- // Get the interceptors that have been registered.
691
- const interceptors = React.useContext(InterceptContext); // Now, we need to create a new handler that will check if the
692
- // request is intercepted before ultimately calling the original handler
693
- // if nothing intercepted it.
694
- // We memoize this so that it only changes if something related to it
695
- // changes.
487
+ const InterceptContext = React.createContext([]);
696
488
 
489
+ const useRequestInterception = (requestId, handler) => {
490
+ const interceptors = React.useContext(InterceptContext);
697
491
  const interceptedHandler = React.useCallback(() => {
698
- // Call the interceptors from closest to furthest.
699
- // If one returns a non-null result, then we keep that.
700
492
  const interceptResponse = interceptors.reduceRight((prev, interceptor) => {
701
493
  if (prev != null) {
702
494
  return prev;
703
495
  }
704
496
 
705
497
  return interceptor(requestId);
706
- }, null); // If nothing intercepted this request, invoke the original handler.
707
- // NOTE: We can't guarantee all interceptors return the same type
708
- // as our handler, so how can flow know? Let's just suppress that.
709
- // $FlowFixMe[incompatible-return]
710
-
498
+ }, null);
711
499
  return interceptResponse != null ? interceptResponse : handler();
712
500
  }, [handler, interceptors, requestId]);
713
501
  return interceptedHandler;
714
502
  };
715
503
 
716
- /**
717
- * Hook to perform an asynchronous action during server-side rendering.
718
- *
719
- * This hook registers an asynchronous action to be performed during
720
- * server-side rendering. The action is performed only once, and the result
721
- * is cached against the given identifier so that subsequent calls return that
722
- * cached result allowing components to render more of the component.
723
- *
724
- * This hook requires the Wonder Blocks Data functionality for resolving
725
- * pending requests, as well as support for the hydration cache to be
726
- * embedded into a page so that the result can by hydrated (if that is a
727
- * requirement).
728
- *
729
- * The asynchronous action is never invoked on the client-side.
730
- */
731
- const useServerEffect = (requestId, handler, hydrate = true) => {
732
- // Plug in to the request interception framework for code that wants
733
- // to use that.
734
- const interceptedHandler = useRequestInterception(requestId, handler); // If we're server-side or hydrating, we'll have a cached entry to use.
735
- // So we get that and use it to initialize our state.
736
- // This works in both hydration and SSR because the very first call to
737
- // this will have cached data in those cases as it will be present on the
738
- // initial render - and subsequent renders on the client it will be null.
739
-
740
- const cachedResult = SsrCache.Default.getEntry(requestId); // We only track data requests when we are server-side and we don't
741
- // already have a result, as given by the cachedData (which is also the
742
- // initial value for the result state).
743
-
504
+ const useServerEffect = (requestId, handler, options = {}) => {
505
+ const {
506
+ hydrate = true,
507
+ skip = false
508
+ } = options;
509
+ const interceptedHandler = useRequestInterception(requestId, handler);
510
+ const cachedResult = SsrCache.Default.getEntry(requestId);
744
511
  const maybeTrack = useContext(TrackerContext);
745
512
 
746
- if (cachedResult == null && Server.isServerSide()) {
513
+ if (!skip && cachedResult == null && Server.isServerSide()) {
747
514
  maybeTrack == null ? void 0 : maybeTrack(requestId, interceptedHandler, hydrate);
748
- } // A null result means there was no result to hydrate.
749
-
515
+ }
750
516
 
751
517
  return cachedResult == null ? null : resultFromCachedResponse(cachedResult);
752
518
  };
753
519
 
754
- /**
755
- * This is the cache.
756
- * It's incredibly complex.
757
- * Very in-memory. So cache. Such complex. Wow.
758
- */
759
- const cache = new ScopedInMemoryCache();
760
- /**
761
- * Clear the in-memory cache or a single scope within it.
762
- */
763
-
764
- const clearSharedCache = (scope = "") => {
765
- // If we have a valid scope (empty string is falsy), then clear that scope.
766
- if (scope && typeof scope === "string") {
767
- cache.purgeScope(scope);
768
- } else {
769
- // Just reset the object. This should be sufficient.
770
- cache.purgeAll();
771
- }
772
- };
773
- /**
774
- * Hook to retrieve data from and store data in an in-memory cache.
775
- *
776
- * @returns {[?ReadOnlyCacheValue, CacheValueFn]}
777
- * Returns an array containing the current cache entry (or undefined), a
778
- * function to set the cache entry (passing null or undefined to this function
779
- * will delete the entry).
780
- *
781
- * To clear a single scope within the cache or the entire cache,
782
- * the `clearScopedCache` export is available.
783
- *
784
- * NOTE: Unlike useState or useReducer, we don't automatically update folks
785
- * if the value they reference changes. We might add it later (if we need to),
786
- * but the likelihood here is that things won't be changing in this cache in a
787
- * way where we would need that. If we do (and likely only in specific
788
- * circumstances), we should consider adding a simple boolean useState that can
789
- * be toggled to cause a rerender whenever the referenced cached data changes
790
- * so that callers can re-render on cache changes. However, we should make
791
- * sure this toggling is optional - or we could use a callback argument, to
792
- * achieve this on an as-needed basis.
793
- */
794
-
795
- const useSharedCache = (id, scope, initialValue) => {
796
- // Verify arguments.
797
- if (!id || typeof id !== "string") {
798
- throw new DataError("id must be a non-empty string", DataErrors.InvalidInput);
799
- }
800
-
801
- if (!scope || typeof scope !== "string") {
802
- throw new DataError("scope must be a non-empty string", DataErrors.InvalidInput);
803
- } // Memoize our APIs.
804
- // This one allows callers to set or replace the cached value.
520
+ const DefaultScope$1 = "useCachedEffect";
521
+ const useCachedEffect = (requestId, handler, options = {}) => {
522
+ var _ref;
805
523
 
524
+ const {
525
+ fetchPolicy = FetchPolicy.CacheBeforeNetwork,
526
+ skip: hardSkip = false,
527
+ retainResultOnChange = false,
528
+ onResultChanged,
529
+ scope = DefaultScope$1
530
+ } = options;
531
+ const interceptedHandler = useRequestInterception(requestId, handler);
532
+ const [mostRecentResult, setMostRecentResult] = useSharedCache(requestId, scope);
533
+ const forceUpdate = useForceUpdate();
534
+ const networkResultRef = React.useRef();
535
+ const currentRequestRef = React.useRef();
536
+ const fetchRequest = React.useMemo(() => {
537
+ var _currentRequestRef$cu;
806
538
 
807
- const cacheValue = React.useCallback(value => value == null ? cache.purge(scope, id) : cache.set(scope, id, value), [id, scope]); // We don't memo-ize the current value, just in case the cache was updated
808
- // since our last run through. Also, our cache does not know what type it
809
- // stores, so we have to cast it to the type we're exporting. This is a
810
- // dev time courtesy, rather than a runtime thing.
811
- // $FlowIgnore[incompatible-type]
539
+ (_currentRequestRef$cu = currentRequestRef.current) == null ? void 0 : _currentRequestRef$cu.cancel();
540
+ currentRequestRef.current = null;
541
+ networkResultRef.current = null;
812
542
 
813
- let currentValue = cache.get(scope, id); // If we have an initial value, we need to add it to the cache
814
- // and use it as our current value.
543
+ const fetchFn = () => {
544
+ var _currentRequestRef$cu2, _currentRequestRef$cu3;
815
545
 
816
- if (currentValue == null && initialValue !== undefined) {
817
- // Get the initial value.
818
- const value = typeof initialValue === "function" ? initialValue() : initialValue;
546
+ if (fetchPolicy === FetchPolicy.CacheOnly) {
547
+ throw new DataError("Cannot fetch with CacheOnly policy", DataErrors.NotAllowed);
548
+ }
819
549
 
820
- if (value != null) {
821
- // Update the cache.
822
- cacheValue(value); // Make sure we return this value as our current value.
550
+ const request = RequestFulfillment.Default.fulfill(`${requestId}|${scope}`, {
551
+ handler: interceptedHandler
552
+ });
823
553
 
824
- currentValue = value;
825
- }
826
- } // Now we have everything, let's return it.
554
+ if (request === ((_currentRequestRef$cu2 = currentRequestRef.current) == null ? void 0 : _currentRequestRef$cu2.request)) {
555
+ return;
556
+ }
827
557
 
558
+ networkResultRef.current = null;
559
+ (_currentRequestRef$cu3 = currentRequestRef.current) == null ? void 0 : _currentRequestRef$cu3.cancel();
560
+ let cancel = false;
561
+ request.then(result => {
562
+ currentRequestRef.current = null;
828
563
 
829
- return [currentValue, cacheValue];
830
- };
564
+ if (cancel) {
565
+ return;
566
+ }
831
567
 
832
- const DefaultScope$1 = "useCachedEffect";
833
- /**
834
- * Hook to execute and cache an async operation on the client.
835
- *
836
- * This hook executes the given handler on the client if there is no
837
- * cached result to use.
838
- *
839
- * Results are cached so they can be shared between equivalent invocations.
840
- * In-flight requests are also shared, so that concurrent calls will
841
- * behave as one might exect. Cache updates invoked by one hook instance
842
- * do not trigger renders in components that use the same requestID; however,
843
- * that should not matter since concurrent requests will share the same
844
- * in-flight request, and subsequent renders will grab from the cache.
845
- *
846
- * Once the request has been tried once and a non-loading response has been
847
- * cached, the request will not executed made again.
848
- */
568
+ setMostRecentResult(result);
569
+ networkResultRef.current = result;
849
570
 
850
- const useCachedEffect = (requestId, handler, options = {}) => {
851
- const {
852
- skip: hardSkip = false,
853
- retainResultOnChange = false,
854
- onResultChanged,
855
- scope = DefaultScope$1
856
- } = options; // Plug in to the request interception framework for code that wants
857
- // to use that.
571
+ if (onResultChanged != null) {
572
+ onResultChanged(result);
573
+ } else {
574
+ forceUpdate();
575
+ }
858
576
 
859
- const interceptedHandler = useRequestInterception(requestId, handler); // Instead of using state, which would be local to just this hook instance,
860
- // we use a shared in-memory cache.
577
+ return;
578
+ });
579
+ currentRequestRef.current = {
580
+ requestId,
581
+ request,
861
582
 
862
- const [mostRecentResult, setMostRecentResult] = useSharedCache(requestId, // The key of the cached item
863
- scope // The scope of the cached items
864
- // No default value. We don't want the loading status there; to ensure
865
- // that all calls when the request is in-flight will update once that
866
- // request is done, we want the cache to be empty until that point.
867
- ); // Build a function that will update the cache and either invoke the
868
- // callback provided in options, or force an update.
583
+ cancel() {
584
+ cancel = true;
585
+ RequestFulfillment.Default.abort(requestId);
586
+ }
869
587
 
870
- const forceUpdate = useForceUpdate();
871
- const setCacheAndNotify = React.useCallback(value => {
872
- setMostRecentResult(value); // If our caller provided a cacheUpdated callback, we use that.
873
- // Otherwise, we toggle our little state update.
588
+ };
589
+ };
874
590
 
875
- if (onResultChanged != null) {
876
- onResultChanged(value);
877
- } else {
878
- forceUpdate();
879
- }
880
- }, [setMostRecentResult, onResultChanged, forceUpdate]); // We need to trigger a re-render when the request ID changes as that
881
- // indicates its a different request. We don't default the current id as
882
- // this is a proxy for the first render, where we will make the request
883
- // if we don't already have a cached value.
884
-
885
- const requestIdRef = React.useRef();
886
- const previousRequestId = requestIdRef.current; // Calculate our soft skip state.
887
- // Soft skip changes are things that should skip the effect if something
888
- // else triggers the effect to run, but should not itself trigger the effect
889
- // (which would cancel a previous invocation).
890
-
891
- const softSkip = React.useMemo(() => {
892
- if (requestId === previousRequestId) {
893
- // If the requestId is unchanged, it means we already rendered at
894
- // least once and so we already made the request at least once. So
895
- // we can bail out right here.
896
- return true;
897
- } // If we already have a cached value, we're going to skip.
898
-
899
-
900
- if (mostRecentResult != null) {
901
- return true;
591
+ return fetchFn;
592
+ }, [requestId, onResultChanged, forceUpdate, setMostRecentResult, fetchPolicy]);
593
+ const requestIdRef = React.useRef(requestId);
594
+ const shouldFetch = React.useMemo(() => {
595
+ if (hardSkip) {
596
+ return false;
902
597
  }
903
598
 
904
- return false;
905
- }, [requestId, previousRequestId, mostRecentResult]); // So now we make sure the client-side request happens per our various
906
- // options.
599
+ switch (fetchPolicy) {
600
+ case FetchPolicy.CacheOnly:
601
+ return false;
907
602
 
603
+ case FetchPolicy.CacheBeforeNetwork:
604
+ return mostRecentResult == null || requestId !== requestIdRef.current;
605
+
606
+ case FetchPolicy.CacheAndNetwork:
607
+ case FetchPolicy.NetworkOnly:
608
+ return networkResultRef.current == null;
609
+ }
610
+ }, [requestId, mostRecentResult, fetchPolicy, hardSkip]);
611
+ requestIdRef.current = requestId;
908
612
  React.useEffect(() => {
909
- let cancel = false; // We don't do anything if we've been told to hard skip (a hard skip
910
- // means we should cancel the previous request and is therefore a
911
- // dependency on that), or we have determined we have already done
912
- // enough and can soft skip (a soft skip doesn't trigger the request
913
- // to re-run; we don't want to cancel the in progress effect if we're
914
- // soft skipping.
915
-
916
- if (hardSkip || softSkip) {
613
+ if (!shouldFetch) {
917
614
  return;
918
- } // If we got here, we're going to perform the request.
919
- // Let's make sure our ref is set to the most recent requestId.
920
-
921
-
922
- requestIdRef.current = requestId; // OK, we've done all our checks and things. It's time to make the
923
- // request. We use our request fulfillment here so that in-flight
924
- // requests are shared.
925
- // NOTE: Our request fulfillment handles the error cases here.
926
- // Catching shouldn't serve a purpose.
927
- // eslint-disable-next-line promise/catch-or-return
928
-
929
- RequestFulfillment.Default.fulfill(requestId, {
930
- handler: interceptedHandler
931
- }).then(result => {
932
- if (cancel) {
933
- // We don't modify our result if an earlier effect was
934
- // cancelled as it means that this hook no longer cares about
935
- // that old request.
936
- return;
937
- }
615
+ }
938
616
 
939
- setCacheAndNotify(result);
940
- return; // Shut up eslint always-return rule.
941
- });
617
+ fetchRequest();
942
618
  return () => {
943
- // TODO(somewhatabstract, FEI-4276): Eventually, we will want to be
944
- // able abort in-flight requests, but for now, we don't have that.
945
- // (Of course, we will only want to abort them if no one is waiting
946
- // on them)
947
- // For now, we just block cancelled requests from changing our
948
- // cache.
949
- cancel = true;
950
- }; // We only want to run this effect if the requestId, or skip values
951
- // change. These are the only two things that should affect the
952
- // cancellation of a pending request. We do not update if the handler
953
- // changes, in order to simplify the API - otherwise, callers would
954
- // not be able to use inline functions with this hook.
955
- // eslint-disable-next-line react-hooks/exhaustive-deps
956
- }, [hardSkip, requestId]); // We track the last result we returned in order to support the
957
- // "retainResultOnChange" option.
619
+ var _currentRequestRef$cu4;
958
620
 
621
+ (_currentRequestRef$cu4 = currentRequestRef.current) == null ? void 0 : _currentRequestRef$cu4.cancel();
622
+ currentRequestRef.current = null;
623
+ };
624
+ }, [shouldFetch, fetchRequest]);
959
625
  const lastResultAgnosticOfIdRef = React.useRef(Status.loading());
960
- const loadingResult = retainResultOnChange ? lastResultAgnosticOfIdRef.current : Status.loading(); // Loading is a transient state, so we only use it here; it's not something
961
- // we cache.
962
-
963
- const result = React.useMemo(() => mostRecentResult != null ? mostRecentResult : loadingResult, [mostRecentResult, loadingResult]);
626
+ const loadingResult = retainResultOnChange ? lastResultAgnosticOfIdRef.current : Status.loading();
627
+ const result = (_ref = fetchPolicy === FetchPolicy.NetworkOnly ? networkResultRef.current : mostRecentResult) != null ? _ref : loadingResult;
964
628
  lastResultAgnosticOfIdRef.current = result;
965
- return result;
629
+ return [result, fetchRequest];
966
630
  };
967
631
 
968
- /**
969
- * Policies to define how a hydratable effect should behave client-side.
970
- */
971
632
  const WhenClientSide = require("flow-enums-runtime").Mirrored(["DoNotHydrate", "ExecuteWhenNoResult", "ExecuteWhenNoSuccessResult", "AlwaysExecute"]);
972
633
  const DefaultScope = "useHydratableEffect";
973
- /**
974
- * Hook to execute an async operation on server and client.
975
- *
976
- * This hook executes the given handler on the server and on the client,
977
- * and, depending on the given options, can hydrate the server-side result.
978
- *
979
- * Results are cached on the client so they can be shared between equivalent
980
- * invocations. Cache changes from one hook instance do not trigger renders
981
- * in components that use the same requestID.
982
- */
983
-
984
634
  const useHydratableEffect = (requestId, handler, options = {}) => {
985
635
  const {
986
636
  clientBehavior = WhenClientSide.ExecuteWhenNoSuccessResult,
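// --- Illustrative example (editor's sketch, not part of the diff) ------------
// The reworked useCachedEffect in the hunk above now accepts a fetchPolicy
// option (the new FetchPolicy enum: CacheBeforeNetwork, CacheAndNetwork,
// CacheOnly, NetworkOnly) and returns a [result, refetch] tuple instead of a
// bare result. Assumes useCachedEffect and FetchPolicy are public exports (a
// types.fetch-policy docs story is added in this release); the request id,
// endpoint, and component are made up.
import * as React from "react";
import {useCachedEffect, FetchPolicy} from "@khanacademy/wonder-blocks-data";

function UserGreeting({userId}) {
    const [result, refetch] = useCachedEffect(
        `user:${userId}`,
        () => fetch(`/api/users/${userId}`).then((resp) => resp.json()),
        {fetchPolicy: FetchPolicy.CacheAndNetwork},
    );

    switch (result.status) {
        case "loading":
            return React.createElement("span", null, "Loading…");
        case "aborted":
            return null;
        case "error":
            // refetch() re-runs the handler; it throws under CacheOnly.
            return React.createElement("button", {onClick: () => refetch()}, "Retry");
        case "success":
        default:
            return React.createElement("span", null, `Hello, ${result.data.name}`);
    }
}
// -----------------------------------------------------------------------------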
@@ -988,75 +638,39 @@ const useHydratableEffect = (requestId, handler, options = {}) => {
988
638
  retainResultOnChange = false,
989
639
  onResultChanged,
990
640
  scope = DefaultScope
991
- } = options; // Now we instruct the server to perform the operation.
992
- // When client-side, this will look up any response for hydration; it does
993
- // not invoke the handler.
994
-
995
- const serverResult = useServerEffect(requestId, // If we're skipped (unlikely in server worlds, but maybe),
996
- // just give an aborted response.
997
- skip ? () => Promise.reject(new AbortError("skipped")) : handler, // Only hydrate if our behavior isn't telling us not to.
998
- clientBehavior !== WhenClientSide.DoNotHydrate);
641
+ } = options;
642
+ const serverResult = useServerEffect(requestId, handler, {
643
+ hydrate: clientBehavior !== WhenClientSide.DoNotHydrate,
644
+ skip
645
+ });
999
646
  const getDefaultCacheValue = React.useCallback(() => {
1000
- // If we don't have a requestId, it's our first render, the one
1001
- // where we hydrated. So defer to our clientBehavior value.
1002
647
  switch (clientBehavior) {
1003
648
  case WhenClientSide.DoNotHydrate:
1004
649
  case WhenClientSide.AlwaysExecute:
1005
- // Either we weren't hydrating at all, or we don't care
1006
- // if we hydrated something or not, either way, we're
1007
- // doing a request.
1008
650
  return null;
1009
651
 
1010
652
  case WhenClientSide.ExecuteWhenNoResult:
1011
- // We only execute if we didn't hydrate something.
1012
- // So, returning the hydration result as default for our
1013
- // cache, will then prevent the cached effect running.
1014
653
  return serverResult;
1015
654
 
1016
655
  case WhenClientSide.ExecuteWhenNoSuccessResult:
1017
- // We only execute if we didn't hydrate a success result.
1018
656
  if ((serverResult == null ? void 0 : serverResult.status) === "success") {
1019
- // So, returning the hydration result as default for our
1020
- // cache, will then prevent the cached effect running.
1021
657
  return serverResult;
1022
658
  }
1023
659
 
1024
660
  return null;
1025
- } // There is no reason for this to change after the first render,
1026
- // you might think, but the function closes around serverResult and if
1027
- // the requestId changes, it still returns the hydrate result of the
1028
- // first render of the previous requestId. This then means that the
1029
- // hydrate result is still the same, and the effect is not re-executed
1030
- // because the cache gets incorrectly defaulted.
1031
- // However, we don't want to bother doing anything with this on
1032
- // client behavior changing since that truly is irrelevant.
1033
- // eslint-disable-next-line react-hooks/exhaustive-deps
1034
-
1035
- }, [serverResult]); // Instead of using state, which would be local to just this hook instance,
1036
- // we use a shared in-memory cache.
1037
-
1038
- useSharedCache(requestId, // The key of the cached item
1039
- scope, // The scope of the cached items
1040
- getDefaultCacheValue); // When we're client-side, we ultimately want the result from this call.
1041
-
1042
- const clientResult = useCachedEffect(requestId, handler, {
661
+ }
662
+ }, [serverResult]);
663
+ useSharedCache(requestId, scope, getDefaultCacheValue);
664
+ const [clientResult] = useCachedEffect(requestId, handler, {
1043
665
  skip,
1044
666
  onResultChanged,
1045
667
  retainResultOnChange,
1046
- scope
1047
- }); // OK, now which result do we return.
1048
- // Well, we return the serverResult on our very first call and then
1049
- // the clientResult thereafter. The great thing is that after the very
1050
- // first call, the serverResult is going to be `null` anyway.
1051
-
668
+ scope,
669
+ fetchPolicy: FetchPolicy.CacheBeforeNetwork
670
+ });
1052
671
  return serverResult != null ? serverResult : clientResult;
1053
672
  };
1054
673
 
1055
- /**
1056
- * This component is the main component of Wonder Blocks Data. With this, data
1057
- * requirements can be placed in a React application in a manner that will
1058
- * support server-side rendering and efficient caching.
1059
- */
1060
674
  const Data = ({
1061
675
  requestId,
1062
676
  handler,
@@ -1071,87 +685,143 @@ const Data = ({
1071
685
  return children(result);
1072
686
  };
1073
687
 
1074
- /**
1075
- * This component provides a mechanism to intercept data requests.
1076
- * This is for use in testing.
1077
- *
1078
- * This component is not recommended for use in production code as it
1079
- * can prevent predictable functioning of the Wonder Blocks Data framework.
1080
- * One possible side-effect is that inflight requests from the interceptor could
1081
- * be picked up by `Data` component requests from outside the children of this
1082
- * component.
1083
- *
1084
- * Interceptions within the same component tree are chained such that the
1085
- * interceptor closest to the intercepted request is called first, and the
1086
- * furthest interceptor is called last.
1087
- */
1088
688
  const InterceptRequests = ({
1089
689
  interceptor,
1090
690
  children
1091
691
  }) => {
1092
692
  const interceptors = React.useContext(InterceptContext);
1093
- const updatedInterceptors = React.useMemo( // We could build this in reverse order so that our hook that does
1094
- // the interception didn't have to use reduceRight, but I think it
1095
- // is easier to think about if we do this in component tree order.
1096
- () => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
1097
- return /*#__PURE__*/React.createElement(InterceptContext.Provider, {
693
+ const updatedInterceptors = React.useMemo(() => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
694
+ return React.createElement(InterceptContext.Provider, {
1098
695
  value: updatedInterceptors
1099
696
  }, children);
1100
697
  };
1101
698
 
1102
- const GqlRouterContext = /*#__PURE__*/React.createContext(null);
699
+ const toString = valid => {
700
+ var _JSON$stringify;
701
+
702
+ if (typeof valid === "string") {
703
+ return valid;
704
+ }
705
+
706
+ return (_JSON$stringify = JSON.stringify(valid)) != null ? _JSON$stringify : "";
707
+ };
708
+
709
+ const getGqlRequestId = (operation, variables, context) => {
710
+ const parts = [];
711
+ const sortableContext = new URLSearchParams(context);
712
+ sortableContext.sort();
713
+ parts.push(sortableContext.toString());
714
+ parts.push(operation.id);
715
+
716
+ if (variables != null) {
717
+ const stringifiedVariables = Object.keys(variables).reduce((acc, key) => {
718
+ acc[key] = toString(variables[key]);
719
+ return acc;
720
+ }, {});
721
+ const sortableVariables = new URLSearchParams(stringifiedVariables);
722
+ sortableVariables.sort();
723
+ parts.push(sortableVariables.toString());
724
+ }
725
+
726
+ return parts.join("|");
727
+ };
728
+
729
+ const DocumentTypes = Object.freeze({
730
+ query: "query",
731
+ mutation: "mutation"
732
+ });
733
+ const cache = new Map();
734
+ function graphQLDocumentNodeParser(document) {
735
+ var _definition$name;
736
+
737
+ const cached = cache.get(document);
738
+
739
+ if (cached) {
740
+ return cached;
741
+ }
742
+
743
+ if (!(document != null && document.kind)) {
744
+ if (process.env.NODE_ENV === "production") {
745
+ throw new DataError("Bad DocumentNode", DataErrors.InvalidInput);
746
+ } else {
747
+ throw new DataError(`Argument of ${JSON.stringify(document)} passed to parser was not a valid GraphQL ` + `DocumentNode. You may need to use 'graphql-tag' or another method ` + `to convert your operation into a document`, DataErrors.InvalidInput);
748
+ }
749
+ }
750
+
751
+ const fragments = document.definitions.filter(x => x.kind === "FragmentDefinition");
752
+ const queries = document.definitions.filter(x => x.kind === "OperationDefinition" && x.operation === "query");
753
+ const mutations = document.definitions.filter(x => x.kind === "OperationDefinition" && x.operation === "mutation");
754
+ const subscriptions = document.definitions.filter(x => x.kind === "OperationDefinition" && x.operation === "subscription");
755
+
756
+ if (fragments.length && !queries.length && !mutations.length) {
757
+ if (process.env.NODE_ENV === "production") {
758
+ throw new DataError("Fragment only", DataErrors.InvalidInput);
759
+ } else {
760
+ throw new DataError(`Passing only a fragment to 'graphql' is not supported. ` + `You must include a query or mutation as well`, DataErrors.InvalidInput);
761
+ }
762
+ }
763
+
764
+ if (subscriptions.length) {
765
+ if (process.env.NODE_ENV === "production") {
766
+ throw new DataError("No subscriptions", DataErrors.InvalidInput);
767
+ } else {
768
+ throw new DataError(`We do not support subscriptions. ` + `${JSON.stringify(document)} had ${subscriptions.length} subscriptions`, DataErrors.InvalidInput);
769
+ }
770
+ }
771
+
772
+ if (queries.length + mutations.length > 1) {
773
+ if (process.env.NODE_ENV === "production") {
774
+ throw new DataError("Too many ops", DataErrors.InvalidInput);
775
+ } else {
776
+ throw new DataError(`We only support one query or mutation per component. ` + `${JSON.stringify(document)} had ${queries.length} queries and ` + `${mutations.length} mutations. `, DataErrors.InvalidInput);
777
+ }
778
+ }
779
+
780
+ const type = queries.length ? DocumentTypes.query : DocumentTypes.mutation;
781
+ const definitions = queries.length ? queries : mutations;
782
+ const definition = definitions[0];
783
+ const variables = definition.variableDefinitions || [];
784
+ const name = ((_definition$name = definition.name) == null ? void 0 : _definition$name.kind) === "Name" ? definition.name.value : "data";
785
+ const payload = {
786
+ name,
787
+ type,
788
+ variables
789
+ };
790
+ cache.set(document, payload);
791
+ return payload;
792
+ }
793
+
794
+ const toGqlOperation = documentNode => {
795
+ const definition = graphQLDocumentNodeParser(documentNode);
796
+ const wbDataOperation = {
797
+ id: definition.name,
798
+ type: definition.type
799
+ };
800
+ return wbDataOperation;
801
+ };
802
+
803
+ const GqlRouterContext = React.createContext(null);
1103
804
 
1104
- /**
1105
- * Configure GraphQL routing for GraphQL hooks and components.
1106
- *
1107
- * These can be nested. Components and hooks relying on the GraphQL routing
1108
- * will use the configuration from their closest ancestral GqlRouter.
1109
- */
1110
805
  const GqlRouter = ({
1111
806
  defaultContext: thisDefaultContext,
1112
807
  fetch: thisFetch,
1113
808
  children
1114
809
  }) => {
1115
- // We don't care if we're nested. We always force our callers to define
1116
- // everything. It makes for a clearer API and requires less error checking
1117
- // code (assuming our flow types are correct). We also don't default fetch
1118
- // to anything - our callers can tell us what function to use quite easily.
1119
- // If code that consumes this wants more nuanced nesting, it can implement
1120
- // it within its own GqlRouter than then defers to this one.
1121
- // We want to always use the same object if things haven't changed to avoid
1122
- // over-rendering consumers of our context, let's memoize the configuration.
1123
- // By doing this, if a component under children that uses this context
1124
- // uses React.memo, we won't force it to re-render every time we render
1125
- // because we'll only change the context value if something has actually
1126
- // changed.
1127
810
  const configuration = React.useMemo(() => ({
1128
811
  fetch: thisFetch,
1129
812
  defaultContext: thisDefaultContext
1130
813
  }), [thisDefaultContext, thisFetch]);
1131
- return /*#__PURE__*/React.createElement(GqlRouterContext.Provider, {
814
+ return React.createElement(GqlRouterContext.Provider, {
1132
815
  value: configuration
1133
816
  }, children);
1134
817
  };
1135
818
 
1136
- /**
1137
- * Construct a complete GqlContext from current defaults and a partial context.
1138
- *
1139
- * Values in the partial context that are `undefined` will be ignored.
1140
- * Values in the partial context that are `null` will be deleted.
1141
- */
1142
819
  const mergeGqlContext = (defaultContext, overrides) => {
1143
- // Let's merge the partial context default context. We deliberately
1144
- // don't spread because spreading would overwrite default context
1145
- // values with undefined or null if the partial context includes a value
1146
- // explicitly set to undefined or null.
1147
820
  return Object.keys(overrides).reduce((acc, key) => {
1148
- // Undefined values are ignored.
1149
821
  if (overrides[key] !== undefined) {
1150
822
  if (overrides[key] === null) {
1151
- // Null indicates we delete this context value.
1152
823
  delete acc[key];
1153
824
  } else {
1154
- // Otherwise, we set it.
1155
825
  acc[key] = overrides[key];
1156
826
  }
1157
827
  }
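// --- Illustrative example (editor's sketch, not part of the diff) ------------
// The new getGqlRequestId helper defined in the hunk above builds a stable
// request identifier from the GqlContext, the operation id, and the
// (stringified, key-sorted) variables, joined with "|". Assumes it is a public
// export (a get-gql-request-id docs story is added in this release); the
// operation, variables, and context values are made up.
import {getGqlRequestId} from "@khanacademy/wonder-blocks-data";

const operation = {type: "query", id: "getCourse"};
const variables = {courseId: "calculus-1", includeUnits: true};
const context = {host: "example.com", locale: "en"};

const requestId = getGqlRequestId(operation, variables, context);
// requestId is roughly:
//   "host=example.com&locale=en|getCourse|courseId=calculus-1&includeUnits=true"
// i.e. <sorted context>|<operation id>|<sorted, stringified variables>
// -----------------------------------------------------------------------------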
@@ -1160,32 +830,11 @@ const mergeGqlContext = (defaultContext, overrides) => {
1160
830
  }, _extends({}, defaultContext));
1161
831
  };
1162
832
 
- /**
- * Error kinds for GqlError.
- */
  const GqlErrors = Object.freeze({
- /**
- * An internal framework error.
- */
  Internal: "Internal",
-
- /**
- * Response does not have the correct structure for a GraphQL response.
- */
  BadResponse: "BadResponse",
-
- /**
- * A valid GraphQL result with errors field in the payload.
- */
  ErrorResult: "ErrorResult"
  });
- /**
- * An error from the GQL API.
- *
- * Errors of this type will have names of the format:
- * `${kind}GqlError`
- */
-
  class GqlError extends KindError {
  constructor(message, kind, {
  metadata,
@@ -1200,11 +849,7 @@ class GqlError extends KindError {

  }
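`GqlErrors` enumerates the error kinds and `GqlError` extends `KindError`; the removed doc comment notes that instance names follow `${kind}GqlError`. A hedged sketch of handling one of these errors from a `gqlFetch` call, assuming `KindError` exposes `kind` and `metadata` on the instance (that assumption comes from wonder-stuff conventions, not from this diff):

```js
const fetchWithHandling = async (gqlFetch, operation) => {
    try {
        return await gqlFetch(operation, {variables: {id: "42"}});
    } catch (error) {
        if (error instanceof GqlError && error.kind === GqlErrors.ErrorResult) {
            // A structurally valid GraphQL response whose payload carried a
            // non-empty `errors` array; details ride along in metadata.
            console.error(error.name, error.metadata);
        }
        throw error;
    }
};
```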
 
- /**
- * Construct a GqlRouterContext from the current one and partial context.
- */
  const useGqlRouterContext = (contextOverrides = {}) => {
- // This hook only works if the `GqlRouter` has been used to setup context.
  const gqlRouterContext = useContext(GqlRouterContext);

  if (gqlRouterContext == null) {
@@ -1216,17 +861,14 @@ const useGqlRouterContext = (contextOverrides = {}) => {
  defaultContext
  } = gqlRouterContext;
  const contextRef = useRef(defaultContext);
- const mergedContext = mergeGqlContext(defaultContext, contextOverrides); // Now, we can see if this represents a new context and if so,
- // update our ref and return the merged value.
-
+ const mergedContext = mergeGqlContext(defaultContext, contextOverrides);
  const refKeys = Object.keys(contextRef.current);
  const mergedKeys = Object.keys(mergedContext);
  const shouldWeUpdateRef = refKeys.length !== mergedKeys.length || mergedKeys.every(key => contextRef.current[key] !== mergedContext[key]);

  if (shouldWeUpdateRef) {
  contextRef.current = mergedContext;
- } // OK, now we're up-to-date, let's memoize our final result.
-
+ }

  const finalContext = contextRef.current;
  const finalRouterContext = useMemo(() => ({
@@ -1236,13 +878,7 @@ const useGqlRouterContext = (contextOverrides = {}) => {
  return finalRouterContext;
  };
 
- /**
- * Validate a GQL operation response and extract the data.
- */
-
  const getGqlDataFromResponse = async response => {
- // Get the response as text, that way we can use the text in error
- // messaging, should our parsing fail.
  const bodyText = await response.text();
  let result;

@@ -1256,8 +892,7 @@ const getGqlDataFromResponse = async response => {
  },
  cause: e
  });
- } // Check for a bad status code.
-
+ }

  if (response.status >= 300) {
  throw new DataError("Response unsuccessful", DataErrors.Network, {
@@ -1266,22 +901,16 @@ const getGqlDataFromResponse = async response => {
  result
  }
  });
- } // Check that we have a valid result payload.
-
+ }

- if ( // Flow shouldn't be warning about this.
- // $FlowIgnore[method-unbinding]
- !Object.prototype.hasOwnProperty.call(result, "data") && // Flow shouldn't be warning about this.
- // $FlowIgnore[method-unbinding]
- !Object.prototype.hasOwnProperty.call(result, "errors")) {
+ if (!Object.prototype.hasOwnProperty.call(result, "data") && !Object.prototype.hasOwnProperty.call(result, "errors")) {
  throw new GqlError("Server response missing", GqlErrors.BadResponse, {
  metadata: {
  statusCode: response.status,
  result
  }
  });
- } // If the response payload has errors, throw an error.
-
+ }

  if (result.errors != null && Array.isArray(result.errors) && result.errors.length > 0) {
  throw new GqlError("GraphQL errors", GqlErrors.ErrorResult, {
@@ -1290,30 +919,13 @@ const getGqlDataFromResponse = async response => {
  result
  }
  });
- } // We got here, so return the data.
-
+ }

  return result.data;
  };
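The validation above maps response problems onto specific errors: an unparseable body or a 300+ status becomes a `DataError`, a payload with neither `data` nor `errors` becomes a `BadResponse` `GqlError`, and a payload with a non-empty `errors` array becomes an `ErrorResult` `GqlError`; otherwise the `data` field is returned. A sketch exercising the helper in isolation (it is internal, not in the export list below, and the fake response object is invented to stand in for a fetch `Response`):

```js
// Minimal stand-in for the parts of a fetch Response the helper reads.
const fakeResponse = (status, body) => ({
    status,
    text: () => Promise.resolve(JSON.stringify(body)),
});

// Resolves with the `data` field of a well-formed payload.
await getGqlDataFromResponse(
    fakeResponse(200, {data: {course: {title: "Algebra"}}}),
); // => {course: {title: "Algebra"}}

// Rejects with an ErrorResult GqlError when the payload carries errors.
await getGqlDataFromResponse(
    fakeResponse(200, {errors: [{message: "Oops"}]}),
).catch((error) => {
    // error.kind should be GqlErrors.ErrorResult here.
});
```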
 
- /**
- * Hook to obtain a gqlFetch function for performing GraphQL requests.
- *
- * The fetch function will resolve null if the request was aborted, otherwise
- * it will resolve the data returned by the GraphQL server.
- *
- * Context is merged with the default context provided to the GqlRouter.
- * Values in the partial context given to the returned fetch function will
- * only be included if they have a value other than undefined.
- */
  const useGql = (context = {}) => {
- // This hook only works if the `GqlRouter` has been used to setup context.
- const gqlRouterContext = useGqlRouterContext(context); // Let's memoize the gqlFetch function we create based off our context.
- // That way, even if the context happens to change, if its values don't
- // we give the same function instance back to our callers instead of
- // making a new one. That then means they can safely use the return value
- // in hooks deps without fear of it triggering extra renders.
-
+ const gqlRouterContext = useGqlRouterContext(context);
  const gqlFetch = useCallback((operation, options = Object.freeze({})) => {
  const {
  fetch,
@@ -1323,70 +935,10 @@ const useGql = (context = {}) => {
  variables,
  context = {}
  } = options;
- const finalContext = mergeGqlContext(defaultContext, context); // Invoke the fetch and extract the data.
-
+ const finalContext = mergeGqlContext(defaultContext, context);
  return fetch(operation, variables, finalContext).then(getGqlDataFromResponse);
  }, [gqlRouterContext]);
  return gqlFetch;
  };
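Per the removed doc comment, `useGql` returns a memoized `gqlFetch` that merges per-call context with the `GqlRouter` defaults, resolves with the extracted data, and resolves null if the request was aborted. A hedged usage sketch inside a component; the request id, query shape, and state handling are illustrative, and only the `useGql`/`gqlFetch` API itself is taken from this diff:

```js
import * as React from "react";
import {useGql} from "@khanacademy/wonder-blocks-data";

const CourseTitle = ({courseId}) => {
    const gqlFetch = useGql();
    const [title, setTitle] = React.useState(null);

    React.useEffect(() => {
        // The operation is the {id, type} shape produced by toGqlOperation;
        // variables and context are the per-request options seen above.
        gqlFetch(
            {id: "getCourse", type: "query"},
            {variables: {id: courseId}, context: {locale: "fr"}},
        ).then((data) => setTitle(data?.course?.title ?? null));
    }, [gqlFetch, courseId]);

    return title;
};
```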
 
- /**
- * Initialize the hydration cache.
- *
- * @param {ResponseCache} source The cache content to use for initializing the
- * cache.
- * @throws {Error} If the cache is already initialized.
- */
- const initializeCache = source => SsrCache.Default.initialize(source);
- /**
- * Fulfill all tracked data requests.
- *
- * This is for use with the `TrackData` component during server-side rendering.
- *
- * @throws {Error} If executed outside of server-side rendering.
- * @returns {Promise<void>} A promise that resolves when all tracked requests
- * have been fulfilled.
- */
-
- const fulfillAllDataRequests = () => {
- if (!Server.isServerSide()) {
- return Promise.reject(new Error("Data requests are not tracked when client-side"));
- }
-
- return RequestTracker.Default.fulfillTrackedRequests();
- };
- /**
- * Indicate if there are unfulfilled tracked requests.
- *
- * This is used in conjunction with `TrackData`.
- *
- * @throws {Error} If executed outside of server-side rendering.
- * @returns {boolean} `true` if there are unfulfilled tracked requests;
- * otherwise, `false`.
- */
-
- const hasUnfulfilledRequests = () => {
- if (!Server.isServerSide()) {
- throw new Error("Data requests are not tracked when client-side");
- }
-
- return RequestTracker.Default.hasUnfulfilledRequests;
- };
- /**
- * Remove the request identified from the cached hydration responses.
- *
- * @param {string} id The request ID of the response to remove from the cache.
- */
-
- const removeFromCache = id => SsrCache.Default.remove(id);
- /**
- * Remove all cached hydration responses that match the given predicate.
- *
- * @param {(id: string) => boolean} [predicate] The predicate to match against
- * the cached hydration responses. If no predicate is provided, all cached
- * hydration responses will be removed.
- */
-
- const removeAllFromCache = predicate => SsrCache.Default.removeAll(predicate);
-
- export { Data, DataError, DataErrors, GqlError, GqlErrors, GqlRouter, InterceptRequests, RequestFulfillment, ScopedInMemoryCache, SerializableInMemoryCache, Status, TrackData, WhenClientSide, clearSharedCache, fulfillAllDataRequests, hasUnfulfilledRequests, initializeCache, removeAllFromCache, removeFromCache, useCachedEffect, useGql, useHydratableEffect, useServerEffect, useSharedCache };
+ export { Data, DataError, DataErrors, FetchPolicy, GqlError, GqlErrors, GqlRouter, InterceptRequests, ScopedInMemoryCache, SerializableInMemoryCache, Status, TrackData, WhenClientSide, abortInflightRequests, fetchTrackedRequests, getGqlRequestId, graphQLDocumentNodeParser, hasTrackedRequestsToBeFetched, initializeHydrationCache, purgeCaches, purgeHydrationCache, purgeSharedCache, toGqlOperation, useCachedEffect, useGql, useHydratableEffect, useServerEffect, useSharedCache };
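Comparing the two export lists, the 6.x hydration and request-tracking helpers appear to have been renamed rather than dropped. A hedged mapping, inferred from the names alone (this diff does not state the equivalences explicitly):

```js
// 6.x export                8.x export (apparent equivalent)
// ------------------------  --------------------------------
// initializeCache           initializeHydrationCache
// fulfillAllDataRequests    fetchTrackedRequests
// hasUnfulfilledRequests    hasTrackedRequestsToBeFetched
// removeAllFromCache        purgeHydrationCache
// clearSharedCache          purgeSharedCache
// removeFromCache           (no direct replacement exported; the
//                           predicate-style purge APIs appear to cover it)

// Imports would migrate accordingly:
import {
    initializeHydrationCache,
    fetchTrackedRequests,
    hasTrackedRequestsToBeFetched,
    purgeHydrationCache,
    purgeSharedCache,
} from "@khanacademy/wonder-blocks-data";
```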