@khanacademy/wonder-blocks-data 7.0.0 → 7.0.1

This diff compares the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @khanacademy/wonder-blocks-data
 
+ ## 7.0.1
+
+ ### Patch Changes
+
+ - @khanacademy/wonder-blocks-core@4.3.1
+
  ## 7.0.0
 
  ### Major Changes
package/dist/es/index.js CHANGED
@@ -4,47 +4,14 @@ import _extends from '@babel/runtime/helpers/extends';
  import * as React from 'react';
  import { useContext, useRef, useMemo, useCallback } from 'react';
 
- /**
- * Error kinds for DataError.
- */
  const DataErrors = Object.freeze({
- /**
- * The kind of error is not known.
- */
  Unknown: "Unknown",
-
- /**
- * The error is internal to the executing code.
- */
  Internal: "Internal",
-
- /**
- * There was a problem with the provided input.
- */
  InvalidInput: "InvalidInput",
-
- /**
- * A network error occurred.
- */
  Network: "Network",
-
- /**
- * Response could not be parsed.
- */
  Parse: "Parse",
-
- /**
- * An error that occurred during SSR and was hydrated from cache
- */
  Hydrated: "Hydrated"
  });
- /**
- * An error from the Wonder Blocks Data API.
- *
- * Errors of this type will have names of the format:
- * `${kind}DataError`
- */
-
  class DataError extends KindError {
  constructor(message, kind, {
  metadata,
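
Note: the comments removed in this hunk documented the error API rather than changing behaviour. For reference, a minimal sketch of how `DataErrors` and `DataError` are used; the parsing helper is hypothetical, and the `cause` option mirrors the `cause: e` usage that appears later in this file:

```js
import {DataError, DataErrors} from "@khanacademy/wonder-blocks-data";

// Wrap a failure in a typed error. Per the removed comment above, the
// error's name follows the `${kind}DataError` format, so this one is
// reported as "ParseDataError".
const parseBody = (bodyText) => {
    try {
        return JSON.parse(bodyText);
    } catch (e) {
        throw new DataError("Failed to parse response", DataErrors.Parse, {
            metadata: {bodyText},
            cause: e,
        });
    }
};
```
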
@@ -59,27 +26,14 @@ class DataError extends KindError {
59
26
 
60
27
  }
61
28
 
62
- /**
63
- * Describe an in-memory cache.
64
- */
65
29
  class ScopedInMemoryCache {
66
30
  constructor(initialCache = {}) {
67
31
  this._cache = initialCache;
68
32
  }
69
- /**
70
- * Indicate if this cache is being used or not.
71
- *
72
- * When the cache has entries, returns `true`; otherwise, returns `false`.
73
- */
74
-
75
33
 
76
34
  get inUse() {
77
35
  return Object.keys(this._cache).length > 0;
78
36
  }
79
- /**
80
- * Set a value in the cache.
81
- */
82
-
83
37
 
84
38
  set(scope, id, value) {
85
39
  var _this$_cache$scope;
@@ -99,20 +53,12 @@ class ScopedInMemoryCache {
99
53
  this._cache[scope] = (_this$_cache$scope = this._cache[scope]) != null ? _this$_cache$scope : {};
100
54
  this._cache[scope][id] = value;
101
55
  }
102
- /**
103
- * Retrieve a value from the cache.
104
- */
105
-
106
56
 
107
57
  get(scope, id) {
108
58
  var _this$_cache$scope$id, _this$_cache$scope2;
109
59
 
110
60
  return (_this$_cache$scope$id = (_this$_cache$scope2 = this._cache[scope]) == null ? void 0 : _this$_cache$scope2[id]) != null ? _this$_cache$scope$id : null;
111
61
  }
112
- /**
113
- * Purge an item from the cache.
114
- */
115
-
116
62
 
117
63
  purge(scope, id) {
118
64
  var _this$_cache$scope3;
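
Note: the stripped comments in the surrounding hunks described the `ScopedInMemoryCache` API (`inUse`, `set`, `get`, `purge`). A small usage sketch based on the signatures visible in this file:

```js
import {ScopedInMemoryCache} from "@khanacademy/wonder-blocks-data";

const cache = new ScopedInMemoryCache();

cache.set("profile", "user-42", {name: "Ada"});
cache.get("profile", "user-42");   // => {name: "Ada"}
cache.get("profile", "user-1");    // => null (misses resolve to null)
cache.inUse;                       // => true while any entry exists

cache.purge("profile", "user-42"); // remove a single entry
cache.inUse;                       // => false
```
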
@@ -127,12 +73,6 @@ class ScopedInMemoryCache {
  delete this._cache[scope];
  }
  }
- /**
- * Purge a scope of items that match the given predicate.
- *
- * If the predicate is omitted, then all items in the scope are purged.
- */
-
 
  purgeScope(scope, predicate) {
  if (!this._cache[scope]) {
@@ -154,12 +94,6 @@ class ScopedInMemoryCache {
  delete this._cache[scope];
  }
  }
- /**
- * Purge all items from the cache that match the given predicate.
- *
- * If the predicate is omitted, then all items in the cache are purged.
- */
-
 
  purgeAll(predicate) {
  if (predicate == null) {
@@ -174,9 +108,6 @@ class ScopedInMemoryCache {
 
  }
 
- /**
- * Describe a serializable in-memory cache.
- */
  class SerializableInMemoryCache extends ScopedInMemoryCache {
  constructor(initialCache = {}) {
  try {
@@ -185,18 +116,10 @@ class SerializableInMemoryCache extends ScopedInMemoryCache {
185
116
  throw new DataError(`An error occurred trying to initialize from a response cache snapshot: ${e}`, DataErrors.InvalidInput);
186
117
  }
187
118
  }
188
- /**
189
- * Set a value in the cache.
190
- */
191
-
192
119
 
193
120
  set(scope, id, value) {
194
121
  super.set(scope, id, Object.freeze(clone(value)));
195
122
  }
196
- /**
197
- * Clone the cache.
198
- */
199
-
200
123
 
201
124
  clone() {
202
125
  try {
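
Note: as the hunk above shows, `SerializableInMemoryCache.set` stores a frozen clone of the value and `clone()` returns a plain copy of the whole cache. A brief sketch of what that means for callers:

```js
import {SerializableInMemoryCache} from "@khanacademy/wonder-blocks-data";

const cache = new SerializableInMemoryCache();
const value = {count: 1};

cache.set("scope", "id", value);
value.count = 2;                 // mutating the original has no effect...
cache.get("scope", "id").count;  // => 1 (a frozen clone was stored)

const snapshot = cache.clone();  // plain object, e.g. for serialization
JSON.stringify(snapshot);        // => '{"scope":{"id":{"count":1}}}'
```
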
@@ -211,18 +134,8 @@ class SerializableInMemoryCache extends ScopedInMemoryCache {
211
134
  }
212
135
 
213
136
  const DefaultScope$2 = "default";
214
- /**
215
- * The default instance is stored here.
216
- * It's created below in the Default() static property.
217
- */
218
137
 
219
138
  let _default$2;
220
- /**
221
- * Implements the response cache.
222
- *
223
- * INTERNAL USE ONLY
224
- */
225
-
226
139
 
227
140
  class SsrCache {
228
141
  static get Default() {
@@ -240,7 +153,6 @@ class SsrCache {
  }
 
  this._hydrationCache = new SerializableInMemoryCache({
- // $FlowIgnore[incompatible-call]
  [DefaultScope$2]: source
  });
  };
@@ -259,23 +171,11 @@ class SsrCache {
259
171
  this.getEntry = id => {
260
172
  var _this$_ssrOnlyCache$g, _this$_ssrOnlyCache;
261
173
 
262
- // Get the cached entry for this value.
263
- // We first look in the ssr cache and then the hydration cache.
264
- const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope$2, id); // If we are not server-side and we hydrated something, let's clear
265
- // that from the hydration cache to save memory.
174
+ const internalEntry = (_this$_ssrOnlyCache$g = (_this$_ssrOnlyCache = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache.get(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$g : this._hydrationCache.get(DefaultScope$2, id);
266
175
 
267
176
  if (this._ssrOnlyCache == null && internalEntry != null) {
268
- // We now delete this from our hydration cache as we don't need it.
269
- // This does mean that if another handler of the same type but
270
- // without some sort of linked cache won't get the value, but
271
- // that's not an expected use-case. If two different places use the
272
- // same handler and options (i.e. the same request), then the
273
- // handler should cater to that to ensure they share the result.
274
177
  this._hydrationCache.purge(DefaultScope$2, id);
275
- } // Getting the typing right between the in-memory cache and this
276
- // is hard. Just telling flow it's OK.
277
- // $FlowIgnore[incompatible-return]
278
-
178
+ }
279
179
 
280
180
  return internalEntry;
281
181
  };
@@ -283,20 +183,13 @@ class SsrCache {
283
183
  this.remove = id => {
284
184
  var _this$_ssrOnlyCache$p, _this$_ssrOnlyCache2;
285
185
 
286
- // NOTE(somewhatabstract): We could invoke removeAll with a predicate
287
- // to match the key of the entry we're removing, but that's an
288
- // inefficient way to remove a single item, so let's not do that.
289
- // Delete the entry from the appropriate cache.
290
186
  return this._hydrationCache.purge(DefaultScope$2, id) || ((_this$_ssrOnlyCache$p = (_this$_ssrOnlyCache2 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache2.purge(DefaultScope$2, id)) != null ? _this$_ssrOnlyCache$p : false);
291
187
  };
292
188
 
293
189
  this.removeAll = predicate => {
294
190
  var _this$_ssrOnlyCache3;
295
191
 
296
- const realPredicate = predicate ? // We know what we're putting into the cache so let's assume it
297
- // conforms.
298
- // $FlowIgnore[incompatible-call]
299
- (_, key, cachedEntry) => predicate(key, cachedEntry) : undefined; // Apply the predicate to what we have in our caches.
192
+ const realPredicate = predicate ? (_, key, cachedEntry) => predicate(key, cachedEntry) : undefined;
300
193
 
301
194
  this._hydrationCache.purgeAll(realPredicate);
302
195
 
@@ -306,13 +199,7 @@ class SsrCache {
  this.cloneHydratableData = () => {
  var _cache$DefaultScope;
 
- // We return our hydration cache only.
- const cache = this._hydrationCache.clone(); // If we're empty, we still want to return an object, so we default
- // to an empty object.
- // We only need the default scope out of our scoped in-memory cache.
- // We know that it conforms to our expectations.
- // $FlowIgnore[incompatible-return]
-
+ const cache = this._hydrationCache.clone();
 
  return (_cache$DefaultScope = cache[DefaultScope$2]) != null ? _cache$DefaultScope : {};
  };
@@ -325,36 +212,21 @@ class SsrCache {
325
212
  const frozenEntry = Object.freeze(entry);
326
213
 
327
214
  if (Server.isServerSide()) {
328
- // We are server-side.
329
- // We need to store this value.
330
215
  if (hydrate) {
331
216
  this._hydrationCache.set(DefaultScope$2, id, frozenEntry);
332
217
  } else {
333
218
  var _this$_ssrOnlyCache4;
334
219
 
335
- // Usually, when server-side, this cache will always be present.
336
- // We do fake server-side in our doc example though, when it
337
- // won't be.
338
220
  (_this$_ssrOnlyCache4 = this._ssrOnlyCache) == null ? void 0 : _this$_ssrOnlyCache4.set(DefaultScope$2, id, frozenEntry);
339
221
  }
340
222
  }
341
223
 
342
224
  return frozenEntry;
343
225
  }
344
- /**
345
- * Initialize the cache from a given cache state.
346
- *
347
- * This can only be called if the cache is not already in use.
348
- */
349
-
350
226
 
351
227
  }
352
228
 
353
229
  let _default$1;
354
- /**
355
- * This fulfills a request, making sure that in-flight requests are shared.
356
- */
357
-
358
230
 
359
231
  class RequestFulfillment {
360
232
  constructor() {
@@ -364,18 +236,11 @@ class RequestFulfillment {
364
236
  handler,
365
237
  hydrate: _hydrate = true
366
238
  }) => {
367
- /**
368
- * If we have an inflight request, we'll provide that.
369
- */
370
239
  const inflight = this._requests[id];
371
240
 
372
241
  if (inflight) {
373
242
  return inflight;
374
243
  }
375
- /**
376
- * We don't have an inflight request, so let's set one up.
377
- */
378
-
379
244
 
380
245
  const request = handler().then(data => ({
381
246
  status: "success",
@@ -385,13 +250,7 @@ class RequestFulfillment {
385
250
  metadata: {
386
251
  unexpectedError: error
387
252
  }
388
- }) : error; // Return aborted result if the request was aborted.
389
- // The only way to detect this reliably, it seems, is to
390
- // check the error name and see if it's "AbortError" (this
391
- // is also what Apollo does).
392
- // Even then, it's reliant on the handler supporting aborts.
393
- // TODO(somewhatabstract, FEI-4276): Add first class abort
394
- // support to the handler API.
253
+ }) : error;
395
254
 
396
255
  if (actualError.name === "AbortError") {
397
256
  return {
@@ -405,8 +264,7 @@ class RequestFulfillment {
405
264
  };
406
265
  }).finally(() => {
407
266
  delete this._requests[id];
408
- }); // Store the request in our cache.
409
-
267
+ });
410
268
  this._requests[id] = request;
411
269
  return request;
412
270
  };
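
Note: the comments dropped above explained that `fulfill` shares in-flight requests by id, never rejects, and maps an `AbortError` to an aborted result. A minimal sketch, assuming a hypothetical `fetchUser` handler:

```js
import {RequestFulfillment} from "@khanacademy/wonder-blocks-data";

const fetchUser = () =>
    fetch("/api/user/42").then((resp) => resp.json()); // hypothetical handler

// Concurrent calls with the same id share one in-flight promise.
const a = RequestFulfillment.Default.fulfill("user-42", {handler: fetchUser});
const b = RequestFulfillment.Default.fulfill("user-42", {handler: fetchUser});
// a === b while the request is in flight.

a.then((result) => {
    // result is {status: "success", data} on success,
    // {status: "error", error} on failure, or {status: "aborted"}.
});
```
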
@@ -422,24 +280,9 @@ class RequestFulfillment {
422
280
 
423
281
  }
424
282
 
425
- /**
426
- * Used to inject our tracking function into the render framework.
427
- *
428
- * INTERNAL USE ONLY
429
- */
430
283
  const TrackerContext = new React.createContext(null);
431
- /**
432
- * The default instance is stored here.
433
- * It's created below in the Default() static property.
434
- */
435
284
 
436
285
  let _default;
437
- /**
438
- * Implements request tracking and fulfillment.
439
- *
440
- * INTERNAL USE ONLY
441
- */
442
-
443
286
 
444
287
  class RequestTracker {
445
288
  static get Default() {
@@ -449,18 +292,11 @@ class RequestTracker {
449
292
 
450
293
  return _default;
451
294
  }
452
- /**
453
- * These are the caches for tracked requests, their handlers, and responses.
454
- */
455
-
456
295
 
457
296
  constructor(responseCache = undefined) {
458
297
  this._trackedRequests = {};
459
298
 
460
299
  this.trackDataRequest = (id, handler, hydrate) => {
461
- /**
462
- * If we don't already have this tracked, then let's track it.
463
- */
464
300
  if (this._trackedRequests[id] == null) {
465
301
  this._trackedRequests[id] = {
466
302
  handler,
@@ -487,113 +323,42 @@ class RequestTracker {
487
323
  promises.push(this._requestFulfillment.fulfill(requestKey, _extends({}, options)).then(result => {
488
324
  switch (result.status) {
489
325
  case "success":
490
- /**
491
- * Let's cache the data!
492
- *
493
- * NOTE: This only caches when we're
494
- * server side.
495
- */
496
326
  cacheData(requestKey, result.data, options.hydrate);
497
327
  break;
498
328
 
499
329
  case "error":
500
- /**
501
- * Let's cache the error!
502
- *
503
- * NOTE: This only caches when we're
504
- * server side.
505
- */
506
330
  cacheError(requestKey, result.error, options.hydrate);
507
331
  break;
508
- } // For status === "loading":
509
- // Could never get here unless we wrote
510
- // the code wrong. Rather than bloat
511
- // code with useless error, just ignore.
512
- // For status === "aborted":
513
- // We won't cache this.
514
- // We don't hydrate aborted requests,
515
- // so the client would just see them
516
- // as unfulfilled data.
517
-
332
+ }
518
333
 
519
334
  return;
520
335
  }));
521
336
  } catch (e) {
522
- // This captures if there are problems in the code that
523
- // begins the requests.
524
337
  promises.push(Promise.resolve(cacheError(requestKey, e, options.hydrate)));
525
338
  }
526
339
  }
527
- /**
528
- * Clear out our tracked info.
529
- *
530
- * We call this now for a simpler API.
531
- *
532
- * If we reset the tracked calls after all promises resolve, any
533
- * request tracking done while promises are in flight would be lost.
534
- *
535
- * If we don't reset at all, then we have to expose the `reset` call
536
- * for consumers to use, or they'll only ever be able to accumulate
537
- * more and more tracked requests, having to fulfill them all every
538
- * time.
539
- *
540
- * Calling it here means we can have multiple "track -> request"
541
- * cycles in a row and in an easy to reason about manner.
542
- */
543
-
544
340
 
545
341
  this.reset();
546
- /**
547
- * Let's wait for everything to fulfill, and then clone the cached data.
548
- */
549
-
550
342
  return Promise.all(promises).then(() => this._responseCache.cloneHydratableData());
551
343
  };
552
344
 
553
345
  this._responseCache = responseCache || SsrCache.Default;
554
346
  this._requestFulfillment = new RequestFulfillment();
555
347
  }
556
- /**
557
- * Track a request.
558
- *
559
- * This method caches a request and its handler for use during server-side
560
- * rendering to allow us to fulfill requests before producing a final render.
561
- */
562
-
563
348
 
564
- /**
565
- * Indicates if we have requests waiting to be fulfilled.
566
- */
567
349
  get hasUnfulfilledRequests() {
568
350
  return Object.keys(this._trackedRequests).length > 0;
569
351
  }
570
- /**
571
- * Initiate fulfillment of all tracked requests.
572
- *
573
- * This loops over the requests that were tracked using TrackData, and asks
574
- * the respective handlers to fulfill those requests in the order they were
575
- * tracked.
576
- *
577
- * Calling this method marks tracked requests as fulfilled; requests are
578
- * removed from the list of tracked requests by calling this method.
579
- *
580
- * @returns {Promise<ResponseCache>} The promise of the data that was
581
- * cached as a result of fulfilling the tracked requests.
582
- */
583
-
584
352
 
585
353
  }
586
354
 
587
- /**
588
- * Component to enable data request tracking when server-side rendering.
589
- */
590
355
  class TrackData extends React.Component {
591
356
  render() {
592
357
  if (!Server.isServerSide()) {
593
358
  throw new Error("This component is not for use during client-side rendering");
594
359
  }
595
360
 
596
- return /*#__PURE__*/React.createElement(TrackerContext.Provider, {
361
+ return React.createElement(TrackerContext.Provider, {
597
362
  value: RequestTracker.Default.trackDataRequest
598
363
  }, this.props.children);
599
364
  }
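
Note: `TrackData` only works during server-side rendering; it exposes `RequestTracker.Default.trackDataRequest` via `TrackerContext` so the hooks below can register requests. A sketch of the two-pass SSR flow this enables, using the package's exported helpers (the `App` component and renderer calls are illustrative):

```js
import * as React from "react";
import {renderToString} from "react-dom/server";
import {
    TrackData,
    fulfillAllDataRequests,
    hasUnfulfilledRequests,
} from "@khanacademy/wonder-blocks-data";
import App from "./App"; // hypothetical app using Wonder Blocks Data hooks

async function renderWithData() {
    const element = (
        <TrackData>
            <App />
        </TrackData>
    );

    // First pass: render to discover and track data requests.
    let html = renderToString(element);

    if (hasUnfulfilledRequests()) {
        // Fulfill everything that was tracked; the resolved value is the
        // hydration cache to embed in the page for the client.
        const hydrationCache = await fulfillAllDataRequests();
        // Second pass: the responses are now cached, so render real HTML.
        html = renderToString(element);
        return {html, hydrationCache};
    }

    return {html, hydrationCache: {}};
}
```

On the client, that embedded cache would be handed to `initializeCache` before hydrating.
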
@@ -606,10 +371,6 @@ const loadingStatus = Object.freeze({
  const abortedStatus = Object.freeze({
  status: "aborted"
  });
- /**
- * Create Result<TData> instances with specific statuses.
- */
-
  const Status = Object.freeze({
  loading: () => loadingStatus,
  aborted: () => abortedStatus,
@@ -623,11 +384,7 @@ const Status = Object.freeze({
  })
  });
 
- /**
- * Turns a cache entry into a stateful result.
- */
  const resultFromCachedResponse = cacheEntry => {
- // No cache entry means no result to be hydrated.
  if (cacheEntry == null) {
  return null;
  }
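
Note: the removed comment described `Status` as the factory for `Result<TData>` values and `resultFromCachedResponse` as the adapter that turns a hydrated cache entry into one. The shapes the factory produces, per the frozen objects above:

```js
import {Status} from "@khanacademy/wonder-blocks-data";

Status.loading();              // => {status: "loading"}
Status.aborted();              // => {status: "aborted"}
Status.success({id: 42});      // => {status: "success", data: {id: 42}}
Status.error(new Error("no")); // => {status: "error", error: Error("no")}
```
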
@@ -638,338 +395,145 @@ const resultFromCachedResponse = cacheEntry => {
638
395
  } = cacheEntry;
639
396
 
640
397
  if (error != null) {
641
- // Let's hydrate the error. We don't persist everything about the
642
- // original error on the server, hence why we only superficially
643
- // hydrate it to a GqlHydratedError.
644
398
  return Status.error(new DataError(error, DataErrors.Hydrated));
645
399
  }
646
400
 
647
401
  if (data != null) {
648
402
  return Status.success(data);
649
- } // We shouldn't get here since we don't actually cache null data.
650
-
403
+ }
651
404
 
652
405
  return Status.aborted();
653
406
  };
654
407
 
655
- /**
656
- * InterceptContext defines a map from request ID to interception methods.
657
- *
658
- * INTERNAL USE ONLY
659
- */
660
- const InterceptContext = /*#__PURE__*/React.createContext([]);
661
-
662
- /**
663
- * Allow request handling to be intercepted.
664
- *
665
- * Hook to take a uniquely identified request handler and return a
666
- * method that will support request interception from the InterceptRequest
667
- * component.
668
- *
669
- * If you want request interception to be supported with `useServerEffect` or
670
- * any client-side effect that uses the handler, call this first to generate
671
- * an intercepted handler, and then invoke `useServerEffect` (or other things)
672
- * with that intercepted handler.
673
- */
674
- const useRequestInterception = (requestId, handler) => {
675
- // Get the interceptors that have been registered.
676
- const interceptors = React.useContext(InterceptContext); // Now, we need to create a new handler that will check if the
677
- // request is intercepted before ultimately calling the original handler
678
- // if nothing intercepted it.
679
- // We memoize this so that it only changes if something related to it
680
- // changes.
408
+ const InterceptContext = React.createContext([]);
681
409
 
410
+ const useRequestInterception = (requestId, handler) => {
411
+ const interceptors = React.useContext(InterceptContext);
682
412
  const interceptedHandler = React.useCallback(() => {
683
- // Call the interceptors from closest to furthest.
684
- // If one returns a non-null result, then we keep that.
685
413
  const interceptResponse = interceptors.reduceRight((prev, interceptor) => {
686
414
  if (prev != null) {
687
415
  return prev;
688
416
  }
689
417
 
690
418
  return interceptor(requestId);
691
- }, null); // If nothing intercepted this request, invoke the original handler.
692
- // NOTE: We can't guarantee all interceptors return the same type
693
- // as our handler, so how can flow know? Let's just suppress that.
694
- // $FlowFixMe[incompatible-return]
695
-
419
+ }, null);
696
420
  return interceptResponse != null ? interceptResponse : handler();
697
421
  }, [handler, interceptors, requestId]);
698
422
  return interceptedHandler;
699
423
  };
700
424
 
701
- /**
702
- * Hook to perform an asynchronous action during server-side rendering.
703
- *
704
- * This hook registers an asynchronous action to be performed during
705
- * server-side rendering. The action is performed only once, and the result
706
- * is cached against the given identifier so that subsequent calls return that
707
- * cached result allowing components to render more of the component.
708
- *
709
- * This hook requires the Wonder Blocks Data functionality for resolving
710
- * pending requests, as well as support for the hydration cache to be
711
- * embedded into a page so that the result can by hydrated (if that is a
712
- * requirement).
713
- *
714
- * The asynchronous action is never invoked on the client-side.
715
- */
716
425
  const useServerEffect = (requestId, handler, options = {}) => {
717
426
  const {
718
427
  hydrate = true,
719
428
  skip = false
720
- } = options; // Plug in to the request interception framework for code that wants
721
- // to use that.
722
-
723
- const interceptedHandler = useRequestInterception(requestId, handler); // If we're server-side or hydrating, we'll have a cached entry to use.
724
- // So we get that and use it to initialize our state.
725
- // This works in both hydration and SSR because the very first call to
726
- // this will have cached data in those cases as it will be present on the
727
- // initial render - and subsequent renders on the client it will be null.
728
-
729
- const cachedResult = SsrCache.Default.getEntry(requestId); // We only track data requests when we are server-side, we are not skipping
730
- // the request, and we don't already have a result, as given by the
731
- // cachedData (which is also the initial value for the result state).
732
-
429
+ } = options;
430
+ const interceptedHandler = useRequestInterception(requestId, handler);
431
+ const cachedResult = SsrCache.Default.getEntry(requestId);
733
432
  const maybeTrack = useContext(TrackerContext);
734
433
 
735
434
  if (!skip && cachedResult == null && Server.isServerSide()) {
736
435
  maybeTrack == null ? void 0 : maybeTrack(requestId, interceptedHandler, hydrate);
737
- } // A null result means there was no result to hydrate.
738
-
436
+ }
739
437
 
740
438
  return cachedResult == null ? null : resultFromCachedResponse(cachedResult);
741
439
  };
742
440
 
743
- /**
744
- * This is the cache.
745
- * It's incredibly complex.
746
- * Very in-memory. So cache. Such complex. Wow.
747
- */
748
441
  const cache = new ScopedInMemoryCache();
749
- /**
750
- * Clear the in-memory cache or a single scope within it.
751
- */
752
-
753
442
  const clearSharedCache = (scope = "") => {
754
- // If we have a valid scope (empty string is falsy), then clear that scope.
755
443
  if (scope && typeof scope === "string") {
756
444
  cache.purgeScope(scope);
757
445
  } else {
758
- // Just reset the object. This should be sufficient.
759
446
  cache.purgeAll();
760
447
  }
761
448
  };
762
- /**
763
- * Hook to retrieve data from and store data in an in-memory cache.
764
- *
765
- * @returns {[?ReadOnlyCacheValue, CacheValueFn]}
766
- * Returns an array containing the current cache entry (or undefined), a
767
- * function to set the cache entry (passing null or undefined to this function
768
- * will delete the entry).
769
- *
770
- * To clear a single scope within the cache or the entire cache,
771
- * the `clearScopedCache` export is available.
772
- *
773
- * NOTE: Unlike useState or useReducer, we don't automatically update folks
774
- * if the value they reference changes. We might add it later (if we need to),
775
- * but the likelihood here is that things won't be changing in this cache in a
776
- * way where we would need that. If we do (and likely only in specific
777
- * circumstances), we should consider adding a simple boolean useState that can
778
- * be toggled to cause a rerender whenever the referenced cached data changes
779
- * so that callers can re-render on cache changes. However, we should make
780
- * sure this toggling is optional - or we could use a callback argument, to
781
- * achieve this on an as-needed basis.
782
- */
783
-
784
449
  const useSharedCache = (id, scope, initialValue) => {
785
- // Verify arguments.
786
450
  if (!id || typeof id !== "string") {
787
451
  throw new DataError("id must be a non-empty string", DataErrors.InvalidInput);
788
452
  }
789
453
 
790
454
  if (!scope || typeof scope !== "string") {
791
455
  throw new DataError("scope must be a non-empty string", DataErrors.InvalidInput);
792
- } // Memoize our APIs.
793
- // This one allows callers to set or replace the cached value.
794
-
795
-
796
- const cacheValue = React.useCallback(value => value == null ? cache.purge(scope, id) : cache.set(scope, id, value), [id, scope]); // We don't memo-ize the current value, just in case the cache was updated
797
- // since our last run through. Also, our cache does not know what type it
798
- // stores, so we have to cast it to the type we're exporting. This is a
799
- // dev time courtesy, rather than a runtime thing.
800
- // $FlowIgnore[incompatible-type]
456
+ }
801
457
 
802
- let currentValue = cache.get(scope, id); // If we have an initial value, we need to add it to the cache
803
- // and use it as our current value.
458
+ const cacheValue = React.useCallback(value => value == null ? cache.purge(scope, id) : cache.set(scope, id, value), [id, scope]);
459
+ let currentValue = cache.get(scope, id);
804
460
 
805
461
  if (currentValue == null && initialValue !== undefined) {
806
- // Get the initial value.
807
462
  const value = typeof initialValue === "function" ? initialValue() : initialValue;
808
463
 
809
464
  if (value != null) {
810
- // Update the cache.
811
- cacheValue(value); // Make sure we return this value as our current value.
812
-
465
+ cacheValue(value);
813
466
  currentValue = value;
814
467
  }
815
- } // Now we have everything, let's return it.
816
-
468
+ }
817
469
 
818
470
  return [currentValue, cacheValue];
819
471
  };
820
472
 
821
473
  const DefaultScope$1 = "useCachedEffect";
822
- /**
823
- * Hook to execute and cache an async operation on the client.
824
- *
825
- * This hook executes the given handler on the client if there is no
826
- * cached result to use.
827
- *
828
- * Results are cached so they can be shared between equivalent invocations.
829
- * In-flight requests are also shared, so that concurrent calls will
830
- * behave as one might exect. Cache updates invoked by one hook instance
831
- * do not trigger renders in components that use the same requestID; however,
832
- * that should not matter since concurrent requests will share the same
833
- * in-flight request, and subsequent renders will grab from the cache.
834
- *
835
- * Once the request has been tried once and a non-loading response has been
836
- * cached, the request will not executed made again.
837
- */
838
-
839
474
  const useCachedEffect = (requestId, handler, options = {}) => {
840
475
  const {
841
476
  skip: hardSkip = false,
842
477
  retainResultOnChange = false,
843
478
  onResultChanged,
844
479
  scope = DefaultScope$1
845
- } = options; // Plug in to the request interception framework for code that wants
846
- // to use that.
847
-
848
- const interceptedHandler = useRequestInterception(requestId, handler); // Instead of using state, which would be local to just this hook instance,
849
- // we use a shared in-memory cache.
850
-
851
- const [mostRecentResult, setMostRecentResult] = useSharedCache(requestId, // The key of the cached item
852
- scope // The scope of the cached items
853
- // No default value. We don't want the loading status there; to ensure
854
- // that all calls when the request is in-flight will update once that
855
- // request is done, we want the cache to be empty until that point.
856
- ); // Build a function that will update the cache and either invoke the
857
- // callback provided in options, or force an update.
858
-
480
+ } = options;
481
+ const interceptedHandler = useRequestInterception(requestId, handler);
482
+ const [mostRecentResult, setMostRecentResult] = useSharedCache(requestId, scope);
859
483
  const forceUpdate = useForceUpdate();
860
484
  const setCacheAndNotify = React.useCallback(value => {
861
- setMostRecentResult(value); // If our caller provided a cacheUpdated callback, we use that.
862
- // Otherwise, we toggle our little state update.
485
+ setMostRecentResult(value);
863
486
 
864
487
  if (onResultChanged != null) {
865
488
  onResultChanged(value);
866
489
  } else {
867
490
  forceUpdate();
868
491
  }
869
- }, [setMostRecentResult, onResultChanged, forceUpdate]); // We need to trigger a re-render when the request ID changes as that
870
- // indicates its a different request. We don't default the current id as
871
- // this is a proxy for the first render, where we will make the request
872
- // if we don't already have a cached value.
873
-
492
+ }, [setMostRecentResult, onResultChanged, forceUpdate]);
874
493
  const requestIdRef = React.useRef();
875
- const previousRequestId = requestIdRef.current; // Calculate our soft skip state.
876
- // Soft skip changes are things that should skip the effect if something
877
- // else triggers the effect to run, but should not itself trigger the effect
878
- // (which would cancel a previous invocation).
879
-
494
+ const previousRequestId = requestIdRef.current;
880
495
  const softSkip = React.useMemo(() => {
881
496
  if (requestId === previousRequestId) {
882
- // If the requestId is unchanged, it means we already rendered at
883
- // least once and so we already made the request at least once. So
884
- // we can bail out right here.
885
497
  return true;
886
- } // If we already have a cached value, we're going to skip.
887
-
498
+ }
888
499
 
889
500
  if (mostRecentResult != null) {
890
501
  return true;
891
502
  }
892
503
 
893
504
  return false;
894
- }, [requestId, previousRequestId, mostRecentResult]); // So now we make sure the client-side request happens per our various
895
- // options.
896
-
505
+ }, [requestId, previousRequestId, mostRecentResult]);
897
506
  React.useEffect(() => {
898
- let cancel = false; // We don't do anything if we've been told to hard skip (a hard skip
899
- // means we should cancel the previous request and is therefore a
900
- // dependency on that), or we have determined we have already done
901
- // enough and can soft skip (a soft skip doesn't trigger the request
902
- // to re-run; we don't want to cancel the in progress effect if we're
903
- // soft skipping.
507
+ let cancel = false;
904
508
 
905
509
  if (hardSkip || softSkip) {
906
510
  return;
907
- } // If we got here, we're going to perform the request.
908
- // Let's make sure our ref is set to the most recent requestId.
909
-
910
-
911
- requestIdRef.current = requestId; // OK, we've done all our checks and things. It's time to make the
912
- // request. We use our request fulfillment here so that in-flight
913
- // requests are shared.
914
- // NOTE: Our request fulfillment handles the error cases here.
915
- // Catching shouldn't serve a purpose.
916
- // eslint-disable-next-line promise/catch-or-return
511
+ }
917
512
 
513
+ requestIdRef.current = requestId;
918
514
  RequestFulfillment.Default.fulfill(requestId, {
919
515
  handler: interceptedHandler
920
516
  }).then(result => {
921
517
  if (cancel) {
922
- // We don't modify our result if an earlier effect was
923
- // cancelled as it means that this hook no longer cares about
924
- // that old request.
925
518
  return;
926
519
  }
927
520
 
928
521
  setCacheAndNotify(result);
929
- return; // Shut up eslint always-return rule.
522
+ return;
930
523
  });
931
524
  return () => {
932
- // TODO(somewhatabstract, FEI-4276): Eventually, we will want to be
933
- // able abort in-flight requests, but for now, we don't have that.
934
- // (Of course, we will only want to abort them if no one is waiting
935
- // on them)
936
- // For now, we just block cancelled requests from changing our
937
- // cache.
938
525
  cancel = true;
939
- }; // We only want to run this effect if the requestId, or skip values
940
- // change. These are the only two things that should affect the
941
- // cancellation of a pending request. We do not update if the handler
942
- // changes, in order to simplify the API - otherwise, callers would
943
- // not be able to use inline functions with this hook.
944
- // eslint-disable-next-line react-hooks/exhaustive-deps
945
- }, [hardSkip, requestId]); // We track the last result we returned in order to support the
946
- // "retainResultOnChange" option.
947
-
526
+ };
527
+ }, [hardSkip, requestId]);
948
528
  const lastResultAgnosticOfIdRef = React.useRef(Status.loading());
949
- const loadingResult = retainResultOnChange ? lastResultAgnosticOfIdRef.current : Status.loading(); // Loading is a transient state, so we only use it here; it's not something
950
- // we cache.
951
-
529
+ const loadingResult = retainResultOnChange ? lastResultAgnosticOfIdRef.current : Status.loading();
952
530
  const result = React.useMemo(() => mostRecentResult != null ? mostRecentResult : loadingResult, [mostRecentResult, loadingResult]);
953
531
  lastResultAgnosticOfIdRef.current = result;
954
532
  return result;
955
533
  };
956
534
 
957
- /**
958
- * Policies to define how a hydratable effect should behave client-side.
959
- */
960
535
  const WhenClientSide = require("flow-enums-runtime").Mirrored(["DoNotHydrate", "ExecuteWhenNoResult", "ExecuteWhenNoSuccessResult", "AlwaysExecute"]);
961
536
  const DefaultScope = "useHydratableEffect";
962
- /**
963
- * Hook to execute an async operation on server and client.
964
- *
965
- * This hook executes the given handler on the server and on the client,
966
- * and, depending on the given options, can hydrate the server-side result.
967
- *
968
- * Results are cached on the client so they can be shared between equivalent
969
- * invocations. Cache changes from one hook instance do not trigger renders
970
- * in components that use the same requestID.
971
- */
972
-
973
537
  const useHydratableEffect = (requestId, handler, options = {}) => {
974
538
  const {
975
539
  clientBehavior = WhenClientSide.ExecuteWhenNoSuccessResult,
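
Note: this hunk strips the documentation for `useServerEffect`, `useSharedCache`, and `useCachedEffect`. A minimal client-side sketch of `useCachedEffect`, based on the signature and result statuses visible above (the request itself is hypothetical):

```js
import * as React from "react";
import {useCachedEffect} from "@khanacademy/wonder-blocks-data";

const UserGreeting = ({userId}) => {
    // Results are cached by requestId in the shared in-memory cache, and
    // concurrent mounts share a single in-flight request.
    const result = useCachedEffect(`user-${userId}`, () =>
        fetch(`/api/user/${userId}`).then((resp) => resp.json()),
    );

    switch (result.status) {
        case "loading":
            return <span>Loading…</span>;
        case "success":
            return <span>Hello, {result.data.name}</span>;
        case "error":
            return <span>Something went wrong</span>;
        default: // "aborted"
            return null;
    }
};
```
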
@@ -977,76 +541,38 @@ const useHydratableEffect = (requestId, handler, options = {}) => {
977
541
  retainResultOnChange = false,
978
542
  onResultChanged,
979
543
  scope = DefaultScope
980
- } = options; // Now we instruct the server to perform the operation.
981
- // When client-side, this will look up any response for hydration; it does
982
- // not invoke the handler.
983
-
544
+ } = options;
984
545
  const serverResult = useServerEffect(requestId, handler, {
985
- // Only hydrate if our behavior isn't telling us not to.
986
546
  hydrate: clientBehavior !== WhenClientSide.DoNotHydrate,
987
547
  skip
988
548
  });
989
549
  const getDefaultCacheValue = React.useCallback(() => {
990
- // If we don't have a requestId, it's our first render, the one
991
- // where we hydrated. So defer to our clientBehavior value.
992
550
  switch (clientBehavior) {
993
551
  case WhenClientSide.DoNotHydrate:
994
552
  case WhenClientSide.AlwaysExecute:
995
- // Either we weren't hydrating at all, or we don't care
996
- // if we hydrated something or not, either way, we're
997
- // doing a request.
998
553
  return null;
999
554
 
1000
555
  case WhenClientSide.ExecuteWhenNoResult:
1001
- // We only execute if we didn't hydrate something.
1002
- // So, returning the hydration result as default for our
1003
- // cache, will then prevent the cached effect running.
1004
556
  return serverResult;
1005
557
 
1006
558
  case WhenClientSide.ExecuteWhenNoSuccessResult:
1007
- // We only execute if we didn't hydrate a success result.
1008
559
  if ((serverResult == null ? void 0 : serverResult.status) === "success") {
1009
- // So, returning the hydration result as default for our
1010
- // cache, will then prevent the cached effect running.
1011
560
  return serverResult;
1012
561
  }
1013
562
 
1014
563
  return null;
1015
- } // There is no reason for this to change after the first render,
1016
- // you might think, but the function closes around serverResult and if
1017
- // the requestId changes, it still returns the hydrate result of the
1018
- // first render of the previous requestId. This then means that the
1019
- // hydrate result is still the same, and the effect is not re-executed
1020
- // because the cache gets incorrectly defaulted.
1021
- // However, we don't want to bother doing anything with this on
1022
- // client behavior changing since that truly is irrelevant.
1023
- // eslint-disable-next-line react-hooks/exhaustive-deps
1024
-
1025
- }, [serverResult]); // Instead of using state, which would be local to just this hook instance,
1026
- // we use a shared in-memory cache.
1027
-
1028
- useSharedCache(requestId, // The key of the cached item
1029
- scope, // The scope of the cached items
1030
- getDefaultCacheValue); // When we're client-side, we ultimately want the result from this call.
1031
-
564
+ }
565
+ }, [serverResult]);
566
+ useSharedCache(requestId, scope, getDefaultCacheValue);
1032
567
  const clientResult = useCachedEffect(requestId, handler, {
1033
568
  skip,
1034
569
  onResultChanged,
1035
570
  retainResultOnChange,
1036
571
  scope
1037
- }); // OK, now which result do we return.
1038
- // Well, we return the serverResult on our very first call and then
1039
- // the clientResult thereafter. The great thing is that after the very
1040
- // first call, the serverResult is going to be `null` anyway.
1041
-
572
+ });
1042
573
  return serverResult != null ? serverResult : clientResult;
1043
574
  };
1044
575
 
1045
- /**
1046
- * This component is the main component of Wonder Blocks Data. With this, data
1047
- * requirements can be placed in a React application in a manner that will
1048
- * support server-side rendering and efficient caching.
1049
- */
1050
576
  const Data = ({
1051
577
  requestId,
1052
578
  handler,
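
Note: `useHydratableEffect` chains `useServerEffect` and `useCachedEffect`, with `WhenClientSide` deciding whether a hydrated result suppresses the client-side request. A sketch that spells out the default policy (the handler is hypothetical):

```js
import {
    useHydratableEffect,
    WhenClientSide,
} from "@khanacademy/wonder-blocks-data";

const useCourseList = () =>
    useHydratableEffect(
        "course-list",
        () => fetch("/api/courses").then((resp) => resp.json()),
        {
            // The default, shown explicitly: only request on the client when
            // no successful result was hydrated from the server render.
            clientBehavior: WhenClientSide.ExecuteWhenNoSuccessResult,
        },
    );
```
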
@@ -1061,87 +587,39 @@ const Data = ({
1061
587
  return children(result);
1062
588
  };
1063
589
 
1064
- /**
1065
- * This component provides a mechanism to intercept data requests.
1066
- * This is for use in testing.
1067
- *
1068
- * This component is not recommended for use in production code as it
1069
- * can prevent predictable functioning of the Wonder Blocks Data framework.
1070
- * One possible side-effect is that inflight requests from the interceptor could
1071
- * be picked up by `Data` component requests from outside the children of this
1072
- * component.
1073
- *
1074
- * Interceptions within the same component tree are chained such that the
1075
- * interceptor closest to the intercepted request is called first, and the
1076
- * furthest interceptor is called last.
1077
- */
1078
590
  const InterceptRequests = ({
1079
591
  interceptor,
1080
592
  children
1081
593
  }) => {
1082
594
  const interceptors = React.useContext(InterceptContext);
1083
- const updatedInterceptors = React.useMemo( // We could build this in reverse order so that our hook that does
1084
- // the interception didn't have to use reduceRight, but I think it
1085
- // is easier to think about if we do this in component tree order.
1086
- () => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
1087
- return /*#__PURE__*/React.createElement(InterceptContext.Provider, {
595
+ const updatedInterceptors = React.useMemo(() => [].concat(interceptors, [interceptor]), [interceptors, interceptor]);
596
+ return React.createElement(InterceptContext.Provider, {
1088
597
  value: updatedInterceptors
1089
598
  }, children);
1090
599
  };
1091
600
 
1092
- const GqlRouterContext = /*#__PURE__*/React.createContext(null);
601
+ const GqlRouterContext = React.createContext(null);
1093
602
 
1094
- /**
1095
- * Configure GraphQL routing for GraphQL hooks and components.
1096
- *
1097
- * These can be nested. Components and hooks relying on the GraphQL routing
1098
- * will use the configuration from their closest ancestral GqlRouter.
1099
- */
1100
603
  const GqlRouter = ({
1101
604
  defaultContext: thisDefaultContext,
1102
605
  fetch: thisFetch,
1103
606
  children
1104
607
  }) => {
1105
- // We don't care if we're nested. We always force our callers to define
1106
- // everything. It makes for a clearer API and requires less error checking
1107
- // code (assuming our flow types are correct). We also don't default fetch
1108
- // to anything - our callers can tell us what function to use quite easily.
1109
- // If code that consumes this wants more nuanced nesting, it can implement
1110
- // it within its own GqlRouter than then defers to this one.
1111
- // We want to always use the same object if things haven't changed to avoid
1112
- // over-rendering consumers of our context, let's memoize the configuration.
1113
- // By doing this, if a component under children that uses this context
1114
- // uses React.memo, we won't force it to re-render every time we render
1115
- // because we'll only change the context value if something has actually
1116
- // changed.
1117
608
  const configuration = React.useMemo(() => ({
1118
609
  fetch: thisFetch,
1119
610
  defaultContext: thisDefaultContext
1120
611
  }), [thisDefaultContext, thisFetch]);
1121
- return /*#__PURE__*/React.createElement(GqlRouterContext.Provider, {
612
+ return React.createElement(GqlRouterContext.Provider, {
1122
613
  value: configuration
1123
614
  }, children);
1124
615
  };
1125
616
 
1126
- /**
1127
- * Construct a complete GqlContext from current defaults and a partial context.
1128
- *
1129
- * Values in the partial context that are `undefined` will be ignored.
1130
- * Values in the partial context that are `null` will be deleted.
1131
- */
1132
617
  const mergeGqlContext = (defaultContext, overrides) => {
1133
- // Let's merge the partial context default context. We deliberately
1134
- // don't spread because spreading would overwrite default context
1135
- // values with undefined or null if the partial context includes a value
1136
- // explicitly set to undefined or null.
1137
618
  return Object.keys(overrides).reduce((acc, key) => {
1138
- // Undefined values are ignored.
1139
619
  if (overrides[key] !== undefined) {
1140
620
  if (overrides[key] === null) {
1141
- // Null indicates we delete this context value.
1142
621
  delete acc[key];
1143
622
  } else {
1144
- // Otherwise, we set it.
1145
623
  acc[key] = overrides[key];
1146
624
  }
1147
625
  }
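
Note: `Data` is the render-prop counterpart of `useHydratableEffect`, and `InterceptRequests` substitutes responses by request ID, which the removed comment scoped to testing. A sketch of the two together in a test (the handler and fake data are hypothetical):

```js
import * as React from "react";
import {Data, InterceptRequests} from "@khanacademy/wonder-blocks-data";

const UserCard = () => (
    <Data
        requestId="user-42"
        handler={() => fetch("/api/user/42").then((resp) => resp.json())}
    >
        {(result) =>
            result.status === "success" ? <span>{result.data.name}</span> : null
        }
    </Data>
);

// In a test, intercept the request instead of hitting the network.
// An interceptor returns null to pass through, or a promise of data.
const TestHarness = () => (
    <InterceptRequests
        interceptor={(requestId) =>
            requestId === "user-42" ? Promise.resolve({name: "Ada"}) : null
        }
    >
        <UserCard />
    </InterceptRequests>
);
```
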
@@ -1150,32 +628,11 @@ const mergeGqlContext = (defaultContext, overrides) => {
1150
628
  }, _extends({}, defaultContext));
1151
629
  };
1152
630
 
1153
- /**
1154
- * Error kinds for GqlError.
1155
- */
1156
631
  const GqlErrors = Object.freeze({
1157
- /**
1158
- * An internal framework error.
1159
- */
1160
632
  Internal: "Internal",
1161
-
1162
- /**
1163
- * Response does not have the correct structure for a GraphQL response.
1164
- */
1165
633
  BadResponse: "BadResponse",
1166
-
1167
- /**
1168
- * A valid GraphQL result with errors field in the payload.
1169
- */
1170
634
  ErrorResult: "ErrorResult"
1171
635
  });
1172
- /**
1173
- * An error from the GQL API.
1174
- *
1175
- * Errors of this type will have names of the format:
1176
- * `${kind}GqlError`
1177
- */
1178
-
1179
636
  class GqlError extends KindError {
1180
637
  constructor(message, kind, {
1181
638
  metadata,
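
Note: the comments removed in this hunk spelled out the merge rules that `mergeGqlContext` applies to the per-request `context` overrides used by `useGql`: `undefined` values are ignored and `null` values delete the defaulted entry. Worked through on illustrative keys:

```js
// Defaults from the nearest GqlRouter:
const defaultContext = {locale: "en", preview: true};

// Per-request overrides passed alongside an operation:
const overrides = {locale: undefined, preview: null, flags: "beta"};

// The merged context used for the fetch would be {locale: "en", flags: "beta"}:
// - `locale: undefined` is ignored, so the default is kept,
// - `preview: null` deletes the defaulted value,
// - `flags` is added.
```
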
@@ -1190,11 +647,7 @@ class GqlError extends KindError {
 
  }
 
- /**
- * Construct a GqlRouterContext from the current one and partial context.
- */
  const useGqlRouterContext = (contextOverrides = {}) => {
- // This hook only works if the `GqlRouter` has been used to setup context.
  const gqlRouterContext = useContext(GqlRouterContext);
 
  if (gqlRouterContext == null) {
@@ -1206,17 +659,14 @@ const useGqlRouterContext = (contextOverrides = {}) => {
1206
659
  defaultContext
1207
660
  } = gqlRouterContext;
1208
661
  const contextRef = useRef(defaultContext);
1209
- const mergedContext = mergeGqlContext(defaultContext, contextOverrides); // Now, we can see if this represents a new context and if so,
1210
- // update our ref and return the merged value.
1211
-
662
+ const mergedContext = mergeGqlContext(defaultContext, contextOverrides);
1212
663
  const refKeys = Object.keys(contextRef.current);
1213
664
  const mergedKeys = Object.keys(mergedContext);
1214
665
  const shouldWeUpdateRef = refKeys.length !== mergedKeys.length || mergedKeys.every(key => contextRef.current[key] !== mergedContext[key]);
1215
666
 
1216
667
  if (shouldWeUpdateRef) {
1217
668
  contextRef.current = mergedContext;
1218
- } // OK, now we're up-to-date, let's memoize our final result.
1219
-
669
+ }
1220
670
 
1221
671
  const finalContext = contextRef.current;
1222
672
  const finalRouterContext = useMemo(() => ({
@@ -1226,13 +676,7 @@ const useGqlRouterContext = (contextOverrides = {}) => {
1226
676
  return finalRouterContext;
1227
677
  };
1228
678
 
1229
- /**
1230
- * Validate a GQL operation response and extract the data.
1231
- */
1232
-
1233
679
  const getGqlDataFromResponse = async response => {
1234
- // Get the response as text, that way we can use the text in error
1235
- // messaging, should our parsing fail.
1236
680
  const bodyText = await response.text();
1237
681
  let result;
1238
682
 
@@ -1246,8 +690,7 @@ const getGqlDataFromResponse = async response => {
  },
  cause: e
  });
- } // Check for a bad status code.
-
+ }
 
  if (response.status >= 300) {
  throw new DataError("Response unsuccessful", DataErrors.Network, {
@@ -1256,22 +699,16 @@ const getGqlDataFromResponse = async response => {
1256
699
  result
1257
700
  }
1258
701
  });
1259
- } // Check that we have a valid result payload.
1260
-
702
+ }
1261
703
 
1262
- if ( // Flow shouldn't be warning about this.
1263
- // $FlowIgnore[method-unbinding]
1264
- !Object.prototype.hasOwnProperty.call(result, "data") && // Flow shouldn't be warning about this.
1265
- // $FlowIgnore[method-unbinding]
1266
- !Object.prototype.hasOwnProperty.call(result, "errors")) {
704
+ if (!Object.prototype.hasOwnProperty.call(result, "data") && !Object.prototype.hasOwnProperty.call(result, "errors")) {
1267
705
  throw new GqlError("Server response missing", GqlErrors.BadResponse, {
1268
706
  metadata: {
1269
707
  statusCode: response.status,
1270
708
  result
1271
709
  }
1272
710
  });
1273
- } // If the response payload has errors, throw an error.
1274
-
711
+ }
1275
712
 
1276
713
  if (result.errors != null && Array.isArray(result.errors) && result.errors.length > 0) {
1277
714
  throw new GqlError("GraphQL errors", GqlErrors.ErrorResult, {
@@ -1280,30 +717,13 @@ const getGqlDataFromResponse = async response => {
1280
717
  result
1281
718
  }
1282
719
  });
1283
- } // We got here, so return the data.
1284
-
720
+ }
1285
721
 
1286
722
  return result.data;
1287
723
  };
1288
724
 
1289
- /**
1290
- * Hook to obtain a gqlFetch function for performing GraphQL requests.
1291
- *
1292
- * The fetch function will resolve null if the request was aborted, otherwise
1293
- * it will resolve the data returned by the GraphQL server.
1294
- *
1295
- * Context is merged with the default context provided to the GqlRouter.
1296
- * Values in the partial context given to the returned fetch function will
1297
- * only be included if they have a value other than undefined.
1298
- */
1299
725
  const useGql = (context = {}) => {
1300
- // This hook only works if the `GqlRouter` has been used to setup context.
1301
- const gqlRouterContext = useGqlRouterContext(context); // Let's memoize the gqlFetch function we create based off our context.
1302
- // That way, even if the context happens to change, if its values don't
1303
- // we give the same function instance back to our callers instead of
1304
- // making a new one. That then means they can safely use the return value
1305
- // in hooks deps without fear of it triggering extra renders.
1306
-
726
+ const gqlRouterContext = useGqlRouterContext(context);
1307
727
  const gqlFetch = useCallback((operation, options = Object.freeze({})) => {
1308
728
  const {
1309
729
  fetch,
@@ -1313,31 +733,13 @@ const useGql = (context = {}) => {
1313
733
  variables,
1314
734
  context = {}
1315
735
  } = options;
1316
- const finalContext = mergeGqlContext(defaultContext, context); // Invoke the fetch and extract the data.
1317
-
736
+ const finalContext = mergeGqlContext(defaultContext, context);
1318
737
  return fetch(operation, variables, finalContext).then(getGqlDataFromResponse);
1319
738
  }, [gqlRouterContext]);
1320
739
  return gqlFetch;
1321
740
  };
1322
741
 
1323
- /**
1324
- * Initialize the hydration cache.
1325
- *
1326
- * @param {ResponseCache} source The cache content to use for initializing the
1327
- * cache.
1328
- * @throws {Error} If the cache is already initialized.
1329
- */
1330
742
  const initializeCache = source => SsrCache.Default.initialize(source);
1331
- /**
1332
- * Fulfill all tracked data requests.
1333
- *
1334
- * This is for use with the `TrackData` component during server-side rendering.
1335
- *
1336
- * @throws {Error} If executed outside of server-side rendering.
1337
- * @returns {Promise<void>} A promise that resolves when all tracked requests
1338
- * have been fulfilled.
1339
- */
1340
-
1341
743
  const fulfillAllDataRequests = () => {
1342
744
  if (!Server.isServerSide()) {
1343
745
  return Promise.reject(new Error("Data requests are not tracked when client-side"));
@@ -1345,16 +747,6 @@ const fulfillAllDataRequests = () => {
1345
747
 
1346
748
  return RequestTracker.Default.fulfillTrackedRequests();
1347
749
  };
1348
- /**
1349
- * Indicate if there are unfulfilled tracked requests.
1350
- *
1351
- * This is used in conjunction with `TrackData`.
1352
- *
1353
- * @throws {Error} If executed outside of server-side rendering.
1354
- * @returns {boolean} `true` if there are unfulfilled tracked requests;
1355
- * otherwise, `false`.
1356
- */
1357
-
1358
750
  const hasUnfulfilledRequests = () => {
1359
751
  if (!Server.isServerSide()) {
1360
752
  throw new Error("Data requests are not tracked when client-side");
@@ -1362,21 +754,7 @@ const hasUnfulfilledRequests = () => {
1362
754
 
1363
755
  return RequestTracker.Default.hasUnfulfilledRequests;
1364
756
  };
1365
- /**
1366
- * Remove the request identified from the cached hydration responses.
1367
- *
1368
- * @param {string} id The request ID of the response to remove from the cache.
1369
- */
1370
-
1371
757
  const removeFromCache = id => SsrCache.Default.remove(id);
1372
- /**
1373
- * Remove all cached hydration responses that match the given predicate.
1374
- *
1375
- * @param {(id: string) => boolean} [predicate] The predicate to match against
1376
- * the cached hydration responses. If no predicate is provided, all cached
1377
- * hydration responses will be removed.
1378
- */
1379
-
1380
758
  const removeAllFromCache = predicate => SsrCache.Default.removeAll(predicate);
1381
759
 
1382
760
  export { Data, DataError, DataErrors, GqlError, GqlErrors, GqlRouter, InterceptRequests, RequestFulfillment, ScopedInMemoryCache, SerializableInMemoryCache, Status, TrackData, WhenClientSide, clearSharedCache, fulfillAllDataRequests, hasUnfulfilledRequests, initializeCache, removeAllFromCache, removeFromCache, useCachedEffect, useGql, useHydratableEffect, useServerEffect, useSharedCache };
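
Note: to round out the GraphQL surface in this file, a sketch of wiring `GqlRouter` and `useGql` together. The fetch function, operation shape, and endpoint are assumptions of this example rather than anything the package prescribes; the code above only requires `fetch(operation, variables, context)` to resolve to a `Response`:

```js
import * as React from "react";
import {GqlRouter, useGql} from "@khanacademy/wonder-blocks-data";

// Assumed transport: POST the operation to a /graphql endpoint.
const gqlFetch = (operation, variables, context) =>
    fetch("/graphql", {
        method: "POST",
        headers: {"Content-Type": "application/json"},
        body: JSON.stringify({query: operation, variables, context}),
    });

const CourseName = ({id}) => {
    const gql = useGql();
    const [name, setName] = React.useState(null);

    React.useEffect(() => {
        // gql resolves with the `data` payload, or rejects with the
        // GqlError/DataError kinds constructed in getGqlDataFromResponse.
        gql("query Course($id: ID!) { course(id: $id) { name } }", {
            variables: {id},
        }).then((data) => setName(data.course.name));
    }, [gql, id]);

    return name ? <span>{name}</span> : null;
};

const App = () => (
    <GqlRouter defaultContext={{}} fetch={gqlFetch}>
        <CourseName id="calculus-1" />
    </GqlRouter>
);
```
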
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@khanacademy/wonder-blocks-data",
- "version": "7.0.0",
+ "version": "7.0.1",
  "design": "v1",
  "publishConfig": {
  "access": "public"
@@ -14,14 +14,14 @@
  },
  "dependencies": {
  "@babel/runtime": "^7.16.3",
- "@khanacademy/wonder-blocks-core": "^4.3.0"
+ "@khanacademy/wonder-blocks-core": "^4.3.1"
  },
  "peerDependencies": {
  "@khanacademy/wonder-stuff-core": "^0.1.2",
  "react": "16.14.0"
  },
  "devDependencies": {
- "wb-dev-build-settings": "^0.3.0"
+ "wb-dev-build-settings": "^0.4.0"
  },
  "author": "",
  "license": "MIT"