@salesforce/lds-runtime-bridge 1.233.0 → 1.236.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ldsRuntimeBridge.js +341 -915
- package/package.json +2 -2
package/dist/ldsRuntimeBridge.js
CHANGED
|
@@ -12,7 +12,7 @@
|
|
|
12
12
|
* *******************************************************************************************
|
|
13
13
|
*/
|
|
14
14
|
import { setDefaultLuvio } from 'force/ldsEngine';
|
|
15
|
-
import { StoreKeySet, serializeStructuredKey, Reader, deepFreeze, emitAdapterEvent, InMemoryStore, Environment, Luvio } from 'force/luvioEngine';
|
|
15
|
+
import { StoreKeySet, serializeStructuredKey, StringKeyInMemoryStore, Reader, deepFreeze, emitAdapterEvent, InMemoryStore, Environment, Luvio } from 'force/luvioEngine';
|
|
16
16
|
import { instrumentLuvio } from 'force/ldsInstrumentation';
|
|
17
17
|
import { keyBuilderRecord } from 'force/ldsAdaptersUiapi';
|
|
18
18
|
import '@salesforce/gate/lds.graphqlEvalExcludeStaleRecords';
|
|
@@ -131,7 +131,7 @@ function publishDurableStoreEntries(durableRecords, put, publishMetadata) {
|
|
|
131
131
|
* will refresh the snapshot from network, and then run the results from network
|
|
132
132
|
* through L2 ingestion, returning the subsequent revived snapshot.
|
|
133
133
|
*/
|
|
134
|
-
function reviveSnapshot(baseEnvironment, durableStore, unavailableSnapshot, durableStoreErrorHandler, buildL1Snapshot, reviveMetrics = { l2Trips: [] }) {
|
|
134
|
+
function reviveSnapshot(baseEnvironment, durableStore, unavailableSnapshot, durableStoreErrorHandler, buildL1Snapshot, revivingStore, reviveMetrics = { l2Trips: [] }) {
|
|
135
135
|
const { recordId, select, missingLinks, seenRecords, state } = unavailableSnapshot;
|
|
136
136
|
// L2 can only revive Unfulfilled snapshots that have a selector since they have the
|
|
137
137
|
// info needed to revive (like missingLinks) and rebuild. Otherwise return L1 snapshot.
|
|
@@ -141,10 +141,21 @@ function reviveSnapshot(baseEnvironment, durableStore, unavailableSnapshot, dura
|
|
|
141
141
|
metrics: reviveMetrics,
|
|
142
142
|
});
|
|
143
143
|
}
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
144
|
+
const keysToReviveSet = new StoreKeySet();
|
|
145
|
+
if (revivingStore) {
|
|
146
|
+
// Any stale keys since the last l2 read should be cleared and fetched again
|
|
147
|
+
for (const staleKey of revivingStore.staleEntries) {
|
|
148
|
+
keysToReviveSet.add(staleKey);
|
|
149
|
+
}
|
|
150
|
+
revivingStore.clearStale();
|
|
151
|
+
}
|
|
152
|
+
else {
|
|
153
|
+
// when not using a reviving store:
|
|
154
|
+
// in case L1 store changes/deallocs a record while we are doing the async read
|
|
155
|
+
// we attempt to read all keys from L2 - so combine recordId with any seenRecords
|
|
156
|
+
keysToReviveSet.add(recordId);
|
|
157
|
+
keysToReviveSet.merge(seenRecords);
|
|
158
|
+
}
|
|
148
159
|
keysToReviveSet.merge(missingLinks);
|
|
149
160
|
const keysToRevive = keysToReviveSet.keysAsArray();
|
|
150
161
|
const canonicalKeys = keysToRevive.map((x) => serializeStructuredKey(baseEnvironment.storeGetCanonicalKey(x)));
|
|
@@ -194,7 +205,7 @@ function reviveSnapshot(baseEnvironment, durableStore, unavailableSnapshot, dura
|
|
|
194
205
|
for (let i = 0, len = newKeys.length; i < len; i++) {
|
|
195
206
|
const newSnapshotSeenKey = newKeys[i];
|
|
196
207
|
if (!alreadyRequestedOrRevivedSet.has(newSnapshotSeenKey)) {
|
|
197
|
-
return reviveSnapshot(baseEnvironment, durableStore, snapshot, durableStoreErrorHandler, buildL1Snapshot, reviveMetrics);
|
|
208
|
+
return reviveSnapshot(baseEnvironment, durableStore, snapshot, durableStoreErrorHandler, buildL1Snapshot, revivingStore, reviveMetrics);
|
|
198
209
|
}
|
|
199
210
|
}
|
|
200
211
|
}
|
|
@@ -283,8 +294,9 @@ class DurableTTLStore {
|
|
|
283
294
|
}
|
|
284
295
|
}
|
|
285
296
|
|
|
286
|
-
function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStoreErrorHandler, redirects, additionalDurableStoreOperations = []) {
|
|
297
|
+
function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStoreErrorHandler, redirects, additionalDurableStoreOperations = [], enableDurableMetadataRefresh = false) {
|
|
287
298
|
const durableRecords = create$2(null);
|
|
299
|
+
const refreshedDurableRecords = create$2(null);
|
|
288
300
|
const evictedRecords = create$2(null);
|
|
289
301
|
const { records, metadata: storeMetadata, visitedIds, refreshedIds, } = store.fallbackStringKeyInMemoryStore;
|
|
290
302
|
// TODO: W-8909393 Once metadata is stored in its own segment we need to
|
|
@@ -294,32 +306,36 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
|
|
|
294
306
|
for (let i = 0, len = keys$1.length; i < len; i += 1) {
|
|
295
307
|
const key = keys$1[i];
|
|
296
308
|
const record = records[key];
|
|
309
|
+
const wasVisited = visitedIds[key] !== undefined;
|
|
297
310
|
// this record has been evicted, evict from DS
|
|
298
|
-
if (record === undefined) {
|
|
311
|
+
if (wasVisited && record === undefined) {
|
|
299
312
|
evictedRecords[key] = true;
|
|
300
313
|
continue;
|
|
301
314
|
}
|
|
302
315
|
const metadata = storeMetadata[key];
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
durableRecords[key].metadata = {
|
|
308
|
-
...metadata,
|
|
309
|
-
metadataVersion: DURABLE_METADATA_VERSION,
|
|
310
|
-
};
|
|
311
|
-
}
|
|
316
|
+
const entries = wasVisited === true || enableDurableMetadataRefresh === false
|
|
317
|
+
? durableRecords
|
|
318
|
+
: refreshedDurableRecords;
|
|
319
|
+
setRecordTo(entries, key, record, metadata);
|
|
312
320
|
}
|
|
313
321
|
const durableStoreOperations = additionalDurableStoreOperations;
|
|
314
|
-
// publishes
|
|
315
322
|
const recordKeys = keys$2(durableRecords);
|
|
316
323
|
if (recordKeys.length > 0) {
|
|
324
|
+
// publishes with data
|
|
317
325
|
durableStoreOperations.push({
|
|
318
326
|
type: 'setEntries',
|
|
319
327
|
entries: durableRecords,
|
|
320
328
|
segment: DefaultDurableSegment,
|
|
321
329
|
});
|
|
322
330
|
}
|
|
331
|
+
if (keys$2(refreshedDurableRecords).length > 0) {
|
|
332
|
+
// publishes with only metadata updates
|
|
333
|
+
durableStoreOperations.push({
|
|
334
|
+
type: 'setMetadata',
|
|
335
|
+
entries: refreshedDurableRecords,
|
|
336
|
+
segment: DefaultDurableSegment,
|
|
337
|
+
});
|
|
338
|
+
}
|
|
323
339
|
// redirects
|
|
324
340
|
redirects.forEach((value, key) => {
|
|
325
341
|
durableStoreOperations.push({
|
|
@@ -346,6 +362,17 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
|
|
|
346
362
|
}
|
|
347
363
|
return Promise.resolve();
|
|
348
364
|
}
|
|
365
|
+
function setRecordTo(entries, key, record, metadata) {
|
|
366
|
+
entries[key] = {
|
|
367
|
+
data: record,
|
|
368
|
+
};
|
|
369
|
+
if (metadata !== undefined) {
|
|
370
|
+
entries[key].metadata = {
|
|
371
|
+
...metadata,
|
|
372
|
+
metadataVersion: DURABLE_METADATA_VERSION,
|
|
373
|
+
};
|
|
374
|
+
}
|
|
375
|
+
}
|
|
349
376
|
|
|
350
377
|
const DurableEnvironmentEventDiscriminator = 'durable';
|
|
351
378
|
function emitDurableEnvironmentAdapterEvent(eventData, observers) {
|
|
@@ -390,6 +417,50 @@ async function reviveRedirects(durableStore, env) {
|
|
|
390
417
|
}
|
|
391
418
|
}
|
|
392
419
|
|
|
420
|
+
function buildRevivingStagingStore(upstreamStore) {
|
|
421
|
+
const localStore = new StringKeyInMemoryStore();
|
|
422
|
+
const staleEntries = new Set();
|
|
423
|
+
function readEntry(key) {
|
|
424
|
+
if (typeof key !== 'string') {
|
|
425
|
+
return upstreamStore.readEntry(key);
|
|
426
|
+
}
|
|
427
|
+
let storeEntry = localStore.readEntry(key);
|
|
428
|
+
if (!storeEntry) {
|
|
429
|
+
// read from upstream store...
|
|
430
|
+
storeEntry = upstreamStore.readEntry(key);
|
|
431
|
+
// put it in our store to avoid it getting evicted prior to the next durable store read
|
|
432
|
+
localStore.put(key, storeEntry);
|
|
433
|
+
}
|
|
434
|
+
return storeEntry;
|
|
435
|
+
}
|
|
436
|
+
// Entries are marked stale by the durable store change listener. They are not
|
|
437
|
+
// immediately evicted so as to not result in a cache miss during a rebuild.
|
|
438
|
+
// The revive process will clear stale entries and read them from the durable store
|
|
439
|
+
// on the next revive loop.
|
|
440
|
+
function markStale(key) {
|
|
441
|
+
staleEntries.add(key);
|
|
442
|
+
}
|
|
443
|
+
// The revive loop clears stale entries right before reading from the durable store.
|
|
444
|
+
// Any stale entries will be revived to ensure they are present in L1 and match the
|
|
445
|
+
// latest data.
|
|
446
|
+
function clearStale() {
|
|
447
|
+
for (const key of staleEntries) {
|
|
448
|
+
localStore.dealloc(key);
|
|
449
|
+
}
|
|
450
|
+
staleEntries.clear();
|
|
451
|
+
}
|
|
452
|
+
// All functions other than `readEntry` pass through to the upstream store.
|
|
453
|
+
// A reviving store is only "active" during a call to `environment.storeLookup`, and will
|
|
454
|
+
// be used by the reader attempting to build an L1 snapshot. Immediately after the L1 rebuild
|
|
455
|
+
// the reviving store becomes inactive other than receiving change notifications.
|
|
456
|
+
return create$2(upstreamStore, {
|
|
457
|
+
readEntry: { value: readEntry },
|
|
458
|
+
markStale: { value: markStale },
|
|
459
|
+
clearStale: { value: clearStale },
|
|
460
|
+
staleEntries: { value: staleEntries },
|
|
461
|
+
});
|
|
462
|
+
}
|
|
463
|
+
|
|
393
464
|
const AdapterContextSegment = 'ADAPTER-CONTEXT';
|
|
394
465
|
const ADAPTER_CONTEXT_ID_SUFFIX = '__NAMED_CONTEXT';
|
|
395
466
|
async function reviveOrCreateContext(adapterId, durableStore, durableStoreErrorHandler, contextStores, pendingContextStoreKeys, onContextLoaded) {
|
|
@@ -445,14 +516,16 @@ function isUnfulfilledSnapshot(cachedSnapshotResult) {
|
|
|
445
516
|
* @param durableStore A DurableStore implementation
|
|
446
517
|
* @param instrumentation An instrumentation function implementation
|
|
447
518
|
*/
|
|
448
|
-
function makeDurable(environment, { durableStore, instrumentation }) {
|
|
449
|
-
let
|
|
519
|
+
function makeDurable(environment, { durableStore, instrumentation, useRevivingStore, enableDurableMetadataRefresh = false, }) {
|
|
520
|
+
let stagingStore = null;
|
|
450
521
|
const durableTTLStore = new DurableTTLStore(durableStore);
|
|
451
522
|
const mergeKeysPromiseMap = new Map();
|
|
452
523
|
// When a context store is mutated we write it to L2, which causes DS on change
|
|
453
524
|
// event. If this instance of makeDurable caused that L2 write we can ignore that
|
|
454
525
|
// on change event. This Set helps us do that.
|
|
455
526
|
const pendingContextStoreKeys = new Set();
|
|
527
|
+
// Reviving stores are tracked so that they can be notified of durable store change notifications.
|
|
528
|
+
const revivingStores = new Set();
|
|
456
529
|
// redirects that need to be flushed to the durable store
|
|
457
530
|
const pendingStoreRedirects = new Map();
|
|
458
531
|
const contextStores = create$2(null);
|
|
@@ -478,6 +551,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
478
551
|
const defaultSegmentKeys = [];
|
|
479
552
|
const adapterContextSegmentKeys = [];
|
|
480
553
|
const redirectSegmentKeys = [];
|
|
554
|
+
const metadataRefreshSegmentKeys = [];
|
|
481
555
|
const messagingSegmentKeys = [];
|
|
482
556
|
let shouldBroadcast = false;
|
|
483
557
|
for (let i = 0, len = changes.length; i < len; i++) {
|
|
@@ -485,7 +559,12 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
485
559
|
// we only care about changes to the data which is stored in the default
|
|
486
560
|
// segment or the adapter context
|
|
487
561
|
if (change.segment === DefaultDurableSegment) {
|
|
488
|
-
|
|
562
|
+
if (change.type === 'setMetadata') {
|
|
563
|
+
metadataRefreshSegmentKeys.push(...change.ids);
|
|
564
|
+
}
|
|
565
|
+
else {
|
|
566
|
+
defaultSegmentKeys.push(...change.ids);
|
|
567
|
+
}
|
|
489
568
|
}
|
|
490
569
|
else if (change.segment === AdapterContextSegment) {
|
|
491
570
|
adapterContextSegmentKeys.push(...change.ids);
|
|
@@ -549,9 +628,26 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
549
628
|
// and go through an entire broadcast/revive cycle for unchanged data
|
|
550
629
|
// call base environment storeEvict so this evict is not tracked for durable deletion
|
|
551
630
|
environment.storeEvict(key);
|
|
631
|
+
for (const revivingStore of revivingStores) {
|
|
632
|
+
revivingStore.markStale(key);
|
|
633
|
+
}
|
|
552
634
|
}
|
|
553
635
|
shouldBroadcast = true;
|
|
554
636
|
}
|
|
637
|
+
// process metadata only refreshes
|
|
638
|
+
if (metadataRefreshSegmentKeys.length > 0) {
|
|
639
|
+
const entries = await durableStore.getMetadata(metadataRefreshSegmentKeys, DefaultDurableSegment);
|
|
640
|
+
if (entries !== undefined) {
|
|
641
|
+
const entryKeys = keys$2(entries);
|
|
642
|
+
for (let i = 0, len = entryKeys.length; i < len; i++) {
|
|
643
|
+
const entryKey = entryKeys[i];
|
|
644
|
+
const { metadata } = entries[entryKey];
|
|
645
|
+
if (metadata !== undefined) {
|
|
646
|
+
environment.putStoreMetadata(entryKey, metadata, false);
|
|
647
|
+
}
|
|
648
|
+
}
|
|
649
|
+
}
|
|
650
|
+
}
|
|
555
651
|
if (shouldBroadcast) {
|
|
556
652
|
await environment.storeBroadcast(rebuildSnapshot, environment.snapshotAvailable);
|
|
557
653
|
}
|
|
@@ -577,10 +673,10 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
577
673
|
};
|
|
578
674
|
const storePublish = function (key, data) {
|
|
579
675
|
validateNotDisposed();
|
|
580
|
-
if (
|
|
581
|
-
|
|
676
|
+
if (stagingStore === null) {
|
|
677
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
582
678
|
}
|
|
583
|
-
|
|
679
|
+
stagingStore.publish(key, data);
|
|
584
680
|
// remove record from main luvio L1 cache while we are on the synchronous path
|
|
585
681
|
// because we do not want some other code attempting to use the
|
|
586
682
|
// in-memory values before the durable store onChanged handler
|
|
@@ -589,26 +685,26 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
589
685
|
};
|
|
590
686
|
const publishStoreMetadata = function (recordId, storeMetadata) {
|
|
591
687
|
validateNotDisposed();
|
|
592
|
-
if (
|
|
593
|
-
|
|
688
|
+
if (stagingStore === null) {
|
|
689
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
594
690
|
}
|
|
595
|
-
|
|
691
|
+
stagingStore.publishMetadata(recordId, storeMetadata);
|
|
596
692
|
};
|
|
597
693
|
const storeIngest = function (key, ingest, response, luvio) {
|
|
598
694
|
validateNotDisposed();
|
|
599
695
|
// we don't ingest to the luvio L1 store from network directly, we ingest to
|
|
600
696
|
// L2 and let DurableStore on change event revive keys into luvio L1 store
|
|
601
|
-
if (
|
|
602
|
-
|
|
697
|
+
if (stagingStore === null) {
|
|
698
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
603
699
|
}
|
|
604
|
-
environment.storeIngest(key, ingest, response, luvio,
|
|
700
|
+
environment.storeIngest(key, ingest, response, luvio, stagingStore);
|
|
605
701
|
};
|
|
606
702
|
const storeIngestError = function (key, errorSnapshot, storeMetadataParams, _storeOverride) {
|
|
607
703
|
validateNotDisposed();
|
|
608
|
-
if (
|
|
609
|
-
|
|
704
|
+
if (stagingStore === null) {
|
|
705
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
610
706
|
}
|
|
611
|
-
environment.storeIngestError(key, errorSnapshot, storeMetadataParams,
|
|
707
|
+
environment.storeIngestError(key, errorSnapshot, storeMetadataParams, stagingStore);
|
|
612
708
|
};
|
|
613
709
|
const storeBroadcast = function (_rebuildSnapshot, _snapshotDataAvailable) {
|
|
614
710
|
validateNotDisposed();
|
|
@@ -619,19 +715,19 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
619
715
|
};
|
|
620
716
|
const publishChangesToDurableStore = function (additionalDurableStoreOperations) {
|
|
621
717
|
validateNotDisposed();
|
|
622
|
-
if (
|
|
718
|
+
if (stagingStore === null) {
|
|
623
719
|
return Promise.resolve();
|
|
624
720
|
}
|
|
625
|
-
const promise = flushInMemoryStoreValuesToDurableStore(
|
|
721
|
+
const promise = flushInMemoryStoreValuesToDurableStore(stagingStore, durableStore, durableStoreErrorHandler, new Map(pendingStoreRedirects), additionalDurableStoreOperations, enableDurableMetadataRefresh);
|
|
626
722
|
pendingStoreRedirects.clear();
|
|
627
|
-
|
|
723
|
+
stagingStore = null;
|
|
628
724
|
return promise;
|
|
629
725
|
};
|
|
630
726
|
const storeLookup = function (sel, createSnapshot, refresh, ttlStrategy) {
|
|
631
727
|
validateNotDisposed();
|
|
632
|
-
// if this lookup is right after an ingest there will be a staging store
|
|
633
|
-
if (
|
|
634
|
-
const reader = new Reader(
|
|
728
|
+
// if this lookup is right after an ingest or during a revive there will be a staging store
|
|
729
|
+
if (stagingStore !== null) {
|
|
730
|
+
const reader = new Reader(stagingStore, sel.variables, refresh, undefined, ttlStrategy);
|
|
635
731
|
return reader.read(sel);
|
|
636
732
|
}
|
|
637
733
|
// otherwise this is from buildCachedSnapshot and we should use the luvio
|
|
@@ -640,24 +736,24 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
640
736
|
};
|
|
641
737
|
const storeEvict = function (key) {
|
|
642
738
|
validateNotDisposed();
|
|
643
|
-
if (
|
|
644
|
-
|
|
739
|
+
if (stagingStore === null) {
|
|
740
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
645
741
|
}
|
|
646
|
-
|
|
742
|
+
stagingStore.evict(key);
|
|
647
743
|
};
|
|
648
744
|
const getNode = function (key) {
|
|
649
745
|
validateNotDisposed();
|
|
650
|
-
if (
|
|
651
|
-
|
|
746
|
+
if (stagingStore === null) {
|
|
747
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
652
748
|
}
|
|
653
|
-
return environment.getNode(key,
|
|
749
|
+
return environment.getNode(key, stagingStore);
|
|
654
750
|
};
|
|
655
751
|
const wrapNormalizedGraphNode = function (normalized) {
|
|
656
752
|
validateNotDisposed();
|
|
657
|
-
if (
|
|
658
|
-
|
|
753
|
+
if (stagingStore === null) {
|
|
754
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
659
755
|
}
|
|
660
|
-
return environment.wrapNormalizedGraphNode(normalized,
|
|
756
|
+
return environment.wrapNormalizedGraphNode(normalized, stagingStore);
|
|
661
757
|
};
|
|
662
758
|
const rebuildSnapshot = function (snapshot, onRebuild) {
|
|
663
759
|
validateNotDisposed();
|
|
@@ -669,7 +765,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
669
765
|
return;
|
|
670
766
|
}
|
|
671
767
|
// Do an L2 revive and emit to subscriber using the callback.
|
|
672
|
-
|
|
768
|
+
reviveSnapshotWrapper(rebuilt, () => {
|
|
673
769
|
// reviveSnapshot will revive into L1, and since "records" is a reference
|
|
674
770
|
// (and not a copy) to the L1 records we can use it for rebuild
|
|
675
771
|
let rebuiltSnap;
|
|
@@ -710,10 +806,10 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
710
806
|
// the next publishChangesToDurableStore. NOTE: we don't need to call
|
|
711
807
|
// redirect on the base environment store because staging store and base
|
|
712
808
|
// L1 store share the same redirect and reverseRedirectKeys
|
|
713
|
-
if (
|
|
714
|
-
|
|
809
|
+
if (stagingStore === null) {
|
|
810
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
715
811
|
}
|
|
716
|
-
|
|
812
|
+
stagingStore.redirect(existingKey, canonicalKey);
|
|
717
813
|
};
|
|
718
814
|
const storeSetTTLOverride = function (namespace, representationName, ttl) {
|
|
719
815
|
validateNotDisposed();
|
|
@@ -754,7 +850,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
754
850
|
if (isUnfulfilledSnapshot(snapshot)) {
|
|
755
851
|
const start = Date.now();
|
|
756
852
|
emitDurableEnvironmentAdapterEvent({ type: 'l2-revive-start' }, adapterRequestContext.eventObservers);
|
|
757
|
-
const revivedSnapshot =
|
|
853
|
+
const revivedSnapshot = reviveSnapshotWrapper(snapshot, () => injectedStoreLookup(snapshot.select, snapshot.refresh)).then((result) => {
|
|
758
854
|
emitDurableEnvironmentAdapterEvent({
|
|
759
855
|
type: 'l2-revive-end',
|
|
760
856
|
snapshot: result.snapshot,
|
|
@@ -779,15 +875,15 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
779
875
|
};
|
|
780
876
|
const getIngestStagingStoreRecords = function () {
|
|
781
877
|
validateNotDisposed();
|
|
782
|
-
if (
|
|
783
|
-
return
|
|
878
|
+
if (stagingStore !== null) {
|
|
879
|
+
return stagingStore.fallbackStringKeyInMemoryStore.records;
|
|
784
880
|
}
|
|
785
881
|
return {};
|
|
786
882
|
};
|
|
787
883
|
const getIngestStagingStoreMetadata = function () {
|
|
788
884
|
validateNotDisposed();
|
|
789
|
-
if (
|
|
790
|
-
return
|
|
885
|
+
if (stagingStore !== null) {
|
|
886
|
+
return stagingStore.fallbackStringKeyInMemoryStore.metadata;
|
|
791
887
|
}
|
|
792
888
|
return {};
|
|
793
889
|
};
|
|
@@ -826,22 +922,20 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
826
922
|
}
|
|
827
923
|
await Promise.all(pendingPromises);
|
|
828
924
|
const entries = await durableStore.getEntries(keysToReviveAsArray, DefaultDurableSegment);
|
|
829
|
-
|
|
925
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
830
926
|
publishDurableStoreEntries(entries, (key, record) => {
|
|
831
927
|
if (typeof key === 'string') {
|
|
832
|
-
|
|
833
|
-
record;
|
|
928
|
+
stagingStore.fallbackStringKeyInMemoryStore.records[key] = record;
|
|
834
929
|
}
|
|
835
930
|
else {
|
|
836
|
-
|
|
931
|
+
stagingStore.recordsMap.set(key, record);
|
|
837
932
|
}
|
|
838
933
|
}, (key, metadata) => {
|
|
839
934
|
if (typeof key === 'string') {
|
|
840
|
-
|
|
841
|
-
metadata;
|
|
935
|
+
stagingStore.fallbackStringKeyInMemoryStore.metadata[key] = metadata;
|
|
842
936
|
}
|
|
843
937
|
else {
|
|
844
|
-
|
|
938
|
+
stagingStore.metadataMap.set(key, metadata);
|
|
845
939
|
}
|
|
846
940
|
});
|
|
847
941
|
snapshotFromMemoryIngest = await ingestAndBroadcastFunc();
|
|
@@ -870,7 +964,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
870
964
|
// we aren't doing any merging so we don't have to synchronize, the
|
|
871
965
|
// underlying DurableStore implementation takes care of R/W sync
|
|
872
966
|
// so all we have to do is ingest then write to L2
|
|
873
|
-
|
|
967
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
874
968
|
snapshotFromMemoryIngest = await ingestAndBroadcastFunc();
|
|
875
969
|
}
|
|
876
970
|
if (snapshotFromMemoryIngest === undefined) {
|
|
@@ -881,12 +975,12 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
881
975
|
}
|
|
882
976
|
// if snapshot from staging store lookup is unfulfilled then do an L2 lookup
|
|
883
977
|
const { select, refresh } = snapshotFromMemoryIngest;
|
|
884
|
-
const result = await
|
|
978
|
+
const result = await reviveSnapshotWrapper(snapshotFromMemoryIngest, () => environment.storeLookup(select, environment.createSnapshot, refresh));
|
|
885
979
|
return result.snapshot;
|
|
886
980
|
};
|
|
887
981
|
const handleErrorResponse = async function (ingestAndBroadcastFunc) {
|
|
888
982
|
validateNotDisposed();
|
|
889
|
-
|
|
983
|
+
stagingStore = buildIngestStagingStore(environment);
|
|
890
984
|
return ingestAndBroadcastFunc();
|
|
891
985
|
};
|
|
892
986
|
const getNotifyChangeStoreEntries = function (keys) {
|
|
@@ -937,6 +1031,27 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
937
1031
|
await durableStore.setEntries({ notifyStoreUpdateAvailable: { data: entryKeys } }, MessagingDurableSegment);
|
|
938
1032
|
return Promise.resolve(undefined);
|
|
939
1033
|
};
|
|
1034
|
+
const reviveSnapshotWrapper = function (unavailableSnapshot, buildL1Snapshot) {
|
|
1035
|
+
let revivingStore = undefined;
|
|
1036
|
+
if (useRevivingStore) {
|
|
1037
|
+
// NOTE: `store` is private, there doesn't seem to be a better,
|
|
1038
|
+
// cleaner way of accessing it from a derived environment.
|
|
1039
|
+
let baseStore = environment.store;
|
|
1040
|
+
// If we're rebuilding during an ingest, the existing staging store should be the base store.
|
|
1041
|
+
if (stagingStore) {
|
|
1042
|
+
baseStore = stagingStore;
|
|
1043
|
+
}
|
|
1044
|
+
let revivingStore = buildRevivingStagingStore(baseStore);
|
|
1045
|
+
revivingStores.add(revivingStore);
|
|
1046
|
+
}
|
|
1047
|
+
return reviveSnapshot(environment, durableStore, unavailableSnapshot, durableStoreErrorHandler, () => {
|
|
1048
|
+
const tempStore = stagingStore;
|
|
1049
|
+
const result = buildL1Snapshot();
|
|
1050
|
+
stagingStore = tempStore;
|
|
1051
|
+
return result;
|
|
1052
|
+
}, revivingStore).finally(() => {
|
|
1053
|
+
});
|
|
1054
|
+
};
|
|
940
1055
|
// set the default cache policy of the base environment
|
|
941
1056
|
environment.setDefaultCachePolicy({
|
|
942
1057
|
type: 'stale-while-revalidate',
|
|
@@ -971,7 +1086,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
971
1086
|
});
|
|
972
1087
|
}
|
|
973
1088
|
|
|
974
|
-
const { keys: keys$1, create: create$1, assign: assign$1, entries } = Object;
|
|
1089
|
+
const { keys: keys$1, create: create$1, assign: assign$1, entries, values: values$1 } = Object;
|
|
975
1090
|
const { stringify, parse } = JSON;
|
|
976
1091
|
|
|
977
1092
|
function selectColumnsFromTableWhereKeyIn(columnNames, table, keyColumnName, whereIn) {
|
|
@@ -1005,6 +1120,22 @@ class LdsDataTable {
|
|
|
1005
1120
|
}, reject);
|
|
1006
1121
|
});
|
|
1007
1122
|
}
|
|
1123
|
+
getMetadataByKeys(keys) {
|
|
1124
|
+
const query = selectColumnsFromTableWhereKeyIn([COLUMN_NAME_KEY$2, COLUMN_NAME_METADATA$1], this.tableName, COLUMN_NAME_KEY$2, keys);
|
|
1125
|
+
return new Promise((resolve, reject) => {
|
|
1126
|
+
this.plugin.query(query, keys, (results) => {
|
|
1127
|
+
resolve(results.rows.reduce((entries, row) => {
|
|
1128
|
+
const [key, stringifiedMetadata] = row;
|
|
1129
|
+
if (stringifiedMetadata !== undefined) {
|
|
1130
|
+
entries[key] = {
|
|
1131
|
+
metadata: parse(stringifiedMetadata),
|
|
1132
|
+
};
|
|
1133
|
+
}
|
|
1134
|
+
return entries;
|
|
1135
|
+
}, {}));
|
|
1136
|
+
}, reject);
|
|
1137
|
+
});
|
|
1138
|
+
}
|
|
1008
1139
|
getAll() {
|
|
1009
1140
|
return new Promise((resolve, reject) => {
|
|
1010
1141
|
this.plugin.query(this.getAllQuery, [], (x) => {
|
|
@@ -1031,6 +1162,24 @@ class LdsDataTable {
|
|
|
1031
1162
|
}, []),
|
|
1032
1163
|
};
|
|
1033
1164
|
}
|
|
1165
|
+
metadataToUpdateOperations(entries, segment) {
|
|
1166
|
+
return {
|
|
1167
|
+
type: 'update',
|
|
1168
|
+
table: this.tableName,
|
|
1169
|
+
keyColumn: COLUMN_NAME_KEY$2,
|
|
1170
|
+
context: {
|
|
1171
|
+
segment,
|
|
1172
|
+
type: 'setMetadata',
|
|
1173
|
+
},
|
|
1174
|
+
columns: [COLUMN_NAME_METADATA$1],
|
|
1175
|
+
values: keys$1(entries).reduce((values, key) => {
|
|
1176
|
+
const { metadata } = entries[key];
|
|
1177
|
+
const row = [metadata ? stringify(metadata) : null];
|
|
1178
|
+
values[key] = row;
|
|
1179
|
+
return values;
|
|
1180
|
+
}, {}),
|
|
1181
|
+
};
|
|
1182
|
+
}
|
|
1034
1183
|
mapToDurableEntries(sqliteResult) {
|
|
1035
1184
|
return sqliteResult.rows.reduce((entries, row) => {
|
|
1036
1185
|
const [key, stringifiedData, stringifiedMetadata] = row;
|
|
@@ -1077,6 +1226,25 @@ class LdsInternalDataTable {
|
|
|
1077
1226
|
}, reject);
|
|
1078
1227
|
});
|
|
1079
1228
|
}
|
|
1229
|
+
getMetadataByKeys(keys, namespace) {
|
|
1230
|
+
if (namespace === undefined) {
|
|
1231
|
+
throw Error('LdsInternalDataTable requires namespace');
|
|
1232
|
+
}
|
|
1233
|
+
const query = selectColumnsFromTableWhereKeyInNamespaced([COLUMN_NAME_KEY$1, COLUMN_NAME_METADATA], this.tableName, COLUMN_NAME_KEY$1, keys, COLUMN_NAME_NAMESPACE);
|
|
1234
|
+
return new Promise((resolve, reject) => {
|
|
1235
|
+
this.plugin.query(query, [namespace].concat(keys), (results) => {
|
|
1236
|
+
resolve(results.rows.reduce((entries, row) => {
|
|
1237
|
+
const [key, stringifiedMetadata] = row;
|
|
1238
|
+
if (stringifiedMetadata !== undefined) {
|
|
1239
|
+
entries[key] = {
|
|
1240
|
+
metadata: parse(stringifiedMetadata),
|
|
1241
|
+
};
|
|
1242
|
+
}
|
|
1243
|
+
return entries;
|
|
1244
|
+
}, {}));
|
|
1245
|
+
}, reject);
|
|
1246
|
+
});
|
|
1247
|
+
}
|
|
1080
1248
|
getAll(namespace) {
|
|
1081
1249
|
return new Promise((resolve, reject) => {
|
|
1082
1250
|
this.plugin.query(this.getAllQuery, [namespace], (x) => {
|
|
@@ -1110,6 +1278,42 @@ class LdsInternalDataTable {
|
|
|
1110
1278
|
}, []),
|
|
1111
1279
|
};
|
|
1112
1280
|
}
|
|
1281
|
+
metadataToUpdateOperations(entries, segment) {
|
|
1282
|
+
return {
|
|
1283
|
+
type: 'update',
|
|
1284
|
+
table: this.tableName,
|
|
1285
|
+
keyColumn: COLUMN_NAME_KEY$1,
|
|
1286
|
+
context: {
|
|
1287
|
+
segment,
|
|
1288
|
+
type: 'setMetadata',
|
|
1289
|
+
},
|
|
1290
|
+
columns: [COLUMN_NAME_METADATA],
|
|
1291
|
+
values: keys$1(entries).reduce((values, key) => {
|
|
1292
|
+
const { metadata } = entries[key];
|
|
1293
|
+
const row = [metadata ? stringify(metadata) : null];
|
|
1294
|
+
values[key] = row;
|
|
1295
|
+
return values;
|
|
1296
|
+
}, {}),
|
|
1297
|
+
};
|
|
1298
|
+
}
|
|
1299
|
+
metadataToUpdateSQLQueries(entries, segment) {
|
|
1300
|
+
return keys$1(entries).reduce((accu, key) => {
|
|
1301
|
+
const { metadata } = entries[key];
|
|
1302
|
+
if (metadata !== undefined) {
|
|
1303
|
+
accu.push({
|
|
1304
|
+
sql: `UPDATE ${this.tableName} SET ${COLUMN_NAME_METADATA} = ? WHERE (${COLUMN_NAME_KEY$1} IS ? AND ${COLUMN_NAME_NAMESPACE} IS ?)`,
|
|
1305
|
+
params: [stringify(metadata), key, segment],
|
|
1306
|
+
change: {
|
|
1307
|
+
ids: [key],
|
|
1308
|
+
segment,
|
|
1309
|
+
type: 'setMetadata',
|
|
1310
|
+
isExternalChange: false,
|
|
1311
|
+
},
|
|
1312
|
+
});
|
|
1313
|
+
}
|
|
1314
|
+
return accu;
|
|
1315
|
+
}, []);
|
|
1316
|
+
}
|
|
1113
1317
|
mapToDurableEntries(sqliteResult) {
|
|
1114
1318
|
return sqliteResult.rows.reduce((entries, row) => {
|
|
1115
1319
|
const [key, stringifiedData, stringifiedMetadata] = row;
|
|
@@ -1146,9 +1350,16 @@ class NimbusSqliteStore {
|
|
|
1146
1350
|
});
|
|
1147
1351
|
});
|
|
1148
1352
|
}
|
|
1353
|
+
batchQuery(queries) {
|
|
1354
|
+
const promises = queries.map((q) => this.query(q.sql, q.params));
|
|
1355
|
+
return Promise.all(promises);
|
|
1356
|
+
}
|
|
1149
1357
|
async getEntries(entryIds, segment) {
|
|
1150
1358
|
return this.getTable(segment).getByKeys(entryIds, segment);
|
|
1151
1359
|
}
|
|
1360
|
+
async getMetadata(entryIds, segment) {
|
|
1361
|
+
return this.getTable(segment).getMetadataByKeys(entryIds, segment);
|
|
1362
|
+
}
|
|
1152
1363
|
getAllEntries(segment) {
|
|
1153
1364
|
return this.getTable(segment).getAll(segment);
|
|
1154
1365
|
}
|
|
@@ -1157,12 +1368,30 @@ class NimbusSqliteStore {
|
|
|
1157
1368
|
const upsertOperation = table.entriesToUpsertOperations(entries, segment);
|
|
1158
1369
|
return this.batchOperationAsPromise([upsertOperation]);
|
|
1159
1370
|
}
|
|
1371
|
+
setMetadata(entries, segment) {
|
|
1372
|
+
const table = this.getTable(segment);
|
|
1373
|
+
const operation = this.plugin.supportsBatchUpdates === undefined ||
|
|
1374
|
+
this.plugin.supportsBatchUpdates() === false
|
|
1375
|
+
? table.entriesToUpsertOperations(entries, segment)
|
|
1376
|
+
: table.metadataToUpdateOperations(entries, segment);
|
|
1377
|
+
return this.batchOperationAsPromise([operation]);
|
|
1378
|
+
}
|
|
1160
1379
|
batchOperations(operations) {
|
|
1161
1380
|
const sqliteOperations = operations.reduce((acc, cur) => {
|
|
1162
1381
|
if (cur.type === 'setEntries') {
|
|
1163
1382
|
const table = this.getTable(cur.segment);
|
|
1164
1383
|
acc.push(table.entriesToUpsertOperations(cur.entries, cur.segment));
|
|
1165
1384
|
}
|
|
1385
|
+
else if (cur.type === 'setMetadata') {
|
|
1386
|
+
const table = this.getTable(cur.segment);
|
|
1387
|
+
if (this.plugin.supportsBatchUpdates === undefined ||
|
|
1388
|
+
this.plugin.supportsBatchUpdates() === false) {
|
|
1389
|
+
acc.push(table.entriesToUpsertOperations(cur.entries, cur.segment));
|
|
1390
|
+
}
|
|
1391
|
+
else {
|
|
1392
|
+
acc.push(table.metadataToUpdateOperations(cur.entries, cur.segment));
|
|
1393
|
+
}
|
|
1394
|
+
}
|
|
1166
1395
|
else {
|
|
1167
1396
|
acc.push(this.idsToDeleteOperation(cur.ids, cur.segment));
|
|
1168
1397
|
}
|
|
@@ -1179,8 +1408,15 @@ class NimbusSqliteStore {
|
|
|
1179
1408
|
this.plugin
|
|
1180
1409
|
.registerOnChangedListener(async (changes) => {
|
|
1181
1410
|
const durableChanges = changes.map((c) => {
|
|
1411
|
+
let type = c.type === 'upsert' ? 'setEntries' : 'evictEntries';
|
|
1412
|
+
// if our context contains a type then set that as our main level type
|
|
1413
|
+
// allows us in the future of updates to specify the segment change happening
|
|
1414
|
+
// example being update call on metadata only or updating data
|
|
1415
|
+
if (c.type === 'update' && c.context.type !== undefined) {
|
|
1416
|
+
type = c.context.type;
|
|
1417
|
+
}
|
|
1182
1418
|
return {
|
|
1183
|
-
type
|
|
1419
|
+
type,
|
|
1184
1420
|
ids: c.keys,
|
|
1185
1421
|
isExternalChange: false,
|
|
1186
1422
|
segment: c.context.segment,
|
|
@@ -1274,6 +1510,7 @@ function isEntryDurableRecordRepresentation(entry, key) {
|
|
|
1274
1510
|
* For full license text, see the LICENSE.txt file
|
|
1275
1511
|
*/
|
|
1276
1512
|
|
|
1513
|
+
|
|
1277
1514
|
var DraftActionStatus;
|
|
1278
1515
|
(function (DraftActionStatus) {
|
|
1279
1516
|
DraftActionStatus["Pending"] = "pending";
|
|
@@ -1393,6 +1630,7 @@ var DraftQueueOperationType;
|
|
|
1393
1630
|
* Returns true if the value acts like a Promise, i.e. has a "then" function,
|
|
1394
1631
|
* otherwise returns false.
|
|
1395
1632
|
*/
|
|
1633
|
+
// eslint-disable-next-line no-redeclare
|
|
1396
1634
|
|
|
1397
1635
|
function _typeof$4(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof$4 = function _typeof(obj) { return typeof obj; }; } else { _typeof$4 = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof$4(obj); }
|
|
1398
1636
|
|
|
@@ -1936,38 +2174,6 @@ defineInspect(Token);
|
|
|
1936
2174
|
function isNode(maybeNode) {
|
|
1937
2175
|
return maybeNode != null && typeof maybeNode.kind === 'string';
|
|
1938
2176
|
}
|
|
1939
|
-
/**
|
|
1940
|
-
* The list of all possible AST node types.
|
|
1941
|
-
*/
|
|
1942
|
-
|
|
1943
|
-
/**
|
|
1944
|
-
* An exported enum describing the different kinds of tokens that the
|
|
1945
|
-
* lexer emits.
|
|
1946
|
-
*/
|
|
1947
|
-
Object.freeze({
|
|
1948
|
-
SOF: '<SOF>',
|
|
1949
|
-
EOF: '<EOF>',
|
|
1950
|
-
BANG: '!',
|
|
1951
|
-
DOLLAR: '$',
|
|
1952
|
-
AMP: '&',
|
|
1953
|
-
PAREN_L: '(',
|
|
1954
|
-
PAREN_R: ')',
|
|
1955
|
-
SPREAD: '...',
|
|
1956
|
-
COLON: ':',
|
|
1957
|
-
EQUALS: '=',
|
|
1958
|
-
AT: '@',
|
|
1959
|
-
BRACKET_L: '[',
|
|
1960
|
-
BRACKET_R: ']',
|
|
1961
|
-
BRACE_L: '{',
|
|
1962
|
-
PIPE: '|',
|
|
1963
|
-
BRACE_R: '}',
|
|
1964
|
-
NAME: 'Name',
|
|
1965
|
-
INT: 'Int',
|
|
1966
|
-
FLOAT: 'Float',
|
|
1967
|
-
STRING: 'String',
|
|
1968
|
-
BLOCK_STRING: 'BlockString',
|
|
1969
|
-
COMMENT: 'Comment'
|
|
1970
|
-
});
|
|
1971
2177
|
/**
|
|
1972
2178
|
* The enum type representing the token kinds values.
|
|
1973
2179
|
*/
|
|
@@ -3706,9 +3912,6 @@ function argsToArgsConfig(args) {
|
|
|
3706
3912
|
};
|
|
3707
3913
|
});
|
|
3708
3914
|
}
|
|
3709
|
-
function isRequiredArgument(arg) {
|
|
3710
|
-
return isNonNullType(arg.type) && arg.defaultValue === undefined;
|
|
3711
|
-
}
|
|
3712
3915
|
|
|
3713
3916
|
/**
|
|
3714
3917
|
* Interface Type Definition
|
|
@@ -5289,7 +5492,7 @@ defineInspect(GraphQLDirective);
|
|
|
5289
5492
|
/**
|
|
5290
5493
|
* Used to conditionally include fields or fragments.
|
|
5291
5494
|
*/
|
|
5292
|
-
|
|
5495
|
+
new GraphQLDirective({
|
|
5293
5496
|
name: 'include',
|
|
5294
5497
|
description: 'Directs the executor to include this field or fragment only when the `if` argument is true.',
|
|
5295
5498
|
locations: [DirectiveLocation.FIELD, DirectiveLocation.FRAGMENT_SPREAD, DirectiveLocation.INLINE_FRAGMENT],
|
|
@@ -5304,7 +5507,7 @@ var GraphQLIncludeDirective = new GraphQLDirective({
|
|
|
5304
5507
|
* Used to conditionally skip (exclude) fields or fragments.
|
|
5305
5508
|
*/
|
|
5306
5509
|
|
|
5307
|
-
|
|
5510
|
+
new GraphQLDirective({
|
|
5308
5511
|
name: 'skip',
|
|
5309
5512
|
description: 'Directs the executor to skip this field or fragment when the `if` argument is true.',
|
|
5310
5513
|
locations: [DirectiveLocation.FIELD, DirectiveLocation.FRAGMENT_SPREAD, DirectiveLocation.INLINE_FRAGMENT],
|
|
@@ -5324,7 +5527,7 @@ var DEFAULT_DEPRECATION_REASON = 'No longer supported';
|
|
|
5324
5527
|
* Used to declare element of a GraphQL schema as deprecated.
|
|
5325
5528
|
*/
|
|
5326
5529
|
|
|
5327
|
-
|
|
5530
|
+
new GraphQLDirective({
|
|
5328
5531
|
name: 'deprecated',
|
|
5329
5532
|
description: 'Marks an element of a GraphQL schema as no longer supported.',
|
|
5330
5533
|
locations: [DirectiveLocation.FIELD_DEFINITION, DirectiveLocation.ARGUMENT_DEFINITION, DirectiveLocation.INPUT_FIELD_DEFINITION, DirectiveLocation.ENUM_VALUE],
|
|
@@ -5340,7 +5543,7 @@ var GraphQLDeprecatedDirective = new GraphQLDirective({
|
|
|
5340
5543
|
* Used to provide a URL for specifying the behaviour of custom scalar definitions.
|
|
5341
5544
|
*/
|
|
5342
5545
|
|
|
5343
|
-
|
|
5546
|
+
new GraphQLDirective({
|
|
5344
5547
|
name: 'specifiedBy',
|
|
5345
5548
|
description: 'Exposes a URL that specifies the behaviour of this scalar.',
|
|
5346
5549
|
locations: [DirectiveLocation.SCALAR],
|
|
@@ -5351,810 +5554,14 @@ var GraphQLSpecifiedByDirective = new GraphQLDirective({
|
|
|
5351
5554
|
}
|
|
5352
5555
|
}
|
|
5353
5556
|
});
|
|
5354
|
-
|
|
5355
|
-
* The full list of specified directives.
|
|
5356
|
-
*/
|
|
5357
|
-
|
|
5358
|
-
var specifiedDirectives = Object.freeze([GraphQLIncludeDirective, GraphQLSkipDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective]);
|
|
5359
|
-
|
|
5360
|
-
function isTypeSystemDefinitionNode(node) {
|
|
5361
|
-
return node.kind === Kind.SCHEMA_DEFINITION || isTypeDefinitionNode(node) || node.kind === Kind.DIRECTIVE_DEFINITION;
|
|
5362
|
-
}
|
|
5363
|
-
function isTypeDefinitionNode(node) {
|
|
5364
|
-
return node.kind === Kind.SCALAR_TYPE_DEFINITION || node.kind === Kind.OBJECT_TYPE_DEFINITION || node.kind === Kind.INTERFACE_TYPE_DEFINITION || node.kind === Kind.UNION_TYPE_DEFINITION || node.kind === Kind.ENUM_TYPE_DEFINITION || node.kind === Kind.INPUT_OBJECT_TYPE_DEFINITION;
|
|
5365
|
-
}
|
|
5366
|
-
function isTypeSystemExtensionNode(node) {
|
|
5367
|
-
return node.kind === Kind.SCHEMA_EXTENSION || isTypeExtensionNode(node);
|
|
5368
|
-
}
|
|
5369
|
-
function isTypeExtensionNode(node) {
|
|
5370
|
-
return node.kind === Kind.SCALAR_TYPE_EXTENSION || node.kind === Kind.OBJECT_TYPE_EXTENSION || node.kind === Kind.INTERFACE_TYPE_EXTENSION || node.kind === Kind.UNION_TYPE_EXTENSION || node.kind === Kind.ENUM_TYPE_EXTENSION || node.kind === Kind.INPUT_OBJECT_TYPE_EXTENSION;
|
|
5371
|
-
}
|
|
5372
|
-
|
|
5373
|
-
/**
|
|
5374
|
-
* Known type names
|
|
5375
|
-
*
|
|
5376
|
-
* A GraphQL document is only valid if referenced types (specifically
|
|
5377
|
-
* variable definitions and fragment conditions) are defined by the type schema.
|
|
5378
|
-
*/
|
|
5379
|
-
function KnownTypeNamesRule(context) {
|
|
5380
|
-
var schema = context.getSchema();
|
|
5381
|
-
var existingTypesMap = schema ? schema.getTypeMap() : Object.create(null);
|
|
5382
|
-
var definedTypes = Object.create(null);
|
|
5383
|
-
|
|
5384
|
-
for (var _i2 = 0, _context$getDocument$2 = context.getDocument().definitions; _i2 < _context$getDocument$2.length; _i2++) {
|
|
5385
|
-
var def = _context$getDocument$2[_i2];
|
|
5386
|
-
|
|
5387
|
-
if (isTypeDefinitionNode(def)) {
|
|
5388
|
-
definedTypes[def.name.value] = true;
|
|
5389
|
-
}
|
|
5390
|
-
}
|
|
5391
|
-
|
|
5392
|
-
var typeNames = Object.keys(existingTypesMap).concat(Object.keys(definedTypes));
|
|
5393
|
-
return {
|
|
5394
|
-
NamedType: function NamedType(node, _1, parent, _2, ancestors) {
|
|
5395
|
-
var typeName = node.name.value;
|
|
5396
|
-
|
|
5397
|
-
if (!existingTypesMap[typeName] && !definedTypes[typeName]) {
|
|
5398
|
-
var _ancestors$;
|
|
5399
|
-
|
|
5400
|
-
var definitionNode = (_ancestors$ = ancestors[2]) !== null && _ancestors$ !== void 0 ? _ancestors$ : parent;
|
|
5401
|
-
var isSDL = definitionNode != null && isSDLNode(definitionNode);
|
|
5402
|
-
|
|
5403
|
-
if (isSDL && isStandardTypeName(typeName)) {
|
|
5404
|
-
return;
|
|
5405
|
-
}
|
|
5406
|
-
|
|
5407
|
-
var suggestedTypes = suggestionList(typeName, isSDL ? standardTypeNames.concat(typeNames) : typeNames);
|
|
5408
|
-
context.reportError(new GraphQLError("Unknown type \"".concat(typeName, "\".") + didYouMean(suggestedTypes), node));
|
|
5409
|
-
}
|
|
5410
|
-
}
|
|
5411
|
-
};
|
|
5412
|
-
}
|
|
5413
|
-
var standardTypeNames = [].concat(specifiedScalarTypes, introspectionTypes).map(function (type) {
|
|
5557
|
+
[].concat(specifiedScalarTypes, introspectionTypes).map(function (type) {
|
|
5414
5558
|
return type.name;
|
|
5415
5559
|
});
|
|
5416
5560
|
|
|
5417
|
-
function isStandardTypeName(typeName) {
|
|
5418
|
-
return standardTypeNames.indexOf(typeName) !== -1;
|
|
5419
|
-
}
|
|
5420
|
-
|
|
5421
|
-
function isSDLNode(value) {
|
|
5422
|
-
return !Array.isArray(value) && (isTypeSystemDefinitionNode(value) || isTypeSystemExtensionNode(value));
|
|
5423
|
-
}
|
|
5424
|
-
|
|
5425
|
-
/**
|
|
5426
|
-
* Known directives
|
|
5427
|
-
*
|
|
5428
|
-
* A GraphQL document is only valid if all `@directives` are known by the
|
|
5429
|
-
* schema and legally positioned.
|
|
5430
|
-
*/
|
|
5431
|
-
function KnownDirectivesRule(context) {
|
|
5432
|
-
var locationsMap = Object.create(null);
|
|
5433
|
-
var schema = context.getSchema();
|
|
5434
|
-
var definedDirectives = schema ? schema.getDirectives() : specifiedDirectives;
|
|
5435
|
-
|
|
5436
|
-
for (var _i2 = 0; _i2 < definedDirectives.length; _i2++) {
|
|
5437
|
-
var directive = definedDirectives[_i2];
|
|
5438
|
-
locationsMap[directive.name] = directive.locations;
|
|
5439
|
-
}
|
|
5440
|
-
|
|
5441
|
-
var astDefinitions = context.getDocument().definitions;
|
|
5442
|
-
|
|
5443
|
-
for (var _i4 = 0; _i4 < astDefinitions.length; _i4++) {
|
|
5444
|
-
var def = astDefinitions[_i4];
|
|
5445
|
-
|
|
5446
|
-
if (def.kind === Kind.DIRECTIVE_DEFINITION) {
|
|
5447
|
-
locationsMap[def.name.value] = def.locations.map(function (name) {
|
|
5448
|
-
return name.value;
|
|
5449
|
-
});
|
|
5450
|
-
}
|
|
5451
|
-
}
|
|
5452
|
-
|
|
5453
|
-
return {
|
|
5454
|
-
Directive: function Directive(node, _key, _parent, _path, ancestors) {
|
|
5455
|
-
var name = node.name.value;
|
|
5456
|
-
var locations = locationsMap[name];
|
|
5457
|
-
|
|
5458
|
-
if (!locations) {
|
|
5459
|
-
context.reportError(new GraphQLError("Unknown directive \"@".concat(name, "\"."), node));
|
|
5460
|
-
return;
|
|
5461
|
-
}
|
|
5462
|
-
|
|
5463
|
-
var candidateLocation = getDirectiveLocationForASTPath(ancestors);
|
|
5464
|
-
|
|
5465
|
-
if (candidateLocation && locations.indexOf(candidateLocation) === -1) {
|
|
5466
|
-
context.reportError(new GraphQLError("Directive \"@".concat(name, "\" may not be used on ").concat(candidateLocation, "."), node));
|
|
5467
|
-
}
|
|
5468
|
-
}
|
|
5469
|
-
};
|
|
5470
|
-
}
|
|
5471
|
-
|
|
5472
|
-
function getDirectiveLocationForASTPath(ancestors) {
|
|
5473
|
-
var appliedTo = ancestors[ancestors.length - 1];
|
|
5474
|
-
!Array.isArray(appliedTo) || invariant(0);
|
|
5475
|
-
|
|
5476
|
-
switch (appliedTo.kind) {
|
|
5477
|
-
case Kind.OPERATION_DEFINITION:
|
|
5478
|
-
return getDirectiveLocationForOperation(appliedTo.operation);
|
|
5479
|
-
|
|
5480
|
-
case Kind.FIELD:
|
|
5481
|
-
return DirectiveLocation.FIELD;
|
|
5482
|
-
|
|
5483
|
-
case Kind.FRAGMENT_SPREAD:
|
|
5484
|
-
return DirectiveLocation.FRAGMENT_SPREAD;
|
|
5485
|
-
|
|
5486
|
-
case Kind.INLINE_FRAGMENT:
|
|
5487
|
-
return DirectiveLocation.INLINE_FRAGMENT;
|
|
5488
|
-
|
|
5489
|
-
case Kind.FRAGMENT_DEFINITION:
|
|
5490
|
-
return DirectiveLocation.FRAGMENT_DEFINITION;
|
|
5491
|
-
|
|
5492
|
-
case Kind.VARIABLE_DEFINITION:
|
|
5493
|
-
return DirectiveLocation.VARIABLE_DEFINITION;
|
|
5494
|
-
|
|
5495
|
-
case Kind.SCHEMA_DEFINITION:
|
|
5496
|
-
case Kind.SCHEMA_EXTENSION:
|
|
5497
|
-
return DirectiveLocation.SCHEMA;
|
|
5498
|
-
|
|
5499
|
-
case Kind.SCALAR_TYPE_DEFINITION:
|
|
5500
|
-
case Kind.SCALAR_TYPE_EXTENSION:
|
|
5501
|
-
return DirectiveLocation.SCALAR;
|
|
5502
|
-
|
|
5503
|
-
case Kind.OBJECT_TYPE_DEFINITION:
|
|
5504
|
-
case Kind.OBJECT_TYPE_EXTENSION:
|
|
5505
|
-
return DirectiveLocation.OBJECT;
|
|
5506
|
-
|
|
5507
|
-
case Kind.FIELD_DEFINITION:
|
|
5508
|
-
return DirectiveLocation.FIELD_DEFINITION;
|
|
5509
|
-
|
|
5510
|
-
case Kind.INTERFACE_TYPE_DEFINITION:
|
|
5511
|
-
case Kind.INTERFACE_TYPE_EXTENSION:
|
|
5512
|
-
return DirectiveLocation.INTERFACE;
|
|
5513
|
-
|
|
5514
|
-
case Kind.UNION_TYPE_DEFINITION:
|
|
5515
|
-
case Kind.UNION_TYPE_EXTENSION:
|
|
5516
|
-
return DirectiveLocation.UNION;
|
|
5517
|
-
|
|
5518
|
-
case Kind.ENUM_TYPE_DEFINITION:
|
|
5519
|
-
case Kind.ENUM_TYPE_EXTENSION:
|
|
5520
|
-
return DirectiveLocation.ENUM;
|
|
5521
|
-
|
|
5522
|
-
case Kind.ENUM_VALUE_DEFINITION:
|
|
5523
|
-
return DirectiveLocation.ENUM_VALUE;
|
|
5524
|
-
|
|
5525
|
-
case Kind.INPUT_OBJECT_TYPE_DEFINITION:
|
|
5526
|
-
case Kind.INPUT_OBJECT_TYPE_EXTENSION:
|
|
5527
|
-
return DirectiveLocation.INPUT_OBJECT;
|
|
5528
|
-
|
|
5529
|
-
case Kind.INPUT_VALUE_DEFINITION:
|
|
5530
|
-
{
|
|
5531
|
-
var parentNode = ancestors[ancestors.length - 3];
|
|
5532
|
-
return parentNode.kind === Kind.INPUT_OBJECT_TYPE_DEFINITION ? DirectiveLocation.INPUT_FIELD_DEFINITION : DirectiveLocation.ARGUMENT_DEFINITION;
|
|
5533
|
-
}
|
|
5534
|
-
}
|
|
5535
|
-
}
|
|
5536
|
-
|
|
5537
|
-
function getDirectiveLocationForOperation(operation) {
|
|
5538
|
-
switch (operation) {
|
|
5539
|
-
case 'query':
|
|
5540
|
-
return DirectiveLocation.QUERY;
|
|
5541
|
-
|
|
5542
|
-
case 'mutation':
|
|
5543
|
-
return DirectiveLocation.MUTATION;
|
|
5544
|
-
|
|
5545
|
-
case 'subscription':
|
|
5546
|
-
return DirectiveLocation.SUBSCRIPTION;
|
|
5547
|
-
} // istanbul ignore next (Not reachable. All possible types have been considered)
|
|
5548
|
-
|
|
5549
|
-
|
|
5550
|
-
invariant(0, 'Unexpected operation: ' + inspect(operation));
|
|
5551
|
-
}
|
|
5552
|
-
|
|
5553
|
-
/**
|
|
5554
|
-
* Unique directive names per location
|
|
5555
|
-
*
|
|
5556
|
-
* A GraphQL document is only valid if all non-repeatable directives at
|
|
5557
|
-
* a given location are uniquely named.
|
|
5558
|
-
*/
|
|
5559
|
-
function UniqueDirectivesPerLocationRule(context) {
|
|
5560
|
-
var uniqueDirectiveMap = Object.create(null);
|
|
5561
|
-
var schema = context.getSchema();
|
|
5562
|
-
var definedDirectives = schema ? schema.getDirectives() : specifiedDirectives;
|
|
5563
|
-
|
|
5564
|
-
for (var _i2 = 0; _i2 < definedDirectives.length; _i2++) {
|
|
5565
|
-
var directive = definedDirectives[_i2];
|
|
5566
|
-
uniqueDirectiveMap[directive.name] = !directive.isRepeatable;
|
|
5567
|
-
}
|
|
5568
|
-
|
|
5569
|
-
var astDefinitions = context.getDocument().definitions;
|
|
5570
|
-
|
|
5571
|
-
for (var _i4 = 0; _i4 < astDefinitions.length; _i4++) {
|
|
5572
|
-
var def = astDefinitions[_i4];
|
|
5573
|
-
|
|
5574
|
-
if (def.kind === Kind.DIRECTIVE_DEFINITION) {
|
|
5575
|
-
uniqueDirectiveMap[def.name.value] = !def.repeatable;
|
|
5576
|
-
}
|
|
5577
|
-
}
|
|
5578
|
-
|
|
5579
|
-
var schemaDirectives = Object.create(null);
|
|
5580
|
-
var typeDirectivesMap = Object.create(null);
|
|
5581
|
-
return {
|
|
5582
|
-
// Many different AST nodes may contain directives. Rather than listing
|
|
5583
|
-
// them all, just listen for entering any node, and check to see if it
|
|
5584
|
-
// defines any directives.
|
|
5585
|
-
enter: function enter(node) {
|
|
5586
|
-
if (node.directives == null) {
|
|
5587
|
-
return;
|
|
5588
|
-
}
|
|
5589
|
-
|
|
5590
|
-
var seenDirectives;
|
|
5591
|
-
|
|
5592
|
-
if (node.kind === Kind.SCHEMA_DEFINITION || node.kind === Kind.SCHEMA_EXTENSION) {
|
|
5593
|
-
seenDirectives = schemaDirectives;
|
|
5594
|
-
} else if (isTypeDefinitionNode(node) || isTypeExtensionNode(node)) {
|
|
5595
|
-
var typeName = node.name.value;
|
|
5596
|
-
seenDirectives = typeDirectivesMap[typeName];
|
|
5597
|
-
|
|
5598
|
-
if (seenDirectives === undefined) {
|
|
5599
|
-
typeDirectivesMap[typeName] = seenDirectives = Object.create(null);
|
|
5600
|
-
}
|
|
5601
|
-
} else {
|
|
5602
|
-
seenDirectives = Object.create(null);
|
|
5603
|
-
}
|
|
5604
|
-
|
|
5605
|
-
for (var _i6 = 0, _node$directives2 = node.directives; _i6 < _node$directives2.length; _i6++) {
|
|
5606
|
-
var _directive = _node$directives2[_i6];
|
|
5607
|
-
var directiveName = _directive.name.value;
|
|
5608
|
-
|
|
5609
|
-
if (uniqueDirectiveMap[directiveName]) {
|
|
5610
|
-
if (seenDirectives[directiveName]) {
|
|
5611
|
-
context.reportError(new GraphQLError("The directive \"@".concat(directiveName, "\" can only be used once at this location."), [seenDirectives[directiveName], _directive]));
|
|
5612
|
-
} else {
|
|
5613
|
-
seenDirectives[directiveName] = _directive;
|
|
5614
|
-
}
|
|
5615
|
-
}
|
|
5616
|
-
}
|
|
5617
|
-
}
|
|
5618
|
-
};
|
|
5619
|
-
}
|
|
5620
|
-
|
|
5621
|
-
/**
|
|
5622
|
-
* @internal
|
|
5623
|
-
*/
|
|
5624
|
-
|
|
5625
|
-
function KnownArgumentNamesOnDirectivesRule(context) {
|
|
5626
|
-
var directiveArgs = Object.create(null);
|
|
5627
|
-
var schema = context.getSchema();
|
|
5628
|
-
var definedDirectives = schema ? schema.getDirectives() : specifiedDirectives;
|
|
5629
|
-
|
|
5630
|
-
for (var _i2 = 0; _i2 < definedDirectives.length; _i2++) {
|
|
5631
|
-
var directive = definedDirectives[_i2];
|
|
5632
|
-
directiveArgs[directive.name] = directive.args.map(function (arg) {
|
|
5633
|
-
return arg.name;
|
|
5634
|
-
});
|
|
5635
|
-
}
|
|
5636
|
-
|
|
5637
|
-
var astDefinitions = context.getDocument().definitions;
|
|
5638
|
-
|
|
5639
|
-
for (var _i4 = 0; _i4 < astDefinitions.length; _i4++) {
|
|
5640
|
-
var def = astDefinitions[_i4];
|
|
5641
|
-
|
|
5642
|
-
if (def.kind === Kind.DIRECTIVE_DEFINITION) {
|
|
5643
|
-
var _def$arguments;
|
|
5644
|
-
|
|
5645
|
-
// istanbul ignore next (See: 'https://github.com/graphql/graphql-js/issues/2203')
|
|
5646
|
-
var argsNodes = (_def$arguments = def.arguments) !== null && _def$arguments !== void 0 ? _def$arguments : [];
|
|
5647
|
-
directiveArgs[def.name.value] = argsNodes.map(function (arg) {
|
|
5648
|
-
return arg.name.value;
|
|
5649
|
-
});
|
|
5650
|
-
}
|
|
5651
|
-
}
|
|
5652
|
-
|
|
5653
|
-
return {
|
|
5654
|
-
Directive: function Directive(directiveNode) {
|
|
5655
|
-
var directiveName = directiveNode.name.value;
|
|
5656
|
-
var knownArgs = directiveArgs[directiveName];
|
|
5657
|
-
|
|
5658
|
-
if (directiveNode.arguments && knownArgs) {
|
|
5659
|
-
for (var _i6 = 0, _directiveNode$argume2 = directiveNode.arguments; _i6 < _directiveNode$argume2.length; _i6++) {
|
|
5660
|
-
var argNode = _directiveNode$argume2[_i6];
|
|
5661
|
-
var argName = argNode.name.value;
|
|
5662
|
-
|
|
5663
|
-
if (knownArgs.indexOf(argName) === -1) {
|
|
5664
|
-
var suggestions = suggestionList(argName, knownArgs);
|
|
5665
|
-
context.reportError(new GraphQLError("Unknown argument \"".concat(argName, "\" on directive \"@").concat(directiveName, "\".") + didYouMean(suggestions), argNode));
|
|
5666
|
-
}
|
|
5667
|
-
}
|
|
5668
|
-
}
|
|
5669
|
-
|
|
5670
|
-
return false;
|
|
5671
|
-
}
|
|
5672
|
-
};
|
|
5673
|
-
}
|
|
5674
|
-
|
|
5675
|
-
/**
|
|
5676
|
-
* Unique argument names
|
|
5677
|
-
*
|
|
5678
|
-
* A GraphQL field or directive is only valid if all supplied arguments are
|
|
5679
|
-
* uniquely named.
|
|
5680
|
-
*/
|
|
5681
|
-
function UniqueArgumentNamesRule(context) {
|
|
5682
|
-
var knownArgNames = Object.create(null);
|
|
5683
|
-
return {
|
|
5684
|
-
Field: function Field() {
|
|
5685
|
-
knownArgNames = Object.create(null);
|
|
5686
|
-
},
|
|
5687
|
-
Directive: function Directive() {
|
|
5688
|
-
knownArgNames = Object.create(null);
|
|
5689
|
-
},
|
|
5690
|
-
Argument: function Argument(node) {
|
|
5691
|
-
var argName = node.name.value;
|
|
5692
|
-
|
|
5693
|
-
if (knownArgNames[argName]) {
|
|
5694
|
-
context.reportError(new GraphQLError("There can be only one argument named \"".concat(argName, "\"."), [knownArgNames[argName], node.name]));
|
|
5695
|
-
} else {
|
|
5696
|
-
knownArgNames[argName] = node.name;
|
|
5697
|
-
}
|
|
5698
|
-
|
|
5699
|
-
return false;
|
|
5700
|
-
}
|
|
5701
|
-
};
|
|
5702
|
-
}
|
|
5703
|
-
|
|
5704
|
-
/**
|
|
5705
|
-
* @internal
|
|
5706
|
-
*/
|
|
5707
|
-
|
|
5708
|
-
function ProvidedRequiredArgumentsOnDirectivesRule(context) {
|
|
5709
|
-
var requiredArgsMap = Object.create(null);
|
|
5710
|
-
var schema = context.getSchema();
|
|
5711
|
-
var definedDirectives = schema ? schema.getDirectives() : specifiedDirectives;
|
|
5712
|
-
|
|
5713
|
-
for (var _i4 = 0; _i4 < definedDirectives.length; _i4++) {
|
|
5714
|
-
var directive = definedDirectives[_i4];
|
|
5715
|
-
requiredArgsMap[directive.name] = keyMap(directive.args.filter(isRequiredArgument), function (arg) {
|
|
5716
|
-
return arg.name;
|
|
5717
|
-
});
|
|
5718
|
-
}
|
|
5719
|
-
|
|
5720
|
-
var astDefinitions = context.getDocument().definitions;
|
|
5721
|
-
|
|
5722
|
-
for (var _i6 = 0; _i6 < astDefinitions.length; _i6++) {
|
|
5723
|
-
var def = astDefinitions[_i6];
|
|
5724
|
-
|
|
5725
|
-
if (def.kind === Kind.DIRECTIVE_DEFINITION) {
|
|
5726
|
-
var _def$arguments;
|
|
5727
|
-
|
|
5728
|
-
// istanbul ignore next (See: 'https://github.com/graphql/graphql-js/issues/2203')
|
|
5729
|
-
var argNodes = (_def$arguments = def.arguments) !== null && _def$arguments !== void 0 ? _def$arguments : [];
|
|
5730
|
-
requiredArgsMap[def.name.value] = keyMap(argNodes.filter(isRequiredArgumentNode), function (arg) {
|
|
5731
|
-
return arg.name.value;
|
|
5732
|
-
});
|
|
5733
|
-
}
|
|
5734
|
-
}
|
|
5735
|
-
|
|
5736
|
-
return {
|
|
5737
|
-
Directive: {
|
|
5738
|
-
// Validate on leave to allow for deeper errors to appear first.
|
|
5739
|
-
leave: function leave(directiveNode) {
|
|
5740
|
-
var directiveName = directiveNode.name.value;
|
|
5741
|
-
var requiredArgs = requiredArgsMap[directiveName];
|
|
5742
|
-
|
|
5743
|
-
if (requiredArgs) {
|
|
5744
|
-
var _directiveNode$argume;
|
|
5745
|
-
|
|
5746
|
-
// istanbul ignore next (See: 'https://github.com/graphql/graphql-js/issues/2203')
|
|
5747
|
-
var _argNodes = (_directiveNode$argume = directiveNode.arguments) !== null && _directiveNode$argume !== void 0 ? _directiveNode$argume : [];
|
|
5748
|
-
|
|
5749
|
-
var argNodeMap = keyMap(_argNodes, function (arg) {
|
|
5750
|
-
return arg.name.value;
|
|
5751
|
-
});
|
|
5752
|
-
|
|
5753
|
-
for (var _i8 = 0, _Object$keys2 = Object.keys(requiredArgs); _i8 < _Object$keys2.length; _i8++) {
|
|
5754
|
-
var argName = _Object$keys2[_i8];
|
|
5755
|
-
|
|
5756
|
-
if (!argNodeMap[argName]) {
|
|
5757
|
-
var argType = requiredArgs[argName].type;
|
|
5758
|
-
var argTypeStr = isType(argType) ? inspect(argType) : print(argType);
|
|
5759
|
-
context.reportError(new GraphQLError("Directive \"@".concat(directiveName, "\" argument \"").concat(argName, "\" of type \"").concat(argTypeStr, "\" is required, but it was not provided."), directiveNode));
|
|
5760
|
-
}
|
|
5761
|
-
}
|
|
5762
|
-
}
|
|
5763
|
-
}
|
|
5764
|
-
}
|
|
5765
|
-
};
|
|
5766
|
-
}
|
|
5767
|
-
|
|
5768
|
-
function isRequiredArgumentNode(arg) {
|
|
5769
|
-
return arg.type.kind === Kind.NON_NULL_TYPE && arg.defaultValue == null;
|
|
5770
|
-
}
|
|
5771
|
-
|
|
5772
|
-
/**
|
|
5773
|
-
* Unique input field names
|
|
5774
|
-
*
|
|
5775
|
-
* A GraphQL input object value is only valid if all supplied fields are
|
|
5776
|
-
* uniquely named.
|
|
5777
|
-
*/
|
|
5778
|
-
-function UniqueInputFieldNamesRule(context) {
-  var knownNameStack = [];
-  var knownNames = Object.create(null);
-  return {
-    ObjectValue: {
-      enter: function enter() {
-        knownNameStack.push(knownNames);
-        knownNames = Object.create(null);
-      },
-      leave: function leave() {
-        knownNames = knownNameStack.pop();
-      }
-    },
-    ObjectField: function ObjectField(node) {
-      var fieldName = node.name.value;
-
-      if (knownNames[fieldName]) {
-        context.reportError(new GraphQLError("There can be only one input field named \"".concat(fieldName, "\"."), [knownNames[fieldName], node.name]));
-      } else {
-        knownNames[fieldName] = node.name;
-      }
-    }
-  };
-}
-
-/**
- * Lone Schema definition
- *
- * A GraphQL document is only valid if it contains only one schema definition.
- */
-function LoneSchemaDefinitionRule(context) {
-  var _ref, _ref2, _oldSchema$astNode;
-
-  var oldSchema = context.getSchema();
-  var alreadyDefined = (_ref = (_ref2 = (_oldSchema$astNode = oldSchema === null || oldSchema === void 0 ? void 0 : oldSchema.astNode) !== null && _oldSchema$astNode !== void 0 ? _oldSchema$astNode : oldSchema === null || oldSchema === void 0 ? void 0 : oldSchema.getQueryType()) !== null && _ref2 !== void 0 ? _ref2 : oldSchema === null || oldSchema === void 0 ? void 0 : oldSchema.getMutationType()) !== null && _ref !== void 0 ? _ref : oldSchema === null || oldSchema === void 0 ? void 0 : oldSchema.getSubscriptionType();
-  var schemaDefinitionsCount = 0;
-  return {
-    SchemaDefinition: function SchemaDefinition(node) {
-      if (alreadyDefined) {
-        context.reportError(new GraphQLError('Cannot define a new schema within a schema extension.', node));
-        return;
-      }
-
-      if (schemaDefinitionsCount > 0) {
-        context.reportError(new GraphQLError('Must provide only one schema definition.', node));
-      }
-
-      ++schemaDefinitionsCount;
-    }
-  };
-}
-
-/**
- * Unique operation types
- *
- * A GraphQL document is only valid if it has only one type per operation.
- */
-function UniqueOperationTypesRule(context) {
-  var schema = context.getSchema();
-  var definedOperationTypes = Object.create(null);
-  var existingOperationTypes = schema ? {
-    query: schema.getQueryType(),
-    mutation: schema.getMutationType(),
-    subscription: schema.getSubscriptionType()
-  } : {};
-  return {
-    SchemaDefinition: checkOperationTypes,
-    SchemaExtension: checkOperationTypes
-  };
-
-  function checkOperationTypes(node) {
-    var _node$operationTypes;
-
-    // istanbul ignore next (See: 'https://github.com/graphql/graphql-js/issues/2203')
-    var operationTypesNodes = (_node$operationTypes = node.operationTypes) !== null && _node$operationTypes !== void 0 ? _node$operationTypes : [];
-
-    for (var _i2 = 0; _i2 < operationTypesNodes.length; _i2++) {
-      var operationType = operationTypesNodes[_i2];
-      var operation = operationType.operation;
-      var alreadyDefinedOperationType = definedOperationTypes[operation];
-
-      if (existingOperationTypes[operation]) {
-        context.reportError(new GraphQLError("Type for ".concat(operation, " already defined in the schema. It cannot be redefined."), operationType));
-      } else if (alreadyDefinedOperationType) {
-        context.reportError(new GraphQLError("There can be only one ".concat(operation, " type in schema."), [alreadyDefinedOperationType, operationType]));
-      } else {
-        definedOperationTypes[operation] = operationType;
-      }
-    }
-
-    return false;
-  }
-}
-
-/**
- * Unique type names
- *
- * A GraphQL document is only valid if all defined types have unique names.
- */
-function UniqueTypeNamesRule(context) {
-  var knownTypeNames = Object.create(null);
-  var schema = context.getSchema();
-  return {
-    ScalarTypeDefinition: checkTypeName,
-    ObjectTypeDefinition: checkTypeName,
-    InterfaceTypeDefinition: checkTypeName,
-    UnionTypeDefinition: checkTypeName,
-    EnumTypeDefinition: checkTypeName,
-    InputObjectTypeDefinition: checkTypeName
-  };
-
-  function checkTypeName(node) {
-    var typeName = node.name.value;
-
-    if (schema !== null && schema !== void 0 && schema.getType(typeName)) {
-      context.reportError(new GraphQLError("Type \"".concat(typeName, "\" already exists in the schema. It cannot also be defined in this type definition."), node.name));
-      return;
-    }
-
-    if (knownTypeNames[typeName]) {
-      context.reportError(new GraphQLError("There can be only one type named \"".concat(typeName, "\"."), [knownTypeNames[typeName], node.name]));
-    } else {
-      knownTypeNames[typeName] = node.name;
-    }
-
-    return false;
-  }
-}
-
-/**
- * Unique enum value names
- *
- * A GraphQL enum type is only valid if all its values are uniquely named.
- */
-function UniqueEnumValueNamesRule(context) {
-  var schema = context.getSchema();
-  var existingTypeMap = schema ? schema.getTypeMap() : Object.create(null);
-  var knownValueNames = Object.create(null);
-  return {
-    EnumTypeDefinition: checkValueUniqueness,
-    EnumTypeExtension: checkValueUniqueness
-  };
-
-  function checkValueUniqueness(node) {
-    var _node$values;
-
-    var typeName = node.name.value;
-
-    if (!knownValueNames[typeName]) {
-      knownValueNames[typeName] = Object.create(null);
-    } // istanbul ignore next (See: 'https://github.com/graphql/graphql-js/issues/2203')
-
-
-    var valueNodes = (_node$values = node.values) !== null && _node$values !== void 0 ? _node$values : [];
-    var valueNames = knownValueNames[typeName];
-
-    for (var _i2 = 0; _i2 < valueNodes.length; _i2++) {
-      var valueDef = valueNodes[_i2];
-      var valueName = valueDef.name.value;
-      var existingType = existingTypeMap[typeName];
-
-      if (isEnumType(existingType) && existingType.getValue(valueName)) {
-        context.reportError(new GraphQLError("Enum value \"".concat(typeName, ".").concat(valueName, "\" already exists in the schema. It cannot also be defined in this type extension."), valueDef.name));
-      } else if (valueNames[valueName]) {
-        context.reportError(new GraphQLError("Enum value \"".concat(typeName, ".").concat(valueName, "\" can only be defined once."), [valueNames[valueName], valueDef.name]));
-      } else {
-        valueNames[valueName] = valueDef.name;
-      }
-    }
-
-    return false;
-  }
-}
-
-/**
- * Unique field definition names
- *
- * A GraphQL complex type is only valid if all its fields are uniquely named.
- */
-function UniqueFieldDefinitionNamesRule(context) {
-  var schema = context.getSchema();
-  var existingTypeMap = schema ? schema.getTypeMap() : Object.create(null);
-  var knownFieldNames = Object.create(null);
-  return {
-    InputObjectTypeDefinition: checkFieldUniqueness,
-    InputObjectTypeExtension: checkFieldUniqueness,
-    InterfaceTypeDefinition: checkFieldUniqueness,
-    InterfaceTypeExtension: checkFieldUniqueness,
-    ObjectTypeDefinition: checkFieldUniqueness,
-    ObjectTypeExtension: checkFieldUniqueness
-  };
-
-  function checkFieldUniqueness(node) {
-    var _node$fields;
-
-    var typeName = node.name.value;
-
-    if (!knownFieldNames[typeName]) {
-      knownFieldNames[typeName] = Object.create(null);
-    } // istanbul ignore next (See: 'https://github.com/graphql/graphql-js/issues/2203')
-
-
-    var fieldNodes = (_node$fields = node.fields) !== null && _node$fields !== void 0 ? _node$fields : [];
-    var fieldNames = knownFieldNames[typeName];
-
-    for (var _i2 = 0; _i2 < fieldNodes.length; _i2++) {
-      var fieldDef = fieldNodes[_i2];
-      var fieldName = fieldDef.name.value;
-
-      if (hasField(existingTypeMap[typeName], fieldName)) {
-        context.reportError(new GraphQLError("Field \"".concat(typeName, ".").concat(fieldName, "\" already exists in the schema. It cannot also be defined in this type extension."), fieldDef.name));
-      } else if (fieldNames[fieldName]) {
-        context.reportError(new GraphQLError("Field \"".concat(typeName, ".").concat(fieldName, "\" can only be defined once."), [fieldNames[fieldName], fieldDef.name]));
-      } else {
-        fieldNames[fieldName] = fieldDef.name;
-      }
-    }
-
-    return false;
-  }
-}
-
-function hasField(type, fieldName) {
-  if (isObjectType(type) || isInterfaceType(type) || isInputObjectType(type)) {
-    return type.getFields()[fieldName] != null;
-  }
-
-  return false;
-}
-
-/**
- * Unique directive names
- *
- * A GraphQL document is only valid if all defined directives have unique names.
- */
-function UniqueDirectiveNamesRule(context) {
-  var knownDirectiveNames = Object.create(null);
-  var schema = context.getSchema();
-  return {
-    DirectiveDefinition: function DirectiveDefinition(node) {
-      var directiveName = node.name.value;
-
-      if (schema !== null && schema !== void 0 && schema.getDirective(directiveName)) {
-        context.reportError(new GraphQLError("Directive \"@".concat(directiveName, "\" already exists in the schema. It cannot be redefined."), node.name));
-        return;
-      }
-
-      if (knownDirectiveNames[directiveName]) {
-        context.reportError(new GraphQLError("There can be only one directive named \"@".concat(directiveName, "\"."), [knownDirectiveNames[directiveName], node.name]));
-      } else {
-        knownDirectiveNames[directiveName] = node.name;
-      }
-
-      return false;
-    }
-  };
-}
-
 var _defKindToExtKind;
 
 function _defineProperty$1(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
-/**
- * Possible type extension
- *
- * A type extension is only valid if the type is defined and has the same kind.
- */
-function PossibleTypeExtensionsRule(context) {
-  var schema = context.getSchema();
-  var definedTypes = Object.create(null);
-
-  for (var _i2 = 0, _context$getDocument$2 = context.getDocument().definitions; _i2 < _context$getDocument$2.length; _i2++) {
-    var def = _context$getDocument$2[_i2];
-
-    if (isTypeDefinitionNode(def)) {
-      definedTypes[def.name.value] = def;
-    }
-  }
-
-  return {
-    ScalarTypeExtension: checkExtension,
-    ObjectTypeExtension: checkExtension,
-    InterfaceTypeExtension: checkExtension,
-    UnionTypeExtension: checkExtension,
-    EnumTypeExtension: checkExtension,
-    InputObjectTypeExtension: checkExtension
-  };
-
-  function checkExtension(node) {
-    var typeName = node.name.value;
-    var defNode = definedTypes[typeName];
-    var existingType = schema === null || schema === void 0 ? void 0 : schema.getType(typeName);
-    var expectedKind;
-
-    if (defNode) {
-      expectedKind = defKindToExtKind[defNode.kind];
-    } else if (existingType) {
-      expectedKind = typeToExtKind(existingType);
-    }
-
-    if (expectedKind) {
-      if (expectedKind !== node.kind) {
-        var kindStr = extensionKindToTypeName(node.kind);
-        context.reportError(new GraphQLError("Cannot extend non-".concat(kindStr, " type \"").concat(typeName, "\"."), defNode ? [defNode, node] : node));
-      }
-    } else {
-      var allTypeNames = Object.keys(definedTypes);
-
-      if (schema) {
-        allTypeNames = allTypeNames.concat(Object.keys(schema.getTypeMap()));
-      }
-
-      var suggestedTypes = suggestionList(typeName, allTypeNames);
-      context.reportError(new GraphQLError("Cannot extend type \"".concat(typeName, "\" because it is not defined.") + didYouMean(suggestedTypes), node.name));
-    }
-  }
-}
-var defKindToExtKind = (_defKindToExtKind = {}, _defineProperty$1(_defKindToExtKind, Kind.SCALAR_TYPE_DEFINITION, Kind.SCALAR_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.OBJECT_TYPE_DEFINITION, Kind.OBJECT_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.INTERFACE_TYPE_DEFINITION, Kind.INTERFACE_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.UNION_TYPE_DEFINITION, Kind.UNION_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.ENUM_TYPE_DEFINITION, Kind.ENUM_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.INPUT_OBJECT_TYPE_DEFINITION, Kind.INPUT_OBJECT_TYPE_EXTENSION), _defKindToExtKind);
-
-function typeToExtKind(type) {
-  if (isScalarType(type)) {
-    return Kind.SCALAR_TYPE_EXTENSION;
-  }
-
-  if (isObjectType(type)) {
-    return Kind.OBJECT_TYPE_EXTENSION;
-  }
-
-  if (isInterfaceType(type)) {
-    return Kind.INTERFACE_TYPE_EXTENSION;
-  }
-
-  if (isUnionType(type)) {
-    return Kind.UNION_TYPE_EXTENSION;
-  }
-
-  if (isEnumType(type)) {
-    return Kind.ENUM_TYPE_EXTENSION;
-  } // istanbul ignore else (See: 'https://github.com/graphql/graphql-js/issues/2618')
-
-
-  if (isInputObjectType(type)) {
-    return Kind.INPUT_OBJECT_TYPE_EXTENSION;
-  } // istanbul ignore next (Not reachable. All possible types have been considered)
-
-
-  invariant(0, 'Unexpected type: ' + inspect(type));
-}
-
-function extensionKindToTypeName(kind) {
-  switch (kind) {
-    case Kind.SCALAR_TYPE_EXTENSION:
-      return 'scalar';
-
-    case Kind.OBJECT_TYPE_EXTENSION:
-      return 'object';
-
-    case Kind.INTERFACE_TYPE_EXTENSION:
-      return 'interface';
-
-    case Kind.UNION_TYPE_EXTENSION:
-      return 'union';
-
-    case Kind.ENUM_TYPE_EXTENSION:
-      return 'enum';
-
-    case Kind.INPUT_OBJECT_TYPE_EXTENSION:
-      return 'input object';
-  } // istanbul ignore next (Not reachable. All possible types have been considered)
-
-
-  invariant(0, 'Unexpected kind: ' + inspect(kind));
-}
-
-// Spec Section: "Executable Definitions"
-/**
- * @internal
- */
-
-Object.freeze([LoneSchemaDefinitionRule, UniqueOperationTypesRule, UniqueTypeNamesRule, UniqueEnumValueNamesRule, UniqueFieldDefinitionNamesRule, UniqueDirectiveNamesRule, KnownTypeNamesRule, KnownDirectivesRule, UniqueDirectivesPerLocationRule, PossibleTypeExtensionsRule, KnownArgumentNamesOnDirectivesRule, UniqueArgumentNamesRule, UniqueInputFieldNamesRule, ProvidedRequiredArgumentsOnDirectivesRule]);
+(_defKindToExtKind = {}, _defineProperty$1(_defKindToExtKind, Kind.SCALAR_TYPE_DEFINITION, Kind.SCALAR_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.OBJECT_TYPE_DEFINITION, Kind.OBJECT_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.INTERFACE_TYPE_DEFINITION, Kind.INTERFACE_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.UNION_TYPE_DEFINITION, Kind.UNION_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.ENUM_TYPE_DEFINITION, Kind.ENUM_TYPE_EXTENSION), _defineProperty$1(_defKindToExtKind, Kind.INPUT_OBJECT_TYPE_DEFINITION, Kind.INPUT_OBJECT_TYPE_EXTENSION), _defKindToExtKind);
 keyMap(specifiedScalarTypes.concat(introspectionTypes), function (type) {
     return type.name;
 });
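
The block removed above is graphql-js's bundled SDL/uniqueness validation machinery (the rules listed in the Object.freeze call). As an illustration only — not part of this package, and assuming a standalone graphql@15 install — one of those rules, UniqueInputFieldNamesRule, is what produces the "There can be only one input field named ..." error through the public API:

// Illustration only; assumes graphql@15 is installed separately.
import { buildSchema, parse, validate, UniqueInputFieldNamesRule } from 'graphql';

const schema = buildSchema(`
  input Point { x: Int, y: Int }
  type Query { find(where: Point): Int }
`);

// The parser accepts the duplicate "x" input field; the validation rule rejects it.
const errors = validate(schema, parse('{ find(where: { x: 1, x: 2 }) }'), [UniqueInputFieldNamesRule]);
// errors[0].message === 'There can be only one input field named "x".'
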
@@ -6165,6 +5572,7 @@ keyMap(specifiedScalarTypes.concat(introspectionTypes), function (type) {
  * For full license text, see the LICENSE.txt file
  */
 
+
 var DateLiteral;
 (function (DateLiteral) {
     DateLiteral["NEXT_YEAR"] = "NEXT_YEAR";
@@ -6239,6 +5647,7 @@ var PredicateType;
  * For full license text, see the LICENSE.txt file
  */
 
+
 const { keys, values, create, assign, freeze } = Object;
 
 /**
@@ -6470,14 +5879,30 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
         const operationsWithDenormedRecords = [];
         for (let i = 0, len = operations.length; i < len; i++) {
             const operation = operations[i];
-            if (
-
-
+            if (durableStore.plugin !== undefined &&
+                durableStore.plugin.supportsBatchUpdates !== undefined &&
+                durableStore.plugin.supportsBatchUpdates() === true) {
+                if (operation.segment !== DefaultDurableSegment ||
+                    operation.type !== 'setEntries') {
+                    operationsWithDenormedRecords.push(operation);
+                    continue;
+                }
+                operationsWithDenormedRecords.push({
+                    ...operation,
+                    entries: denormalizeEntries(operation.entries),
+                });
+            }
+            else {
+                if (operation.segment !== DefaultDurableSegment ||
+                    operation.type === 'evictEntries') {
+                    operationsWithDenormedRecords.push(operation);
+                    continue;
+                }
+                operationsWithDenormedRecords.push({
+                    ...operation,
+                    entries: denormalizeEntries(operation.entries),
+                });
             }
-            operationsWithDenormedRecords.push({
-                ...operation,
-                entries: denormalizeEntries(operation.entries),
-            });
         }
         return durableStore.batchOperations(operationsWithDenormedRecords);
     };
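
The hunk above rewrites how makeRecordDenormalizingDurableStore prepares batchOperations: when the durable store plugin advertises supportsBatchUpdates(), only 'setEntries' operations in the default durable segment have their entries denormalized, while the previous behaviour (denormalize everything in the default segment except 'evictEntries') remains the fallback. A condensed sketch of that branching, using a hypothetical helper name rather than the shipped code:

// Condensed sketch of the new branching; denormalizeBatch is a hypothetical name.
function denormalizeBatch(operations, durableStore, denormalizeEntries, DefaultDurableSegment) {
    const batchUpdates = durableStore.plugin !== undefined &&
        durableStore.plugin.supportsBatchUpdates !== undefined &&
        durableStore.plugin.supportsBatchUpdates() === true;
    return operations.map((operation) => {
        // Operations outside the default segment, and the operation types called out above, pass through untouched.
        const passThrough = operation.segment !== DefaultDurableSegment ||
            (batchUpdates ? operation.type !== 'setEntries' : operation.type === 'evictEntries');
        return passThrough
            ? operation
            : { ...operation, entries: denormalizeEntries(operation.entries) };
    });
}
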
@@ -6551,6 +5976,7 @@ function getRuntime() {
 }
 
 // so eslint doesn't complain about nimbus
+/* global __nimbus */
 function ldsRuntimeBridge() {
     if (typeof __nimbus !== 'undefined' &&
         __nimbus.plugins !== undefined &&
@@ -6567,4 +5993,4 @@ function ldsRuntimeBridge() {
 }
 
 export { ldsRuntimeBridge as default };
-// version: 1.
+// version: 1.236.0-036823f57