@salesforce/lds-runtime-mobile 1.130.9 → 1.131.0-dev10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.js +666 -201
- package/package.json +21 -21
- package/sfdc/main.js +666 -201
package/dist/main.js
CHANGED
|
@@ -14,10 +14,12 @@
|
|
|
14
14
|
import { withRegistration, register } from '@salesforce/lds-default-luvio';
|
|
15
15
|
import { setupInstrumentation, instrumentAdapter as instrumentAdapter$1, instrumentLuvio, setLdsAdaptersUiapiInstrumentation, setLdsNetworkAdapterInstrumentation } from '@salesforce/lds-instrumentation';
|
|
16
16
|
import { HttpStatusCode, StoreKeyMap, buildStaleWhileRevalidateImplementation, StoreKeySet, serializeStructuredKey, Reader, emitAdapterEvent, createCustomAdapterEventEmitter, isFileReference, Environment, Luvio, InMemoryStore } from '@luvio/engine';
|
|
17
|
+
import excludeStaleRecordsGate from '@salesforce/gate/lds.graphqlEvalExcludeStaleRecords';
|
|
17
18
|
import { parseAndVisit, Kind, visit, execute, buildSchema, isObjectType, defaultFieldResolver } from '@luvio/graphql-parser';
|
|
18
|
-
import { getRecordId18, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion } from '@salesforce/lds-adapters-uiapi';
|
|
19
|
+
import { getRecordId18, serializeFieldArguments, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion } from '@salesforce/lds-adapters-uiapi';
|
|
19
20
|
import caseSensitiveUserId from '@salesforce/user/Id';
|
|
20
21
|
import { idleDetector, getInstrumentation } from 'o11y/client';
|
|
22
|
+
import ldsUseShortUrlGate from '@salesforce/gate/lds.useShortUrl';
|
|
21
23
|
import { instrument as instrument$1 } from '@salesforce/lds-bindings';
|
|
22
24
|
import LOCALE from '@salesforce/i18n/locale';
|
|
23
25
|
import CURRENCY from '@salesforce/i18n/currency';
|
|
@@ -27,6 +29,9 @@ import shortTimeFormat from '@salesforce/i18n/dateTime.shortTimeFormat';
|
|
|
27
29
|
import shortDateTimeFormat from '@salesforce/i18n/dateTime.shortDateTimeFormat';
|
|
28
30
|
import { getDateTimeFormat, getDateTimeISO8601Parser, getNumberFormat } from 'lightning/i18nService';
|
|
29
31
|
import formattingOptions from 'lightning/i18nCldrOptions';
|
|
32
|
+
import eagerEvalValidAt from '@salesforce/gate/lds.eagerEvalValidAt';
|
|
33
|
+
import eagerEvalStaleWhileRevalidate from '@salesforce/gate/lds.eagerEvalStaleWhileRevalidate';
|
|
34
|
+
import eagerEvalDefaultCachePolicy from '@salesforce/gate/lds.eagerEvalDefaultCachePolicy';
|
|
30
35
|
|
|
31
36
|
/**
|
|
32
37
|
* Copyright (c) 2022, Salesforce, Inc.,
|
|
@@ -37,7 +42,7 @@ import formattingOptions from 'lightning/i18nCldrOptions';
|
|
|
37
42
|
const { parse: parse$5, stringify: stringify$5 } = JSON;
|
|
38
43
|
const { join: join$2, push: push$2, unshift } = Array.prototype;
|
|
39
44
|
const { isArray: isArray$6 } = Array;
|
|
40
|
-
const { entries: entries$3, keys: keys$
|
|
45
|
+
const { entries: entries$3, keys: keys$7 } = Object;
|
|
41
46
|
|
|
42
47
|
const UI_API_BASE_URI = '/services/data/v58.0/ui-api';
|
|
43
48
|
|
|
@@ -102,7 +107,7 @@ function isSpanningRecord$1(fieldValue) {
|
|
|
102
107
|
function mergeRecordFields$1(first, second) {
|
|
103
108
|
const { fields: targetFields } = first;
|
|
104
109
|
const { fields: sourceFields } = second;
|
|
105
|
-
const fieldNames = keys$
|
|
110
|
+
const fieldNames = keys$7(sourceFields);
|
|
106
111
|
for (let i = 0, len = fieldNames.length; i < len; i += 1) {
|
|
107
112
|
const fieldName = fieldNames[i];
|
|
108
113
|
const sourceField = sourceFields[fieldName];
|
|
@@ -515,8 +520,9 @@ function isDeprecatedDurableStoreEntry(durableRecord) {
|
|
|
515
520
|
return false;
|
|
516
521
|
}
|
|
517
522
|
const DefaultDurableSegment = 'DEFAULT';
|
|
523
|
+
const RedirectDurableSegment = 'REDIRECT_KEYS';
|
|
518
524
|
|
|
519
|
-
const { keys: keys$
|
|
525
|
+
const { keys: keys$6, create: create$5, assign: assign$5, freeze: freeze$1 } = Object;
|
|
520
526
|
const { isArray: isArray$5 } = Array;
|
|
521
527
|
|
|
522
528
|
//Durable store error instrumentation key
|
|
@@ -554,7 +560,7 @@ function deepFreeze(value) {
|
|
|
554
560
|
}
|
|
555
561
|
}
|
|
556
562
|
else {
|
|
557
|
-
const keys$1 = keys$
|
|
563
|
+
const keys$1 = keys$6(value);
|
|
558
564
|
for (let i = 0, len = keys$1.length; i < len; i += 1) {
|
|
559
565
|
deepFreeze(value[keys$1[i]]);
|
|
560
566
|
}
|
|
@@ -577,13 +583,13 @@ function isStoreEntryError(storeRecord) {
|
|
|
577
583
|
* @param pendingWriter the PendingWriter (this is going away soon)
|
|
578
584
|
* @returns
|
|
579
585
|
*/
|
|
580
|
-
function publishDurableStoreEntries(durableRecords,
|
|
586
|
+
function publishDurableStoreEntries(durableRecords, put, publishMetadata) {
|
|
581
587
|
const revivedKeys = new StoreKeySet();
|
|
582
588
|
let hadUnexpectedShape = false;
|
|
583
589
|
if (durableRecords === undefined) {
|
|
584
590
|
return { revivedKeys, hadUnexpectedShape };
|
|
585
591
|
}
|
|
586
|
-
const durableKeys = keys$
|
|
592
|
+
const durableKeys = keys$6(durableRecords);
|
|
587
593
|
if (durableKeys.length === 0) {
|
|
588
594
|
// no records to revive
|
|
589
595
|
return { revivedKeys, hadUnexpectedShape };
|
|
@@ -615,7 +621,7 @@ function publishDurableStoreEntries(durableRecords, publish, publishMetadata) {
|
|
|
615
621
|
// freeze errors on way into L1
|
|
616
622
|
deepFreeze(data.error);
|
|
617
623
|
}
|
|
618
|
-
|
|
624
|
+
put(key, data);
|
|
619
625
|
revivedKeys.add(key);
|
|
620
626
|
}
|
|
621
627
|
return { revivedKeys, hadUnexpectedShape };
|
|
@@ -655,7 +661,7 @@ unavailableSnapshot, durableStoreErrorHandler, buildL1Snapshot, reviveMetrics =
|
|
|
655
661
|
// TODO [W-10072584]: instead of implicitly using L1 we should take in
|
|
656
662
|
// publish and publishMetadata funcs, so callers can decide where to
|
|
657
663
|
// revive to (like they pass in how to do the buildL1Snapshot)
|
|
658
|
-
baseEnvironment.
|
|
664
|
+
baseEnvironment.storePut.bind(baseEnvironment), baseEnvironment.publishStoreMetadata.bind(baseEnvironment));
|
|
659
665
|
// if the data coming back from DS had an unexpected shape then just
|
|
660
666
|
// return the L1 snapshot
|
|
661
667
|
if (hadUnexpectedShape === true) {
|
|
@@ -757,7 +763,7 @@ class DurableTTLStore {
|
|
|
757
763
|
overrides,
|
|
758
764
|
};
|
|
759
765
|
}
|
|
760
|
-
const keys$1 = keys$
|
|
766
|
+
const keys$1 = keys$6(entries);
|
|
761
767
|
for (let i = 0, len = keys$1.length; i < len; i++) {
|
|
762
768
|
const key = keys$1[i];
|
|
763
769
|
const entry = entries[key];
|
|
@@ -788,14 +794,14 @@ function copy(source) {
|
|
|
788
794
|
}
|
|
789
795
|
return { ...source };
|
|
790
796
|
}
|
|
791
|
-
function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStoreErrorHandler) {
|
|
797
|
+
function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStoreErrorHandler, redirects, additionalDurableStoreOperations = []) {
|
|
792
798
|
const durableRecords = create$5(null);
|
|
793
799
|
const evictedRecords = create$5(null);
|
|
794
800
|
const { records, metadata: storeMetadata, visitedIds, refreshedIds, } = store.fallbackStringKeyInMemoryStore;
|
|
795
801
|
// TODO: W-8909393 Once metadata is stored in its own segment we need to
|
|
796
802
|
// call setEntries for the visitedIds on default segment and call setEntries
|
|
797
803
|
// on the metadata segment for the refreshedIds
|
|
798
|
-
const keys$1 = keys$
|
|
804
|
+
const keys$1 = keys$6({ ...visitedIds, ...refreshedIds });
|
|
799
805
|
for (let i = 0, len = keys$1.length; i < len; i += 1) {
|
|
800
806
|
const key = keys$1[i];
|
|
801
807
|
const record = records[key];
|
|
@@ -817,9 +823,9 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
|
|
|
817
823
|
};
|
|
818
824
|
}
|
|
819
825
|
}
|
|
820
|
-
const durableStoreOperations =
|
|
826
|
+
const durableStoreOperations = additionalDurableStoreOperations;
|
|
821
827
|
// publishes
|
|
822
|
-
const recordKeys = keys$
|
|
828
|
+
const recordKeys = keys$6(durableRecords);
|
|
823
829
|
if (recordKeys.length > 0) {
|
|
824
830
|
durableStoreOperations.push({
|
|
825
831
|
type: 'setEntries',
|
|
@@ -827,8 +833,20 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
|
|
|
827
833
|
segment: DefaultDurableSegment,
|
|
828
834
|
});
|
|
829
835
|
}
|
|
836
|
+
// redirects
|
|
837
|
+
redirects.forEach((value, key) => {
|
|
838
|
+
durableStoreOperations.push({
|
|
839
|
+
type: 'setEntries',
|
|
840
|
+
entries: {
|
|
841
|
+
[key]: {
|
|
842
|
+
data: { key, redirect: value },
|
|
843
|
+
},
|
|
844
|
+
},
|
|
845
|
+
segment: RedirectDurableSegment,
|
|
846
|
+
});
|
|
847
|
+
});
|
|
830
848
|
// evicts
|
|
831
|
-
const evictedKeys = keys$
|
|
849
|
+
const evictedKeys = keys$6(evictedRecords);
|
|
832
850
|
if (evictedKeys.length > 0) {
|
|
833
851
|
durableStoreOperations.push({
|
|
834
852
|
type: 'evictEntries',
|
|
@@ -872,6 +890,19 @@ function buildIngestStagingStore(environment) {
|
|
|
872
890
|
return environment.storeBuildIngestionStagingStore();
|
|
873
891
|
}
|
|
874
892
|
|
|
893
|
+
async function reviveRedirects(durableStore, env) {
|
|
894
|
+
const entries = await durableStore.getAllEntries(RedirectDurableSegment);
|
|
895
|
+
if (entries) {
|
|
896
|
+
for (const durableEntry of Object.keys(entries)) {
|
|
897
|
+
const entry = entries[durableEntry];
|
|
898
|
+
const { data: { key, redirect }, } = entry;
|
|
899
|
+
if (entry) {
|
|
900
|
+
env.storeRedirect(key, redirect);
|
|
901
|
+
}
|
|
902
|
+
}
|
|
903
|
+
}
|
|
904
|
+
}
|
|
905
|
+
|
|
875
906
|
const AdapterContextSegment = 'ADAPTER-CONTEXT';
|
|
876
907
|
const ADAPTER_CONTEXT_ID_SUFFIX = '__NAMED_CONTEXT';
|
|
877
908
|
async function reviveOrCreateContext(adapterId, durableStore, durableStoreErrorHandler, contextStores, pendingContextStoreKeys, onContextLoaded) {
|
|
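For context, a minimal standalone sketch (not part of the package) of the round trip the redirect hunks above introduce: pending redirects are flushed into the new REDIRECT_KEYS segment as `{ data: { key, redirect } }` entries, and reviveRedirects replays them into the store on startup. The durable store, environment object, and record ids below are simplified, hypothetical stand-ins.

    const RedirectDurableSegment = 'REDIRECT_KEYS';

    // Simplified stand-ins for the durable store and environment used above.
    const durableStore = {
        segments: { [RedirectDurableSegment]: {} },
        async setEntries(entries, segment) {
            Object.assign(this.segments[segment], entries);
        },
        async getAllEntries(segment) {
            return this.segments[segment];
        },
    };
    const env = {
        redirects: new Map(),
        storeRedirect(key, redirect) {
            this.redirects.set(key, redirect);
        },
    };

    // Flush side: each pending redirect becomes one entry in the redirect segment.
    async function flushRedirects(pendingStoreRedirects) {
        for (const [key, redirect] of pendingStoreRedirects) {
            await durableStore.setEntries({ [key]: { data: { key, redirect } } }, RedirectDurableSegment);
        }
        pendingStoreRedirects.clear();
    }

    // Revive side: mirrors the shape of reviveRedirects from the hunk above.
    async function reviveRedirects() {
        const entries = await durableStore.getAllEntries(RedirectDurableSegment);
        if (entries) {
            for (const id of Object.keys(entries)) {
                const { data: { key, redirect } } = entries[id];
                env.storeRedirect(key, redirect);
            }
        }
    }

    // Usage: a draft key redirected to its canonical key survives a "restart".
    const pending = new Map([
        ['UiApi::RecordRepresentation:DRAFT-123', 'UiApi::RecordRepresentation:001xx000003GYcFAAW'],
    ]);
    flushRedirects(pending)
        .then(reviveRedirects)
        .then(() => console.log(env.redirects));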
@@ -909,7 +940,7 @@ async function reviveOrCreateContext(adapterId, durableStore, durableStoreErrorH
|
|
|
909
940
|
}
|
|
910
941
|
return contextReturn();
|
|
911
942
|
}
|
|
912
|
-
function isUnfulfilledSnapshot(cachedSnapshotResult) {
|
|
943
|
+
function isUnfulfilledSnapshot$1(cachedSnapshotResult) {
|
|
913
944
|
if (cachedSnapshotResult === undefined) {
|
|
914
945
|
return false;
|
|
915
946
|
}
|
|
@@ -934,13 +965,18 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
934
965
|
// event. If this instance of makeDurable caused that L2 write we can ignore that
|
|
935
966
|
// on change event. This Set helps us do that.
|
|
936
967
|
const pendingContextStoreKeys = new Set();
|
|
968
|
+
// redirects that need to be flushed to the durable store
|
|
969
|
+
const pendingStoreRedirects = new Map();
|
|
937
970
|
const contextStores = create$5(null);
|
|
938
971
|
let initializationPromise = new Promise((resolve) => {
|
|
939
972
|
const finish = () => {
|
|
940
973
|
resolve();
|
|
941
974
|
initializationPromise = undefined;
|
|
942
975
|
};
|
|
943
|
-
|
|
976
|
+
Promise.all([
|
|
977
|
+
reviveTTLOverrides(durableTTLStore, environment),
|
|
978
|
+
reviveRedirects(durableStore, environment),
|
|
979
|
+
]).then(finish);
|
|
944
980
|
});
|
|
945
981
|
//instrumentation for durable store errors
|
|
946
982
|
const durableStoreErrorHandler = handleDurableStoreRejection(instrumentation);
|
|
@@ -953,6 +989,8 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
953
989
|
const unsubscribe = durableStore.registerOnChangedListener(async (changes) => {
|
|
954
990
|
const defaultSegmentKeys = [];
|
|
955
991
|
const adapterContextSegmentKeys = [];
|
|
992
|
+
const redirectSegmentKeys = [];
|
|
993
|
+
let shouldBroadcast = false;
|
|
956
994
|
for (let i = 0, len = changes.length; i < len; i++) {
|
|
957
995
|
const change = changes[i];
|
|
958
996
|
// we only care about changes to the data which is stored in the default
|
|
@@ -963,6 +1001,20 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
963
1001
|
else if (change.segment === AdapterContextSegment) {
|
|
964
1002
|
adapterContextSegmentKeys.push(...change.ids);
|
|
965
1003
|
}
|
|
1004
|
+
else if (change.segment === RedirectDurableSegment) {
|
|
1005
|
+
redirectSegmentKeys.push(...change.ids);
|
|
1006
|
+
}
|
|
1007
|
+
}
|
|
1008
|
+
if (redirectSegmentKeys.length > 0) {
|
|
1009
|
+
const redirectEntries = await durableStore.getEntries(redirectSegmentKeys, RedirectDurableSegment);
|
|
1010
|
+
if (redirectEntries !== undefined) {
|
|
1011
|
+
const redirectKeys = Object.keys(redirectEntries);
|
|
1012
|
+
for (const key of redirectKeys) {
|
|
1013
|
+
const redirectData = redirectEntries[key];
|
|
1014
|
+
environment.storeRedirect(redirectData.data.key, redirectData.data.redirect);
|
|
1015
|
+
shouldBroadcast = true;
|
|
1016
|
+
}
|
|
1017
|
+
}
|
|
966
1018
|
}
|
|
967
1019
|
// process adapter context changes
|
|
968
1020
|
const adapterContextKeysFromDifferentInstance = [];
|
|
@@ -982,7 +1034,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
982
1034
|
try {
|
|
983
1035
|
const entries = await durableStore.getEntries(adapterContextKeysFromDifferentInstance, AdapterContextSegment);
|
|
984
1036
|
if (entries !== undefined) {
|
|
985
|
-
const entryKeys = keys$
|
|
1037
|
+
const entryKeys = keys$6(entries);
|
|
986
1038
|
for (let i = 0, len = entryKeys.length; i < len; i++) {
|
|
987
1039
|
const entryKey = entryKeys[i];
|
|
988
1040
|
const entry = entries[entryKey];
|
|
@@ -999,10 +1051,6 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
999
1051
|
if (defaultSegmentKeysLength > 0) {
|
|
1000
1052
|
for (let i = 0; i < defaultSegmentKeysLength; i++) {
|
|
1001
1053
|
const key = defaultSegmentKeys[i];
|
|
1002
|
-
const canonical = environment.storeGetCanonicalKey(key);
|
|
1003
|
-
if (canonical !== key) {
|
|
1004
|
-
continue;
|
|
1005
|
-
}
|
|
1006
1054
|
// TODO: W-8909393 If expiration is the only thing that changed we should not evict the data... so
|
|
1007
1055
|
// if we stored expiration and data at different keys (or same keys in different segments)
|
|
1008
1056
|
// then we could know if only the expiration has changed and we wouldn't need to evict
|
|
@@ -1010,6 +1058,9 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
1010
1058
|
// call base environment storeEvict so this evict is not tracked for durable deletion
|
|
1011
1059
|
environment.storeEvict(key);
|
|
1012
1060
|
}
|
|
1061
|
+
shouldBroadcast = true;
|
|
1062
|
+
}
|
|
1063
|
+
if (shouldBroadcast) {
|
|
1013
1064
|
await environment.storeBroadcast(rebuildSnapshot, environment.snapshotAvailable);
|
|
1014
1065
|
}
|
|
1015
1066
|
});
|
|
@@ -1060,12 +1111,13 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
1060
1111
|
// call the base storeBroadcast
|
|
1061
1112
|
return publishChangesToDurableStore();
|
|
1062
1113
|
};
|
|
1063
|
-
const publishChangesToDurableStore = function () {
|
|
1114
|
+
const publishChangesToDurableStore = function (additionalDurableStoreOperations) {
|
|
1064
1115
|
validateNotDisposed();
|
|
1065
1116
|
if (ingestStagingStore === null) {
|
|
1066
1117
|
return Promise.resolve();
|
|
1067
1118
|
}
|
|
1068
|
-
const promise = flushInMemoryStoreValuesToDurableStore(ingestStagingStore, durableStore, durableStoreErrorHandler);
|
|
1119
|
+
const promise = flushInMemoryStoreValuesToDurableStore(ingestStagingStore, durableStore, durableStoreErrorHandler, new Map(pendingStoreRedirects), additionalDurableStoreOperations);
|
|
1120
|
+
pendingStoreRedirects.clear();
|
|
1069
1121
|
ingestStagingStore = null;
|
|
1070
1122
|
return promise;
|
|
1071
1123
|
};
|
|
@@ -1147,6 +1199,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
1147
1199
|
};
|
|
1148
1200
|
const storeRedirect = function (existingKey, canonicalKey) {
|
|
1149
1201
|
validateNotDisposed();
|
|
1202
|
+
pendingStoreRedirects.set(existingKey, canonicalKey);
|
|
1150
1203
|
// call redirect on staging store so "old" keys are removed from L2 on
|
|
1151
1204
|
// the next publishChangesToDurableStore. NOTE: we don't need to call
|
|
1152
1205
|
// redirect on the base environment store because staging store and base
|
|
@@ -1192,7 +1245,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
|
|
|
1192
1245
|
const snapshot = buildCachedSnapshot(injectedBuildSnapshotContext, injectedStoreLookup, luvio);
|
|
1193
1246
|
// if the adapter attempted to do an L1 lookup and it was unfulfilled
|
|
1194
1247
|
// then we can attempt an L2 lookup
|
|
1195
|
-
if (isUnfulfilledSnapshot(snapshot)) {
|
|
1248
|
+
if (isUnfulfilledSnapshot$1(snapshot)) {
|
|
1196
1249
|
const start = Date.now();
|
|
1197
1250
|
emitDurableEnvironmentAdapterEvent({ type: 'l2-revive-start' }, adapterRequestContext.eventObservers);
|
|
1198
1251
|
const revivedSnapshot = reviveSnapshot(environment, durableStore, snapshot, durableStoreErrorHandler, () => injectedStoreLookup(snapshot.select, snapshot.refresh)).then((result) => {
|
|
@@ -1799,6 +1852,12 @@ function getRelationshipInfo(apiName, fieldName, infoMap) {
|
|
|
1799
1852
|
function stringLiteral(value, safe = false, isCaseSensitive = false) {
|
|
1800
1853
|
return { type: ValueType.StringLiteral, value, safe, isCaseSensitive };
|
|
1801
1854
|
}
|
|
1855
|
+
function isStringLiteral(expression) {
|
|
1856
|
+
return expression.type === ValueType.StringLiteral;
|
|
1857
|
+
}
|
|
1858
|
+
function isStringArray(expression) {
|
|
1859
|
+
return expression.type === ValueType.StringArray;
|
|
1860
|
+
}
|
|
1802
1861
|
function comparison(left, operator, right) {
|
|
1803
1862
|
return { type: PredicateType$1.comparison, left, right, operator };
|
|
1804
1863
|
}
|
|
@@ -2058,7 +2117,7 @@ function comparisonOperatorToSql(operator) {
|
|
|
2058
2117
|
case ComparisonOperator.eq:
|
|
2059
2118
|
return '=';
|
|
2060
2119
|
case ComparisonOperator.ne:
|
|
2061
|
-
return '
|
|
2120
|
+
return 'IS NOT';
|
|
2062
2121
|
case ComparisonOperator.gt:
|
|
2063
2122
|
return '>';
|
|
2064
2123
|
case ComparisonOperator.gte:
|
|
@@ -2098,9 +2157,13 @@ const recordSuffix = 'edges';
|
|
|
2098
2157
|
const pathPrefix = '$';
|
|
2099
2158
|
const recordsCTE = 'recordsCTE';
|
|
2100
2159
|
const MultiPickListValueSeparator$1 = ';';
|
|
2160
|
+
const recordCTESQL = excludeStaleRecordsGate.isOpen({ fallback: false })
|
|
2161
|
+
? `WITH ${recordsCTE} AS NOT materialized ` +
|
|
2162
|
+
`(select data, metadata from lds_data where key like 'UiApi::RecordRepresentation:%')`
|
|
2163
|
+
: `WITH ${recordsCTE} AS NOT materialized ` +
|
|
2164
|
+
`(select data from lds_data where key like 'UiApi::RecordRepresentation:%')`;
|
|
2101
2165
|
function cteSql() {
|
|
2102
|
-
return
|
|
2103
|
-
`(select data from lds_data where key like 'UiApi::RecordRepresentation:%')`);
|
|
2166
|
+
return recordCTESQL;
|
|
2104
2167
|
}
|
|
2105
2168
|
function computeSql(rootQuery) {
|
|
2106
2169
|
const fields = rootQuery.connections.map((connection) => {
|
|
@@ -2377,6 +2440,14 @@ function expressionToSql(expression, targetDataType, operator) {
|
|
|
2377
2440
|
if (expression.subfield === 'displayValue' && targetDataType === 'Boolean') {
|
|
2378
2441
|
return { sql: 'null', bindings: [] };
|
|
2379
2442
|
}
|
|
2443
|
+
// metadata extract is somewhat different than a data extract
|
|
2444
|
+
if (expression.metadata === true) {
|
|
2445
|
+
let sql = `json_extract("${expression.jsonAlias}".metadata, '${pathPrefix}.${expression.field}')`;
|
|
2446
|
+
if (targetDataType !== undefined) {
|
|
2447
|
+
sql = coerceToTargetDataType(sql, targetDataType);
|
|
2448
|
+
}
|
|
2449
|
+
return { sql, bindings: [] };
|
|
2450
|
+
}
|
|
2380
2451
|
let path = extractPath(expression.field, expression.subfield);
|
|
2381
2452
|
// For multiple picklist includes/excluding filtering, we need to prefix and suffix the field value with ';'
|
|
2382
2453
|
// to make the match safe.
|
|
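Illustrative only (not emitted verbatim by the package): roughly how the generated SQL fragments differ once the lds.graphqlEvalExcludeStaleRecords gate is open — the records CTE also selects the metadata column, and extracts flagged with `metadata: true` read from it instead of from data. The "Account" alias and the data-extract path are hypothetical examples; the metadata-extract shape follows the template string in the hunk above.

    // CTE with the gate closed: only the data column is needed.
    const cteClosed =
        "WITH recordsCTE AS NOT materialized " +
        "(select data from lds_data where key like 'UiApi::RecordRepresentation:%')";

    // CTE with the gate open: metadata rides along so ingestionTimestamp can be filtered on.
    const cteOpen =
        "WITH recordsCTE AS NOT materialized " +
        "(select data, metadata from lds_data where key like 'UiApi::RecordRepresentation:%')";

    // A data extract versus a metadata extract for a hypothetical "Account" alias.
    const dataExtract = `json_extract("Account".data, '$.fields.Name.value')`;
    const metadataExtract = `json_extract("Account".metadata, '$.ingestionTimestamp')`;

    console.log(cteOpen);
    console.log(dataExtract, metadataExtract);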
@@ -2569,6 +2640,9 @@ function compoundOperatorToSql(operator) {
|
|
|
2569
2640
|
}
|
|
2570
2641
|
}
|
|
2571
2642
|
|
|
2643
|
+
const { isArray: isArray$4 } = Array;
|
|
2644
|
+
const { keys: keys$5 } = Object;
|
|
2645
|
+
|
|
2572
2646
|
function isListValueNode(node) {
|
|
2573
2647
|
return node.kind === 'ListValue';
|
|
2574
2648
|
}
|
|
@@ -2621,13 +2695,13 @@ function isScalarDataType(type) {
|
|
|
2621
2695
|
const NotOperator = 'not';
|
|
2622
2696
|
const { eq, ne, gt, gte, lt, lte, nin, like, includes, excludes } = ComparisonOperator;
|
|
2623
2697
|
const inOp = ComparisonOperator.in;
|
|
2624
|
-
function fieldsToFilters(fieldValues, joinAlias, apiName, input, compoundOperator = CompoundOperator.and, joins) {
|
|
2698
|
+
function fieldsToFilters(fieldValues, joinAlias, apiName, input, compoundOperator = CompoundOperator.and, joins, draftFunctions) {
|
|
2625
2699
|
const results = fieldValues
|
|
2626
2700
|
.map((value) => {
|
|
2627
2701
|
if (!isObjectValueNode$1(value)) {
|
|
2628
2702
|
return [failure([message('Parent filter node should be an object.')])];
|
|
2629
2703
|
}
|
|
2630
|
-
return Object.entries(value.fields).map(([key, value]) => filter(key, value, joinAlias, apiName, input, joins));
|
|
2704
|
+
return Object.entries(value.fields).map(([key, value]) => filter(key, value, joinAlias, apiName, input, joins, draftFunctions));
|
|
2631
2705
|
})
|
|
2632
2706
|
.reduce(flatten$1, []);
|
|
2633
2707
|
const failures = results.filter(isFailure).reduce(flatMap(errors), []);
|
|
@@ -2649,22 +2723,22 @@ function fieldsToFilters(fieldValues, joinAlias, apiName, input, compoundOperato
|
|
|
2649
2723
|
return success({ predicate: resolvedPredicate });
|
|
2650
2724
|
}
|
|
2651
2725
|
//{where: {Field: ... | and: ... | or: ... | not: ...}}
|
|
2652
|
-
function recordFilter(where, joinAlias, apiName, input, joins) {
|
|
2726
|
+
function recordFilter(where, joinAlias, apiName, input, joins, draftFunctions) {
|
|
2653
2727
|
if (where === undefined) {
|
|
2654
2728
|
return success(undefined);
|
|
2655
2729
|
}
|
|
2656
2730
|
// when 'recordFilter' starts, there is no 'NotPredicated'
|
|
2657
|
-
return fieldsToFilters([where.value], joinAlias, apiName, input, CompoundOperator.and, joins).map((result) => result.predicate === undefined || isEmptyPredicate(result.predicate) ? undefined : result);
|
|
2731
|
+
return fieldsToFilters([where.value], joinAlias, apiName, input, CompoundOperator.and, joins, draftFunctions).map((result) => result.predicate === undefined || isEmptyPredicate(result.predicate) ? undefined : result);
|
|
2658
2732
|
}
|
|
2659
|
-
function filter(name, value, tableAlias, apiName, input, joins) {
|
|
2733
|
+
function filter(name, value, tableAlias, apiName, input, joins, draftFunctions) {
|
|
2660
2734
|
if (isCompoundOperator(name)) {
|
|
2661
2735
|
if (!isListValueNode(value)) {
|
|
2662
2736
|
return failure([message(`Value for ${name} node must be a list.`)]);
|
|
2663
2737
|
}
|
|
2664
|
-
return compoundPredicate(name, value, tableAlias, apiName, input, joins);
|
|
2738
|
+
return compoundPredicate(name, value, tableAlias, apiName, input, joins, draftFunctions);
|
|
2665
2739
|
}
|
|
2666
2740
|
if (name === NotOperator) {
|
|
2667
|
-
const children = fieldsToFilters([value], tableAlias, apiName, input, CompoundOperator.and, joins);
|
|
2741
|
+
const children = fieldsToFilters([value], tableAlias, apiName, input, CompoundOperator.and, joins, draftFunctions);
|
|
2668
2742
|
// take the children of a not predicate
|
|
2669
2743
|
// and wrap them all inside it
|
|
2670
2744
|
return children.flatMap((container) => {
|
|
@@ -2682,10 +2756,10 @@ function filter(name, value, tableAlias, apiName, input, joins) {
|
|
|
2682
2756
|
if (!isObjectValueNode$1(value)) {
|
|
2683
2757
|
return failure([message('Filter node must be an object or list.')]);
|
|
2684
2758
|
}
|
|
2685
|
-
return fieldFilter(name, value, tableAlias, apiName, input, joins);
|
|
2759
|
+
return fieldFilter(name, value, tableAlias, apiName, input, joins, draftFunctions);
|
|
2686
2760
|
}
|
|
2687
|
-
function compoundPredicate(operator, list, joinAlias, apiName, input, joins) {
|
|
2688
|
-
return fieldsToFilters(list.values, joinAlias, apiName, input, operator, joins);
|
|
2761
|
+
function compoundPredicate(operator, list, joinAlias, apiName, input, joins, draftFunctions) {
|
|
2762
|
+
return fieldsToFilters(list.values, joinAlias, apiName, input, operator, joins, draftFunctions);
|
|
2689
2763
|
}
|
|
2690
2764
|
/**
|
|
2691
2765
|
* spans a FieldNode when its ObjectValueNode is passed. All the predicates are added into the 'join' array.
|
|
@@ -2696,7 +2770,7 @@ function compoundPredicate(operator, list, joinAlias, apiName, input, joins) {
|
|
|
2696
2770
|
* @param joins
|
|
2697
2771
|
* @returns undefined predicate
|
|
2698
2772
|
*/
|
|
2699
|
-
function spanningFilter(fieldInfo, fieldNode, alias, input, joins) {
|
|
2773
|
+
function spanningFilter(fieldInfo, fieldNode, alias, input, joins, draftFunctions) {
|
|
2700
2774
|
const { apiName: fieldName, referenceToInfos, relationshipName } = fieldInfo;
|
|
2701
2775
|
const referenceInfo = referenceToInfos[0];
|
|
2702
2776
|
const jsonAlias = `${alias}.${relationshipName}`;
|
|
@@ -2715,7 +2789,7 @@ function spanningFilter(fieldInfo, fieldNode, alias, input, joins) {
|
|
|
2715
2789
|
joins.push(join);
|
|
2716
2790
|
// moves constraint predicate to where
|
|
2717
2791
|
const constraintPredicates = [];
|
|
2718
|
-
const filterResult = fieldsToFilters([fieldNode], jsonAlias, apiName, input, CompoundOperator.and, joins);
|
|
2792
|
+
const filterResult = fieldsToFilters([fieldNode], jsonAlias, apiName, input, CompoundOperator.and, joins, draftFunctions);
|
|
2719
2793
|
if (filterResult.isSuccess === false) {
|
|
2720
2794
|
return filterResult;
|
|
2721
2795
|
}
|
|
@@ -2727,7 +2801,11 @@ function spanningFilter(fieldInfo, fieldNode, alias, input, joins) {
|
|
|
2727
2801
|
});
|
|
2728
2802
|
return success({ predicate: combinePredicates(constraintPredicates, CompoundOperator.and) });
|
|
2729
2803
|
}
|
|
2730
|
-
function
|
|
2804
|
+
function isIDValueField$1(fieldInfo) {
|
|
2805
|
+
return (fieldInfo.apiName === 'Id' ||
|
|
2806
|
+
(fieldInfo.referenceToInfos !== undefined && fieldInfo.referenceToInfos.length > 0));
|
|
2807
|
+
}
|
|
2808
|
+
function fieldFilter(fieldName, fieldNode, alias, apiName, input, joins, draftFunctions) {
|
|
2731
2809
|
const fieldInfoResult = getFieldInfo(apiName, fieldName, input);
|
|
2732
2810
|
if (fieldInfoResult.isSuccess === false) {
|
|
2733
2811
|
return failure([fieldInfoResult.error]);
|
|
@@ -2737,8 +2815,9 @@ function fieldFilter(fieldName, fieldNode, alias, apiName, input, joins) {
|
|
|
2737
2815
|
return failure([message(`Field ${fieldName} for type ${apiName} not found.`)]);
|
|
2738
2816
|
}
|
|
2739
2817
|
if (fieldInfo.dataType === 'Reference' && fieldInfo.relationshipName === fieldName) {
|
|
2740
|
-
return spanningFilter(fieldInfo, fieldNode, alias, input, joins);
|
|
2818
|
+
return spanningFilter(fieldInfo, fieldNode, alias, input, joins, draftFunctions);
|
|
2741
2819
|
}
|
|
2820
|
+
const idProcessingNeeded = isIDValueField$1(fieldInfo);
|
|
2742
2821
|
const extract = {
|
|
2743
2822
|
type: ValueType.Extract,
|
|
2744
2823
|
jsonAlias: alias,
|
|
@@ -2826,7 +2905,29 @@ function fieldFilter(fieldName, fieldNode, alias, apiName, input, joins) {
|
|
|
2826
2905
|
children,
|
|
2827
2906
|
};
|
|
2828
2907
|
}
|
|
2829
|
-
|
|
2908
|
+
const rightOperand = op.value;
|
|
2909
|
+
if (idProcessingNeeded) {
|
|
2910
|
+
if (isStringLiteral(rightOperand)) {
|
|
2911
|
+
if (rightOperand.value !== null) {
|
|
2912
|
+
if (draftFunctions.isDraftId(rightOperand.value)) {
|
|
2913
|
+
rightOperand.value = draftFunctions.getCanonicalId(rightOperand.value);
|
|
2914
|
+
}
|
|
2915
|
+
}
|
|
2916
|
+
}
|
|
2917
|
+
else if (isStringArray(rightOperand)) {
|
|
2918
|
+
if (rightOperand.value !== null) {
|
|
2919
|
+
rightOperand.value = rightOperand.value.map((originalId) => {
|
|
2920
|
+
if (originalId !== null) {
|
|
2921
|
+
if (draftFunctions.isDraftId(originalId)) {
|
|
2922
|
+
return draftFunctions.getCanonicalId(originalId);
|
|
2923
|
+
}
|
|
2924
|
+
}
|
|
2925
|
+
return originalId;
|
|
2926
|
+
});
|
|
2927
|
+
}
|
|
2928
|
+
}
|
|
2929
|
+
}
|
|
2930
|
+
return comparison(extract, op.operator, rightOperand);
|
|
2830
2931
|
});
|
|
2831
2932
|
const combined = combinePredicates(comparisons.concat(...dateFunction.value), CompoundOperator.and);
|
|
2832
2933
|
const container = {
|
|
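A minimal sketch (not part of the package) of what the id handling above does to the right operand of a comparison before SQL generation, assuming a draftFunctions implementation backed by a plain lookup map. The 'StringLiteral'/'StringArray' tags stand in for the ValueType enum members used in the bundle.

    // Hypothetical draftFunctions implementation; the real one is injected by the runtime.
    const draftMap = new Map([['DRAFT-123', '001xx000003GYcFAAW']]);
    const draftFunctions = {
        isDraftId: (id) => draftMap.has(id),
        getCanonicalId: (id) => draftMap.get(id),
    };

    // Mirrors the branch above: string literals and string arrays on Id-valued
    // fields get draft ids swapped for their canonical ids.
    function canonicalizeRightOperand(rightOperand) {
        if (rightOperand.type === 'StringLiteral' && rightOperand.value !== null) {
            if (draftFunctions.isDraftId(rightOperand.value)) {
                rightOperand.value = draftFunctions.getCanonicalId(rightOperand.value);
            }
        }
        else if (rightOperand.type === 'StringArray' && rightOperand.value !== null) {
            rightOperand.value = rightOperand.value.map((id) =>
                id !== null && draftFunctions.isDraftId(id) ? draftFunctions.getCanonicalId(id) : id);
        }
        return rightOperand;
    }

    // { Id: { eq: "DRAFT-123" } } ends up comparing against the canonical 18-character id.
    console.log(canonicalizeRightOperand({ type: 'StringLiteral', value: 'DRAFT-123' }));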
@@ -3033,7 +3134,11 @@ function operatorWithValue(operator, valueNode, objectInfoDataType) {
|
|
|
3033
3134
|
}
|
|
3034
3135
|
if (objectInfoDataType === 'Double') {
|
|
3035
3136
|
if (isScalarOperatorType(operator)) {
|
|
3036
|
-
|
|
3137
|
+
// allow a float/double value to be passed
|
|
3138
|
+
// also allow an integer to be passed to a double, but not a double to an integer
|
|
3139
|
+
const isFloatOrInt = is(valueNode, 'FloatValue') ||
|
|
3140
|
+
is(valueNode, 'IntValue');
|
|
3141
|
+
return isFloatOrInt
|
|
3037
3142
|
? success({
|
|
3038
3143
|
type: 'DoubleOperator',
|
|
3039
3144
|
operator,
|
|
@@ -3922,7 +4027,7 @@ function recordQuery(selection, apiName, alias, predicates, input) {
|
|
|
3922
4027
|
const orderByJoins = [];
|
|
3923
4028
|
const orderByResult = parseOrderBy(orderByArg, alias, apiName, input.objectInfoMap, orderByJoins);
|
|
3924
4029
|
const filterJoins = [];
|
|
3925
|
-
const whereResult = recordFilter(whereArg, alias, apiName, input.objectInfoMap, filterJoins);
|
|
4030
|
+
const whereResult = recordFilter(whereArg, alias, apiName, input.objectInfoMap, filterJoins, input.draftFunctions);
|
|
3926
4031
|
const scopeJoins = [];
|
|
3927
4032
|
const scopeResult = scopeFilter(scopeArg, alias, apiName, input, scopeJoins);
|
|
3928
4033
|
let additionalPredicates = [];
|
|
@@ -3970,6 +4075,33 @@ function recordQuery(selection, apiName, alias, predicates, input) {
|
|
|
3970
4075
|
const draftsField = { type: FieldType.Scalar, extract, path: 'node._drafts' };
|
|
3971
4076
|
const idExtract = { type: ValueType.Extract, jsonAlias: alias, field: 'Id' };
|
|
3972
4077
|
const idField = { type: FieldType.Scalar, extract: idExtract, path: 'node.Id' };
|
|
4078
|
+
// When the exclude stale records gate is open and there is a root timestamp
|
|
4079
|
+
// in the parser input, inject an additional predicate to limit the search
|
|
4080
|
+
// to records that either have drafts associated to them or were ingested at
|
|
4081
|
+
// least as recently as the query.
|
|
4082
|
+
if (excludeStaleRecordsGate.isOpen({ fallback: false }) && input.rootTimestamp !== undefined) {
|
|
4083
|
+
const timestampCheck = {
|
|
4084
|
+
type: PredicateType$1.comparison,
|
|
4085
|
+
left: {
|
|
4086
|
+
type: ValueType.Extract,
|
|
4087
|
+
jsonAlias: alias,
|
|
4088
|
+
field: 'ingestionTimestamp',
|
|
4089
|
+
metadata: true,
|
|
4090
|
+
},
|
|
4091
|
+
operator: ComparisonOperator.gte,
|
|
4092
|
+
right: { type: ValueType.IntLiteral, value: input.rootTimestamp },
|
|
4093
|
+
};
|
|
4094
|
+
const isDraft = {
|
|
4095
|
+
type: PredicateType$1.nullComparison,
|
|
4096
|
+
left: { type: ValueType.Extract, jsonAlias: alias, field: 'drafts' },
|
|
4097
|
+
operator: NullComparisonOperator.isNot,
|
|
4098
|
+
};
|
|
4099
|
+
predicates.push({
|
|
4100
|
+
type: PredicateType$1.compound,
|
|
4101
|
+
operator: CompoundOperator.or,
|
|
4102
|
+
children: [timestampCheck, isDraft],
|
|
4103
|
+
});
|
|
4104
|
+
}
|
|
3973
4105
|
return queryContainer(internalFields, alias, apiName, predicates).map((result) => {
|
|
3974
4106
|
const { fields, predicates } = result;
|
|
3975
4107
|
const allFields = removeDuplicateFields(fields.concat(...[draftsField, idField]));
|
|
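For illustration (not the package's exact output): the OR predicate pushed above should render to SQL along these lines once the metadata-aware extract from the earlier hunk is applied — stale rows are kept only if they carry drafts. The alias and inlined timestamp are hypothetical; the real query binds parameters.

    // rootTimestamp comes from the query's own cache metadata (ingestionTimestamp minus 10ms).
    const rootTimestamp = 1714406400000 - 10;

    // Approximate rendered form of the injected predicate for a hypothetical "Account" alias:
    // either the record was ingested at least as recently as the query, or it has drafts.
    const staleRecordGuard =
        `(json_extract("Account".metadata, '$.ingestionTimestamp') >= ${rootTimestamp} ` +
        `OR json_extract("Account".data, '$.drafts') IS NOT NULL)`;

    console.log(staleRecordGuard);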
@@ -4018,6 +4150,20 @@ function rootRecordQuery(selection, input) {
|
|
|
4018
4150
|
if (input.objectInfoMap[alias] === undefined) {
|
|
4019
4151
|
return failure([missingObjectInfo(apiName)]);
|
|
4020
4152
|
}
|
|
4153
|
+
// When the exclude stale records gate is open and the query has an
|
|
4154
|
+
// ingestion timestamp in its cache metadata, associate that with the input
|
|
4155
|
+
// so it can later be used to limit the search to records that were ingested at
|
|
4156
|
+
// least as recently as the query.
|
|
4157
|
+
if (excludeStaleRecordsGate.isOpen({ fallback: false })) {
|
|
4158
|
+
const key = input.connectionKeyBuilder(selection, input.config.variables);
|
|
4159
|
+
const queryMetadata = input.metadata[key];
|
|
4160
|
+
// If there is no metadata for this query or it somehow lacks a timestamp
|
|
4161
|
+
// skip setting the root timestamp
|
|
4162
|
+
if (queryMetadata !== undefined && queryMetadata.ingestionTimestamp !== undefined) {
|
|
4163
|
+
// subtract 10ms from timestamp to account for ingestion processing time
|
|
4164
|
+
input.rootTimestamp = queryMetadata.ingestionTimestamp - 10;
|
|
4165
|
+
}
|
|
4166
|
+
}
|
|
4021
4167
|
return recordQuery(selection, alias, apiName, [], input);
|
|
4022
4168
|
}
|
|
4023
4169
|
function rootQuery(recordNodes, input) {
|
|
@@ -4084,27 +4230,19 @@ function generateVariableGQLQuery(document, variables) {
|
|
|
4084
4230
|
? node.arguments
|
|
4085
4231
|
.map((args) => {
|
|
4086
4232
|
const { value, name } = args;
|
|
4233
|
+
//The variable is on the top level, for example `where: ${filter}`
|
|
4087
4234
|
if (is(value, 'Variable')) {
|
|
4088
|
-
|
|
4089
|
-
|
|
4090
|
-
|
|
4091
|
-
|
|
4092
|
-
return `${name}: ${transform(query)}`;
|
|
4093
|
-
};
|
|
4094
|
-
switch (name) {
|
|
4095
|
-
case 'scope':
|
|
4096
|
-
case 'orderBy':
|
|
4097
|
-
return buildRecordQueryString(name, jsonString, removeAllQuotations);
|
|
4098
|
-
default:
|
|
4099
|
-
return buildRecordQueryString(name, jsonString, removeQuotationsFromKeys);
|
|
4100
|
-
}
|
|
4101
|
-
}
|
|
4102
|
-
return '';
|
|
4235
|
+
return generateVariableNodeQuery(value, name, name, variables);
|
|
4236
|
+
}
|
|
4237
|
+
else if (isObjectValueNode$1(value)) {
|
|
4238
|
+
return generateVariableSubQuery(value, name, name, variables);
|
|
4103
4239
|
}
|
|
4104
4240
|
})
|
|
4105
4241
|
.filter(Boolean)
|
|
4106
4242
|
: undefined;
|
|
4107
|
-
return gqlArguments
|
|
4243
|
+
return gqlArguments
|
|
4244
|
+
? `${node.name} ${gqlArguments.length > 0 ? '(' + gqlArguments.join(',') + ')' : ''} { Id }`
|
|
4245
|
+
: '';
|
|
4108
4246
|
})
|
|
4109
4247
|
//remove empty strings
|
|
4110
4248
|
.filter(Boolean)
|
|
@@ -4114,6 +4252,51 @@ function generateVariableGQLQuery(document, variables) {
|
|
|
4114
4252
|
return [accu.slice(0, 1), query, accu.slice(1, length)].join('');
|
|
4115
4253
|
}, '{ }'));
|
|
4116
4254
|
}
|
|
4255
|
+
/**
|
|
4256
|
+
* Given a LuvioValueNode, generates a query string with its variable nodes replaced with their actual values.
|
|
4257
|
+
* @param valueNode G
|
|
4258
|
+
* @param name
|
|
4259
|
+
* @param type
|
|
4260
|
+
* @param variables
|
|
4261
|
+
* @returns
|
|
4262
|
+
*/
|
|
4263
|
+
function generateVariableSubQuery(valueNode, name, type, variables) {
|
|
4264
|
+
switch (valueNode.kind) {
|
|
4265
|
+
case Kind.OBJECT: {
|
|
4266
|
+
// For example, `{ Id: { eq: $draftId } }` is a `ObjectValueNode`, which has field keys 'Id'
|
|
4267
|
+
const resultQuery = keys$5(valueNode.fields)
|
|
4268
|
+
.map((key) => generateVariableSubQuery(valueNode.fields[key], key, type, variables))
|
|
4269
|
+
.filter((subquery) => subquery.length > 0)
|
|
4270
|
+
.join(',');
|
|
4271
|
+
if (resultQuery.length > 0) {
|
|
4272
|
+
return `${name}: {${resultQuery}}`;
|
|
4273
|
+
}
|
|
4274
|
+
return resultQuery;
|
|
4275
|
+
}
|
|
4276
|
+
case Kind.VARIABLE:
|
|
4277
|
+
return generateVariableNodeQuery(valueNode, name, type, variables);
|
|
4278
|
+
default:
|
|
4279
|
+
return '';
|
|
4280
|
+
}
|
|
4281
|
+
}
|
|
4282
|
+
// Generate a query string for the variable node with its actual value.
|
|
4283
|
+
function generateVariableNodeQuery(value, name, type, variables) {
|
|
4284
|
+
const variable = variables[value.name];
|
|
4285
|
+
if (variable) {
|
|
4286
|
+
const jsonString = JSON.stringify(variable);
|
|
4287
|
+
const buildRecordQueryString = (name, query, transform) => {
|
|
4288
|
+
return `${name}: ${transform(query)}`;
|
|
4289
|
+
};
|
|
4290
|
+
switch (type) {
|
|
4291
|
+
case 'scope':
|
|
4292
|
+
case 'orderBy':
|
|
4293
|
+
return buildRecordQueryString(name, jsonString, removeAllQuotations);
|
|
4294
|
+
default:
|
|
4295
|
+
return buildRecordQueryString(name, jsonString, removeQuotationsFromKeys);
|
|
4296
|
+
}
|
|
4297
|
+
}
|
|
4298
|
+
return '';
|
|
4299
|
+
}
|
|
4117
4300
|
/**
|
|
4118
4301
|
* Given an AST with variables
|
|
4119
4302
|
* Swap out the LuvioArgumentNodes on the original AST with ones generated from its variables.
|
|
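A rough, self-contained sketch (not from the package) of the recursive descent generateVariableSubQuery performs: object value nodes recurse field by field, and a Variable leaf is replaced with the JSON-serialized value from the variables map. The node shapes and the quote handling below are simplified assumptions (the real code additionally strips quotation marks from keys).

    // Simplified AST nodes: objects carry `fields`, variables carry the variable `name`.
    const whereValue = {
        kind: 'ObjectValue',
        fields: { Id: { kind: 'ObjectValue', fields: { eq: { kind: 'Variable', name: 'draftId' } } } },
    };
    const variables = { draftId: '001xx000003GYcFAAW' };

    function subQuery(valueNode, name) {
        switch (valueNode.kind) {
            case 'ObjectValue': {
                const inner = Object.keys(valueNode.fields)
                    .map((key) => subQuery(valueNode.fields[key], key))
                    .filter((part) => part.length > 0)
                    .join(',');
                return inner.length > 0 ? `${name}: {${inner}}` : inner;
            }
            case 'Variable': {
                const value = variables[valueNode.name];
                return value === undefined ? '' : `${name}: ${JSON.stringify(value)}`;
            }
            default:
                return '';
        }
    }

    // Produces something like: where: {Id: {eq: "001xx000003GYcFAAW"}}
    console.log(subQuery(whereValue, 'where'));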
@@ -4132,14 +4315,43 @@ function swapVariableArguments(document, variables) {
|
|
|
4132
4315
|
const { name } = node;
|
|
4133
4316
|
const first = allArgumentRecords.find((n) => n.name === name);
|
|
4134
4317
|
if (first) {
|
|
4135
|
-
|
|
4136
|
-
first.arguments =
|
|
4137
|
-
? first.arguments.filter((x) => x.value.kind !== 'Variable')
|
|
4138
|
-
: [], node.arguments ? node.arguments : []);
|
|
4318
|
+
const swappedArgments = swapArgumentWithVariableNodes(node.arguments, first.arguments);
|
|
4319
|
+
first.arguments = swappedArgments ? swappedArgments : [];
|
|
4139
4320
|
}
|
|
4140
4321
|
});
|
|
4141
4322
|
}
|
|
4142
4323
|
}
|
|
4324
|
+
// Replaces the variable node in original LuvioArgumentNode with the actual value in the swapped node with the same path.
|
|
4325
|
+
function swapArgumentWithVariableNodes(swapped, original) {
|
|
4326
|
+
if (swapped === undefined || original === undefined) {
|
|
4327
|
+
return original;
|
|
4328
|
+
}
|
|
4329
|
+
return original.map((x) => {
|
|
4330
|
+
const targetNode = swapped.find((y) => y.name === x.name);
|
|
4331
|
+
if (targetNode === undefined) {
|
|
4332
|
+
return x;
|
|
4333
|
+
}
|
|
4334
|
+
if (x.value.kind === 'Variable') {
|
|
4335
|
+
return targetNode;
|
|
4336
|
+
}
|
|
4337
|
+
swapValueNodeWithVariableNodes(x.value, targetNode.value);
|
|
4338
|
+
return x;
|
|
4339
|
+
});
|
|
4340
|
+
}
|
|
4341
|
+
function swapValueNodeWithVariableNodes(original, swapped) {
|
|
4342
|
+
if (original.kind === Kind.OBJECT) {
|
|
4343
|
+
for (const key of keys$5(original.fields)) {
|
|
4344
|
+
if (isObjectValueNode$1(swapped) && swapped.fields[key]) {
|
|
4345
|
+
if (is(original.fields[key], 'Variable')) {
|
|
4346
|
+
original.fields[key] = swapped.fields[key];
|
|
4347
|
+
}
|
|
4348
|
+
else {
|
|
4349
|
+
swapValueNodeWithVariableNodes(original.fields[key], swapped.fields[key]);
|
|
4350
|
+
}
|
|
4351
|
+
}
|
|
4352
|
+
}
|
|
4353
|
+
}
|
|
4354
|
+
}
|
|
4143
4355
|
//find top level record queries
|
|
4144
4356
|
function findRecordSelections(document) {
|
|
4145
4357
|
return document.definitions
|
|
@@ -4174,12 +4386,24 @@ class StoreEvalPreconditioner {
|
|
|
4174
4386
|
* Missing object info records trigger the objectInfoService (same instance shared with Drafts code) to
|
|
4175
4387
|
* use the getObjectInfos adapter to efficiently fetch the necessary records.
|
|
4176
4388
|
*/
|
|
4177
|
-
async createRootQuery(config, objectInfoService, userId) {
|
|
4389
|
+
async createRootQuery(config, objectInfoService, userId, draftFunctions, connectionKeyBuilder, sqliteStore) {
|
|
4178
4390
|
const { query: ast, variables } = config;
|
|
4179
4391
|
swapVariableArguments(ast, variables);
|
|
4180
4392
|
// Parse out top-level record queries types we know we will need, since spanning fields will
|
|
4181
4393
|
// require at least this top level record present to resolve relationship lookups
|
|
4182
|
-
const
|
|
4394
|
+
const recordSelections = findRecordSelections(ast);
|
|
4395
|
+
let metadata = {};
|
|
4396
|
+
if (excludeStaleRecordsGate.isOpen({ fallback: false })) {
|
|
4397
|
+
const keys = recordSelections.map((rs) => connectionKeyBuilder(rs, variables));
|
|
4398
|
+
let sqlResult = await sqliteStore.query(`select key, metadata from lds_data where key in (${keys
|
|
4399
|
+
.map(() => '?')
|
|
4400
|
+
.join(',')})`, keys);
|
|
4401
|
+
metadata = sqlResult.rows.reduce((metadata, row) => {
|
|
4402
|
+
metadata[row[0]] = JSON.parse(row[1]);
|
|
4403
|
+
return metadata;
|
|
4404
|
+
}, {});
|
|
4405
|
+
}
|
|
4406
|
+
const topLevelNeededRecords = recordSelections.map((selection) => selection.name);
|
|
4183
4407
|
// Seed the initial list of things to fetch
|
|
4184
4408
|
const neededObjectInfos = new Set(topLevelNeededRecords);
|
|
4185
4409
|
// Seed the list of things we've _tried_ to fetch. As we iterate and
|
|
@@ -4219,8 +4443,12 @@ class StoreEvalPreconditioner {
|
|
|
4219
4443
|
};
|
|
4220
4444
|
// Attempt to parse the AST given what we know already...
|
|
4221
4445
|
astTransformResult = transform(ast, {
|
|
4446
|
+
config,
|
|
4222
4447
|
userId,
|
|
4223
4448
|
objectInfoMap,
|
|
4449
|
+
draftFunctions,
|
|
4450
|
+
connectionKeyBuilder,
|
|
4451
|
+
metadata,
|
|
4224
4452
|
});
|
|
4225
4453
|
if (astTransformResult.isSuccess === false) {
|
|
4226
4454
|
for (const error of astTransformResult.error) {
|
|
@@ -4258,8 +4486,6 @@ class StoreEvalPreconditioner {
|
|
|
4258
4486
|
}
|
|
4259
4487
|
}
|
|
4260
4488
|
|
|
4261
|
-
const { isArray: isArray$4 } = Array;
|
|
4262
|
-
|
|
4263
4489
|
/**
|
|
4264
4490
|
* Checks a GraphQL-shaped network response for errors, returning true if it does
|
|
4265
4491
|
* and false otherwise
|
|
@@ -4297,15 +4523,15 @@ async function evaluateSqlite(query, eventEmitter, store) {
|
|
|
4297
4523
|
return { data, seenRecords };
|
|
4298
4524
|
}
|
|
4299
4525
|
const wrapStartEndEvents = (storeEval) => {
|
|
4300
|
-
return async (config, nonEvaluatedSnapshotOrPromise, observers) => {
|
|
4526
|
+
return async (config, nonEvaluatedSnapshotOrPromise, observers, connectionKeyBuilder) => {
|
|
4301
4527
|
const eventEmitter = createCustomAdapterEventEmitter(GRAPHQL_EVAL_NAMESPACE$1, observers);
|
|
4302
4528
|
eventEmitter({ type: 'graphql-eval-start' });
|
|
4303
|
-
const snapshot = await storeEval(config, nonEvaluatedSnapshotOrPromise, observers);
|
|
4529
|
+
const snapshot = await storeEval(config, nonEvaluatedSnapshotOrPromise, observers, connectionKeyBuilder);
|
|
4304
4530
|
eventEmitter({ type: 'graphql-eval-end' });
|
|
4305
4531
|
return snapshot;
|
|
4306
4532
|
};
|
|
4307
4533
|
};
|
|
4308
|
-
function sqliteStoreEvalFactory(userId, sqliteStore, objectInfoService) {
|
|
4534
|
+
function sqliteStoreEvalFactory(userId, sqliteStore, objectInfoService, draftFunctions) {
|
|
4309
4535
|
// if the store does not support eval then use the noop version
|
|
4310
4536
|
if (sqliteStore.isEvalSupported() === false) {
|
|
4311
4537
|
return noopStoreEval;
|
|
@@ -4313,7 +4539,7 @@ function sqliteStoreEvalFactory(userId, sqliteStore, objectInfoService) {
|
|
|
4313
4539
|
const preconditioner = new StoreEvalPreconditioner();
|
|
4314
4540
|
return makeStoreEval(preconditioner, objectInfoService, userId, async () => {
|
|
4315
4541
|
return undefined;
|
|
4316
|
-
}, (query, _context, eventEmitter) => evaluateSqlite(query, eventEmitter, sqliteStore));
|
|
4542
|
+
}, (query, _context, eventEmitter) => evaluateSqlite(query, eventEmitter, sqliteStore), draftFunctions, sqliteStore);
|
|
4317
4543
|
}
|
|
4318
4544
|
async function noopStoreEval(_config, nonEvaluatedSnapshotOrPromise, _observers) {
|
|
4319
4545
|
return nonEvaluatedSnapshotOrPromise;
|
|
@@ -4332,8 +4558,8 @@ function isErrorSnapshotThatShouldGetReturnedToCaller$1(snapshot) {
|
|
|
4332
4558
|
function generateUniqueRecordId$1() {
|
|
4333
4559
|
return `${GRAPHQL_ROOT_KEY$1}${Date.now() + Math.random().toFixed(5).split('.')[1]}`;
|
|
4334
4560
|
}
|
|
4335
|
-
function makeStoreEval(preconditioner, objectInfoService, userId, contextProvider, queryEvaluator) {
|
|
4336
|
-
const storeEval = async (config, nonEvaluatedSnapshotOrPromise, observers) => {
|
|
4561
|
+
function makeStoreEval(preconditioner, objectInfoService, userId, contextProvider, queryEvaluator, draftFunctions, sqliteStore) {
|
|
4562
|
+
const storeEval = async (config, nonEvaluatedSnapshotOrPromise, observers, connectionKeyBuilder) => {
|
|
4337
4563
|
const eventEmitter = createCustomAdapterEventEmitter(GRAPHQL_EVAL_NAMESPACE$1, observers);
|
|
4338
4564
|
// the non-eval'ed input could either be a snapshot or promise of one so
|
|
4339
4565
|
// await it here to normalize the input to a snapshot
|
|
@@ -4362,7 +4588,7 @@ function makeStoreEval(preconditioner, objectInfoService, userId, contextProvide
|
|
|
4362
4588
|
context = await contextProvider();
|
|
4363
4589
|
// Ensures ObjectInfo metadata is available for the request. Sourcing records from L1/L2/Network
|
|
4364
4590
|
// is delegated to the getObjectInfo adapter via objectInfoService
|
|
4365
|
-
rootQuery = await preconditioner.createRootQuery(config, objectInfoService, userId);
|
|
4591
|
+
rootQuery = await preconditioner.createRootQuery(config, objectInfoService, userId, draftFunctions, connectionKeyBuilder, sqliteStore);
|
|
4366
4592
|
}
|
|
4367
4593
|
catch (error) {
|
|
4368
4594
|
eventEmitter({
|
|
@@ -4393,7 +4619,7 @@ function makeStoreEval(preconditioner, objectInfoService, userId, contextProvide
|
|
|
4393
4619
|
try {
|
|
4394
4620
|
const { data, seenRecords } = await queryEvaluator(rootQuery, context, eventEmitter);
|
|
4395
4621
|
const rebuildWithStoreEval = ((originalSnapshot) => {
|
|
4396
|
-
return storeEval(config, originalSnapshot, observers);
|
|
4622
|
+
return storeEval(config, originalSnapshot, observers, connectionKeyBuilder);
|
|
4397
4623
|
});
|
|
4398
4624
|
const recordId = generateUniqueRecordId$1();
|
|
4399
4625
|
// if the non-eval'ed snapshot was an error then we return a synthetic
|
|
@@ -4692,7 +4918,7 @@ function createDraftSynthesisErrorResponse(message = 'failed to synthesize draft
|
|
|
4692
4918
|
return new DraftErrorFetchResponse(HttpStatusCode.BadRequest, error);
|
|
4693
4919
|
}
|
|
4694
4920
|
|
|
4695
|
-
const { keys: keys$4, create: create$4, assign: assign$
|
|
4921
|
+
const { keys: keys$4, create: create$4, assign: assign$4, values: values$1 } = Object;
|
|
4696
4922
|
const { stringify: stringify$4, parse: parse$4 } = JSON;
|
|
4697
4923
|
const { isArray: isArray$3 } = Array;
|
|
4698
4924
|
|
|
@@ -4739,7 +4965,6 @@ function buildLuvioOverrideForDraftAdapters(luvio, handler, extractTargetIdFromC
|
|
|
4739
4965
|
}
|
|
4740
4966
|
|
|
4741
4967
|
const DraftIdMappingKeyPrefix240 = 'DraftIdMapping::';
|
|
4742
|
-
const DraftKeyMappingKeyPrefix = 'DraftKeyMapping::V2::';
|
|
4743
4968
|
const DRAFT_ID_MAPPINGS_SEGMENT = 'DRAFT_ID_MAPPINGS';
|
|
4744
4969
|
function isLegacyDraftIdMapping(key, data) {
|
|
4745
4970
|
return key.startsWith(DraftIdMappingKeyPrefix240);
|
|
@@ -4749,9 +4974,6 @@ function isLegacyDraftIdMapping(key, data) {
|
|
|
4749
4974
|
function getRecordKeyForId$1(id) {
|
|
4750
4975
|
return `UiApi::RecordRepresentation:${id}`;
|
|
4751
4976
|
}
|
|
4752
|
-
function generateDraftIdMappingKey(draftIdMapping) {
|
|
4753
|
-
return `${DraftKeyMappingKeyPrefix}${draftIdMapping.draftKey}::${draftIdMapping.canonicalKey}`;
|
|
4754
|
-
}
|
|
4755
4977
|
/**
|
|
4756
4978
|
*
|
|
4757
4979
|
* @param mappingIds (optional) requested mapping ids, if undefined all will be retrieved
|
|
@@ -4785,6 +5007,15 @@ async function getDraftIdMappings(durableStore, mappingIds) {
|
|
|
4785
5007
|
}
|
|
4786
5008
|
return mappings;
|
|
4787
5009
|
}
|
|
5010
|
+
async function clearDraftIdSegment(durableStore) {
|
|
5011
|
+
const entries = await durableStore.getAllEntries(DRAFT_ID_MAPPINGS_SEGMENT);
|
|
5012
|
+
if (entries) {
|
|
5013
|
+
const keys$1 = keys$4(entries);
|
|
5014
|
+
if (keys$1.length > 0) {
|
|
5015
|
+
await durableStore.evictEntries(keys$1, DRAFT_ID_MAPPINGS_SEGMENT);
|
|
5016
|
+
}
|
|
5017
|
+
}
|
|
5018
|
+
}
|
|
4788
5019
|
|
|
4789
5020
|
/**
|
|
4790
5021
|
* Generates a time-ordered, unique id to associate with a DraftAction. Ensures
|
|
@@ -4875,9 +5106,6 @@ function customActionHandler(executor, id, draftQueue) {
|
|
|
4875
5106
|
});
|
|
4876
5107
|
return queueOperations;
|
|
4877
5108
|
};
|
|
4878
|
-
const getRedirectMappings = (_action) => {
|
|
4879
|
-
return undefined;
|
|
4880
|
-
};
|
|
4881
5109
|
return {
|
|
4882
5110
|
handlerId: id,
|
|
4883
5111
|
enqueue: (data) => {
|
|
@@ -4889,7 +5117,6 @@ function customActionHandler(executor, id, draftQueue) {
|
|
|
4889
5117
|
handleReplaceAction: () => {
|
|
4890
5118
|
throw Error('replaceAction not supported for custom actions');
|
|
4891
5119
|
},
|
|
4892
|
-
getRedirectMappings,
|
|
4893
5120
|
handleActionRemoved: () => Promise.resolve(),
|
|
4894
5121
|
handleActionCompleted: () => Promise.resolve(),
|
|
4895
5122
|
handleActionEnqueued: () => Promise.resolve(),
|
|
@@ -5064,17 +5291,11 @@ class DurableDraftQueue {
|
|
|
5064
5291
|
const handler = this.getHandler(action.handler);
|
|
5065
5292
|
let queue = await this.getQueueActions();
|
|
5066
5293
|
const queueOperations = handler.getQueueOperationsForCompletingDrafts(queue, action);
|
|
5067
|
-
|
|
5068
|
-
|
|
5069
|
-
|
|
5070
|
-
: idAndKeyMappings.map((m) => {
|
|
5071
|
-
return { draftKey: m.draftKey, canonicalKey: m.canonicalKey };
|
|
5072
|
-
});
|
|
5073
|
-
await this.draftStore.completeAction(queueOperations, keyMappings);
|
|
5074
|
-
queue = await this.getQueueActions();
|
|
5294
|
+
// write the queue operations to the store prior to ingesting the result
|
|
5295
|
+
await this.draftStore.completeAction(queueOperations);
|
|
5296
|
+
await handler.handleActionCompleted(action, queueOperations, values$1(this.handlers));
|
|
5075
5297
|
this.retryIntervalMilliseconds = 0;
|
|
5076
5298
|
this.uploadingActionId = undefined;
|
|
5077
|
-
await handler.handleActionCompleted(action, queueOperations, queue, values$1(this.handlers));
|
|
5078
5299
|
await this.notifyChangedListeners({
|
|
5079
5300
|
type: DraftQueueEventType.ActionCompleted,
|
|
5080
5301
|
action,
|
|
@@ -5393,7 +5614,7 @@ class DurableDraftStore {
|
|
|
5393
5614
|
};
|
|
5394
5615
|
return this.enqueueAction(deleteAction);
|
|
5395
5616
|
}
|
|
5396
|
-
completeAction(queueOperations
|
|
5617
|
+
completeAction(queueOperations) {
|
|
5397
5618
|
const action = () => {
|
|
5398
5619
|
const durableStoreOperations = [];
|
|
5399
5620
|
const { draftStore } = this;
|
|
@@ -5426,18 +5647,6 @@ class DurableDraftStore {
|
|
|
5426
5647
|
});
|
|
5427
5648
|
}
|
|
5428
5649
|
}
|
|
5429
|
-
if (mappings !== undefined) {
|
|
5430
|
-
const entries = {};
|
|
5431
|
-
for (const mapping of mappings) {
|
|
5432
|
-
const mappingKey = generateDraftIdMappingKey(mapping);
|
|
5433
|
-
entries[mappingKey] = { data: mapping };
|
|
5434
|
-
}
|
|
5435
|
-
durableStoreOperations.push({
|
|
5436
|
-
entries,
|
|
5437
|
-
type: 'setEntries',
|
|
5438
|
-
segment: DRAFT_ID_MAPPINGS_SEGMENT,
|
|
5439
|
-
});
|
|
5440
|
-
}
|
|
5441
5650
|
return this.durableStore.batchOperations(durableStoreOperations);
|
|
5442
5651
|
};
|
|
5443
5652
|
return this.enqueueAction(action);
|
|
@@ -5716,7 +5925,7 @@ class AbstractResourceRequestActionHandler {
|
|
|
5716
5925
|
},
|
|
5717
5926
|
];
|
|
5718
5927
|
}
|
|
5719
|
-
async handleActionCompleted(action, queueOperations,
|
|
5928
|
+
async handleActionCompleted(action, queueOperations, allHandlers) {
|
|
5720
5929
|
const { data: request, tag } = action;
|
|
5721
5930
|
const { method } = request;
|
|
5722
5931
|
if (method === 'delete') {
|
|
@@ -5793,6 +6002,8 @@ class AbstractResourceRequestActionHandler {
|
|
|
5793
6002
|
};
|
|
5794
6003
|
// overlay metadata
|
|
5795
6004
|
merged.metadata = { ...targetMetadata, ...sourceMetadata };
|
|
6005
|
+
// put status back to pending to auto-upload if the queue is active and the target is at the head.
|
|
6006
|
+
merged.status = DraftActionStatus.Pending;
|
|
5796
6007
|
return merged;
|
|
5797
6008
|
}
|
|
5798
6009
|
shouldDeleteActionByTagOnRemoval(action) {
|
|
@@ -5828,11 +6039,18 @@ class AbstractResourceRequestActionHandler {
|
|
|
5828
6039
|
async ingestResponses(responses, action) {
|
|
5829
6040
|
const luvio = this.getLuvio();
|
|
5830
6041
|
await luvio.handleSuccessResponse(() => {
|
|
6042
|
+
if (action.status === DraftActionStatus.Completed) {
|
|
6043
|
+
const mappings = this.getRedirectMappings(action);
|
|
6044
|
+
if (mappings) {
|
|
6045
|
+
mappings.forEach((mapping) => {
|
|
6046
|
+
luvio.storeRedirect(mapping.draftKey, mapping.canonicalKey);
|
|
6047
|
+
});
|
|
6048
|
+
}
|
|
6049
|
+
}
|
|
5831
6050
|
for (const entry of responses) {
|
|
5832
6051
|
const { response, synchronousIngest } = entry;
|
|
5833
6052
|
synchronousIngest(response, action);
|
|
5834
6053
|
}
|
|
5835
|
-
// must call base broadcast
|
|
5836
6054
|
return luvio.storeBroadcast();
|
|
5837
6055
|
},
|
|
5838
6056
|
// getTypeCacheKeysRecord uses the response, not the full path factory
|
|
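A condensed sketch (not part of the package) of the completion path the hunk above wires up: once an action finishes with Completed status, the handler's redirect mappings are replayed through luvio.storeRedirect before the responses are ingested, and the durable-aware environment from the earlier hunks persists them to REDIRECT_KEYS. The handler, luvio object, and action fields here are minimal, hypothetical stand-ins.

    const DraftActionStatus = { Completed: 'completed' };

    // Stand-in for the luvio surface used above.
    const luvio = {
        redirects: new Map(),
        storeRedirect(draftKey, canonicalKey) {
            this.redirects.set(draftKey, canonicalKey);
        },
        async storeBroadcast() {},
    };

    // Stand-in handler: maps a completed action's draft key to its server-assigned key.
    const handler = {
        getRedirectMappings(action) {
            return [{ draftKey: action.tag, canonicalKey: `UiApi::RecordRepresentation:${action.serverId}` }];
        },
    };

    async function ingestResponses(responses, action) {
        if (action.status === DraftActionStatus.Completed) {
            const mappings = handler.getRedirectMappings(action);
            if (mappings) {
                mappings.forEach((m) => luvio.storeRedirect(m.draftKey, m.canonicalKey));
            }
        }
        for (const { response, synchronousIngest } of responses) {
            synchronousIngest(response, action);
        }
        return luvio.storeBroadcast();
    }

    // Usage with a hypothetical completed create action.
    ingestResponses([], {
        status: 'completed',
        tag: 'UiApi::RecordRepresentation:DRAFT-123',
        serverId: '001xx000003GYcFAAW',
    }).then(() => console.log(luvio.redirects));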
@@ -6160,6 +6378,8 @@ class DraftManager {
|
|
|
6160
6378
|
|
|
6161
6379
|
function makeEnvironmentDraftAware(luvio, env, durableStore, handlers, draftQueue) {
|
|
6162
6380
|
const draftMetadata = {};
|
|
6381
|
+
// in 246 luvio took charge of persisting redirect mappings; this needs to stick around
|
|
6382
|
+
// for a couple of releases to support older environments
|
|
6163
6383
|
// setup existing store redirects when bootstrapping the environment
|
|
6164
6384
|
(async () => {
|
|
6165
6385
|
const mappings = await getDraftIdMappings(durableStore);
|
|
@@ -6167,23 +6387,9 @@ function makeEnvironmentDraftAware(luvio, env, durableStore, handlers, draftQueu
|
|
|
6167
6387
|
const { draftKey, canonicalKey } = mapping;
|
|
6168
6388
|
env.storeRedirect(draftKey, canonicalKey);
|
|
6169
6389
|
});
|
|
6390
|
+
await env.storeBroadcast(env.rebuildSnapshot, env.snapshotAvailable);
|
|
6391
|
+
await clearDraftIdSegment(durableStore);
|
|
6170
6392
|
})();
|
|
6171
|
-
durableStore.registerOnChangedListener(async (changes) => {
|
|
6172
|
-
const draftIdMappingsIds = [];
|
|
6173
|
-
for (let i = 0, len = changes.length; i < len; i++) {
|
|
6174
|
-
const change = changes[i];
|
|
6175
|
-
if (change.segment === DRAFT_ID_MAPPINGS_SEGMENT) {
|
|
6176
|
-
draftIdMappingsIds.push(...change.ids);
|
|
6177
|
-
}
|
|
6178
|
-
}
|
|
6179
|
-
if (draftIdMappingsIds.length > 0) {
|
|
6180
|
-
const mappings = await getDraftIdMappings(durableStore, draftIdMappingsIds);
|
|
6181
|
-
mappings.forEach((mapping) => {
|
|
6182
|
-
const { draftKey, canonicalKey } = mapping;
|
|
6183
|
-
env.storeRedirect(draftKey, canonicalKey);
|
|
6184
|
-
});
|
|
6185
|
-
}
|
|
6186
|
-
});
|
|
6187
6393
|
const handleSuccessResponse = async function (ingestAndBroadcastFunc, getResponseCacheKeysFunc) {
|
|
6188
6394
|
const queue = await draftQueue.getQueueActions();
|
|
6189
6395
|
if (queue.length === 0) {
|
|
@@ -6251,16 +6457,23 @@ function makeEnvironmentDraftAware(luvio, env, durableStore, handlers, draftQueu
|
|
|
6251
6457
|
|
|
6252
6458
|
const API_NAMESPACE = 'UiApi';
|
|
6253
6459
|
const RECORD_REPRESENTATION_NAME = 'RecordRepresentation';
|
|
6460
|
+
const RECORD_VIEW_ENTITY_REPRESENTATION_NAME = 'RecordViewEntityRepresentation';
|
|
6254
6461
|
const RECORD_ID_PREFIX = `${API_NAMESPACE}::${RECORD_REPRESENTATION_NAME}:`;
|
|
6462
|
+
const RECORD_VIEW_ENTITY_ID_PREFIX = `${API_NAMESPACE}::${RECORD_VIEW_ENTITY_REPRESENTATION_NAME}:Name:`;
|
|
6255
6463
|
const RECORD_FIELDS_KEY_JUNCTION = '__fields__';
|
|
6256
6464
|
function isStoreKeyRecordId(key) {
|
|
6257
6465
|
return key.indexOf(RECORD_ID_PREFIX) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION) === -1;
|
|
6258
6466
|
}
|
|
6467
|
+
function isStoreKeyRecordViewEntity(key) {
|
|
6468
|
+
return (key.indexOf(RECORD_VIEW_ENTITY_ID_PREFIX) > -1 &&
|
|
6469
|
+
key.indexOf(RECORD_FIELDS_KEY_JUNCTION) === -1);
|
|
6470
|
+
}
|
|
6259
6471
|
function isStoreKeyRecordField(key) {
|
|
6260
6472
|
return key.indexOf(RECORD_ID_PREFIX) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION) > -1;
|
|
6261
6473
|
}
|
|
6262
6474
|
function extractRecordIdFromStoreKey(key) {
|
|
6263
|
-
if (key === undefined ||
|
|
6475
|
+
if (key === undefined ||
|
|
6476
|
+
(key.indexOf(RECORD_ID_PREFIX) === -1 && key.indexOf(RECORD_VIEW_ENTITY_ID_PREFIX) === -1)) {
|
|
6264
6477
|
return undefined;
|
|
6265
6478
|
}
|
|
6266
6479
|
const parts = key.split(':');
|
|
@@ -6275,7 +6488,8 @@ function isStoreRecordError(storeRecord) {
|
|
|
6275
6488
|
}
|
|
6276
6489
|
function isEntryDurableRecordRepresentation(entry, key) {
|
|
6277
6490
|
// Either a DurableRecordRepresentation or StoreRecordError can live at a record key
|
|
6278
|
-
return isStoreKeyRecordId(key)
|
|
6491
|
+
return ((isStoreKeyRecordId(key) || isStoreKeyRecordViewEntity(key)) &&
|
|
6492
|
+
entry.data.__type === undefined);
|
|
6279
6493
|
}
|
|
6280
6494
|
|
|
6281
6495
|
/**
|
|
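The hunks above add a second store-key namespace for record view entities alongside plain records. A minimal sketch of how the key predicates classify keys; the concrete ids are invented for illustration and are not taken from the package:

// Illustrative only; helpers are the ones shown in the hunk above, ids are made up.
const recordKey = 'UiApi::RecordRepresentation:001RM000004uuhnYAA';
const recordViewKey = 'UiApi::RecordViewEntityRepresentation:Name:001RM000004uuhnYAA';
const fieldKey = 'UiApi::RecordRepresentation:001RM000004uuhnYAA__fields__Name';

isStoreKeyRecordId(recordKey);             // true
isStoreKeyRecordId(fieldKey);              // false: field keys contain '__fields__'
isStoreKeyRecordViewEntity(recordViewKey); // true: matches the new RecordViewEntityRepresentation prefix
isStoreKeyRecordField(fieldKey);           // true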
@@ -6411,7 +6625,7 @@ function isArrayLike(x) {
        (x.length === 0 || (x.length > 0 && Object.prototype.hasOwnProperty.call(x, x.length - 1))));
}

-const { create: create$3, keys: keys$3, values, entries: entries$2 } = Object;
+const { create: create$3, keys: keys$3, values, entries: entries$2, assign: assign$3 } = Object;
const { stringify: stringify$3, parse: parse$3 } = JSON;
const { isArray: isArray$2 } = Array;

@@ -6850,6 +7064,10 @@ function dateTimeRange(input, op, field, alias) {
    };
}

+const JSON_EXTRACT_PATH_INGESTION_TIMESTAMP = '$.ingestionTimestamp';
+const JSON_EXTRACT_PATH_INGESTION_APINAME = '$.apiName';
+const JSON_EXTRACT_PATH_DRAFTS = '$.drafts';
+
const MultiPickListValueSeparator = ';';
function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draftFunctions) {
    if (!where)
@@ -6911,7 +7129,7 @@ function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draf
    return [
        {
            alias: childAlias,
-            leftPath:
+            leftPath: JSON_EXTRACT_PATH_INGESTION_APINAME,
            operator: '=',
            value: entityName,
            dataType: 'String',
@@ -7211,15 +7429,10 @@ function singlePredicateToSql(predicate, defaultAlias, isChildNotPredicate = fal
    // If an explicit collating sequence is required on an IN operator it should be applied to the left operand,
    // like this: "x COLLATE NOCASE IN (y,z, ...)".
    const nullCheck = `json_extract("${alias}".data, '${leftPath}') ${operator === 'IN' ? 'IS' : 'IS NOT'} ?`;
-
-    if (valueBinding.length > 0) {
-        sql = `json_extract("${alias}".data, '${leftPath}')${isCaseSensitive === true ? '' : ` COLLATE NOCASE`} ${operator} ${questionSql} ${includesNull ? `OR ${nullCheck}` : ''}`;
-    }
-    else {
-        sql = `${includesNull ? nullCheck : ''}`;
-    }
+    sql = `json_extract("${alias}".data, '${leftPath}')${isCaseSensitive === true ? '' : ` COLLATE NOCASE`} ${operator} ${questionSql}`;
    binding.push(...valueBinding);
    if (includesNull) {
+        sql = `(${sql} OR ${nullCheck})`;
        binding.push(null);
    }
}
@@ -7367,11 +7580,18 @@ function buildQuery(config) {
    const joins = buildJoins(config);
    const predicates = buildPredicates(config);
    const orderBy = buildOrderBy(config);
+    const staleRecordsSql = excludeStaleRecordsGate.isOpen({ fallback: false })
+        ? `AND (
+            json_extract("${config.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ?
+            OR json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL
+        )`
+        : '';
    const sql = `
        SELECT "${config.alias}".data
        FROM lds_data "${config.alias}" ${joins.sql}
        WHERE "${config.alias}".key like 'UiApi::RecordRepresentation:%'
-        AND json_extract("${config.alias}".data, '
+        AND json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_INGESTION_APINAME}') = ?
+        ${staleRecordsSql}
        ${predicates.sql}
        ${orderBy.sql}
        LIMIT ?
@@ -7384,6 +7604,7 @@ function buildQuery(config) {
        ...joins.bindings,
        // the api name for the main record type
        config.alias,
+        ...(excludeStaleRecordsGate.isOpen({ fallback: false }) ? [config.ingestionTimestamp] : []),
        // where clause and parent scope bindings
        ...predicates.bindings,
        // limit binding
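When the lds.graphqlEvalExcludeStaleRecords gate is open, buildQuery appends a filter that keeps only rows ingested at or after the query's timestamp, while never excluding rows that carry drafts. A rough sketch of the SQL shape this produces; the "Account" alias and binding order are assumed for illustration:

// Illustrative only: approximate shape of the generated query with the gate open.
const exampleSql = `
    SELECT "Account".data
    FROM lds_data "Account"
    WHERE "Account".key like 'UiApi::RecordRepresentation:%'
    AND json_extract("Account".data, '$.apiName') = ?
    AND (
        json_extract("Account".metadata, '$.ingestionTimestamp') >= ?
        OR json_extract("Account".data, '$.drafts') IS NOT NULL
    )
    LIMIT ?
`;
// bindings would be roughly: [config.alias, config.ingestionTimestamp, ...predicateBindings, limit]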
@@ -7409,19 +7630,33 @@ function buildJoins(config) {
    if (allJoins.length === 0)
        return { sql, bindings };
    sql = allJoins.reduce((joinAccumulator, join) => {
+        let timestampAdded = false;
        const joinConditions = join.conditions.reduce((conditionAccumulator, condition) => {
            let joined_sql;
+            const joinMetadataTimestamp = excludeStaleRecordsGate.isOpen({ fallback: false })
+                ? ` AND (json_extract("${join.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ? OR json_extract("${join.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL)`
+                : '';
            // predicate on a value, use the newly joined table
            if ('type' in condition) {
                const { sql, binding } = predicateToSQL(condition, join.alias);
-                joined_sql = ` AND ${sql}`;
+                joined_sql = ` AND ${sql}${timestampAdded ? '' : joinMetadataTimestamp}`;
                bindings.push(...binding);
+                if (excludeStaleRecordsGate.isOpen({ fallback: false }) &&
+                    timestampAdded === false) {
+                    bindings.push(config.ingestionTimestamp);
+                    timestampAdded = true;
+                }
            }
            else {
                // predicate on a path
                const left = ` AND json_extract("${join.to}".data, '${condition.leftPath}')`;
                const right = `json_extract("${join.alias}".data, '${condition.rightPath}')`;
-                joined_sql = `${left} = ${right}`;
+                joined_sql = `${left} = ${right}${timestampAdded ? '' : joinMetadataTimestamp}`;
+                if (excludeStaleRecordsGate.isOpen({ fallback: false }) &&
+                    timestampAdded === false) {
+                    bindings.push(config.ingestionTimestamp);
+                    timestampAdded = true;
+                }
            }
            conditionAccumulator += joined_sql;
            return conditionAccumulator;
@@ -8100,7 +8335,7 @@ function orderByToPredicate(orderBy, recordType, alias, objectInfoMap, joins) {
function pathForKey(key) {
    switch (key) {
        case 'ApiName':
-            return
+            return JSON_EXTRACT_PATH_INGESTION_APINAME;
        case 'WeakEtag':
            return '$.weakEtag';
        case 'Id':
@@ -8122,7 +8357,7 @@ function scopeToJoins(scope = '', settings) {
        {
            type: PredicateType.single,
            alias: 'ServiceAppointment_AssignedResource',
-            leftPath:
+            leftPath: JSON_EXTRACT_PATH_INGESTION_APINAME,
            operator: '=',
            value: 'AssignedResource',
            dataType: 'String',
@@ -8143,7 +8378,7 @@ function scopeToJoins(scope = '', settings) {
        {
            type: PredicateType.single,
            alias: 'ServiceAppointment_AssignedResource_ServiceResource',
-            leftPath:
+            leftPath: JSON_EXTRACT_PATH_INGESTION_APINAME,
            operator: '=',
            value: 'ServiceResource',
            dataType: 'String',
@@ -8248,14 +8483,21 @@ function addResolversToSchema(schema, polyFields) {
    // Fields of the `RecordQuery` type are the record queries for the entity types
    // supported for the org
    for (const recordQuery of fields) {
-        recordQuery.resolve = function recordConnectionResolver(record, args) {
+        recordQuery.resolve = async function recordConnectionResolver(record, args, { query }, info) {
+            const { name: currentFieldName } = recordQuery;
+            let ingestionTimestamp = 0;
+            if (excludeStaleRecordsGate.isOpen({ fallback: false })) {
+                // at our record query we fetch each ingestion time stamp and pass it down to each lower resolver to query against
+                ingestionTimestamp = await fetchIngestionTimeStampFromDatabase(currentFieldName, info, args, query);
+            }
            // In the SF schema, the relevant arguments are passed into RecordQuery fields, but actually used
            // down in the edge resolvers. For this resolver, we can just return what was passed in
            // to make it available to the next execution step
            return {
                parentArgs: args,
                parentRecord: record,
-                currentFieldName
+                currentFieldName,
+                ingestionTimestamp,
            };
        };
    }
@@ -8301,10 +8543,11 @@ function addResolversToSchema(schema, polyFields) {
    // }
    for (const field of fields) {
        if (field.name === 'node') {
-            field.resolve = function nodeResolver(
+            field.resolve = function nodeResolver(obj, _args, { seenRecordIds }) {
+                const { record, ingestionTimestamp } = obj;
                const recordRepresentation = parse$3(record);
                seenRecordIds.add(recordRepresentation.id);
-                return recordRepresentation;
+                return { recordRepresentation, ingestionTimestamp };
            };
        }
    }
@@ -8327,40 +8570,40 @@ function addResolversToSchema(schema, polyFields) {
    for (const field of fields) {
        switch (field.name) {
            case 'Id':
-                field.resolve = (record) => record.id;
+                field.resolve = ({ recordRepresentation: record }) => record.id;
                break;
            case 'ApiName':
-                field.resolve = (record) => record.apiName;
+                field.resolve = ({ recordRepresentation: record }) => record.apiName;
                break;
            case 'WeakEtag':
-                field.resolve = (record) => record.weakEtag;
+                field.resolve = ({ recordRepresentation: record }) => record.weakEtag;
                break;
            case '_drafts':
-                field.resolve = (record) => {
+                field.resolve = ({ recordRepresentation: record, }) => {
                    return record.drafts ? record.drafts : null;
                };
                break;
            case 'LastModifiedById':
-                field.resolve = (record) => {
+                field.resolve = ({ recordRepresentation: record }) => {
                    return record.lastModifiedById
                        ? { value: record.lastModifiedById }
                        : null;
                };
                break;
            case 'LastModifiedDate':
-                field.resolve = (record) => {
+                field.resolve = ({ recordRepresentation: record }) => {
                    return record.lastModifiedDate
                        ? { value: record.lastModifiedDate }
                        : null;
                };
                break;
            case 'SystemModstamp':
-                field.resolve = (record) => {
+                field.resolve = ({ recordRepresentation: record }) => {
                    return record.systemModstamp ? { value: record.systemModstamp } : null;
                };
                break;
            case 'RecordTypeId':
-                field.resolve = (record) => {
+                field.resolve = ({ recordRepresentation: record }) => {
                    return record.recordTypeId ? { value: record.recordTypeId } : null;
                };
                break;
@@ -8372,7 +8615,17 @@ function addResolversToSchema(schema, polyFields) {
            .getInterfaces()
            .find((iface) => iface.name === 'Record')) ||
            (recordFieldType && recordFieldType.name === 'Record')) {
-            field.resolve = function relationResolver(
+            field.resolve = async function relationResolver(obj, _args, { Record, seenRecordIds }) {
+                const fetchRecordOrNull = async (key) => {
+                    const recordRepresentation = await Record.load(key);
+                    return recordRepresentation !== null
+                        ? {
+                            recordRepresentation,
+                            ingestionTimestamp,
+                        }
+                        : null;
+                };
+                const { recordRepresentation: record, ingestionTimestamp } = obj;
                const fieldName = field.name.endsWith('__r')
                    ? field.name.replace('__r', '__c')
                    : field.name;
@@ -8382,26 +8635,28 @@ function addResolversToSchema(schema, polyFields) {
                if (!id)
                    return null;
                if (id['__ref'] !== undefined) {
-                    return
+                    return fetchRecordOrNull(record.fields[`${field.name}Id`].value);
                }
                seenRecordIds.add(id);
-                return
+                return fetchRecordOrNull(id);
            };
        }
        else if (isObjectType(recordFieldType) &&
            field.type.name.endsWith('Connection')) {
            // spanning field to a connection
-            field.resolve = (
-                seenRecordIds.add(
+            field.resolve = async ({ recordRepresentation, ingestionTimestamp }, args, { seenRecordIds }) => {
+                seenRecordIds.add(recordRepresentation.id);
+                const { name: currentFieldName } = field;
                return {
                    parentArgs: args,
-                    parentRecord:
-                    currentFieldName
+                    parentRecord: recordRepresentation,
+                    currentFieldName,
+                    ingestionTimestamp,
                };
            };
        }
        else {
-            field.resolve = function recordFieldResolver(record) {
+            field.resolve = function recordFieldResolver({ recordRepresentation: record, }) {
                return record.fields[field.name] || null;
            };
        }
@@ -8413,7 +8668,7 @@ function addResolversToSchema(schema, polyFields) {
    if (recordInterface !== undefined && baseRecord !== undefined) {
        // Applys 'resolveType' of GraphQLInterfaceType to 'Record' interface. Since all the heterogenous types are named as 'apiName', the type with same name as the loaded record 'apiName' property is the type wanted.
        // GraphQL executor would match InLineFragment' condition with type and keeps the deeper level field resolving going.
-        recordInterface.resolveType = function (value) {
+        recordInterface.resolveType = function ({ recordRepresentation: value, }) {
            const targetType = polyTypes.find((type) => type.name === value.apiName);
            return targetType === undefined ? baseRecord : targetType;
        };
@@ -8421,7 +8676,7 @@ function addResolversToSchema(schema, polyFields) {
    return schema;
}
async function connectionEdgeResolver(obj, _args, context) {
-    const { parentArgs = {}, parentRecord, currentFieldName } = obj;
+    const { parentArgs = {}, parentRecord, currentFieldName, ingestionTimestamp } = obj;
    const { query, objectInfos, draftFunctions } = context;
    let joins = [];
    let alias = currentFieldName;
@@ -8452,10 +8707,19 @@ async function connectionEdgeResolver(obj, _args, context) {
        predicates,
        orderBy: orderByToPredicate(parentArgs.orderBy, alias, alias, context.objectInfos),
        limit: parentArgs.first,
+        ingestionTimestamp,
    };
    const { sql, bindings } = buildQuery(queryConfig);
    const results = await query(sql, bindings);
-
+    //map each sql result with the ingestion timestamp to pass it down a level
+    return results.rows
+        .map((row) => row[0])
+        .map((record) => {
+            return {
+                record,
+                ingestionTimestamp,
+            };
+        });
}
/**
 * Converts a childRelationship into a predicate
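Across these resolver hunks, bare record representations are replaced by a wrapper object so the query-level ingestion timestamp travels down to every nested resolver. A minimal sketch of that shape; the record contents are invented for illustration:

// Illustrative only: the value shape the rewritten resolvers hand to one another.
const resolvedNode = {
    recordRepresentation: {
        id: '001RM000004uuhnYAA',
        apiName: 'Account',
        fields: { Name: { value: 'Acme' } },
    },
    ingestionTimestamp: 1699999999990,
};
// A scalar field resolver then destructures the wrapper rather than taking the record directly:
const nameField = (({ recordRepresentation: record }) => record.fields.Name || null)(resolvedNode);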
@@ -8481,6 +8745,53 @@ function isRecordType(type) {
    const interfaces = type.getInterfaces();
    return Boolean(interfaces.find((iface) => iface.name === 'Record'));
}
+/**
+ * Builds the top level record query key based on AST data
+ * @param operation
+ * @param variables
+ * @param argumentNodes
+ * @param currentFieldName
+ * @returns
+ */
+function buildKeyStringForRecordQuery(variables, argumentNodes, currentFieldName) {
+    const filteredArgumentNodes = assign$3([], argumentNodes).filter((node) => node.name.value !== 'first' && node.name.value !== 'after');
+    const argumentString = filteredArgumentNodes.length > 0
+        ? `__${serializeFieldArguments(filteredArgumentNodes, variables)}`
+        : '';
+    return `UiApi::GraphQLRepresentation__uiapi__query__${currentFieldName}${argumentString}`;
+}
+/**
+ * fetches a query level ingestion time stamp from the L2 cache
+ * if no query has been seen then the timestamp is 0
+ * @param apiName
+ * @param info
+ * @param args
+ * @param query
+ * @returns
+ */
+async function fetchIngestionTimeStampFromDatabase(apiName, info, args, query) {
+    const { variableValues } = info;
+    // if we cannot find the query key in the database then default to 0 as we assume we have not seen the query
+    // and all the data is not stale
+    let ingestionTimestamp = 0;
+    if (info.fieldNodes.length > 0 && info.fieldNodes[0].arguments !== undefined) {
+        const key = buildKeyStringForRecordQuery(
+        // join varables passed from query to the argument variables given from the AST
+        { ...variableValues, ...args }, info.fieldNodes[0].arguments, apiName);
+        const sql = `
+            SELECT json_extract(metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}')
+            FROM lds_data
+            WHERE key IS ?
+        `;
+        const results = await query(sql, [key]);
+        const [timestamp] = results.rows.map((row) => row[0]);
+        if (timestamp !== null && typeof timestamp === 'number') {
+            //go back 10 ms to adjust for margin of error when top level query is stored and when raml objects are stored
+            ingestionTimestamp = timestamp - 10;
+        }
+    }
+    return ingestionTimestamp;
+}

const baseTypeDefinitions = /* GraphQL */ `
    schema {
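A minimal sketch of how the two new helpers above fit together: build the GraphQLRepresentation key from the field name and its non-paging arguments, then read that row's ingestionTimestamp from L2. The field name, argument nodes, and variable values below are assumed for the example, not taken from the package:

// Illustrative only; fieldArgumentNodes is a hypothetical AST argument list.
const key = buildKeyStringForRecordQuery({ accountName: 'Acme' }, fieldArgumentNodes, 'Account');
// e.g. 'UiApi::GraphQLRepresentation__uiapi__query__Account__<serialized non-paging args>'
const results = await query(
    `SELECT json_extract(metadata, '$.ingestionTimestamp') FROM lds_data WHERE key IS ?`,
    [key]
);
const [timestamp] = results.rows.map((row) => row[0]);
// 0 means the query has never been ingested; otherwise back off 10 ms for write-ordering skew
const ingestionTimestamp = typeof timestamp === 'number' ? timestamp - 10 : 0;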
@@ -11782,6 +12093,15 @@ function buildDurableRecordRepresentation(normalizedRecord, records, pendingEntr
        links,
    };
}
+function getDenormalizedKey(originalKey, recordId, luvio) {
+    // this will likely need to be handled when moving to structured keys
+    // note record view entities dont have an associated keybuilder. They get ingested as records to a different key format
+    // see the override for how they are handled packages/lds-adapters-uiapi/src/raml-artifacts/types/RecordRepresentation/keyBuilderFromType.ts
+    if (originalKey.startsWith(RECORD_VIEW_ENTITY_ID_PREFIX)) {
+        return RECORD_VIEW_ENTITY_ID_PREFIX + recordId;
+    }
+    return keyBuilderRecord(luvio, { recordId });
+}
function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecords, getStoreMetadata) {
    const getEntries = function (entries, segment) {
        // this HOF only inspects records in the default segment
@@ -11796,14 +12116,24 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
        const filteredEntryIds = [];
        // map of records to avoid requesting duplicate record keys when requesting both records and fields
        const recordEntries = {};
+        const recordViewEntries = {};
        for (let i = 0, len = entriesLength; i < len; i++) {
            const id = entries[i];
            const recordId = extractRecordIdFromStoreKey(id);
            if (recordId !== undefined) {
-                if (
-
-
-
+                if (id.startsWith(RECORD_VIEW_ENTITY_ID_PREFIX)) {
+                    if (recordViewEntries[recordId] === undefined) {
+                        const key = getDenormalizedKey(id, recordId, luvio);
+                        recordViewEntries[recordId] = true;
+                        filteredEntryIds.push(key);
+                    }
+                }
+                else {
+                    if (recordEntries[recordId] === undefined) {
+                        const key = getDenormalizedKey(id, recordId, luvio);
+                        recordEntries[recordId] = true;
+                        filteredEntryIds.push(key);
+                    }
                }
            }
            else {
@@ -11837,6 +12167,7 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
        const putEntries = create$2(null);
        const keys$1 = keys$2(entries);
        const putRecords = {};
+        const putRecordViews = {};
        const storeRecords = getStoreRecords !== undefined ? getStoreRecords() : {};
        const storeMetadata = getStoreMetadata !== undefined ? getStoreMetadata() : {};
        for (let i = 0, len = keys$1.length; i < len; i++) {
@@ -11845,10 +12176,18 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
            const recordId = extractRecordIdFromStoreKey(key);
            // do not put normalized field values
            if (recordId !== undefined) {
-                const
-                if (
-
+                const isRecordView = key.startsWith(RECORD_VIEW_ENTITY_ID_PREFIX);
+                if (isRecordView) {
+                    if (putRecordViews[recordId] === true) {
+                        continue;
+                    }
                }
+                else {
+                    if (putRecords[recordId] === true) {
+                        continue;
+                    }
+                }
+                const recordKey = getDenormalizedKey(key, recordId, luvio);
                const recordEntries = entries;
                const entry = recordEntries[recordKey];
                let record = entry && entry.data;
@@ -11860,7 +12199,12 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
                    continue;
                }
                }
-
+                if (isRecordView) {
+                    putRecordViews[recordId] = true;
+                }
+                else {
+                    putRecords[recordId] = true;
+                }
                if (isStoreRecordError(record)) {
                    putEntries[recordKey] = value;
                    continue;
@@ -11938,6 +12282,19 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
    });
}

+/**
+ * This function takes an unknown error and normalizes it to an Error object
+ */
+function normalizeError$1(error) {
+    if (typeof error === 'object' && error instanceof Error) {
+        return error;
+    }
+    else if (typeof error === 'string') {
+        return new Error(error);
+    }
+    return new Error(stringify$2(error));
+}
+
const PERFORM_QUICK_ACTION_ENDPOINT_REGEX = /^\/ui-api\/actions\/perform-quick-action\/.*$/;
/**
 * Checks if a provided resource request is a POST operation on the record
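A quick behavior sketch of the new normalizeError$1 helper, based only on the branches shown in the hunk above; the inputs are made up for illustration:

// Illustrative only.
normalizeError$1(new TypeError('boom'));   // the same Error instance, returned unchanged
normalizeError$1('draft enqueue failed');  // new Error('draft enqueue failed')
normalizeError$1({ status: 500 });         // new Error('{"status":500}') via JSON stringification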
@@ -11956,7 +12313,7 @@ function performQuickActionDraftEnvironment(luvio, env, handler) {
        return env.dispatchResourceRequest(request, context, eventObservers);
    }
    const { data } = await handler.enqueue(request).catch((err) => {
-        throw createDraftSynthesisErrorResponse(err.message);
+        throw createDraftSynthesisErrorResponse(normalizeError$1(err).message);
    });
    if (data === undefined) {
        return Promise.reject(createDraftSynthesisErrorResponse());
@@ -12383,6 +12740,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
            query: injectedAST,
        },
        luvio,
+        gqlEval: true,
    }, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy));
    if (isErrorSnapshotThatShouldGetReturnedToCaller(nonEvaluatedSnapshot)) {
        return nonEvaluatedSnapshot;
@@ -13511,6 +13869,10 @@ class ScopedFieldsCollection {
    }
}
const MAX_STRING_LENGTH_PER_CHUNK = 10000;
+//Salesforce/Akamai cdn uri max size is 8898 bytes, short than normal. Per
+//https://help.salesforce.com/s/articleView?id=sf.community_builder_cdn_considerations.htm&type=5
+//Due to we don't know the domain ResourceRequest, here we give 8000
+const MAX_URL_LENGTH = 8000;
const PARSE_ERROR = 'PARSE_AGGREGATE_UI_RESPONSE_ERROR';
function isErrorResponse(response) {
    return response.httpStatusCode >= 400;
@@ -13589,8 +13951,8 @@ function buildAggregateUiUrl(params, resourceRequest) {
    }
    return `${resourceRequest.baseUri}${resourceRequest.basePath}?${join$1.call(queryString, '&')}`;
}
-function shouldUseAggregateUiForFields(fieldsArray, optionalFieldsArray) {
-    return fieldsArray.length + optionalFieldsArray.length >=
+function shouldUseAggregateUiForFields(fieldsArray, optionalFieldsArray, maxLengthPerChunk) {
+    return fieldsArray.length + optionalFieldsArray.length >= maxLengthPerChunk;
}
function isSpanningRecord(fieldValue) {
    return fieldValue !== null && typeof fieldValue === 'object';
@@ -13653,14 +14015,15 @@ function createAggregateBatchRequestInfo(resourceRequest, endpoint) {
    if (fieldsArray.length === 0 && optionalFieldsArray.length === 0) {
        return undefined;
    }
+    const allowedMaxStringLengthPerChunk = getMaxLengthPerChunkAllowed(resourceRequest);
    const fieldsString = fieldsArray.join(',');
    const optionalFieldsString = optionalFieldsArray.join(',');
-    const shouldUseAggregate = shouldUseAggregateUiForFields(fieldsString, optionalFieldsString);
+    const shouldUseAggregate = shouldUseAggregateUiForFields(fieldsString, optionalFieldsString, allowedMaxStringLengthPerChunk);
    if (!shouldUseAggregate) {
        return undefined;
    }
-    const fieldCollection = ScopedFieldsCollection.fromQueryParameterValue(fieldsString).split(
-    const optionalFieldCollection = ScopedFieldsCollection.fromQueryParameterValue(optionalFieldsString).split(
+    const fieldCollection = ScopedFieldsCollection.fromQueryParameterValue(fieldsString).split(allowedMaxStringLengthPerChunk);
+    const optionalFieldCollection = ScopedFieldsCollection.fromQueryParameterValue(optionalFieldsString).split(allowedMaxStringLengthPerChunk);
    return {
        fieldCollection,
        optionalFieldCollection,
@@ -13733,6 +14096,25 @@ function isGetRequestForEndpoint(endpoint, request) {
function arrayOrEmpty(array) {
    return array !== undefined && isArray(array) ? array : [];
}
+/**
+ * Calculate the max lengh per chunk.
+ * If useShortUrlGate is open, allow max chunk size is MAX_URL_LENGTH - the url without fields and optional fields in url.
+ * Otherwise MAX_STRING_LENGTH_PER_CHUNK
+ * @param resourceRequest
+ * @returns
+ */
+function getMaxLengthPerChunkAllowed(request) {
+    if (!ldsUseShortUrlGate.isOpen({ fallback: false })) {
+        return MAX_STRING_LENGTH_PER_CHUNK;
+    }
+    // Too much work to get exact length of the final url, so use stringified json to get the rough length.
+    const roughUrlLengthWithoutFieldsAndOptionFields = request.basePath.length +
+        request.baseUri.length +
+        (request.urlParams ? stringify$1(request.urlParams).length : 0) +
+        stringify$1({ ...request.queryParams, fields: {}, optionalFields: {} }).length;
+    // MAX_URL_LENGTH - full lenght without fields, optionalFields
+    return MAX_URL_LENGTH - roughUrlLengthWithoutFieldsAndOptionFields;
+}

const RECORD_ENDPOINT_REGEX = /^\/ui-api\/records\/?(([a-zA-Z0-9]+))?$/;
const referenceId$3 = 'LDS_Records_AggregateUi';
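A worked sketch of the arithmetic behind getMaxLengthPerChunkAllowed when the lds.useShortUrl gate is open; the request shape and character counts below are assumed for illustration only:

// Illustrative only.
const request = {
    baseUri: '/services/data/v59.0',      // 20 characters
    basePath: '/ui-api/records/batch',    // 21 characters
    urlParams: {},                        // '{}' stringifies to 2 characters
    queryParams: { layoutTypes: 'Full' },
};
// stringify({ layoutTypes: 'Full', fields: {}, optionalFields: {} }) is 54 characters,
// so the allowed chunk size is roughly 8000 - (20 + 21 + 2 + 54) = 7903 characters
// of field-list text per aggregate-ui chunk, instead of the fixed 10000.
const allowed = getMaxLengthPerChunkAllowed(request);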
@@ -13953,15 +14335,15 @@ class NimbusDraftQueue {
    enqueue(handlerId, data) {
        const callProxyMethod = __nimbus.plugins.LdsDraftQueue.callProxyMethod;
        if (callProxyMethod === undefined) {
-            return Promise.reject('callProxyMethod not defined on the nimbus plugin');
+            return Promise.reject(new Error('callProxyMethod not defined on the nimbus plugin'));
        }
        const serializedAction = stringify([handlerId, data]);
        return new Promise((resolve, reject) => {
            callProxyMethod('enqueue', serializedAction, (serializedActionResponse) => {
                const response = parse(serializedActionResponse);
                resolve(response);
-            }, (
-                reject(
+            }, (errorMessage) => {
+                reject(new Error(errorMessage));
            });
        });
    }
@@ -13971,18 +14353,18 @@ class NimbusDraftQueue {
        return Promise.resolve;
    }
    processNextAction() {
-        return Promise.reject('Cannot call processNextAction from the NimbusDraftQueue');
+        return Promise.reject(new Error('Cannot call processNextAction from the NimbusDraftQueue'));
    }
    getQueueActions() {
        const callProxyMethod = __nimbus.plugins.LdsDraftQueue.callProxyMethod;
        if (callProxyMethod === undefined) {
-            return Promise.reject('callProxyMethod not defined on the nimbus plugin');
+            return Promise.reject(new Error('callProxyMethod not defined on the nimbus plugin'));
        }
        return new Promise((resolve, reject) => {
            callProxyMethod('getQueueActions', stringify([]), (serializedQueue) => {
                resolve(parse(serializedQueue));
-            }, (
-                reject(
+            }, (errorMessage) => {
+                reject(new Error(errorMessage));
            });
        });
    }
@@ -14010,32 +14392,32 @@ class NimbusDraftQueue {
        throw new Error('Cannot call getQueueState from the NimbusDraftQueue');
    }
    removeDraftAction(_actionId) {
-        return Promise.reject('Cannot call removeDraftAction from the NimbusDraftQueue');
+        return Promise.reject(new Error('Cannot call removeDraftAction from the NimbusDraftQueue'));
    }
    startQueue() {
-        return Promise.reject('Cannot call startQueue from the NimbusDraftQueue');
+        return Promise.reject(new Error('Cannot call startQueue from the NimbusDraftQueue'));
    }
    stopQueue() {
-        return Promise.reject('Cannot call stopQueue from the NimbusDraftQueue');
+        return Promise.reject(new Error('Cannot call stopQueue from the NimbusDraftQueue'));
    }
    replaceAction(_actionId, _withActionId) {
-        return Promise.reject('Cannot call replaceAction from the NimbusDraftQueue');
+        return Promise.reject(new Error('Cannot call replaceAction from the NimbusDraftQueue'));
    }
    mergeActions(_targetActionId, _sourceActionId) {
-        return Promise.reject('Cannot call mergeActions from the NimbusDraftQueue');
+        return Promise.reject(new Error('Cannot call mergeActions from the NimbusDraftQueue'));
    }
    setMetadata(_actionId, _metadata) {
-        return Promise.reject('Cannot call setMetadata from the NimbusDraftQueue');
+        return Promise.reject(new Error('Cannot call setMetadata from the NimbusDraftQueue'));
    }
    addHandler(_handler) {
        // no-op -- all registration occurs against concrete draft queue
        return Promise.resolve();
    }
    addCustomHandler(_id, _executor) {
-        return Promise.reject('Cannot call setMetadata from the NimbusDraftQueue');
+        return Promise.reject(new Error('Cannot call setMetadata from the NimbusDraftQueue'));
    }
    removeHandler(_id) {
-        return Promise.reject('Cannot call setMetadata from the NimbusDraftQueue');
+        return Promise.reject(new Error('Cannot call setMetadata from the NimbusDraftQueue'));
    }
}

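These NimbusDraftQueue hunks replace string rejections with Error rejections throughout, which gives callers a stack trace and a .message property. A minimal sketch of what a caller sees, assuming a hypothetical queue instance:

// Illustrative only; queue is a hypothetical NimbusDraftQueue instance.
queue.startQueue().catch((e) => {
    console.log(e instanceof Error); // true after this change (previously a bare string)
    console.log(e.message);          // 'Cannot call startQueue from the NimbusDraftQueue'
});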
@@ -14547,6 +14929,7 @@ function instrumentAdapter(adapter, metadata) {
    return instrumentAdapter$1(instrumentedMobileAdapter, metadata, {
        trackL1Hits: true,
        trackL2Hits: true,
+        trackCacheMisses: true,
        reportObserver: (report) => {
            for (const observer of reportObservers) {
                observer(report);
@@ -14849,7 +15232,7 @@ class NimbusSqliteStore {
    registerOnChangedListener(listener) {
        let unsubscribeId = undefined;
        this.plugin
-            .registerOnChangedListener((changes) => {
+            .registerOnChangedListener(async (changes) => {
            const durableChanges = changes.map((c) => {
                return {
                    type: c.type === 'upsert' ? 'setEntries' : 'evictEntries',
@@ -14858,7 +15241,7 @@ class NimbusSqliteStore {
                    segment: c.context.segment,
                };
            });
-            listener(durableChanges);
+            await listener(durableChanges);
        })
            .then((unsub) => {
            unsubscribeId = unsub;
@@ -14964,8 +15347,17 @@ function formatDisplayValue(value, datatype) {
    }
}

-
+function isUnfulfilledSnapshot(cachedSnapshotResult) {
+    if (cachedSnapshotResult === undefined) {
+        return false;
+    }
+    if ('then' in cachedSnapshotResult) {
+        return false;
+    }
+    return cachedSnapshotResult.state === 'Unfulfilled';
+}
function makeEnvironmentGraphqlAware(environment) {
+    //TODO: [W-12734162] - rebuild non-evaluated snapshot when graph rebuild is triggered. The dependency work on luvio needs to be done.
    const rebuildSnapshot = function (snapshot, onRebuild) {
        if (isStoreEvalSnapshot(snapshot)) {
            snapshot.rebuildWithStoreEval(snapshot).then((rebuilt) => {
@@ -14981,8 +15373,72 @@ function makeEnvironmentGraphqlAware(environment) {
        }
        return environment.rebuildSnapshot(snapshot, onRebuild);
    };
+    const applyCachePolicy = function (luvio, adapterRequestContext, buildSnapshotContext, buildCachedSnapshot, buildNetworkSnapshot) {
+        // Early exit for non-evaluating adapters
+        let graphqlBuildSnapshotContext = buildSnapshotContext;
+        if (graphqlBuildSnapshotContext.gqlEval !== true) {
+            return environment.applyCachePolicy(luvio, adapterRequestContext, buildSnapshotContext, buildCachedSnapshot, buildNetworkSnapshot);
+        }
+        var localBuildCachedSnapshot = buildCachedSnapshot;
+        const hoistUnfulfilledToStale = (context, storeLookup, luvio) => {
+            const upstream = buildCachedSnapshot(context, storeLookup, luvio);
+            if (upstream === undefined)
+                return upstream;
+            if (isUnfulfilledSnapshot(upstream)) {
+                return {
+                    ...upstream,
+                    data: upstream.data || {},
+                    state: 'Stale',
+                };
+            }
+            else if ('then' in upstream) {
+                return upstream.then((snapshot) => {
+                    if (snapshot === undefined)
+                        return snapshot;
+                    if (isUnfulfilledSnapshot(snapshot)) {
+                        return {
+                            ...snapshot,
+                            data: snapshot.data || {},
+                            state: 'Stale',
+                        };
+                    }
+                    return snapshot;
+                });
+            }
+            return upstream;
+        };
+        const { cachePolicy } = adapterRequestContext;
+        if (eagerEvalValidAt.isOpen({ fallback: false }) &&
+            cachePolicy &&
+            cachePolicy.type === 'valid-at' &&
+            cachePolicy.timestamp === 0 &&
+            cachePolicy.basePolicy &&
+            cachePolicy.basePolicy.type === 'stale-while-revalidate') {
+            localBuildCachedSnapshot = hoistUnfulfilledToStale;
+        }
+        if (eagerEvalStaleWhileRevalidate.isOpen({ fallback: false }) &&
+            cachePolicy &&
+            cachePolicy.type === 'stale-while-revalidate' &&
+            cachePolicy.staleDurationSeconds >= Number.MAX_SAFE_INTEGER) {
+            localBuildCachedSnapshot = hoistUnfulfilledToStale;
+        }
+        if (eagerEvalDefaultCachePolicy.isOpen({ fallback: false }) &&
+            (cachePolicy === undefined || cachePolicy === null)) {
+            localBuildCachedSnapshot = hoistUnfulfilledToStale;
+        }
+        return environment.applyCachePolicy(luvio, adapterRequestContext, buildSnapshotContext, localBuildCachedSnapshot, buildNetworkSnapshot);
+    };
    return create(environment, {
        rebuildSnapshot: { value: rebuildSnapshot },
+        applyCachePolicy: { value: applyCachePolicy },
+        defaultCachePolicy: {
+            get() {
+                return environment.defaultCachePolicy;
+            },
+            set(value) {
+                environment.defaultCachePolicy = value;
+            },
+        },
    });
}

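A minimal sketch of what hoistUnfulfilledToStale does to a cached snapshot when one of the eager-eval gates matches; the snapshot below is reduced to the fields the wrapper actually touches and is invented for illustration:

// Illustrative only.
const unfulfilled = { state: 'Unfulfilled', data: undefined, missingPaths: {} };
const hoisted = { ...unfulfilled, data: unfulfilled.data || {}, state: 'Stale' };
// -> { state: 'Stale', data: {}, missingPaths: {} }
// Presumably this lets a stale-while-revalidate policy serve the store-eval result
// immediately instead of treating the L1/L2 miss as a hard failure.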
@@ -15633,8 +16089,17 @@ function getRuntime() {
    lazyObjectInfoService = new ObjectInfoService(getObjectInfo, getObjectInfos, internalAdapterDurableStore);
    // set storeEval function for lds-adapters-graghql to use
    withRegistration('@salesforce/lds-adapters-graphql', (registration) => {
-        const { configuration: { setStoreEval }, } = registration;
-
+        const { configuration: { setStoreEval, setDraftFunctions }, } = registration;
+        const getCanonicalId = (id) => {
+            var _a;
+            return ((_a = extractRecordIdFromStoreKey(lazyLuvio.storeGetCanonicalKey(RECORD_ID_PREFIX + id))) !== null && _a !== void 0 ? _a : id);
+        };
+        const draftFuncs = {
+            isDraftId: isGenerated,
+            getCanonicalId,
+        };
+        setStoreEval(sqliteStoreEvalFactory(userId, lazyBaseDurableStore, lazyObjectInfoService, draftFuncs));
+        setDraftFunctions(draftFuncs);
    });
    // creates a durable store that denormalizes scalar fields for records
    let getIngestRecords;
@@ -15726,4 +16191,4 @@ register({
});

export { getRuntime, registerReportObserver, reportGraphqlQueryParseError };
-// version: 1.
+// version: 1.131.0-dev10-b950ecbb3