@salesforce/lds-runtime-mobile 1.302.0 → 1.304.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.js +518 -74
- package/dist/types/NimbusDraftQueue.d.ts +1 -0
- package/dist/types/mocks.d.ts +1 -1
- package/package.json +18 -18
- package/sfdc/main.js +518 -74
- package/sfdc/types/NimbusDraftQueue.d.ts +1 -0
- package/sfdc/types/mocks.d.ts +1 -1
package/dist/main.js
CHANGED
|
@@ -20,7 +20,7 @@ import { setupInstrumentation, instrumentAdapter as instrumentAdapter$1, instrum
|
|
|
20
20
|
import { HttpStatusCode, setBypassDeepFreeze, StoreKeySet, serializeStructuredKey, StringKeyInMemoryStore, Reader, deepFreeze, emitAdapterEvent, createCustomAdapterEventEmitter, StoreKeyMap, isFileReference, Environment, Luvio, InMemoryStore } from '@luvio/engine';
|
|
21
21
|
import excludeStaleRecordsGate from '@salesforce/gate/lds.graphqlEvalExcludeStaleRecords';
|
|
22
22
|
import { parseAndVisit, Kind as Kind$1, buildSchema, isObjectType, defaultFieldResolver, visit, execute, parse as parse$7, extendSchema, isScalarType } from '@luvio/graphql-parser';
|
|
23
|
-
import { RECORD_ID_PREFIX, RECORD_FIELDS_KEY_JUNCTION, RECORD_REPRESENTATION_NAME, extractRecordIdFromStoreKey, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, isStoreKeyRecordViewEntity, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, getRecordId18, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, getObjectInfoDirectoryAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion
|
|
23
|
+
import { RECORD_ID_PREFIX, RECORD_FIELDS_KEY_JUNCTION, RECORD_REPRESENTATION_NAME, extractRecordIdFromStoreKey, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, isStoreKeyRecordViewEntity, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, getRecordId18, getRecordsAdapterFactory, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, getObjectInfoDirectoryAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion } from '@salesforce/lds-adapters-uiapi-mobile';
|
|
24
24
|
import ldsIdempotencyWriteDisabled from '@salesforce/gate/lds.idempotencyWriteDisabled';
|
|
25
25
|
import ldsBackdatingEnabled from '@salesforce/gate/lds.backdatingEnabled';
|
|
26
26
|
import FIRST_DAY_OF_WEEK from '@salesforce/i18n/firstDayOfWeek';
|
|
@@ -1575,6 +1575,32 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
         }, revivingStore).finally(() => {
         });
     };
+    const expirePossibleStaleRecords = async function (keys$1, config, refresh) {
+        validateNotDisposed();
+        const metadataKeys = keys$1.map(serializeStructuredKey);
+        const now = Date.now();
+        const entries = await durableStore.getMetadata(metadataKeys, DefaultDurableSegment);
+        if (entries === undefined || keys$8(entries).length === 0) {
+            return environment.expirePossibleStaleRecords(keys$1);
+        }
+        let metaDataChanged = false;
+        const metadataEntries = metadataKeys.reduce((accu, key) => {
+            const metadataEntry = entries[key];
+            if (metadataEntry.metadata !== undefined) {
+                const metadata = { ...metadataEntry.metadata, expirationTimestamp: now };
+                accu[key] = { metadata };
+                metaDataChanged = true;
+            }
+            return accu;
+        }, {});
+        if (metaDataChanged) {
+            await durableStore.setMetadata(metadataEntries, DefaultDurableSegment);
+        }
+        if (config !== undefined && refresh !== undefined) {
+            return environment.refreshPossibleStaleRecords(config, refresh);
+        }
+        return Promise.resolve();
+    };
     // set the default cache policy of the base environment
     environment.setDefaultCachePolicy({
         type: 'stale-while-revalidate',
@@ -1607,6 +1633,7 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
         handleErrorResponse: { value: handleErrorResponse },
         getNotifyChangeStoreEntries: { value: getNotifyChangeStoreEntries },
         notifyStoreUpdateAvailable: { value: notifyStoreUpdateAvailable },
+        expirePossibleStaleRecords: { value: expirePossibleStaleRecords },
     });
 }
 
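The added `expirePossibleStaleRecords` override stamps the durable-store metadata of the given keys with `expirationTimestamp: Date.now()` so the next read treats those records as stale, then optionally delegates to a refresh. A minimal sketch of that metadata rewrite, using a hypothetical in-memory stand-in for the durable store (the real durable-store interface is not shown in this diff):

```js
// Sketch only: an in-memory stand-in for the durable store's metadata calls.
const fakeDurableStore = {
    metadata: new Map([['UiApi::RecordRepresentation:001xx000003GYcF', { metadata: { expirationTimestamp: Infinity } }]]),
    async getMetadata(keys) {
        return Object.fromEntries(keys.filter((k) => this.metadata.has(k)).map((k) => [k, this.metadata.get(k)]));
    },
    async setMetadata(updates) {
        Object.entries(updates).forEach(([k, v]) => this.metadata.set(k, v));
    },
};

// Mirrors the new override's core step: stamp each key's metadata as already expired.
async function expireKeys(durableStore, keys) {
    const now = Date.now();
    const entries = await durableStore.getMetadata(keys);
    const updates = {};
    for (const key of keys) {
        const entry = entries[key];
        if (entry && entry.metadata !== undefined) {
            updates[key] = { metadata: { ...entry.metadata, expirationTimestamp: now } };
        }
    }
    if (Object.keys(updates).length > 0) {
        await durableStore.setMetadata(updates);
    }
}
```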
@@ -5655,7 +5682,7 @@ class DurableDraftQueue {
         if (status === DraftActionStatus.Error) {
             this.state = DraftQueueState.Error;
             this.processingAction = undefined;
-            this.notifyChangedListeners({
+            await this.notifyChangedListeners({
                 type: DraftQueueEventType.ActionFailed,
                 action: action,
             });
@@ -5671,7 +5698,7 @@ class DurableDraftQueue {
         if (this.state === DraftQueueState.Waiting) {
             this.state = DraftQueueState.Started;
         }
-        this.notifyChangedListeners({
+        await this.notifyChangedListeners({
             type: DraftQueueEventType.ActionUploading,
             action: { ...action, status: DraftActionStatus.Uploading },
         });
@@ -5748,6 +5775,31 @@ class DurableDraftQueue {
             await this.startQueue();
         }
     }
+    async updateDraftAction(action) {
+        // stop queue manually
+        this.stopQueueManually();
+        const actionStatus = await this.statusOfAction(action.id);
+        if (actionStatus === DraftActionStatus.Uploading) {
+            return Promise.reject('cannot update an uploading action');
+        }
+        // save the action into the draft store
+        await this.draftStore.writeAction(action);
+        // make the handler replay these drafts on the record
+        const handler = this.getHandler(action.handler);
+        const queue = await this.getQueueActions();
+        await handler.handleActionEnqueued(action, queue);
+        // start queue safely
+        return this.startQueueSafe();
+    }
+    async statusOfAction(actionId) {
+        const queue = await this.getQueueActions();
+        const actions = queue.filter((action) => action.id === actionId);
+        if (actions.length === 0) {
+            return Promise.reject('cannot update non-existent action');
+        }
+        const action = actions[0];
+        return action.status;
+    }
     replaceAction(targetActionId, sourceActionId) {
         return this.replaceOrMergeActions(targetActionId, sourceActionId, false);
     }
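`updateDraftAction` now lets callers rewrite a queued draft in place: the queue is stopped, the call rejects if the action is already uploading, the edited action is written back to the draft store, and the handler replays the queue before it restarts. A hedged usage sketch (the `draftQueue` instance and the shape of the draft action are assumed for illustration, not taken from the package's public typings):

```js
// Hypothetical caller: patch the fields of a pending draft before it uploads.
async function editPendingDraft(draftQueue, draftActionId, fields) {
    const queue = await draftQueue.getQueueActions();
    const action = queue.find((a) => a.id === draftActionId);
    if (action === undefined) {
        throw new Error('cannot update non-existent action');
    }
    // merge the new field values into the queued request body
    action.data.body.fields = { ...action.data.body.fields, ...fields };
    // rejects with 'cannot update an uploading action' if the upload already started
    await draftQueue.updateDraftAction(action);
    return action;
}
```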
@@ -5778,17 +5830,21 @@ class DurableDraftQueue {
         });
         return action;
     }
-    scheduleRetryWithSpecifiedDelay(retryDelayInMs) {
+    async scheduleRetryWithSpecifiedDelay(retryDelayInMs) {
+        await this.notifyChangedListeners({
+            type: DraftQueueEventType.QueueStateChanged,
+            state: DraftQueueState.Waiting,
+        });
         this.timeoutHandler = setTimeout(() => {
             if (this.state !== DraftQueueState.Stopped) {
                 this.processNextAction();
             }
         }, retryDelayInMs);
     }
-    scheduleRetry() {
+    async scheduleRetry() {
         const newInterval = this.retryIntervalMilliseconds * 2;
         this.retryIntervalMilliseconds = Math.min(Math.max(newInterval, this.minimumRetryInterval), this.maximumRetryInterval);
-        this.scheduleRetryWithSpecifiedDelay(this.retryIntervalMilliseconds);
+        return this.scheduleRetryWithSpecifiedDelay(this.retryIntervalMilliseconds);
     }
     async getActionsForReplaceOrMerge(targetActionId, sourceActionId) {
         const actions = await this.getQueueActions();
@@ -5914,7 +5970,8 @@ class DurableDraftStore {
         const actionArray = [];
         for (let i = 0, len = keys$1.length; i < len; i++) {
             const key = keys$1[i];
-
+            // clone draft so we don't expose the internal draft store
+            actionArray.push(clone$1(draftStore[key]));
         }
         return actionArray;
     });
@@ -6563,6 +6620,7 @@ var DraftQueueOperationType;
     DraftQueueOperationType["ItemUpdated"] = "updated";
     DraftQueueOperationType["QueueStarted"] = "started";
     DraftQueueOperationType["QueueStopped"] = "stopped";
+    DraftQueueOperationType["QueueWaiting"] = "waiting";
 })(DraftQueueOperationType || (DraftQueueOperationType = {}));
 /**
  * Converts the internal DraftAction's ResourceRequest into
@@ -6605,6 +6663,16 @@ function toQueueState(queue) {
     };
 }
 class DraftManager {
+    shouldEmitEvent(event) {
+        // Waiting events cannot be emitted prior to 252 native clients
+        // TODO [W-16102411]: we can safely remove this backwards compatible code in 256
+        if (isDraftQueueStateChangeEvent(event) &&
+            event.state === DraftQueueState.Waiting &&
+            this.listenerVersion === undefined) {
+            return false;
+        }
+        return this.draftEventsShouldBeEmitted.includes(event.type);
+    }
     constructor(draftQueue) {
         this.listeners = [];
         this.draftEventsShouldBeEmitted = [
@@ -6618,7 +6686,7 @@ class DraftManager {
         ];
         this.draftQueue = draftQueue;
         draftQueue.registerOnChangedListener((event) => {
-            if (this.
+            if (this.shouldEmitEvent(event)) {
                 return this.callListeners(event);
             }
             return Promise.resolve();
@@ -6648,6 +6716,8 @@ class DraftManager {
                 return DraftQueueOperationType.QueueStarted;
             case DraftQueueState.Stopped:
                 return DraftQueueOperationType.QueueStopped;
+            case DraftQueueState.Waiting:
+                return DraftQueueOperationType.QueueWaiting;
             default:
                 throw Error('Unsupported event type');
         }
@@ -6705,7 +6775,8 @@ class DraftManager {
     *
     * @param listener The listener closure to subscribe to changes
     */
-    registerDraftQueueChangedListener(listener) {
+    registerDraftQueueChangedListener(listener, version = undefined) {
+        this.listenerVersion = version;
         this.listeners.push(listener);
         return () => {
             this.listeners = this.listeners.filter((l) => {
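The new `QueueWaiting` operation type and the optional `version` argument to `registerDraftQueueChangedListener` gate the waiting notifications: without a listener version, `shouldEmitEvent` suppresses `DraftQueueState.Waiting` state changes so older native clients never receive an event they cannot handle. A small sketch of that gating, with the enum and event values stubbed for illustration (the real event-type strings are not shown in this diff):

```js
// Stubs standing in for the package's enums, for illustration only.
const DraftQueueState = { Waiting: 'waiting', Started: 'started' };

function shouldEmitWaitingEvent(event, listenerVersion, allowedEventTypes) {
    // Waiting transitions are only delivered when the native client declared a version.
    if (event.state === DraftQueueState.Waiting && listenerVersion === undefined) {
        return false;
    }
    return allowedEventTypes.includes(event.type);
}

// An old client (no version) never sees the waiting transition:
shouldEmitWaitingEvent({ type: 'queue_state_changed', state: 'waiting' }, undefined, ['queue_state_changed']); // false
// A newer client that registered with a version does:
shouldEmitWaitingEvent({ type: 'queue_state_changed', state: 'waiting' }, '252', ['queue_state_changed']); // true
```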
@@ -6732,6 +6803,60 @@ class DraftManager {
|
|
|
6732
6803
|
};
|
|
6733
6804
|
});
|
|
6734
6805
|
}
|
|
6806
|
+
async mergePerformQuickAction(actionId, fields) {
|
|
6807
|
+
if (!this.isValidFieldMap(fields)) {
|
|
6808
|
+
return Promise.reject('fields is not valid');
|
|
6809
|
+
}
|
|
6810
|
+
const queue = await this.draftQueue.getQueueActions();
|
|
6811
|
+
const actions = queue.filter((action) => action.id === actionId);
|
|
6812
|
+
if (actions.length === 0) {
|
|
6813
|
+
return Promise.reject('cannot edit non-existent action');
|
|
6814
|
+
}
|
|
6815
|
+
const action = actions[0];
|
|
6816
|
+
if (!this.isPerformQuickActionDraft(action, 'post')) {
|
|
6817
|
+
return Promise.reject('cannot edit incompatible action type or uploading actions');
|
|
6818
|
+
}
|
|
6819
|
+
action.data.body.fields = { ...action.data.body.fields, ...fields };
|
|
6820
|
+
await this.draftQueue.updateDraftAction(action);
|
|
6821
|
+
return this.buildDraftQueueItem(action);
|
|
6822
|
+
}
|
|
6823
|
+
isValidFieldMap(fields) {
|
|
6824
|
+
const keys$1 = keys$6(fields);
|
|
6825
|
+
const validTypes = ['string', 'number', 'null', 'boolean'];
|
|
6826
|
+
for (let i = 0; i < keys$1.length; i++) {
|
|
6827
|
+
const key = keys$1[i];
|
|
6828
|
+
const value = fields[key];
|
|
6829
|
+
if (!validTypes.includes(typeof value)) {
|
|
6830
|
+
return false;
|
|
6831
|
+
}
|
|
6832
|
+
}
|
|
6833
|
+
return true;
|
|
6834
|
+
}
|
|
6835
|
+
isPerformQuickActionDraft(action, method) {
|
|
6836
|
+
const data = action.data;
|
|
6837
|
+
const isPerformQuickAction = data.basePath.startsWith('/ui-api/actions/perform-quick-action/');
|
|
6838
|
+
const methodMatches = data.method === method;
|
|
6839
|
+
const notUploading = action.status !== DraftActionStatus.Uploading;
|
|
6840
|
+
return isPerformQuickAction && methodMatches && notUploading;
|
|
6841
|
+
}
|
|
6842
|
+
async mergePerformUpdateRecordQuickAction(actionId, fields) {
|
|
6843
|
+
if (!this.isValidFieldMap(fields)) {
|
|
6844
|
+
return Promise.reject('fields is not valid');
|
|
6845
|
+
}
|
|
6846
|
+
const queue = await this.draftQueue.getQueueActions();
|
|
6847
|
+
const actions = queue.filter((action) => action.id === actionId);
|
|
6848
|
+
if (actions.length === 0) {
|
|
6849
|
+
return Promise.reject('cannot edit non-existent action');
|
|
6850
|
+
}
|
|
6851
|
+
const action = actions[0];
|
|
6852
|
+
if (!this.isPerformQuickActionDraft(action, 'patch')) {
|
|
6853
|
+
return Promise.reject('cannot edit incompatible action type or uploading actions');
|
|
6854
|
+
}
|
|
6855
|
+
const data = action.data;
|
|
6856
|
+
data.body.fields = { ...data.body.fields, ...fields };
|
|
6857
|
+
await this.draftQueue.updateDraftAction(action);
|
|
6858
|
+
return this.buildDraftQueueItem(action);
|
|
6859
|
+
}
|
|
6735
6860
|
buildDraftQueueItem(action) {
|
|
6736
6861
|
const operationType = getOperationTypeFrom(action);
|
|
6737
6862
|
const { id, status, timestamp, targetId, metadata } = action;
|
|
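The new `DraftManager.mergePerformQuickAction` and `mergePerformUpdateRecordQuickAction` methods validate the supplied field map (primitive values only), locate the queued perform-quick-action draft, merge the fields into its request body, and persist the edit through `updateDraftAction`. A rough caller-side sketch (the `draftManager` instance and field values are assumptions for illustration):

```js
// Hypothetical caller: fold user edits into a queued perform-quick-action draft.
async function applyQuickActionEdits(draftManager, draftActionId, edits) {
    // values must be primitives (string, number, boolean) or null;
    // objects and arrays are rejected with 'fields is not valid'
    const updatedItem = await draftManager.mergePerformQuickAction(draftActionId, edits);
    return updatedItem; // a DraftQueueItem built from the rewritten action
}

// e.g. applyQuickActionEdits(draftManager, actionId, { Subject: 'Follow up', Amount: 1200 });
```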
@@ -7039,6 +7164,12 @@ function getDenormalizedRecord(recordKey, durableStore) {
 function isStoreRecordError(storeRecord) {
     return storeRecord.__type === 'error';
 }
+function isDraftFieldPending(field) {
+    return !!(field.__state && field.__state.pending === true);
+}
+function isDraftFieldMissing(field) {
+    return !!(field.__state && field.__state.isMissing === true);
+}
 
 /**
  * Checks if a resource request is a GET method on the record endpoint
@@ -7730,12 +7861,9 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
         }
         const { dataType, relationshipName, referenceToInfos } = fieldInfo;
         const draftFieldNode = record.fields[draftField];
-        // JHORST: revisit this logic
         // do not try to apply drafts on nodes that are pending or missing
-        if (draftFieldNode
-
-            draftFieldNode.__state.isMissing === true)
-            continue;
+        if (isDraftFieldPending(draftFieldNode) || isDraftFieldMissing(draftFieldNode)) {
+            continue;
         }
         const draftFieldValue = draftFieldNode.value;
         if (dataType === 'Reference' && relationshipName !== null) {
@@ -8530,6 +8658,7 @@ class QuickActionExecutionRepresentationHandler extends AbstractResourceRequestA
|
|
|
8530
8658
|
isCreated: true,
|
|
8531
8659
|
isSuccess: true,
|
|
8532
8660
|
successMessage: `record created.`,
|
|
8661
|
+
drafts: { draftActionId: action.id },
|
|
8533
8662
|
});
|
|
8534
8663
|
}
|
|
8535
8664
|
getDraftMetadata(_key) {
|
|
@@ -8647,6 +8776,7 @@ class UpdateRecordQuickActionExecutionRepresentationHandler extends AbstractReso
|
|
|
8647
8776
|
isCreated: false,
|
|
8648
8777
|
isSuccess: true,
|
|
8649
8778
|
successMessage: `record updated.`,
|
|
8779
|
+
drafts: { draftActionId: action.id },
|
|
8650
8780
|
});
|
|
8651
8781
|
}
|
|
8652
8782
|
async getDraftMetadata(key) {
|
|
@@ -8724,7 +8854,29 @@ function isCreateContentDocumentAndVersionDraftAdapterEvent(customEvent) {
     return customEvent.namespace === CONTENT_DOCUMENT_AND_VERSION_NAMESPACE;
 }
 
+// so eslint doesn't complain about nimbus
+/* global __nimbus */
 const ContentDocumentCompositeKeyPrefix = 'UiApi::ContentDocumentCompositeRepresentation:';
+function chunkToBase64(chunk) {
+    let binary = '';
+    const chunkSize = 32 * 1024;
+    for (let i = 0; i < chunk.length; i += chunkSize) {
+        binary += String.fromCharCode.apply(null, chunk.subarray(i, i + chunkSize));
+    }
+    return btoa(binary);
+}
+async function streamBufferToBinaryStore(binaryStore, buffer, mimeType) {
+    const uri = await binaryStore.createStream(mimeType);
+    const bufferSize = 64 * 1024; // 64k buffer size
+    const uint8Array = new Uint8Array(buffer);
+    for (let offset = 0; offset < uint8Array.length; offset += bufferSize) {
+        const chunk = uint8Array.subarray(offset, Math.min(offset + bufferSize, uint8Array.length));
+        const base64Chunk = chunkToBase64(chunk);
+        await binaryStore.writeToStream(uri, base64Chunk);
+    }
+    await binaryStore.closeStream(uri);
+    return uri;
+}
 function createContentDocumentAndVersionDraftAdapterFactory(luvio, binaryStore, actionHandler) {
     const overriddenLuvio = buildLuvioOverrideForDraftAdapters(luvio, actionHandler, (key) => {
         // if the key is for our top-level response shape
@@ -8747,7 +8899,14 @@ function createContentDocumentAndVersionDraftAdapterFactory(luvio, binaryStore,
         const { fileData } = config;
         const { name, size, type } = fileData;
         const buffer = await fileData.arrayBuffer();
-
+        var uri;
+        // see if new chunking-api exists, if it doesnt fall back to memory-intensive mobile api
+        if (!__nimbus.plugins.LdsBinaryStorePlugin.createStream) {
+            uri = await binaryStore.store(new Uint8Array(buffer), type, size);
+        }
+        else {
+            uri = await streamBufferToBinaryStore(binaryStore, buffer, type);
+        }
         config.fileData = {
             isFileReference: true,
             handle: uri,
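`streamBufferToBinaryStore` avoids materializing one giant base64 string by slicing the `ArrayBuffer` into 64 KiB windows and base64-encoding each window in 32 KiB sub-chunks, which keeps the `String.fromCharCode.apply` argument counts bounded; when the native `createStream` plugin method is absent, the old single-shot `binaryStore.store` call is used instead. A self-contained sketch of how the streamed chunks could be consumed and checked, with a hypothetical collecting sink (names are illustrative, not part of this package):

```js
// Sketch: a fake binary-store sink that collects the base64 chunks written by
// streamBufferToBinaryStore and lets us verify the round trip.
function makeCollectingSink() {
    const chunks = [];
    return {
        sink: {
            createStream: async () => 'memory://stream/1',
            writeToStream: async (_uri, base64Chunk) => { chunks.push(base64Chunk); },
            closeStream: async () => {},
        },
        // decode everything that was written back into a single byte array
        decoded() {
            const binary = chunks.map((c) => atob(c)).join('');
            return Uint8Array.from(binary, (ch) => ch.charCodeAt(0));
        },
    };
}

// Usage, assuming the new helper above is in scope:
// const { sink, decoded } = makeCollectingSink();
// await streamBufferToBinaryStore(sink, someArrayBuffer, 'image/png');
// decoded() now holds the same bytes as new Uint8Array(someArrayBuffer)
```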
@@ -9586,7 +9745,7 @@ function recordLoaderFactory(query) {
|
|
|
9586
9745
|
return new DataLoader(batchRecordQuery);
|
|
9587
9746
|
}
|
|
9588
9747
|
|
|
9589
|
-
function createContext(store, objectInfos, eventEmitter, settings, snapshot, draftFunctions) {
|
|
9748
|
+
function createContext(store, objectInfos, eventEmitter, settings, snapshot, mappedCursors = new Map(), draftFunctions) {
|
|
9590
9749
|
store.query.bind(store);
|
|
9591
9750
|
const query = (sql, params) => {
|
|
9592
9751
|
const now = Date.now();
|
|
@@ -9613,7 +9772,9 @@ function createContext(store, objectInfos, eventEmitter, settings, snapshot, dra
|
|
|
9613
9772
|
Record,
|
|
9614
9773
|
snapshot,
|
|
9615
9774
|
seenRecordIds: new Set(),
|
|
9775
|
+
possibleStaleRecordMap: new Map(),
|
|
9616
9776
|
draftFunctions,
|
|
9777
|
+
mappedCursors,
|
|
9617
9778
|
};
|
|
9618
9779
|
}
|
|
9619
9780
|
|
|
@@ -10224,7 +10385,6 @@ function isTodayStartOfWeek() {
|
|
|
10224
10385
|
|
|
10225
10386
|
const JSON_EXTRACT_PATH_INGESTION_TIMESTAMP = '$.ingestionTimestamp';
|
|
10226
10387
|
const JSON_EXTRACT_PATH_INGESTION_APINAME = '$.apiName';
|
|
10227
|
-
const JSON_EXTRACT_PATH_DRAFTS = '$.drafts';
|
|
10228
10388
|
|
|
10229
10389
|
const MultiPickListValueSeparator = ';';
|
|
10230
10390
|
function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draftFunctions) {
|
|
@@ -10765,14 +10925,10 @@ function buildQuery(config) {
|
|
|
10765
10925
|
const predicates = buildPredicates(config);
|
|
10766
10926
|
const orderBy = buildOrderBy(config);
|
|
10767
10927
|
const sql = `
|
|
10768
|
-
SELECT "${config.alias}".data
|
|
10928
|
+
SELECT "${config.alias}".data, "${config.alias}".metadata
|
|
10769
10929
|
FROM lds_data "${config.alias}" ${joins.sql}
|
|
10770
10930
|
WHERE "${config.alias}".key like 'UiApi::RecordRepresentation:%'
|
|
10771
10931
|
AND json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_INGESTION_APINAME}') = '${config.alias}'
|
|
10772
|
-
AND (
|
|
10773
|
-
json_extract("${config.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ?
|
|
10774
|
-
OR json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL
|
|
10775
|
-
)
|
|
10776
10932
|
${predicates.sql}
|
|
10777
10933
|
${orderBy.sql}
|
|
10778
10934
|
LIMIT ?
|
|
@@ -10784,7 +10940,6 @@ function buildQuery(config) {
|
|
|
10784
10940
|
const bindings = [
|
|
10785
10941
|
// bindings from predicates on joins
|
|
10786
10942
|
...joins.bindings,
|
|
10787
|
-
config.ingestionTimestamp,
|
|
10788
10943
|
// where clause and parent scope bindings
|
|
10789
10944
|
...predicates.bindings,
|
|
10790
10945
|
// limit binding
|
|
@@ -10811,29 +10966,19 @@ function buildJoins(config) {
|
|
|
10811
10966
|
if (allJoins.length === 0)
|
|
10812
10967
|
return { sql, bindings };
|
|
10813
10968
|
sql = allJoins.reduce((joinAccumulator, join) => {
|
|
10814
|
-
let timestampAdded = false;
|
|
10815
10969
|
const joinConditions = join.conditions.reduce((conditionAccumulator, condition) => {
|
|
10816
10970
|
let joined_sql;
|
|
10817
|
-
const joinMetadataTimestamp = ` AND (json_extract("${join.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ? OR json_extract("${join.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL)`;
|
|
10818
10971
|
// predicate on a value, use the newly joined table
|
|
10819
10972
|
if ('type' in condition) {
|
|
10820
10973
|
const { sql, binding } = predicateToSQL(condition, join.alias);
|
|
10821
|
-
joined_sql = ` AND ${sql}
|
|
10974
|
+
joined_sql = ` AND ${sql}`;
|
|
10822
10975
|
bindings.push(...binding);
|
|
10823
|
-
if (timestampAdded === false) {
|
|
10824
|
-
bindings.push(config.ingestionTimestamp);
|
|
10825
|
-
timestampAdded = true;
|
|
10826
|
-
}
|
|
10827
10976
|
}
|
|
10828
10977
|
else {
|
|
10829
10978
|
// predicate on a path
|
|
10830
10979
|
const left = ` AND json_extract("${join.to}".data, '${condition.leftPath}')`;
|
|
10831
10980
|
const right = `json_extract("${join.alias}".data, '${condition.rightPath}')`;
|
|
10832
|
-
joined_sql = `${left} = ${right}
|
|
10833
|
-
if (timestampAdded === false) {
|
|
10834
|
-
bindings.push(config.ingestionTimestamp);
|
|
10835
|
-
timestampAdded = true;
|
|
10836
|
-
}
|
|
10981
|
+
joined_sql = `${left} = ${right}`;
|
|
10837
10982
|
}
|
|
10838
10983
|
conditionAccumulator += joined_sql;
|
|
10839
10984
|
return conditionAccumulator;
|
|
@@ -11502,11 +11647,15 @@ async function readIngestionTimestampForKey(key, query) {
|
|
|
11502
11647
|
}
|
|
11503
11648
|
return ingestionTimestamp;
|
|
11504
11649
|
}
|
|
11505
|
-
|
|
11506
|
-
const
|
|
11507
|
-
|
|
11508
|
-
|
|
11509
|
-
|
|
11650
|
+
function isObjectDefinitionNode(node) {
|
|
11651
|
+
const { kind } = node;
|
|
11652
|
+
return typeof kind === 'string' && kind === 'OperationDefinition';
|
|
11653
|
+
}
|
|
11654
|
+
function operationNodeAncestor(ancestors) {
|
|
11655
|
+
let operationNode = ancestors.find((a) => {
|
|
11656
|
+
return !(a instanceof Array) && isObjectDefinitionNode(a);
|
|
11657
|
+
});
|
|
11658
|
+
return operationNode;
|
|
11510
11659
|
}
|
|
11511
11660
|
|
|
11512
11661
|
function findSpanningField(name) {
|
|
@@ -11707,44 +11856,87 @@ function atobPolyfill(data) {
|
|
|
11707
11856
|
const base64encode = typeof btoa === 'function' ? btoa : btoaPolyfill;
|
|
11708
11857
|
const base64decode = typeof atob === 'function' ? atob : atobPolyfill;
|
|
11709
11858
|
|
|
11859
|
+
// this truthy value is used to indicate a premature end of results
|
|
11860
|
+
const EARLY_END = 1;
|
|
11710
11861
|
function cursorResolver(source) {
|
|
11711
|
-
|
|
11862
|
+
let cursor = {
|
|
11863
|
+
i: source.index,
|
|
11864
|
+
};
|
|
11865
|
+
if (source.earlyEnd) {
|
|
11866
|
+
cursor.e = EARLY_END;
|
|
11867
|
+
}
|
|
11868
|
+
return encodeV1Cursor(cursor);
|
|
11712
11869
|
}
|
|
11713
11870
|
function pageInfoResolver(source) {
|
|
11714
11871
|
if (source.records.length === 0) {
|
|
11872
|
+
// we may have found no records, but if more exist we need to
|
|
11873
|
+
// return a valid cursor that can be passed as the next `after`
|
|
11874
|
+
if (source.earlyEnd) {
|
|
11875
|
+
return {
|
|
11876
|
+
startCursor: null,
|
|
11877
|
+
endCursor: encodeV1Cursor({
|
|
11878
|
+
i: source.offset,
|
|
11879
|
+
e: EARLY_END,
|
|
11880
|
+
}),
|
|
11881
|
+
hasNextPage: source.hasNextPage,
|
|
11882
|
+
};
|
|
11883
|
+
}
|
|
11715
11884
|
return {
|
|
11716
11885
|
startCursor: null,
|
|
11717
11886
|
endCursor: null,
|
|
11718
|
-
hasNextPage:
|
|
11887
|
+
hasNextPage: source.hasNextPage,
|
|
11719
11888
|
};
|
|
11720
11889
|
}
|
|
11721
11890
|
let startIndex = source.records[0].index;
|
|
11891
|
+
let startCursor = {
|
|
11892
|
+
i: startIndex,
|
|
11893
|
+
};
|
|
11722
11894
|
let endIndex = source.records[source.records.length - 1].index;
|
|
11895
|
+
let endCursor = {
|
|
11896
|
+
i: endIndex,
|
|
11897
|
+
};
|
|
11898
|
+
if (source.earlyEnd) {
|
|
11899
|
+
startCursor.e = EARLY_END;
|
|
11900
|
+
endCursor.e = EARLY_END;
|
|
11901
|
+
}
|
|
11723
11902
|
return {
|
|
11724
|
-
startCursor: encodeV1Cursor(
|
|
11725
|
-
endCursor: encodeV1Cursor(
|
|
11903
|
+
startCursor: encodeV1Cursor(startCursor),
|
|
11904
|
+
endCursor: encodeV1Cursor(endCursor),
|
|
11726
11905
|
hasNextPage: source.hasNextPage,
|
|
11727
11906
|
};
|
|
11728
11907
|
}
|
|
11729
11908
|
function pageResultCountResolver(source) {
|
|
11730
11909
|
return source.records.length;
|
|
11731
11910
|
}
|
|
11732
|
-
function
|
|
11733
|
-
return
|
|
11911
|
+
function isLocalCursor(maybeCursor) {
|
|
11912
|
+
return (!!maybeCursor &&
|
|
11913
|
+
typeof maybeCursor === 'object' &&
|
|
11914
|
+
'i' in maybeCursor &&
|
|
11915
|
+
typeof maybeCursor.i === 'number');
|
|
11916
|
+
}
|
|
11917
|
+
function encodeV1Cursor(cursor) {
|
|
11918
|
+
return base64encode(stringify$3(cursor));
|
|
11734
11919
|
}
|
|
11735
|
-
const
|
|
11920
|
+
const CURSOR_PARSE_ERROR = 'Unable to parse cursor';
|
|
11736
11921
|
function decodeV1Cursor(base64cursor) {
|
|
11737
|
-
|
|
11738
|
-
|
|
11922
|
+
let maybeCursor;
|
|
11923
|
+
try {
|
|
11924
|
+
const cursorString = base64decode(base64cursor);
|
|
11925
|
+
maybeCursor = parse$3(cursorString);
|
|
11926
|
+
}
|
|
11927
|
+
catch (error) {
|
|
11928
|
+
let message = CURSOR_PARSE_ERROR;
|
|
11929
|
+
if (error instanceof Error) {
|
|
11930
|
+
message += ': ' + error.message;
|
|
11931
|
+
}
|
|
11739
11932
|
// eslint-disable-next-line @salesforce/lds/no-error-in-production
|
|
11740
|
-
throw new Error(
|
|
11933
|
+
throw new Error(message);
|
|
11741
11934
|
}
|
|
11742
|
-
|
|
11743
|
-
if (!found || !found.groups) {
|
|
11935
|
+
if (!isLocalCursor(maybeCursor)) {
|
|
11744
11936
|
// eslint-disable-next-line @salesforce/lds/no-error-in-production
|
|
11745
|
-
throw new Error(
|
|
11937
|
+
throw new Error(CURSOR_PARSE_ERROR);
|
|
11746
11938
|
}
|
|
11747
|
-
return
|
|
11939
|
+
return maybeCursor;
|
|
11748
11940
|
}
|
|
11749
11941
|
/**
|
|
11750
11942
|
* Check the selections for any selection matching `pageInfo { hasNextPage }`
|
|
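The rewritten cursor helpers encode a local cursor as base64-encoded JSON of the shape `{ i: <1-based index>, e?: 1 }`, where `e` flags a premature end of locally cached results; `decodeV1Cursor` rejects anything that does not decode back to that shape. A standalone round-trip sketch of that encoding (using the platform `btoa`/`atob` rather than the bundle's polyfills):

```js
// Sketch of the v1 cursor format used above: base64(JSON) of { i, e? }.
const EARLY_END = 1;

function encodeV1Cursor(cursor) {
    return btoa(JSON.stringify(cursor));
}

function decodeV1Cursor(base64cursor) {
    let maybeCursor;
    try {
        maybeCursor = JSON.parse(atob(base64cursor));
    } catch (error) {
        throw new Error('Unable to parse cursor');
    }
    if (!maybeCursor || typeof maybeCursor.i !== 'number') {
        throw new Error('Unable to parse cursor');
    }
    return maybeCursor;
}

// Round trip: the 5th record, reached after the locally cached rows ran out early.
const token = encodeV1Cursor({ i: 5, e: EARLY_END });
decodeV1Cursor(token); // -> { i: 5, e: 1 }
```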
@@ -11782,6 +11974,164 @@ function selectionIncludesHasNextPage(selections, fragments) {
|
|
|
11782
11974
|
return false;
|
|
11783
11975
|
}
|
|
11784
11976
|
|
|
11977
|
+
const END_CURSOR = '__END__';
|
|
11978
|
+
// find the closest matching cursor in the server pagination metadata
|
|
11979
|
+
function mapCursorValue(originalValue, paginationMetadata) {
|
|
11980
|
+
let mappedValue = null;
|
|
11981
|
+
if (!originalValue) {
|
|
11982
|
+
return mappedValue;
|
|
11983
|
+
}
|
|
11984
|
+
// flip the pagination metadata into an array by index.
|
|
11985
|
+
let cursors = [];
|
|
11986
|
+
for (const [cursor, index] of Object.entries(paginationMetadata)) {
|
|
11987
|
+
if (index === undefined)
|
|
11988
|
+
continue;
|
|
11989
|
+
cursors[index] = cursor;
|
|
11990
|
+
}
|
|
11991
|
+
let cursor = decodeV1Cursor(originalValue);
|
|
11992
|
+
// cursors containe 1-based indexes, adjust back to 0-based
|
|
11993
|
+
let index = cursor.i - 1;
|
|
11994
|
+
if (
|
|
11995
|
+
// cursor.e being truthy means we had premature end of results and
|
|
11996
|
+
// should pin to the last known server cursor
|
|
11997
|
+
!cursor.e &&
|
|
11998
|
+
// check that the index we have is within the bounds of known cursors
|
|
11999
|
+
index >= 0 &&
|
|
12000
|
+
index < cursors.length &&
|
|
12001
|
+
// and make sure the cursor is not the server end marker
|
|
12002
|
+
cursors[index] !== END_CURSOR) {
|
|
12003
|
+
mappedValue = cursors[index];
|
|
12004
|
+
}
|
|
12005
|
+
else {
|
|
12006
|
+
// in this case, either our local cursor is beyond the max server cursor, or
|
|
12007
|
+
// the local cursor precedes the max server cursor and we ran out of locally
|
|
12008
|
+
// cached results. either way, find the last known server cursor and map to that.
|
|
12009
|
+
for (let i = cursors.length; i > 0; --i) {
|
|
12010
|
+
let cursor = cursors[i - 1];
|
|
12011
|
+
if (cursor !== END_CURSOR) {
|
|
12012
|
+
mappedValue = cursor;
|
|
12013
|
+
break;
|
|
12014
|
+
}
|
|
12015
|
+
}
|
|
12016
|
+
}
|
|
12017
|
+
return mappedValue;
|
|
12018
|
+
}
|
|
12019
|
+
// map all pagination cursors in the document
|
|
12020
|
+
async function mapPaginationCursors(originalAST, variables, store) {
|
|
12021
|
+
// first pass, identify record query cache keys for reading pagination metadata
|
|
12022
|
+
let requiredPaginationMetadataKeys = [];
|
|
12023
|
+
visit(originalAST, {
|
|
12024
|
+
Field(node, _key, _parent, _path, ancestors) {
|
|
12025
|
+
// is it a record query?
|
|
12026
|
+
if (!isRecordQuery(node)) {
|
|
12027
|
+
return;
|
|
12028
|
+
}
|
|
12029
|
+
// does it have a defined `after` argument?
|
|
12030
|
+
let after = node.arguments &&
|
|
12031
|
+
node.arguments.find((a) => {
|
|
12032
|
+
return a.name.value === 'after';
|
|
12033
|
+
});
|
|
12034
|
+
if (after && (after.value.kind === 'StringValue' || after.value.kind === 'Variable')) {
|
|
12035
|
+
let operationNode = operationNodeAncestor(ancestors);
|
|
12036
|
+
if (!operationNode) {
|
|
12037
|
+
return false;
|
|
12038
|
+
}
|
|
12039
|
+
let key = buildKeyStringForRecordQuery(operationNode, variables, node.arguments || [], node.name.value);
|
|
12040
|
+
requiredPaginationMetadataKeys.push(key);
|
|
12041
|
+
}
|
|
12042
|
+
// don't need to descend into this node
|
|
12043
|
+
return false;
|
|
12044
|
+
},
|
|
12045
|
+
});
|
|
12046
|
+
// read pagination metadata for identified record queries
|
|
12047
|
+
let paginationMetadataMap = await readPaginationMetadataForKeys(requiredPaginationMetadataKeys, store.query.bind(store));
|
|
12048
|
+
// holds the original cursor values that were mapped back to server cursors
|
|
12049
|
+
let mappedCursors = new Map();
|
|
12050
|
+
// rewrite nodes/variables with mapped cursors now that we read the pagination metadata
|
|
12051
|
+
let ast = visit(originalAST, {
|
|
12052
|
+
Field(node, _key, _parent, _path, ancestors) {
|
|
12053
|
+
// is it a record query?
|
|
12054
|
+
if (!isRecordQuery(node)) {
|
|
12055
|
+
// not returning false, we might be in the parent of a record query
|
|
12056
|
+
return;
|
|
12057
|
+
}
|
|
12058
|
+
// does it have a defined `after` argument?
|
|
12059
|
+
if (!node.arguments)
|
|
12060
|
+
return false;
|
|
12061
|
+
let after = node.arguments.find((a) => {
|
|
12062
|
+
return a.name.value === 'after';
|
|
12063
|
+
});
|
|
12064
|
+
if (!after)
|
|
12065
|
+
return false;
|
|
12066
|
+
if (after.value.kind === 'StringValue' || after.value.kind === 'Variable') {
|
|
12067
|
+
let operationNode = operationNodeAncestor(ancestors);
|
|
12068
|
+
if (!operationNode) {
|
|
12069
|
+
return false;
|
|
12070
|
+
}
|
|
12071
|
+
let key = buildKeyStringForRecordQuery(operationNode, variables, node.arguments || [], node.name.value);
|
|
12072
|
+
// pagination metadata may be missing, e.g. due to being offline
|
|
12073
|
+
let paginationMetadata = paginationMetadataMap.get(key) || {};
|
|
12074
|
+
if (after.value.kind === 'StringValue') {
|
|
12075
|
+
let originalValue = after.value.value;
|
|
12076
|
+
mappedCursors.set(key, originalValue);
|
|
12077
|
+
let mappedValue = mapCursorValue(originalValue, paginationMetadata);
|
|
12078
|
+
if (!mappedValue) {
|
|
12079
|
+
// there were no results from the server, remove after argument
|
|
12080
|
+
return {
|
|
12081
|
+
...node,
|
|
12082
|
+
arguments: node.arguments.filter((a) => a !== after),
|
|
12083
|
+
};
|
|
12084
|
+
}
|
|
12085
|
+
// return a new replacement node
|
|
12086
|
+
return {
|
|
12087
|
+
...node,
|
|
12088
|
+
arguments: node.arguments.map((a) => {
|
|
12089
|
+
if (a !== after)
|
|
12090
|
+
return a;
|
|
12091
|
+
return {
|
|
12092
|
+
...a,
|
|
12093
|
+
value: {
|
|
12094
|
+
kind: 'StringValue',
|
|
12095
|
+
value: mappedValue,
|
|
12096
|
+
},
|
|
12097
|
+
};
|
|
12098
|
+
}),
|
|
12099
|
+
};
|
|
12100
|
+
}
|
|
12101
|
+
else if (after.value.kind === 'Variable') {
|
|
12102
|
+
// rewrite the variable
|
|
12103
|
+
let variableName = after.value.name.value;
|
|
12104
|
+
let variableValue = variables[variableName];
|
|
12105
|
+
mappedCursors.set(key, variableValue);
|
|
12106
|
+
let mappedValue = mapCursorValue(variableValue, paginationMetadata);
|
|
12107
|
+
variables[variableName] = mappedValue;
|
|
12108
|
+
}
|
|
12109
|
+
// don't need to descend into this node
|
|
12110
|
+
return false;
|
|
12111
|
+
}
|
|
12112
|
+
},
|
|
12113
|
+
});
|
|
12114
|
+
return {
|
|
12115
|
+
ast,
|
|
12116
|
+
mappedCursors,
|
|
12117
|
+
};
|
|
12118
|
+
}
|
|
12119
|
+
async function readPaginationMetadataForKeys(keys, query) {
|
|
12120
|
+
let metadataMap = new Map();
|
|
12121
|
+
if (keys.length === 0)
|
|
12122
|
+
return metadataMap;
|
|
12123
|
+
const sql = `SELECT key, data FROM lds_data WHERE key in (${Array(keys.length)
|
|
12124
|
+
.fill('?')
|
|
12125
|
+
.join(',')})`;
|
|
12126
|
+
const results = await query(sql, keys.map((k) => k + '__pagination'));
|
|
12127
|
+
for (let row of results.rows) {
|
|
12128
|
+
let key = row[0].replace(/__pagination$/, '');
|
|
12129
|
+
let metadata = parse$3(row[1]);
|
|
12130
|
+
metadataMap.set(key, metadata);
|
|
12131
|
+
}
|
|
12132
|
+
return metadataMap;
|
|
12133
|
+
}
|
|
12134
|
+
|
|
11785
12135
|
/*
|
|
11786
12136
|
resolves connections...
|
|
11787
12137
|
*/
|
|
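`mapCursorValue` translates a local, index-based cursor back onto the nearest cursor the server actually handed out: it flips the stored pagination metadata (a cursor-to-index map) into an index-to-cursor array, picks the entry at `i - 1` when the local cursor is in range and not flagged as an early end, and otherwise falls back to the last non-`__END__` server cursor. A small worked illustration with hypothetical metadata values:

```js
// Hypothetical pagination metadata: server cursor -> index of the row it follows.
const paginationMetadata = { srvCursorA: 1, srvCursorB: 2, __END__: 3 };

// Flip it into an array indexed by position, as the new code does.
const cursorsByIndex = [];
for (const [cursor, index] of Object.entries(paginationMetadata)) {
    cursorsByIndex[index] = cursor;
}
// cursorsByIndex -> [ <empty>, 'srvCursorA', 'srvCursorB', '__END__' ]

// A local cursor { i: 2 } (1-based) maps to cursorsByIndex[1], i.e. 'srvCursorA'.
// A local cursor { i: 4 }, or one flagged with e: 1, falls back to the last
// non-__END__ entry ('srvCursorB'), so the server request resumes from there.
```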
@@ -11803,8 +12153,14 @@ async function connectionResolver(obj, args, context, info) {
|
|
|
11803
12153
|
const childRelationship = parentObjectInfo &&
|
|
11804
12154
|
parentObjectInfo.childRelationships.find((rel) => rel.relationshipName === info.fieldName);
|
|
11805
12155
|
// or emit/throw if we want to report it
|
|
11806
|
-
if (!childRelationship)
|
|
11807
|
-
return {
|
|
12156
|
+
if (!childRelationship) {
|
|
12157
|
+
return {
|
|
12158
|
+
records: [],
|
|
12159
|
+
hasNextPage: false,
|
|
12160
|
+
earlyEnd: false,
|
|
12161
|
+
offset: 0,
|
|
12162
|
+
};
|
|
12163
|
+
}
|
|
11808
12164
|
alias = childRelationship.childObjectApiName;
|
|
11809
12165
|
childRelationshipFieldName = childRelationship.fieldName;
|
|
11810
12166
|
}
|
|
@@ -11823,7 +12179,12 @@ async function connectionResolver(obj, args, context, info) {
|
|
|
11823
12179
|
}
|
|
11824
12180
|
let offset = 0;
|
|
11825
12181
|
if (args.after) {
|
|
11826
|
-
|
|
12182
|
+
let originalCursor = context.mappedCursors.get(queryCacheKey);
|
|
12183
|
+
if (!originalCursor) {
|
|
12184
|
+
// eslint-disable-next-line @salesforce/lds/no-error-in-production
|
|
12185
|
+
throw new Error('Internal Error: unable to determine `after` cursor value');
|
|
12186
|
+
}
|
|
12187
|
+
offset = decodeV1Cursor(originalCursor).i;
|
|
11827
12188
|
}
|
|
11828
12189
|
// if the query wants to know `hasNextPage` then we need to request 1 additional record
|
|
11829
12190
|
let selections = info.fieldNodes
|
|
@@ -11832,7 +12193,7 @@ async function connectionResolver(obj, args, context, info) {
|
|
|
11832
12193
|
let wantsHasNextPage = selectionIncludesHasNextPage(selections, info.fragments);
|
|
11833
12194
|
let paginationMetadata = undefined;
|
|
11834
12195
|
if (wantsHasNextPage) {
|
|
11835
|
-
paginationMetadata = await
|
|
12196
|
+
paginationMetadata = await readPaginationMetadataForKeys([queryCacheKey], query);
|
|
11836
12197
|
}
|
|
11837
12198
|
let internalLimit = limit + (wantsHasNextPage ? 1 : 0);
|
|
11838
12199
|
// Alias starts as entity's ApiName
|
|
@@ -11843,36 +12204,60 @@ async function connectionResolver(obj, args, context, info) {
|
|
|
11843
12204
|
orderBy: orderByToPredicate(args.orderBy, alias, alias, context.objectInfos),
|
|
11844
12205
|
limit: internalLimit,
|
|
11845
12206
|
offset,
|
|
11846
|
-
ingestionTimestamp,
|
|
11847
12207
|
};
|
|
11848
12208
|
const { sql, bindings } = buildQuery(queryConfig);
|
|
11849
12209
|
const results = await query(sql, bindings);
|
|
11850
12210
|
let hasNextPage = false;
|
|
12211
|
+
let earlyEnd = false;
|
|
11851
12212
|
if (wantsHasNextPage) {
|
|
11852
12213
|
if (results.rows.length > limit) {
|
|
11853
12214
|
// more records exist in the cache
|
|
11854
12215
|
hasNextPage = true;
|
|
11855
12216
|
results.rows.pop();
|
|
11856
12217
|
}
|
|
11857
|
-
else if (!paginationMetadata ||
|
|
12218
|
+
else if (!paginationMetadata ||
|
|
12219
|
+
!paginationMetadata.has(queryCacheKey) ||
|
|
12220
|
+
paginationMetadata.get(queryCacheKey).__END__ === undefined) {
|
|
11858
12221
|
// more records may exist on the server
|
|
11859
12222
|
hasNextPage = true;
|
|
12223
|
+
// we hit the end of our local records, so we need to know that we
|
|
12224
|
+
// should start at the end of known server cursors
|
|
12225
|
+
if (results.rows.length < limit) {
|
|
12226
|
+
earlyEnd = true;
|
|
12227
|
+
}
|
|
11860
12228
|
}
|
|
11861
12229
|
}
|
|
11862
12230
|
//map each sql result with the ingestion timestamp to pass it down a level
|
|
11863
|
-
let records = results.rows
|
|
11864
|
-
|
|
11865
|
-
|
|
12231
|
+
let records = results.rows.map((row, index) => {
|
|
12232
|
+
const recordMetadataResult = {
|
|
12233
|
+
recordRepresentation: parse$3(row[0]),
|
|
12234
|
+
metadata: parse$3(row[1]),
|
|
12235
|
+
};
|
|
12236
|
+
const { recordRepresentation, metadata } = recordMetadataResult;
|
|
11866
12237
|
context.seenRecordIds.add(recordRepresentation.id);
|
|
12238
|
+
if (metadata.ingestionTimestamp < ingestionTimestamp &&
|
|
12239
|
+
recordRepresentation.drafts === undefined) {
|
|
12240
|
+
if (context.possibleStaleRecordMap.has(recordRepresentation.apiName) === false) {
|
|
12241
|
+
context.possibleStaleRecordMap.set(recordRepresentation.apiName, []);
|
|
12242
|
+
}
|
|
12243
|
+
const ids = context.possibleStaleRecordMap.get(recordRepresentation.apiName);
|
|
12244
|
+
if (ids !== undefined) {
|
|
12245
|
+
ids.push(recordRepresentation.id);
|
|
12246
|
+
context.possibleStaleRecordMap.set(recordRepresentation.apiName, ids);
|
|
12247
|
+
}
|
|
12248
|
+
}
|
|
11867
12249
|
return {
|
|
11868
12250
|
recordRepresentation,
|
|
11869
12251
|
ingestionTimestamp,
|
|
11870
|
-
index: index + offset,
|
|
12252
|
+
index: index + offset + 1,
|
|
12253
|
+
earlyEnd,
|
|
11871
12254
|
};
|
|
11872
12255
|
});
|
|
11873
12256
|
return {
|
|
11874
12257
|
records,
|
|
11875
12258
|
hasNextPage,
|
|
12259
|
+
earlyEnd,
|
|
12260
|
+
offset,
|
|
11876
12261
|
};
|
|
11877
12262
|
}
|
|
11878
12263
|
/**
|
|
@@ -12670,7 +13055,7 @@ function getTextAreaType(field) {
|
|
|
12670
13055
|
return 'TextAreaValue';
|
|
12671
13056
|
}
|
|
12672
13057
|
|
|
12673
|
-
async function evaluate(config, observers, settings, objectInfos, store, snapshot, cache, draftFunctions) {
|
|
13058
|
+
async function evaluate(config, observers, settings, objectInfos, store, snapshot, cache, draftFunctions, mappedCursors) {
|
|
12674
13059
|
const eventEmitter = createCustomAdapterEventEmitter(GRAPHQL_EVAL_NAMESPACE, observers);
|
|
12675
13060
|
// this is only wrapped in a try to execute the event after the result was returned
|
|
12676
13061
|
try {
|
|
@@ -12729,7 +13114,7 @@ async function evaluate(config, observers, settings, objectInfos, store, snapsho
|
|
|
12729
13114
|
eventEmitter({ type: 'graphql-preconditions-met' });
|
|
12730
13115
|
// create the resolver request context, runtime values and functions for
|
|
12731
13116
|
// resolvers to do their job.
|
|
12732
|
-
const contextValue = createContext(store, objectInfos, eventEmitter, settings, snapshot, draftFunctions);
|
|
13117
|
+
const contextValue = createContext(store, objectInfos, eventEmitter, settings, snapshot, mappedCursors, draftFunctions);
|
|
12733
13118
|
// We're building this from scratch from each request. If this becomes a
|
|
12734
13119
|
// hotspot we can pull it up and memoize it later
|
|
12735
13120
|
const schema = createSchemaWithCache(objectInfos, cache);
|
|
@@ -12754,7 +13139,11 @@ async function evaluate(config, observers, settings, objectInfos, store, snapsho
|
|
|
12754
13139
|
seenRecordIds.push(queryString);
|
|
12755
13140
|
});
|
|
12756
13141
|
}
|
|
12757
|
-
return {
|
|
13142
|
+
return {
|
|
13143
|
+
result,
|
|
13144
|
+
seenRecordIds,
|
|
13145
|
+
possibleStaleRecordMap: contextValue.possibleStaleRecordMap,
|
|
13146
|
+
};
|
|
12758
13147
|
}
|
|
12759
13148
|
finally {
|
|
12760
13149
|
eventEmitter({ type: 'graphql-eval-end' });
|
|
@@ -14478,7 +14867,11 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
|
|
|
14478
14867
|
return async function draftAwareGraphQLAdapter(config, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy, requestContext = {}) {
|
|
14479
14868
|
//create a copy to not accidentally modify the AST in the astResolver map of luvio
|
|
14480
14869
|
const copy = parse$3(stringify$3(config.query));
|
|
14870
|
+
// the injected ast has extra fields needed for eval in it
|
|
14481
14871
|
let injectedAST;
|
|
14872
|
+
// the cursor mapped ast is passed upstream so it won't reject on our local cursors
|
|
14873
|
+
let cursorMappedAST;
|
|
14874
|
+
let mappedCursors = new Map();
|
|
14482
14875
|
let objectInfoNeeded = {};
|
|
14483
14876
|
let unmappedDraftIDs;
|
|
14484
14877
|
let internalRequestContext = {
|
|
@@ -14494,6 +14887,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
|
|
|
14494
14887
|
objectInfos: objectInfoNeeded,
|
|
14495
14888
|
unmappedDraftIDs,
|
|
14496
14889
|
} = await injectSyntheticFields(copy, objectInfoService, draftFunctions, config.variables));
|
|
14890
|
+
({ ast: cursorMappedAST, mappedCursors } = await mapPaginationCursors(injectedAST, config.variables || {}, store));
|
|
14497
14891
|
if (config.variables) {
|
|
14498
14892
|
config.variables = replaceDraftIdsInVariables(config.variables, draftFunctions, unmappedDraftIDs);
|
|
14499
14893
|
}
|
|
@@ -14525,7 +14919,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
|
|
|
14525
14919
|
const nonEvaluatedSnapshot = (await luvio.applyCachePolicy(internalRequestContext, {
|
|
14526
14920
|
config: {
|
|
14527
14921
|
...config,
|
|
14528
|
-
query:
|
|
14922
|
+
query: cursorMappedAST,
|
|
14529
14923
|
},
|
|
14530
14924
|
luvio,
|
|
14531
14925
|
gqlEval: true,
|
|
@@ -14538,12 +14932,17 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
|
|
|
14538
14932
|
: [];
|
|
14539
14933
|
let gqlResult;
|
|
14540
14934
|
let seenRecordIds;
|
|
14935
|
+
let possibleStaleRecordMap;
|
|
14541
14936
|
try {
|
|
14542
|
-
({
|
|
14937
|
+
({
|
|
14938
|
+
result: gqlResult,
|
|
14939
|
+
seenRecordIds,
|
|
14940
|
+
possibleStaleRecordMap,
|
|
14941
|
+
} = await evaluate({
|
|
14543
14942
|
...config,
|
|
14544
14943
|
//need to create another copy of the ast for future writes
|
|
14545
14944
|
query: parse$3(stringify$3(injectedAST)),
|
|
14546
|
-
}, observers, { userId }, objectInfoNeeded, store, nonEvaluatedSnapshot, graphqlSchemaCache));
|
|
14945
|
+
}, observers, { userId }, objectInfoNeeded, store, nonEvaluatedSnapshot, graphqlSchemaCache, draftFunctions, mappedCursors));
|
|
14547
14946
|
}
|
|
14548
14947
|
catch (throwable) {
|
|
14549
14948
|
const error = throwable;
|
|
@@ -14569,13 +14968,18 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
|
|
|
14569
14968
|
const seenRecords = createSeenRecords(seenRecordIds, nonEvaluatedSnapshot);
|
|
14570
14969
|
const recordId = generateUniqueRecordId();
|
|
14571
14970
|
const rebuildWithLocalEval = async (originalSnapshot) => {
|
|
14572
|
-
let { result: rebuildResult, seenRecordIds } = await evaluate({
|
|
14971
|
+
let { result: rebuildResult, seenRecordIds, possibleStaleRecordMap, } = await evaluate({
|
|
14573
14972
|
...config,
|
|
14574
14973
|
query: injectedAST,
|
|
14575
|
-
}, observers, { userId }, objectInfoNeeded, store, originalSnapshot, graphqlSchemaCache, draftFunctions);
|
|
14974
|
+
}, observers, { userId }, objectInfoNeeded, store, originalSnapshot, graphqlSchemaCache, draftFunctions, mappedCursors);
|
|
14576
14975
|
if (!rebuildResult.errors) {
|
|
14577
14976
|
rebuildResult = removeSyntheticFields(rebuildResult, config.query);
|
|
14578
14977
|
}
|
|
14978
|
+
let snapshotState = 'Fulfilled';
|
|
14979
|
+
if (possibleStaleRecordMap.size > 0) {
|
|
14980
|
+
initiateStaleRecordRefresh(luvio, possibleStaleRecordMap);
|
|
14981
|
+
snapshotState = 'Stale';
|
|
14982
|
+
}
|
|
14579
14983
|
if (objectsDeepEqual(rebuildResult, originalSnapshot.data)) {
|
|
14580
14984
|
return originalSnapshot;
|
|
14581
14985
|
}
|
|
@@ -14584,6 +14988,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
|
|
|
14584
14988
|
...originalSnapshot,
|
|
14585
14989
|
data: rebuildResult,
|
|
14586
14990
|
recordId,
|
|
14991
|
+
state: snapshotState,
|
|
14587
14992
|
seenRecords: createSeenRecords(seenRecordIds, nonEvaluatedSnapshot),
|
|
14588
14993
|
rebuildWithLocalEval,
|
|
14589
14994
|
};
|
|
@@ -14621,9 +15026,31 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
|
|
|
14621
15026
|
},
|
|
14622
15027
|
};
|
|
14623
15028
|
}
|
|
15029
|
+
if (possibleStaleRecordMap.size > 0) {
|
|
15030
|
+
initiateStaleRecordRefresh(luvio, possibleStaleRecordMap);
|
|
15031
|
+
resultSnapshot.state = 'Stale';
|
|
15032
|
+
}
|
|
14624
15033
|
return resultSnapshot;
|
|
14625
15034
|
};
|
|
14626
15035
|
}
|
|
15036
|
+
function initiateStaleRecordRefresh(luvio, keyMap) {
|
|
15037
|
+
const staleRecordKeys = from$1(keyMap.values())
|
|
15038
|
+
.flat()
|
|
15039
|
+
.map((id) => `UiApi::RecordRepresentation:${id}`);
|
|
15040
|
+
luvio.storeExpirePossibleStaleRecords(staleRecordKeys, makeGetRecordsConfig(keyMap), getRecordsAdapterFactory(luvio));
|
|
15041
|
+
}
|
|
15042
|
+
function makeGetRecordsConfig(keyMap) {
|
|
15043
|
+
const records = [];
|
|
15044
|
+
keyMap.forEach((recordIds, apiName) => {
|
|
15045
|
+
records.push({
|
|
15046
|
+
recordIds,
|
|
15047
|
+
fields: [`${apiName}.Id`],
|
|
15048
|
+
});
|
|
15049
|
+
});
|
|
15050
|
+
return {
|
|
15051
|
+
records,
|
|
15052
|
+
};
|
|
15053
|
+
}
|
|
14627
15054
|
|
|
14628
15055
|
function environmentAwareGraphQLBatchAdapterFactory(objectInfoService, luvio, isDraftId) {
|
|
14629
15056
|
return async function environmentAwareGraphQLBatchAdapter(config, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy, requestContext = {}) {
|
|
@@ -15770,6 +16197,9 @@ class NimbusDraftQueue {
     removeHandler(_id) {
         return Promise.reject(new Error('Cannot call setMetadata from the NimbusDraftQueue'));
     }
+    updateDraftAction(_action) {
+        return Promise.reject(new Error('Cannot call updateDraftAction from the NimbusDraftQueue'));
+    }
 }
 
 function attachObserversToAdapterRequestContext(observers, adapterRequestContext) {
@@ -17027,6 +17457,21 @@ const NimbusBinaryStore = {
             __nimbus.plugins.LdsBinaryStorePlugin.setCanonicalUrl(uri, canonicalUrl, ttlSeconds, resolve, (err) => reject(errorMessageToError(err)));
         });
     },
+    createStream: function (type) {
+        return new Promise((resolve, reject) => {
+            __nimbus.plugins.LdsBinaryStorePlugin.createStream(type, resolve, (err) => reject(errorMessageToError(err)));
+        });
+    },
+    writeToStream: function (uri, chunk) {
+        return new Promise((resolve, reject) => {
+            __nimbus.plugins.LdsBinaryStorePlugin.writeToStream(uri, chunk, resolve, (err) => reject(errorMessageToError(err)));
+        });
+    },
+    closeStream: function (uri) {
+        return new Promise((resolve, reject) => {
+            __nimbus.plugins.LdsBinaryStorePlugin.closeStream(uri, resolve, (err) => reject(errorMessageToError(err)));
+        });
+    },
 };
 
 /**
@@ -18356,7 +18801,6 @@ let lazyDurableStore;
 let lazyNetworkAdapter;
 let lazyObjectInfoService;
 let lazyGetRecords;
-// TODO [W-123]: JHORST hoist, optimize and test this function
 const shouldFlush = (key, value) => {
     if (!isStoreKeyRecordId$1(key)) {
         return { flushValue: true };
@@ -18530,4 +18974,4 @@ register({
 });
 
 export { O11Y_NAMESPACE_LDS_MOBILE, getRuntime, registerReportObserver, reportGraphqlQueryParseError };
-// version: 1.
+// version: 1.304.0-aa3e5f9550