@salesforce/lds-runtime-mobile 1.299.0 → 1.301.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/main.js CHANGED
@@ -19,11 +19,10 @@ import { withRegistration, register } from '@salesforce/lds-default-luvio';
19
19
  import { setupInstrumentation, instrumentAdapter as instrumentAdapter$1, instrumentLuvio, setLdsAdaptersUiapiInstrumentation, setLdsNetworkAdapterInstrumentation } from '@salesforce/lds-instrumentation';
20
20
  import { HttpStatusCode, setBypassDeepFreeze, StoreKeySet, serializeStructuredKey, StringKeyInMemoryStore, Reader, deepFreeze, emitAdapterEvent, createCustomAdapterEventEmitter, StoreKeyMap, isFileReference, Environment, Luvio, InMemoryStore } from '@luvio/engine';
21
21
  import excludeStaleRecordsGate from '@salesforce/gate/lds.graphqlEvalExcludeStaleRecords';
22
- import { parseAndVisit, Kind, buildSchema, isObjectType, defaultFieldResolver, visit, execute, parse as parse$7, extendSchema, isScalarType } from '@luvio/graphql-parser';
23
- import { RECORD_ID_PREFIX, RECORD_FIELDS_KEY_JUNCTION, RECORD_REPRESENTATION_NAME, extractRecordIdFromStoreKey, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, getRecordId18, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, getObjectInfoDirectoryAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion, getRecordsAdapterFactory } from '@salesforce/lds-adapters-uiapi-mobile';
22
+ import { parseAndVisit, Kind as Kind$1, buildSchema, isObjectType, defaultFieldResolver, visit, execute, parse as parse$7, extendSchema, isScalarType } from '@luvio/graphql-parser';
23
+ import { RECORD_ID_PREFIX, RECORD_FIELDS_KEY_JUNCTION, RECORD_REPRESENTATION_NAME, extractRecordIdFromStoreKey, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, isStoreKeyRecordViewEntity, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, getRecordId18, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, getObjectInfoDirectoryAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion, getRecordsAdapterFactory } from '@salesforce/lds-adapters-uiapi-mobile';
24
24
  import ldsIdempotencyWriteDisabled from '@salesforce/gate/lds.idempotencyWriteDisabled';
25
25
  import ldsBackdatingEnabled from '@salesforce/gate/lds.backdatingEnabled';
26
- import { extractRecordIdFromStoreKey as extractRecordIdFromStoreKey$1, RECORD_VIEW_ENTITY_ID_PREFIX, isStoreKeyRecordViewEntity, keyBuilderRecord as keyBuilderRecord$1, RECORD_ID_PREFIX as RECORD_ID_PREFIX$1, RECORD_FIELDS_KEY_JUNCTION as RECORD_FIELDS_KEY_JUNCTION$1 } from '@salesforce/lds-adapters-uiapi';
27
26
  import FIRST_DAY_OF_WEEK from '@salesforce/i18n/firstDayOfWeek';
28
27
  import caseSensitiveUserId from '@salesforce/user/Id';
29
28
  import { idleDetector, getInstrumentation } from 'o11y/client';
@@ -43,6 +42,7 @@ import eagerEvalDefaultCachePolicy from '@salesforce/gate/lds.eagerEvalDefaultCa
43
42
  import ldsPrimingGraphqlBatch from '@salesforce/gate/lds.primingGraphqlBatch';
44
43
  import ldsMetadataRefreshEnabled from '@salesforce/gate/lds.metadataRefreshEnabled';
45
44
  import graphqlL2AdapterGate from '@salesforce/gate/lmr.graphqlL2Adapter';
45
+ import { isStoreKeyRecordViewEntity as isStoreKeyRecordViewEntity$1, RECORD_ID_PREFIX as RECORD_ID_PREFIX$1, RECORD_FIELDS_KEY_JUNCTION as RECORD_FIELDS_KEY_JUNCTION$1 } from '@salesforce/lds-adapters-uiapi';
46
46
 
47
47
  /**
48
48
  * Copyright (c) 2022, Salesforce, Inc.,
@@ -792,7 +792,7 @@ class DurableTTLStore {
792
792
  }
793
793
  }
794
794
 
795
- function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStoreErrorHandler, redirects, additionalDurableStoreOperations = [], enableDurableMetadataRefresh = false) {
795
+ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStoreErrorHandler, redirects, shouldFlush, additionalDurableStoreOperations = [], enableDurableMetadataRefresh = false) {
796
796
  const durableRecords = create$7(null);
797
797
  const refreshedDurableRecords = create$7(null);
798
798
  const evictedRecords = create$7(null);
@@ -820,7 +820,16 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
820
820
  const entries = wasVisited === true || enableDurableMetadataRefresh === false
821
821
  ? durableRecords
822
822
  : refreshedDurableRecords;
823
- setRecordTo(entries, key, record, metadata);
823
+ const { flushValue: flushValue, forceFlushMetadata: flushMetadata } = shouldFlush(key, record);
824
+ if (flushValue) {
825
+ setRecordTo(entries, key, record, metadata);
826
+ }
827
+ else {
828
+ // If the record is not to be flushed, we still need to update the metadata
829
+ if (flushMetadata === true) {
830
+ setRecordTo(refreshedDurableRecords, key, record, metadata);
831
+ }
832
+ }
824
833
  }
825
834
  const durableStoreOperations = additionalDurableStoreOperations;
826
835
  const recordKeys = keys$8(durableRecords);
@@ -1021,7 +1030,7 @@ function isUnfulfilledSnapshot$1(cachedSnapshotResult) {
1021
1030
  * @param durableStore A DurableStore implementation
1022
1031
  * @param instrumentation An instrumentation function implementation
1023
1032
  */
1024
- function makeDurable(environment, { durableStore, instrumentation, useRevivingStore, enableDurableMetadataRefresh = false, disableDeepFreeze = false, }) {
1033
+ function makeDurable(environment, { durableStore, instrumentation, useRevivingStore, shouldFlush, enableDurableMetadataRefresh = false, disableDeepFreeze = false, }) {
1025
1034
  // runtimes can choose to disable deepFreeze, e.g. headless mobile runtime
1026
1035
  setBypassDeepFreeze(disableDeepFreeze);
1027
1036
  let stagingStore = null;
@@ -1228,7 +1237,7 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
1228
1237
  if (stagingStore === null) {
1229
1238
  return Promise.resolve();
1230
1239
  }
1231
- const promise = flushInMemoryStoreValuesToDurableStore(stagingStore, durableStore, durableStoreErrorHandler, new Map(pendingStoreRedirects), additionalDurableStoreOperations, enableDurableMetadataRefresh);
1240
+ const promise = flushInMemoryStoreValuesToDurableStore(stagingStore, durableStore, durableStoreErrorHandler, new Map(pendingStoreRedirects), shouldFlush !== null && shouldFlush !== void 0 ? shouldFlush : (() => ({ flushValue: true })), additionalDurableStoreOperations, enableDurableMetadataRefresh);
1232
1241
  pendingStoreRedirects.clear();
1233
1242
  stagingStore = null;
1234
1243
  return promise;
@@ -1607,11 +1616,9 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
1607
1616
  * For full license text, see the LICENSE.txt file
1608
1617
  */
1609
1618
 
1610
- function isStoreKeyRecordField(key) {
1611
- return key.indexOf(RECORD_ID_PREFIX) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION) > -1;
1612
- }
1613
- function buildRecordFieldStoreKey$1(recordKey, fieldName) {
1614
- return `${recordKey}${RECORD_FIELDS_KEY_JUNCTION}${fieldName}`;
1619
+
1620
+ function isStoreKeyRecordId$1(key) {
1621
+ return key.indexOf(RECORD_ID_PREFIX) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION) === -1;
1615
1622
  }
1616
1623
  function objectsDeepEqual(lhs, rhs) {
1617
1624
  if (lhs === rhs)
@@ -4482,7 +4489,7 @@ function generateVariableGQLQuery(document, variables) {
4482
4489
  */
4483
4490
  function generateVariableSubQuery(valueNode, name, type, variables) {
4484
4491
  switch (valueNode.kind) {
4485
- case Kind.OBJECT: {
4492
+ case Kind$1.OBJECT: {
4486
4493
  // For example, `{ Id: { eq: $draftId } }` is a `ObjectValueNode`, which has field keys 'Id'
4487
4494
  const resultQuery = keys$7(valueNode.fields)
4488
4495
  .map((key) => generateVariableSubQuery(valueNode.fields[key], key, type, variables))
@@ -4493,7 +4500,7 @@ function generateVariableSubQuery(valueNode, name, type, variables) {
4493
4500
  }
4494
4501
  return resultQuery;
4495
4502
  }
4496
- case Kind.VARIABLE:
4503
+ case Kind$1.VARIABLE:
4497
4504
  return generateVariableNodeQuery(valueNode, name, type, variables);
4498
4505
  default:
4499
4506
  return '';
@@ -4559,7 +4566,7 @@ function swapArgumentWithVariableNodes(swapped, original) {
4559
4566
  });
4560
4567
  }
4561
4568
  function swapValueNodeWithVariableNodes(original, swapped) {
4562
- if (original.kind === Kind.OBJECT) {
4569
+ if (original.kind === Kind$1.OBJECT) {
4563
4570
  for (const key of keys$7(original.fields)) {
4564
4571
  if (isObjectValueNode$1(swapped) && swapped.fields[key]) {
4565
4572
  if (is(original.fields[key], 'Variable')) {
@@ -7003,6 +7010,36 @@ function filterOutReferenceFieldsAndLinks(record) {
7003
7010
  return filteredRecords;
7004
7011
  }
7005
7012
 
7013
+ /**
7014
+ * Retrieves a denormalized record from the store
7015
+ * NOTE: do not use this if you don't know what you're doing, this can still contain normalized record references
7016
+ * @param recordKey record key
7017
+ * @param durableStore the durable store
7018
+ * @returns a DraftRecordRepresentation containing the requested fields
7019
+ */
7020
+ function getDenormalizedRecord(recordKey, durableStore) {
7021
+ return durableStore
7022
+ .getEntries([recordKey], DefaultDurableSegment)
7023
+ .then((entries) => {
7024
+ if (entries === undefined) {
7025
+ return undefined;
7026
+ }
7027
+ const denormalizedEntry = entries[recordKey];
7028
+ if (denormalizedEntry === undefined) {
7029
+ return undefined;
7030
+ }
7031
+ // don't include link information
7032
+ const denormalizedRecord = denormalizedEntry.data;
7033
+ if (isStoreRecordError(denormalizedRecord)) {
7034
+ return undefined;
7035
+ }
7036
+ return denormalizedRecord;
7037
+ });
7038
+ }
7039
+ function isStoreRecordError(storeRecord) {
7040
+ return storeRecord.__type === 'error';
7041
+ }
7042
+
7006
7043
  /**
7007
7044
  * Checks if a resource request is a GET method on the record endpoint
7008
7045
  * @param request the resource request
@@ -7072,7 +7109,7 @@ function getRecordDraftEnvironment(luvio, env, { isDraftId, durableStore }) {
7072
7109
  // if the canonical key matches the key in the resource request it means we do not have a
7073
7110
  // mapping in our cache so either the draft no longer exists in our cache or invalid fields were requested
7074
7111
  if (canonicalKey === recordKey) {
7075
- const doesDraftExist = (await durableStore.getDenormalizedRecord(recordKey)) !== undefined;
7112
+ const doesDraftExist = (await getDenormalizedRecord(recordKey, durableStore)) !== undefined;
7076
7113
  if (doesDraftExist === false) {
7077
7114
  return Promise.reject(createNotFoundResponse({
7078
7115
  message: 'Draft record no longer exists',
@@ -7692,8 +7729,24 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
7692
7729
  continue;
7693
7730
  }
7694
7731
  const { dataType, relationshipName, referenceToInfos } = fieldInfo;
7695
- const draftFieldValue = record.fields[draftField].value;
7732
+ const draftFieldNode = record.fields[draftField];
7733
+ // JHORST: revisit this logic
7734
+ // do not try to apply drafts on nodes that are pending or missing
7735
+ if (draftFieldNode.__state !== undefined) {
7736
+ if (draftFieldNode.__state.pending === true ||
7737
+ draftFieldNode.__state.isMissing === true)
7738
+ continue;
7739
+ }
7740
+ const draftFieldValue = draftFieldNode.value;
7696
7741
  if (dataType === 'Reference' && relationshipName !== null) {
7742
+ const recordField = record.fields[relationshipName];
7743
+ if (recordField && isFieldLink(recordField)) {
7744
+ const link = recordField.value;
7745
+ // record view entities are not supported by drafts, leave them alone
7746
+ if (isStoreKeyRecordViewEntity(link.__ref)) {
7747
+ continue;
7748
+ }
7749
+ }
7697
7750
  if (draftFieldValue === null) {
7698
7751
  recordFields[relationshipName] = {
7699
7752
  displayValue: null,
@@ -7708,7 +7761,7 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
7708
7761
  const referencedRecord = referencedRecords.get(key);
7709
7762
  recordFields[relationshipName] = {
7710
7763
  displayValue: null,
7711
- value: createLink$1(key),
7764
+ value: createLink(key),
7712
7765
  };
7713
7766
  // for custom objects, we select the 'Name' field
7714
7767
  // otherwise we check the object info for name fields.
@@ -7736,7 +7789,7 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
7736
7789
  }
7737
7790
  return { ...record, fields: recordFields };
7738
7791
  }
7739
- function createLink$1(key) {
7792
+ function createLink(key) {
7740
7793
  return { __ref: key };
7741
7794
  }
7742
7795
  function getReferenceInfoForKey(fieldName, field, luvio, objectInfo) {
@@ -7751,7 +7804,7 @@ function getReferenceInfoForKey(fieldName, field, luvio, objectInfo) {
7751
7804
  referenceFieldName: relationshipName,
7752
7805
  field: {
7753
7806
  displayValue: null,
7754
- value: createLink$1(key),
7807
+ value: createLink(key),
7755
7808
  },
7756
7809
  };
7757
7810
  }
@@ -7780,7 +7833,6 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
7780
7833
  this.isDraftId = isDraftId;
7781
7834
  this.recordService = recordService;
7782
7835
  this.handlerId = LDS_ACTION_HANDLER_ID;
7783
- this.collectedFields = create$5(null);
7784
7836
  recordService.registerRecordHandler(this);
7785
7837
  }
7786
7838
  async buildPendingAction(request, queue) {
@@ -7795,7 +7847,7 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
7795
7847
  // to use. We just need to know if we have this record, we don't care about what fields
7796
7848
  // are on this record. This can go away if we wrote a special getRecord adapter which
7797
7849
  // allowed us to get a record with the fields we have cached (i.e. would never go to the network)
7798
- const doesRecordExist = (await this.durableStore.getDenormalizedRecord(tag)) !== undefined;
7850
+ const doesRecordExist = (await getDenormalizedRecord(tag, this.durableStore)) !== undefined;
7799
7851
  if (!doesRecordExist) {
7800
7852
  // we are trying to patch or delete a record that we don't have cached. This is not supported.
7801
7853
  // we'll do one last ditch effort to fetch the record with just the Id field in case we're online.
@@ -8030,7 +8082,7 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
8030
8082
  }
8031
8083
  return filterOutReferenceFieldsAndLinks(recordWithDrafts);
8032
8084
  }
8033
- const record = await this.durableStore.getDenormalizedRecord(key);
8085
+ const record = await getDenormalizedRecord(key, this.durableStore);
8034
8086
  if (record === undefined) {
8035
8087
  // error - it is an invariant that all drafts have a corresponding record in the store
8036
8088
  throw Error('Could not find record that draft was applied to in the durable store');
@@ -8042,48 +8094,13 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
8042
8094
  return filterOutReferenceFieldsAndLinks(recordWithDrafts);
8043
8095
  }
8044
8096
  applyDraftsToIncomingData(key, data, draftMetadata, publishData) {
8045
- if (isField(key)) {
8046
- this.collectedFields[key] = data;
8047
- return publishData(key, data);
8048
- }
8049
- // otherwise we're a record
8050
8097
  if (draftMetadata === undefined) {
8051
- // no drafts applied to this record, publish and be done
8052
- this.collectedFields = create$5(null);
8053
8098
  return publishData(key, data);
8054
8099
  }
8055
- // create a denormalized record with the collected fields
8056
- const recordFieldNames = keys$5(data.fields);
8057
- const partialRecord = {
8058
- ...data,
8059
- fields: {},
8060
- };
8061
- for (const fieldName of recordFieldNames) {
8062
- const collectedField = this.collectedFields[buildRecordFieldStoreKey$1(key, fieldName)];
8063
- if (collectedField !== undefined) {
8064
- partialRecord.fields[fieldName] =
8065
- collectedField;
8066
- }
8067
- }
8068
- const recordWithDrafts = replayDraftsOnRecord(partialRecord, draftMetadata);
8100
+ const recordWithDrafts = replayDraftsOnRecord(data, draftMetadata);
8069
8101
  const recordWithSpanningRefLinks = applyReferenceLinksToDraft(recordWithDrafts, draftMetadata);
8070
- // publish the normalized fields
8071
- const normalizedRecord = {
8072
- ...recordWithSpanningRefLinks,
8073
- ...data,
8074
- fields: { ...data.fields },
8075
- lastModifiedDate: recordWithDrafts.lastModifiedDate,
8076
- lastModifiedById: recordWithDrafts.lastModifiedById,
8077
- };
8078
- for (const fieldName of keys$5(recordWithSpanningRefLinks.fields)) {
8079
- const fieldKey = buildRecordFieldStoreKey$1(key, fieldName);
8080
- normalizedRecord.fields[fieldName] = { __ref: fieldKey };
8081
- publishData(fieldKey, recordWithSpanningRefLinks.fields[fieldName]);
8082
- }
8083
8102
  // publish the normalized record
8084
- publishData(key, normalizedRecord);
8085
- // we've published the record, now clear the collected fields
8086
- this.collectedFields = create$5(null);
8103
+ publishData(key, recordWithSpanningRefLinks);
8087
8104
  }
8088
8105
  updateMetadata(existingMetadata, incomingMetadata) {
8089
8106
  // ensure the the api name cannot be overwritten in the incoming metadata
@@ -8098,7 +8115,7 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
8098
8115
  return request.body.apiName;
8099
8116
  }
8100
8117
  // otherwise we'll read it from the durable store if we can
8101
- const record = await this.durableStore.getDenormalizedRecord(recordKey);
8118
+ const record = await getDenormalizedRecord(recordKey, this.durableStore);
8102
8119
  if (record !== undefined) {
8103
8120
  return record.apiName;
8104
8121
  }
@@ -8203,9 +8220,6 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
8203
8220
  };
8204
8221
  }
8205
8222
  }
8206
- function isField(key, data) {
8207
- return isStoreKeyRecordField(key);
8208
- }
8209
8223
  // true if `createable/updateable` property of backdating field is true and draft does not include that fields. If the customer specifies
8210
8224
  // `LastModifiedDate`, it is not overwritten.
8211
8225
  function isBackdatingFieldEditable(objectInfo, backdatingFieldName, attributeName, draftActionFieldNames) {
@@ -8257,6 +8271,36 @@ function performQuickActionDraftEnvironment(luvio, env, handler) {
8257
8271
  });
8258
8272
  }
8259
8273
 
8274
+ const PERFORM_UPDATE_RECORD_QUICK_ACTION_ENDPOINT_REGEX = /^\/ui-api\/actions\/perform-quick-action\/.*$/;
8275
+ /**
8276
+ * Checks if a provided resource request is a PATCH operation on the record
8277
+ * endpoint. If so, it returns true indicating that the request should be enqueued instead of
8278
+ * hitting the network.
8279
+ * @param request the resource request
8280
+ */
8281
+ function isRequestPerformUpdateRecordQuickAction(request) {
8282
+ const { basePath, method } = request;
8283
+ return method === 'patch' && PERFORM_UPDATE_RECORD_QUICK_ACTION_ENDPOINT_REGEX.test(basePath);
8284
+ }
8285
+ function performUpdateRecordQuickActionDraftEnvironment(luvio, env, handler) {
8286
+ const dispatchResourceRequest = async function (request, context, eventObservers) {
8287
+ if (isRequestPerformUpdateRecordQuickAction(request) === false) {
8288
+ // only override requests to createRecord endpoint
8289
+ return env.dispatchResourceRequest(request, context, eventObservers);
8290
+ }
8291
+ const { data } = await handler.enqueue(request).catch((err) => {
8292
+ throw createDraftSynthesisErrorResponse(normalizeError$1(err).message);
8293
+ });
8294
+ if (data === undefined) {
8295
+ return Promise.reject(createDraftSynthesisErrorResponse());
8296
+ }
8297
+ return createOkResponse(data);
8298
+ };
8299
+ return create$5(env, {
8300
+ dispatchResourceRequest: { value: dispatchResourceRequest },
8301
+ });
8302
+ }
8303
+
8260
8304
  class UiApiDraftRecordService {
8261
8305
  constructor(draftQueue, getLuvio, durableStore, objectInfoAdapter, generateId, userId, formatDisplayValue) {
8262
8306
  this.recordEffectingHandlers = {};
@@ -8339,7 +8383,7 @@ class UiApiDraftRecordService {
8339
8383
  const key = getRecordKeyForId(this.getLuvio(), id);
8340
8384
  if (referenceMap.has(key) === false &&
8341
8385
  !referenceFields.some((refInfo) => refInfo.id === id)) {
8342
- const record = await this.durableStore.getDenormalizedRecord(key);
8386
+ const record = await getDenormalizedRecord(key, this.durableStore);
8343
8387
  if (record !== undefined) {
8344
8388
  referenceMap.set(key, record);
8345
8389
  }
@@ -8512,6 +8556,132 @@ class QuickActionExecutionRepresentationHandler extends AbstractResourceRequestA
8512
8556
  }
8513
8557
  }
8514
8558
 
8559
+ const UPDATE_RECORD_QUICK_ACTION_HANDLER = 'UPDATE_RECORD_QUICK_ACTION_HANDLER';
8560
+ class UpdateRecordQuickActionExecutionRepresentationHandler extends AbstractResourceRequestActionHandler {
8561
+ constructor(getLuvio, draftRecordService, draftQueue, networkAdapter, durableStore, isDraftId) {
8562
+ super(draftQueue, networkAdapter, getLuvio);
8563
+ this.draftRecordService = draftRecordService;
8564
+ this.durableStore = durableStore;
8565
+ this.isDraftId = isDraftId;
8566
+ this.handlerId = UPDATE_RECORD_QUICK_ACTION_HANDLER;
8567
+ this.getDenormedRecord = (key) => {
8568
+ return getDenormalizedRecord(key, this.durableStore);
8569
+ };
8570
+ draftRecordService.registerRecordHandler(this);
8571
+ }
8572
+ draftActionToDraftOperation(queueEntry) {
8573
+ return [
8574
+ {
8575
+ type: 'update',
8576
+ fields: queueEntry.data.body.fields,
8577
+ apiName: queueEntry.data.body.apiName,
8578
+ draftActionId: queueEntry.id,
8579
+ id: queueEntry.targetId,
8580
+ timestamp: queueEntry.timestamp,
8581
+ },
8582
+ ];
8583
+ }
8584
+ async handleActionEnqueued(action) {
8585
+ // as a side effect of enqueueing an action we have to synthesize
8586
+ // edits to the record in the cache
8587
+ const key = action.tag;
8588
+ const record = await this.getDenormedRecord(key);
8589
+ const draftMetadata = await this.getDraftMetadata(key);
8590
+ const recordWithDrafts = replayDraftsOnRecord(record, draftMetadata);
8591
+ const objectInfo = draftMetadata && draftMetadata.objectInfos.get(recordWithDrafts.apiName);
8592
+ //////
8593
+ // This operation is meant to remove any spanning fields from
8594
+ // the record prior to ingestion
8595
+ if (draftMetadata !== undefined && objectInfo !== undefined) {
8596
+ const fieldNames = keys$5(recordWithDrafts.fields);
8597
+ const fieldsCount = keys$5(fieldNames).length;
8598
+ let newFields = {};
8599
+ for (let i = 0; i < fieldsCount; i++) {
8600
+ const field = objectInfo.fields[fieldNames[i]];
8601
+ if (field !== undefined) {
8602
+ newFields[fieldNames[i]] = recordWithDrafts.fields[fieldNames[i]];
8603
+ }
8604
+ }
8605
+ recordWithDrafts.fields = newFields;
8606
+ }
8607
+ //////
8608
+ await this.ingestResponses([
8609
+ {
8610
+ response: recordWithDrafts,
8611
+ buildCacheKeysForResponse: this.draftRecordService.buildCacheKeysForRecordRepresentation.bind(this.draftRecordService),
8612
+ synchronousIngest: this.draftRecordService.synchronousIngestRecord.bind(this.draftRecordService),
8613
+ },
8614
+ ], action);
8615
+ return super.handleActionEnqueued(action);
8616
+ }
8617
+ canHandlePublish(_key) {
8618
+ return false;
8619
+ }
8620
+ canRepresentationContainDraftMetadata(_representationName) {
8621
+ return false;
8622
+ }
8623
+ getIdFromRequest(request) {
8624
+ const contextId = request.body.contextId;
8625
+ if (typeof contextId !== 'string') {
8626
+ throw Error('contextId not found in request body');
8627
+ }
8628
+ return Promise.resolve(contextId);
8629
+ }
8630
+ getIdFromResponseBody(responseBody) {
8631
+ return responseBody.id;
8632
+ }
8633
+ buildTagForTargetId(id) {
8634
+ return `UiApi::RecordRepresentation:${id}`;
8635
+ }
8636
+ getDataForAction(action) {
8637
+ let contextId = action.data.body.contextId;
8638
+ // populate contextId with user id if not supplied in the body
8639
+ if (contextId === undefined) {
8640
+ contextId = this.draftRecordService.getUserId();
8641
+ }
8642
+ return Promise.resolve({
8643
+ contextId,
8644
+ eTag: 'draft',
8645
+ feedItemId: '',
8646
+ id: action.targetId,
8647
+ isCreated: false,
8648
+ isSuccess: true,
8649
+ successMessage: `record updated.`,
8650
+ });
8651
+ }
8652
+ async getDraftMetadata(key) {
8653
+ const recordId = extractRecordIdFromStoreKey(key);
8654
+ if (recordId === undefined) {
8655
+ return undefined;
8656
+ }
8657
+ const status = await this.draftRecordService.getRecordDraftMetadata(recordId, undefined);
8658
+ return status && status.data;
8659
+ }
8660
+ applyDraftsToIncomingData(key, data, _draftMetadata, publishFn) {
8661
+ publishFn(key, data);
8662
+ }
8663
+ async handleActionRemoved(action) {
8664
+ await this.evictKey(action.tag);
8665
+ }
8666
+ async handleActionCompleted(action, _queueOperations, _allHandlers) {
8667
+ const luvio = this.getLuvio();
8668
+ await luvio.notifyStoreUpdateAvailable([action.tag]);
8669
+ }
8670
+ buildCacheKeysFromResponse(_response) {
8671
+ return new StoreKeyMap();
8672
+ }
8673
+ synchronousIngest(response) {
8674
+ const luvio = this.getLuvio();
8675
+ const key = keyBuilderQuickActionExecutionRepresentation(luvio, {
8676
+ id: response.id,
8677
+ });
8678
+ luvio.storeIngest(key, ingestQuickActionExecutionRepresentation, response);
8679
+ }
8680
+ mergeRequestBody() {
8681
+ throw Error('mergeActions not supported for QuickActionExecutionRepresentationHandler');
8682
+ }
8683
+ }
8684
+
8515
8685
  const CONTENT_DOCUMENT_DRAFT_ID_KEY = 'CONTENT_DOCUMENT_DRAFT_ID';
8516
8686
  const CONTENT_VERSION_DRAFT_ID_KEY = 'CONTENT_VERSION_DRAFT_ID';
8517
8687
  const CONTENT_DOCUMENT_LINK_DRAFT_ID_KEY = 'CONTENT_DOCUMENT_LINK_DRAFT_ID';
@@ -9075,389 +9245,6 @@ function deleteRecordDraftAdapterFactory(luvio, actionHandler) {
9075
9245
  };
9076
9246
  }
9077
9247
 
9078
- /**
9079
- * Copyright (c) 2022, Salesforce, Inc.,
9080
- * All rights reserved.
9081
- * For full license text, see the LICENSE.txt file
9082
- */
9083
-
9084
-
9085
- const { keys: keys$4, values: values$2, create: create$4, assign: assign$4, freeze, entries: entries$4 } = Object;
9086
-
9087
- function buildRecordFieldStoreKey(recordKey, fieldName) {
9088
- return `${recordKey}${RECORD_FIELDS_KEY_JUNCTION$1}${fieldName}`;
9089
- }
9090
- function isStoreKeyRecordId(key) {
9091
- return key.indexOf(RECORD_ID_PREFIX$1) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION$1) === -1;
9092
- }
9093
- function createLink(key) {
9094
- return { __ref: key };
9095
- }
9096
- function isStoreRecordError(storeRecord) {
9097
- return storeRecord.__type === 'error';
9098
- }
9099
- function isEntryDurableRecordRepresentation(entry, key) {
9100
- // Either a DurableRecordRepresentation or StoreRecordError can live at a record key
9101
- return ((isStoreKeyRecordId(key) || isStoreKeyRecordViewEntity(key)) &&
9102
- entry.data.__type === undefined);
9103
- }
9104
- /**
9105
- * Records are stored in the durable store with scalar fields denormalized. This function takes that denoramlized
9106
- * durable store record representation and normalizes it back out into the format the the luvio store expects it
9107
- * @param key Record store key
9108
- * @param entry Durable entry containing a denormalized record representation
9109
- * @returns a set of entries containing the normalized record and its normalized fields
9110
- */
9111
- function normalizeRecordFields(key, entry) {
9112
- const { data: record } = entry;
9113
- const { fields, links } = record;
9114
- const missingFieldLinks = keys$4(links);
9115
- const fieldNames = keys$4(fields);
9116
- const normalizedFields = {};
9117
- const returnEntries = {};
9118
- // restore fields
9119
- for (let i = 0, len = fieldNames.length; i < len; i++) {
9120
- const fieldName = fieldNames[i];
9121
- const field = fields[fieldName];
9122
- const fieldKey = buildRecordFieldStoreKey(key, fieldName);
9123
- returnEntries[fieldKey] = { data: field };
9124
- normalizedFields[fieldName] = createLink(fieldKey);
9125
- }
9126
- // restore missing fields
9127
- for (let i = 0, len = missingFieldLinks.length; i < len; i++) {
9128
- const fieldName = missingFieldLinks[i];
9129
- const link = links[fieldName];
9130
- if (link.isMissing === true) {
9131
- normalizedFields[fieldName] = { ...link, __ref: undefined };
9132
- }
9133
- }
9134
- returnEntries[key] = {
9135
- data: assign$4(record, { fields: normalizedFields }),
9136
- metadata: entry.metadata,
9137
- };
9138
- return returnEntries;
9139
- }
9140
- /**
9141
- * Transforms a record for storage in the durable store. The transformation involves denormalizing
9142
- * scalar fields and persisting link metadata to transform back into a normalized representation
9143
- *
9144
- * If the record contains pending fields this will return undefined as pending records do not get persisted
9145
- * to the durable store. There should be a refresh operation outbound that will bring in the updated record.
9146
- *
9147
- * @param normalizedRecord Record containing normalized field links
9148
- * @param recordStore a store containing referenced record fields
9149
- */
9150
- function buildDurableRecordRepresentation(normalizedRecord, records, pendingEntries, store) {
9151
- const fields = normalizedRecord.fields;
9152
- const filteredFields = {};
9153
- const links = {};
9154
- const fieldNames = keys$4(fields);
9155
- for (let i = 0, len = fieldNames.length; i < len; i++) {
9156
- const fieldName = fieldNames[i];
9157
- const field = fields[fieldName];
9158
- // pending fields get filtered out of the durable store
9159
- const { pending } = field;
9160
- if (pending === true) {
9161
- // do not write records with pending fields to the durable store
9162
- // there should be a refresh operation outbound that will bring in the updated record
9163
- return undefined;
9164
- }
9165
- const { __ref } = field;
9166
- if (__ref !== undefined) {
9167
- let ref = records[__ref];
9168
- if (pendingEntries !== undefined) {
9169
- // If the ref was part of the pending write that takes precedence
9170
- const pendingEntry = pendingEntries[__ref];
9171
- if (pendingEntry !== undefined) {
9172
- ref = pendingEntry.data;
9173
- }
9174
- }
9175
- // if field reference exists then add it to our filteredFields
9176
- if (ref !== undefined) {
9177
- filteredFields[fieldName] = ref;
9178
- }
9179
- else {
9180
- // if we have a store to read, try to find the field there too
9181
- // The durable ingest staging store may pass through to L1, and
9182
- // not all fields are necessarily published every time, so it is
9183
- // important to check L1 and not just the fields being published,
9184
- // otherwise we risk truncating the fields on the record.
9185
- if (store) {
9186
- ref = store.readEntry(__ref);
9187
- if (ref !== undefined) {
9188
- filteredFields[fieldName] = ref;
9189
- }
9190
- }
9191
- }
9192
- }
9193
- // we want to preserve fields that are missing nodes
9194
- if (field.isMissing === true) {
9195
- links[fieldName] = field;
9196
- }
9197
- }
9198
- return {
9199
- ...normalizedRecord,
9200
- fields: filteredFields,
9201
- links,
9202
- };
9203
- }
9204
- function getDenormalizedKey(originalKey, recordId, luvio) {
9205
- // this will likely need to be handled when moving to structured keys
9206
- // note record view entities dont have an associated keybuilder. They get ingested as records to a different key format
9207
- // see the override for how they are handled packages/lds-adapters-uiapi/src/raml-artifacts/types/RecordRepresentation/keyBuilderFromType.ts
9208
- if (originalKey.startsWith(RECORD_VIEW_ENTITY_ID_PREFIX)) {
9209
- return RECORD_VIEW_ENTITY_ID_PREFIX + recordId;
9210
- }
9211
- return keyBuilderRecord$1(luvio, { recordId });
9212
- }
9213
- function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecords, getStoreMetadata, getStore, sqlStore) {
9214
- const getEntries = function (entries, segment) {
9215
- // this HOF only inspects records in the default segment
9216
- if (segment !== DefaultDurableSegment) {
9217
- return durableStore.getEntries(entries, segment);
9218
- }
9219
- const { length: entriesLength } = entries;
9220
- if (entriesLength === 0) {
9221
- return Promise.resolve({});
9222
- }
9223
- // filter out record field keys
9224
- const filteredEntryIds = [];
9225
- // map of records to avoid requesting duplicate record keys when requesting both records and fields
9226
- const recordEntries = {};
9227
- const recordViewEntries = {};
9228
- for (let i = 0, len = entriesLength; i < len; i++) {
9229
- const id = entries[i];
9230
- const recordId = extractRecordIdFromStoreKey$1(id);
9231
- if (recordId !== undefined) {
9232
- if (id.startsWith(RECORD_VIEW_ENTITY_ID_PREFIX)) {
9233
- if (recordViewEntries[recordId] === undefined) {
9234
- const key = getDenormalizedKey(id, recordId, luvio);
9235
- recordViewEntries[recordId] = true;
9236
- filteredEntryIds.push(key);
9237
- }
9238
- }
9239
- else {
9240
- if (recordEntries[recordId] === undefined) {
9241
- const key = getDenormalizedKey(id, recordId, luvio);
9242
- recordEntries[recordId] = true;
9243
- filteredEntryIds.push(key);
9244
- }
9245
- }
9246
- }
9247
- else {
9248
- filteredEntryIds.push(id);
9249
- }
9250
- }
9251
- // call base getEntries
9252
- return durableStore.getEntries(filteredEntryIds, segment).then((durableEntries) => {
9253
- if (durableEntries === undefined) {
9254
- return undefined;
9255
- }
9256
- const returnEntries = create$4(null);
9257
- const keys$1 = keys$4(durableEntries);
9258
- for (let i = 0, len = keys$1.length; i < len; i++) {
9259
- const key = keys$1[i];
9260
- const value = durableEntries[key];
9261
- if (value === undefined) {
9262
- continue;
9263
- }
9264
- if (isEntryDurableRecordRepresentation(value, key)) {
9265
- assign$4(returnEntries, normalizeRecordFields(key, value));
9266
- }
9267
- else {
9268
- returnEntries[key] = value;
9269
- }
9270
- }
9271
- return returnEntries;
9272
- });
9273
- };
9274
- const denormalizeEntries = function (entries) {
9275
- let hasEntries = false;
9276
- let hasMetadata = false;
9277
- const putEntries = create$4(null);
9278
- const putMetadata = create$4(null);
9279
- const keys$1 = keys$4(entries);
9280
- const putRecords = {};
9281
- const putRecordViews = {};
9282
- const storeRecords = getStoreRecords !== undefined ? getStoreRecords() : {};
9283
- const storeMetadata = getStoreMetadata !== undefined ? getStoreMetadata() : {};
9284
- const store = getStore();
9285
- for (let i = 0, len = keys$1.length; i < len; i++) {
9286
- const key = keys$1[i];
9287
- let value = entries[key];
9288
- const recordId = extractRecordIdFromStoreKey$1(key);
9289
- // do not put normalized field values
9290
- if (recordId !== undefined) {
9291
- const isRecordView = key.startsWith(RECORD_VIEW_ENTITY_ID_PREFIX);
9292
- if (isRecordView) {
9293
- if (putRecordViews[recordId] === true) {
9294
- continue;
9295
- }
9296
- }
9297
- else {
9298
- if (putRecords[recordId] === true) {
9299
- continue;
9300
- }
9301
- }
9302
- const recordKey = getDenormalizedKey(key, recordId, luvio);
9303
- const recordEntries = entries;
9304
- const entry = recordEntries[recordKey];
9305
- let record = entry && entry.data;
9306
- if (record === undefined) {
9307
- record = storeRecords[recordKey];
9308
- if (record === undefined) {
9309
- // fields are being published without a record for them existing,
9310
- // fields cannot exist standalone in the durable store
9311
- continue;
9312
- }
9313
- }
9314
- if (isRecordView) {
9315
- putRecordViews[recordId] = true;
9316
- }
9317
- else {
9318
- putRecords[recordId] = true;
9319
- }
9320
- if (isStoreRecordError(record)) {
9321
- hasEntries = true;
9322
- putEntries[recordKey] = value;
9323
- continue;
9324
- }
9325
- let metadata = entry && entry.metadata;
9326
- if (metadata === undefined) {
9327
- metadata = {
9328
- ...storeMetadata[recordKey],
9329
- metadataVersion: DURABLE_METADATA_VERSION,
9330
- };
9331
- }
9332
- const denormalizedRecord = buildDurableRecordRepresentation(record, storeRecords, recordEntries, store);
9333
- if (denormalizedRecord !== undefined) {
9334
- hasEntries = true;
9335
- putEntries[recordKey] = {
9336
- data: denormalizedRecord,
9337
- metadata,
9338
- };
9339
- // if undefined then it is pending
9340
- // we should still update metadata on pending records
9341
- }
9342
- else {
9343
- hasMetadata = true;
9344
- metadata.expirationTimestamp = metadata.ingestionTimestamp;
9345
- putMetadata[recordKey] = {
9346
- metadata,
9347
- };
9348
- }
9349
- }
9350
- else {
9351
- hasEntries = true;
9352
- putEntries[key] = value;
9353
- }
9354
- }
9355
- return { putEntries, putMetadata, hasEntries, hasMetadata };
9356
- };
9357
- const setEntries = function (entries, segment) {
9358
- if (segment !== DefaultDurableSegment) {
9359
- return durableStore.setEntries(entries, segment);
9360
- }
9361
- const { putEntries, putMetadata, hasEntries, hasMetadata } = denormalizeEntries(entries);
9362
- const promises = [
9363
- hasEntries ? durableStore.setEntries(putEntries, segment) : undefined,
9364
- ];
9365
- if (sqlStore !== undefined && sqlStore.isBatchUpdateSupported()) {
9366
- promises.push(hasMetadata && sqlStore !== undefined
9367
- ? durableStore.setMetadata(putMetadata, segment)
9368
- : undefined);
9369
- }
9370
- return Promise.all(promises).then(() => { });
9371
- };
9372
- const batchOperations = function (operations) {
9373
- const operationsWithDenormedRecords = [];
9374
- for (let i = 0, len = operations.length; i < len; i++) {
9375
- const operation = operations[i];
9376
- if (operation.type === 'setMetadata') {
9377
- // if setMetadata also contains entry data then it needs to be denormalized.
9378
- const keys$1 = keys$4(operation.entries);
9379
- if (keys$1.length > 0) {
9380
- const firstKey = keys$1[0];
9381
- // casted to any to check if data exists
9382
- const firstEntry = operation.entries[firstKey];
9383
- // it is not possible for setMetadata to contain entries with both data and no data in the same operation.
9384
- // this is determined by the plugin supporting update batch calls before it gets to this HOF.
9385
- // so we only need to check one entry to confirm this for performance
9386
- if (firstEntry.data !== undefined) {
9387
- const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
9388
- operationsWithDenormedRecords.push({
9389
- ...operation,
9390
- entries: putEntries,
9391
- });
9392
- if (hasMetadata &&
9393
- sqlStore !== undefined &&
9394
- sqlStore.isBatchUpdateSupported() === true) {
9395
- operationsWithDenormedRecords.push({
9396
- ...operation,
9397
- entries: putMetadata,
9398
- type: 'setMetadata',
9399
- });
9400
- }
9401
- }
9402
- else {
9403
- operationsWithDenormedRecords.push(operation);
9404
- }
9405
- }
9406
- continue;
9407
- }
9408
- if (operation.segment !== DefaultDurableSegment || operation.type === 'evictEntries') {
9409
- operationsWithDenormedRecords.push(operation);
9410
- continue;
9411
- }
9412
- const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
9413
- operationsWithDenormedRecords.push({
9414
- ...operation,
9415
- entries: putEntries,
9416
- });
9417
- if (hasMetadata &&
9418
- sqlStore !== undefined &&
9419
- sqlStore.isBatchUpdateSupported() === true) {
9420
- operationsWithDenormedRecords.push({
9421
- ...operation,
9422
- entries: putMetadata,
9423
- type: 'setMetadata',
9424
- });
9425
- }
9426
- }
9427
- return durableStore.batchOperations(operationsWithDenormedRecords);
9428
- };
9429
- /**
9430
- * Retrieves a denormalized record from the store
9431
- * NOTE: do no use this if you don't know what you're doing, this can still contain normalized record references
9432
- * @param recordKey record key
9433
- * @param durableStore the durable store
9434
- * @returns a DraftRecordRepresentation containing the requested fields
9435
- */
9436
- const getDenormalizedRecord = function (recordKey) {
9437
- return durableStore.getEntries([recordKey], DefaultDurableSegment).then((entries) => {
9438
- if (entries === undefined) {
9439
- return undefined;
9440
- }
9441
- const denormalizedEntry = entries[recordKey];
9442
- if (denormalizedEntry === undefined) {
9443
- return undefined;
9444
- }
9445
- // don't include link information
9446
- const denormalizedRecord = denormalizedEntry.data;
9447
- if (isStoreRecordError(denormalizedRecord)) {
9448
- return undefined;
9449
- }
9450
- return denormalizedRecord;
9451
- });
9452
- };
9453
- return create$4(durableStore, {
9454
- getEntries: { value: getEntries, writable: true },
9455
- setEntries: { value: setEntries, writable: true },
9456
- batchOperations: { value: batchOperations, writable: true },
9457
- getDenormalizedRecord: { value: getDenormalizedRecord, writable: true },
9458
- });
9459
- }
9460
-
9461
9248
  function serializeFieldArguments(argumentNodes, variables) {
9462
9249
  const mutableArgumentNodes = Object.assign([], argumentNodes);
9463
9250
  return `args__(${mutableArgumentNodes
@@ -9570,6 +9357,69 @@ function getOperationFromDocument(document, operationName) {
9570
9357
  return operations[0]; // If a named operation is not provided, we return the first one
9571
9358
  }
9572
9359
 
9360
+ /**
9361
+ * The set of allowed kind values for AST nodes.
9362
+ */
9363
+ var Kind = Object.freeze({
9364
+ // Name
9365
+ NAME: 'Name',
9366
+ // Document
9367
+ DOCUMENT: 'Document',
9368
+ OPERATION_DEFINITION: 'OperationDefinition',
9369
+ VARIABLE_DEFINITION: 'VariableDefinition',
9370
+ SELECTION_SET: 'SelectionSet',
9371
+ FIELD: 'Field',
9372
+ ARGUMENT: 'Argument',
9373
+ // Fragments
9374
+ FRAGMENT_SPREAD: 'FragmentSpread',
9375
+ INLINE_FRAGMENT: 'InlineFragment',
9376
+ FRAGMENT_DEFINITION: 'FragmentDefinition',
9377
+ // Values
9378
+ VARIABLE: 'Variable',
9379
+ INT: 'IntValue',
9380
+ FLOAT: 'FloatValue',
9381
+ STRING: 'StringValue',
9382
+ BOOLEAN: 'BooleanValue',
9383
+ NULL: 'NullValue',
9384
+ ENUM: 'EnumValue',
9385
+ LIST: 'ListValue',
9386
+ OBJECT: 'ObjectValue',
9387
+ OBJECT_FIELD: 'ObjectField',
9388
+ // Directives
9389
+ DIRECTIVE: 'Directive',
9390
+ // Types
9391
+ NAMED_TYPE: 'NamedType',
9392
+ LIST_TYPE: 'ListType',
9393
+ NON_NULL_TYPE: 'NonNullType',
9394
+ // Type System Definitions
9395
+ SCHEMA_DEFINITION: 'SchemaDefinition',
9396
+ OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition',
9397
+ // Type Definitions
9398
+ SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition',
9399
+ OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition',
9400
+ FIELD_DEFINITION: 'FieldDefinition',
9401
+ INPUT_VALUE_DEFINITION: 'InputValueDefinition',
9402
+ INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition',
9403
+ UNION_TYPE_DEFINITION: 'UnionTypeDefinition',
9404
+ ENUM_TYPE_DEFINITION: 'EnumTypeDefinition',
9405
+ ENUM_VALUE_DEFINITION: 'EnumValueDefinition',
9406
+ INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition',
9407
+ // Directive Definitions
9408
+ DIRECTIVE_DEFINITION: 'DirectiveDefinition',
9409
+ // Type System Extensions
9410
+ SCHEMA_EXTENSION: 'SchemaExtension',
9411
+ // Type Extensions
9412
+ SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension',
9413
+ OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension',
9414
+ INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension',
9415
+ UNION_TYPE_EXTENSION: 'UnionTypeExtension',
9416
+ ENUM_TYPE_EXTENSION: 'EnumTypeExtension',
9417
+ INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension'
9418
+ });
9419
+ /**
9420
+ * The enum type representing the possible kind values of AST nodes.
9421
+ */
9422
+
9573
9423
  /**
9574
9424
  * Copyright (c) 2022, Salesforce, Inc.,
9575
9425
  * All rights reserved.
@@ -9712,7 +9562,7 @@ function isArrayLike(x) {
9712
9562
  (x.length === 0 || (x.length > 0 && Object.prototype.hasOwnProperty.call(x, x.length - 1))));
9713
9563
  }
9714
9564
 
9715
- const { create: create$3, keys: keys$3, values: values$1, entries: entries$3, assign: assign$3 } = Object;
9565
+ const { create: create$4, keys: keys$4, values: values$2, entries: entries$4, assign: assign$4 } = Object;
9716
9566
  const { stringify: stringify$3, parse: parse$3 } = JSON;
9717
9567
  const { isArray: isArray$1, from: from$1 } = Array;
9718
9568
 
@@ -10066,7 +9916,7 @@ function dateTimePredicate(input, operator, field, alias) {
10066
9916
  return predicate;
10067
9917
  }
10068
9918
  else if (literal !== undefined) {
10069
- const isAvailableLiteral = values$1(DateLiteral).includes(literal);
9919
+ const isAvailableLiteral = values$2(DateLiteral).includes(literal);
10070
9920
  // eslint-disable-next-line @salesforce/lds/no-error-in-production
10071
9921
  if (!isAvailableLiteral)
10072
9922
  throw new Error(`${literal} is not a valid DateLiteral`);
@@ -10080,7 +9930,7 @@ function dateTimePredicate(input, operator, field, alias) {
10080
9930
  }
10081
9931
  function dateTimeRange(input, op, field, alias) {
10082
9932
  const dateFunction = field.dataType === 'DateTime' ? 'datetime' : 'date';
10083
- const key = keys$3(input)[0];
9933
+ const key = keys$4(input)[0];
10084
9934
  let operator = op;
10085
9935
  if (operator === '=')
10086
9936
  operator = 'BETWEEN';
@@ -10381,7 +10231,7 @@ function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draf
10381
10231
  if (!where)
10382
10232
  return [];
10383
10233
  let predicates = [];
10384
- const fields = keys$3(where);
10234
+ const fields = keys$4(where);
10385
10235
  for (const field of fields) {
10386
10236
  if (field === 'and' || field === 'or') {
10387
10237
  predicates.push(processCompoundPredicate(field, where[field], recordType, alias, objectInfoMap, joins));
@@ -10430,7 +10280,7 @@ function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draf
10430
10280
  }
10431
10281
  else {
10432
10282
  // @W-12618378 polymorphic query sometimes does not work as expected on server. The GQL on certain entities could fail.
10433
- const entityNames = keys$3(where[field]);
10283
+ const entityNames = keys$4(where[field]);
10434
10284
  const polyPredicatesGroups = entityNames
10435
10285
  .filter((entityName) => fieldInfo.referenceToInfos.some((referenceInfo) => referenceInfo.apiName === entityName))
10436
10286
  .map((entityName) => {
@@ -10460,7 +10310,7 @@ function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draf
10460
10310
  }
10461
10311
  else {
10462
10312
  //`field` match the filedInfo's apiName
10463
- for (const [op, value] of entries$3(where[field])) {
10313
+ for (const [op, value] of entries$4(where[field])) {
10464
10314
  const operator = operatorToSql(op);
10465
10315
  /**
10466
10316
  Two types ID processing might be needed. Draft ID swapping is optional, which depends on DraftFunctions existence.
@@ -10914,18 +10764,15 @@ function buildQuery(config) {
10914
10764
  const joins = buildJoins(config);
10915
10765
  const predicates = buildPredicates(config);
10916
10766
  const orderBy = buildOrderBy(config);
10917
- const staleRecordsSql = excludeStaleRecordsGate.isOpen({ fallback: false })
10918
- ? `AND (
10919
- json_extract("${config.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ?
10920
- OR json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL
10921
- )`
10922
- : '';
10923
10767
  const sql = `
10924
10768
  SELECT "${config.alias}".data
10925
10769
  FROM lds_data "${config.alias}" ${joins.sql}
10926
10770
  WHERE "${config.alias}".key like 'UiApi::RecordRepresentation:%'
10927
10771
  AND json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_INGESTION_APINAME}') = '${config.alias}'
10928
- ${staleRecordsSql}
10772
+ AND (
10773
+ json_extract("${config.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ?
10774
+ OR json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL
10775
+ )
10929
10776
  ${predicates.sql}
10930
10777
  ${orderBy.sql}
10931
10778
  LIMIT ?
@@ -10937,7 +10784,7 @@ function buildQuery(config) {
10937
10784
  const bindings = [
10938
10785
  // bindings from predicates on joins
10939
10786
  ...joins.bindings,
10940
- ...(excludeStaleRecordsGate.isOpen({ fallback: false }) ? [config.ingestionTimestamp] : []),
10787
+ config.ingestionTimestamp,
10941
10788
  // where clause and parent scope bindings
10942
10789
  ...predicates.bindings,
10943
10790
  // limit binding
@@ -10951,7 +10798,7 @@ function dedupeJoins(joins) {
10951
10798
  for (const join of joins) {
10952
10799
  deduped[join.alias + join.to] = join;
10953
10800
  }
10954
- return values$1(deduped);
10801
+ return values$2(deduped);
10955
10802
  }
10956
10803
  function buildJoins(config) {
10957
10804
  let sql = '';
@@ -10967,16 +10814,13 @@ function buildJoins(config) {
10967
10814
  let timestampAdded = false;
10968
10815
  const joinConditions = join.conditions.reduce((conditionAccumulator, condition) => {
10969
10816
  let joined_sql;
10970
- const joinMetadataTimestamp = excludeStaleRecordsGate.isOpen({ fallback: false })
10971
- ? ` AND (json_extract("${join.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ? OR json_extract("${join.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL)`
10972
- : '';
10817
+ const joinMetadataTimestamp = ` AND (json_extract("${join.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ? OR json_extract("${join.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL)`;
10973
10818
  // predicate on a value, use the newly joined table
10974
10819
  if ('type' in condition) {
10975
10820
  const { sql, binding } = predicateToSQL(condition, join.alias);
10976
10821
  joined_sql = ` AND ${sql}${timestampAdded ? '' : joinMetadataTimestamp}`;
10977
10822
  bindings.push(...binding);
10978
- if (excludeStaleRecordsGate.isOpen({ fallback: false }) &&
10979
- timestampAdded === false) {
10823
+ if (timestampAdded === false) {
10980
10824
  bindings.push(config.ingestionTimestamp);
10981
10825
  timestampAdded = true;
10982
10826
  }
@@ -10986,8 +10830,7 @@ function buildJoins(config) {
10986
10830
  const left = ` AND json_extract("${join.to}".data, '${condition.leftPath}')`;
10987
10831
  const right = `json_extract("${join.alias}".data, '${condition.rightPath}')`;
10988
10832
  joined_sql = `${left} = ${right}${timestampAdded ? '' : joinMetadataTimestamp}`;
10989
- if (excludeStaleRecordsGate.isOpen({ fallback: false }) &&
10990
- timestampAdded === false) {
10833
+ if (timestampAdded === false) {
10991
10834
  bindings.push(config.ingestionTimestamp);
10992
10835
  timestampAdded = true;
10993
10836
  }
@@ -11041,19 +10884,19 @@ function isObjectValueNode(node) {
11041
10884
  return node.kind === 'ObjectValue';
11042
10885
  }
11043
10886
  function isStringValueNode(node) {
11044
- return node.kind === Kind.STRING;
10887
+ return node.kind === Kind$1.STRING;
11045
10888
  }
11046
10889
  function isFieldNode(node) {
11047
10890
  return node !== undefined && node.kind !== undefined ? node.kind === 'Field' : false;
11048
10891
  }
11049
10892
  function isFieldOrInlineFragmentNode(node) {
11050
10893
  return node !== undefined && node.kind !== undefined
11051
- ? node.kind === 'Field' || node.kind === Kind.INLINE_FRAGMENT
10894
+ ? node.kind === 'Field' || node.kind === Kind$1.INLINE_FRAGMENT
11052
10895
  : false;
11053
10896
  }
11054
10897
  function isInlineFragmentNode(node) {
11055
10898
  return node !== undefined && node.kind !== undefined
11056
- ? node.kind === Kind.INLINE_FRAGMENT
10899
+ ? node.kind === Kind$1.INLINE_FRAGMENT
11057
10900
  : false;
11058
10901
  }
11059
10902
  function isCompoundPredicate(predicate) {
@@ -11257,9 +11100,9 @@ function isCapableRelationship(node) {
11257
11100
  function isScopeArgumentNodeWithType(node, scopeType, variables) {
11258
11101
  if (node.name.value !== 'scope')
11259
11102
  return false;
11260
- if (node.value.kind !== Kind.ENUM && node.value.kind !== Kind.VARIABLE)
11103
+ if (node.value.kind !== Kind$1.ENUM && node.value.kind !== Kind$1.VARIABLE)
11261
11104
  return false;
11262
- if (node.value.kind === Kind.ENUM) {
11105
+ if (node.value.kind === Kind$1.ENUM) {
11263
11106
  if (node.value.value === scopeType) {
11264
11107
  return true;
11265
11108
  }
@@ -11366,7 +11209,7 @@ function isInlineFragmentFieldSpanning(node) {
11366
11209
  if (!node.selectionSet)
11367
11210
  return false;
11368
11211
  return node.selectionSet.selections.some((selection) => {
11369
- if (selection.kind !== Kind.INLINE_FRAGMENT)
11212
+ if (selection.kind !== Kind$1.INLINE_FRAGMENT)
11370
11213
  return false;
11371
11214
  return isFieldSpanning(selection, node);
11372
11215
  });
@@ -11629,7 +11472,7 @@ function depth(json, currentLevel = 0) {
11629
11472
  if (typeof json !== 'object') {
11630
11473
  return currentLevel;
11631
11474
  }
11632
- const keys$1 = keys$3(json);
11475
+ const keys$1 = keys$4(json);
11633
11476
  if (keys$1.length === 0)
11634
11477
  return 0;
11635
11478
  const depths = keys$1.map((key) => {
@@ -11642,7 +11485,7 @@ function flatten(previous, current) {
11642
11485
  return previous.concat(current);
11643
11486
  }
11644
11487
  function findFieldInfo(objectInfo, fieldName) {
11645
- return values$1(objectInfo.fields).find((field) => field.apiName === fieldName ||
11488
+ return values$2(objectInfo.fields).find((field) => field.apiName === fieldName ||
11646
11489
  (field.dataType === 'Reference' && field.relationshipName === fieldName));
11647
11490
  }
11648
11491
  async function readIngestionTimestampForKey(key, query) {
@@ -11659,6 +11502,12 @@ async function readIngestionTimestampForKey(key, query) {
11659
11502
  }
11660
11503
  return ingestionTimestamp;
11661
11504
  }
11505
+ async function readPaginationMetadataForKey(key, query) {
11506
+ const sql = `SELECT data FROM lds_data WHERE key=?`;
11507
+ const results = await query(sql, [key + '__pagination']);
11508
+ const [paginationMetadata] = results.rows.map((row) => parse$3(row[0]));
11509
+ return paginationMetadata || {};
11510
+ }
11662
11511
 
11663
11512
  function findSpanningField(name) {
11664
11513
  return (field) => {
@@ -11672,14 +11521,14 @@ function orderByToPredicate(orderBy, recordType, alias, objectInfoMap, joins) {
11672
11521
  return predicates;
11673
11522
  const isSpanning = depth(orderBy) > 2;
11674
11523
  if (isSpanning) {
11675
- const keys$1 = keys$3(orderBy);
11524
+ const keys$1 = keys$4(orderBy);
11676
11525
  for (let i = 0, len = keys$1.length; i < len; i++) {
11677
11526
  const key = keys$1[i];
11678
11527
  const parentFields = objectInfoMap[recordType].fields;
11679
- const fieldInfo = values$1(parentFields).find(findSpanningField(key));
11528
+ const fieldInfo = values$2(parentFields).find(findSpanningField(key));
11680
11529
  if (fieldInfo && fieldInfo.referenceToInfos.length > 0) {
11681
11530
  const { apiName } = fieldInfo.referenceToInfos[0];
11682
- const parentFieldInfo = values$1(objectInfoMap[recordType].fields).find(findSpanningField(fieldInfo.apiName));
11531
+ const parentFieldInfo = values$2(objectInfoMap[recordType].fields).find(findSpanningField(fieldInfo.apiName));
11683
11532
  if (parentFieldInfo !== undefined) {
11684
11533
  const path = {
11685
11534
  leftPath: `$.fields.${parentFieldInfo.apiName}.value`,
@@ -11702,7 +11551,7 @@ function orderByToPredicate(orderBy, recordType, alias, objectInfoMap, joins) {
11702
11551
  }
11703
11552
  }
11704
11553
  else {
11705
- const keys$1 = keys$3(orderBy);
11554
+ const keys$1 = keys$4(orderBy);
11706
11555
  for (let i = 0, len = keys$1.length; i < len; i++) {
11707
11556
  const key = keys$1[i];
11708
11557
  if (!objectInfoMap[recordType])
@@ -11862,21 +11711,23 @@ function cursorResolver(source) {
11862
11711
  return encodeV1Cursor(source.index);
11863
11712
  }
11864
11713
  function pageInfoResolver(source) {
11865
- if (source.length === 0) {
11714
+ if (source.records.length === 0) {
11866
11715
  return {
11867
11716
  startCursor: null,
11868
11717
  endCursor: null,
11718
+ hasNextPage: false,
11869
11719
  };
11870
11720
  }
11871
- let startIndex = source[0].index;
11872
- let endIndex = source[source.length - 1].index;
11721
+ let startIndex = source.records[0].index;
11722
+ let endIndex = source.records[source.records.length - 1].index;
11873
11723
  return {
11874
11724
  startCursor: encodeV1Cursor(startIndex),
11875
11725
  endCursor: encodeV1Cursor(endIndex),
11726
+ hasNextPage: source.hasNextPage,
11876
11727
  };
11877
11728
  }
11878
11729
  function pageResultCountResolver(source) {
11879
- return source.length;
11730
+ return source.records.length;
11880
11731
  }
11881
11732
  function encodeV1Cursor(index) {
11882
11733
  return base64encode(`v1:${index}`);
@@ -11895,17 +11746,53 @@ function decodeV1Cursor(base64cursor) {
11895
11746
  }
11896
11747
  return Number(found.groups.index);
11897
11748
  }
11749
+ /**
11750
+ * Check the selections for any selection matching `pageInfo { hasNextPage }`
11751
+ */
11752
+ function selectionIncludesHasNextPage(selections, fragments) {
11753
+ for (let selection of selections) {
11754
+ switch (selection.kind) {
11755
+ case Kind.FIELD: {
11756
+ if (selection.name.value === 'pageInfo') {
11757
+ if (!selection.selectionSet)
11758
+ continue;
11759
+ return selectionIncludesHasNextPage(selection.selectionSet.selections, fragments);
11760
+ }
11761
+ if (selection.name.value === 'hasNextPage') {
11762
+ return true;
11763
+ }
11764
+ break;
11765
+ }
11766
+ case Kind.FRAGMENT_SPREAD: {
11767
+ let fragment = fragments[selection.name.value];
11768
+ if (!fragment)
11769
+ return false;
11770
+ if (selectionIncludesHasNextPage(fragment.selectionSet.selections, fragments)) {
11771
+ return true;
11772
+ }
11773
+ break;
11774
+ }
11775
+ case Kind.INLINE_FRAGMENT:
11776
+ if (selectionIncludesHasNextPage(selection.selectionSet.selections, fragments)) {
11777
+ return true;
11778
+ }
11779
+ break;
11780
+ }
11781
+ }
11782
+ return false;
11783
+ }
11898
11784
 
11899
11785
  /*
11900
11786
  resolves connections...
11901
11787
  */
11902
11788
  async function connectionResolver(obj, args, context, info) {
11903
11789
  let { recordRepresentation: parentRecord, ingestionTimestamp = 0 } = obj;
11904
- if (!parentRecord && excludeStaleRecordsGate.isOpen({ fallback: false })) {
11790
+ let queryCacheKey = buildKeyStringForRecordQuery(info.operation, info.variableValues, info.fieldNodes[0].arguments || [], info.fieldName);
11791
+ const { query, objectInfos, draftFunctions } = context;
11792
+ if (!parentRecord) {
11905
11793
  // at our record query we fetch each ingestion time stamp and pass it down to each lower resolver to query against
11906
- ingestionTimestamp = await fetchIngestionTimeStampFromDatabase(info.fieldName, info, args, context.query);
11794
+ ingestionTimestamp = await readIngestionTimestampForKey(queryCacheKey, query);
11907
11795
  }
11908
- const { query, objectInfos, draftFunctions } = context;
11909
11796
  let joins = [];
11910
11797
  let alias = info.fieldName;
11911
11798
  let childRelationshipFieldName = undefined;
@@ -11917,7 +11804,7 @@ async function connectionResolver(obj, args, context, info) {
11917
11804
  parentObjectInfo.childRelationships.find((rel) => rel.relationshipName === info.fieldName);
11918
11805
  // or emit/throw if we want to report it
11919
11806
  if (!childRelationship)
11920
- return [];
11807
+ return { records: [], hasNextPage: false };
11921
11808
  alias = childRelationship.childObjectApiName;
11922
11809
  childRelationshipFieldName = childRelationship.fieldName;
11923
11810
  }
@@ -11929,24 +11816,51 @@ async function connectionResolver(obj, args, context, info) {
11929
11816
  ];
11930
11817
  const scopeJoins = scopeToJoins(args.scope, context.settings);
11931
11818
  joins.push(...scopeJoins);
11819
+ // Limit defaults to 10 records if unspecified
11820
+ let limit = 10;
11821
+ if (args.first) {
11822
+ limit = args.first;
11823
+ }
11932
11824
  let offset = 0;
11933
11825
  if (args.after) {
11934
11826
  offset = decodeV1Cursor(args.after) + 1;
11935
11827
  }
11828
+ // if the query wants to know `hasNextPage` then we need to request 1 additional record
11829
+ let selections = info.fieldNodes
11830
+ .map((n) => (n.selectionSet ? n.selectionSet.selections : []))
11831
+ .flat();
11832
+ let wantsHasNextPage = selectionIncludesHasNextPage(selections, info.fragments);
11833
+ let paginationMetadata = undefined;
11834
+ if (wantsHasNextPage) {
11835
+ paginationMetadata = await readPaginationMetadataForKey(queryCacheKey, query);
11836
+ }
11837
+ let internalLimit = limit + (wantsHasNextPage ? 1 : 0);
11936
11838
  // Alias starts as entity's ApiName
11937
11839
  const queryConfig = {
11938
11840
  alias,
11939
11841
  joins,
11940
11842
  predicates,
11941
11843
  orderBy: orderByToPredicate(args.orderBy, alias, alias, context.objectInfos),
11942
- limit: args.first,
11943
- offset: offset,
11844
+ limit: internalLimit,
11845
+ offset,
11944
11846
  ingestionTimestamp,
11945
11847
  };
11946
11848
  const { sql, bindings } = buildQuery(queryConfig);
11947
11849
  const results = await query(sql, bindings);
11850
+ let hasNextPage = false;
11851
+ if (wantsHasNextPage) {
11852
+ if (results.rows.length > limit) {
11853
+ // more records exist in the cache
11854
+ hasNextPage = true;
11855
+ results.rows.pop();
11856
+ }
11857
+ else if (!paginationMetadata || paginationMetadata.__END__ === undefined) {
11858
+ // more records may exist on the server
11859
+ hasNextPage = true;
11860
+ }
11861
+ }
11948
11862
  //map each sql result with the ingestion timestamp to pass it down a level
11949
- return results.rows
11863
+ let records = results.rows
11950
11864
  .map((row) => parse$3(row[0]))
11951
11865
  .map((recordRepresentation, index) => {
11952
11866
  context.seenRecordIds.add(recordRepresentation.id);
@@ -11956,6 +11870,10 @@ async function connectionResolver(obj, args, context, info) {
11956
11870
  index: index + offset,
11957
11871
  };
11958
11872
  });
11873
+ return {
11874
+ records,
11875
+ hasNextPage,
11876
+ };
11959
11877
  }
11960
11878
  /**
11961
11879
  * Converts a childRelationship into a predicate
@@ -11978,26 +11896,10 @@ function childRelationshipToPredicates(childRelationshipFieldName, parentId) {
11978
11896
  return predicates;
11979
11897
  }
11980
11898
  /**
11981
- * fetches a query level ingestion time stamp from the L2 cache
11982
- * if no query has been seen then the timestamp is 0
11983
- * @param apiName
11984
- * @param info
11985
- * @param args
11986
- * @param query
11987
- * @returns
11899
+ * Returns just the records array of a connection result, used for edges field
11988
11900
  */
11989
- async function fetchIngestionTimeStampFromDatabase(apiName, info, args, query) {
11990
- const { operation, variableValues } = info;
11991
- // if we cannot find the query key in the database then default to 0 as we assume we have not seen the query
11992
- // and all the data is not stale
11993
- let ingestionTimestamp = 0;
11994
- if (info.fieldNodes.length > 0 && info.fieldNodes[0].arguments !== undefined) {
11995
- const key = buildKeyStringForRecordQuery(operation,
11996
- // join varables passed from query to the argument variables given from the AST
11997
- { ...variableValues, ...args }, info.fieldNodes[0].arguments, apiName);
11998
- return readIngestionTimestampForKey(key, query);
11999
- }
12000
- return ingestionTimestamp;
11901
+ function edgesResolver(obj) {
11902
+ return obj.records;
12001
11903
  }
12002
11904
  /**
12003
11905
  * Builds the top level record query key based on AST data
@@ -12017,7 +11919,7 @@ function buildKeyStringForRecordQuery(operation, variables, argumentNodes, curre
12017
11919
  variables,
12018
11920
  fragmentMap: {},
12019
11921
  });
12020
- const filteredArgumentNodes = assign$3([], argumentNodes).filter((node) => node.name.value !== 'first' && node.name.value !== 'after');
11922
+ const filteredArgumentNodes = assign$4([], argumentNodes).filter((node) => node.name.value !== 'first' && node.name.value !== 'after');
12021
11923
  const argumentString = filteredArgumentNodes.length > 0
12022
11924
  ? '__' + serializeFieldArguments(filteredArgumentNodes, variables)
12023
11925
  : '';
@@ -12040,7 +11942,7 @@ function addResolversToSchema(schema, polyFields) {
12040
11942
  let baseRecord = undefined;
12041
11943
  // Concrete types for Polymorphic field
12042
11944
  const polyTypes = [];
12043
- for (const type of values$1(schema.getTypeMap())) {
11945
+ for (const type of values$2(schema.getTypeMap())) {
12044
11946
  if (type.name === 'Record') {
12045
11947
  recordInterface = type;
12046
11948
  }
@@ -12053,7 +11955,7 @@ function addResolversToSchema(schema, polyFields) {
12053
11955
  if (polyFields.find((fieldTypeName) => fieldTypeName === type.name) !== undefined) {
12054
11956
  polyTypes.push(type);
12055
11957
  }
12056
- const fields = values$1(type.getFields());
11958
+ const fields = values$2(type.getFields());
12057
11959
  // initialize the fields of current type with default behavior
12058
11960
  for (const field of fields) {
12059
11961
  field.resolve = defaultFieldResolver;
@@ -12114,7 +12016,7 @@ function addResolversToSchema(schema, polyFields) {
12114
12016
  for (const field of fields) {
12115
12017
  switch (field.name) {
12116
12018
  case 'edges':
12117
- field.resolve = passThroughResolver;
12019
+ field.resolve = edgesResolver;
12118
12020
  break;
12119
12021
  case 'pageInfo':
12120
12022
  field.resolve = pageInfoResolver;
@@ -12432,7 +12334,7 @@ function generateRecordQueries(schema, objectInfoMap) {
12432
12334
  // use a set to not allow duplicate scalars causing error(s)
12433
12335
  let addedTypedScalars = new Set();
12434
12336
  let allPolymorphicFieldTypeNames = new Set();
12435
- for (const name of keys$3(objectInfoMap)) {
12337
+ for (const name of keys$4(objectInfoMap)) {
12436
12338
  const objectInfo = objectInfoMap[name];
12437
12339
  const { apiName } = objectInfo;
12438
12340
  const type = schema.getType(apiName);
@@ -12501,7 +12403,7 @@ function createNewRecordQuery(schema, objectInfo, objectInfoMap) {
12501
12403
  const { apiName, childRelationships, fields: fieldsRepresentation } = objectInfo;
12502
12404
  typedScalars.add(`${apiName}_Filter`);
12503
12405
  typedScalars.add(`${apiName}_OrderBy`);
12504
- const { fields, polymorphicFieldTypeNames } = makeRecordField(values$1(fieldsRepresentation), objectInfo, objectInfoMap, parentRelationshipFields, 'Missing');
12406
+ const { fields, polymorphicFieldTypeNames } = makeRecordField(values$2(fieldsRepresentation), objectInfo, objectInfoMap, parentRelationshipFields, 'Missing');
12505
12407
  // handles child relationship
12506
12408
  const { spanningRecordConnections, typedScalars: spanningConnectionTypedScalars } = makeSpanningRecordConnections(schema, childRelationships, objectInfoMap, parentRelationshipFields);
12507
12409
  typedScalars = new Set([...typedScalars, ...spanningConnectionTypedScalars]);
@@ -12559,8 +12461,8 @@ function extendExistingRecordType(schema, type, objectInfo, objectInfoMap) {
12559
12461
  // use a set to not allow duplicate scalars causing error(s)
12560
12462
  let typedScalars = new Set();
12561
12463
  let parentRelationshipFields = new Set();
12562
- const existingFields = keys$3(type.getFields());
12563
- const missingFields = values$1(objectInfo.fields).filter((field) => {
12464
+ const existingFields = keys$4(type.getFields());
12465
+ const missingFields = values$2(objectInfo.fields).filter((field) => {
12564
12466
  return (existingFields.includes(field.apiName) === false ||
12565
12467
  (field.relationshipName !== null && field.referenceToInfos.length > 0));
12566
12468
  });
@@ -12642,7 +12544,7 @@ function makeSpanningRecordConnections(schema, childRelationships, objectInfoMap
12642
12544
  function makeRecordField(fieldRepresentations, objectInfo, objectInfoMap, existingParentRelationships, recordTypeInSchema, existingFields = []) {
12643
12545
  const polymorphicFieldTypeNames = new Set();
12644
12546
  let fields = ``;
12645
- for (const field of values$1(fieldRepresentations)) {
12547
+ for (const field of values$2(fieldRepresentations)) {
12646
12548
  if (!fieldsStaticallyAdded.includes(field.apiName) && recordTypeInSchema === 'Missing') {
12647
12549
  fields += `${field.apiName}: ${graphqlTypeForField(field, objectInfo)}\n`;
12648
12550
  }
@@ -12860,20 +12762,20 @@ async function evaluate(config, observers, settings, objectInfos, store, snapsho
12860
12762
  }
12861
12763
 
12862
12764
  const parentRelationshipDirective = {
12863
- kind: Kind.DIRECTIVE,
12765
+ kind: Kind$1.DIRECTIVE,
12864
12766
  name: {
12865
- kind: Kind.NAME,
12767
+ kind: Kind$1.NAME,
12866
12768
  value: 'category',
12867
12769
  },
12868
12770
  arguments: [
12869
12771
  {
12870
- kind: Kind.ARGUMENT,
12772
+ kind: Kind$1.ARGUMENT,
12871
12773
  name: {
12872
- kind: Kind.NAME,
12774
+ kind: Kind$1.NAME,
12873
12775
  value: 'name',
12874
12776
  },
12875
12777
  value: {
12876
- kind: Kind.STRING,
12778
+ kind: Kind$1.STRING,
12877
12779
  value: PARENT_RELATIONSHIP,
12878
12780
  block: false,
12879
12781
  },
@@ -12881,12 +12783,12 @@ const parentRelationshipDirective = {
12881
12783
  ],
12882
12784
  };
12883
12785
  const FieldValueNodeSelectionSet = {
12884
- kind: Kind.SELECTION_SET,
12786
+ kind: Kind$1.SELECTION_SET,
12885
12787
  selections: [
12886
12788
  {
12887
- kind: Kind.FIELD,
12789
+ kind: Kind$1.FIELD,
12888
12790
  name: {
12889
- kind: Kind.NAME,
12791
+ kind: Kind$1.NAME,
12890
12792
  value: 'value',
12891
12793
  },
12892
12794
  },
@@ -13147,7 +13049,7 @@ async function injectSyntheticFields(originalAST, objectInfoService, draftFuncti
13147
13049
  }
13148
13050
  function swapIdField(filterFields, objectInfo, swapped, idState, draftFunctions) {
13149
13051
  switch (filterFields.kind) {
13150
- case Kind.OBJECT: {
13052
+ case Kind$1.OBJECT: {
13151
13053
  const fieldNodes = filterFields.fields.map((fieldNode) => {
13152
13054
  // check at the object value node level if the node's name is an Id/Reference
13153
13055
  // if not then just pass the current swapped state
@@ -13165,7 +13067,7 @@ function swapIdField(filterFields, objectInfo, swapped, idState, draftFunctions)
13165
13067
  fields: fieldNodes,
13166
13068
  };
13167
13069
  }
13168
- case Kind.STRING: {
13070
+ case Kind$1.STRING: {
13169
13071
  if (!swapped) {
13170
13072
  return filterFields;
13171
13073
  }
@@ -13184,7 +13086,7 @@ function swapIdField(filterFields, objectInfo, swapped, idState, draftFunctions)
13184
13086
  block: false,
13185
13087
  };
13186
13088
  }
13187
- case Kind.LIST: {
13089
+ case Kind$1.LIST: {
13188
13090
  const values = filterFields.values.map((valueNode) => swapIdField(valueNode, objectInfo, swapped, idState, draftFunctions));
13189
13091
  return {
13190
13092
  kind: 'ListValue',
@@ -13204,7 +13106,7 @@ function isMineScopeAvailable(apiNamePath, pathToObjectApiNamesMap, objectInfos)
13204
13106
  const objectInfo = objectInfos[apiName[0]];
13205
13107
  if (!objectInfo)
13206
13108
  return false;
13207
- return values$1(objectInfo.fields).some((fieldInfo) => {
13109
+ return values$2(objectInfo.fields).some((fieldInfo) => {
13208
13110
  return (fieldInfo.apiName === 'OwnerId' &&
13209
13111
  fieldInfo.referenceToInfos.some((referenceToInfo) => {
13210
13112
  return referenceToInfo.apiName === 'User';
@@ -13270,14 +13172,14 @@ function mergeSelectionNodes(group1, group2) {
13270
13172
  function growObjectFieldTree(tree, parentNode, entryNode, totalNodes, startNodes) {
13271
13173
  entryNode.fields.forEach((objectFieldNode) => {
13272
13174
  // objectFieldNode: {Account: { Name : { eq: "xxyyzz"}}}; objectFieldNode.value: { Name : { eq: "xxyyzz"}}
13273
- if (objectFieldNode.value.kind === Kind.OBJECT) {
13175
+ if (objectFieldNode.value.kind === Kind$1.OBJECT) {
13274
13176
  if (objectFieldNode.name.value === 'not') {
13275
13177
  // recursively go to deeper level of filter.
13276
13178
  growObjectFieldTree(tree, parentNode, objectFieldNode.value, totalNodes, startNodes);
13277
13179
  }
13278
13180
  else {
13279
13181
  // Spanning Field 'Account'
13280
- if (objectFieldNode.value.fields.some((childObjectFieldNode) => childObjectFieldNode.value.kind === Kind.OBJECT)) {
13182
+ if (objectFieldNode.value.fields.some((childObjectFieldNode) => childObjectFieldNode.value.kind === Kind$1.OBJECT)) {
13281
13183
  if (!totalNodes.has(parentNode)) {
13282
13184
  totalNodes.add(parentNode);
13283
13185
  startNodes.add(parentNode);
@@ -13300,7 +13202,7 @@ function growObjectFieldTree(tree, parentNode, entryNode, totalNodes, startNodes
13300
13202
  }
13301
13203
  }
13302
13204
  }
13303
- else if (objectFieldNode.value.kind === Kind.LIST) {
13205
+ else if (objectFieldNode.value.kind === Kind$1.LIST) {
13304
13206
  objectFieldNode.value.values.filter(isObjectValueNode).forEach((childNode) => {
13305
13207
  growObjectFieldTree(tree, parentNode, childNode, totalNodes, startNodes);
13306
13208
  });
@@ -13414,7 +13316,7 @@ async function resolveObjectInfos(objectInfotree, pathToObjectApiNamesMap, start
13414
13316
  // eslint-disable-next-line
13415
13317
  throw new Error(`Unable to resolve ObjectInfo(s) for ${Array.from(startNodes)}`);
13416
13318
  }
13417
- if (keys$3(objectInfos).length < startNodes.size) {
13319
+ if (keys$4(objectInfos).length < startNodes.size) {
13418
13320
  // eslint-disable-next-line
13419
13321
  throw new Error(`Unable to resolve ObjectInfo(s) for ${Array.from(startNodes)}`);
13420
13322
  }
@@ -13578,7 +13480,7 @@ function injectFilter(filterNode, idState, parentPath, isParentPolymorphic, obje
13578
13480
  const injectedSelections = [];
13579
13481
  let isPolymorphicField = false;
13580
13482
  switch (filterNode.kind) {
13581
- case Kind.ARGUMENT:
13483
+ case Kind$1.ARGUMENT:
13582
13484
  if (filterNode.value.kind !== 'ObjectValue')
13583
13485
  return [];
13584
13486
  filterNode.value.fields.forEach((objectFieldNode) => {
@@ -13592,9 +13494,9 @@ function injectFilter(filterNode, idState, parentPath, isParentPolymorphic, obje
13592
13494
  }
13593
13495
  });
13594
13496
  return injectedSelections;
13595
- case Kind.OBJECT_FIELD:
13497
+ case Kind$1.OBJECT_FIELD:
13596
13498
  switch (filterNode.value.kind) {
13597
- case Kind.LIST: {
13499
+ case Kind$1.LIST: {
13598
13500
  filterNode.value.values.filter(isObjectValueNode).forEach((objectValueNode) => {
13599
13501
  objectValueNode.fields.forEach((objectFieldNode) => {
13600
13502
  const subResults = injectFilter(objectFieldNode, idState, parentPath, isParentPolymorphic, objectInfos, pathToObjectApiNamesMap, draftFunctions, queryNode);
@@ -13605,7 +13507,7 @@ function injectFilter(filterNode, idState, parentPath, isParentPolymorphic, obje
13605
13507
  });
13606
13508
  return injectedSelections;
13607
13509
  }
13608
- case Kind.OBJECT: {
13510
+ case Kind$1.OBJECT: {
13609
13511
  if (filterNode.name.value === 'not') {
13610
13512
  filterNode.value.fields.forEach((objectFieldNode) => {
13611
13513
  const subResults = injectFilter(objectFieldNode, idState, parentPath, isParentPolymorphic, objectInfos, pathToObjectApiNamesMap, draftFunctions, queryNode);
@@ -13735,9 +13637,9 @@ function injectFilter(filterNode, idState, parentPath, isParentPolymorphic, obje
13735
13637
  (isInlineFragment && !isTypeNameExisting)) {
13736
13638
  if (isInlineFragment) {
13737
13639
  sel = {
13738
- kind: Kind.INLINE_FRAGMENT,
13640
+ kind: Kind$1.INLINE_FRAGMENT,
13739
13641
  typeCondition: {
13740
- kind: Kind.NAMED_TYPE,
13642
+ kind: Kind$1.NAMED_TYPE,
13741
13643
  name: {
13742
13644
  kind: 'Name',
13743
13645
  value: filterNode.name.value,
@@ -13756,14 +13658,14 @@ function injectFilter(filterNode, idState, parentPath, isParentPolymorphic, obje
13756
13658
  ...sel,
13757
13659
  directives,
13758
13660
  selectionSet: {
13759
- kind: Kind.SELECTION_SET,
13661
+ kind: Kind$1.SELECTION_SET,
13760
13662
  selections: idField.concat(...subSelectionNodes),
13761
13663
  },
13762
13664
  }
13763
13665
  : {
13764
13666
  ...sel,
13765
13667
  selectionSet: {
13766
- kind: Kind.SELECTION_SET,
13668
+ kind: Kind$1.SELECTION_SET,
13767
13669
  selections: idField.concat(...subSelectionNodes),
13768
13670
  },
13769
13671
  };
@@ -13892,12 +13794,12 @@ function updateIDInfo(fieldNode, idState, draftFunctions) {
13892
13794
  if (isObjectValueNode(fieldNode.value)) {
13893
13795
  const idOpValueNode = fieldNode.value.fields[0];
13894
13796
  switch (idOpValueNode.value.kind) {
13895
- case Kind.STRING: {
13797
+ case Kind$1.STRING: {
13896
13798
  const id = idOpValueNode.value.value;
13897
13799
  idState.swapNeeded = draftFunctions.isDraftId(id);
13898
13800
  break;
13899
13801
  }
13900
- case Kind.LIST: {
13802
+ case Kind$1.LIST: {
13901
13803
  const listValues = idOpValueNode.value.values;
13902
13804
  idState.swapNeeded = listValues
13903
13805
  .filter(isStringValueNode)
@@ -13984,7 +13886,7 @@ function injectParentRelationships(selections, parentNode, parentPath, ancestors
13984
13886
  parentInjectedNodes.push({
13985
13887
  ...selection,
13986
13888
  selectionSet: {
13987
- kind: Kind.SELECTION_SET,
13889
+ kind: Kind$1.SELECTION_SET,
13988
13890
  selections: [...idSelection, ...subInjectedSelections],
13989
13891
  },
13990
13892
  });
@@ -14355,7 +14257,7 @@ function removeSyntheticFields(result, query) {
14355
14257
  output.data.uiapi = { ...output.data.uiapi };
14356
14258
  output.data.uiapi.query = { ...output.data.uiapi.query };
14357
14259
  const outputApiParent = output.data.uiapi.query;
14358
- const keys$1 = keys$3(nodeJson);
14260
+ const keys$1 = keys$4(nodeJson);
14359
14261
  keys$1.forEach((recordName) => {
14360
14262
  const outputApi = {};
14361
14263
  // Each connectionSelection's maps its name or alias to one of returned records. The record name could be `apiName' or alias
@@ -14375,7 +14277,7 @@ function removeSyntheticFields(result, query) {
14375
14277
  * @param jsonOutput JsonObject which will be populated with properties. It would only contains properties defined in 'FieldNode'
14376
14278
  */
14377
14279
  function createUserJsonOutput(selection, jsonInput, jsonOutput) {
14378
- const keys$1 = keys$3(jsonInput);
14280
+ const keys$1 = keys$4(jsonInput);
14379
14281
  if (selection.selectionSet) {
14380
14282
  createjsonOutput(selection.selectionSet.selections, jsonInput, jsonOutput);
14381
14283
  }
@@ -14384,7 +14286,7 @@ function createUserJsonOutput(selection, jsonInput, jsonOutput) {
14384
14286
  }
14385
14287
  }
14386
14288
  function createjsonOutput(selections, jsonInput, jsonOutput) {
14387
- const keys$1 = keys$3(jsonInput);
14289
+ const keys$1 = keys$4(jsonInput);
14388
14290
  selections.filter(isFieldNode).forEach((subSelection) => {
14389
14291
  const fieldName = subSelection.alias ? subSelection.alias.value : subSelection.name.value;
14390
14292
  if (keys$1.includes(fieldName)) {
@@ -14447,9 +14349,9 @@ function referenceIdFieldForRelationship(relationshipName) {
14447
14349
  */
14448
14350
  function createFieldNode(nameValue, selectionSet) {
14449
14351
  return {
14450
- kind: Kind.FIELD,
14352
+ kind: Kind$1.FIELD,
14451
14353
  name: {
14452
- kind: Kind.NAME,
14354
+ kind: Kind$1.NAME,
14453
14355
  value: nameValue,
14454
14356
  },
14455
14357
  selectionSet,
@@ -14554,7 +14456,7 @@ const replaceDraftIdsInVariables = (variables, draftFunctions, unmappedDraftIDs)
14554
14456
  }
14555
14457
  else if (typeof object === 'object' && object !== null) {
14556
14458
  let source = object;
14557
- return keys$3(source).reduce((acc, key) => {
14459
+ return keys$4(source).reduce((acc, key) => {
14558
14460
  acc[key] = replace(source[key]);
14559
14461
  return acc;
14560
14462
  }, {});
@@ -14563,7 +14465,7 @@ const replaceDraftIdsInVariables = (variables, draftFunctions, unmappedDraftIDs)
14563
14465
  return object;
14564
14466
  }
14565
14467
  };
14566
- let newVariables = keys$3(variables).reduce((acc, key) => {
14468
+ let newVariables = keys$4(variables).reduce((acc, key) => {
14567
14469
  acc[key] = replace(variables[key]);
14568
14470
  return acc;
14569
14471
  }, {});
@@ -14949,14 +14851,14 @@ const recordIdGenerator = (id) => {
14949
14851
  */
14950
14852
 
14951
14853
 
14952
- const { keys: keys$2, create: create$2, assign: assign$2, entries: entries$2 } = Object;
14854
+ const { keys: keys$3, create: create$3, assign: assign$3, entries: entries$3 } = Object;
14953
14855
  const { stringify: stringify$2, parse: parse$2 } = JSON;
14954
14856
  const { push: push$1, join: join$1, slice: slice$1 } = Array.prototype;
14955
14857
  const { isArray, from } = Array;
14956
14858
 
14957
14859
  function ldsParamsToString(params) {
14958
- const returnParams = create$2(null);
14959
- const keys$1 = keys$2(params);
14860
+ const returnParams = create$3(null);
14861
+ const keys$1 = keys$3(params);
14960
14862
  for (let i = 0, len = keys$1.length; i < len; i++) {
14961
14863
  const key = keys$1[i];
14962
14864
  const value = params[key];
@@ -14973,7 +14875,7 @@ function ldsParamsToString(params) {
14973
14875
  else {
14974
14876
  returnParams[key] = `${value}`;
14975
14877
  }
14976
- if (isObject(value) === true && keys$2(value).length > 0) {
14878
+ if (isObject(value) === true && keys$3(value).length > 0) {
14977
14879
  returnParams[key] = stringify$2(value);
14978
14880
  }
14979
14881
  }
@@ -15374,7 +15276,7 @@ function buildAggregateUiUrl(params, resourceRequest) {
15374
15276
  optionalFields,
15375
15277
  };
15376
15278
  const queryString = [];
15377
- for (const [key, value] of entries$2(mergedParams)) {
15279
+ for (const [key, value] of entries$3(mergedParams)) {
15378
15280
  if (value !== undefined) {
15379
15281
  queryString.push(`${key}=${isArray(value) ? value.join(',') : value}`);
15380
15282
  }
@@ -15390,7 +15292,7 @@ function isSpanningRecord(fieldValue) {
15390
15292
  function mergeRecordFields(first, second) {
15391
15293
  const { fields: targetFields } = first;
15392
15294
  const { fields: sourceFields } = second;
15393
- const fieldNames = keys$2(sourceFields);
15295
+ const fieldNames = keys$3(sourceFields);
15394
15296
  for (let i = 0, len = fieldNames.length; i < len; i += 1) {
15395
15297
  const fieldName = fieldNames[i];
15396
15298
  const sourceField = sourceFields[fieldName];
@@ -15554,7 +15456,7 @@ function calculateEstimatedTotalUrlLength(request) {
15554
15456
  const { baseUri, basePath, queryParams } = request;
15555
15457
  let url = `${baseUri}${basePath}`;
15556
15458
  if (queryParams) {
15557
- const queryParamString = entries$2(queryParams)
15459
+ const queryParamString = entries$3(queryParams)
15558
15460
  .map(([key, value]) => `${key}=${value}`)
15559
15461
  .join('&');
15560
15462
  if (queryParamString) {
@@ -15767,7 +15669,7 @@ function makeNetworkAdapterChunkRecordFields(networkAdapter, instrumentationSink
15767
15669
  }, networkAdapter);
15768
15670
  }
15769
15671
 
15770
- const { keys: keys$1, create: create$1, assign: assign$1, entries: entries$1 } = Object;
15672
+ const { keys: keys$2, create: create$2, assign: assign$2, entries: entries$2 } = Object;
15771
15673
  const { stringify: stringify$1, parse: parse$1 } = JSON;
15772
15674
  const { push, join, slice } = Array.prototype;
15773
15675
 
@@ -16123,7 +16025,7 @@ function instrumentDraftQueue(queue) {
16123
16025
  logError: false,
16124
16026
  });
16125
16027
  };
16126
- return create$1(queue, { mergeActions: { value: mergeActions } });
16028
+ return create$2(queue, { mergeActions: { value: mergeActions } });
16127
16029
  }
16128
16030
 
16129
16031
  // so eslint doesn't complain about nimbus
@@ -16202,7 +16104,7 @@ function enableObjectInfoCaching(env, ensureObjectInfoCached) {
16202
16104
  function dataIsObjectInfo(key, data) {
16203
16105
  return incomingObjectInfos.has(key);
16204
16106
  }
16205
- return create$1(env, {
16107
+ return create$2(env, {
16206
16108
  handleSuccessResponse: { value: handleSuccessResponse },
16207
16109
  storePublish: { value: storePublish },
16208
16110
  });
@@ -16213,10 +16115,11 @@ Builds adapter instances for environments that have cross-adapter dependencies.
16213
16115
  These are only to be used internally in this module and not exported.
16214
16116
  They do not use draft environments, just the makeDurable environment.
16215
16117
  */
16216
- function buildInternalAdapters(store, networkAdapter, durableStore, ensureObjectInfoCached) {
16118
+ function buildInternalAdapters(store, networkAdapter, durableStore, ensureObjectInfoCached, shouldFlush) {
16217
16119
  const baseEnv = new Environment(store, networkAdapter);
16218
16120
  const durableEnvironment = makeDurable(baseEnv, {
16219
16121
  durableStore,
16122
+ shouldFlush,
16220
16123
  });
16221
16124
  const objectInfoCachingEnvionment = enableObjectInfoCaching(durableEnvironment, ensureObjectInfoCached);
16222
16125
  const luvio = new Luvio(objectInfoCachingEnvionment);
@@ -16302,8 +16205,8 @@ class ObjectInfoService {
16302
16205
  }
16303
16206
  };
16304
16207
  // Local in-memory cache for apiName to key prefixes
16305
- this.apiNameToKeyPrefixMemoryCache = create$1(null);
16306
- this.keyPrefixToApiNameMemoryCache = create$1(null);
16208
+ this.apiNameToKeyPrefixMemoryCache = create$2(null);
16209
+ this.keyPrefixToApiNameMemoryCache = create$2(null);
16307
16210
  }
16308
16211
  /**
16309
16212
  * Size of return map not necessarily correlated with number of inputs. The
@@ -16447,7 +16350,7 @@ function registerReportObserver(reportObserver) {
16447
16350
  };
16448
16351
  }
16449
16352
 
16450
- const { keys, create, assign, entries, values } = Object;
16353
+ const { keys: keys$1, create: create$1, assign: assign$1, entries: entries$1, values: values$1 } = Object;
16451
16354
  const { stringify, parse } = JSON;
16452
16355
 
16453
16356
  function selectColumnsFromTableWhereKeyIn(columnNames, table, keyColumnName, whereIn) {
@@ -16514,7 +16417,7 @@ class LdsDataTable {
16514
16417
  },
16515
16418
  conflictColumns: this.conflictColumnNames,
16516
16419
  columns: this.columnNames,
16517
- rows: keys(entries).reduce((rows, key) => {
16420
+ rows: keys$1(entries).reduce((rows, key) => {
16518
16421
  const entry = entries[key];
16519
16422
  const { data, metadata } = entry;
16520
16423
  const row = [key, stringify(data), metadata ? stringify(metadata) : null];
@@ -16533,7 +16436,7 @@ class LdsDataTable {
16533
16436
  type: 'setMetadata',
16534
16437
  },
16535
16438
  columns: [COLUMN_NAME_METADATA$1],
16536
- values: keys(entries).reduce((values, key) => {
16439
+ values: keys$1(entries).reduce((values, key) => {
16537
16440
  const { metadata } = entries[key];
16538
16441
  const row = [metadata ? stringify(metadata) : null];
16539
16442
  values[key] = row;
@@ -16623,7 +16526,7 @@ class LdsInternalDataTable {
16623
16526
  },
16624
16527
  conflictColumns: this.conflictColumnNames,
16625
16528
  columns: this.columnNames,
16626
- rows: keys(entries).reduce((rows, key) => {
16529
+ rows: keys$1(entries).reduce((rows, key) => {
16627
16530
  const entry = entries[key];
16628
16531
  const { data, metadata } = entry;
16629
16532
  const row = [key, stringify(data)];
@@ -16649,7 +16552,7 @@ class LdsInternalDataTable {
16649
16552
  type: 'setMetadata',
16650
16553
  },
16651
16554
  columns: [COLUMN_NAME_METADATA],
16652
- values: keys(entries).reduce((values, key) => {
16555
+ values: keys$1(entries).reduce((values, key) => {
16653
16556
  const { metadata } = entries[key];
16654
16557
  const row = [metadata ? stringify(metadata) : null];
16655
16558
  values[key] = row;
@@ -16658,7 +16561,7 @@ class LdsInternalDataTable {
16658
16561
  };
16659
16562
  }
16660
16563
  metadataToUpdateSQLQueries(entries, segment) {
16661
- return keys(entries).reduce((accu, key) => {
16564
+ return keys$1(entries).reduce((accu, key) => {
16662
16565
  const { metadata } = entries[key];
16663
16566
  if (metadata !== undefined) {
16664
16567
  accu.push({
@@ -16729,7 +16632,7 @@ class NimbusSqliteStore {
16729
16632
  return this.getTable(segment).getAll(segment);
16730
16633
  }
16731
16634
  setEntries(entries, segment) {
16732
- if (keys(entries).length === 0) {
16635
+ if (keys$1(entries).length === 0) {
16733
16636
  return Promise.resolve();
16734
16637
  }
16735
16638
  const table = this.getTable(segment);
@@ -16737,7 +16640,7 @@ class NimbusSqliteStore {
16737
16640
  return this.batchOperationAsPromise([upsertOperation]);
16738
16641
  }
16739
16642
  setMetadata(entries, segment) {
16740
- if (keys(entries).length === 0) {
16643
+ if (keys$1(entries).length === 0) {
16741
16644
  return Promise.resolve();
16742
16645
  }
16743
16646
  const table = this.getTable(segment);
@@ -16756,13 +16659,13 @@ class NimbusSqliteStore {
16756
16659
  batchOperations(operations) {
16757
16660
  const sqliteOperations = operations.reduce((acc, cur) => {
16758
16661
  if (cur.type === 'setEntries') {
16759
- if (keys(cur.entries).length > 0) {
16662
+ if (keys$1(cur.entries).length > 0) {
16760
16663
  const table = this.getTable(cur.segment);
16761
16664
  acc.push(table.entriesToUpsertOperations(cur.entries, cur.segment));
16762
16665
  }
16763
16666
  }
16764
16667
  else if (cur.type === 'setMetadata') {
16765
- if (keys(cur.entries).length > 0) {
16668
+ if (keys$1(cur.entries).length > 0) {
16766
16669
  const table = this.getTable(cur.segment);
16767
16670
  if (this.supportsBatchUpdates) {
16768
16671
  acc.push(table.metadataToUpdateOperations(cur.entries, cur.segment));
@@ -16895,7 +16798,7 @@ class AbstractKeyValueDataTable {
16895
16798
  },
16896
16799
  conflictColumns: this.conflictColumnNames,
16897
16800
  columns: this.columnNames,
16898
- rows: keys(entries).reduce((rows, key) => {
16801
+ rows: keys$1(entries).reduce((rows, key) => {
16899
16802
  const entry = entries[key];
16900
16803
  rows.push([key, stringify(entry.data)]);
16901
16804
  return rows;
@@ -17087,7 +16990,7 @@ function makeEnvironmentGraphqlAware(environment) {
17087
16990
  }
17088
16991
  return environment.applyCachePolicy(luvio, adapterRequestContext, buildSnapshotContext, localBuildCachedSnapshot, buildNetworkSnapshot);
17089
16992
  };
17090
- return create$1(environment, {
16993
+ return create$2(environment, {
17091
16994
  rebuildSnapshot: { value: rebuildSnapshot },
17092
16995
  applyCachePolicy: { value: applyCachePolicy },
17093
16996
  setDefaultCachePolicy: { value: environment.setDefaultCachePolicy.bind(environment) },
@@ -18293,7 +18196,7 @@ async function aggressiveTrim(data, deallocateFn, options = {}) {
18293
18196
  const batchSize = options.batchSize !== undefined ? options.batchSize : DEFAULT_MAX_BATCH_SIZE;
18294
18197
  let deallocatedCount = 0;
18295
18198
  const { pendingTrimKeys, retainedIds, storeRecords } = data;
18296
- const storeKeyLength = keys$1(storeRecords).length;
18199
+ const storeKeyLength = keys$2(storeRecords).length;
18297
18200
  if (storeKeyLength <= maxStoreRecords) {
18298
18201
  return { deallocatedCount, trimKeysSkipped: pendingTrimKeys };
18299
18202
  }
@@ -18369,6 +18272,78 @@ function setupObserver() {
18369
18272
  }
18370
18273
  }
18371
18274
 
18275
+ /**
18276
+ * Copyright (c) 2022, Salesforce, Inc.,
18277
+ * All rights reserved.
18278
+ * For full license text, see the LICENSE.txt file
18279
+ */
18280
+
18281
+
18282
+ const { keys, values, create, assign, freeze, entries } = Object;
18283
+ function isStoreKeyRecordId(key) {
18284
+ return key.indexOf(RECORD_ID_PREFIX$1) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION$1) === -1;
18285
+ }
18286
+ function isEntryDurableRecordRepresentation(entry, key) {
18287
+ // Either a DurableRecordRepresentation or StoreRecordError can live at a record key
18288
+ return ((isStoreKeyRecordId(key) || isStoreKeyRecordViewEntity$1(key)) &&
18289
+ entry.data.__type === undefined);
18290
+ }
18291
+
18292
+ // TODO [W-15983267]: this is here for backwards compatibility from <252 and can be removed in 256
18293
+ // This HOF ensures that missing links that are stored in the "links" property in the durable store get restored to the "fields" property.
18294
+ // this code is necessary for backwards compatibility with records that were stored in the durable store before the "links" property was removed.
18295
+ function makeDurableStoreBackwardsCompatible(durableStore) {
18296
+ const getEntries = function (entries, segment) {
18297
+ // this HOF only inspects records in the default segment
18298
+ if (segment !== DefaultDurableSegment) {
18299
+ return durableStore.getEntries(entries, segment);
18300
+ }
18301
+ const { length: entriesLength } = entries;
18302
+ if (entriesLength === 0) {
18303
+ return Promise.resolve({});
18304
+ }
18305
+ // call base getEntries
18306
+ return durableStore.getEntries(entries, segment).then((durableEntries) => {
18307
+ if (durableEntries === undefined) {
18308
+ return undefined;
18309
+ }
18310
+ const returnEntries = create(null);
18311
+ const keys$1 = keys(durableEntries);
18312
+ for (let i = 0, len = keys$1.length; i < len; i++) {
18313
+ const key = keys$1[i];
18314
+ const value = durableEntries[key];
18315
+ if (value === undefined) {
18316
+ continue;
18317
+ }
18318
+ if (isEntryDurableRecordRepresentation(value, key)) {
18319
+ const record = value.data;
18320
+ const { links } = record;
18321
+ if (links !== undefined) {
18322
+ const missingLinks = keys(links);
18323
+ for (let j = 0, len = missingLinks.length; j < len; j++) {
18324
+ const fieldName = missingLinks[j];
18325
+ const link = links[fieldName];
18326
+ if (link !== undefined && link.isMissing === true) {
18327
+ record.fields[fieldName] = {
18328
+ __state: { isMissing: true },
18329
+ value: undefined,
18330
+ displayValue: undefined,
18331
+ };
18332
+ }
18333
+ }
18334
+ delete record.links;
18335
+ }
18336
+ }
18337
+ returnEntries[key] = value;
18338
+ }
18339
+ return returnEntries;
18340
+ });
18341
+ };
18342
+ return create(durableStore, {
18343
+ getEntries: { value: getEntries, writable: true },
18344
+ });
18345
+ }
18346
+
18372
18347
  // so eslint doesn't complain about nimbus
18373
18348
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
18374
18349
  /* global __nimbus */
@@ -18377,10 +18352,32 @@ let lazyDraftManager;
18377
18352
  let lazyLuvio;
18378
18353
  let lazyInternalLuvio;
18379
18354
  let lazyEnvironment;
18380
- let lazyBaseDurableStore;
18355
+ let lazyDurableStore;
18381
18356
  let lazyNetworkAdapter;
18382
18357
  let lazyObjectInfoService;
18383
18358
  let lazyGetRecords;
18359
+ // TODO [W-123]: JHORST hoist, optimize and test this function
18360
+ const shouldFlush = (key, value) => {
18361
+ if (!isStoreKeyRecordId$1(key)) {
18362
+ return { flushValue: true };
18363
+ }
18364
+ if (value && typeof value === 'object') {
18365
+ const fields = value.fields;
18366
+ if (fields && typeof fields === 'object') {
18367
+ const keys = keys$2(fields);
18368
+ for (const key of keys) {
18369
+ const field = fields[key];
18370
+ if (fields && field.__state && field.__state.pending === true) {
18371
+ // Do not flush records if they contain pending fields but
18372
+ // flush the updated metadata
18373
+ return { flushValue: false, forceFlushMetadata: true };
18374
+ }
18375
+ }
18376
+ }
18377
+ return { flushValue: true };
18378
+ }
18379
+ return { flushValue: true };
18380
+ };
18384
18381
  /**
18385
18382
  * This returns the LDS on Mobile Runtime singleton object.
18386
18383
  */
@@ -18403,61 +18400,49 @@ function getRuntime() {
18403
18400
  lazyNetworkAdapter = platformNetworkAdapter(makeNetworkAdapterChunkRecordFields(NimbusNetworkAdapter, {
18404
18401
  reportChunkCandidateUrlLength: reportChunkCandidateUrlLength,
18405
18402
  }));
18406
- lazyBaseDurableStore = getNimbusDurableStore();
18403
+ lazyDurableStore = makeDurableStoreBackwardsCompatible(getNimbusDurableStore());
18407
18404
  // specific adapters
18408
18405
  const internalAdapterStore = new InMemoryStore();
18409
- let getIngestRecordsForInternalAdapters;
18410
- let getIngestMetadataForInternalAdapters;
18411
- let getIngestStoreInternal;
18412
- const internalAdapterDurableStore = makeRecordDenormalizingDurableStore(lazyLuvio, lazyBaseDurableStore, () => getIngestRecordsForInternalAdapters !== undefined
18413
- ? getIngestRecordsForInternalAdapters()
18414
- : {}, () => getIngestMetadataForInternalAdapters !== undefined
18415
- ? getIngestMetadataForInternalAdapters()
18416
- : {}, () => (getIngestStoreInternal !== undefined ? getIngestStoreInternal() : undefined));
18417
- const { adapters: { getObjectInfo, getObjectInfos, getRecord, getObjectInfoDirectory }, durableEnvironment: internalAdapterDurableEnvironment, luvio: internalLuvio, } = buildInternalAdapters(internalAdapterStore, lazyNetworkAdapter, internalAdapterDurableStore, (apiName, objectInfo) => lazyObjectInfoService.ensureObjectInfoCached(apiName, objectInfo));
18406
+ const { adapters: { getObjectInfo, getObjectInfos, getRecord, getObjectInfoDirectory }, luvio: internalLuvio, } = buildInternalAdapters(internalAdapterStore, lazyNetworkAdapter, lazyDurableStore, (apiName, objectInfo) => lazyObjectInfoService.ensureObjectInfoCached(apiName, objectInfo), shouldFlush);
18418
18407
  lazyInternalLuvio = internalLuvio;
18419
- getIngestRecordsForInternalAdapters =
18420
- internalAdapterDurableEnvironment.getIngestStagingStoreRecords;
18421
- getIngestMetadataForInternalAdapters =
18422
- internalAdapterDurableEnvironment.getIngestStagingStoreRecords;
18423
- getIngestStoreInternal = internalAdapterDurableEnvironment.getIngestStagingStore;
18424
- lazyObjectInfoService = new ObjectInfoService(getObjectInfo, getObjectInfos, getObjectInfoDirectory, lazyBaseDurableStore);
18425
- // creates a durable store that denormalizes scalar fields for records
18426
- let getIngestRecords;
18427
- let getIngestMetadata;
18428
- let getIngestStore;
18429
- const recordDenormingStore = makeRecordDenormalizingDurableStore(lazyLuvio, lazyBaseDurableStore, () => (getIngestRecords !== undefined ? getIngestRecords() : {}), () => (getIngestMetadata !== undefined ? getIngestMetadata() : {}), () => (getIngestStore !== undefined ? getIngestStore() : undefined), lazyBaseDurableStore);
18408
+ lazyObjectInfoService = new ObjectInfoService(getObjectInfo, getObjectInfos, getObjectInfoDirectory, lazyDurableStore);
18430
18409
  const baseEnv = new Environment(store, lazyNetworkAdapter);
18431
18410
  const gqlEnv = makeEnvironmentGraphqlAware(baseEnv);
18432
18411
  const durableEnv = makeDurable(gqlEnv, {
18433
- durableStore: recordDenormingStore,
18412
+ durableStore: lazyDurableStore,
18434
18413
  enableDurableMetadataRefresh: ldsMetadataRefreshEnabled.isOpen({ fallback: false }),
18435
18414
  // disable luvio deep freeze in headless environments
18436
18415
  disableDeepFreeze: typeof window === 'undefined',
18416
+ shouldFlush,
18437
18417
  });
18438
- getIngestRecords = durableEnv.getIngestStagingStoreRecords;
18439
- getIngestMetadata = durableEnv.getIngestStagingStoreMetadata;
18440
- getIngestStore = durableEnv.getIngestStagingStore;
18441
18418
  // draft queue
18442
- lazyDraftQueue = buildLdsDraftQueue(recordDenormingStore);
18443
- const draftService = new UiApiDraftRecordService(lazyDraftQueue, () => lazyLuvio, recordDenormingStore, getObjectInfo, newRecordId, userId, formatDisplayValue);
18444
- const uiApiRecordHandler = new UiApiActionHandler(() => lazyLuvio, lazyNetworkAdapter, recordDenormingStore, lazyDraftQueue, getRecord, lazyObjectInfoService, isGenerated, draftService);
18419
+ lazyDraftQueue = buildLdsDraftQueue(lazyDurableStore);
18420
+ const draftService = new UiApiDraftRecordService(lazyDraftQueue, () => lazyLuvio, lazyDurableStore, getObjectInfo, newRecordId, userId, formatDisplayValue);
18421
+ const uiApiRecordHandler = new UiApiActionHandler(() => lazyLuvio, lazyNetworkAdapter, lazyDurableStore, lazyDraftQueue, getRecord, lazyObjectInfoService, isGenerated, draftService);
18445
18422
  const quickActionHandler = new QuickActionExecutionRepresentationHandler(() => lazyLuvio, draftService, lazyDraftQueue, lazyNetworkAdapter, isGenerated);
18423
+ const updateRecordQuickActionHandler = new UpdateRecordQuickActionExecutionRepresentationHandler(() => lazyLuvio, draftService, lazyDraftQueue, lazyNetworkAdapter, lazyDurableStore, isGenerated);
18446
18424
  const contentDocumentCompositeActionHandler = new ContentDocumentCompositeRepresentationActionHandler(() => lazyLuvio, draftService, lazyDraftQueue, lazyNetworkAdapter, isGenerated, NimbusBinaryStore);
18447
18425
  lazyDraftQueue.addHandler(uiApiRecordHandler);
18448
18426
  lazyDraftQueue.addHandler(quickActionHandler);
18427
+ lazyDraftQueue.addHandler(updateRecordQuickActionHandler);
18449
18428
  lazyDraftQueue.addHandler(contentDocumentCompositeActionHandler);
18450
18429
  // draft manager
18451
18430
  lazyDraftManager = new DraftManager(lazyDraftQueue);
18452
18431
  const objectInfoCachingEnvironment = enableObjectInfoCaching(durableEnv, lazyObjectInfoService.ensureObjectInfoCached);
18453
- let draftEnv = makeEnvironmentDraftAware(lazyLuvio, objectInfoCachingEnvironment, recordDenormingStore, [uiApiRecordHandler, quickActionHandler, contentDocumentCompositeActionHandler], lazyDraftQueue);
18432
+ let draftEnv = makeEnvironmentDraftAware(lazyLuvio, objectInfoCachingEnvironment, lazyDurableStore, [
18433
+ uiApiRecordHandler,
18434
+ quickActionHandler,
18435
+ updateRecordQuickActionHandler,
18436
+ contentDocumentCompositeActionHandler,
18437
+ ], lazyDraftQueue);
18454
18438
  draftEnv = makeEnvironmentUiApiRecordDraftAware(lazyLuvio, {
18455
18439
  store,
18456
18440
  handler: uiApiRecordHandler,
18457
- durableStore: recordDenormingStore,
18441
+ durableStore: lazyDurableStore,
18458
18442
  isDraftId: isGenerated,
18459
18443
  }, draftEnv);
18460
18444
  draftEnv = performQuickActionDraftEnvironment(lazyLuvio, draftEnv, quickActionHandler);
18445
+ draftEnv = performUpdateRecordQuickActionDraftEnvironment(lazyLuvio, draftEnv, updateRecordQuickActionHandler);
18461
18446
  lazyEnvironment = draftEnv;
18462
18447
  lazyLuvio = new Luvio(lazyEnvironment, {
18463
18448
  instrument: instrumentLuvio,
@@ -18478,7 +18463,7 @@ function getRuntime() {
18478
18463
  isDraftId: isGenerated,
18479
18464
  getCanonicalId,
18480
18465
  };
18481
- setStoreEval(sqliteStoreEvalFactory(userId, lazyBaseDurableStore, lazyObjectInfoService, draftFuncs));
18466
+ setStoreEval(sqliteStoreEvalFactory(userId, lazyDurableStore, lazyObjectInfoService, draftFuncs));
18482
18467
  setDraftFunctions(draftFuncs);
18483
18468
  });
18484
18469
  // on core the graphql configuration is split so we need to set configureUIAPIGraphQL both in the
@@ -18490,7 +18475,7 @@ function getRuntime() {
18490
18475
  setDraftAwareGraphQLAdapter(graphqlEvalAdapter);
18491
18476
  }
18492
18477
  else {
18493
- const draftAwareGraphQLAdapter = draftAwareGraphQLAdapterFactory(userId, lazyObjectInfoService, lazyBaseDurableStore, lazyLuvio, isGenerated);
18478
+ const draftAwareGraphQLAdapter = draftAwareGraphQLAdapterFactory(userId, lazyObjectInfoService, lazyDurableStore, lazyLuvio, isGenerated);
18494
18479
  setDraftAwareGraphQLAdapter(
18495
18480
  // return a draft aware graphql adapter here
18496
18481
  draftAwareGraphQLAdapter);
@@ -18520,7 +18505,7 @@ function getRuntime() {
18520
18505
  draftQueue: lazyDraftQueue,
18521
18506
  createPrimingSession: (config) => {
18522
18507
  return primingSessionFactory({
18523
- store: lazyBaseDurableStore,
18508
+ store: lazyDurableStore,
18524
18509
  objectInfoService: lazyObjectInfoService,
18525
18510
  getLuvio: () => lazyLuvio,
18526
18511
  concurrency: config.concurrency,
@@ -18528,7 +18513,7 @@ function getRuntime() {
18528
18513
  getRecords: lazyGetRecords,
18529
18514
  });
18530
18515
  },
18531
- nimbusSqliteStore: lazyBaseDurableStore,
18516
+ nimbusSqliteStore: lazyDurableStore,
18532
18517
  };
18533
18518
  }
18534
18519
 
@@ -18545,4 +18530,4 @@ register({
18545
18530
  });
18546
18531
 
18547
18532
  export { O11Y_NAMESPACE_LDS_MOBILE, getRuntime, registerReportObserver, reportGraphqlQueryParseError };
18548
- // version: 1.299.0-83936bf1de
18533
+ // version: 1.301.0-ecd340c2e1