@salesforce/lds-worker-api 1.302.0 → 1.304.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -835,14 +835,7 @@ class StringKeyInMemoryStore {
835
835
  // of the function, in case the reference changes (because of an unsubscribe)
836
836
  const { snapshotSubscriptions } = this;
837
837
  // read metadata for each key, and mark as expired
838
- const expirationTimestamp = Date.now();
839
- for (let i = 0, len = keys.length; i < len; i++) {
840
- const key = keys[i];
841
- const metadata = this.readMetadata(key);
842
- if (metadata !== undefined) {
843
- this.publishMetadata(key, { ...metadata, expirationTimestamp });
844
- }
845
- }
838
+ this.expirePossibleStaleRecords(keys);
846
839
  // Process snapshot subscriptions
847
840
  const pendingPromises = [];
848
841
  for (let i = 0, len = snapshotSubscriptions.length; i < len; i++) {
@@ -937,6 +930,16 @@ class StringKeyInMemoryStore {
937
930
  this.metadata[canonicalKey] = storeMetadata;
938
931
  }
939
932
  }
933
+ expirePossibleStaleRecords(keys) {
934
+ const expirationTimestamp = Date.now();
935
+ for (let i = 0, len = keys.length; i < len; i++) {
936
+ const key = keys[i];
937
+ const metadata = this.readMetadata(key);
938
+ if (metadata !== undefined) {
939
+ this.publishMetadata(key, { ...metadata, expirationTimestamp });
940
+ }
941
+ }
942
+ }
940
943
  setTTLOverride(namespace, representationName, ttl) {
941
944
  this.ttlOverrides[getTTLOverrideKey(namespace, representationName)] = ttl;
942
945
  }
@@ -1531,14 +1534,7 @@ class InMemoryStore {
1531
1534
  // of the function, in case the reference changes (because of an unsubscribe)
1532
1535
  const { snapshotSubscriptions } = this;
1533
1536
  // read metadata for each key, and mark as expired
1534
- const expirationTimestamp = Date.now();
1535
- for (let i = 0, len = keys.length; i < len; i++) {
1536
- const key = keys[i];
1537
- const metadata = this.readMetadata(key);
1538
- if (metadata !== undefined) {
1539
- this.publishMetadata(key, { ...metadata, expirationTimestamp });
1540
- }
1541
- }
1537
+ this.expirePossibleStaleRecords(keys);
1542
1538
  // Process snapshot subscriptions
1543
1539
  const pendingPromises = [];
1544
1540
  for (let i = 0, len = snapshotSubscriptions.length; i < len; i++) {
@@ -1661,6 +1657,19 @@ class InMemoryStore {
1661
1657
  this.metadataMap.set(canonicalKey, storeMetadata);
1662
1658
  }
1663
1659
  }
1660
+ expirePossibleStaleRecords(keys) {
1661
+ if (keys.length > 0 && typeof keys[0] === 'string') {
1662
+ return this.fallbackStringKeyInMemoryStore.expirePossibleStaleRecords(keys);
1663
+ }
1664
+ const expirationTimestamp = Date.now();
1665
+ for (let i = 0, len = keys.length; i < len; i++) {
1666
+ const key = keys[i];
1667
+ const metadata = this.readMetadata(key);
1668
+ if (metadata !== undefined) {
1669
+ this.publishMetadata(key, { ...metadata, expirationTimestamp });
1670
+ }
1671
+ }
1672
+ }
1664
1673
  setTTLOverride(namespace, representationName, ttl) {
1665
1674
  // Set the TTLs in both the stores
1666
1675
  this.fallbackStringKeyInMemoryStore.setTTLOverride(namespace, representationName, ttl);
@@ -2250,6 +2259,20 @@ class GraphNode {
2250
2259
  const value = this.data[propertyName];
2251
2260
  return typeof value !== 'object' || value === null;
2252
2261
  }
2262
+ isMissing(propertyName) {
2263
+ const value = this.data[propertyName];
2264
+ if (value && typeof value.__state === 'object' && value.__state !== null) {
2265
+ return !!value.__state.isMissing;
2266
+ }
2267
+ return false;
2268
+ }
2269
+ isPending(propertyName) {
2270
+ const value = this.data[propertyName];
2271
+ if (value && typeof value.__state === 'object' && value.__state !== null) {
2272
+ return !!value.__state.pending;
2273
+ }
2274
+ return false;
2275
+ }
2253
2276
  write(propertyName, value) {
2254
2277
  this.data[propertyName] = value;
2255
2278
  const canonicalKey = this.store.getCanonicalRecordId(this.storeKey);
@@ -3633,6 +3656,30 @@ class Environment {
3633
3656
  buildStructuredKey(namespace, representationName, idValues) {
3634
3657
  return this.store.buildStructuredKey(namespace, representationName, idValues);
3635
3658
  }
3659
+ /**
3660
+ * Takes a list of keys and marks them as stale to be refreshed.
3661
+ * They will then be refreshed with the provided refresh function.
3662
+ * If no refresh and makeConfig functions are provided, it will refresh the next
3663
+ * time that record is trying to be fetched
3664
+ *
3665
+ * Example: one record from graphql needs to be refreshed and not
3666
+ * the entire graphql query
3667
+ *
3668
+ * @param keys
3669
+ * @param makeConfig
3670
+ * @param refresh
3671
+ * @returns
3672
+ */
3673
+ expirePossibleStaleRecords(keys, config, refresh) {
3674
+ this.store.expirePossibleStaleRecords(keys);
3675
+ if (refresh !== undefined && config !== undefined) {
3676
+ return this.refreshPossibleStaleRecords(config, refresh);
3677
+ }
3678
+ return Promise.resolve();
3679
+ }
3680
+ refreshPossibleStaleRecords(config, refresh) {
3681
+ return Promise.resolve(refresh(config, { cachePolicy: { type: 'no-cache' } })).then(() => { });
3682
+ }
3636
3683
  }
3637
3684
 
3638
3685
  class Luvio {
@@ -3699,6 +3746,9 @@ class Luvio {
3699
3746
  storeCleanup() {
3700
3747
  this.environment.storeCleanup();
3701
3748
  }
3749
+ storeExpirePossibleStaleRecords(keys, config, refresh) {
3750
+ return this.environment.expirePossibleStaleRecords(keys, config, refresh);
3751
+ }
3702
3752
  createSnapshot(selector, refresh) {
3703
3753
  return this.environment.createSnapshot(selector, refresh);
3704
3754
  }
@@ -4087,7 +4137,7 @@ function createResourceParamsImpl(config, configMetadata) {
4087
4137
  }
4088
4138
  return resourceParams;
4089
4139
  }
4090
- // engine version: 0.155.1-284dbf66
4140
+ // engine version: 0.156.3-04c1a80e
4091
4141
 
4092
4142
  /**
4093
4143
  * Copyright (c) 2022, Salesforce, Inc.,
@@ -4215,7 +4265,7 @@ function withDefaultLuvio(callback) {
4215
4265
  }
4216
4266
  callbacks.push(callback);
4217
4267
  }
4218
- // version: 1.302.0-e45992a1f4
4268
+ // version: 1.304.0-aa3e5f9550
4219
4269
 
4220
4270
  // TODO [TD-0081508]: once that TD is fulfilled we can probably change this file
4221
4271
  function instrumentAdapter$1(createFunction, _metadata) {
@@ -15714,7 +15764,7 @@ function gql(literals, ...subs) {
15714
15764
  }
15715
15765
  return superResult;
15716
15766
  }
15717
- // version: 1.302.0-e45992a1f4
15767
+ // version: 1.304.0-aa3e5f9550
15718
15768
 
15719
15769
  function unwrap(data) {
15720
15770
  // The lwc-luvio bindings import a function from lwc called "unwrap".
@@ -16643,7 +16693,7 @@ function createGraphQLWireAdapterConstructor(luvio, adapter, metadata, astResolv
16643
16693
  const { apiFamily, name } = metadata;
16644
16694
  return createGraphQLWireAdapterConstructor$1(adapter, `${apiFamily}.${name}`, luvio, astResolver);
16645
16695
  }
16646
- // version: 1.302.0-e45992a1f4
16696
+ // version: 1.304.0-aa3e5f9550
16647
16697
 
16648
16698
  /**
16649
16699
  * Copyright (c) 2022, Salesforce, Inc.,
@@ -16742,7 +16792,7 @@ var TypeCheckShapes;
16742
16792
  TypeCheckShapes[TypeCheckShapes["Integer"] = 3] = "Integer";
16743
16793
  TypeCheckShapes[TypeCheckShapes["Unsupported"] = 4] = "Unsupported";
16744
16794
  })(TypeCheckShapes || (TypeCheckShapes = {}));
16745
- // engine version: 0.155.1-284dbf66
16795
+ // engine version: 0.156.3-04c1a80e
16746
16796
 
16747
16797
  const { keys: ObjectKeys$3, create: ObjectCreate$3 } = Object;
16748
16798
 
@@ -18217,7 +18267,7 @@ const CHILD_RELATIONSHIP_SELECTION = {
18217
18267
  // be applied to a RecordRepresentation in environments configured with
18218
18268
  // drafts when the record has draft changes applied to it
18219
18269
  // TODO [W-8237087]: explore if this selection can only be added in environments where drafts are enabled
18220
- const DRAFTS_SELECTION = {
18270
+ const DRAFTS_SELECTION$1 = {
18221
18271
  kind: 'Object',
18222
18272
  opaque: true,
18223
18273
  name: 'drafts',
@@ -18231,7 +18281,7 @@ function createRecordSelection(fieldDefinition) {
18231
18281
  childRelationships: CHILD_RELATIONSHIP_SELECTION,
18232
18282
  fields: createPathSelection('fields', fieldDefinition),
18233
18283
  });
18234
- return [...sel.selections, DRAFTS_SELECTION];
18284
+ return [...sel.selections, DRAFTS_SELECTION$1];
18235
18285
  }
18236
18286
  /**
18237
18287
  * Convert a list of fields and optional fields into RecordRepresentation its equivalent
@@ -18248,7 +18298,7 @@ function buildSelectionFromRecord(record) {
18248
18298
  childRelationships: CHILD_RELATIONSHIP_SELECTION,
18249
18299
  fields: createPathSelectionFromValue(record.fields),
18250
18300
  });
18251
- return [...sel.selections, DRAFTS_SELECTION];
18301
+ return [...sel.selections, DRAFTS_SELECTION$1];
18252
18302
  }
18253
18303
 
18254
18304
  const MAX_RECORD_DEPTH = 5;
@@ -20472,6 +20522,12 @@ function readFieldStateFromValueNode(fieldNode) {
20472
20522
  }
20473
20523
  return fieldNode.__state.fields;
20474
20524
  }
20525
+ function writeFieldStateNodeValue(fieldNode, propertyName, value) {
20526
+ const node = fieldNode;
20527
+ const state = node.__state || {};
20528
+ state[propertyName] = value;
20529
+ node.write('__state', state);
20530
+ }
20475
20531
 
20476
20532
  const CUSTOM_API_NAME_SUFFIX = '__c';
20477
20533
  const DMO_API_NAME_SUFFIX = '__dlm';
@@ -20520,7 +20576,7 @@ function extractTrackedFieldsToTrie(recordId, node, root) {
20520
20576
  name: key,
20521
20577
  children: {},
20522
20578
  };
20523
- if (isMissing(fieldRep)) {
20579
+ if (fields.isMissing(key)) {
20524
20580
  current.children[key] = next;
20525
20581
  continue;
20526
20582
  }
@@ -20579,14 +20635,6 @@ function convertTrieToFieldsRecursively(root) {
20579
20635
  }
20580
20636
  return reduce$2.call(childKeys, (acc, cur) => concat$2.call(acc, convertTrieToFieldsRecursively(root.children[cur]).map((i) => `${root.name}.${i}`)), []);
20581
20637
  }
20582
- function isMissing(node) {
20583
- // TODO [W-15867870]: JHORST add support for isMissing on graphnode object
20584
- return node.data && node.data.__state && node.data.__state.isMissing === true;
20585
- }
20586
- function isPending(node) {
20587
- // TODO [W-15867870]: JHORST add support for pending on graphnode object
20588
- return node.data && node.data.__state && node.data.__state.pending === true;
20589
- }
20590
20638
  const BLANK_RECORD_FIELDS_TRIE = freeze$4({
20591
20639
  name: '',
20592
20640
  children: {},
@@ -20678,8 +20726,9 @@ function markNulledOutPath(record, path) {
20678
20726
  !isFrozen$2(resolved.data)) {
20679
20727
  const stateFields = readFieldStateFromValueNode(resolved.data);
20680
20728
  const fields = stateFields === undefined ? [] : stateFields;
20681
- // TODO [W-15838292]: JHORST add support for node state on graphnode object
20682
- resolved.write('__state', { fields: dedupe$2([...fields, path.join('.')]) });
20729
+ // Note that GraphNodes are frozen when NODE_ENV != production.
20730
+ // Use with care.
20731
+ writeFieldStateNodeValue(resolved, 'fields', dedupe$2([...fields, path.join('.')]));
20683
20732
  }
20684
20733
  }
20685
20734
  function markNulledOutRequiredFields(record, fields) {
@@ -20706,7 +20755,7 @@ function _markMissingPath(record, path) {
20706
20755
  return;
20707
20756
  }
20708
20757
  const fieldValueValue = fieldValueRepresentation.object(fieldName);
20709
- if (isPending(fieldValueValue)) {
20758
+ if (fieldValueRepresentation.isPending(fieldName)) {
20710
20759
  writeMissingFieldToStore(fieldValueRepresentation, fieldName);
20711
20760
  return;
20712
20761
  }
@@ -20736,7 +20785,8 @@ function _markMissingPath(record, path) {
20736
20785
  function writeMissingFieldToStore(field, fieldName) {
20737
20786
  // TODO [W-6900046]: remove cast, make RecordRepresentationNormalized['fields'] accept
20738
20787
  // an undefined/non-present __ref if isMissing is present
20739
- // TODO [W-15867870]: JHORST add support for isMissing on graphnode object
20788
+ // Note that GraphNodes are frozen when NODE_ENV != production.
20789
+ // Use with care.
20740
20790
  field.write(fieldName, {
20741
20791
  __state: {
20742
20792
  isMissing: true,
@@ -26503,6 +26553,12 @@ function keyBuilderFromType$r(luvio, object) {
26503
26553
  function normalize$A(input, existing, path, luvio, store, timestamp) {
26504
26554
  return input;
26505
26555
  }
26556
+ const DRAFTS_SELECTION = {
26557
+ kind: 'Object',
26558
+ opaque: true,
26559
+ name: 'drafts',
26560
+ required: false,
26561
+ };
26506
26562
  const select$1A = function QuickActionExecutionRepresentationSelect() {
26507
26563
  return {
26508
26564
  kind: 'Fragment',
@@ -26534,7 +26590,7 @@ const select$1A = function QuickActionExecutionRepresentationSelect() {
26534
26590
  {
26535
26591
  name: 'successMessage',
26536
26592
  kind: 'Scalar'
26537
- }
26593
+ }, DRAFTS_SELECTION,
26538
26594
  ]
26539
26595
  };
26540
26596
  };
@@ -44130,7 +44186,7 @@ withDefaultLuvio((luvio) => {
44130
44186
  throttle(60, 60000, setupNotifyAllListRecordUpdateAvailable(luvio));
44131
44187
  throttle(60, 60000, setupNotifyAllListInfoSummaryUpdateAvailable(luvio));
44132
44188
  });
44133
- // version: 1.302.0-5fb014108f
44189
+ // version: 1.304.0-d87b57badb
44134
44190
 
44135
44191
  var ldsIdempotencyWriteDisabled = {
44136
44192
  isOpen: function (e) {
@@ -45814,6 +45870,32 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
45814
45870
  }, revivingStore).finally(() => {
45815
45871
  });
45816
45872
  };
45873
+ const expirePossibleStaleRecords = async function (keys$1, config, refresh) {
45874
+ validateNotDisposed();
45875
+ const metadataKeys = keys$1.map(serializeStructuredKey);
45876
+ const now = Date.now();
45877
+ const entries = await durableStore.getMetadata(metadataKeys, DefaultDurableSegment);
45878
+ if (entries === undefined || keys$8(entries).length === 0) {
45879
+ return environment.expirePossibleStaleRecords(keys$1);
45880
+ }
45881
+ let metaDataChanged = false;
45882
+ const metadataEntries = metadataKeys.reduce((accu, key) => {
45883
+ const metadataEntry = entries[key];
45884
+ if (metadataEntry.metadata !== undefined) {
45885
+ const metadata = { ...metadataEntry.metadata, expirationTimestamp: now };
45886
+ accu[key] = { metadata };
45887
+ metaDataChanged = true;
45888
+ }
45889
+ return accu;
45890
+ }, {});
45891
+ if (metaDataChanged) {
45892
+ await durableStore.setMetadata(metadataEntries, DefaultDurableSegment);
45893
+ }
45894
+ if (config !== undefined && refresh !== undefined) {
45895
+ return environment.refreshPossibleStaleRecords(config, refresh);
45896
+ }
45897
+ return Promise.resolve();
45898
+ };
45817
45899
  // set the default cache policy of the base environment
45818
45900
  environment.setDefaultCachePolicy({
45819
45901
  type: 'stale-while-revalidate',
@@ -45846,6 +45928,7 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
45846
45928
  handleErrorResponse: { value: handleErrorResponse },
45847
45929
  getNotifyChangeStoreEntries: { value: getNotifyChangeStoreEntries },
45848
45930
  notifyStoreUpdateAvailable: { value: notifyStoreUpdateAvailable },
45931
+ expirePossibleStaleRecords: { value: expirePossibleStaleRecords },
45849
45932
  });
45850
45933
  }
45851
45934
 
@@ -49892,7 +49975,7 @@ class DurableDraftQueue {
49892
49975
  if (status === DraftActionStatus.Error) {
49893
49976
  this.state = DraftQueueState.Error;
49894
49977
  this.processingAction = undefined;
49895
- this.notifyChangedListeners({
49978
+ await this.notifyChangedListeners({
49896
49979
  type: DraftQueueEventType.ActionFailed,
49897
49980
  action: action,
49898
49981
  });
@@ -49908,7 +49991,7 @@ class DurableDraftQueue {
49908
49991
  if (this.state === DraftQueueState.Waiting) {
49909
49992
  this.state = DraftQueueState.Started;
49910
49993
  }
49911
- this.notifyChangedListeners({
49994
+ await this.notifyChangedListeners({
49912
49995
  type: DraftQueueEventType.ActionUploading,
49913
49996
  action: { ...action, status: DraftActionStatus.Uploading },
49914
49997
  });
@@ -49985,6 +50068,31 @@ class DurableDraftQueue {
49985
50068
  await this.startQueue();
49986
50069
  }
49987
50070
  }
50071
+ async updateDraftAction(action) {
50072
+ // stop queue manually
50073
+ this.stopQueueManually();
50074
+ const actionStatus = await this.statusOfAction(action.id);
50075
+ if (actionStatus === DraftActionStatus.Uploading) {
50076
+ return Promise.reject('cannot update an uploading action');
50077
+ }
50078
+ // save the action into the draft store
50079
+ await this.draftStore.writeAction(action);
50080
+ // make the handler replay these drafts on the record
50081
+ const handler = this.getHandler(action.handler);
50082
+ const queue = await this.getQueueActions();
50083
+ await handler.handleActionEnqueued(action, queue);
50084
+ // start queue safely
50085
+ return this.startQueueSafe();
50086
+ }
50087
+ async statusOfAction(actionId) {
50088
+ const queue = await this.getQueueActions();
50089
+ const actions = queue.filter((action) => action.id === actionId);
50090
+ if (actions.length === 0) {
50091
+ return Promise.reject('cannot update non-existent action');
50092
+ }
50093
+ const action = actions[0];
50094
+ return action.status;
50095
+ }
49988
50096
  replaceAction(targetActionId, sourceActionId) {
49989
50097
  return this.replaceOrMergeActions(targetActionId, sourceActionId, false);
49990
50098
  }
@@ -50015,17 +50123,21 @@ class DurableDraftQueue {
50015
50123
  });
50016
50124
  return action;
50017
50125
  }
50018
- scheduleRetryWithSpecifiedDelay(retryDelayInMs) {
50126
+ async scheduleRetryWithSpecifiedDelay(retryDelayInMs) {
50127
+ await this.notifyChangedListeners({
50128
+ type: DraftQueueEventType.QueueStateChanged,
50129
+ state: DraftQueueState.Waiting,
50130
+ });
50019
50131
  this.timeoutHandler = setTimeout(() => {
50020
50132
  if (this.state !== DraftQueueState.Stopped) {
50021
50133
  this.processNextAction();
50022
50134
  }
50023
50135
  }, retryDelayInMs);
50024
50136
  }
50025
- scheduleRetry() {
50137
+ async scheduleRetry() {
50026
50138
  const newInterval = this.retryIntervalMilliseconds * 2;
50027
50139
  this.retryIntervalMilliseconds = Math.min(Math.max(newInterval, this.minimumRetryInterval), this.maximumRetryInterval);
50028
- this.scheduleRetryWithSpecifiedDelay(this.retryIntervalMilliseconds);
50140
+ return this.scheduleRetryWithSpecifiedDelay(this.retryIntervalMilliseconds);
50029
50141
  }
50030
50142
  async getActionsForReplaceOrMerge(targetActionId, sourceActionId) {
50031
50143
  const actions = await this.getQueueActions();
@@ -50151,7 +50263,8 @@ class DurableDraftStore {
50151
50263
  const actionArray = [];
50152
50264
  for (let i = 0, len = keys$1.length; i < len; i++) {
50153
50265
  const key = keys$1[i];
50154
- actionArray.push(draftStore[key]);
50266
+ // clone draft so we don't expose the internal draft store
50267
+ actionArray.push(clone$1(draftStore[key]));
50155
50268
  }
50156
50269
  return actionArray;
50157
50270
  });
@@ -50786,6 +50899,7 @@ var DraftQueueOperationType;
50786
50899
  DraftQueueOperationType["ItemUpdated"] = "updated";
50787
50900
  DraftQueueOperationType["QueueStarted"] = "started";
50788
50901
  DraftQueueOperationType["QueueStopped"] = "stopped";
50902
+ DraftQueueOperationType["QueueWaiting"] = "waiting";
50789
50903
  })(DraftQueueOperationType || (DraftQueueOperationType = {}));
50790
50904
  /**
50791
50905
  * Converts the internal DraftAction's ResourceRequest into
@@ -50828,6 +50942,16 @@ function toQueueState(queue) {
50828
50942
  };
50829
50943
  }
50830
50944
  class DraftManager {
50945
+ shouldEmitEvent(event) {
50946
+ // Waiting events cannot be emitted prior to 252 native clients
50947
+ // TODO [W-16102411]: we can safely remove this backwards compatible code in 256
50948
+ if (isDraftQueueStateChangeEvent(event) &&
50949
+ event.state === DraftQueueState.Waiting &&
50950
+ this.listenerVersion === undefined) {
50951
+ return false;
50952
+ }
50953
+ return this.draftEventsShouldBeEmitted.includes(event.type);
50954
+ }
50831
50955
  constructor(draftQueue) {
50832
50956
  this.listeners = [];
50833
50957
  this.draftEventsShouldBeEmitted = [
@@ -50841,7 +50965,7 @@ class DraftManager {
50841
50965
  ];
50842
50966
  this.draftQueue = draftQueue;
50843
50967
  draftQueue.registerOnChangedListener((event) => {
50844
- if (this.draftEventsShouldBeEmitted.includes(event.type)) {
50968
+ if (this.shouldEmitEvent(event)) {
50845
50969
  return this.callListeners(event);
50846
50970
  }
50847
50971
  return Promise.resolve();
@@ -50871,6 +50995,8 @@ class DraftManager {
50871
50995
  return DraftQueueOperationType.QueueStarted;
50872
50996
  case DraftQueueState.Stopped:
50873
50997
  return DraftQueueOperationType.QueueStopped;
50998
+ case DraftQueueState.Waiting:
50999
+ return DraftQueueOperationType.QueueWaiting;
50874
51000
  default:
50875
51001
  throw Error('Unsupported event type');
50876
51002
  }
@@ -50928,7 +51054,8 @@ class DraftManager {
50928
51054
  *
50929
51055
  * @param listener The listener closure to subscribe to changes
50930
51056
  */
50931
- registerDraftQueueChangedListener(listener) {
51057
+ registerDraftQueueChangedListener(listener, version = undefined) {
51058
+ this.listenerVersion = version;
50932
51059
  this.listeners.push(listener);
50933
51060
  return () => {
50934
51061
  this.listeners = this.listeners.filter((l) => {
@@ -50955,6 +51082,60 @@ class DraftManager {
50955
51082
  };
50956
51083
  });
50957
51084
  }
51085
+ async mergePerformQuickAction(actionId, fields) {
51086
+ if (!this.isValidFieldMap(fields)) {
51087
+ return Promise.reject('fields is not valid');
51088
+ }
51089
+ const queue = await this.draftQueue.getQueueActions();
51090
+ const actions = queue.filter((action) => action.id === actionId);
51091
+ if (actions.length === 0) {
51092
+ return Promise.reject('cannot edit non-existent action');
51093
+ }
51094
+ const action = actions[0];
51095
+ if (!this.isPerformQuickActionDraft(action, 'post')) {
51096
+ return Promise.reject('cannot edit incompatible action type or uploading actions');
51097
+ }
51098
+ action.data.body.fields = { ...action.data.body.fields, ...fields };
51099
+ await this.draftQueue.updateDraftAction(action);
51100
+ return this.buildDraftQueueItem(action);
51101
+ }
51102
+ isValidFieldMap(fields) {
51103
+ const keys$1 = keys$6(fields);
51104
+ const validTypes = ['string', 'number', 'null', 'boolean'];
51105
+ for (let i = 0; i < keys$1.length; i++) {
51106
+ const key = keys$1[i];
51107
+ const value = fields[key];
51108
+ if (!validTypes.includes(typeof value)) {
51109
+ return false;
51110
+ }
51111
+ }
51112
+ return true;
51113
+ }
51114
+ isPerformQuickActionDraft(action, method) {
51115
+ const data = action.data;
51116
+ const isPerformQuickAction = data.basePath.startsWith('/ui-api/actions/perform-quick-action/');
51117
+ const methodMatches = data.method === method;
51118
+ const notUploading = action.status !== DraftActionStatus.Uploading;
51119
+ return isPerformQuickAction && methodMatches && notUploading;
51120
+ }
51121
+ async mergePerformUpdateRecordQuickAction(actionId, fields) {
51122
+ if (!this.isValidFieldMap(fields)) {
51123
+ return Promise.reject('fields is not valid');
51124
+ }
51125
+ const queue = await this.draftQueue.getQueueActions();
51126
+ const actions = queue.filter((action) => action.id === actionId);
51127
+ if (actions.length === 0) {
51128
+ return Promise.reject('cannot edit non-existent action');
51129
+ }
51130
+ const action = actions[0];
51131
+ if (!this.isPerformQuickActionDraft(action, 'patch')) {
51132
+ return Promise.reject('cannot edit incompatible action type or uploading actions');
51133
+ }
51134
+ const data = action.data;
51135
+ data.body.fields = { ...data.body.fields, ...fields };
51136
+ await this.draftQueue.updateDraftAction(action);
51137
+ return this.buildDraftQueueItem(action);
51138
+ }
50958
51139
  buildDraftQueueItem(action) {
50959
51140
  const operationType = getOperationTypeFrom(action);
50960
51141
  const { id, status, timestamp, targetId, metadata } = action;
@@ -51262,6 +51443,12 @@ function getDenormalizedRecord(recordKey, durableStore) {
51262
51443
  function isStoreRecordError(storeRecord) {
51263
51444
  return storeRecord.__type === 'error';
51264
51445
  }
51446
+ function isDraftFieldPending(field) {
51447
+ return !!(field.__state && field.__state.pending === true);
51448
+ }
51449
+ function isDraftFieldMissing(field) {
51450
+ return !!(field.__state && field.__state.isMissing === true);
51451
+ }
51265
51452
 
51266
51453
  /**
51267
51454
  * Checks if a resource request is a GET method on the record endpoint
@@ -51953,12 +52140,9 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
51953
52140
  }
51954
52141
  const { dataType, relationshipName, referenceToInfos } = fieldInfo;
51955
52142
  const draftFieldNode = record.fields[draftField];
51956
- // JHORST: revisit this logic
51957
52143
  // do not try to apply drafts on nodes that are pending or missing
51958
- if (draftFieldNode.__state !== undefined) {
51959
- if (draftFieldNode.__state.pending === true ||
51960
- draftFieldNode.__state.isMissing === true)
51961
- continue;
52144
+ if (isDraftFieldPending(draftFieldNode) || isDraftFieldMissing(draftFieldNode)) {
52145
+ continue;
51962
52146
  }
51963
52147
  const draftFieldValue = draftFieldNode.value;
51964
52148
  if (dataType === 'Reference' && relationshipName !== null) {
@@ -52753,6 +52937,7 @@ class QuickActionExecutionRepresentationHandler extends AbstractResourceRequestA
52753
52937
  isCreated: true,
52754
52938
  isSuccess: true,
52755
52939
  successMessage: `record created.`,
52940
+ drafts: { draftActionId: action.id },
52756
52941
  });
52757
52942
  }
52758
52943
  getDraftMetadata(_key) {
@@ -52870,6 +53055,7 @@ class UpdateRecordQuickActionExecutionRepresentationHandler extends AbstractReso
52870
53055
  isCreated: false,
52871
53056
  isSuccess: true,
52872
53057
  successMessage: `record updated.`,
53058
+ drafts: { draftActionId: action.id },
52873
53059
  });
52874
53060
  }
52875
53061
  async getDraftMetadata(key) {
@@ -52947,7 +53133,29 @@ function isCreateContentDocumentAndVersionDraftAdapterEvent(customEvent) {
52947
53133
  return customEvent.namespace === CONTENT_DOCUMENT_AND_VERSION_NAMESPACE;
52948
53134
  }
52949
53135
 
53136
+ // so eslint doesn't complain about nimbus
53137
+ /* global __nimbus */
52950
53138
  const ContentDocumentCompositeKeyPrefix = 'UiApi::ContentDocumentCompositeRepresentation:';
53139
+ function chunkToBase64(chunk) {
53140
+ let binary = '';
53141
+ const chunkSize = 32 * 1024;
53142
+ for (let i = 0; i < chunk.length; i += chunkSize) {
53143
+ binary += String.fromCharCode.apply(null, chunk.subarray(i, i + chunkSize));
53144
+ }
53145
+ return btoa(binary);
53146
+ }
53147
+ async function streamBufferToBinaryStore(binaryStore, buffer, mimeType) {
53148
+ const uri = await binaryStore.createStream(mimeType);
53149
+ const bufferSize = 64 * 1024; // 64k buffer size
53150
+ const uint8Array = new Uint8Array(buffer);
53151
+ for (let offset = 0; offset < uint8Array.length; offset += bufferSize) {
53152
+ const chunk = uint8Array.subarray(offset, Math.min(offset + bufferSize, uint8Array.length));
53153
+ const base64Chunk = chunkToBase64(chunk);
53154
+ await binaryStore.writeToStream(uri, base64Chunk);
53155
+ }
53156
+ await binaryStore.closeStream(uri);
53157
+ return uri;
53158
+ }
52951
53159
  function createContentDocumentAndVersionDraftAdapterFactory(luvio, binaryStore, actionHandler) {
52952
53160
  const overriddenLuvio = buildLuvioOverrideForDraftAdapters(luvio, actionHandler, (key) => {
52953
53161
  // if the key is for our top-level response shape
@@ -52970,7 +53178,14 @@ function createContentDocumentAndVersionDraftAdapterFactory(luvio, binaryStore,
52970
53178
  const { fileData } = config;
52971
53179
  const { name, size, type } = fileData;
52972
53180
  const buffer = await fileData.arrayBuffer();
52973
- const uri = await binaryStore.store(new Uint8Array(buffer), type, size);
53181
+ var uri;
53182
+ // see if the new chunking API exists; if it doesn't, fall back to the memory-intensive mobile API
53183
+ if (!__nimbus.plugins.LdsBinaryStorePlugin.createStream) {
53184
+ uri = await binaryStore.store(new Uint8Array(buffer), type, size);
53185
+ }
53186
+ else {
53187
+ uri = await streamBufferToBinaryStore(binaryStore, buffer, type);
53188
+ }
52974
53189
  config.fileData = {
52975
53190
  isFileReference: true,
52976
53191
  handle: uri,
@@ -53809,7 +54024,7 @@ function recordLoaderFactory(query) {
53809
54024
  return new DataLoader(batchRecordQuery);
53810
54025
  }
53811
54026
 
53812
- function createContext(store, objectInfos, eventEmitter, settings, snapshot, draftFunctions) {
54027
+ function createContext(store, objectInfos, eventEmitter, settings, snapshot, mappedCursors = new Map(), draftFunctions) {
53813
54028
  store.query.bind(store);
53814
54029
  const query = (sql, params) => {
53815
54030
  const now = Date.now();
@@ -53836,7 +54051,9 @@ function createContext(store, objectInfos, eventEmitter, settings, snapshot, dra
53836
54051
  Record,
53837
54052
  snapshot,
53838
54053
  seenRecordIds: new Set(),
54054
+ possibleStaleRecordMap: new Map(),
53839
54055
  draftFunctions,
54056
+ mappedCursors,
53840
54057
  };
53841
54058
  }
53842
54059
 
@@ -54447,7 +54664,6 @@ function isTodayStartOfWeek() {
54447
54664
 
54448
54665
  const JSON_EXTRACT_PATH_INGESTION_TIMESTAMP = '$.ingestionTimestamp';
54449
54666
  const JSON_EXTRACT_PATH_INGESTION_APINAME = '$.apiName';
54450
- const JSON_EXTRACT_PATH_DRAFTS = '$.drafts';
54451
54667
 
54452
54668
  const MultiPickListValueSeparator = ';';
54453
54669
  function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draftFunctions) {
@@ -54988,14 +55204,10 @@ function buildQuery(config) {
54988
55204
  const predicates = buildPredicates(config);
54989
55205
  const orderBy = buildOrderBy(config);
54990
55206
  const sql = `
54991
- SELECT "${config.alias}".data
55207
+ SELECT "${config.alias}".data, "${config.alias}".metadata
54992
55208
  FROM lds_data "${config.alias}" ${joins.sql}
54993
55209
  WHERE "${config.alias}".key like 'UiApi::RecordRepresentation:%'
54994
55210
  AND json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_INGESTION_APINAME}') = '${config.alias}'
54995
- AND (
54996
- json_extract("${config.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ?
54997
- OR json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL
54998
- )
54999
55211
  ${predicates.sql}
55000
55212
  ${orderBy.sql}
55001
55213
  LIMIT ?
@@ -55007,7 +55219,6 @@ function buildQuery(config) {
55007
55219
  const bindings = [
55008
55220
  // bindings from predicates on joins
55009
55221
  ...joins.bindings,
55010
- config.ingestionTimestamp,
55011
55222
  // where clause and parent scope bindings
55012
55223
  ...predicates.bindings,
55013
55224
  // limit binding
@@ -55034,29 +55245,19 @@ function buildJoins(config) {
55034
55245
  if (allJoins.length === 0)
55035
55246
  return { sql, bindings };
55036
55247
  sql = allJoins.reduce((joinAccumulator, join) => {
55037
- let timestampAdded = false;
55038
55248
  const joinConditions = join.conditions.reduce((conditionAccumulator, condition) => {
55039
55249
  let joined_sql;
55040
- const joinMetadataTimestamp = ` AND (json_extract("${join.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ? OR json_extract("${join.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL)`;
55041
55250
  // predicate on a value, use the newly joined table
55042
55251
  if ('type' in condition) {
55043
55252
  const { sql, binding } = predicateToSQL(condition, join.alias);
55044
- joined_sql = ` AND ${sql}${timestampAdded ? '' : joinMetadataTimestamp}`;
55253
+ joined_sql = ` AND ${sql}`;
55045
55254
  bindings.push(...binding);
55046
- if (timestampAdded === false) {
55047
- bindings.push(config.ingestionTimestamp);
55048
- timestampAdded = true;
55049
- }
55050
55255
  }
55051
55256
  else {
55052
55257
  // predicate on a path
55053
55258
  const left = ` AND json_extract("${join.to}".data, '${condition.leftPath}')`;
55054
55259
  const right = `json_extract("${join.alias}".data, '${condition.rightPath}')`;
55055
- joined_sql = `${left} = ${right}${timestampAdded ? '' : joinMetadataTimestamp}`;
55056
- if (timestampAdded === false) {
55057
- bindings.push(config.ingestionTimestamp);
55058
- timestampAdded = true;
55059
- }
55260
+ joined_sql = `${left} = ${right}`;
55060
55261
  }
55061
55262
  conditionAccumulator += joined_sql;
55062
55263
  return conditionAccumulator;
@@ -55725,11 +55926,15 @@ async function readIngestionTimestampForKey(key, query) {
55725
55926
  }
55726
55927
  return ingestionTimestamp;
55727
55928
  }
55728
- async function readPaginationMetadataForKey(key, query) {
55729
- const sql = `SELECT data FROM lds_data WHERE key=?`;
55730
- const results = await query(sql, [key + '__pagination']);
55731
- const [paginationMetadata] = results.rows.map((row) => parse$3(row[0]));
55732
- return paginationMetadata || {};
55929
+ function isObjectDefinitionNode(node) {
55930
+ const { kind } = node;
55931
+ return typeof kind === 'string' && kind === 'OperationDefinition';
55932
+ }
55933
+ function operationNodeAncestor(ancestors) {
55934
+ let operationNode = ancestors.find((a) => {
55935
+ return !(a instanceof Array) && isObjectDefinitionNode(a);
55936
+ });
55937
+ return operationNode;
55733
55938
  }
55734
55939
 
55735
55940
  function findSpanningField(name) {
@@ -55930,44 +56135,87 @@ function atobPolyfill(data) {
55930
56135
  const base64encode = typeof btoa === 'function' ? btoa : btoaPolyfill;
55931
56136
  const base64decode = typeof atob === 'function' ? atob : atobPolyfill;
55932
56137
 
56138
+ // this truthy value is used to indicate a premature end of results
56139
+ const EARLY_END = 1;
55933
56140
  function cursorResolver(source) {
55934
- return encodeV1Cursor(source.index);
56141
+ let cursor = {
56142
+ i: source.index,
56143
+ };
56144
+ if (source.earlyEnd) {
56145
+ cursor.e = EARLY_END;
56146
+ }
56147
+ return encodeV1Cursor(cursor);
55935
56148
  }
55936
56149
  function pageInfoResolver(source) {
55937
56150
  if (source.records.length === 0) {
56151
+ // we may have found no records, but if more exist we need to
56152
+ // return a valid cursor that can be passed as the next `after`
56153
+ if (source.earlyEnd) {
56154
+ return {
56155
+ startCursor: null,
56156
+ endCursor: encodeV1Cursor({
56157
+ i: source.offset,
56158
+ e: EARLY_END,
56159
+ }),
56160
+ hasNextPage: source.hasNextPage,
56161
+ };
56162
+ }
55938
56163
  return {
55939
56164
  startCursor: null,
55940
56165
  endCursor: null,
55941
- hasNextPage: false,
56166
+ hasNextPage: source.hasNextPage,
55942
56167
  };
55943
56168
  }
55944
56169
  let startIndex = source.records[0].index;
56170
+ let startCursor = {
56171
+ i: startIndex,
56172
+ };
55945
56173
  let endIndex = source.records[source.records.length - 1].index;
56174
+ let endCursor = {
56175
+ i: endIndex,
56176
+ };
56177
+ if (source.earlyEnd) {
56178
+ startCursor.e = EARLY_END;
56179
+ endCursor.e = EARLY_END;
56180
+ }
55946
56181
  return {
55947
- startCursor: encodeV1Cursor(startIndex),
55948
- endCursor: encodeV1Cursor(endIndex),
56182
+ startCursor: encodeV1Cursor(startCursor),
56183
+ endCursor: encodeV1Cursor(endCursor),
55949
56184
  hasNextPage: source.hasNextPage,
55950
56185
  };
55951
56186
  }
55952
56187
  function pageResultCountResolver(source) {
55953
56188
  return source.records.length;
55954
56189
  }
55955
- function encodeV1Cursor(index) {
55956
- return base64encode(`v1:${index}`);
56190
+ function isLocalCursor(maybeCursor) {
56191
+ return (!!maybeCursor &&
56192
+ typeof maybeCursor === 'object' &&
56193
+ 'i' in maybeCursor &&
56194
+ typeof maybeCursor.i === 'number');
55957
56195
  }
55958
- const cursorRegex = /^v1:(?<index>\d+)$/;
56196
+ function encodeV1Cursor(cursor) {
56197
+ return base64encode(stringify$3(cursor));
56198
+ }
56199
+ const CURSOR_PARSE_ERROR = 'Unable to parse cursor';
55959
56200
  function decodeV1Cursor(base64cursor) {
55960
- const cursor = base64decode(base64cursor);
55961
- if (!cursor) {
56201
+ let maybeCursor;
56202
+ try {
56203
+ const cursorString = base64decode(base64cursor);
56204
+ maybeCursor = parse$3(cursorString);
56205
+ }
56206
+ catch (error) {
56207
+ let message = CURSOR_PARSE_ERROR;
56208
+ if (error instanceof Error) {
56209
+ message += ': ' + error.message;
56210
+ }
55962
56211
  // eslint-disable-next-line @salesforce/lds/no-error-in-production
55963
- throw new Error('Unable to parse cursor');
56212
+ throw new Error(message);
55964
56213
  }
55965
- const found = cursor.match(cursorRegex);
55966
- if (!found || !found.groups) {
56214
+ if (!isLocalCursor(maybeCursor)) {
55967
56215
  // eslint-disable-next-line @salesforce/lds/no-error-in-production
55968
- throw new Error('Unable to parse cursor');
56216
+ throw new Error(CURSOR_PARSE_ERROR);
55969
56217
  }
55970
- return Number(found.groups.index);
56218
+ return maybeCursor;
55971
56219
  }
55972
56220
  /**
55973
56221
  * Check the selections for any selection matching `pageInfo { hasNextPage }`
@@ -56005,6 +56253,164 @@ function selectionIncludesHasNextPage(selections, fragments) {
56005
56253
  return false;
56006
56254
  }
56007
56255
 
56256
+ const END_CURSOR = '__END__';
56257
+ // find the closest matching cursor in the server pagination metadata
56258
+ function mapCursorValue(originalValue, paginationMetadata) {
56259
+ let mappedValue = null;
56260
+ if (!originalValue) {
56261
+ return mappedValue;
56262
+ }
56263
+ // flip the pagination metadata into an array by index.
56264
+ let cursors = [];
56265
+ for (const [cursor, index] of Object.entries(paginationMetadata)) {
56266
+ if (index === undefined)
56267
+ continue;
56268
+ cursors[index] = cursor;
56269
+ }
56270
+ let cursor = decodeV1Cursor(originalValue);
56271
+ // cursors containe 1-based indexes, adjust back to 0-based
56272
+ let index = cursor.i - 1;
56273
+ if (
56274
+ // cursor.e being truthy means we had premature end of results and
56275
+ // should pin to the last known server cursor
56276
+ !cursor.e &&
56277
+ // check that the index we have is within the bounds of known cursors
56278
+ index >= 0 &&
56279
+ index < cursors.length &&
56280
+ // and make sure the cursor is not the server end marker
56281
+ cursors[index] !== END_CURSOR) {
56282
+ mappedValue = cursors[index];
56283
+ }
56284
+ else {
56285
+ // in this case, either our local cursor is beyond the max server cursor, or
56286
+ // the local cursor precedes the max server cursor and we ran out of locally
56287
+ // cached results. either way, find the last known server cursor and map to that.
56288
+ for (let i = cursors.length; i > 0; --i) {
56289
+ let cursor = cursors[i - 1];
56290
+ if (cursor !== END_CURSOR) {
56291
+ mappedValue = cursor;
56292
+ break;
56293
+ }
56294
+ }
56295
+ }
56296
+ return mappedValue;
56297
+ }
56298
+ // map all pagination cursors in the document
56299
+ async function mapPaginationCursors(originalAST, variables, store) {
56300
+ // first pass, identify record query cache keys for reading pagination metadata
56301
+ let requiredPaginationMetadataKeys = [];
56302
+ visit$1(originalAST, {
56303
+ Field(node, _key, _parent, _path, ancestors) {
56304
+ // is it a record query?
56305
+ if (!isRecordQuery(node)) {
56306
+ return;
56307
+ }
56308
+ // does it have a defined `after` argument?
56309
+ let after = node.arguments &&
56310
+ node.arguments.find((a) => {
56311
+ return a.name.value === 'after';
56312
+ });
56313
+ if (after && (after.value.kind === 'StringValue' || after.value.kind === 'Variable')) {
56314
+ let operationNode = operationNodeAncestor(ancestors);
56315
+ if (!operationNode) {
56316
+ return false;
56317
+ }
56318
+ let key = buildKeyStringForRecordQuery(operationNode, variables, node.arguments || [], node.name.value);
56319
+ requiredPaginationMetadataKeys.push(key);
56320
+ }
56321
+ // don't need to descend into this node
56322
+ return false;
56323
+ },
56324
+ });
56325
+ // read pagination metadata for identified record queries
56326
+ let paginationMetadataMap = await readPaginationMetadataForKeys(requiredPaginationMetadataKeys, store.query.bind(store));
56327
+ // holds the original cursor values that were mapped back to server cursors
56328
+ let mappedCursors = new Map();
56329
+ // rewrite nodes/variables with mapped cursors now that we read the pagination metadata
56330
+ let ast = visit$1(originalAST, {
56331
+ Field(node, _key, _parent, _path, ancestors) {
56332
+ // is it a record query?
56333
+ if (!isRecordQuery(node)) {
56334
+ // not returning false, we might be in the parent of a record query
56335
+ return;
56336
+ }
56337
+ // does it have a defined `after` argument?
56338
+ if (!node.arguments)
56339
+ return false;
56340
+ let after = node.arguments.find((a) => {
56341
+ return a.name.value === 'after';
56342
+ });
56343
+ if (!after)
56344
+ return false;
56345
+ if (after.value.kind === 'StringValue' || after.value.kind === 'Variable') {
56346
+ let operationNode = operationNodeAncestor(ancestors);
56347
+ if (!operationNode) {
56348
+ return false;
56349
+ }
56350
+ let key = buildKeyStringForRecordQuery(operationNode, variables, node.arguments || [], node.name.value);
56351
+ // pagination metadata may be missing, e.g. due to being offline
56352
+ let paginationMetadata = paginationMetadataMap.get(key) || {};
56353
+ if (after.value.kind === 'StringValue') {
56354
+ let originalValue = after.value.value;
56355
+ mappedCursors.set(key, originalValue);
56356
+ let mappedValue = mapCursorValue(originalValue, paginationMetadata);
56357
+ if (!mappedValue) {
56358
+ // there were no results from the server, remove after argument
56359
+ return {
56360
+ ...node,
56361
+ arguments: node.arguments.filter((a) => a !== after),
56362
+ };
56363
+ }
56364
+ // return a new replacement node
56365
+ return {
56366
+ ...node,
56367
+ arguments: node.arguments.map((a) => {
56368
+ if (a !== after)
56369
+ return a;
56370
+ return {
56371
+ ...a,
56372
+ value: {
56373
+ kind: 'StringValue',
56374
+ value: mappedValue,
56375
+ },
56376
+ };
56377
+ }),
56378
+ };
56379
+ }
56380
+ else if (after.value.kind === 'Variable') {
56381
+ // rewrite the variable
56382
+ let variableName = after.value.name.value;
56383
+ let variableValue = variables[variableName];
56384
+ mappedCursors.set(key, variableValue);
56385
+ let mappedValue = mapCursorValue(variableValue, paginationMetadata);
56386
+ variables[variableName] = mappedValue;
56387
+ }
56388
+ // don't need to descend into this node
56389
+ return false;
56390
+ }
56391
+ },
56392
+ });
56393
+ return {
56394
+ ast,
56395
+ mappedCursors,
56396
+ };
56397
+ }
56398
+ async function readPaginationMetadataForKeys(keys, query) {
56399
+ let metadataMap = new Map();
56400
+ if (keys.length === 0)
56401
+ return metadataMap;
56402
+ const sql = `SELECT key, data FROM lds_data WHERE key in (${Array(keys.length)
56403
+ .fill('?')
56404
+ .join(',')})`;
56405
+ const results = await query(sql, keys.map((k) => k + '__pagination'));
56406
+ for (let row of results.rows) {
56407
+ let key = row[0].replace(/__pagination$/, '');
56408
+ let metadata = parse$3(row[1]);
56409
+ metadataMap.set(key, metadata);
56410
+ }
56411
+ return metadataMap;
56412
+ }
56413
+
56008
56414
  /*
56009
56415
  resolves connections...
56010
56416
  */
@@ -56026,8 +56432,14 @@ async function connectionResolver(obj, args, context, info) {
56026
56432
  const childRelationship = parentObjectInfo &&
56027
56433
  parentObjectInfo.childRelationships.find((rel) => rel.relationshipName === info.fieldName);
56028
56434
  // or emit/throw if we want to report it
56029
- if (!childRelationship)
56030
- return { records: [], hasNextPage: false };
56435
+ if (!childRelationship) {
56436
+ return {
56437
+ records: [],
56438
+ hasNextPage: false,
56439
+ earlyEnd: false,
56440
+ offset: 0,
56441
+ };
56442
+ }
56031
56443
  alias = childRelationship.childObjectApiName;
56032
56444
  childRelationshipFieldName = childRelationship.fieldName;
56033
56445
  }
@@ -56046,7 +56458,12 @@ async function connectionResolver(obj, args, context, info) {
56046
56458
  }
56047
56459
  let offset = 0;
56048
56460
  if (args.after) {
56049
- offset = decodeV1Cursor(args.after) + 1;
56461
+ let originalCursor = context.mappedCursors.get(queryCacheKey);
56462
+ if (!originalCursor) {
56463
+ // eslint-disable-next-line @salesforce/lds/no-error-in-production
56464
+ throw new Error('Internal Error: unable to determine `after` cursor value');
56465
+ }
56466
+ offset = decodeV1Cursor(originalCursor).i;
56050
56467
  }
56051
56468
  // if the query wants to know `hasNextPage` then we need to request 1 additional record
56052
56469
  let selections = info.fieldNodes
@@ -56055,7 +56472,7 @@ async function connectionResolver(obj, args, context, info) {
56055
56472
  let wantsHasNextPage = selectionIncludesHasNextPage(selections, info.fragments);
56056
56473
  let paginationMetadata = undefined;
56057
56474
  if (wantsHasNextPage) {
56058
- paginationMetadata = await readPaginationMetadataForKey(queryCacheKey, query);
56475
+ paginationMetadata = await readPaginationMetadataForKeys([queryCacheKey], query);
56059
56476
  }
56060
56477
  let internalLimit = limit + (wantsHasNextPage ? 1 : 0);
56061
56478
  // Alias starts as entity's ApiName
@@ -56066,36 +56483,60 @@ async function connectionResolver(obj, args, context, info) {
56066
56483
  orderBy: orderByToPredicate(args.orderBy, alias, alias, context.objectInfos),
56067
56484
  limit: internalLimit,
56068
56485
  offset,
56069
- ingestionTimestamp,
56070
56486
  };
56071
56487
  const { sql, bindings } = buildQuery(queryConfig);
56072
56488
  const results = await query(sql, bindings);
56073
56489
  let hasNextPage = false;
56490
+ let earlyEnd = false;
56074
56491
  if (wantsHasNextPage) {
56075
56492
  if (results.rows.length > limit) {
56076
56493
  // more records exist in the cache
56077
56494
  hasNextPage = true;
56078
56495
  results.rows.pop();
56079
56496
  }
56080
- else if (!paginationMetadata || paginationMetadata.__END__ === undefined) {
56497
+ else if (!paginationMetadata ||
56498
+ !paginationMetadata.has(queryCacheKey) ||
56499
+ paginationMetadata.get(queryCacheKey).__END__ === undefined) {
56081
56500
  // more records may exist on the server
56082
56501
  hasNextPage = true;
56502
+ // we hit the end of our local records, so we need to know that we
56503
+ // should start at the end of known server cursors
56504
+ if (results.rows.length < limit) {
56505
+ earlyEnd = true;
56506
+ }
56083
56507
  }
56084
56508
  }
56085
56509
  //map each sql result with the ingestion timestamp to pass it down a level
56086
- let records = results.rows
56087
- .map((row) => parse$3(row[0]))
56088
- .map((recordRepresentation, index) => {
56510
+ let records = results.rows.map((row, index) => {
56511
+ const recordMetadataResult = {
56512
+ recordRepresentation: parse$3(row[0]),
56513
+ metadata: parse$3(row[1]),
56514
+ };
56515
+ const { recordRepresentation, metadata } = recordMetadataResult;
56089
56516
  context.seenRecordIds.add(recordRepresentation.id);
56517
+ if (metadata.ingestionTimestamp < ingestionTimestamp &&
56518
+ recordRepresentation.drafts === undefined) {
56519
+ if (context.possibleStaleRecordMap.has(recordRepresentation.apiName) === false) {
56520
+ context.possibleStaleRecordMap.set(recordRepresentation.apiName, []);
56521
+ }
56522
+ const ids = context.possibleStaleRecordMap.get(recordRepresentation.apiName);
56523
+ if (ids !== undefined) {
56524
+ ids.push(recordRepresentation.id);
56525
+ context.possibleStaleRecordMap.set(recordRepresentation.apiName, ids);
56526
+ }
56527
+ }
56090
56528
  return {
56091
56529
  recordRepresentation,
56092
56530
  ingestionTimestamp,
56093
- index: index + offset,
56531
+ index: index + offset + 1,
56532
+ earlyEnd,
56094
56533
  };
56095
56534
  });
56096
56535
  return {
56097
56536
  records,
56098
56537
  hasNextPage,
56538
+ earlyEnd,
56539
+ offset,
56099
56540
  };
56100
56541
  }
56101
56542
  /**
@@ -56893,7 +57334,7 @@ function getTextAreaType(field) {
56893
57334
  return 'TextAreaValue';
56894
57335
  }
56895
57336
 
56896
- async function evaluate(config, observers, settings, objectInfos, store, snapshot, cache, draftFunctions) {
57337
+ async function evaluate(config, observers, settings, objectInfos, store, snapshot, cache, draftFunctions, mappedCursors) {
56897
57338
  const eventEmitter = createCustomAdapterEventEmitter(GRAPHQL_EVAL_NAMESPACE, observers);
56898
57339
  // this is only wrapped in a try to execute the event after the result was returned
56899
57340
  try {
@@ -56952,7 +57393,7 @@ async function evaluate(config, observers, settings, objectInfos, store, snapsho
56952
57393
  eventEmitter({ type: 'graphql-preconditions-met' });
56953
57394
  // create the resolver request context, runtime values and functions for
56954
57395
  // resolvers to do their job.
56955
- const contextValue = createContext(store, objectInfos, eventEmitter, settings, snapshot, draftFunctions);
57396
+ const contextValue = createContext(store, objectInfos, eventEmitter, settings, snapshot, mappedCursors, draftFunctions);
56956
57397
  // We're building this from scratch from each request. If this becomes a
56957
57398
  // hotspot we can pull it up and memoize it later
56958
57399
  const schema = createSchemaWithCache(objectInfos, cache);
@@ -56977,7 +57418,11 @@ async function evaluate(config, observers, settings, objectInfos, store, snapsho
56977
57418
  seenRecordIds.push(queryString);
56978
57419
  });
56979
57420
  }
56980
- return { result, seenRecordIds };
57421
+ return {
57422
+ result,
57423
+ seenRecordIds,
57424
+ possibleStaleRecordMap: contextValue.possibleStaleRecordMap,
57425
+ };
56981
57426
  }
56982
57427
  finally {
56983
57428
  eventEmitter({ type: 'graphql-eval-end' });
@@ -58701,7 +59146,11 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
58701
59146
  return async function draftAwareGraphQLAdapter(config, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy, requestContext = {}) {
58702
59147
  //create a copy to not accidentally modify the AST in the astResolver map of luvio
58703
59148
  const copy = parse$3(stringify$3(config.query));
59149
+ // the injected ast has extra fields needed for eval in it
58704
59150
  let injectedAST;
59151
+ // the cursor mapped ast is passed upstream so it won't reject on our local cursors
59152
+ let cursorMappedAST;
59153
+ let mappedCursors = new Map();
58705
59154
  let objectInfoNeeded = {};
58706
59155
  let unmappedDraftIDs;
58707
59156
  let internalRequestContext = {
@@ -58717,6 +59166,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
58717
59166
  objectInfos: objectInfoNeeded,
58718
59167
  unmappedDraftIDs,
58719
59168
  } = await injectSyntheticFields(copy, objectInfoService, draftFunctions, config.variables));
59169
+ ({ ast: cursorMappedAST, mappedCursors } = await mapPaginationCursors(injectedAST, config.variables || {}, store));
58720
59170
  if (config.variables) {
58721
59171
  config.variables = replaceDraftIdsInVariables$1(config.variables, draftFunctions, unmappedDraftIDs);
58722
59172
  }
@@ -58748,7 +59198,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
58748
59198
  const nonEvaluatedSnapshot = (await luvio.applyCachePolicy(internalRequestContext, {
58749
59199
  config: {
58750
59200
  ...config,
58751
- query: injectedAST,
59201
+ query: cursorMappedAST,
58752
59202
  },
58753
59203
  luvio,
58754
59204
  gqlEval: true,
@@ -58761,12 +59211,17 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
58761
59211
  : [];
58762
59212
  let gqlResult;
58763
59213
  let seenRecordIds;
59214
+ let possibleStaleRecordMap;
58764
59215
  try {
58765
- ({ result: gqlResult, seenRecordIds } = await evaluate({
59216
+ ({
59217
+ result: gqlResult,
59218
+ seenRecordIds,
59219
+ possibleStaleRecordMap,
59220
+ } = await evaluate({
58766
59221
  ...config,
58767
59222
  //need to create another copy of the ast for future writes
58768
59223
  query: parse$3(stringify$3(injectedAST)),
58769
- }, observers, { userId }, objectInfoNeeded, store, nonEvaluatedSnapshot, graphqlSchemaCache));
59224
+ }, observers, { userId }, objectInfoNeeded, store, nonEvaluatedSnapshot, graphqlSchemaCache, draftFunctions, mappedCursors));
58770
59225
  }
58771
59226
  catch (throwable) {
58772
59227
  const error = throwable;
@@ -58792,13 +59247,18 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
58792
59247
  const seenRecords = createSeenRecords(seenRecordIds, nonEvaluatedSnapshot);
58793
59248
  const recordId = generateUniqueRecordId();
58794
59249
  const rebuildWithLocalEval = async (originalSnapshot) => {
58795
- let { result: rebuildResult, seenRecordIds } = await evaluate({
59250
+ let { result: rebuildResult, seenRecordIds, possibleStaleRecordMap, } = await evaluate({
58796
59251
  ...config,
58797
59252
  query: injectedAST,
58798
- }, observers, { userId }, objectInfoNeeded, store, originalSnapshot, graphqlSchemaCache, draftFunctions);
59253
+ }, observers, { userId }, objectInfoNeeded, store, originalSnapshot, graphqlSchemaCache, draftFunctions, mappedCursors);
58799
59254
  if (!rebuildResult.errors) {
58800
59255
  rebuildResult = removeSyntheticFields(rebuildResult, config.query);
58801
59256
  }
59257
+ let snapshotState = 'Fulfilled';
59258
+ if (possibleStaleRecordMap.size > 0) {
59259
+ initiateStaleRecordRefresh(luvio, possibleStaleRecordMap);
59260
+ snapshotState = 'Stale';
59261
+ }
58802
59262
  if (objectsDeepEqual(rebuildResult, originalSnapshot.data)) {
58803
59263
  return originalSnapshot;
58804
59264
  }
@@ -58807,6 +59267,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
58807
59267
  ...originalSnapshot,
58808
59268
  data: rebuildResult,
58809
59269
  recordId,
59270
+ state: snapshotState,
58810
59271
  seenRecords: createSeenRecords(seenRecordIds, nonEvaluatedSnapshot),
58811
59272
  rebuildWithLocalEval,
58812
59273
  };
@@ -58844,9 +59305,31 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
58844
59305
  },
58845
59306
  };
58846
59307
  }
59308
+ if (possibleStaleRecordMap.size > 0) {
59309
+ initiateStaleRecordRefresh(luvio, possibleStaleRecordMap);
59310
+ resultSnapshot.state = 'Stale';
59311
+ }
58847
59312
  return resultSnapshot;
58848
59313
  };
58849
59314
  }
59315
+ function initiateStaleRecordRefresh(luvio, keyMap) {
59316
+ const staleRecordKeys = from$1(keyMap.values())
59317
+ .flat()
59318
+ .map((id) => `UiApi::RecordRepresentation:${id}`);
59319
+ luvio.storeExpirePossibleStaleRecords(staleRecordKeys, makeGetRecordsConfig(keyMap), getRecordsAdapterFactory(luvio));
59320
+ }
59321
+ function makeGetRecordsConfig(keyMap) {
59322
+ const records = [];
59323
+ keyMap.forEach((recordIds, apiName) => {
59324
+ records.push({
59325
+ recordIds,
59326
+ fields: [`${apiName}.Id`],
59327
+ });
59328
+ });
59329
+ return {
59330
+ records,
59331
+ };
59332
+ }
58850
59333
 
58851
59334
  function environmentAwareGraphQLBatchAdapterFactory(objectInfoService, luvio, isDraftId) {
58852
59335
  return async function environmentAwareGraphQLBatchAdapter(config, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy, requestContext = {}) {
@@ -59992,6 +60475,9 @@ class NimbusDraftQueue {
59992
60475
  removeHandler(_id) {
59993
60476
  return Promise.reject(new Error('Cannot call setMetadata from the NimbusDraftQueue'));
59994
60477
  }
60478
+ updateDraftAction(_action) {
60479
+ return Promise.reject(new Error('Cannot call updateDraftAction from the NimbusDraftQueue'));
60480
+ }
59995
60481
  }
59996
60482
 
59997
60483
  function attachObserversToAdapterRequestContext(observers, adapterRequestContext) {
@@ -61105,6 +61591,21 @@ const NimbusBinaryStore = {
61105
61591
  __nimbus.plugins.LdsBinaryStorePlugin.setCanonicalUrl(uri, canonicalUrl, ttlSeconds, resolve, (err) => reject(errorMessageToError(err)));
61106
61592
  });
61107
61593
  },
61594
+ createStream: function (type) {
61595
+ return new Promise((resolve, reject) => {
61596
+ __nimbus.plugins.LdsBinaryStorePlugin.createStream(type, resolve, (err) => reject(errorMessageToError(err)));
61597
+ });
61598
+ },
61599
+ writeToStream: function (uri, chunk) {
61600
+ return new Promise((resolve, reject) => {
61601
+ __nimbus.plugins.LdsBinaryStorePlugin.writeToStream(uri, chunk, resolve, (err) => reject(errorMessageToError(err)));
61602
+ });
61603
+ },
61604
+ closeStream: function (uri) {
61605
+ return new Promise((resolve, reject) => {
61606
+ __nimbus.plugins.LdsBinaryStorePlugin.closeStream(uri, resolve, (err) => reject(errorMessageToError(err)));
61607
+ });
61608
+ },
61108
61609
  };
61109
61610
 
61110
61611
  /**
@@ -62432,7 +62933,6 @@ let lazyDurableStore;
62432
62933
  let lazyNetworkAdapter;
62433
62934
  let lazyObjectInfoService;
62434
62935
  let lazyGetRecords;
62435
- // TODO [W-123]: JHORST hoist, optimize and test this function
62436
62936
  const shouldFlush = (key, value) => {
62437
62937
  if (!isStoreKeyRecordId$1(key)) {
62438
62938
  return { flushValue: true };
@@ -62603,7 +63103,7 @@ register$1({
62603
63103
  id: '@salesforce/lds-network-adapter',
62604
63104
  instrument: instrument$2,
62605
63105
  });
62606
- // version: 1.302.0-e45992a1f4
63106
+ // version: 1.304.0-aa3e5f9550
62607
63107
 
62608
63108
  const { create: create$3, keys: keys$3 } = Object;
62609
63109
  const { stringify: stringify$1, parse: parse$1 } = JSON;
@@ -82639,7 +83139,7 @@ register$1({
82639
83139
  configuration: { ...configurationForGraphQLAdapters$1 },
82640
83140
  instrument: instrument$1,
82641
83141
  });
82642
- // version: 1.302.0-5fb014108f
83142
+ // version: 1.304.0-d87b57badb
82643
83143
 
82644
83144
  // On core the unstable adapters are re-exported with different names,
82645
83145
  // we want to match them here.
@@ -84895,7 +85395,7 @@ withDefaultLuvio((luvio) => {
84895
85395
  unstable_graphQL_imperative = createImperativeAdapter(luvio, createInstrumentedAdapter(ldsAdapter, adapterMetadata), adapterMetadata);
84896
85396
  graphQLImperative = ldsAdapter;
84897
85397
  });
84898
- // version: 1.302.0-5fb014108f
85398
+ // version: 1.304.0-d87b57badb
84899
85399
 
84900
85400
  var gqlApi = /*#__PURE__*/Object.freeze({
84901
85401
  __proto__: null,
@@ -85630,7 +86130,7 @@ const callbacks$1 = [];
85630
86130
  function register(r) {
85631
86131
  callbacks$1.forEach((callback) => callback(r));
85632
86132
  }
85633
- // version: 1.302.0-e45992a1f4
86133
+ // version: 1.304.0-aa3e5f9550
85634
86134
 
85635
86135
  /**
85636
86136
  * Returns true if the value acts like a Promise, i.e. has a "then" function,
@@ -90592,4 +91092,4 @@ const { luvio } = getRuntime();
90592
91092
  setDefaultLuvio({ luvio });
90593
91093
 
90594
91094
  export { createPrimingSession, draftManager, draftQueue, evictCacheRecordsByIds, evictExpiredCacheEntries, executeAdapter, executeMutatingAdapter, getImperativeAdapterNames, invokeAdapter, invokeAdapterWithDraftToMerge, invokeAdapterWithDraftToReplace, invokeAdapterWithMetadata, nimbusDraftQueue, registerReportObserver, setMetadataTTL, setUiApiRecordTTL, stopEviction, subscribeToAdapter };
90595
- // version: 1.302.0-e45992a1f4
91095
+ // version: 1.304.0-aa3e5f9550