@salesforce/lds-runtime-mobile 1.303.0 → 1.304.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/main.js CHANGED
@@ -20,7 +20,7 @@ import { setupInstrumentation, instrumentAdapter as instrumentAdapter$1, instrum
  import { HttpStatusCode, setBypassDeepFreeze, StoreKeySet, serializeStructuredKey, StringKeyInMemoryStore, Reader, deepFreeze, emitAdapterEvent, createCustomAdapterEventEmitter, StoreKeyMap, isFileReference, Environment, Luvio, InMemoryStore } from '@luvio/engine';
  import excludeStaleRecordsGate from '@salesforce/gate/lds.graphqlEvalExcludeStaleRecords';
  import { parseAndVisit, Kind as Kind$1, buildSchema, isObjectType, defaultFieldResolver, visit, execute, parse as parse$7, extendSchema, isScalarType } from '@luvio/graphql-parser';
- import { RECORD_ID_PREFIX, RECORD_FIELDS_KEY_JUNCTION, RECORD_REPRESENTATION_NAME, extractRecordIdFromStoreKey, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, isStoreKeyRecordViewEntity, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, getRecordId18, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, getObjectInfoDirectoryAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion, getRecordsAdapterFactory } from '@salesforce/lds-adapters-uiapi-mobile';
+ import { RECORD_ID_PREFIX, RECORD_FIELDS_KEY_JUNCTION, RECORD_REPRESENTATION_NAME, extractRecordIdFromStoreKey, keyBuilderQuickActionExecutionRepresentation, ingestQuickActionExecutionRepresentation, keyBuilderContentDocumentCompositeRepresentation, getResponseCacheKeysContentDocumentCompositeRepresentation, keyBuilderFromTypeContentDocumentCompositeRepresentation, ingestContentDocumentCompositeRepresentation, keyBuilderRecord, isStoreKeyRecordViewEntity, getTypeCacheKeysRecord, keyBuilderFromTypeRecordRepresentation, ingestRecord, getRecordId18, getRecordsAdapterFactory, RecordRepresentationRepresentationType, ObjectInfoRepresentationType, getRecordAdapterFactory, getObjectInfoAdapterFactory, getObjectInfosAdapterFactory, getObjectInfoDirectoryAdapterFactory, UiApiNamespace, RecordRepresentationType, RecordRepresentationTTL, RecordRepresentationVersion } from '@salesforce/lds-adapters-uiapi-mobile';
  import ldsIdempotencyWriteDisabled from '@salesforce/gate/lds.idempotencyWriteDisabled';
  import ldsBackdatingEnabled from '@salesforce/gate/lds.backdatingEnabled';
  import FIRST_DAY_OF_WEEK from '@salesforce/i18n/firstDayOfWeek';
@@ -5775,6 +5775,31 @@ class DurableDraftQueue {
  await this.startQueue();
  }
  }
+ async updateDraftAction(action) {
+ // stop queue manually
+ this.stopQueueManually();
+ const actionStatus = await this.statusOfAction(action.id);
+ if (actionStatus === DraftActionStatus.Uploading) {
+ return Promise.reject('cannot update an uploading action');
+ }
+ // save the action into the draft store
+ await this.draftStore.writeAction(action);
+ // make the handler replay these drafts on the record
+ const handler = this.getHandler(action.handler);
+ const queue = await this.getQueueActions();
+ await handler.handleActionEnqueued(action, queue);
+ // start queue safely
+ return this.startQueueSafe();
+ }
+ async statusOfAction(actionId) {
+ const queue = await this.getQueueActions();
+ const actions = queue.filter((action) => action.id === actionId);
+ if (actions.length === 0) {
+ return Promise.reject('cannot update non-existent action');
+ }
+ const action = actions[0];
+ return action.status;
+ }
  replaceAction(targetActionId, sourceActionId) {
  return this.replaceOrMergeActions(targetActionId, sourceActionId, false);
  }
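Taken together, `updateDraftAction` stops the queue, refuses to touch an action that is mid-upload, persists the edit, lets the handler replay drafts, and restarts the queue. A minimal caller sketch (illustrative only; assumes a `DurableDraftQueue` instance and a known draft id):

    // fetch a queued action, modify it, and write it back
    const queue = await draftQueue.getQueueActions();
    const action = queue.find((a) => a.id === draftId);
    action.data.body.fields.Name = 'Edited offline';
    // rejects with 'cannot update an uploading action' if the upload already started
    await draftQueue.updateDraftAction(action);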
@@ -6778,6 +6803,60 @@ class DraftManager {
  };
  });
  }
+ async mergePerformQuickAction(actionId, fields) {
+ if (!this.isValidFieldMap(fields)) {
+ return Promise.reject('fields is not valid');
+ }
+ const queue = await this.draftQueue.getQueueActions();
+ const actions = queue.filter((action) => action.id === actionId);
+ if (actions.length === 0) {
+ return Promise.reject('cannot edit non-existent action');
+ }
+ const action = actions[0];
+ if (!this.isPerformQuickActionDraft(action, 'post')) {
+ return Promise.reject('cannot edit incompatible action type or uploading actions');
+ }
+ action.data.body.fields = { ...action.data.body.fields, ...fields };
+ await this.draftQueue.updateDraftAction(action);
+ return this.buildDraftQueueItem(action);
+ }
+ isValidFieldMap(fields) {
+ const keys$1 = keys$6(fields);
+ const validTypes = ['string', 'number', 'null', 'boolean'];
+ for (let i = 0; i < keys$1.length; i++) {
+ const key = keys$1[i];
+ const value = fields[key];
+ if (!validTypes.includes(typeof value)) {
+ return false;
+ }
+ }
+ return true;
+ }
+ isPerformQuickActionDraft(action, method) {
+ const data = action.data;
+ const isPerformQuickAction = data.basePath.startsWith('/ui-api/actions/perform-quick-action/');
+ const methodMatches = data.method === method;
+ const notUploading = action.status !== DraftActionStatus.Uploading;
+ return isPerformQuickAction && methodMatches && notUploading;
+ }
+ async mergePerformUpdateRecordQuickAction(actionId, fields) {
+ if (!this.isValidFieldMap(fields)) {
+ return Promise.reject('fields is not valid');
+ }
+ const queue = await this.draftQueue.getQueueActions();
+ const actions = queue.filter((action) => action.id === actionId);
+ if (actions.length === 0) {
+ return Promise.reject('cannot edit non-existent action');
+ }
+ const action = actions[0];
+ if (!this.isPerformQuickActionDraft(action, 'patch')) {
+ return Promise.reject('cannot edit incompatible action type or uploading actions');
+ }
+ const data = action.data;
+ data.body.fields = { ...data.body.fields, ...fields };
+ await this.draftQueue.updateDraftAction(action);
+ return this.buildDraftQueueItem(action);
+ }
  buildDraftQueueItem(action) {
  const operationType = getOperationTypeFrom(action);
  const { id, status, timestamp, targetId, metadata } = action;
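`mergePerformQuickAction` (POST drafts) and `mergePerformUpdateRecordQuickAction` (PATCH drafts) are identical except for the method check; both shallow-merge a validated field map over the queued body. A hedged usage sketch (assumes a `DraftManager` instance; note that since `typeof null === 'object'`, only string, number, and boolean values actually pass `isValidFieldMap`):

    const item = await draftManager.mergePerformQuickAction(actionId, {
        Subject: 'Follow up', // merged over any existing Subject value
        Amount: 100,
    });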
@@ -7085,6 +7164,12 @@ function getDenormalizedRecord(recordKey, durableStore) {
  function isStoreRecordError(storeRecord) {
  return storeRecord.__type === 'error';
  }
+ function isDraftFieldPending(field) {
+ return !!(field.__state && field.__state.pending === true);
+ }
+ function isDraftFieldMissing(field) {
+ return !!(field.__state && field.__state.isMissing === true);
+ }

  /**
  * Checks if a resource request is a GET method on the record endpoint
@@ -7776,12 +7861,9 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
  }
  const { dataType, relationshipName, referenceToInfos } = fieldInfo;
  const draftFieldNode = record.fields[draftField];
- // JHORST: revisit this logic
  // do not try to apply drafts on nodes that are pending or missing
- if (draftFieldNode.__state !== undefined) {
- if (draftFieldNode.__state.pending === true ||
- draftFieldNode.__state.isMissing === true)
- continue;
+ if (isDraftFieldPending(draftFieldNode) || isDraftFieldMissing(draftFieldNode)) {
+ continue;
  }
  const draftFieldValue = draftFieldNode.value;
  if (dataType === 'Reference' && relationshipName !== null) {
@@ -8772,7 +8854,29 @@ function isCreateContentDocumentAndVersionDraftAdapterEvent(customEvent) {
  return customEvent.namespace === CONTENT_DOCUMENT_AND_VERSION_NAMESPACE;
  }

+ // so eslint doesn't complain about nimbus
+ /* global __nimbus */
  const ContentDocumentCompositeKeyPrefix = 'UiApi::ContentDocumentCompositeRepresentation:';
+ function chunkToBase64(chunk) {
+ let binary = '';
+ const chunkSize = 32 * 1024;
+ for (let i = 0; i < chunk.length; i += chunkSize) {
+ binary += String.fromCharCode.apply(null, chunk.subarray(i, i + chunkSize));
+ }
+ return btoa(binary);
+ }
+ async function streamBufferToBinaryStore(binaryStore, buffer, mimeType) {
+ const uri = await binaryStore.createStream(mimeType);
+ const bufferSize = 64 * 1024; // 64k buffer size
+ const uint8Array = new Uint8Array(buffer);
+ for (let offset = 0; offset < uint8Array.length; offset += bufferSize) {
+ const chunk = uint8Array.subarray(offset, Math.min(offset + bufferSize, uint8Array.length));
+ const base64Chunk = chunkToBase64(chunk);
+ await binaryStore.writeToStream(uri, base64Chunk);
+ }
+ await binaryStore.closeStream(uri);
+ return uri;
+ }
  function createContentDocumentAndVersionDraftAdapterFactory(luvio, binaryStore, actionHandler) {
  const overriddenLuvio = buildLuvioOverrideForDraftAdapters(luvio, actionHandler, (key) => {
  // if the key is for our top-level response shape
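`chunkToBase64` encodes in 32 KiB slices so `String.fromCharCode.apply` never exceeds the engine's argument-count limit, and `streamBufferToBinaryStore` pushes 64 KiB chunks through the plugin's stream API instead of materializing one giant base64 string. The same slicing idea in isolation (a standalone sketch, no Nimbus dependency):

    // encode an ArrayBuffer without hitting the apply() argument limit
    function toBase64(buffer, sliceSize = 32 * 1024) {
        const bytes = new Uint8Array(buffer);
        let binary = '';
        for (let i = 0; i < bytes.length; i += sliceSize) {
            binary += String.fromCharCode.apply(null, bytes.subarray(i, i + sliceSize));
        }
        return btoa(binary);
    }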
@@ -8795,7 +8899,14 @@ function createContentDocumentAndVersionDraftAdapterFactory(luvio, binaryStore,
  const { fileData } = config;
  const { name, size, type } = fileData;
  const buffer = await fileData.arrayBuffer();
- const uri = await binaryStore.store(new Uint8Array(buffer), type, size);
+ var uri;
+ // see if the new chunking API exists; if it doesn't, fall back to the memory-intensive mobile API
+ if (!__nimbus.plugins.LdsBinaryStorePlugin.createStream) {
+ uri = await binaryStore.store(new Uint8Array(buffer), type, size);
+ }
+ else {
+ uri = await streamBufferToBinaryStore(binaryStore, buffer, type);
+ }
  config.fileData = {
  isFileReference: true,
  handle: uri,
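The adapter feature-detects `createStream` on the Nimbus plugin and only falls back to the one-shot `store` call (which holds the whole file in memory) when the host app has not shipped the streaming API. The same guard reduced to its shape (a sketch; `plugin` stands in for `__nimbus.plugins.LdsBinaryStorePlugin`):

    async function persistFile(plugin, binaryStore, buffer, type, size) {
        // prefer the chunked stream API when the host provides it
        return typeof plugin.createStream === 'function'
            ? streamBufferToBinaryStore(binaryStore, buffer, type)
            : binaryStore.store(new Uint8Array(buffer), type, size);
    }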
@@ -9634,7 +9745,7 @@ function recordLoaderFactory(query) {
  return new DataLoader(batchRecordQuery);
  }

- function createContext(store, objectInfos, eventEmitter, settings, snapshot, draftFunctions) {
+ function createContext(store, objectInfos, eventEmitter, settings, snapshot, mappedCursors = new Map(), draftFunctions) {
  store.query.bind(store);
  const query = (sql, params) => {
  const now = Date.now();
@@ -9661,7 +9772,9 @@ function createContext(store, objectInfos, eventEmitter, settings, snapshot, dra
  Record,
  snapshot,
  seenRecordIds: new Set(),
+ possibleStaleRecordMap: new Map(),
  draftFunctions,
+ mappedCursors,
  };
  }

@@ -10272,7 +10385,6 @@ function isTodayStartOfWeek() {

  const JSON_EXTRACT_PATH_INGESTION_TIMESTAMP = '$.ingestionTimestamp';
  const JSON_EXTRACT_PATH_INGESTION_APINAME = '$.apiName';
- const JSON_EXTRACT_PATH_DRAFTS = '$.drafts';

  const MultiPickListValueSeparator = ';';
  function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draftFunctions) {
@@ -10813,14 +10925,10 @@ function buildQuery(config) {
  const predicates = buildPredicates(config);
  const orderBy = buildOrderBy(config);
  const sql = `
- SELECT "${config.alias}".data
+ SELECT "${config.alias}".data, "${config.alias}".metadata
  FROM lds_data "${config.alias}" ${joins.sql}
  WHERE "${config.alias}".key like 'UiApi::RecordRepresentation:%'
  AND json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_INGESTION_APINAME}') = '${config.alias}'
- AND (
- json_extract("${config.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ?
- OR json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL
- )
  ${predicates.sql}
  ${orderBy.sql}
  LIMIT ?
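With the timestamp/drafts predicate gone, staleness is decided per row after the query runs (see `connectionResolver` below); the statement now also selects each row's metadata. Roughly the emitted SQL, with `Account` standing in for `config.alias` and predicates/order-by omitted (illustrative shape only):

    const exampleSql = `
        SELECT "Account".data, "Account".metadata
        FROM lds_data "Account"
        WHERE "Account".key like 'UiApi::RecordRepresentation:%'
        AND json_extract("Account".data, '$.apiName') = 'Account'
        LIMIT ? OFFSET ?`;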
@@ -10832,7 +10940,6 @@ function buildQuery(config) {
  const bindings = [
  // bindings from predicates on joins
  ...joins.bindings,
- config.ingestionTimestamp,
  // where clause and parent scope bindings
  ...predicates.bindings,
  // limit binding
@@ -10859,29 +10966,19 @@ function buildJoins(config) {
  if (allJoins.length === 0)
  return { sql, bindings };
  sql = allJoins.reduce((joinAccumulator, join) => {
- let timestampAdded = false;
  const joinConditions = join.conditions.reduce((conditionAccumulator, condition) => {
  let joined_sql;
- const joinMetadataTimestamp = ` AND (json_extract("${join.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ? OR json_extract("${join.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL)`;
  // predicate on a value, use the newly joined table
  if ('type' in condition) {
  const { sql, binding } = predicateToSQL(condition, join.alias);
- joined_sql = ` AND ${sql}${timestampAdded ? '' : joinMetadataTimestamp}`;
+ joined_sql = ` AND ${sql}`;
  bindings.push(...binding);
- if (timestampAdded === false) {
- bindings.push(config.ingestionTimestamp);
- timestampAdded = true;
- }
  }
  else {
  // predicate on a path
  const left = ` AND json_extract("${join.to}".data, '${condition.leftPath}')`;
  const right = `json_extract("${join.alias}".data, '${condition.rightPath}')`;
- joined_sql = `${left} = ${right}${timestampAdded ? '' : joinMetadataTimestamp}`;
- if (timestampAdded === false) {
- bindings.push(config.ingestionTimestamp);
- timestampAdded = true;
- }
+ joined_sql = `${left} = ${right}`;
  }
  conditionAccumulator += joined_sql;
  return conditionAccumulator;
@@ -11550,11 +11647,15 @@ async function readIngestionTimestampForKey(key, query) {
  }
  return ingestionTimestamp;
  }
- async function readPaginationMetadataForKey(key, query) {
- const sql = `SELECT data FROM lds_data WHERE key=?`;
- const results = await query(sql, [key + '__pagination']);
- const [paginationMetadata] = results.rows.map((row) => parse$3(row[0]));
- return paginationMetadata || {};
+ function isObjectDefinitionNode(node) {
+ const { kind } = node;
+ return typeof kind === 'string' && kind === 'OperationDefinition';
+ }
+ function operationNodeAncestor(ancestors) {
+ let operationNode = ancestors.find((a) => {
+ return !(a instanceof Array) && isObjectDefinitionNode(a);
+ });
+ return operationNode;
  }

  function findSpanningField(name) {
@@ -11755,44 +11856,87 @@ function atobPolyfill(data) {
  const base64encode = typeof btoa === 'function' ? btoa : btoaPolyfill;
  const base64decode = typeof atob === 'function' ? atob : atobPolyfill;

+ // this truthy value is used to indicate a premature end of results
+ const EARLY_END = 1;
  function cursorResolver(source) {
- return encodeV1Cursor(source.index);
+ let cursor = {
+ i: source.index,
+ };
+ if (source.earlyEnd) {
+ cursor.e = EARLY_END;
+ }
+ return encodeV1Cursor(cursor);
  }
  function pageInfoResolver(source) {
  if (source.records.length === 0) {
+ // we may have found no records, but if more exist we need to
+ // return a valid cursor that can be passed as the next `after`
+ if (source.earlyEnd) {
+ return {
+ startCursor: null,
+ endCursor: encodeV1Cursor({
+ i: source.offset,
+ e: EARLY_END,
+ }),
+ hasNextPage: source.hasNextPage,
+ };
+ }
  return {
  startCursor: null,
  endCursor: null,
- hasNextPage: false,
+ hasNextPage: source.hasNextPage,
  };
  }
  let startIndex = source.records[0].index;
+ let startCursor = {
+ i: startIndex,
+ };
  let endIndex = source.records[source.records.length - 1].index;
+ let endCursor = {
+ i: endIndex,
+ };
+ if (source.earlyEnd) {
+ startCursor.e = EARLY_END;
+ endCursor.e = EARLY_END;
+ }
  return {
- startCursor: encodeV1Cursor(startIndex),
- endCursor: encodeV1Cursor(endIndex),
+ startCursor: encodeV1Cursor(startCursor),
+ endCursor: encodeV1Cursor(endCursor),
  hasNextPage: source.hasNextPage,
  };
  }
  function pageResultCountResolver(source) {
  return source.records.length;
  }
- function encodeV1Cursor(index) {
- return base64encode(`v1:${index}`);
+ function isLocalCursor(maybeCursor) {
+ return (!!maybeCursor &&
+ typeof maybeCursor === 'object' &&
+ 'i' in maybeCursor &&
+ typeof maybeCursor.i === 'number');
+ }
+ function encodeV1Cursor(cursor) {
+ return base64encode(stringify$3(cursor));
  }
- const cursorRegex = /^v1:(?<index>\d+)$/;
+ const CURSOR_PARSE_ERROR = 'Unable to parse cursor';
  function decodeV1Cursor(base64cursor) {
- const cursor = base64decode(base64cursor);
- if (!cursor) {
+ let maybeCursor;
+ try {
+ const cursorString = base64decode(base64cursor);
+ maybeCursor = parse$3(cursorString);
+ }
+ catch (error) {
+ let message = CURSOR_PARSE_ERROR;
+ if (error instanceof Error) {
+ message += ': ' + error.message;
+ }
  // eslint-disable-next-line @salesforce/lds/no-error-in-production
- throw new Error('Unable to parse cursor');
+ throw new Error(message);
  }
- const found = cursor.match(cursorRegex);
- if (!found || !found.groups) {
+ if (!isLocalCursor(maybeCursor)) {
  // eslint-disable-next-line @salesforce/lds/no-error-in-production
- throw new Error('Unable to parse cursor');
+ throw new Error(CURSOR_PARSE_ERROR);
  }
- return Number(found.groups.index);
+ return maybeCursor;
  }
  /**
  * Check the selections for any selection matching `pageInfo { hasNextPage }`
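The cursor format changes from the opaque string `v1:<index>` to base64-encoded JSON carrying a 1-based index `i` and an optional early-end flag `e`. A quick round-trip (plain `btoa`/`atob` here; `decodeV1Cursor` additionally validates the shape via `isLocalCursor`):

    const cursor = btoa(JSON.stringify({ i: 42 })); // "eyJpIjo0Mn0="
    const decoded = JSON.parse(atob(cursor));       // { i: 42 }
    // an early-end cursor would be btoa(JSON.stringify({ i: 42, e: 1 }))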
@@ -11830,6 +11974,164 @@ function selectionIncludesHasNextPage(selections, fragments) {
  return false;
  }

+ const END_CURSOR = '__END__';
+ // find the closest matching cursor in the server pagination metadata
+ function mapCursorValue(originalValue, paginationMetadata) {
+ let mappedValue = null;
+ if (!originalValue) {
+ return mappedValue;
+ }
+ // flip the pagination metadata into an array by index.
+ let cursors = [];
+ for (const [cursor, index] of Object.entries(paginationMetadata)) {
+ if (index === undefined)
+ continue;
+ cursors[index] = cursor;
+ }
+ let cursor = decodeV1Cursor(originalValue);
+ // cursors contain 1-based indexes, adjust back to 0-based
+ let index = cursor.i - 1;
+ if (
+ // cursor.e being truthy means we had premature end of results and
+ // should pin to the last known server cursor
+ !cursor.e &&
+ // check that the index we have is within the bounds of known cursors
+ index >= 0 &&
+ index < cursors.length &&
+ // and make sure the cursor is not the server end marker
+ cursors[index] !== END_CURSOR) {
+ mappedValue = cursors[index];
+ }
+ else {
+ // in this case, either our local cursor is beyond the max server cursor, or
+ // the local cursor precedes the max server cursor and we ran out of locally
+ // cached results. either way, find the last known server cursor and map to that.
+ for (let i = cursors.length; i > 0; --i) {
+ let cursor = cursors[i - 1];
+ if (cursor !== END_CURSOR) {
+ mappedValue = cursor;
+ break;
+ }
+ }
+ }
+ return mappedValue;
+ }
+ // map all pagination cursors in the document
+ async function mapPaginationCursors(originalAST, variables, store) {
+ // first pass, identify record query cache keys for reading pagination metadata
+ let requiredPaginationMetadataKeys = [];
+ visit(originalAST, {
+ Field(node, _key, _parent, _path, ancestors) {
+ // is it a record query?
+ if (!isRecordQuery(node)) {
+ return;
+ }
+ // does it have a defined `after` argument?
+ let after = node.arguments &&
+ node.arguments.find((a) => {
+ return a.name.value === 'after';
+ });
+ if (after && (after.value.kind === 'StringValue' || after.value.kind === 'Variable')) {
+ let operationNode = operationNodeAncestor(ancestors);
+ if (!operationNode) {
+ return false;
+ }
+ let key = buildKeyStringForRecordQuery(operationNode, variables, node.arguments || [], node.name.value);
+ requiredPaginationMetadataKeys.push(key);
+ }
+ // don't need to descend into this node
+ return false;
+ },
+ });
+ // read pagination metadata for identified record queries
+ let paginationMetadataMap = await readPaginationMetadataForKeys(requiredPaginationMetadataKeys, store.query.bind(store));
+ // holds the original cursor values that were mapped back to server cursors
+ let mappedCursors = new Map();
+ // rewrite nodes/variables with mapped cursors now that we read the pagination metadata
+ let ast = visit(originalAST, {
+ Field(node, _key, _parent, _path, ancestors) {
+ // is it a record query?
+ if (!isRecordQuery(node)) {
+ // not returning false, we might be in the parent of a record query
+ return;
+ }
+ // does it have a defined `after` argument?
+ if (!node.arguments)
+ return false;
+ let after = node.arguments.find((a) => {
+ return a.name.value === 'after';
+ });
+ if (!after)
+ return false;
+ if (after.value.kind === 'StringValue' || after.value.kind === 'Variable') {
+ let operationNode = operationNodeAncestor(ancestors);
+ if (!operationNode) {
+ return false;
+ }
+ let key = buildKeyStringForRecordQuery(operationNode, variables, node.arguments || [], node.name.value);
+ // pagination metadata may be missing, e.g. due to being offline
+ let paginationMetadata = paginationMetadataMap.get(key) || {};
+ if (after.value.kind === 'StringValue') {
+ let originalValue = after.value.value;
+ mappedCursors.set(key, originalValue);
+ let mappedValue = mapCursorValue(originalValue, paginationMetadata);
+ if (!mappedValue) {
+ // there were no results from the server, remove after argument
+ return {
+ ...node,
+ arguments: node.arguments.filter((a) => a !== after),
+ };
+ }
+ // return a new replacement node
+ return {
+ ...node,
+ arguments: node.arguments.map((a) => {
+ if (a !== after)
+ return a;
+ return {
+ ...a,
+ value: {
+ kind: 'StringValue',
+ value: mappedValue,
+ },
+ };
+ }),
+ };
+ }
+ else if (after.value.kind === 'Variable') {
+ // rewrite the variable
+ let variableName = after.value.name.value;
+ let variableValue = variables[variableName];
+ mappedCursors.set(key, variableValue);
+ let mappedValue = mapCursorValue(variableValue, paginationMetadata);
+ variables[variableName] = mappedValue;
+ }
+ // don't need to descend into this node
+ return false;
+ }
+ },
+ });
+ return {
+ ast,
+ mappedCursors,
+ };
+ }
+ async function readPaginationMetadataForKeys(keys, query) {
+ let metadataMap = new Map();
+ if (keys.length === 0)
+ return metadataMap;
+ const sql = `SELECT key, data FROM lds_data WHERE key in (${Array(keys.length)
+ .fill('?')
+ .join(',')})`;
+ const results = await query(sql, keys.map((k) => k + '__pagination'));
+ for (let row of results.rows) {
+ let key = row[0].replace(/__pagination$/, '');
+ let metadata = parse$3(row[1]);
+ metadataMap.set(key, metadata);
+ }
+ return metadataMap;
+ }
+
  /*
  resolves connections...
  */
@@ -11851,8 +12153,14 @@ async function connectionResolver(obj, args, context, info) {
  const childRelationship = parentObjectInfo &&
  parentObjectInfo.childRelationships.find((rel) => rel.relationshipName === info.fieldName);
  // or emit/throw if we want to report it
- if (!childRelationship)
- return { records: [], hasNextPage: false };
+ if (!childRelationship) {
+ return {
+ records: [],
+ hasNextPage: false,
+ earlyEnd: false,
+ offset: 0,
+ };
+ }
  alias = childRelationship.childObjectApiName;
  childRelationshipFieldName = childRelationship.fieldName;
  }
@@ -11871,7 +12179,12 @@ async function connectionResolver(obj, args, context, info) {
  }
  let offset = 0;
  if (args.after) {
- offset = decodeV1Cursor(args.after) + 1;
+ let originalCursor = context.mappedCursors.get(queryCacheKey);
+ if (!originalCursor) {
+ // eslint-disable-next-line @salesforce/lds/no-error-in-production
+ throw new Error('Internal Error: unable to determine `after` cursor value');
+ }
+ offset = decodeV1Cursor(originalCursor).i;
  }
  // if the query wants to know `hasNextPage` then we need to request 1 additional record
  let selections = info.fieldNodes
@@ -11880,7 +12193,7 @@ async function connectionResolver(obj, args, context, info) {
  let wantsHasNextPage = selectionIncludesHasNextPage(selections, info.fragments);
  let paginationMetadata = undefined;
  if (wantsHasNextPage) {
- paginationMetadata = await readPaginationMetadataForKey(queryCacheKey, query);
+ paginationMetadata = await readPaginationMetadataForKeys([queryCacheKey], query);
  }
  let internalLimit = limit + (wantsHasNextPage ? 1 : 0);
  // Alias starts as entity's ApiName
@@ -11891,36 +12204,60 @@ async function connectionResolver(obj, args, context, info) {
  orderBy: orderByToPredicate(args.orderBy, alias, alias, context.objectInfos),
  limit: internalLimit,
  offset,
- ingestionTimestamp,
  };
  const { sql, bindings } = buildQuery(queryConfig);
  const results = await query(sql, bindings);
  let hasNextPage = false;
+ let earlyEnd = false;
  if (wantsHasNextPage) {
  if (results.rows.length > limit) {
  // more records exist in the cache
  hasNextPage = true;
  results.rows.pop();
  }
- else if (!paginationMetadata || paginationMetadata.__END__ === undefined) {
+ else if (!paginationMetadata ||
+ !paginationMetadata.has(queryCacheKey) ||
+ paginationMetadata.get(queryCacheKey).__END__ === undefined) {
  // more records may exist on the server
  hasNextPage = true;
+ // we hit the end of our local records, so we need to know that we
+ // should start at the end of known server cursors
+ if (results.rows.length < limit) {
+ earlyEnd = true;
+ }
  }
  }
  //map each sql result with the ingestion timestamp to pass it down a level
- let records = results.rows
- .map((row) => parse$3(row[0]))
- .map((recordRepresentation, index) => {
+ let records = results.rows.map((row, index) => {
+ const recordMetadataResult = {
+ recordRepresentation: parse$3(row[0]),
+ metadata: parse$3(row[1]),
+ };
+ const { recordRepresentation, metadata } = recordMetadataResult;
  context.seenRecordIds.add(recordRepresentation.id);
+ if (metadata.ingestionTimestamp < ingestionTimestamp &&
+ recordRepresentation.drafts === undefined) {
+ if (context.possibleStaleRecordMap.has(recordRepresentation.apiName) === false) {
+ context.possibleStaleRecordMap.set(recordRepresentation.apiName, []);
+ }
+ const ids = context.possibleStaleRecordMap.get(recordRepresentation.apiName);
+ if (ids !== undefined) {
+ ids.push(recordRepresentation.id);
+ context.possibleStaleRecordMap.set(recordRepresentation.apiName, ids);
+ }
+ }
  return {
  recordRepresentation,
  ingestionTimestamp,
- index: index + offset,
+ index: index + offset + 1,
+ earlyEnd,
  };
  });
  return {
  records,
  hasNextPage,
+ earlyEnd,
+ offset,
  };
  }
  /**
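`connectionResolver` now reads each row's metadata alongside its data: a row whose `ingestionTimestamp` is older than the required timestamp, and which carries no drafts, is recorded in `context.possibleStaleRecordMap` keyed by `apiName` instead of being filtered out in SQL. The per-row test, extracted for clarity (a sketch of the same logic):

    // mirrors the resolver's staleness check
    const isPossiblyStale = (metadata, record, minTimestamp) =>
        metadata.ingestionTimestamp < minTimestamp && record.drafts === undefined;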
@@ -12718,7 +13055,7 @@ function getTextAreaType(field) {
  return 'TextAreaValue';
  }

- async function evaluate(config, observers, settings, objectInfos, store, snapshot, cache, draftFunctions) {
+ async function evaluate(config, observers, settings, objectInfos, store, snapshot, cache, draftFunctions, mappedCursors) {
  const eventEmitter = createCustomAdapterEventEmitter(GRAPHQL_EVAL_NAMESPACE, observers);
  // this is only wrapped in a try to execute the event after the result was returned
  try {
@@ -12777,7 +13114,7 @@ async function evaluate(config, observers, settings, objectInfos, store, snapsho
  eventEmitter({ type: 'graphql-preconditions-met' });
  // create the resolver request context, runtime values and functions for
  // resolvers to do their job.
- const contextValue = createContext(store, objectInfos, eventEmitter, settings, snapshot, draftFunctions);
+ const contextValue = createContext(store, objectInfos, eventEmitter, settings, snapshot, mappedCursors, draftFunctions);
  // We're building this from scratch from each request. If this becomes a
  // hotspot we can pull it up and memoize it later
  const schema = createSchemaWithCache(objectInfos, cache);
@@ -12802,7 +13139,11 @@ async function evaluate(config, observers, settings, objectInfos, store, snapsho
  seenRecordIds.push(queryString);
  });
  }
- return { result, seenRecordIds };
+ return {
+ result,
+ seenRecordIds,
+ possibleStaleRecordMap: contextValue.possibleStaleRecordMap,
+ };
  }
  finally {
  eventEmitter({ type: 'graphql-eval-end' });
@@ -14526,7 +14867,11 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
  return async function draftAwareGraphQLAdapter(config, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy, requestContext = {}) {
  //create a copy to not accidentally modify the AST in the astResolver map of luvio
  const copy = parse$3(stringify$3(config.query));
+ // the injected ast has extra fields needed for eval in it
  let injectedAST;
+ // the cursor mapped ast is passed upstream so it won't reject on our local cursors
+ let cursorMappedAST;
+ let mappedCursors = new Map();
  let objectInfoNeeded = {};
  let unmappedDraftIDs;
  let internalRequestContext = {
@@ -14542,6 +14887,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
  objectInfos: objectInfoNeeded,
  unmappedDraftIDs,
  } = await injectSyntheticFields(copy, objectInfoService, draftFunctions, config.variables));
+ ({ ast: cursorMappedAST, mappedCursors } = await mapPaginationCursors(injectedAST, config.variables || {}, store));
  if (config.variables) {
  config.variables = replaceDraftIdsInVariables(config.variables, draftFunctions, unmappedDraftIDs);
  }
@@ -14573,7 +14919,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
  const nonEvaluatedSnapshot = (await luvio.applyCachePolicy(internalRequestContext, {
  config: {
  ...config,
- query: injectedAST,
+ query: cursorMappedAST,
  },
  luvio,
  gqlEval: true,
@@ -14586,12 +14932,17 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
  : [];
  let gqlResult;
  let seenRecordIds;
+ let possibleStaleRecordMap;
  try {
- ({ result: gqlResult, seenRecordIds } = await evaluate({
+ ({
+ result: gqlResult,
+ seenRecordIds,
+ possibleStaleRecordMap,
+ } = await evaluate({
  ...config,
  //need to create another copy of the ast for future writes
  query: parse$3(stringify$3(injectedAST)),
- }, observers, { userId }, objectInfoNeeded, store, nonEvaluatedSnapshot, graphqlSchemaCache));
+ }, observers, { userId }, objectInfoNeeded, store, nonEvaluatedSnapshot, graphqlSchemaCache, draftFunctions, mappedCursors));
  }
  catch (throwable) {
  const error = throwable;
@@ -14617,13 +14968,18 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
  const seenRecords = createSeenRecords(seenRecordIds, nonEvaluatedSnapshot);
  const recordId = generateUniqueRecordId();
  const rebuildWithLocalEval = async (originalSnapshot) => {
- let { result: rebuildResult, seenRecordIds } = await evaluate({
+ let { result: rebuildResult, seenRecordIds, possibleStaleRecordMap, } = await evaluate({
  ...config,
  query: injectedAST,
- }, observers, { userId }, objectInfoNeeded, store, originalSnapshot, graphqlSchemaCache, draftFunctions);
+ }, observers, { userId }, objectInfoNeeded, store, originalSnapshot, graphqlSchemaCache, draftFunctions, mappedCursors);
  if (!rebuildResult.errors) {
  rebuildResult = removeSyntheticFields(rebuildResult, config.query);
  }
+ let snapshotState = 'Fulfilled';
+ if (possibleStaleRecordMap.size > 0) {
+ initiateStaleRecordRefresh(luvio, possibleStaleRecordMap);
+ snapshotState = 'Stale';
+ }
  if (objectsDeepEqual(rebuildResult, originalSnapshot.data)) {
  return originalSnapshot;
  }
@@ -14632,6 +14988,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
  ...originalSnapshot,
  data: rebuildResult,
  recordId,
+ state: snapshotState,
  seenRecords: createSeenRecords(seenRecordIds, nonEvaluatedSnapshot),
  rebuildWithLocalEval,
  };
@@ -14669,9 +15026,31 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
  },
  };
  }
+ if (possibleStaleRecordMap.size > 0) {
+ initiateStaleRecordRefresh(luvio, possibleStaleRecordMap);
+ resultSnapshot.state = 'Stale';
+ }
  return resultSnapshot;
  };
  }
+ function initiateStaleRecordRefresh(luvio, keyMap) {
+ const staleRecordKeys = from$1(keyMap.values())
+ .flat()
+ .map((id) => `UiApi::RecordRepresentation:${id}`);
+ luvio.storeExpirePossibleStaleRecords(staleRecordKeys, makeGetRecordsConfig(keyMap), getRecordsAdapterFactory(luvio));
+ }
+ function makeGetRecordsConfig(keyMap) {
+ const records = [];
+ keyMap.forEach((recordIds, apiName) => {
+ records.push({
+ recordIds,
+ fields: [`${apiName}.Id`],
+ });
+ });
+ return {
+ records,
+ };
+ }

  function environmentAwareGraphQLBatchAdapterFactory(objectInfoService, luvio, isDraftId) {
  return async function environmentAwareGraphQLBatchAdapter(config, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy, requestContext = {}) {
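`initiateStaleRecordRefresh` flattens the apiName-to-ids map into store keys and hands them to `storeExpirePossibleStaleRecords` together with a `getRecords` config built by `makeGetRecordsConfig`. Illustrative input and output (the record id is made up):

    const keyMap = new Map([['Account', ['001R0000005xyAAA']]]);
    makeGetRecordsConfig(keyMap);
    // -> { records: [{ recordIds: ['001R0000005xyAAA'], fields: ['Account.Id'] }] }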
@@ -15818,6 +16197,9 @@ class NimbusDraftQueue {
  removeHandler(_id) {
  return Promise.reject(new Error('Cannot call setMetadata from the NimbusDraftQueue'));
  }
+ updateDraftAction(_action) {
+ return Promise.reject(new Error('Cannot call updateDraftAction from the NimbusDraftQueue'));
+ }
  }

  function attachObserversToAdapterRequestContext(observers, adapterRequestContext) {
@@ -17075,6 +17457,21 @@ const NimbusBinaryStore = {
  __nimbus.plugins.LdsBinaryStorePlugin.setCanonicalUrl(uri, canonicalUrl, ttlSeconds, resolve, (err) => reject(errorMessageToError(err)));
  });
  },
+ createStream: function (type) {
+ return new Promise((resolve, reject) => {
+ __nimbus.plugins.LdsBinaryStorePlugin.createStream(type, resolve, (err) => reject(errorMessageToError(err)));
+ });
+ },
+ writeToStream: function (uri, chunk) {
+ return new Promise((resolve, reject) => {
+ __nimbus.plugins.LdsBinaryStorePlugin.writeToStream(uri, chunk, resolve, (err) => reject(errorMessageToError(err)));
+ });
+ },
+ closeStream: function (uri) {
+ return new Promise((resolve, reject) => {
+ __nimbus.plugins.LdsBinaryStorePlugin.closeStream(uri, resolve, (err) => reject(errorMessageToError(err)));
+ });
+ },
  };

  /**
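Each new method wraps the Nimbus plugin's callback style (success callback, error callback) in a Promise, mapping the raw error message through `errorMessageToError`. The repeated pattern, factored out as a sketch (not how the shipped code is organized):

    // generic promisifier for (args..., onSuccess, onError) Nimbus calls
    const promisify = (fn) => (...args) =>
        new Promise((resolve, reject) =>
            fn(...args, resolve, (err) => reject(errorMessageToError(err))));

    // e.g. promisify(__nimbus.plugins.LdsBinaryStorePlugin.closeStream)(uri)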
@@ -18404,7 +18801,6 @@ let lazyDurableStore;
  let lazyNetworkAdapter;
  let lazyObjectInfoService;
  let lazyGetRecords;
- // TODO [W-123]: JHORST hoist, optimize and test this function
  const shouldFlush = (key, value) => {
  if (!isStoreKeyRecordId$1(key)) {
  return { flushValue: true };
@@ -18578,4 +18974,4 @@ register({
  });

  export { O11Y_NAMESPACE_LDS_MOBILE, getRuntime, registerReportObserver, reportGraphqlQueryParseError };
- // version: 1.303.0-a698c7cc67
+ // version: 1.304.0-aa3e5f9550