@salesforce/lds-worker-api 1.303.0 → 1.304.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4271,7 +4271,7 @@
4271
4271
  }
4272
4272
  callbacks.push(callback);
4273
4273
  }
4274
- // version: 1.303.0-a698c7cc67
4274
+ // version: 1.304.0-aa3e5f9550
4275
4275
 
4276
4276
  // TODO [TD-0081508]: once that TD is fulfilled we can probably change this file
4277
4277
  function instrumentAdapter$1(createFunction, _metadata) {
@@ -15770,7 +15770,7 @@
15770
15770
  }
15771
15771
  return superResult;
15772
15772
  }
15773
- // version: 1.303.0-a698c7cc67
15773
+ // version: 1.304.0-aa3e5f9550
15774
15774
 
15775
15775
  function unwrap(data) {
15776
15776
  // The lwc-luvio bindings import a function from lwc called "unwrap".
@@ -16699,7 +16699,7 @@
16699
16699
  const { apiFamily, name } = metadata;
16700
16700
  return createGraphQLWireAdapterConstructor$1(adapter, `${apiFamily}.${name}`, luvio, astResolver);
16701
16701
  }
16702
- // version: 1.303.0-a698c7cc67
16702
+ // version: 1.304.0-aa3e5f9550
16703
16703
 
16704
16704
  /**
16705
16705
  * Copyright (c) 2022, Salesforce, Inc.,
@@ -20528,6 +20528,12 @@
20528
20528
  }
20529
20529
  return fieldNode.__state.fields;
20530
20530
  }
20531
+ function writeFieldStateNodeValue(fieldNode, propertyName, value) {
20532
+ const node = fieldNode;
20533
+ const state = node.__state || {};
20534
+ state[propertyName] = value;
20535
+ node.write('__state', state);
20536
+ }
20531
20537
 
20532
20538
  const CUSTOM_API_NAME_SUFFIX = '__c';
20533
20539
  const DMO_API_NAME_SUFFIX = '__dlm';
@@ -20576,7 +20582,7 @@
20576
20582
  name: key,
20577
20583
  children: {},
20578
20584
  };
20579
- if (isMissing(fieldRep)) {
20585
+ if (fields.isMissing(key)) {
20580
20586
  current.children[key] = next;
20581
20587
  continue;
20582
20588
  }
@@ -20635,14 +20641,6 @@
20635
20641
  }
20636
20642
  return reduce$2.call(childKeys, (acc, cur) => concat$2.call(acc, convertTrieToFieldsRecursively(root.children[cur]).map((i) => `${root.name}.${i}`)), []);
20637
20643
  }
20638
- function isMissing(node) {
20639
- // TODO [W-15867870]: JHORST add support for isMissing on graphnode object
20640
- return node.data && node.data.__state && node.data.__state.isMissing === true;
20641
- }
20642
- function isPending(node) {
20643
- // TODO [W-15867870]: JHORST add support for pending on graphnode object
20644
- return node.data && node.data.__state && node.data.__state.pending === true;
20645
- }
20646
20644
  const BLANK_RECORD_FIELDS_TRIE = freeze$4({
20647
20645
  name: '',
20648
20646
  children: {},
@@ -20734,8 +20732,9 @@
20734
20732
  !isFrozen$2(resolved.data)) {
20735
20733
  const stateFields = readFieldStateFromValueNode(resolved.data);
20736
20734
  const fields = stateFields === undefined ? [] : stateFields;
20737
- // TODO [W-15838292]: JHORST add support for node state on graphnode object
20738
- resolved.write('__state', { fields: dedupe$2([...fields, path.join('.')]) });
20735
+ // Note that GraphNodes are frozen when NODE_ENV != production.
20736
+ // Use with care.
20737
+ writeFieldStateNodeValue(resolved, 'fields', dedupe$2([...fields, path.join('.')]));
20739
20738
  }
20740
20739
  }
20741
20740
  function markNulledOutRequiredFields(record, fields) {
@@ -20762,7 +20761,7 @@
20762
20761
  return;
20763
20762
  }
20764
20763
  const fieldValueValue = fieldValueRepresentation.object(fieldName);
20765
- if (isPending(fieldValueValue)) {
20764
+ if (fieldValueRepresentation.isPending(fieldName)) {
20766
20765
  writeMissingFieldToStore(fieldValueRepresentation, fieldName);
20767
20766
  return;
20768
20767
  }
@@ -20792,7 +20791,8 @@
20792
20791
  function writeMissingFieldToStore(field, fieldName) {
20793
20792
  // TODO [W-6900046]: remove cast, make RecordRepresentationNormalized['fields'] accept
20794
20793
  // an undefined/non-present __ref if isMissing is present
20795
- // TODO [W-15867870]: JHORST add support for isMissing on graphnode object
20794
+ // Note that GraphNodes are frozen when NODE_ENV != production.
20795
+ // Use with care.
20796
20796
  field.write(fieldName, {
20797
20797
  __state: {
20798
20798
  isMissing: true,
@@ -44192,7 +44192,7 @@
44192
44192
  throttle(60, 60000, setupNotifyAllListRecordUpdateAvailable(luvio));
44193
44193
  throttle(60, 60000, setupNotifyAllListInfoSummaryUpdateAvailable(luvio));
44194
44194
  });
44195
- // version: 1.303.0-b6ed223d95
44195
+ // version: 1.304.0-d87b57badb
44196
44196
 
44197
44197
  var ldsIdempotencyWriteDisabled = {
44198
44198
  isOpen: function (e) {
@@ -50074,6 +50074,31 @@
50074
50074
  await this.startQueue();
50075
50075
  }
50076
50076
  }
50077
+ async updateDraftAction(action) {
50078
+ // stop queue manually
50079
+ this.stopQueueManually();
50080
+ const actionStatus = await this.statusOfAction(action.id);
50081
+ if (actionStatus === DraftActionStatus.Uploading) {
50082
+ return Promise.reject('cannot update an uploading action');
50083
+ }
50084
+ // save the action into the draft store
50085
+ await this.draftStore.writeAction(action);
50086
+ // make the handler replay these drafts on the record
50087
+ const handler = this.getHandler(action.handler);
50088
+ const queue = await this.getQueueActions();
50089
+ await handler.handleActionEnqueued(action, queue);
50090
+ // start queue safely
50091
+ return this.startQueueSafe();
50092
+ }
50093
+ async statusOfAction(actionId) {
50094
+ const queue = await this.getQueueActions();
50095
+ const actions = queue.filter((action) => action.id === actionId);
50096
+ if (actions.length === 0) {
50097
+ return Promise.reject('cannot update non-existent action');
50098
+ }
50099
+ const action = actions[0];
50100
+ return action.status;
50101
+ }
50077
50102
  replaceAction(targetActionId, sourceActionId) {
50078
50103
  return this.replaceOrMergeActions(targetActionId, sourceActionId, false);
50079
50104
  }
@@ -51063,6 +51088,60 @@
51063
51088
  };
51064
51089
  });
51065
51090
  }
51091
+ async mergePerformQuickAction(actionId, fields) {
51092
+ if (!this.isValidFieldMap(fields)) {
51093
+ return Promise.reject('fields is not valid');
51094
+ }
51095
+ const queue = await this.draftQueue.getQueueActions();
51096
+ const actions = queue.filter((action) => action.id === actionId);
51097
+ if (actions.length === 0) {
51098
+ return Promise.reject('cannot edit non-existent action');
51099
+ }
51100
+ const action = actions[0];
51101
+ if (!this.isPerformQuickActionDraft(action, 'post')) {
51102
+ return Promise.reject('cannot edit incompatible action type or uploading actions');
51103
+ }
51104
+ action.data.body.fields = { ...action.data.body.fields, ...fields };
51105
+ await this.draftQueue.updateDraftAction(action);
51106
+ return this.buildDraftQueueItem(action);
51107
+ }
51108
+ isValidFieldMap(fields) {
51109
+ const keys$1 = keys$6(fields);
51110
+ const validTypes = ['string', 'number', 'null', 'boolean'];
51111
+ for (let i = 0; i < keys$1.length; i++) {
51112
+ const key = keys$1[i];
51113
+ const value = fields[key];
51114
+ if (!validTypes.includes(typeof value)) {
51115
+ return false;
51116
+ }
51117
+ }
51118
+ return true;
51119
+ }
51120
+ isPerformQuickActionDraft(action, method) {
51121
+ const data = action.data;
51122
+ const isPerformQuickAction = data.basePath.startsWith('/ui-api/actions/perform-quick-action/');
51123
+ const methodMatches = data.method === method;
51124
+ const notUploading = action.status !== DraftActionStatus.Uploading;
51125
+ return isPerformQuickAction && methodMatches && notUploading;
51126
+ }
51127
+ async mergePerformUpdateRecordQuickAction(actionId, fields) {
51128
+ if (!this.isValidFieldMap(fields)) {
51129
+ return Promise.reject('fields is not valid');
51130
+ }
51131
+ const queue = await this.draftQueue.getQueueActions();
51132
+ const actions = queue.filter((action) => action.id === actionId);
51133
+ if (actions.length === 0) {
51134
+ return Promise.reject('cannot edit non-existent action');
51135
+ }
51136
+ const action = actions[0];
51137
+ if (!this.isPerformQuickActionDraft(action, 'patch')) {
51138
+ return Promise.reject('cannot edit incompatible action type or uploading actions');
51139
+ }
51140
+ const data = action.data;
51141
+ data.body.fields = { ...data.body.fields, ...fields };
51142
+ await this.draftQueue.updateDraftAction(action);
51143
+ return this.buildDraftQueueItem(action);
51144
+ }
51066
51145
  buildDraftQueueItem(action) {
51067
51146
  const operationType = getOperationTypeFrom(action);
51068
51147
  const { id, status, timestamp, targetId, metadata } = action;
@@ -51370,6 +51449,12 @@
51370
51449
  function isStoreRecordError(storeRecord) {
51371
51450
  return storeRecord.__type === 'error';
51372
51451
  }
51452
+ function isDraftFieldPending(field) {
51453
+ return !!(field.__state && field.__state.pending === true);
51454
+ }
51455
+ function isDraftFieldMissing(field) {
51456
+ return !!(field.__state && field.__state.isMissing === true);
51457
+ }
51373
51458
 
51374
51459
  /**
51375
51460
  * Checks if a resource request is a GET method on the record endpoint
@@ -52061,12 +52146,9 @@
52061
52146
  }
52062
52147
  const { dataType, relationshipName, referenceToInfos } = fieldInfo;
52063
52148
  const draftFieldNode = record.fields[draftField];
52064
- // JHORST: revisit this logic
52065
52149
  // do not try to apply drafts on nodes that are pending or missing
52066
- if (draftFieldNode.__state !== undefined) {
52067
- if (draftFieldNode.__state.pending === true ||
52068
- draftFieldNode.__state.isMissing === true)
52069
- continue;
52150
+ if (isDraftFieldPending(draftFieldNode) || isDraftFieldMissing(draftFieldNode)) {
52151
+ continue;
52070
52152
  }
52071
52153
  const draftFieldValue = draftFieldNode.value;
52072
52154
  if (dataType === 'Reference' && relationshipName !== null) {
@@ -53057,7 +53139,29 @@
53057
53139
  return customEvent.namespace === CONTENT_DOCUMENT_AND_VERSION_NAMESPACE;
53058
53140
  }
53059
53141
 
53142
+ // so eslint doesn't complain about nimbus
53143
+ /* global __nimbus */
53060
53144
  const ContentDocumentCompositeKeyPrefix = 'UiApi::ContentDocumentCompositeRepresentation:';
53145
+ function chunkToBase64(chunk) {
53146
+ let binary = '';
53147
+ const chunkSize = 32 * 1024;
53148
+ for (let i = 0; i < chunk.length; i += chunkSize) {
53149
+ binary += String.fromCharCode.apply(null, chunk.subarray(i, i + chunkSize));
53150
+ }
53151
+ return btoa(binary);
53152
+ }
53153
+ async function streamBufferToBinaryStore(binaryStore, buffer, mimeType) {
53154
+ const uri = await binaryStore.createStream(mimeType);
53155
+ const bufferSize = 64 * 1024; // 64k buffer size
53156
+ const uint8Array = new Uint8Array(buffer);
53157
+ for (let offset = 0; offset < uint8Array.length; offset += bufferSize) {
53158
+ const chunk = uint8Array.subarray(offset, Math.min(offset + bufferSize, uint8Array.length));
53159
+ const base64Chunk = chunkToBase64(chunk);
53160
+ await binaryStore.writeToStream(uri, base64Chunk);
53161
+ }
53162
+ await binaryStore.closeStream(uri);
53163
+ return uri;
53164
+ }
53061
53165
  function createContentDocumentAndVersionDraftAdapterFactory(luvio, binaryStore, actionHandler) {
53062
53166
  const overriddenLuvio = buildLuvioOverrideForDraftAdapters(luvio, actionHandler, (key) => {
53063
53167
  // if the key is for our top-level response shape
@@ -53080,7 +53184,14 @@
53080
53184
  const { fileData } = config;
53081
53185
  const { name, size, type } = fileData;
53082
53186
  const buffer = await fileData.arrayBuffer();
53083
- const uri = await binaryStore.store(new Uint8Array(buffer), type, size);
53187
+ var uri;
53188
+ // see if new chunking-api exists, if it doesn't fall back to memory-intensive mobile api
53189
+ if (!__nimbus.plugins.LdsBinaryStorePlugin.createStream) {
53190
+ uri = await binaryStore.store(new Uint8Array(buffer), type, size);
53191
+ }
53192
+ else {
53193
+ uri = await streamBufferToBinaryStore(binaryStore, buffer, type);
53194
+ }
53084
53195
  config.fileData = {
53085
53196
  isFileReference: true,
53086
53197
  handle: uri,
@@ -53919,7 +54030,7 @@
53919
54030
  return new DataLoader(batchRecordQuery);
53920
54031
  }
53921
54032
 
53922
- function createContext(store, objectInfos, eventEmitter, settings, snapshot, draftFunctions) {
54033
+ function createContext(store, objectInfos, eventEmitter, settings, snapshot, mappedCursors = new Map(), draftFunctions) {
53923
54034
  store.query.bind(store);
53924
54035
  const query = (sql, params) => {
53925
54036
  const now = Date.now();
@@ -53946,7 +54057,9 @@
53946
54057
  Record,
53947
54058
  snapshot,
53948
54059
  seenRecordIds: new Set(),
54060
+ possibleStaleRecordMap: new Map(),
53949
54061
  draftFunctions,
54062
+ mappedCursors,
53950
54063
  };
53951
54064
  }
53952
54065
 
@@ -54557,7 +54670,6 @@
54557
54670
 
54558
54671
  const JSON_EXTRACT_PATH_INGESTION_TIMESTAMP = '$.ingestionTimestamp';
54559
54672
  const JSON_EXTRACT_PATH_INGESTION_APINAME = '$.apiName';
54560
- const JSON_EXTRACT_PATH_DRAFTS = '$.drafts';
54561
54673
 
54562
54674
  const MultiPickListValueSeparator = ';';
54563
54675
  function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draftFunctions) {
@@ -55098,14 +55210,10 @@
55098
55210
  const predicates = buildPredicates(config);
55099
55211
  const orderBy = buildOrderBy(config);
55100
55212
  const sql = `
55101
- SELECT "${config.alias}".data
55213
+ SELECT "${config.alias}".data, "${config.alias}".metadata
55102
55214
  FROM lds_data "${config.alias}" ${joins.sql}
55103
55215
  WHERE "${config.alias}".key like 'UiApi::RecordRepresentation:%'
55104
55216
  AND json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_INGESTION_APINAME}') = '${config.alias}'
55105
- AND (
55106
- json_extract("${config.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ?
55107
- OR json_extract("${config.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL
55108
- )
55109
55217
  ${predicates.sql}
55110
55218
  ${orderBy.sql}
55111
55219
  LIMIT ?
@@ -55117,7 +55225,6 @@
55117
55225
  const bindings = [
55118
55226
  // bindings from predicates on joins
55119
55227
  ...joins.bindings,
55120
- config.ingestionTimestamp,
55121
55228
  // where clause and parent scope bindings
55122
55229
  ...predicates.bindings,
55123
55230
  // limit binding
@@ -55144,29 +55251,19 @@
55144
55251
  if (allJoins.length === 0)
55145
55252
  return { sql, bindings };
55146
55253
  sql = allJoins.reduce((joinAccumulator, join) => {
55147
- let timestampAdded = false;
55148
55254
  const joinConditions = join.conditions.reduce((conditionAccumulator, condition) => {
55149
55255
  let joined_sql;
55150
- const joinMetadataTimestamp = ` AND (json_extract("${join.alias}".metadata, '${JSON_EXTRACT_PATH_INGESTION_TIMESTAMP}') >= ? OR json_extract("${join.alias}".data, '${JSON_EXTRACT_PATH_DRAFTS}') IS NOT NULL)`;
55151
55256
  // predicate on a value, use the newly joined table
55152
55257
  if ('type' in condition) {
55153
55258
  const { sql, binding } = predicateToSQL(condition, join.alias);
55154
- joined_sql = ` AND ${sql}${timestampAdded ? '' : joinMetadataTimestamp}`;
55259
+ joined_sql = ` AND ${sql}`;
55155
55260
  bindings.push(...binding);
55156
- if (timestampAdded === false) {
55157
- bindings.push(config.ingestionTimestamp);
55158
- timestampAdded = true;
55159
- }
55160
55261
  }
55161
55262
  else {
55162
55263
  // predicate on a path
55163
55264
  const left = ` AND json_extract("${join.to}".data, '${condition.leftPath}')`;
55164
55265
  const right = `json_extract("${join.alias}".data, '${condition.rightPath}')`;
55165
- joined_sql = `${left} = ${right}${timestampAdded ? '' : joinMetadataTimestamp}`;
55166
- if (timestampAdded === false) {
55167
- bindings.push(config.ingestionTimestamp);
55168
- timestampAdded = true;
55169
- }
55266
+ joined_sql = `${left} = ${right}`;
55170
55267
  }
55171
55268
  conditionAccumulator += joined_sql;
55172
55269
  return conditionAccumulator;
@@ -55835,11 +55932,15 @@
55835
55932
  }
55836
55933
  return ingestionTimestamp;
55837
55934
  }
55838
- async function readPaginationMetadataForKey(key, query) {
55839
- const sql = `SELECT data FROM lds_data WHERE key=?`;
55840
- const results = await query(sql, [key + '__pagination']);
55841
- const [paginationMetadata] = results.rows.map((row) => parse$3(row[0]));
55842
- return paginationMetadata || {};
55935
+ function isObjectDefinitionNode(node) {
55936
+ const { kind } = node;
55937
+ return typeof kind === 'string' && kind === 'OperationDefinition';
55938
+ }
55939
+ function operationNodeAncestor(ancestors) {
55940
+ let operationNode = ancestors.find((a) => {
55941
+ return !(a instanceof Array) && isObjectDefinitionNode(a);
55942
+ });
55943
+ return operationNode;
55843
55944
  }
55844
55945
 
55845
55946
  function findSpanningField(name) {
@@ -56040,44 +56141,87 @@
56040
56141
  const base64encode = typeof btoa === 'function' ? btoa : btoaPolyfill;
56041
56142
  const base64decode = typeof atob === 'function' ? atob : atobPolyfill;
56042
56143
 
56144
+ // this truthy value is used to indicate a premature end of results
56145
+ const EARLY_END = 1;
56043
56146
  function cursorResolver(source) {
56044
- return encodeV1Cursor(source.index);
56147
+ let cursor = {
56148
+ i: source.index,
56149
+ };
56150
+ if (source.earlyEnd) {
56151
+ cursor.e = EARLY_END;
56152
+ }
56153
+ return encodeV1Cursor(cursor);
56045
56154
  }
56046
56155
  function pageInfoResolver(source) {
56047
56156
  if (source.records.length === 0) {
56157
+ // we may have found no records, but if more exist we need to
56158
+ // return a valid cursor that can be passed as the next `after`
56159
+ if (source.earlyEnd) {
56160
+ return {
56161
+ startCursor: null,
56162
+ endCursor: encodeV1Cursor({
56163
+ i: source.offset,
56164
+ e: EARLY_END,
56165
+ }),
56166
+ hasNextPage: source.hasNextPage,
56167
+ };
56168
+ }
56048
56169
  return {
56049
56170
  startCursor: null,
56050
56171
  endCursor: null,
56051
- hasNextPage: false,
56172
+ hasNextPage: source.hasNextPage,
56052
56173
  };
56053
56174
  }
56054
56175
  let startIndex = source.records[0].index;
56176
+ let startCursor = {
56177
+ i: startIndex,
56178
+ };
56055
56179
  let endIndex = source.records[source.records.length - 1].index;
56180
+ let endCursor = {
56181
+ i: endIndex,
56182
+ };
56183
+ if (source.earlyEnd) {
56184
+ startCursor.e = EARLY_END;
56185
+ endCursor.e = EARLY_END;
56186
+ }
56056
56187
  return {
56057
- startCursor: encodeV1Cursor(startIndex),
56058
- endCursor: encodeV1Cursor(endIndex),
56188
+ startCursor: encodeV1Cursor(startCursor),
56189
+ endCursor: encodeV1Cursor(endCursor),
56059
56190
  hasNextPage: source.hasNextPage,
56060
56191
  };
56061
56192
  }
56062
56193
  function pageResultCountResolver(source) {
56063
56194
  return source.records.length;
56064
56195
  }
56065
- function encodeV1Cursor(index) {
56066
- return base64encode(`v1:${index}`);
56196
+ function isLocalCursor(maybeCursor) {
56197
+ return (!!maybeCursor &&
56198
+ typeof maybeCursor === 'object' &&
56199
+ 'i' in maybeCursor &&
56200
+ typeof maybeCursor.i === 'number');
56201
+ }
56202
+ function encodeV1Cursor(cursor) {
56203
+ return base64encode(stringify$3(cursor));
56067
56204
  }
56068
- const cursorRegex = /^v1:(?<index>\d+)$/;
56205
+ const CURSOR_PARSE_ERROR = 'Unable to parse cursor';
56069
56206
  function decodeV1Cursor(base64cursor) {
56070
- const cursor = base64decode(base64cursor);
56071
- if (!cursor) {
56207
+ let maybeCursor;
56208
+ try {
56209
+ const cursorString = base64decode(base64cursor);
56210
+ maybeCursor = parse$3(cursorString);
56211
+ }
56212
+ catch (error) {
56213
+ let message = CURSOR_PARSE_ERROR;
56214
+ if (error instanceof Error) {
56215
+ message += ': ' + error.message;
56216
+ }
56072
56217
  // eslint-disable-next-line @salesforce/lds/no-error-in-production
56073
- throw new Error('Unable to parse cursor');
56218
+ throw new Error(message);
56074
56219
  }
56075
- const found = cursor.match(cursorRegex);
56076
- if (!found || !found.groups) {
56220
+ if (!isLocalCursor(maybeCursor)) {
56077
56221
  // eslint-disable-next-line @salesforce/lds/no-error-in-production
56078
- throw new Error('Unable to parse cursor');
56222
+ throw new Error(CURSOR_PARSE_ERROR);
56079
56223
  }
56080
- return Number(found.groups.index);
56224
+ return maybeCursor;
56081
56225
  }
56082
56226
  /**
56083
56227
  * Check the selections for any selection matching `pageInfo { hasNextPage }`
@@ -56115,6 +56259,164 @@
56115
56259
  return false;
56116
56260
  }
56117
56261
 
56262
+ const END_CURSOR = '__END__';
56263
+ // find the closest matching cursor in the server pagination metadata
56264
+ function mapCursorValue(originalValue, paginationMetadata) {
56265
+ let mappedValue = null;
56266
+ if (!originalValue) {
56267
+ return mappedValue;
56268
+ }
56269
+ // flip the pagination metadata into an array by index.
56270
+ let cursors = [];
56271
+ for (const [cursor, index] of Object.entries(paginationMetadata)) {
56272
+ if (index === undefined)
56273
+ continue;
56274
+ cursors[index] = cursor;
56275
+ }
56276
+ let cursor = decodeV1Cursor(originalValue);
56277
+ // cursors contain 1-based indexes, adjust back to 0-based
56278
+ let index = cursor.i - 1;
56279
+ if (
56280
+ // cursor.e being truthy means we had premature end of results and
56281
+ // should pin to the last known server cursor
56282
+ !cursor.e &&
56283
+ // check that the index we have is within the bounds of known cursors
56284
+ index >= 0 &&
56285
+ index < cursors.length &&
56286
+ // and make sure the cursor is not the server end marker
56287
+ cursors[index] !== END_CURSOR) {
56288
+ mappedValue = cursors[index];
56289
+ }
56290
+ else {
56291
+ // in this case, either our local cursor is beyond the max server cursor, or
56292
+ // the local cursor precedes the max server cursor and we ran out of locally
56293
+ // cached results. either way, find the last known server cursor and map to that.
56294
+ for (let i = cursors.length; i > 0; --i) {
56295
+ let cursor = cursors[i - 1];
56296
+ if (cursor !== END_CURSOR) {
56297
+ mappedValue = cursor;
56298
+ break;
56299
+ }
56300
+ }
56301
+ }
56302
+ return mappedValue;
56303
+ }
56304
+ // map all pagination cursors in the document
56305
+ async function mapPaginationCursors(originalAST, variables, store) {
56306
+ // first pass, identify record query cache keys for reading pagination metadata
56307
+ let requiredPaginationMetadataKeys = [];
56308
+ visit$1(originalAST, {
56309
+ Field(node, _key, _parent, _path, ancestors) {
56310
+ // is it a record query?
56311
+ if (!isRecordQuery(node)) {
56312
+ return;
56313
+ }
56314
+ // does it have a defined `after` argument?
56315
+ let after = node.arguments &&
56316
+ node.arguments.find((a) => {
56317
+ return a.name.value === 'after';
56318
+ });
56319
+ if (after && (after.value.kind === 'StringValue' || after.value.kind === 'Variable')) {
56320
+ let operationNode = operationNodeAncestor(ancestors);
56321
+ if (!operationNode) {
56322
+ return false;
56323
+ }
56324
+ let key = buildKeyStringForRecordQuery(operationNode, variables, node.arguments || [], node.name.value);
56325
+ requiredPaginationMetadataKeys.push(key);
56326
+ }
56327
+ // don't need to descend into this node
56328
+ return false;
56329
+ },
56330
+ });
56331
+ // read pagination metadata for identified record queries
56332
+ let paginationMetadataMap = await readPaginationMetadataForKeys(requiredPaginationMetadataKeys, store.query.bind(store));
56333
+ // holds the original cursor values that were mapped back to server cursors
56334
+ let mappedCursors = new Map();
56335
+ // rewrite nodes/variables with mapped cursors now that we read the pagination metadata
56336
+ let ast = visit$1(originalAST, {
56337
+ Field(node, _key, _parent, _path, ancestors) {
56338
+ // is it a record query?
56339
+ if (!isRecordQuery(node)) {
56340
+ // not returning false, we might be in the parent of a record query
56341
+ return;
56342
+ }
56343
+ // does it have a defined `after` argument?
56344
+ if (!node.arguments)
56345
+ return false;
56346
+ let after = node.arguments.find((a) => {
56347
+ return a.name.value === 'after';
56348
+ });
56349
+ if (!after)
56350
+ return false;
56351
+ if (after.value.kind === 'StringValue' || after.value.kind === 'Variable') {
56352
+ let operationNode = operationNodeAncestor(ancestors);
56353
+ if (!operationNode) {
56354
+ return false;
56355
+ }
56356
+ let key = buildKeyStringForRecordQuery(operationNode, variables, node.arguments || [], node.name.value);
56357
+ // pagination metadata may be missing, e.g. due to being offline
56358
+ let paginationMetadata = paginationMetadataMap.get(key) || {};
56359
+ if (after.value.kind === 'StringValue') {
56360
+ let originalValue = after.value.value;
56361
+ mappedCursors.set(key, originalValue);
56362
+ let mappedValue = mapCursorValue(originalValue, paginationMetadata);
56363
+ if (!mappedValue) {
56364
+ // there were no results from the server, remove after argument
56365
+ return {
56366
+ ...node,
56367
+ arguments: node.arguments.filter((a) => a !== after),
56368
+ };
56369
+ }
56370
+ // return a new replacement node
56371
+ return {
56372
+ ...node,
56373
+ arguments: node.arguments.map((a) => {
56374
+ if (a !== after)
56375
+ return a;
56376
+ return {
56377
+ ...a,
56378
+ value: {
56379
+ kind: 'StringValue',
56380
+ value: mappedValue,
56381
+ },
56382
+ };
56383
+ }),
56384
+ };
56385
+ }
56386
+ else if (after.value.kind === 'Variable') {
56387
+ // rewrite the variable
56388
+ let variableName = after.value.name.value;
56389
+ let variableValue = variables[variableName];
56390
+ mappedCursors.set(key, variableValue);
56391
+ let mappedValue = mapCursorValue(variableValue, paginationMetadata);
56392
+ variables[variableName] = mappedValue;
56393
+ }
56394
+ // don't need to descend into this node
56395
+ return false;
56396
+ }
56397
+ },
56398
+ });
56399
+ return {
56400
+ ast,
56401
+ mappedCursors,
56402
+ };
56403
+ }
56404
+ async function readPaginationMetadataForKeys(keys, query) {
56405
+ let metadataMap = new Map();
56406
+ if (keys.length === 0)
56407
+ return metadataMap;
56408
+ const sql = `SELECT key, data FROM lds_data WHERE key in (${Array(keys.length)
56409
+ .fill('?')
56410
+ .join(',')})`;
56411
+ const results = await query(sql, keys.map((k) => k + '__pagination'));
56412
+ for (let row of results.rows) {
56413
+ let key = row[0].replace(/__pagination$/, '');
56414
+ let metadata = parse$3(row[1]);
56415
+ metadataMap.set(key, metadata);
56416
+ }
56417
+ return metadataMap;
56418
+ }
56419
+
56118
56420
  /*
56119
56421
  resolves connections...
56120
56422
  */
@@ -56136,8 +56438,14 @@
56136
56438
  const childRelationship = parentObjectInfo &&
56137
56439
  parentObjectInfo.childRelationships.find((rel) => rel.relationshipName === info.fieldName);
56138
56440
  // or emit/throw if we want to report it
56139
- if (!childRelationship)
56140
- return { records: [], hasNextPage: false };
56441
+ if (!childRelationship) {
56442
+ return {
56443
+ records: [],
56444
+ hasNextPage: false,
56445
+ earlyEnd: false,
56446
+ offset: 0,
56447
+ };
56448
+ }
56141
56449
  alias = childRelationship.childObjectApiName;
56142
56450
  childRelationshipFieldName = childRelationship.fieldName;
56143
56451
  }
@@ -56156,7 +56464,12 @@
56156
56464
  }
56157
56465
  let offset = 0;
56158
56466
  if (args.after) {
56159
- offset = decodeV1Cursor(args.after) + 1;
56467
+ let originalCursor = context.mappedCursors.get(queryCacheKey);
56468
+ if (!originalCursor) {
56469
+ // eslint-disable-next-line @salesforce/lds/no-error-in-production
56470
+ throw new Error('Internal Error: unable to determine `after` cursor value');
56471
+ }
56472
+ offset = decodeV1Cursor(originalCursor).i;
56160
56473
  }
56161
56474
  // if the query wants to know `hasNextPage` then we need to request 1 additional record
56162
56475
  let selections = info.fieldNodes
@@ -56165,7 +56478,7 @@
56165
56478
  let wantsHasNextPage = selectionIncludesHasNextPage(selections, info.fragments);
56166
56479
  let paginationMetadata = undefined;
56167
56480
  if (wantsHasNextPage) {
56168
- paginationMetadata = await readPaginationMetadataForKey(queryCacheKey, query);
56481
+ paginationMetadata = await readPaginationMetadataForKeys([queryCacheKey], query);
56169
56482
  }
56170
56483
  let internalLimit = limit + (wantsHasNextPage ? 1 : 0);
56171
56484
  // Alias starts as entity's ApiName
@@ -56176,36 +56489,60 @@
56176
56489
  orderBy: orderByToPredicate(args.orderBy, alias, alias, context.objectInfos),
56177
56490
  limit: internalLimit,
56178
56491
  offset,
56179
- ingestionTimestamp,
56180
56492
  };
56181
56493
  const { sql, bindings } = buildQuery(queryConfig);
56182
56494
  const results = await query(sql, bindings);
56183
56495
  let hasNextPage = false;
56496
+ let earlyEnd = false;
56184
56497
  if (wantsHasNextPage) {
56185
56498
  if (results.rows.length > limit) {
56186
56499
  // more records exist in the cache
56187
56500
  hasNextPage = true;
56188
56501
  results.rows.pop();
56189
56502
  }
56190
- else if (!paginationMetadata || paginationMetadata.__END__ === undefined) {
56503
+ else if (!paginationMetadata ||
56504
+ !paginationMetadata.has(queryCacheKey) ||
56505
+ paginationMetadata.get(queryCacheKey).__END__ === undefined) {
56191
56506
  // more records may exist on the server
56192
56507
  hasNextPage = true;
56508
+ // we hit the end of our local records, so we need to know that we
56509
+ // should start at the end of known server cursors
56510
+ if (results.rows.length < limit) {
56511
+ earlyEnd = true;
56512
+ }
56193
56513
  }
56194
56514
  }
56195
56515
  //map each sql result with the ingestion timestamp to pass it down a level
56196
- let records = results.rows
56197
- .map((row) => parse$3(row[0]))
56198
- .map((recordRepresentation, index) => {
56516
+ let records = results.rows.map((row, index) => {
56517
+ const recordMetadataResult = {
56518
+ recordRepresentation: parse$3(row[0]),
56519
+ metadata: parse$3(row[1]),
56520
+ };
56521
+ const { recordRepresentation, metadata } = recordMetadataResult;
56199
56522
  context.seenRecordIds.add(recordRepresentation.id);
56523
+ if (metadata.ingestionTimestamp < ingestionTimestamp &&
56524
+ recordRepresentation.drafts === undefined) {
56525
+ if (context.possibleStaleRecordMap.has(recordRepresentation.apiName) === false) {
56526
+ context.possibleStaleRecordMap.set(recordRepresentation.apiName, []);
56527
+ }
56528
+ const ids = context.possibleStaleRecordMap.get(recordRepresentation.apiName);
56529
+ if (ids !== undefined) {
56530
+ ids.push(recordRepresentation.id);
56531
+ context.possibleStaleRecordMap.set(recordRepresentation.apiName, ids);
56532
+ }
56533
+ }
56200
56534
  return {
56201
56535
  recordRepresentation,
56202
56536
  ingestionTimestamp,
56203
- index: index + offset,
56537
+ index: index + offset + 1,
56538
+ earlyEnd,
56204
56539
  };
56205
56540
  });
56206
56541
  return {
56207
56542
  records,
56208
56543
  hasNextPage,
56544
+ earlyEnd,
56545
+ offset,
56209
56546
  };
56210
56547
  }
56211
56548
  /**
@@ -57003,7 +57340,7 @@
57003
57340
  return 'TextAreaValue';
57004
57341
  }
57005
57342
 
57006
- async function evaluate(config, observers, settings, objectInfos, store, snapshot, cache, draftFunctions) {
57343
+ async function evaluate(config, observers, settings, objectInfos, store, snapshot, cache, draftFunctions, mappedCursors) {
57007
57344
  const eventEmitter = createCustomAdapterEventEmitter(GRAPHQL_EVAL_NAMESPACE, observers);
57008
57345
  // this is only wrapped in a try to execute the event after the result was returned
57009
57346
  try {
@@ -57062,7 +57399,7 @@
57062
57399
  eventEmitter({ type: 'graphql-preconditions-met' });
57063
57400
  // create the resolver request context, runtime values and functions for
57064
57401
  // resolvers to do their job.
57065
- const contextValue = createContext(store, objectInfos, eventEmitter, settings, snapshot, draftFunctions);
57402
+ const contextValue = createContext(store, objectInfos, eventEmitter, settings, snapshot, mappedCursors, draftFunctions);
57066
57403
  // We're building this from scratch from each request. If this becomes a
57067
57404
  // hotspot we can pull it up and memoize it later
57068
57405
  const schema = createSchemaWithCache(objectInfos, cache);
@@ -57087,7 +57424,11 @@
57087
57424
  seenRecordIds.push(queryString);
57088
57425
  });
57089
57426
  }
57090
- return { result, seenRecordIds };
57427
+ return {
57428
+ result,
57429
+ seenRecordIds,
57430
+ possibleStaleRecordMap: contextValue.possibleStaleRecordMap,
57431
+ };
57091
57432
  }
57092
57433
  finally {
57093
57434
  eventEmitter({ type: 'graphql-eval-end' });
@@ -58811,7 +59152,11 @@
58811
59152
  return async function draftAwareGraphQLAdapter(config, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy, requestContext = {}) {
58812
59153
  //create a copy to not accidentally modify the AST in the astResolver map of luvio
58813
59154
  const copy = parse$3(stringify$3(config.query));
59155
+ // the injected ast has extra fields needed for eval in it
58814
59156
  let injectedAST;
59157
+ // the cursor mapped ast is passed upstream so it won't reject on our local cursors
59158
+ let cursorMappedAST;
59159
+ let mappedCursors = new Map();
58815
59160
  let objectInfoNeeded = {};
58816
59161
  let unmappedDraftIDs;
58817
59162
  let internalRequestContext = {
@@ -58827,6 +59172,7 @@
58827
59172
  objectInfos: objectInfoNeeded,
58828
59173
  unmappedDraftIDs,
58829
59174
  } = await injectSyntheticFields(copy, objectInfoService, draftFunctions, config.variables));
59175
+ ({ ast: cursorMappedAST, mappedCursors } = await mapPaginationCursors(injectedAST, config.variables || {}, store));
58830
59176
  if (config.variables) {
58831
59177
  config.variables = replaceDraftIdsInVariables$1(config.variables, draftFunctions, unmappedDraftIDs);
58832
59178
  }
@@ -58858,7 +59204,7 @@
58858
59204
  const nonEvaluatedSnapshot = (await luvio.applyCachePolicy(internalRequestContext, {
58859
59205
  config: {
58860
59206
  ...config,
58861
- query: injectedAST,
59207
+ query: cursorMappedAST,
58862
59208
  },
58863
59209
  luvio,
58864
59210
  gqlEval: true,
@@ -58871,12 +59217,17 @@
58871
59217
  : [];
58872
59218
  let gqlResult;
58873
59219
  let seenRecordIds;
59220
+ let possibleStaleRecordMap;
58874
59221
  try {
58875
- ({ result: gqlResult, seenRecordIds } = await evaluate({
59222
+ ({
59223
+ result: gqlResult,
59224
+ seenRecordIds,
59225
+ possibleStaleRecordMap,
59226
+ } = await evaluate({
58876
59227
  ...config,
58877
59228
  //need to create another copy of the ast for future writes
58878
59229
  query: parse$3(stringify$3(injectedAST)),
58879
- }, observers, { userId }, objectInfoNeeded, store, nonEvaluatedSnapshot, graphqlSchemaCache));
59230
+ }, observers, { userId }, objectInfoNeeded, store, nonEvaluatedSnapshot, graphqlSchemaCache, draftFunctions, mappedCursors));
58880
59231
  }
58881
59232
  catch (throwable) {
58882
59233
  const error = throwable;
@@ -58902,13 +59253,18 @@
58902
59253
  const seenRecords = createSeenRecords(seenRecordIds, nonEvaluatedSnapshot);
58903
59254
  const recordId = generateUniqueRecordId();
58904
59255
  const rebuildWithLocalEval = async (originalSnapshot) => {
58905
- let { result: rebuildResult, seenRecordIds } = await evaluate({
59256
+ let { result: rebuildResult, seenRecordIds, possibleStaleRecordMap, } = await evaluate({
58906
59257
  ...config,
58907
59258
  query: injectedAST,
58908
- }, observers, { userId }, objectInfoNeeded, store, originalSnapshot, graphqlSchemaCache, draftFunctions);
59259
+ }, observers, { userId }, objectInfoNeeded, store, originalSnapshot, graphqlSchemaCache, draftFunctions, mappedCursors);
58909
59260
  if (!rebuildResult.errors) {
58910
59261
  rebuildResult = removeSyntheticFields(rebuildResult, config.query);
58911
59262
  }
59263
+ let snapshotState = 'Fulfilled';
59264
+ if (possibleStaleRecordMap.size > 0) {
59265
+ initiateStaleRecordRefresh(luvio, possibleStaleRecordMap);
59266
+ snapshotState = 'Stale';
59267
+ }
58912
59268
  if (objectsDeepEqual(rebuildResult, originalSnapshot.data)) {
58913
59269
  return originalSnapshot;
58914
59270
  }
@@ -58917,6 +59273,7 @@
58917
59273
  ...originalSnapshot,
58918
59274
  data: rebuildResult,
58919
59275
  recordId,
59276
+ state: snapshotState,
58920
59277
  seenRecords: createSeenRecords(seenRecordIds, nonEvaluatedSnapshot),
58921
59278
  rebuildWithLocalEval,
58922
59279
  };
@@ -58954,9 +59311,31 @@
58954
59311
  },
58955
59312
  };
58956
59313
  }
59314
+ if (possibleStaleRecordMap.size > 0) {
59315
+ initiateStaleRecordRefresh(luvio, possibleStaleRecordMap);
59316
+ resultSnapshot.state = 'Stale';
59317
+ }
58957
59318
  return resultSnapshot;
58958
59319
  };
58959
59320
  }
59321
+ function initiateStaleRecordRefresh(luvio, keyMap) {
59322
+ const staleRecordKeys = from$1(keyMap.values())
59323
+ .flat()
59324
+ .map((id) => `UiApi::RecordRepresentation:${id}`);
59325
+ luvio.storeExpirePossibleStaleRecords(staleRecordKeys, makeGetRecordsConfig(keyMap), getRecordsAdapterFactory(luvio));
59326
+ }
59327
+ function makeGetRecordsConfig(keyMap) {
59328
+ const records = [];
59329
+ keyMap.forEach((recordIds, apiName) => {
59330
+ records.push({
59331
+ recordIds,
59332
+ fields: [`${apiName}.Id`],
59333
+ });
59334
+ });
59335
+ return {
59336
+ records,
59337
+ };
59338
+ }
58960
59339
 
58961
59340
  function environmentAwareGraphQLBatchAdapterFactory(objectInfoService, luvio, isDraftId) {
58962
59341
  return async function environmentAwareGraphQLBatchAdapter(config, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy, requestContext = {}) {
@@ -60102,6 +60481,9 @@
60102
60481
  removeHandler(_id) {
60103
60482
  return Promise.reject(new Error('Cannot call setMetadata from the NimbusDraftQueue'));
60104
60483
  }
60484
+ updateDraftAction(_action) {
60485
+ return Promise.reject(new Error('Cannot call updateDraftAction from the NimbusDraftQueue'));
60486
+ }
60105
60487
  }
60106
60488
 
60107
60489
  function attachObserversToAdapterRequestContext(observers, adapterRequestContext) {
@@ -61215,6 +61597,21 @@
61215
61597
  __nimbus.plugins.LdsBinaryStorePlugin.setCanonicalUrl(uri, canonicalUrl, ttlSeconds, resolve, (err) => reject(errorMessageToError(err)));
61216
61598
  });
61217
61599
  },
61600
+ createStream: function (type) {
61601
+ return new Promise((resolve, reject) => {
61602
+ __nimbus.plugins.LdsBinaryStorePlugin.createStream(type, resolve, (err) => reject(errorMessageToError(err)));
61603
+ });
61604
+ },
61605
+ writeToStream: function (uri, chunk) {
61606
+ return new Promise((resolve, reject) => {
61607
+ __nimbus.plugins.LdsBinaryStorePlugin.writeToStream(uri, chunk, resolve, (err) => reject(errorMessageToError(err)));
61608
+ });
61609
+ },
61610
+ closeStream: function (uri) {
61611
+ return new Promise((resolve, reject) => {
61612
+ __nimbus.plugins.LdsBinaryStorePlugin.closeStream(uri, resolve, (err) => reject(errorMessageToError(err)));
61613
+ });
61614
+ },
61218
61615
  };
61219
61616
 
61220
61617
  /**
@@ -62542,7 +62939,6 @@
62542
62939
  let lazyNetworkAdapter;
62543
62940
  let lazyObjectInfoService;
62544
62941
  let lazyGetRecords;
62545
- // TODO [W-123]: JHORST hoist, optimize and test this function
62546
62942
  const shouldFlush = (key, value) => {
62547
62943
  if (!isStoreKeyRecordId$1(key)) {
62548
62944
  return { flushValue: true };
@@ -62713,7 +63109,7 @@
62713
63109
  id: '@salesforce/lds-network-adapter',
62714
63110
  instrument: instrument$2,
62715
63111
  });
62716
- // version: 1.303.0-a698c7cc67
63112
+ // version: 1.304.0-aa3e5f9550
62717
63113
 
62718
63114
  const { create: create$3, keys: keys$3 } = Object;
62719
63115
  const { stringify: stringify$1, parse: parse$1 } = JSON;
@@ -82749,7 +83145,7 @@
82749
83145
  configuration: { ...configurationForGraphQLAdapters$1 },
82750
83146
  instrument: instrument$1,
82751
83147
  });
82752
- // version: 1.303.0-b6ed223d95
83148
+ // version: 1.304.0-d87b57badb
82753
83149
 
82754
83150
  // On core the unstable adapters are re-exported with different names,
82755
83151
  // we want to match them here.
@@ -85005,7 +85401,7 @@
85005
85401
  unstable_graphQL_imperative = createImperativeAdapter(luvio, createInstrumentedAdapter(ldsAdapter, adapterMetadata), adapterMetadata);
85006
85402
  graphQLImperative = ldsAdapter;
85007
85403
  });
85008
- // version: 1.303.0-b6ed223d95
85404
+ // version: 1.304.0-d87b57badb
85009
85405
 
85010
85406
  var gqlApi = /*#__PURE__*/Object.freeze({
85011
85407
  __proto__: null,
@@ -85740,7 +86136,7 @@
85740
86136
  function register(r) {
85741
86137
  callbacks$1.forEach((callback) => callback(r));
85742
86138
  }
85743
- // version: 1.303.0-a698c7cc67
86139
+ // version: 1.304.0-aa3e5f9550
85744
86140
 
85745
86141
  /**
85746
86142
  * Returns true if the value acts like a Promise, i.e. has a "then" function,
@@ -90721,4 +91117,4 @@
90721
91117
  exports.subscribeToAdapter = subscribeToAdapter;
90722
91118
 
90723
91119
  }));
90724
- // version: 1.303.0-a698c7cc67
91120
+ // version: 1.304.0-aa3e5f9550