@salesforce/lds-runtime-mobile 1.199.0 → 1.199.1

This diff shows the content of publicly available package versions that have been published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (25)
  1. package/dist/main.js +215 -208
  2. package/dist/types/durableStore/NimbusSqliteStore/tables/LdsDraftIdMapDataTable.d.ts +1 -1
  3. package/dist/types/durableStore/NimbusSqliteStore/tables/LdsDraftsDataTable.d.ts +1 -1
  4. package/dist/types/durableStore/NimbusSqliteStore/tables/index.d.ts +0 -3
  5. package/dist/types/durableStore/index.d.ts +2 -1
  6. package/dist/types/priming/primingSessionFactory.d.ts +1 -1
  7. package/package.json +4 -3
  8. package/sfdc/main.js +215 -208
  9. package/sfdc/types/durableStore/NimbusSqliteStore/tables/LdsDraftIdMapDataTable.d.ts +1 -1
  10. package/sfdc/types/durableStore/NimbusSqliteStore/tables/LdsDraftsDataTable.d.ts +1 -1
  11. package/sfdc/types/durableStore/NimbusSqliteStore/tables/index.d.ts +0 -3
  12. package/sfdc/types/durableStore/index.d.ts +2 -1
  13. package/sfdc/types/priming/primingSessionFactory.d.ts +1 -1
  14. package/dist/types/durableStore/NimbusSqliteStore/NimbusSqliteStore.d.ts +0 -20
  15. package/dist/types/durableStore/NimbusSqliteStore/tables/AbstractKeyValueDataTable.d.ts +0 -14
  16. package/dist/types/durableStore/NimbusSqliteStore/tables/LdsDataTable.d.ts +0 -15
  17. package/dist/types/durableStore/NimbusSqliteStore/tables/LdsDataTableBase.d.ts +0 -8
  18. package/dist/types/durableStore/NimbusSqliteStore/tables/LdsInternalDataTable.d.ts +0 -15
  19. package/dist/types/durableStore/NimbusSqliteStore/tables/utils.d.ts +0 -2
  20. package/sfdc/types/durableStore/NimbusSqliteStore/NimbusSqliteStore.d.ts +0 -20
  21. package/sfdc/types/durableStore/NimbusSqliteStore/tables/AbstractKeyValueDataTable.d.ts +0 -14
  22. package/sfdc/types/durableStore/NimbusSqliteStore/tables/LdsDataTable.d.ts +0 -15
  23. package/sfdc/types/durableStore/NimbusSqliteStore/tables/LdsDataTableBase.d.ts +0 -8
  24. package/sfdc/types/durableStore/NimbusSqliteStore/tables/LdsInternalDataTable.d.ts +0 -15
  25. package/sfdc/types/durableStore/NimbusSqliteStore/tables/utils.d.ts +0 -2
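Most of the delta in package/dist/main.js below is mechanical: the bundler deconflicts colliding top-level bindings by appending numeric suffixes, so adding or reordering a module shifts those suffixes (for example keys$7 becomes keys$8 and parse$5 becomes parse$6) without changing behavior. The substantive change sits near the end of the file, where the draft data tables are no longer hard-wired into NimbusSqliteStore but are built in getNimbusDurableStore and passed in. A minimal illustration of the suffixing, assuming a rollup-style bundler and two hypothetical modules a.js and b.js that both destructure Object:

    // a.js
    const { keys } = Object;
    // b.js
    const { keys } = Object;

    // one possible flattened bundle: colliding bindings are renamed with
    // numeric suffixes, and those suffixes shift whenever module order changes
    const { keys: keys$1 } = Object;  // from a.js
    const { keys } = Object;          // from b.js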
package/dist/main.js CHANGED
@@ -40,10 +40,10 @@ import ldsPrimingGraphqlBatch from '@salesforce/gate/lds.primingGraphqlBatch';
40
40
  * For full license text, see the LICENSE.txt file
41
41
  */
42
42
 
43
- const { parse: parse$5, stringify: stringify$5 } = JSON;
43
+ const { parse: parse$6, stringify: stringify$6 } = JSON;
44
44
  const { join: join$2, push: push$2, unshift } = Array.prototype;
45
45
  const { isArray: isArray$5 } = Array;
46
- const { entries: entries$3, keys: keys$7 } = Object;
46
+ const { entries: entries$4, keys: keys$8 } = Object;
47
47
 
48
48
  const UI_API_BASE_URI = '/services/data/v59.0/ui-api';
49
49
 
@@ -108,7 +108,7 @@ function isSpanningRecord$1(fieldValue) {
108
108
  function mergeRecordFields$1(first, second) {
109
109
  const { fields: targetFields } = first;
110
110
  const { fields: sourceFields } = second;
111
- const fieldNames = keys$7(sourceFields);
111
+ const fieldNames = keys$8(sourceFields);
112
112
  for (let i = 0, len = fieldNames.length; i < len; i += 1) {
113
113
  const fieldName = fieldNames[i];
114
114
  const sourceField = sourceFields[fieldName];
@@ -343,10 +343,10 @@ function getTransactionKey(req) {
343
343
  const { resourceRequest } = req;
344
344
  const { baseUri, basePath, queryParams, headers } = resourceRequest;
345
345
  const path = `${baseUri}${basePath}`;
346
- const queryParamsString = queryParams ? stringify$5(queryParams) : EMPTY_STRING;
347
- const headersString = stringify$5(headers);
346
+ const queryParamsString = queryParams ? stringify$6(queryParams) : EMPTY_STRING;
347
+ const headersString = stringify$6(headers);
348
348
  const bodyString = resourceRequest.body && isResourceRequestDedupable(req)
349
- ? stringify$5(resourceRequest.body)
349
+ ? stringify$6(resourceRequest.body)
350
350
  : EMPTY_STRING;
351
351
  return `${path}${TRANSACTION_KEY_SEP}${headersString}${TRANSACTION_KEY_SEP}${queryParamsString}${bodyString}`;
352
352
  }
@@ -355,7 +355,7 @@ function getFulfillingRequest(inflightRequests, resourceRequest) {
355
355
  if (fulfill === undefined) {
356
356
  return null;
357
357
  }
358
- const handlersMap = entries$3(inflightRequests);
358
+ const handlersMap = entries$4(inflightRequests);
359
359
  for (let i = 0, len = handlersMap.length; i < len; i += 1) {
360
360
  const [transactionKey, handlers] = handlersMap[i];
361
361
  // check fulfillment against only the first handler ([0]) because it's equal or
@@ -417,7 +417,7 @@ const dedupeRequest = (req) => {
417
417
  // extra clone (particularly when there's only 1 handler).
418
418
  for (let i = 1, len = handlers.length; i < len; i++) {
419
419
  const handler = handlers[i];
420
- handler.resolve(parse$5(stringify$5(response)));
420
+ handler.resolve(parse$6(stringify$6(response)));
421
421
  }
422
422
  handlers[0].resolve(response);
423
423
  }, (error) => {
@@ -523,7 +523,7 @@ function isDeprecatedDurableStoreEntry(durableRecord) {
523
523
  const DefaultDurableSegment = 'DEFAULT';
524
524
  const RedirectDurableSegment = 'REDIRECT_KEYS';
525
525
 
526
- const { keys: keys$6, create: create$5, assign: assign$5, freeze: freeze$1 } = Object;
526
+ const { keys: keys$7, create: create$6, assign: assign$6, freeze: freeze$1 } = Object;
527
527
 
528
528
  //Durable store error instrumentation key
529
529
  const DURABLE_STORE_ERROR = 'durable-store-error';
@@ -570,7 +570,7 @@ function publishDurableStoreEntries(durableRecords, put, publishMetadata) {
570
570
  if (durableRecords === undefined) {
571
571
  return { revivedKeys, hadUnexpectedShape };
572
572
  }
573
- const durableKeys = keys$6(durableRecords);
573
+ const durableKeys = keys$7(durableRecords);
574
574
  if (durableKeys.length === 0) {
575
575
  // no records to revive
576
576
  return { revivedKeys, hadUnexpectedShape };
@@ -744,7 +744,7 @@ class DurableTTLStore {
744
744
  overrides,
745
745
  };
746
746
  }
747
- const keys$1 = keys$6(entries);
747
+ const keys$1 = keys$7(entries);
748
748
  for (let i = 0, len = keys$1.length; i < len; i++) {
749
749
  const key = keys$1[i];
750
750
  const entry = entries[key];
@@ -766,13 +766,13 @@ class DurableTTLStore {
766
766
  }
767
767
 
768
768
  function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStoreErrorHandler, redirects, additionalDurableStoreOperations = []) {
769
- const durableRecords = create$5(null);
770
- const evictedRecords = create$5(null);
769
+ const durableRecords = create$6(null);
770
+ const evictedRecords = create$6(null);
771
771
  const { records, metadata: storeMetadata, visitedIds, refreshedIds, } = store.fallbackStringKeyInMemoryStore;
772
772
  // TODO: W-8909393 Once metadata is stored in its own segment we need to
773
773
  // call setEntries for the visitedIds on default segment and call setEntries
774
774
  // on the metadata segment for the refreshedIds
775
- const keys$1 = keys$6({ ...visitedIds, ...refreshedIds });
775
+ const keys$1 = keys$7({ ...visitedIds, ...refreshedIds });
776
776
  for (let i = 0, len = keys$1.length; i < len; i += 1) {
777
777
  const key = keys$1[i];
778
778
  const record = records[key];
@@ -794,7 +794,7 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
794
794
  }
795
795
  const durableStoreOperations = additionalDurableStoreOperations;
796
796
  // publishes
797
- const recordKeys = keys$6(durableRecords);
797
+ const recordKeys = keys$7(durableRecords);
798
798
  if (recordKeys.length > 0) {
799
799
  durableStoreOperations.push({
800
800
  type: 'setEntries',
@@ -815,7 +815,7 @@ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStor
815
815
  });
816
816
  });
817
817
  // evicts
818
- const evictedKeys = keys$6(evictedRecords);
818
+ const evictedKeys = keys$7(evictedRecords);
819
819
  if (evictedKeys.length > 0) {
820
820
  durableStoreOperations.push({
821
821
  type: 'evictEntries',
@@ -876,7 +876,7 @@ const AdapterContextSegment = 'ADAPTER-CONTEXT';
876
876
  const ADAPTER_CONTEXT_ID_SUFFIX = '__NAMED_CONTEXT';
877
877
  async function reviveOrCreateContext(adapterId, durableStore, durableStoreErrorHandler, contextStores, pendingContextStoreKeys, onContextLoaded) {
878
878
  // initialize empty context store
879
- contextStores[adapterId] = create$5(null);
879
+ contextStores[adapterId] = create$6(null);
880
880
  const context = {
881
881
  set(key, value) {
882
882
  contextStores[adapterId][key] = value;
@@ -937,7 +937,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
937
937
  const pendingContextStoreKeys = new Set();
938
938
  // redirects that need to be flushed to the durable store
939
939
  const pendingStoreRedirects = new Map();
940
- const contextStores = create$5(null);
940
+ const contextStores = create$6(null);
941
941
  let initializationPromise = new Promise((resolve) => {
942
942
  const finish = () => {
943
943
  resolve();
@@ -1004,7 +1004,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
1004
1004
  try {
1005
1005
  const entries = await durableStore.getEntries(adapterContextKeysFromDifferentInstance, AdapterContextSegment);
1006
1006
  if (entries !== undefined) {
1007
- const entryKeys = keys$6(entries);
1007
+ const entryKeys = keys$7(entries);
1008
1008
  for (let i = 0, len = entryKeys.length; i < len; i++) {
1009
1009
  const entryKey = entryKeys[i];
1010
1010
  const entry = entries[entryKey];
@@ -1373,7 +1373,7 @@ function makeDurable(environment, { durableStore, instrumentation }) {
1373
1373
  type: 'stale-while-revalidate',
1374
1374
  staleDurationSeconds: Number.MAX_SAFE_INTEGER,
1375
1375
  });
1376
- return create$5(environment, {
1376
+ return create$6(environment, {
1377
1377
  publishStoreMetadata: { value: publishStoreMetadata },
1378
1378
  storeIngest: { value: storeIngest },
1379
1379
  storeIngestError: { value: storeIngestError },
@@ -2612,7 +2612,7 @@ function compoundOperatorToSql(operator) {
2612
2612
  }
2613
2613
 
2614
2614
  const { isArray: isArray$4 } = Array;
2615
- const { keys: keys$5 } = Object;
2615
+ const { keys: keys$6 } = Object;
2616
2616
 
2617
2617
  function isListValueNode(node) {
2618
2618
  return node.kind === 'ListValue';
@@ -4244,7 +4244,7 @@ function generateVariableSubQuery(valueNode, name, type, variables) {
4244
4244
  switch (valueNode.kind) {
4245
4245
  case Kind.OBJECT: {
4246
4246
  // For example, `{ Id: { eq: $draftId } }` is a `ObjectValueNode`, which has field keys 'Id'
4247
- const resultQuery = keys$5(valueNode.fields)
4247
+ const resultQuery = keys$6(valueNode.fields)
4248
4248
  .map((key) => generateVariableSubQuery(valueNode.fields[key], key, type, variables))
4249
4249
  .filter((subquery) => subquery.length > 0)
4250
4250
  .join(',');
@@ -4320,7 +4320,7 @@ function swapArgumentWithVariableNodes(swapped, original) {
4320
4320
  }
4321
4321
  function swapValueNodeWithVariableNodes(original, swapped) {
4322
4322
  if (original.kind === Kind.OBJECT) {
4323
- for (const key of keys$5(original.fields)) {
4323
+ for (const key of keys$6(original.fields)) {
4324
4324
  if (isObjectValueNode$1(swapped) && swapped.fields[key]) {
4325
4325
  if (is(original.fields[key], 'Variable')) {
4326
4326
  original.fields[key] = swapped.fields[key];
@@ -4902,12 +4902,12 @@ function createDraftSynthesisErrorResponse(message = 'failed to synthesize draft
4902
4902
  return new DraftErrorFetchResponse(HttpStatusCode.BadRequest, error);
4903
4903
  }
4904
4904
 
4905
- const { keys: keys$4, create: create$4, assign: assign$4, values: values$2 } = Object;
4906
- const { stringify: stringify$4, parse: parse$4 } = JSON;
4905
+ const { keys: keys$5, create: create$5, assign: assign$5, values: values$2 } = Object;
4906
+ const { stringify: stringify$5, parse: parse$5 } = JSON;
4907
4907
  const { isArray: isArray$3 } = Array;
4908
4908
 
4909
4909
  function clone$1(obj) {
4910
- return parse$4(stringify$4(obj));
4910
+ return parse$5(stringify$5(obj));
4911
4911
  }
4912
4912
 
4913
4913
  /**
@@ -5014,13 +5014,13 @@ function buildLuvioOverrideForDraftAdapters(luvio, handler, extractTargetIdFromC
5014
5014
  }
5015
5015
  softEvict(key);
5016
5016
  };
5017
- return create$4(luvio, {
5017
+ return create$5(luvio, {
5018
5018
  dispatchResourceRequest: { value: dispatchResourceRequest },
5019
5019
  publishStoreMetadata: { value: publishStoreMetadata },
5020
5020
  storeEvict: { value: storeEvict },
5021
5021
  });
5022
5022
  }
5023
- return create$4(luvio, {
5023
+ return create$5(luvio, {
5024
5024
  dispatchResourceRequest: { value: dispatchResourceRequest },
5025
5025
  publishStoreMetadata: { value: publishStoreMetadata },
5026
5026
  });
@@ -5054,7 +5054,7 @@ async function getDraftIdMappings(durableStore, mappingIds) {
5054
5054
  if (entries === undefined) {
5055
5055
  return mappings;
5056
5056
  }
5057
- const keys$1 = keys$4(entries);
5057
+ const keys$1 = keys$5(entries);
5058
5058
  for (const key of keys$1) {
5059
5059
  const entry = entries[key].data;
5060
5060
  if (isLegacyDraftIdMapping(key)) {
@@ -5072,7 +5072,7 @@ async function getDraftIdMappings(durableStore, mappingIds) {
5072
5072
  async function clearDraftIdSegment(durableStore) {
5073
5073
  const entries = await durableStore.getAllEntries(DRAFT_ID_MAPPINGS_SEGMENT);
5074
5074
  if (entries) {
5075
- const keys$1 = keys$4(entries);
5075
+ const keys$1 = keys$5(entries);
5076
5076
  if (keys$1.length > 0) {
5077
5077
  await durableStore.evictEntries(keys$1, DRAFT_ID_MAPPINGS_SEGMENT);
5078
5078
  }
@@ -5479,7 +5479,7 @@ class DurableDraftQueue {
5479
5479
  return this.replaceOrMergeActions(targetActionId, sourceActionId, true);
5480
5480
  }
5481
5481
  async setMetadata(actionId, metadata) {
5482
- const keys$1 = keys$4(metadata);
5482
+ const keys$1 = keys$5(metadata);
5483
5483
  const compatibleKeys = keys$1.filter((key) => {
5484
5484
  const value = metadata[key];
5485
5485
  return typeof key === 'string' && typeof value === 'string';
@@ -5629,7 +5629,7 @@ class DurableDraftStore {
5629
5629
  const waitForOngoingSync = this.syncPromise || Promise.resolve();
5630
5630
  return waitForOngoingSync.then(() => {
5631
5631
  const { draftStore } = this;
5632
- const keys$1 = keys$4(draftStore);
5632
+ const keys$1 = keys$5(draftStore);
5633
5633
  const actionArray = [];
5634
5634
  for (let i = 0, len = keys$1.length; i < len; i++) {
5635
5635
  const key = keys$1[i];
@@ -5653,7 +5653,7 @@ class DurableDraftStore {
5653
5653
  deleteByTag(tag) {
5654
5654
  const deleteAction = () => {
5655
5655
  const { draftStore } = this;
5656
- const keys$1 = keys$4(draftStore);
5656
+ const keys$1 = keys$5(draftStore);
5657
5657
  const durableKeys = [];
5658
5658
  for (let i = 0, len = keys$1.length; i < len; i++) {
5659
5659
  const key = keys$1[i];
@@ -5743,7 +5743,7 @@ class DurableDraftStore {
5743
5743
  return this.runQueuedOperations();
5744
5744
  }
5745
5745
  const { draftStore } = this;
5746
- const keys$1 = keys$4(durableEntries);
5746
+ const keys$1 = keys$5(durableEntries);
5747
5747
  for (let i = 0, len = keys$1.length; i < len; i++) {
5748
5748
  const entry = durableEntries[keys$1[i]];
5749
5749
  const action = entry.data;
@@ -5927,7 +5927,7 @@ class AbstractResourceRequestActionHandler {
5927
5927
  let updatedActionTargetId = undefined;
5928
5928
  const { tag: queueActionTag, data: queueActionRequest, id: queueActionId, } = queueAction;
5929
5929
  let { basePath, body } = queueActionRequest;
5930
- let stringifiedBody = stringify$4(body);
5930
+ let stringifiedBody = stringify$5(body);
5931
5931
  // for each redirected ID/key we loop over the operation to see if it needs
5932
5932
  // to be updated
5933
5933
  for (const { draftId, draftKey, canonicalId, canonicalKey } of redirects) {
@@ -5952,7 +5952,7 @@ class AbstractResourceRequestActionHandler {
5952
5952
  data: {
5953
5953
  ...queueActionRequest,
5954
5954
  basePath: basePath,
5955
- body: parse$4(stringifiedBody),
5955
+ body: parse$5(stringifiedBody),
5956
5956
  },
5957
5957
  };
5958
5958
  // item needs to be replaced with a new item at the new record key
@@ -5971,7 +5971,7 @@ class AbstractResourceRequestActionHandler {
5971
5971
  data: {
5972
5972
  ...queueActionRequest,
5973
5973
  basePath: basePath,
5974
- body: parse$4(stringifiedBody),
5974
+ body: parse$5(stringifiedBody),
5975
5975
  },
5976
5976
  };
5977
5977
  // item needs to be updated
@@ -6381,7 +6381,7 @@ class DraftManager {
6381
6381
  // We should always return an array, if the body is just a dictionary,
6382
6382
  // stick it in an array
6383
6383
  const body = isArray$3(action.error.body) ? action.error.body : [action.error.body];
6384
- const bodyString = stringify$4(body);
6384
+ const bodyString = stringify$5(body);
6385
6385
  item.error = {
6386
6386
  status: action.error.status || 0,
6387
6387
  ok: action.error.ok || false,
@@ -6546,7 +6546,7 @@ function makeEnvironmentDraftAware(luvio, env, durableStore, handlers, draftQueu
6546
6546
  decrementRefCount(key);
6547
6547
  };
6548
6548
  // note the makeEnvironmentUiApiRecordDraftAware will eventually go away once the adapters become draft aware
6549
- return create$4(env, {
6549
+ return create$5(env, {
6550
6550
  storePublish: { value: storePublish },
6551
6551
  handleSuccessResponse: { value: handleSuccessResponse },
6552
6552
  softEvict: { value: softEvict },
@@ -6827,8 +6827,8 @@ function isArrayLike(x) {
6827
6827
  (x.length === 0 || (x.length > 0 && Object.prototype.hasOwnProperty.call(x, x.length - 1))));
6828
6828
  }
6829
6829
 
6830
- const { create: create$3, keys: keys$3, values: values$1, entries: entries$2, assign: assign$3 } = Object;
6831
- const { stringify: stringify$3, parse: parse$3 } = JSON;
6830
+ const { create: create$4, keys: keys$4, values: values$1, entries: entries$3, assign: assign$4 } = Object;
6831
+ const { stringify: stringify$4, parse: parse$4 } = JSON;
6832
6832
  const { isArray: isArray$2 } = Array;
6833
6833
 
6834
6834
  function recordLoaderFactory(query) {
@@ -6840,7 +6840,7 @@ function recordLoaderFactory(query) {
6840
6840
  rows.forEach((row) => {
6841
6841
  if (!row[0])
6842
6842
  return null;
6843
- const record = parse$3(row[0]);
6843
+ const record = parse$4(row[0]);
6844
6844
  if (record.id === id) {
6845
6845
  foundRow = record;
6846
6846
  }
@@ -7170,11 +7170,11 @@ function dateTimePredicate(input, operator, field, alias) {
7170
7170
  return dateTimeRange(range, operator, field, alias);
7171
7171
  }
7172
7172
  // eslint-disable-next-line @salesforce/lds/no-error-in-production
7173
- throw new Error(`Where filter ${stringify$3(input)} is not supported`);
7173
+ throw new Error(`Where filter ${stringify$4(input)} is not supported`);
7174
7174
  }
7175
7175
  function dateTimeRange(input, op, field, alias) {
7176
7176
  const dateFunction = field.dataType === 'DateTime' ? 'datetime' : 'date';
7177
- const key = keys$3(input)[0];
7177
+ const key = keys$4(input)[0];
7178
7178
  let operator = op;
7179
7179
  if (operator === '=')
7180
7180
  operator = 'BETWEEN';
@@ -7280,7 +7280,7 @@ function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draf
7280
7280
  if (!where)
7281
7281
  return [];
7282
7282
  let predicates = [];
7283
- const fields = keys$3(where);
7283
+ const fields = keys$4(where);
7284
7284
  for (const field of fields) {
7285
7285
  if (field === 'and' || field === 'or') {
7286
7286
  predicates.push(processCompoundPredicate(field, where[field], recordType, alias, objectInfoMap, joins));
@@ -7329,7 +7329,7 @@ function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draf
7329
7329
  }
7330
7330
  else {
7331
7331
  // @W-12618378 polymorphic query sometimes does not work as expected on server. The GQL on certain entities could fail.
7332
- const entityNames = keys$3(where[field]);
7332
+ const entityNames = keys$4(where[field]);
7333
7333
  const polyPredicatesGroups = entityNames
7334
7334
  .filter((entityName) => fieldInfo.referenceToInfos.some((referenceInfo) => referenceInfo.apiName === entityName))
7335
7335
  .map((entityName) => {
@@ -7359,7 +7359,7 @@ function filterToPredicates(where, recordType, alias, objectInfoMap, joins, draf
7359
7359
  }
7360
7360
  else {
7361
7361
  //`field` match the filedInfo's apiName
7362
- for (const [op, value] of entries$2(where[field])) {
7362
+ for (const [op, value] of entries$3(where[field])) {
7363
7363
  const operator = operatorToSql(op);
7364
7364
  /**
7365
7365
  Two types ID processing might be needed. Draft ID swapping is optional, which depends on DraftFunctions existence.
@@ -8469,7 +8469,7 @@ function depth(json, currentLevel = 0) {
8469
8469
  if (typeof json !== 'object') {
8470
8470
  return currentLevel;
8471
8471
  }
8472
- const keys$1 = keys$3(json);
8472
+ const keys$1 = keys$4(json);
8473
8473
  if (keys$1.length === 0)
8474
8474
  return 0;
8475
8475
  const depths = keys$1.map((key) => {
@@ -8498,7 +8498,7 @@ function orderByToPredicate(orderBy, recordType, alias, objectInfoMap, joins) {
8498
8498
  return predicates;
8499
8499
  const isSpanning = depth(orderBy) > 2;
8500
8500
  if (isSpanning) {
8501
- const keys$1 = keys$3(orderBy);
8501
+ const keys$1 = keys$4(orderBy);
8502
8502
  for (let i = 0, len = keys$1.length; i < len; i++) {
8503
8503
  const key = keys$1[i];
8504
8504
  const parentFields = objectInfoMap[recordType].fields;
@@ -8528,7 +8528,7 @@ function orderByToPredicate(orderBy, recordType, alias, objectInfoMap, joins) {
8528
8528
  }
8529
8529
  }
8530
8530
  else {
8531
- const keys$1 = keys$3(orderBy);
8531
+ const keys$1 = keys$4(orderBy);
8532
8532
  for (let i = 0, len = keys$1.length; i < len; i++) {
8533
8533
  const key = keys$1[i];
8534
8534
  if (!objectInfoMap[recordType])
@@ -8762,7 +8762,7 @@ function addResolversToSchema(schema, polyFields) {
8762
8762
  if (field.name === 'node') {
8763
8763
  field.resolve = function nodeResolver(obj, _args, { seenRecordIds }) {
8764
8764
  const { record, ingestionTimestamp } = obj;
8765
- const recordRepresentation = parse$3(record);
8765
+ const recordRepresentation = parse$4(record);
8766
8766
  seenRecordIds.add(recordRepresentation.id);
8767
8767
  return { recordRepresentation, ingestionTimestamp };
8768
8768
  };
@@ -8980,7 +8980,7 @@ function buildKeyStringForRecordQuery(operation, variables, argumentNodes, curre
8980
8980
  variables,
8981
8981
  fragmentMap: {},
8982
8982
  });
8983
- const filteredArgumentNodes = assign$3([], argumentNodes).filter((node) => node.name.value !== 'first' && node.name.value !== 'after');
8983
+ const filteredArgumentNodes = assign$4([], argumentNodes).filter((node) => node.name.value !== 'first' && node.name.value !== 'after');
8984
8984
  const argumentString = filteredArgumentNodes.length > 0
8985
8985
  ? '__' + serializeFieldArguments(filteredArgumentNodes, variables)
8986
8986
  : '';
@@ -9035,7 +9035,7 @@ const baseTypeDefinitions = uiapiSchemaString + additionalSchemaDefinitions;
9035
9035
  * @returns Type definition string and entity type names which support polymorphism.
9036
9036
  */
9037
9037
  function generateTypeDefinitions(objectInfos) {
9038
- if (keys$3(objectInfos).length === 0)
9038
+ if (keys$4(objectInfos).length === 0)
9039
9039
  return { typeDefs: baseTypeDefinitions, polyFieldTypeNames: [] };
9040
9040
  const { recordQueries, recordConnections, polyFieldTypeNameArr } = generateRecordQueries(objectInfos);
9041
9041
  const typeDefs = `
@@ -9786,7 +9786,7 @@ async function resolveObjectInfos(objectInfotree, objectInfoApiMap, startNodes,
9786
9786
  // eslint-disable-next-line
9787
9787
  throw new Error(`Unable to resolve ObjectInfo(s) for ${Array.from(startNodes)}`);
9788
9788
  }
9789
- if (keys$3(objectInfos).length < startNodes.size) {
9789
+ if (keys$4(objectInfos).length < startNodes.size) {
9790
9790
  // eslint-disable-next-line
9791
9791
  throw new Error(`Unable to resolve ObjectInfo(s) for ${Array.from(startNodes)}`);
9792
9792
  }
@@ -10743,7 +10743,7 @@ function removeSyntheticFields(result, query) {
10743
10743
  // so we keep any other results that are not included in a record query
10744
10744
  const output = { ...result };
10745
10745
  const outputApiParent = output.data.uiapi.query;
10746
- const keys$1 = keys$3(nodeJson);
10746
+ const keys$1 = keys$4(nodeJson);
10747
10747
  keys$1.forEach((recordName) => {
10748
10748
  const outputApi = {};
10749
10749
  // Each connectionSelection's maps its name or alias to one of returned records. The record name could be `apiName' or alias
@@ -10763,7 +10763,7 @@ function removeSyntheticFields(result, query) {
10763
10763
  * @param jsonOutput JsonObject which will be populated with properties. It would only contains properties defined in 'FieldNode'
10764
10764
  */
10765
10765
  function createUserJsonOutput(selection, jsonInput, jsonOutput) {
10766
- const keys$1 = keys$3(jsonInput);
10766
+ const keys$1 = keys$4(jsonInput);
10767
10767
  if (selection.selectionSet) {
10768
10768
  createjsonOutput(selection.selectionSet.selections, jsonInput, jsonOutput);
10769
10769
  }
@@ -10772,7 +10772,7 @@ function createUserJsonOutput(selection, jsonInput, jsonOutput) {
10772
10772
  }
10773
10773
  }
10774
10774
  function createjsonOutput(selections, jsonInput, jsonOutput) {
10775
- const keys$1 = keys$3(jsonInput);
10775
+ const keys$1 = keys$4(jsonInput);
10776
10776
  selections.filter(isFieldNode).forEach((subSelection) => {
10777
10777
  const fieldName = subSelection.name.value;
10778
10778
  if (keys$1.includes(fieldName)) {
@@ -10832,8 +10832,8 @@ function referenceIdFieldForRelationship(relationshipName) {
10832
10832
  * For full license text, see the LICENSE.txt file
10833
10833
  */
10834
10834
 
10835
- const { keys: keys$2, values, create: create$2, assign: assign$2, freeze } = Object;
10836
- const { stringify: stringify$2, parse: parse$2 } = JSON;
10835
+ const { keys: keys$3, values, create: create$3, assign: assign$3, freeze } = Object;
10836
+ const { stringify: stringify$3, parse: parse$3 } = JSON;
10837
10837
  const { shift } = Array.prototype;
10838
10838
  const { isArray: isArray$1 } = Array;
10839
10839
 
@@ -10899,7 +10899,7 @@ function getRecordKeyForId(luvio, recordId) {
10899
10899
  */
10900
10900
  function filterOutReferenceFieldsAndLinks(record) {
10901
10901
  const filteredFields = {};
10902
- const fieldNames = keys$2(record.fields);
10902
+ const fieldNames = keys$3(record.fields);
10903
10903
  for (const fieldName of fieldNames) {
10904
10904
  const field = record.fields[fieldName];
10905
10905
  if (isFieldLink(field) === false) {
@@ -11000,7 +11000,7 @@ function getRecordDraftEnvironment(luvio, env, { isDraftId, durableStore }) {
11000
11000
  const resolvedRequest = resolveResourceRequestIds(luvio, resourceRequest, canonicalKey);
11001
11001
  return env.dispatchResourceRequest(resolvedRequest, context, eventObservers);
11002
11002
  };
11003
- return create$2(env, {
11003
+ return create$3(env, {
11004
11004
  dispatchResourceRequest: { value: dispatchResourceRequest },
11005
11005
  });
11006
11006
  }
@@ -11178,7 +11178,7 @@ function getRecordsDraftEnvironment(luvio, env, { isDraftId }) {
11178
11178
  return applyDraftsToBatchResponse(resourceRequest, response, removedDraftIds);
11179
11179
  }));
11180
11180
  };
11181
- return create$2(env, {
11181
+ return create$3(env, {
11182
11182
  dispatchResourceRequest: { value: dispatchResourceRequest },
11183
11183
  });
11184
11184
  }
@@ -11188,11 +11188,11 @@ function makeEnvironmentUiApiRecordDraftAware(luvio, options, env) {
11188
11188
  const adapterSpecificEnvironments = synthesizers.reduce((environment, synthesizer) => {
11189
11189
  return synthesizer(luvio, environment, options);
11190
11190
  }, env);
11191
- return create$2(adapterSpecificEnvironments, {});
11191
+ return create$3(adapterSpecificEnvironments, {});
11192
11192
  }
11193
11193
 
11194
11194
  function clone(obj) {
11195
- return parse$2(stringify$2(obj));
11195
+ return parse$3(stringify$3(obj));
11196
11196
  }
11197
11197
 
11198
11198
  const DEFAULT_FIELD_CREATED_BY_ID = 'CreatedById';
@@ -11315,7 +11315,7 @@ function recursivelyApplyDraftsToRecord(record, draftMetadata, recordOperations)
11315
11315
  LastModifiedDate: lastModifiedDate,
11316
11316
  };
11317
11317
  const draftFields = buildRecordFieldValueRepresentationsFromDraftFields(luvio, apiName, internalFields, objectInfos, referencedRecords, formatDisplayValue);
11318
- const fieldNames = keys$2(draftFields);
11318
+ const fieldNames = keys$3(draftFields);
11319
11319
  for (let i = 0, len = fieldNames.length; i < len; i++) {
11320
11320
  const fieldName = fieldNames[i];
11321
11321
  // don't apply server values to draft created records
@@ -11376,7 +11376,7 @@ function removeDrafts(record, luvio, objectInfo) {
11376
11376
  return undefined;
11377
11377
  }
11378
11378
  const updatedFields = {};
11379
- const fieldNames = keys$2(fields);
11379
+ const fieldNames = keys$3(fields);
11380
11380
  for (let i = 0, len = fieldNames.length; i < len; i++) {
11381
11381
  const fieldName = fieldNames[i];
11382
11382
  const field = fields[fieldName];
@@ -11423,7 +11423,7 @@ function removeDrafts(record, luvio, objectInfo) {
11423
11423
  * @param fields List of draft record fields
11424
11424
  */
11425
11425
  function buildRecordFieldValueRepresentationsFromDraftFields(luvio, apiName, fields, objectInfos, referencedRecords, formatDisplayValue) {
11426
- const fieldNames = keys$2(fields);
11426
+ const fieldNames = keys$3(fields);
11427
11427
  const recordFields = {};
11428
11428
  const objectInfo = objectInfos.get(apiName);
11429
11429
  for (let i = 0, len = fieldNames.length; i < len; i++) {
@@ -11492,7 +11492,7 @@ function buildSyntheticRecordRepresentation(luvio, createOperation, userId, obje
11492
11492
  draftFields[DEFAULT_FIELD_OWNER_ID] = { value: userId, displayValue: null };
11493
11493
  draftFields[DEFAULT_FIELD_ID] = { value: recordId, displayValue: null };
11494
11494
  if (objectInfo !== undefined) {
11495
- const allObjectFields = keys$2(objectInfo.fields);
11495
+ const allObjectFields = keys$3(objectInfo.fields);
11496
11496
  allObjectFields.forEach((fieldName) => {
11497
11497
  if (draftFields[fieldName] === undefined) {
11498
11498
  draftFields[fieldName] = { value: null, displayValue: null };
@@ -11616,7 +11616,7 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
11616
11616
  this.isDraftId = isDraftId;
11617
11617
  this.recordService = recordService;
11618
11618
  this.handlerId = LDS_ACTION_HANDLER_ID;
11619
- this.collectedFields = create$2(null);
11619
+ this.collectedFields = create$3(null);
11620
11620
  recordService.registerRecordHandler(this);
11621
11621
  }
11622
11622
  async buildPendingAction(request, queue) {
@@ -11829,11 +11829,11 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
11829
11829
  // otherwise we're a record
11830
11830
  if (draftMetadata === undefined) {
11831
11831
  // no drafts applied to this record, publish and be done
11832
- this.collectedFields = create$2(null);
11832
+ this.collectedFields = create$3(null);
11833
11833
  return publishData(key, data);
11834
11834
  }
11835
11835
  // create a denormalized record with the collected fields
11836
- const recordFieldNames = keys$2(data.fields);
11836
+ const recordFieldNames = keys$3(data.fields);
11837
11837
  const partialRecord = {
11838
11838
  ...data,
11839
11839
  fields: {},
@@ -11855,7 +11855,7 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
11855
11855
  lastModifiedDate: recordWithDrafts.lastModifiedDate,
11856
11856
  lastModifiedById: recordWithDrafts.lastModifiedById,
11857
11857
  };
11858
- for (const fieldName of keys$2(recordWithSpanningRefLinks.fields)) {
11858
+ for (const fieldName of keys$3(recordWithSpanningRefLinks.fields)) {
11859
11859
  const fieldKey = buildRecordFieldStoreKey(key, fieldName);
11860
11860
  if (this.collectedFields[fieldKey] !== undefined) {
11861
11861
  const fieldData = recordWithSpanningRefLinks.fields[fieldName];
@@ -11872,7 +11872,7 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
11872
11872
  // publish the normalized record
11873
11873
  publishData(key, normalizedRecord);
11874
11874
  // we've published the record, now clear the collected fields
11875
- this.collectedFields = create$2(null);
11875
+ this.collectedFields = create$3(null);
11876
11876
  }
11877
11877
  updateMetadata(existingMetadata, incomingMetadata) {
11878
11878
  // ensure the the api name cannot be overwritten in the incoming metadata
@@ -11909,7 +11909,7 @@ class UiApiActionHandler extends AbstractResourceRequestActionHandler {
11909
11909
  let resolvedUrlParams = request.urlParams;
11910
11910
  if (request.method === 'post' || request.method === 'patch') {
11911
11911
  const bodyFields = resolvedBody.fields;
11912
- const fieldNames = keys$2(bodyFields);
11912
+ const fieldNames = keys$3(bodyFields);
11913
11913
  for (let i = 0, len = fieldNames.length; i < len; i++) {
11914
11914
  const fieldName = fieldNames[i];
11915
11915
  const fieldValue = bodyFields[fieldName];
@@ -11992,7 +11992,7 @@ function isField(key, data) {
11992
11992
  function normalizeRecordFields(key, entry) {
11993
11993
  const { data: record } = entry;
11994
11994
  const { fields, links } = record;
11995
- const linkNames = keys$2(links);
11995
+ const linkNames = keys$3(links);
11996
11996
  const normalizedFields = {};
11997
11997
  const returnEntries = {};
11998
11998
  for (let i = 0, len = linkNames.length; i < len; i++) {
@@ -12014,7 +12014,7 @@ function normalizeRecordFields(key, entry) {
12014
12014
  }
12015
12015
  }
12016
12016
  returnEntries[key] = {
12017
- data: assign$2(record, { fields: normalizedFields }),
12017
+ data: assign$3(record, { fields: normalizedFields }),
12018
12018
  metadata: entry.metadata,
12019
12019
  };
12020
12020
  return returnEntries;
@@ -12030,7 +12030,7 @@ function buildDurableRecordRepresentation(normalizedRecord, records, pendingEntr
12030
12030
  const fields = normalizedRecord.fields;
12031
12031
  const filteredFields = {};
12032
12032
  const links = {};
12033
- const fieldNames = keys$2(fields);
12033
+ const fieldNames = keys$3(fields);
12034
12034
  for (let i = 0, len = fieldNames.length; i < len; i++) {
12035
12035
  const fieldName = fieldNames[i];
12036
12036
  const field = fields[fieldName];
@@ -12117,8 +12117,8 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
12117
12117
  if (durableEntries === undefined) {
12118
12118
  return undefined;
12119
12119
  }
12120
- const returnEntries = create$2(null);
12121
- const keys$1 = keys$2(durableEntries);
12120
+ const returnEntries = create$3(null);
12121
+ const keys$1 = keys$3(durableEntries);
12122
12122
  for (let i = 0, len = keys$1.length; i < len; i++) {
12123
12123
  const key = keys$1[i];
12124
12124
  const value = durableEntries[key];
@@ -12126,7 +12126,7 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
12126
12126
  continue;
12127
12127
  }
12128
12128
  if (isEntryDurableRecordRepresentation(value, key)) {
12129
- assign$2(returnEntries, normalizeRecordFields(key, value));
12129
+ assign$3(returnEntries, normalizeRecordFields(key, value));
12130
12130
  }
12131
12131
  else {
12132
12132
  returnEntries[key] = value;
@@ -12136,8 +12136,8 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
12136
12136
  });
12137
12137
  };
12138
12138
  const denormalizeEntries = function (entries) {
12139
- const putEntries = create$2(null);
12140
- const keys$1 = keys$2(entries);
12139
+ const putEntries = create$3(null);
12140
+ const keys$1 = keys$3(entries);
12141
12141
  const putRecords = {};
12142
12142
  const putRecordViews = {};
12143
12143
  const storeRecords = getStoreRecords !== undefined ? getStoreRecords() : {};
@@ -12246,7 +12246,7 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
12246
12246
  return denormalizedRecord;
12247
12247
  });
12248
12248
  };
12249
- return create$2(durableStore, {
12249
+ return create$3(durableStore, {
12250
12250
  getEntries: { value: getEntries, writable: true },
12251
12251
  setEntries: { value: setEntries, writable: true },
12252
12252
  batchOperations: { value: batchOperations, writable: true },
@@ -12264,7 +12264,7 @@ function normalizeError$1(error) {
12264
12264
  else if (typeof error === 'string') {
12265
12265
  return new Error(error);
12266
12266
  }
12267
- return new Error(stringify$2(error));
12267
+ return new Error(stringify$3(error));
12268
12268
  }
12269
12269
 
12270
12270
  const PERFORM_QUICK_ACTION_ENDPOINT_REGEX = /^\/ui-api\/actions\/perform-quick-action\/.*$/;
@@ -12292,7 +12292,7 @@ function performQuickActionDraftEnvironment(luvio, env, handler) {
12292
12292
  }
12293
12293
  return createOkResponse(data);
12294
12294
  };
12295
- return create$2(env, {
12295
+ return create$3(env, {
12296
12296
  dispatchResourceRequest: { value: dispatchResourceRequest },
12297
12297
  });
12298
12298
  }
@@ -12359,7 +12359,7 @@ class UiApiDraftRecordService {
12359
12359
  };
12360
12360
  }
12361
12361
  objectInfoMap.set(apiName, objectInfo);
12362
- const fields = keys$2(operation.fields);
12362
+ const fields = keys$3(operation.fields);
12363
12363
  const unexpectedFields = [];
12364
12364
  for (const field of fields) {
12365
12365
  const fieldInfo = objectInfo.fields[field];
@@ -12645,7 +12645,7 @@ const replaceDraftIdsInVariables = (variables, draftFunctions, unmappedDraftIDs)
12645
12645
  }
12646
12646
  else if (typeof object === 'object' && object !== null) {
12647
12647
  let source = object;
12648
- return keys$2(source).reduce((acc, key) => {
12648
+ return keys$3(source).reduce((acc, key) => {
12649
12649
  acc[key] = replace(source[key]);
12650
12650
  return acc;
12651
12651
  }, {});
@@ -12654,7 +12654,7 @@ const replaceDraftIdsInVariables = (variables, draftFunctions, unmappedDraftIDs)
12654
12654
  return object;
12655
12655
  }
12656
12656
  };
12657
- let newVariables = keys$2(variables).reduce((acc, key) => {
12657
+ let newVariables = keys$3(variables).reduce((acc, key) => {
12658
12658
  acc[key] = replace(variables[key]);
12659
12659
  return acc;
12660
12660
  }, {});
@@ -12667,7 +12667,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
12667
12667
  };
12668
12668
  return async function draftAwareGraphQLAdapter(config, buildCachedSnapshotCachePolicy, buildNetworkSnapshotCachePolicy, requestContext = {}) {
12669
12669
  //create a copy to not accidentally modify the AST in the astResolver map of luvio
12670
- const copy = parse$2(stringify$2(config.query));
12670
+ const copy = parse$3(stringify$3(config.query));
12671
12671
  let injectedAST;
12672
12672
  let objectInfoNeeded = {};
12673
12673
  let unmappedDraftIDs;
@@ -12732,7 +12732,7 @@ function draftAwareGraphQLAdapterFactory(userId, objectInfoService, store, luvio
12732
12732
  ({ result: gqlResult, seenRecordIds } = await evaluate({
12733
12733
  ...config,
12734
12734
  //need to create another copy of the ast for future writes
12735
- query: parse$2(stringify$2(injectedAST)),
12735
+ query: parse$3(stringify$3(injectedAST)),
12736
12736
  }, observers, { userId }, objectInfoNeeded, store, nonEvaluatedSnapshot));
12737
12737
  }
12738
12738
  catch (throwable) {
@@ -13486,16 +13486,16 @@ const recordIdGenerator = (id) => {
13486
13486
  * For full license text, see the LICENSE.txt file
13487
13487
  */
13488
13488
 
13489
- const { keys: keys$1, create: create$1, assign: assign$1, entries: entries$1 } = Object;
13490
- const { stringify: stringify$1, parse: parse$1 } = JSON;
13489
+ const { keys: keys$2, create: create$2, assign: assign$2, entries: entries$2 } = Object;
13490
+ const { stringify: stringify$2, parse: parse$2 } = JSON;
13491
13491
  const { push: push$1, join: join$1, slice: slice$1 } = Array.prototype;
13492
13492
  const { isArray, from } = Array;
13493
13493
 
13494
13494
  function ldsParamsToString(params) {
13495
- const returnParams = create$1(null);
13496
- const keys$1$1 = keys$1(params);
13497
- for (let i = 0, len = keys$1$1.length; i < len; i++) {
13498
- const key = keys$1$1[i];
13495
+ const returnParams = create$2(null);
13496
+ const keys$1 = keys$2(params);
13497
+ for (let i = 0, len = keys$1.length; i < len; i++) {
13498
+ const key = keys$1[i];
13499
13499
  const value = params[key];
13500
13500
  if (value === undefined) {
13501
13501
  // filter out params that have no value
@@ -13510,8 +13510,8 @@ function ldsParamsToString(params) {
13510
13510
  else {
13511
13511
  returnParams[key] = `${value}`;
13512
13512
  }
13513
- if (isObject(value) === true && keys$1(value).length > 0) {
13514
- returnParams[key] = stringify$1(value);
13513
+ if (isObject(value) === true && keys$2(value).length > 0) {
13514
+ returnParams[key] = stringify$2(value);
13515
13515
  }
13516
13516
  }
13517
13517
  return returnParams;
@@ -13570,13 +13570,13 @@ function stringifyIfPresent(value) {
13570
13570
  if (value === undefined || value === null) {
13571
13571
  return null;
13572
13572
  }
13573
- return stringify$1(value);
13573
+ return stringify$2(value);
13574
13574
  }
13575
13575
  function parseIfPresent(value) {
13576
13576
  if (value === undefined || value === null || value === '') {
13577
13577
  return null;
13578
13578
  }
13579
- return parse$1(value);
13579
+ return parse$2(value);
13580
13580
  }
13581
13581
  function buildNimbusNetworkPluginRequest(resourceRequest, resourceRequestContext) {
13582
13582
  const { basePath, baseUri, method, headers, queryParams, body } = resourceRequest;
@@ -13907,7 +13907,7 @@ function buildAggregateUiUrl(params, resourceRequest) {
13907
13907
  optionalFields,
13908
13908
  };
13909
13909
  const queryString = [];
13910
- for (const [key, value] of entries$1(mergedParams)) {
13910
+ for (const [key, value] of entries$2(mergedParams)) {
13911
13911
  if (value !== undefined) {
13912
13912
  queryString.push(`${key}=${isArray(value) ? value.join(',') : value}`);
13913
13913
  }
@@ -13923,7 +13923,7 @@ function isSpanningRecord(fieldValue) {
13923
13923
  function mergeRecordFields(first, second) {
13924
13924
  const { fields: targetFields } = first;
13925
13925
  const { fields: sourceFields } = second;
13926
- const fieldNames = keys$1(sourceFields);
13926
+ const fieldNames = keys$2(sourceFields);
13927
13927
  for (let i = 0, len = fieldNames.length; i < len; i += 1) {
13928
13928
  const fieldName = fieldNames[i];
13929
13929
  const sourceField = sourceFields[fieldName];
@@ -14073,8 +14073,8 @@ function getMaxLengthPerChunkAllowed(request) {
14073
14073
  // Too much work to get exact length of the final url, so use stringified json to get the rough length.
14074
14074
  const roughUrlLengthWithoutFieldsAndOptionFields = request.basePath.length +
14075
14075
  request.baseUri.length +
14076
- (request.urlParams ? stringify$1(request.urlParams).length : 0) +
14077
- stringify$1({ ...request.queryParams, fields: {}, optionalFields: {} }).length;
14076
+ (request.urlParams ? stringify$2(request.urlParams).length : 0) +
14077
+ stringify$2({ ...request.queryParams, fields: {}, optionalFields: {} }).length;
14078
14078
  // MAX_URL_LENGTH - full lenght without fields, optionalFields
14079
14079
  return MAX_URL_LENGTH - roughUrlLengthWithoutFieldsAndOptionFields;
14080
14080
  }
@@ -14282,8 +14282,8 @@ function makeNetworkAdapterChunkRecordFields(networkAdapter) {
14282
14282
  }, networkAdapter);
14283
14283
  }
14284
14284
 
14285
- const { keys, create, assign, entries } = Object;
14286
- const { stringify, parse } = JSON;
14285
+ const { keys: keys$1, create: create$1, assign: assign$1, entries: entries$1 } = Object;
14286
+ const { stringify: stringify$1, parse: parse$1 } = JSON;
14287
14287
  const { push, join, slice } = Array.prototype;
14288
14288
 
14289
14289
  // so eslint doesn't complain about nimbus
@@ -14300,10 +14300,10 @@ class NimbusDraftQueue {
14300
14300
  if (callProxyMethod === undefined) {
14301
14301
  return Promise.reject(new Error('callProxyMethod not defined on the nimbus plugin'));
14302
14302
  }
14303
- const serializedAction = stringify([handlerId, data]);
14303
+ const serializedAction = stringify$1([handlerId, data]);
14304
14304
  return new Promise((resolve, reject) => {
14305
14305
  callProxyMethod('enqueue', serializedAction, (serializedActionResponse) => {
14306
- const response = parse(serializedActionResponse);
14306
+ const response = parse$1(serializedActionResponse);
14307
14307
  resolve(response);
14308
14308
  }, (errorMessage) => {
14309
14309
  reject(new Error(errorMessage));
@@ -14324,8 +14324,8 @@ class NimbusDraftQueue {
14324
14324
  return Promise.reject(new Error('callProxyMethod not defined on the nimbus plugin'));
14325
14325
  }
14326
14326
  return new Promise((resolve, reject) => {
14327
- callProxyMethod('getQueueActions', stringify([]), (serializedQueue) => {
14328
- resolve(parse(serializedQueue));
14327
+ callProxyMethod('getQueueActions', stringify$1([]), (serializedQueue) => {
14328
+ resolve(parse$1(serializedQueue));
14329
14329
  }, (errorMessage) => {
14330
14330
  reject(new Error(errorMessage));
14331
14331
  });
@@ -14336,17 +14336,17 @@ class NimbusDraftQueue {
14336
14336
  if (callProxyMethod === undefined) {
14337
14337
  return Promise.reject('callProxyMethod not defined on the nimbus plugin');
14338
14338
  }
14339
- const stringifiedArgs = stringify([action]);
14339
+ const stringifiedArgs = stringify$1([action]);
14340
14340
  return new Promise((resolve, reject) => {
14341
14341
  callProxyMethod('getDataForAction', stringifiedArgs, (data) => {
14342
14342
  if (data === undefined) {
14343
14343
  resolve(undefined);
14344
14344
  }
14345
14345
  else {
14346
- resolve(parse(data));
14346
+ resolve(parse$1(data));
14347
14347
  }
14348
14348
  }, (serializedError) => {
14349
- reject(parse(serializedError));
14349
+ reject(parse$1(serializedError));
14350
14350
  });
14351
14351
  });
14352
14352
  }
@@ -14410,7 +14410,7 @@ function normalizeError(err) {
14410
14410
  else if (typeof err === 'string') {
14411
14411
  return new Error(err);
14412
14412
  }
14413
- return new Error(stringify(err));
14413
+ return new Error(stringify$1(err));
14414
14414
  }
14415
14415
 
14416
14416
  const O11Y_NAMESPACE_LDS_MOBILE = 'lds-mobile';
@@ -14626,7 +14626,7 @@ function instrumentDraftQueue(queue) {
14626
14626
  logError: false,
14627
14627
  });
14628
14628
  };
14629
- return create(queue, { mergeActions: { value: mergeActions } });
14629
+ return create$1(queue, { mergeActions: { value: mergeActions } });
14630
14630
  }
14631
14631
 
14632
14632
  // so eslint doesn't complain about nimbus
@@ -14702,7 +14702,7 @@ function enableObjectInfoCaching(env, ensureObjectInfoCached) {
14702
14702
  function dataIsObjectInfo(key, data) {
14703
14703
  return incomingObjectInfos.has(key);
14704
14704
  }
14705
- return create(env, {
14705
+ return create$1(env, {
14706
14706
  handleSuccessResponse: { value: handleSuccessResponse },
14707
14707
  storePublish: { value: storePublish },
14708
14708
  });
@@ -14808,7 +14808,7 @@ class ObjectInfoService {
14808
14808
  this.getObjectInfosAdapter = getObjectInfosAdapter;
14809
14809
  this.durableStore = durableStore;
14810
14810
  // Local in-memory cache for ObjectInfo entries seen in DurableStore eg: {'Account': 001}
14811
- this.objectInfoMemoryCache = create(null);
14811
+ this.objectInfoMemoryCache = create$1(null);
14812
14812
  }
14813
14813
  /**
14814
14814
  * Size of return map not necessarily correlated with number of inputs. The
@@ -14924,6 +14924,9 @@ function registerReportObserver(reportObserver) {
14924
14924
  };
14925
14925
  }
14926
14926
 
14927
+ const { keys, create, assign, entries } = Object;
14928
+ const { stringify, parse } = JSON;
14929
+
14927
14930
  function selectColumnsFromTableWhereKeyIn(columnNames, table, keyColumnName, whereIn) {
14928
14931
  const paramList = whereIn.map(() => '?').join(',');
14929
14932
  return `SELECT ${columnNames.join(',')} FROM ${table} WHERE ${keyColumnName} IN (${paramList})`;
@@ -14935,13 +14938,13 @@ function selectColumnsFromTableWhereKeyInNamespaced(columnNames, table, keyColum
14935
14938
 
14936
14939
  // These const values must be in sync with the latest
14937
14940
  // @salesforce/nimbus-plugin-lds/sql schema file
14938
- const TABLE_NAME$3 = 'lds_data';
14941
+ const TABLE_NAME$1$1 = 'lds_data';
14939
14942
  const COLUMN_NAME_KEY$2 = 'key';
14940
14943
  const COLUMN_NAME_DATA$2 = 'data';
14941
14944
  const COLUMN_NAME_METADATA$1 = 'metadata';
14942
14945
  class LdsDataTable {
14943
14946
  constructor(plugin) {
14944
- this.tableName = TABLE_NAME$3;
14947
+ this.tableName = TABLE_NAME$1$1;
14945
14948
  this.columnNames = [COLUMN_NAME_KEY$2, COLUMN_NAME_DATA$2, COLUMN_NAME_METADATA$1];
14946
14949
  this.conflictColumnNames = [COLUMN_NAME_KEY$2];
14947
14950
  this.getAllQuery = `SELECT ${this.columnNames.join(',')} FROM ${this.tableName}`;
@@ -14998,95 +15001,21 @@ class LdsDataTable {
14998
15001
 
14999
15002
  // These const values must be in sync with the latest
15000
15003
  // @salesforce/nimbus-plugin-lds/sql schema file
15004
+ const TABLE_NAME$2 = 'lds_internal';
15001
15005
  const COLUMN_NAME_KEY$1 = 'key';
15002
15006
  const COLUMN_NAME_DATA$1 = 'data';
15003
- class AbstractKeyValueDataTable {
15004
- constructor(plugin, tableName) {
15005
- this.columnNames = [COLUMN_NAME_KEY$1, COLUMN_NAME_DATA$1];
15006
- this.conflictColumnNames = [COLUMN_NAME_KEY$1];
15007
- this.plugin = plugin;
15008
- this.tableName = tableName;
15009
- }
15010
- getByKeys(keys) {
15011
- return new Promise((resolve, reject) => {
15012
- const getQuery = selectColumnsFromTableWhereKeyIn(this.columnNames, this.tableName, COLUMN_NAME_KEY$1, keys);
15013
- this.plugin.query(getQuery, keys, (x) => {
15014
- resolve(this.mapToDurableEntries(x));
15015
- }, reject);
15016
- });
15017
- }
15018
- getAll() {
15019
- const getAllQuery = `SELECT ${this.columnNames.join(',')} FROM ${this.tableName}`;
15020
- return new Promise((resolve, reject) => {
15021
- this.plugin.query(getAllQuery, [], (x) => {
15022
- resolve(this.mapToDurableEntries(x));
15023
- }, reject);
15024
- });
15025
- }
15026
- entriesToUpsertOperations(entries, segment) {
15027
- return {
15028
- type: 'upsert',
15029
- table: this.tableName,
15030
- keyColumn: COLUMN_NAME_KEY$1,
15031
- context: {
15032
- segment,
15033
- },
15034
- conflictColumns: this.conflictColumnNames,
15035
- columns: this.columnNames,
15036
- rows: keys(entries).reduce((rows, key) => {
15037
- const entry = entries[key];
15038
- rows.push([key, stringify(entry.data)]);
15039
- return rows;
15040
- }, []),
15041
- };
15042
- }
15043
- mapToDurableEntries(sqliteResult) {
15044
- return sqliteResult.rows.reduce((entries, row) => {
15045
- const [key, stringifiedData] = row;
15046
- const durableStoreEntry = {
15047
- data: parse(stringifiedData),
15048
- };
15049
- entries[key] = durableStoreEntry;
15050
- return entries;
15051
- }, {});
15052
- }
15053
- }
15054
-
15055
- // These const values must be in sync with the latest
15056
- // @salesforce/nimbus-plugin-lds/sql schema file
15057
- const TABLE_NAME$2 = 'lds_env_drafts';
15058
- class LdsDraftsDataTable extends AbstractKeyValueDataTable {
15059
- constructor(plugin) {
15060
- super(plugin, TABLE_NAME$2);
15061
- }
15062
- }
15063
-
15064
- // These const values must be in sync with the latest
15065
- // @salesforce/nimbus-plugin-lds/sql schema file
15066
- const TABLE_NAME$1 = 'lds_env_draft_id_map';
15067
- class LdsDraftIdMapDataTable extends AbstractKeyValueDataTable {
15068
- constructor(plugin) {
15069
- super(plugin, TABLE_NAME$1);
15070
- }
15071
- }
15072
-
15073
- // These const values must be in sync with the latest
15074
- // @salesforce/nimbus-plugin-lds/sql schema file
15075
- const TABLE_NAME = 'lds_internal';
15076
- const COLUMN_NAME_KEY = 'key';
15077
- const COLUMN_NAME_DATA = 'data';
15078
15007
  const COLUMN_NAME_METADATA = 'metadata';
15079
15008
  const COLUMN_NAME_NAMESPACE = 'namespace';
15080
15009
  class LdsInternalDataTable {
15081
15010
  constructor(plugin) {
15082
- this.tableName = TABLE_NAME;
15011
+ this.tableName = TABLE_NAME$2;
15083
15012
  this.columnNames = [
15084
- COLUMN_NAME_KEY,
15085
- COLUMN_NAME_DATA,
15013
+ COLUMN_NAME_KEY$1,
15014
+ COLUMN_NAME_DATA$1,
15086
15015
  COLUMN_NAME_METADATA,
15087
15016
  COLUMN_NAME_NAMESPACE,
15088
15017
  ];
15089
- this.conflictColumnNames = [COLUMN_NAME_KEY, COLUMN_NAME_NAMESPACE];
15018
+ this.conflictColumnNames = [COLUMN_NAME_KEY$1, COLUMN_NAME_NAMESPACE];
15090
15019
  this.getAllQuery = `SELECT ${this.columnNames.join(',')} FROM ${this.tableName} WHERE ${COLUMN_NAME_NAMESPACE} = ?`;
15091
15020
  this.plugin = plugin;
15092
15021
  }
@@ -15094,7 +15023,7 @@ class LdsInternalDataTable {
15094
15023
  if (namespace === undefined) {
15095
15024
  throw Error('LdsInternalDataTable requires namespace');
15096
15025
  }
15097
- const getQuery = selectColumnsFromTableWhereKeyInNamespaced(this.columnNames, this.tableName, COLUMN_NAME_KEY, keys, COLUMN_NAME_NAMESPACE);
15026
+ const getQuery = selectColumnsFromTableWhereKeyInNamespaced(this.columnNames, this.tableName, COLUMN_NAME_KEY$1, keys, COLUMN_NAME_NAMESPACE);
15098
15027
  return new Promise((resolve, reject) => {
15099
15028
  this.plugin.query(getQuery, [namespace].concat(keys), (x) => {
15100
15029
  resolve(this.mapToDurableEntries(x));
@@ -15112,7 +15041,7 @@ class LdsInternalDataTable {
15112
15041
  return {
15113
15042
  type: 'upsert',
15114
15043
  table: this.tableName,
15115
- keyColumn: COLUMN_NAME_KEY,
15044
+ keyColumn: COLUMN_NAME_KEY$1,
15116
15045
  context: {
15117
15046
  segment,
15118
15047
  },
@@ -15150,13 +15079,12 @@ class LdsInternalDataTable {
15150
15079
  }
15151
15080
 
15152
15081
  class NimbusSqliteStore {
15153
- constructor(plugin) {
15082
+ constructor(plugin, additionalTableMap = {}) {
15154
15083
  this.plugin = plugin;
15155
15084
  this.internalDataTable = new LdsInternalDataTable(plugin);
15156
15085
  this.dataTableMap = {
15086
+ ...additionalTableMap,
15157
15087
  [DefaultDurableSegment]: new LdsDataTable(plugin),
15158
- [DRAFT_SEGMENT]: new LdsDraftsDataTable(plugin),
15159
- [DRAFT_ID_MAPPINGS_SEGMENT]: new LdsDraftIdMapDataTable(plugin),
15160
15088
  };
15161
15089
  }
15162
15090
  isEvalSupported() {
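The hunk above is the substantive part of the refactor: NimbusSqliteStore no longer hard-codes the draft segments and instead accepts an optional additionalTableMap, which is spread into dataTableMap ahead of the built-in DefaultDurableSegment entry, so callers supply extra segment-to-table wiring but cannot override the default table. A sketch of the new call shape, using a hypothetical CUSTOM_SEGMENT constant and key-value table class:

    // the second argument maps durable-store segment names to data tables;
    // the DefaultDurableSegment entry is still added by the constructor itself
    const store = new NimbusSqliteStore(sqlitePlugin, {
        [CUSTOM_SEGMENT]: new MyKeyValueDataTable(sqlitePlugin),
    });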
@@ -15253,11 +15181,90 @@ class NimbusSqliteStore {
15253
15181
  }
15254
15182
  }
15255
15183
 
15184
+ // These const values must be in sync with the latest
15185
+ // @salesforce/nimbus-plugin-lds/sql schema file
15186
+ const COLUMN_NAME_KEY = 'key';
15187
+ const COLUMN_NAME_DATA = 'data';
15188
+ class AbstractKeyValueDataTable {
15189
+ constructor(plugin, tableName) {
15190
+ this.columnNames = [COLUMN_NAME_KEY, COLUMN_NAME_DATA];
15191
+ this.conflictColumnNames = [COLUMN_NAME_KEY];
15192
+ this.plugin = plugin;
15193
+ this.tableName = tableName;
15194
+ }
15195
+ getByKeys(keys) {
15196
+ return new Promise((resolve, reject) => {
15197
+ const getQuery = selectColumnsFromTableWhereKeyIn(this.columnNames, this.tableName, COLUMN_NAME_KEY, keys);
15198
+ this.plugin.query(getQuery, keys, (x) => {
15199
+ resolve(this.mapToDurableEntries(x));
15200
+ }, reject);
15201
+ });
15202
+ }
15203
+ getAll() {
15204
+ const getAllQuery = `SELECT ${this.columnNames.join(',')} FROM ${this.tableName}`;
15205
+ return new Promise((resolve, reject) => {
15206
+ this.plugin.query(getAllQuery, [], (x) => {
15207
+ resolve(this.mapToDurableEntries(x));
15208
+ }, reject);
15209
+ });
15210
+ }
15211
+ entriesToUpsertOperations(entries, segment) {
15212
+ return {
15213
+ type: 'upsert',
15214
+ table: this.tableName,
15215
+ keyColumn: COLUMN_NAME_KEY,
15216
+ context: {
15217
+ segment,
15218
+ },
15219
+ conflictColumns: this.conflictColumnNames,
15220
+ columns: this.columnNames,
15221
+ rows: keys(entries).reduce((rows, key) => {
15222
+ const entry = entries[key];
15223
+ rows.push([key, stringify(entry.data)]);
15224
+ return rows;
15225
+ }, []),
15226
+ };
15227
+ }
15228
+ mapToDurableEntries(sqliteResult) {
15229
+ return sqliteResult.rows.reduce((entries, row) => {
15230
+ const [key, stringifiedData] = row;
15231
+ const durableStoreEntry = {
15232
+ data: parse(stringifiedData),
15233
+ };
15234
+ entries[key] = durableStoreEntry;
15235
+ return entries;
15236
+ }, {});
15237
+ }
15238
+ }
15239
+
15240
+ // These const values must be in sync with the latest
15241
+ // @salesforce/nimbus-plugin-lds/sql schema file
15242
+ const TABLE_NAME$1 = 'lds_env_drafts';
15243
+ class LdsDraftsDataTable extends AbstractKeyValueDataTable {
15244
+ constructor(plugin) {
15245
+ super(plugin, TABLE_NAME$1);
15246
+ }
15247
+ }
15248
+
15249
+ // These const values must be in sync with the latest
15250
+ // @salesforce/nimbus-plugin-lds/sql schema file
15251
+ const TABLE_NAME = 'lds_env_draft_id_map';
15252
+ class LdsDraftIdMapDataTable extends AbstractKeyValueDataTable {
15253
+ constructor(plugin) {
15254
+ super(plugin, TABLE_NAME);
15255
+ }
15256
+ }
15257
+
15256
15258
  // so eslint doesn't complain about nimbus
15257
15259
  let baseDurableStore;
15258
- function getNimbusDurableStore() {
15260
+ function getNimbusDurableStore(plugin) {
15259
15261
  if (baseDurableStore === undefined) {
15260
- baseDurableStore = new NimbusSqliteStore(__nimbus.plugins.LdsSqliteStore);
15262
+ const resolvedPlugin = plugin === undefined ? __nimbus.plugins.LdsSqliteStore : plugin;
15263
+ const draftDataTableMap = {
15264
+ [DRAFT_SEGMENT]: new LdsDraftsDataTable(resolvedPlugin),
15265
+ [DRAFT_ID_MAPPINGS_SEGMENT]: new LdsDraftIdMapDataTable(resolvedPlugin),
15266
+ };
15267
+ baseDurableStore = new NimbusSqliteStore(resolvedPlugin, draftDataTableMap);
15261
15268
  }
15262
15269
  return baseDurableStore;
15263
15270
  }
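The counterpart to the constructor change appears in the hunk above: getNimbusDurableStore now takes an optional plugin (falling back to __nimbus.plugins.LdsSqliteStore when omitted), constructs the LdsDraftsDataTable and LdsDraftIdMapDataTable itself, and injects them as the additionalTableMap. A sketch of both call forms, assuming a hypothetical mockSqlitePlugin that implements the same query interface as the Nimbus plugin:

    // production path: falls back to the globally registered Nimbus plugin
    const durableStore = getNimbusDurableStore();

    // test path: inject a stand-in plugin; the result is memoized in module
    // scope (baseDurableStore), so the first call wins for subsequent callers
    const testStore = getNimbusDurableStore(mockSqlitePlugin);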
@@ -15398,7 +15405,7 @@ function makeEnvironmentGraphqlAware(environment) {
15398
15405
  }
15399
15406
  return environment.applyCachePolicy(luvio, adapterRequestContext, buildSnapshotContext, localBuildCachedSnapshot, buildNetworkSnapshot);
15400
15407
  };
15401
- return create(environment, {
15408
+ return create$1(environment, {
15402
15409
  rebuildSnapshot: { value: rebuildSnapshot },
15403
15410
  applyCachePolicy: { value: applyCachePolicy },
15404
15411
  setDefaultCachePolicy: { value: environment.setDefaultCachePolicy.bind(environment) },
@@ -15443,7 +15450,7 @@ function setupInspection(luvio) {
15443
15450
  // eslint-disable-next-line no-undef
15444
15451
  globalThis.luvio = luvio;
15445
15452
  registerReportObserver((report) => {
15446
- __nimbus.plugins.LdsInspectorPlugin.sendAdapterReport(stringify(report));
15453
+ __nimbus.plugins.LdsInspectorPlugin.sendAdapterReport(stringify$1(report));
15447
15454
  });
15448
15455
  }
15449
15456
  }
@@ -16289,4 +16296,4 @@ register({
16289
16296
  });
16290
16297
 
16291
16298
  export { getRuntime, registerReportObserver, reportGraphqlQueryParseError };
16292
- // version: 1.199.0-017661a18
16299
+ // version: 1.199.1-4bf260c39