@salesforce/lds-worker-api 1.233.0 → 1.235.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -26,7 +26,7 @@
26
26
  SnapshotState["Stale"] = "Stale";
27
27
  })(SnapshotState$3 || (SnapshotState$3 = {}));
28
28
 
29
- const { create: create$b, entries: entries$5, freeze: freeze$5, keys: keys$c, values: values$4 } = Object;
29
+ const { create: create$b, entries: entries$5, freeze: freeze$5, keys: keys$c, values: values$5 } = Object;
30
30
  const { hasOwnProperty: hasOwnProperty$3 } = Object.prototype;
31
31
  const { isArray: isArray$9 } = Array;
32
32
  const { push: push$5, indexOf, slice: slice$2 } = Array.prototype;
@@ -846,14 +846,16 @@
846
846
  delete this.records[canonicalKey];
847
847
  }
848
848
  publishMetadata(key, storeMetadata) {
849
+ this.putMetadata(key, storeMetadata, true);
850
+ const canonicalKey = this.getCanonicalRecordId(key);
851
+ this.markRefreshed(canonicalKey);
852
+ }
853
+ putMetadata(key, storeMetadata, adjustTTLOverride) {
849
854
  const { namespace, representationName, ingestionTimestamp } = storeMetadata;
850
855
  const ttlOverrideKey = getTTLOverrideKey(namespace, representationName);
851
856
  const ttlOverride = getTTLOverride$1(ttlOverrideKey, this.ttlOverrides, this.defaultTTLOverride);
852
857
  const canonicalKey = this.getCanonicalRecordId(key);
853
- this.markRefreshed(canonicalKey);
854
- if (ttlOverride !== undefined) {
855
- // It should check if the namespace + representationName exists in the ttlOverride map.
856
- // If a ttlOverride does exist, calculate a new ExpirationTimestamp with the override.
858
+ if (ttlOverride !== undefined && adjustTTLOverride === true) {
857
859
  const newExpiration = ingestionTimestamp + ttlOverride;
858
860
  // Create a new StoreMetadata object and publish the new object to storeMetadata.
859
861
  const metaDataOverride = {
@@ -1495,12 +1497,20 @@
1495
1497
  this.fallbackStringKeyInMemoryStore.publishMetadata(key, storeMetadata);
1496
1498
  return;
1497
1499
  }
1500
+ this.putMetadata(key, storeMetadata, true);
1501
+ const canonicalKey = this.getCanonicalRecordId(key);
1502
+ this.markRefreshed(canonicalKey);
1503
+ }
1504
+ putMetadata(key, storeMetadata, adjustTTLOverride) {
1505
+ if (typeof key === 'string') {
1506
+ this.fallbackStringKeyInMemoryStore.putMetadata(key, storeMetadata, adjustTTLOverride);
1507
+ return;
1508
+ }
1498
1509
  const { namespace, representationName, ingestionTimestamp } = storeMetadata;
1499
1510
  const ttlOverrideKey = this.buildStructuredKey(namespace, representationName, {});
1500
1511
  const ttlOverride = getTTLOverride(ttlOverrideKey, this.ttlOverridesMap, this.defaultTTLOverride);
1501
1512
  const canonicalKey = this.getCanonicalRecordId(key);
1502
- this.markRefreshed(canonicalKey);
1503
- if (ttlOverride !== undefined) {
1513
+ if (ttlOverride !== undefined && adjustTTLOverride) {
1504
1514
  // It should check if the namespace + representationName exists in the ttlOverride map.
1505
1515
  // If a ttlOverride does exist, calculate a new ExpirationTimestamp with the override.
1506
1516
  const newExpiration = ingestionTimestamp + ttlOverride;
@@ -3284,6 +3294,9 @@
3284
3294
  publishStoreMetadata(key, storeMetadata) {
3285
3295
  this.store.publishMetadata(key, storeMetadata);
3286
3296
  }
3297
+ putStoreMetadata(key, storeMetadata, adjustTTLOverride) {
3298
+ this.store.putMetadata(key, storeMetadata, adjustTTLOverride);
3299
+ }
3287
3300
  storeSetTTLOverride(namespace, representationName, ttl) {
3288
3301
  this.store.setTTLOverride(namespace, representationName, ttl);
3289
3302
  this.recomputeTTLOverrideExpirations(namespace, representationName);
@@ -3847,7 +3860,7 @@
3847
3860
  }
3848
3861
  return resourceParams;
3849
3862
  }
3850
- // engine version: 0.145.3-1693bfb7
3863
+ // engine version: 0.147.1-bc3a8b47
3851
3864
 
3852
3865
  /**
3853
3866
  * Copyright (c) 2022, Salesforce, Inc.,
@@ -3974,7 +3987,7 @@
3974
3987
  }
3975
3988
  callbacks.push(callback);
3976
3989
  }
3977
- // version: 1.233.0-8ab7ef233
3990
+ // version: 1.235.0-3790decf0
3978
3991
 
3979
3992
  // TODO [TD-0081508]: once that TD is fulfilled we can probably change this file
3980
3993
  function instrumentAdapter$1(createFunction, _metadata) {
@@ -15435,7 +15448,7 @@
15435
15448
  }
15436
15449
  return superResult;
15437
15450
  }
15438
- // version: 1.233.0-8ab7ef233
15451
+ // version: 1.235.0-3790decf0
15439
15452
 
15440
15453
  function unwrap(data) {
15441
15454
  // The lwc-luvio bindings import a function from lwc called "unwrap".
@@ -15536,14 +15549,15 @@
15536
15549
  return undefined;
15537
15550
  });
15538
15551
  }
15539
- const { isArray: isArray$8 } = Array;
15540
- const { stringify: stringify$9 } = JSON;
15541
15552
 
15542
15553
  function isPromise$1(value) {
15543
15554
  // check for Thenable due to test frameworks using custom Promise impls
15544
15555
  return value.then !== undefined;
15545
15556
  }
15546
15557
 
15558
+ const { isArray: isArray$8 } = Array;
15559
+ const { stringify: stringify$9 } = JSON;
15560
+
15547
15561
  /**
15548
15562
  * (Re)throws an error after adding a prefix to the message.
15549
15563
  *
@@ -16358,7 +16372,7 @@
16358
16372
  const { apiFamily, name } = metadata;
16359
16373
  return createGraphQLWireAdapterConstructor$1(adapter, `${apiFamily}.${name}`, luvio, astResolver);
16360
16374
  }
16361
- // version: 1.233.0-8ab7ef233
16375
+ // version: 1.235.0-3790decf0
16362
16376
 
16363
16377
  /**
16364
16378
  * Copyright (c) 2022, Salesforce, Inc.,
@@ -16457,7 +16471,7 @@
16457
16471
  TypeCheckShapes[TypeCheckShapes["Integer"] = 3] = "Integer";
16458
16472
  TypeCheckShapes[TypeCheckShapes["Unsupported"] = 4] = "Unsupported";
16459
16473
  })(TypeCheckShapes || (TypeCheckShapes = {}));
16460
- // engine version: 0.145.3-1693bfb7
16474
+ // engine version: 0.147.1-bc3a8b47
16461
16475
 
16462
16476
  const { keys: ObjectKeys$3, create: ObjectCreate$3 } = Object;
16463
16477
 
@@ -41311,7 +41325,7 @@
41311
41325
  throttle(60, 60000, createLDSAdapter(luvio, 'notifyListInfoUpdateAvailable', notifyUpdateAvailableFactory$1));
41312
41326
  throttle(60, 60000, createLDSAdapter(luvio, 'notifyQuickActionDefaultsUpdateAvailable', notifyUpdateAvailableFactory));
41313
41327
  });
41314
- // version: 1.233.0-e0ba7cc7b
41328
+ // version: 1.235.0-c252f93dc
41315
41329
 
41316
41330
  var caseSensitiveUserId = '005B0000000GR4OIAW';
41317
41331
 
@@ -41431,6 +41445,15 @@
41431
41445
  },
41432
41446
  };
41433
41447
 
41448
// Gate descriptor for the durable-metadata-refresh feature: the gate is
// considered "open" when the evaluation context is in fallback mode, and any
// evaluation error is always reported as present.
var ldsMetadataRefreshEnabled = {
    isOpen(e) {
        return e.fallback;
    },
    hasError() {
        return true;
    },
};
41456
+
41434
41457
  /**
41435
41458
  * Copyright (c) 2022, Salesforce, Inc.,
41436
41459
  * All rights reserved.
@@ -42005,7 +42028,7 @@
42005
42028
  * will refresh the snapshot from network, and then run the results from network
42006
42029
  * through L2 ingestion, returning the subsequent revived snapshot.
42007
42030
  */
42008
- function reviveSnapshot(baseEnvironment, durableStore, unavailableSnapshot, durableStoreErrorHandler, buildL1Snapshot, reviveMetrics = { l2Trips: [] }) {
42031
+ function reviveSnapshot(baseEnvironment, durableStore, unavailableSnapshot, durableStoreErrorHandler, buildL1Snapshot, revivingStore, reviveMetrics = { l2Trips: [] }) {
42009
42032
  const { recordId, select, missingLinks, seenRecords, state } = unavailableSnapshot;
42010
42033
  // L2 can only revive Unfulfilled snapshots that have a selector since they have the
42011
42034
  // info needed to revive (like missingLinks) and rebuild. Otherwise return L1 snapshot.
@@ -42015,10 +42038,21 @@
42015
42038
  metrics: reviveMetrics,
42016
42039
  });
42017
42040
  }
42018
- // in case L1 store changes/deallocs a record while we are doing the async read
42019
- // we attempt to read all keys from L2 - so combine recordId with any seenRecords
42020
- const keysToReviveSet = new StoreKeySet().add(recordId);
42021
- keysToReviveSet.merge(seenRecords);
42041
+ const keysToReviveSet = new StoreKeySet();
42042
+ if (revivingStore) {
42043
+ // Any stale keys since the last l2 read should be cleared and fetched again
42044
+ for (const staleKey of revivingStore.staleEntries) {
42045
+ keysToReviveSet.add(staleKey);
42046
+ }
42047
+ revivingStore.clearStale();
42048
+ }
42049
+ else {
42050
+ // when not using a reviving store:
42051
+ // in case L1 store changes/deallocs a record while we are doing the async read
42052
+ // we attempt to read all keys from L2 - so combine recordId with any seenRecords
42053
+ keysToReviveSet.add(recordId);
42054
+ keysToReviveSet.merge(seenRecords);
42055
+ }
42022
42056
  keysToReviveSet.merge(missingLinks);
42023
42057
  const keysToRevive = keysToReviveSet.keysAsArray();
42024
42058
  const canonicalKeys = keysToRevive.map((x) => serializeStructuredKey(baseEnvironment.storeGetCanonicalKey(x)));
@@ -42068,7 +42102,7 @@
42068
42102
  for (let i = 0, len = newKeys.length; i < len; i++) {
42069
42103
  const newSnapshotSeenKey = newKeys[i];
42070
42104
  if (!alreadyRequestedOrRevivedSet.has(newSnapshotSeenKey)) {
42071
- return reviveSnapshot(baseEnvironment, durableStore, snapshot, durableStoreErrorHandler, buildL1Snapshot, reviveMetrics);
42105
+ return reviveSnapshot(baseEnvironment, durableStore, snapshot, durableStoreErrorHandler, buildL1Snapshot, revivingStore, reviveMetrics);
42072
42106
  }
42073
42107
  }
42074
42108
  }
@@ -42157,8 +42191,9 @@
42157
42191
  }
42158
42192
  }
42159
42193
 
42160
- function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStoreErrorHandler, redirects, additionalDurableStoreOperations = []) {
42194
+ function flushInMemoryStoreValuesToDurableStore(store, durableStore, durableStoreErrorHandler, redirects, additionalDurableStoreOperations = [], enableDurableMetadataRefresh = false) {
42161
42195
  const durableRecords = create$6(null);
42196
+ const refreshedDurableRecords = create$6(null);
42162
42197
  const evictedRecords = create$6(null);
42163
42198
  const { records, metadata: storeMetadata, visitedIds, refreshedIds, } = store.fallbackStringKeyInMemoryStore;
42164
42199
  // TODO: W-8909393 Once metadata is stored in its own segment we need to
@@ -42168,32 +42203,36 @@
42168
42203
  for (let i = 0, len = keys$1.length; i < len; i += 1) {
42169
42204
  const key = keys$1[i];
42170
42205
  const record = records[key];
42206
+ const wasVisited = visitedIds[key] !== undefined;
42171
42207
  // this record has been evicted, evict from DS
42172
- if (record === undefined) {
42208
+ if (wasVisited && record === undefined) {
42173
42209
  evictedRecords[key] = true;
42174
42210
  continue;
42175
42211
  }
42176
42212
  const metadata = storeMetadata[key];
42177
- durableRecords[key] = {
42178
- data: record,
42179
- };
42180
- if (metadata !== undefined) {
42181
- durableRecords[key].metadata = {
42182
- ...metadata,
42183
- metadataVersion: DURABLE_METADATA_VERSION,
42184
- };
42185
- }
42213
+ const entries = wasVisited === true || enableDurableMetadataRefresh === false
42214
+ ? durableRecords
42215
+ : refreshedDurableRecords;
42216
+ setRecordTo(entries, key, record, metadata);
42186
42217
  }
42187
42218
  const durableStoreOperations = additionalDurableStoreOperations;
42188
- // publishes
42189
42219
  const recordKeys = keys$7(durableRecords);
42190
42220
  if (recordKeys.length > 0) {
42221
+ // publishes with data
42191
42222
  durableStoreOperations.push({
42192
42223
  type: 'setEntries',
42193
42224
  entries: durableRecords,
42194
42225
  segment: DefaultDurableSegment,
42195
42226
  });
42196
42227
  }
42228
+ if (keys$7(refreshedDurableRecords).length > 0) {
42229
+ // publishes with only metadata updates
42230
+ durableStoreOperations.push({
42231
+ type: 'setMetadata',
42232
+ entries: refreshedDurableRecords,
42233
+ segment: DefaultDurableSegment,
42234
+ });
42235
+ }
42197
42236
  // redirects
42198
42237
  redirects.forEach((value, key) => {
42199
42238
  durableStoreOperations.push({
@@ -42220,6 +42259,17 @@
42220
42259
  }
42221
42260
  return Promise.resolve();
42222
42261
  }
42262
// Writes one durable-store entry for `key` into the `entries` map. The record
// becomes the entry's `data`; when `metadata` is present it is copied onto the
// entry with the current DURABLE_METADATA_VERSION stamped on.
function setRecordTo(entries, key, record, metadata) {
    const entry = { data: record };
    if (metadata !== undefined) {
        entry.metadata = {
            ...metadata,
            metadataVersion: DURABLE_METADATA_VERSION,
        };
    }
    entries[key] = entry;
}
42223
42273
 
42224
42274
  const DurableEnvironmentEventDiscriminator = 'durable';
42225
42275
  function emitDurableEnvironmentAdapterEvent(eventData, observers) {
@@ -42264,6 +42314,50 @@
42264
42314
  }
42265
42315
  }
42266
42316
 
42317
// Wraps `upstreamStore` with a read-through cache used while reviving an L2
// snapshot into L1. Only `readEntry`, `markStale`, `clearStale`, and
// `staleEntries` are overridden; every other member passes through to the
// upstream store via the prototype chain. A reviving store is only "active"
// during a call to `environment.storeLookup`; afterwards it merely receives
// change notifications.
function buildRevivingStagingStore(upstreamStore) {
    const localStore = new StringKeyInMemoryStore();
    const staleEntries = new Set();
    // String-keyed reads are served from (and cached into) the local store so
    // a revived entry cannot be evicted upstream before the next durable-store
    // read; non-string (structured) keys go straight to the upstream store.
    function readEntry(key) {
        if (typeof key !== 'string') {
            return upstreamStore.readEntry(key);
        }
        let entry = localStore.readEntry(key);
        if (!entry) {
            entry = upstreamStore.readEntry(key);
            // cache locally to avoid eviction prior to the next durable store read
            localStore.put(key, entry);
        }
        return entry;
    }
    // Entries are marked stale by the durable store change listener. They are
    // not evicted immediately, so a rebuild in progress does not take a cache
    // miss; the revive loop clears them and re-reads from the durable store.
    function markStale(key) {
        staleEntries.add(key);
    }
    // Invoked by the revive loop right before it reads from the durable store:
    // drop every stale entry from the local cache so it is revived fresh and
    // ends up present in L1 with the latest data.
    function clearStale() {
        staleEntries.forEach((key) => {
            localStore.dealloc(key);
        });
        staleEntries.clear();
    }
    return create$6(upstreamStore, {
        readEntry: { value: readEntry },
        markStale: { value: markStale },
        clearStale: { value: clearStale },
        staleEntries: { value: staleEntries },
    });
}
42360
+
42267
42361
  const AdapterContextSegment = 'ADAPTER-CONTEXT';
42268
42362
  const ADAPTER_CONTEXT_ID_SUFFIX = '__NAMED_CONTEXT';
42269
42363
  async function reviveOrCreateContext(adapterId, durableStore, durableStoreErrorHandler, contextStores, pendingContextStoreKeys, onContextLoaded) {
@@ -42319,14 +42413,16 @@
42319
42413
  * @param durableStore A DurableStore implementation
42320
42414
  * @param instrumentation An instrumentation function implementation
42321
42415
  */
42322
- function makeDurable(environment, { durableStore, instrumentation }) {
42323
- let ingestStagingStore = null;
42416
+ function makeDurable(environment, { durableStore, instrumentation, useRevivingStore, enableDurableMetadataRefresh = false, }) {
42417
+ let stagingStore = null;
42324
42418
  const durableTTLStore = new DurableTTLStore(durableStore);
42325
42419
  const mergeKeysPromiseMap = new Map();
42326
42420
  // When a context store is mutated we write it to L2, which causes DS on change
42327
42421
  // event. If this instance of makeDurable caused that L2 write we can ignore that
42328
42422
  // on change event. This Set helps us do that.
42329
42423
  const pendingContextStoreKeys = new Set();
42424
+ // Reviving stores are tracked so that they can be notified of durable store change notifications.
42425
+ const revivingStores = new Set();
42330
42426
  // redirects that need to be flushed to the durable store
42331
42427
  const pendingStoreRedirects = new Map();
42332
42428
  const contextStores = create$6(null);
@@ -42352,6 +42448,7 @@
42352
42448
  const defaultSegmentKeys = [];
42353
42449
  const adapterContextSegmentKeys = [];
42354
42450
  const redirectSegmentKeys = [];
42451
+ const metadataRefreshSegmentKeys = [];
42355
42452
  const messagingSegmentKeys = [];
42356
42453
  let shouldBroadcast = false;
42357
42454
  for (let i = 0, len = changes.length; i < len; i++) {
@@ -42359,7 +42456,12 @@
42359
42456
  // we only care about changes to the data which is stored in the default
42360
42457
  // segment or the adapter context
42361
42458
  if (change.segment === DefaultDurableSegment) {
42362
- defaultSegmentKeys.push(...change.ids);
42459
+ if (change.type === 'setMetadata') {
42460
+ metadataRefreshSegmentKeys.push(...change.ids);
42461
+ }
42462
+ else {
42463
+ defaultSegmentKeys.push(...change.ids);
42464
+ }
42363
42465
  }
42364
42466
  else if (change.segment === AdapterContextSegment) {
42365
42467
  adapterContextSegmentKeys.push(...change.ids);
@@ -42423,9 +42525,26 @@
42423
42525
  // and go through an entire broadcast/revive cycle for unchanged data
42424
42526
  // call base environment storeEvict so this evict is not tracked for durable deletion
42425
42527
  environment.storeEvict(key);
42528
+ for (const revivingStore of revivingStores) {
42529
+ revivingStore.markStale(key);
42530
+ }
42426
42531
  }
42427
42532
  shouldBroadcast = true;
42428
42533
  }
42534
+ // process metadata only refreshes
42535
+ if (metadataRefreshSegmentKeys.length > 0) {
42536
+ const entries = await durableStore.getMetadata(metadataRefreshSegmentKeys, DefaultDurableSegment);
42537
+ if (entries !== undefined) {
42538
+ const entryKeys = keys$7(entries);
42539
+ for (let i = 0, len = entryKeys.length; i < len; i++) {
42540
+ const entryKey = entryKeys[i];
42541
+ const { metadata } = entries[entryKey];
42542
+ if (metadata !== undefined) {
42543
+ environment.putStoreMetadata(entryKey, metadata, false);
42544
+ }
42545
+ }
42546
+ }
42547
+ }
42429
42548
  if (shouldBroadcast) {
42430
42549
  await environment.storeBroadcast(rebuildSnapshot, environment.snapshotAvailable);
42431
42550
  }
@@ -42451,10 +42570,10 @@
42451
42570
  };
42452
42571
  const storePublish = function (key, data) {
42453
42572
  validateNotDisposed();
42454
- if (ingestStagingStore === null) {
42455
- ingestStagingStore = buildIngestStagingStore(environment);
42573
+ if (stagingStore === null) {
42574
+ stagingStore = buildIngestStagingStore(environment);
42456
42575
  }
42457
- ingestStagingStore.publish(key, data);
42576
+ stagingStore.publish(key, data);
42458
42577
  // remove record from main luvio L1 cache while we are on the synchronous path
42459
42578
  // because we do not want some other code attempting to use the
42460
42579
  // in-memory values before the durable store onChanged handler
@@ -42463,26 +42582,26 @@
42463
42582
  };
42464
42583
  const publishStoreMetadata = function (recordId, storeMetadata) {
42465
42584
  validateNotDisposed();
42466
- if (ingestStagingStore === null) {
42467
- ingestStagingStore = buildIngestStagingStore(environment);
42585
+ if (stagingStore === null) {
42586
+ stagingStore = buildIngestStagingStore(environment);
42468
42587
  }
42469
- ingestStagingStore.publishMetadata(recordId, storeMetadata);
42588
+ stagingStore.publishMetadata(recordId, storeMetadata);
42470
42589
  };
42471
42590
  const storeIngest = function (key, ingest, response, luvio) {
42472
42591
  validateNotDisposed();
42473
42592
  // we don't ingest to the luvio L1 store from network directly, we ingest to
42474
42593
  // L2 and let DurableStore on change event revive keys into luvio L1 store
42475
- if (ingestStagingStore === null) {
42476
- ingestStagingStore = buildIngestStagingStore(environment);
42594
+ if (stagingStore === null) {
42595
+ stagingStore = buildIngestStagingStore(environment);
42477
42596
  }
42478
- environment.storeIngest(key, ingest, response, luvio, ingestStagingStore);
42597
+ environment.storeIngest(key, ingest, response, luvio, stagingStore);
42479
42598
  };
42480
42599
  const storeIngestError = function (key, errorSnapshot, storeMetadataParams, _storeOverride) {
42481
42600
  validateNotDisposed();
42482
- if (ingestStagingStore === null) {
42483
- ingestStagingStore = buildIngestStagingStore(environment);
42601
+ if (stagingStore === null) {
42602
+ stagingStore = buildIngestStagingStore(environment);
42484
42603
  }
42485
- environment.storeIngestError(key, errorSnapshot, storeMetadataParams, ingestStagingStore);
42604
+ environment.storeIngestError(key, errorSnapshot, storeMetadataParams, stagingStore);
42486
42605
  };
42487
42606
  const storeBroadcast = function (_rebuildSnapshot, _snapshotDataAvailable) {
42488
42607
  validateNotDisposed();
@@ -42493,19 +42612,19 @@
42493
42612
  };
42494
42613
  const publishChangesToDurableStore = function (additionalDurableStoreOperations) {
42495
42614
  validateNotDisposed();
42496
- if (ingestStagingStore === null) {
42615
+ if (stagingStore === null) {
42497
42616
  return Promise.resolve();
42498
42617
  }
42499
- const promise = flushInMemoryStoreValuesToDurableStore(ingestStagingStore, durableStore, durableStoreErrorHandler, new Map(pendingStoreRedirects), additionalDurableStoreOperations);
42618
+ const promise = flushInMemoryStoreValuesToDurableStore(stagingStore, durableStore, durableStoreErrorHandler, new Map(pendingStoreRedirects), additionalDurableStoreOperations, enableDurableMetadataRefresh);
42500
42619
  pendingStoreRedirects.clear();
42501
- ingestStagingStore = null;
42620
+ stagingStore = null;
42502
42621
  return promise;
42503
42622
  };
42504
42623
  const storeLookup = function (sel, createSnapshot, refresh, ttlStrategy) {
42505
42624
  validateNotDisposed();
42506
- // if this lookup is right after an ingest there will be a staging store
42507
- if (ingestStagingStore !== null) {
42508
- const reader = new Reader(ingestStagingStore, sel.variables, refresh, undefined, ttlStrategy);
42625
+ // if this lookup is right after an ingest or during a revive there will be a staging store
42626
+ if (stagingStore !== null) {
42627
+ const reader = new Reader(stagingStore, sel.variables, refresh, undefined, ttlStrategy);
42509
42628
  return reader.read(sel);
42510
42629
  }
42511
42630
  // otherwise this is from buildCachedSnapshot and we should use the luvio
@@ -42514,24 +42633,24 @@
42514
42633
  };
42515
42634
  const storeEvict = function (key) {
42516
42635
  validateNotDisposed();
42517
- if (ingestStagingStore === null) {
42518
- ingestStagingStore = buildIngestStagingStore(environment);
42636
+ if (stagingStore === null) {
42637
+ stagingStore = buildIngestStagingStore(environment);
42519
42638
  }
42520
- ingestStagingStore.evict(key);
42639
+ stagingStore.evict(key);
42521
42640
  };
42522
42641
  const getNode = function (key) {
42523
42642
  validateNotDisposed();
42524
- if (ingestStagingStore === null) {
42525
- ingestStagingStore = buildIngestStagingStore(environment);
42643
+ if (stagingStore === null) {
42644
+ stagingStore = buildIngestStagingStore(environment);
42526
42645
  }
42527
- return environment.getNode(key, ingestStagingStore);
42646
+ return environment.getNode(key, stagingStore);
42528
42647
  };
42529
42648
  const wrapNormalizedGraphNode = function (normalized) {
42530
42649
  validateNotDisposed();
42531
- if (ingestStagingStore === null) {
42532
- ingestStagingStore = buildIngestStagingStore(environment);
42650
+ if (stagingStore === null) {
42651
+ stagingStore = buildIngestStagingStore(environment);
42533
42652
  }
42534
- return environment.wrapNormalizedGraphNode(normalized, ingestStagingStore);
42653
+ return environment.wrapNormalizedGraphNode(normalized, stagingStore);
42535
42654
  };
42536
42655
  const rebuildSnapshot = function (snapshot, onRebuild) {
42537
42656
  validateNotDisposed();
@@ -42543,7 +42662,7 @@
42543
42662
  return;
42544
42663
  }
42545
42664
  // Do an L2 revive and emit to subscriber using the callback.
42546
- reviveSnapshot(environment, durableStore, rebuilt, durableStoreErrorHandler, () => {
42665
+ reviveSnapshotWrapper(rebuilt, () => {
42547
42666
  // reviveSnapshot will revive into L1, and since "records" is a reference
42548
42667
  // (and not a copy) to the L1 records we can use it for rebuild
42549
42668
  let rebuiltSnap;
@@ -42584,10 +42703,10 @@
42584
42703
  // the next publishChangesToDurableStore. NOTE: we don't need to call
42585
42704
  // redirect on the base environment store because staging store and base
42586
42705
  // L1 store share the same redirect and reverseRedirectKeys
42587
- if (ingestStagingStore === null) {
42588
- ingestStagingStore = buildIngestStagingStore(environment);
42706
+ if (stagingStore === null) {
42707
+ stagingStore = buildIngestStagingStore(environment);
42589
42708
  }
42590
- ingestStagingStore.redirect(existingKey, canonicalKey);
42709
+ stagingStore.redirect(existingKey, canonicalKey);
42591
42710
  };
42592
42711
  const storeSetTTLOverride = function (namespace, representationName, ttl) {
42593
42712
  validateNotDisposed();
@@ -42628,7 +42747,7 @@
42628
42747
  if (isUnfulfilledSnapshot$1(snapshot)) {
42629
42748
  const start = Date.now();
42630
42749
  emitDurableEnvironmentAdapterEvent({ type: 'l2-revive-start' }, adapterRequestContext.eventObservers);
42631
- const revivedSnapshot = reviveSnapshot(environment, durableStore, snapshot, durableStoreErrorHandler, () => injectedStoreLookup(snapshot.select, snapshot.refresh)).then((result) => {
42750
+ const revivedSnapshot = reviveSnapshotWrapper(snapshot, () => injectedStoreLookup(snapshot.select, snapshot.refresh)).then((result) => {
42632
42751
  emitDurableEnvironmentAdapterEvent({
42633
42752
  type: 'l2-revive-end',
42634
42753
  snapshot: result.snapshot,
@@ -42653,15 +42772,15 @@
42653
42772
  };
42654
42773
  const getIngestStagingStoreRecords = function () {
42655
42774
  validateNotDisposed();
42656
- if (ingestStagingStore !== null) {
42657
- return ingestStagingStore.fallbackStringKeyInMemoryStore.records;
42775
+ if (stagingStore !== null) {
42776
+ return stagingStore.fallbackStringKeyInMemoryStore.records;
42658
42777
  }
42659
42778
  return {};
42660
42779
  };
42661
42780
  const getIngestStagingStoreMetadata = function () {
42662
42781
  validateNotDisposed();
42663
- if (ingestStagingStore !== null) {
42664
- return ingestStagingStore.fallbackStringKeyInMemoryStore.metadata;
42782
+ if (stagingStore !== null) {
42783
+ return stagingStore.fallbackStringKeyInMemoryStore.metadata;
42665
42784
  }
42666
42785
  return {};
42667
42786
  };
@@ -42700,22 +42819,20 @@
42700
42819
  }
42701
42820
  await Promise.all(pendingPromises);
42702
42821
  const entries = await durableStore.getEntries(keysToReviveAsArray, DefaultDurableSegment);
42703
- ingestStagingStore = buildIngestStagingStore(environment);
42822
+ stagingStore = buildIngestStagingStore(environment);
42704
42823
  publishDurableStoreEntries(entries, (key, record) => {
42705
42824
  if (typeof key === 'string') {
42706
- ingestStagingStore.fallbackStringKeyInMemoryStore.records[key] =
42707
- record;
42825
+ stagingStore.fallbackStringKeyInMemoryStore.records[key] = record;
42708
42826
  }
42709
42827
  else {
42710
- ingestStagingStore.recordsMap.set(key, record);
42828
+ stagingStore.recordsMap.set(key, record);
42711
42829
  }
42712
42830
  }, (key, metadata) => {
42713
42831
  if (typeof key === 'string') {
42714
- ingestStagingStore.fallbackStringKeyInMemoryStore.metadata[key] =
42715
- metadata;
42832
+ stagingStore.fallbackStringKeyInMemoryStore.metadata[key] = metadata;
42716
42833
  }
42717
42834
  else {
42718
- ingestStagingStore.metadataMap.set(key, metadata);
42835
+ stagingStore.metadataMap.set(key, metadata);
42719
42836
  }
42720
42837
  });
42721
42838
  snapshotFromMemoryIngest = await ingestAndBroadcastFunc();
@@ -42744,7 +42861,7 @@
42744
42861
  // we aren't doing any merging so we don't have to synchronize, the
42745
42862
  // underlying DurableStore implementation takes care of R/W sync
42746
42863
  // so all we have to do is ingest then write to L2
42747
- ingestStagingStore = buildIngestStagingStore(environment);
42864
+ stagingStore = buildIngestStagingStore(environment);
42748
42865
  snapshotFromMemoryIngest = await ingestAndBroadcastFunc();
42749
42866
  }
42750
42867
  if (snapshotFromMemoryIngest === undefined) {
@@ -42755,12 +42872,12 @@
42755
42872
  }
42756
42873
  // if snapshot from staging store lookup is unfulfilled then do an L2 lookup
42757
42874
  const { select, refresh } = snapshotFromMemoryIngest;
42758
- const result = await reviveSnapshot(environment, durableStore, snapshotFromMemoryIngest, durableStoreErrorHandler, () => environment.storeLookup(select, environment.createSnapshot, refresh));
42875
+ const result = await reviveSnapshotWrapper(snapshotFromMemoryIngest, () => environment.storeLookup(select, environment.createSnapshot, refresh));
42759
42876
  return result.snapshot;
42760
42877
  };
42761
42878
  const handleErrorResponse = async function (ingestAndBroadcastFunc) {
42762
42879
  validateNotDisposed();
42763
- ingestStagingStore = buildIngestStagingStore(environment);
42880
+ stagingStore = buildIngestStagingStore(environment);
42764
42881
  return ingestAndBroadcastFunc();
42765
42882
  };
42766
42883
  const getNotifyChangeStoreEntries = function (keys) {
@@ -42811,6 +42928,27 @@
42811
42928
  await durableStore.setEntries({ notifyStoreUpdateAvailable: { data: entryKeys } }, MessagingDurableSegment);
42812
42929
  return Promise.resolve(undefined);
42813
42930
  };
42931
// Runs an L2 revive for `unavailableSnapshot`, optionally routing L1 reads
// through a reviving staging store (feature-gated by `useRevivingStore`) so
// entries revived from the durable store cannot be evicted mid-revive.
const reviveSnapshotWrapper = function (unavailableSnapshot, buildL1Snapshot) {
    let revivingStore = undefined;
    if (useRevivingStore) {
        // NOTE: `store` is private, there doesn't seem to be a better,
        // cleaner way of accessing it from a derived environment.
        let baseStore = environment.store;
        // If we're rebuilding during an ingest, the existing staging store should be the base store.
        if (stagingStore) {
            baseStore = stagingStore;
        }
        // BUGFIX: this assignment previously re-declared `revivingStore` with
        // `let`, shadowing the outer binding — the reviving store was added to
        // `revivingStores` but `undefined` was always passed to
        // `reviveSnapshot`, leaving the feature inert.
        revivingStore = buildRevivingStagingStore(baseStore);
        revivingStores.add(revivingStore);
    }
    return reviveSnapshot(environment, durableStore, unavailableSnapshot, durableStoreErrorHandler, () => {
        // Restore `stagingStore` after the L1 rebuild so any reassignment the
        // lookup path performs does not leak out — TODO(review): confirm which
        // path mutates it during the rebuild.
        const tempStore = stagingStore;
        const result = buildL1Snapshot();
        stagingStore = tempStore;
        return result;
    }, revivingStore).finally(() => {
        // Stop routing durable-store change notifications to this reviving
        // store once the revive settles; without this the `revivingStores`
        // set grows without bound.
        if (revivingStore !== undefined) {
            revivingStores.delete(revivingStore);
        }
    });
};
42814
42952
  // set the default cache policy of the base environment
42815
42953
  environment.setDefaultCachePolicy({
42816
42954
  type: 'stale-while-revalidate',
@@ -42845,6 +42983,72 @@
42845
42983
  });
42846
42984
  }
42847
42985
 
42986
+ /**
42987
+ * Copyright (c) 2022, Salesforce, Inc.,
42988
+ * All rights reserved.
42989
+ * For full license text, see the LICENSE.txt file
42990
+ */
42991
+
42992
+ const API_NAMESPACE$1 = 'UiApi';
42993
+ const RECORD_REPRESENTATION_NAME$2 = 'RecordRepresentation';
42994
+ const RECORD_VIEW_ENTITY_REPRESENTATION_NAME$1 = 'RecordViewEntityRepresentation';
42995
+ const RECORD_ID_PREFIX$1 = `${API_NAMESPACE$1}::${RECORD_REPRESENTATION_NAME$2}:`;
42996
+ const RECORD_VIEW_ENTITY_ID_PREFIX$1 = `${API_NAMESPACE$1}::${RECORD_VIEW_ENTITY_REPRESENTATION_NAME$1}:Name:`;
42997
+ const RECORD_FIELDS_KEY_JUNCTION$1 = '__fields__';
42998
+ function isStoreKeyRecordId(key) {
42999
+ return key.indexOf(RECORD_ID_PREFIX$1) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION$1) === -1;
43000
+ }
43001
+ function isStoreKeyRecordViewEntity$1(key) {
43002
+ return (key.indexOf(RECORD_VIEW_ENTITY_ID_PREFIX$1) > -1 &&
43003
+ key.indexOf(RECORD_FIELDS_KEY_JUNCTION$1) === -1);
43004
+ }
43005
+ function isStoreKeyRecordField(key) {
43006
+ return key.indexOf(RECORD_ID_PREFIX$1) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION$1) > -1;
43007
+ }
43008
+ function extractRecordIdFromStoreKey$1(key) {
43009
+ if (key === undefined ||
43010
+ (key.indexOf(RECORD_ID_PREFIX$1) === -1 && key.indexOf(RECORD_VIEW_ENTITY_ID_PREFIX$1) === -1)) {
43011
+ return undefined;
43012
+ }
43013
+ const parts = key.split(':');
43014
+ return parts[parts.length - 1].split('_')[0];
43015
+ }
43016
+ function buildRecordFieldStoreKey(recordKey, fieldName) {
43017
+ return `${recordKey}${RECORD_FIELDS_KEY_JUNCTION$1}${fieldName}`;
43018
+ }
43019
+ function objectsDeepEqual(lhs, rhs) {
43020
+ if (lhs === rhs)
43021
+ return true;
43022
+ if (typeof lhs !== 'object' || typeof rhs !== 'object' || lhs === null || rhs === null)
43023
+ return false;
43024
+ const lhsKeys = Object.keys(lhs);
43025
+ const rhsKeys = Object.keys(rhs);
43026
+ if (lhsKeys.length !== rhsKeys.length)
43027
+ return false;
43028
+ for (let key of lhsKeys) {
43029
+ if (!rhsKeys.includes(key))
43030
+ return false;
43031
+ if (typeof lhs[key] === 'function' || typeof rhs[key] === 'function') {
43032
+ if (lhs[key].toString() !== rhs[key].toString())
43033
+ return false;
43034
+ }
43035
+ else {
43036
+ if (!objectsDeepEqual(lhs[key], rhs[key]))
43037
+ return false;
43038
+ }
43039
+ }
43040
+ return true;
43041
+ }
43042
+
43043
+ function isStoreRecordError(storeRecord) {
43044
+ return storeRecord.__type === 'error';
43045
+ }
43046
+ function isEntryDurableRecordRepresentation(entry, key) {
43047
+ // Either a DurableRecordRepresentation or StoreRecordError can live at a record key
43048
+ return ((isStoreKeyRecordId(key) || isStoreKeyRecordViewEntity$1(key)) &&
43049
+ entry.data.__type === undefined);
43050
+ }
43051
+
42848
43052
  /**
42849
43053
  * Copyright (c) 2022, Salesforce, Inc.,
42850
43054
  * All rights reserved.
@@ -43229,7 +43433,7 @@
43229
43433
  function errors(result) {
43230
43434
  return result.error;
43231
43435
  }
43232
- function values$3(result) {
43436
+ function values$4(result) {
43233
43437
  return result.value;
43234
43438
  }
43235
43439
  function flattenResults(results) {
@@ -43237,7 +43441,7 @@
43237
43441
  if (fails.length > 0) {
43238
43442
  return failure(fails);
43239
43443
  }
43240
- return success(results.filter(isSuccess).map(values$3));
43444
+ return success(results.filter(isSuccess).map(values$4));
43241
43445
  }
43242
43446
 
43243
43447
  function getFieldInfo(apiName, fieldName, infoMap) {
@@ -44123,7 +44327,7 @@
44123
44327
  if (failures.length > 0) {
44124
44328
  return failure(failures);
44125
44329
  }
44126
- const containers = results.filter(isSuccess).map(values$3);
44330
+ const containers = results.filter(isSuccess).map(values$4);
44127
44331
  const predicates = [];
44128
44332
  containers.forEach((c) => {
44129
44333
  if (c.predicate !== undefined) {
@@ -44415,7 +44619,7 @@
44415
44619
  if (fails.length > 0) {
44416
44620
  return failure(fails);
44417
44621
  }
44418
- const vals = results.filter(isSuccess).reduce(flatMap$1(values$3), []);
44622
+ const vals = results.filter(isSuccess).reduce(flatMap$1(values$4), []);
44419
44623
  return success(vals);
44420
44624
  }
44421
44625
  function isFilterFunction(name) {
@@ -44425,7 +44629,7 @@
44425
44629
  const results = Object.entries(operatorNode.fields)
44426
44630
  .filter(([key, _]) => isFilterFunction(key) === false)
44427
44631
  .map(([key, value]) => operatorWithValue(key, value, dataType));
44428
- const _values = results.filter(isSuccess).map(values$3);
44632
+ const _values = results.filter(isSuccess).map(values$4);
44429
44633
  const fails = results.filter(isFailure).reduce(flatMap$1(errors), []);
44430
44634
  if (fails.length > 0) {
44431
44635
  return failure(fails);
@@ -45346,7 +45550,7 @@
45346
45550
  }
45347
45551
  function recordFields(luvioSelections, names, parentApiName, parentAlias, input, joins) {
45348
45552
  const results = luvioSelections.map((selection) => selectionToQueryField(selection, names, parentApiName, parentAlias, input, joins));
45349
- const fields = results.filter(isSuccess).reduce(flatMap$1(values$3), []);
45553
+ const fields = results.filter(isSuccess).reduce(flatMap$1(values$4), []);
45350
45554
  const fails = results.filter(isFailure).reduce(flatMap$1(errors), []);
45351
45555
  if (fails.length > 0) {
45352
45556
  return failure(fails);
@@ -45592,7 +45796,7 @@
45592
45796
  }
45593
45797
  function rootQuery(recordNodes, input) {
45594
45798
  const results = recordNodes.map((record) => rootRecordQuery(record, input));
45595
- const connections = results.filter(isSuccess).map(values$3);
45799
+ const connections = results.filter(isSuccess).map(values$4);
45596
45800
  const fails = results.filter(isFailure).reduce(flatMap$1(errors), []);
45597
45801
  if (fails.length > 0) {
45598
45802
  return failure(fails);
@@ -46043,7 +46247,11 @@
46043
46247
  try {
46044
46248
  const { data, seenRecords } = await queryEvaluator(rootQuery, context, eventEmitter);
46045
46249
  const rebuildWithStoreEval = ((originalSnapshot) => {
46046
- return storeEval(config, originalSnapshot, observers, connectionKeyBuilder);
46250
+ return storeEval(config, originalSnapshot, observers, connectionKeyBuilder).then((rebuiltSnapshot) => {
46251
+ return objectsDeepEqual(originalSnapshot.data, rebuiltSnapshot.data)
46252
+ ? originalSnapshot
46253
+ : rebuiltSnapshot;
46254
+ });
46047
46255
  });
46048
46256
  const recordId = generateUniqueRecordId$1();
46049
46257
  // if the non-eval'ed snapshot was an error then we return a synthetic
@@ -46346,7 +46554,7 @@
46346
46554
  return new DraftErrorFetchResponse(HttpStatusCode$1.BadRequest, error);
46347
46555
  }
46348
46556
 
46349
- const { keys: keys$5, create: create$5, assign: assign$5, values: values$2 } = Object;
46557
+ const { keys: keys$5, create: create$5, assign: assign$5, values: values$3 } = Object;
46350
46558
  const { stringify: stringify$5, parse: parse$5 } = JSON;
46351
46559
  const { isArray: isArray$3 } = Array;
46352
46560
 
@@ -46781,7 +46989,7 @@
46781
46989
  const queueOperations = handler.getQueueOperationsForCompletingDrafts(queue, action);
46782
46990
  // write the queue operations to the store prior to ingesting the result
46783
46991
  await this.draftStore.completeAction(queueOperations);
46784
- await handler.handleActionCompleted(action, queueOperations, values$2(this.handlers));
46992
+ await handler.handleActionCompleted(action, queueOperations, values$3(this.handlers));
46785
46993
  this.retryIntervalMilliseconds = 0;
46786
46994
  this.uploadingActionId = undefined;
46787
46995
  await this.notifyChangedListeners({
@@ -47989,49 +48197,6 @@
47989
48197
  });
47990
48198
  }
47991
48199
 
47992
- /**
47993
- * Copyright (c) 2022, Salesforce, Inc.,
47994
- * All rights reserved.
47995
- * For full license text, see the LICENSE.txt file
47996
- */
47997
-
47998
- const API_NAMESPACE$1 = 'UiApi';
47999
- const RECORD_REPRESENTATION_NAME$2 = 'RecordRepresentation';
48000
- const RECORD_VIEW_ENTITY_REPRESENTATION_NAME$1 = 'RecordViewEntityRepresentation';
48001
- const RECORD_ID_PREFIX$1 = `${API_NAMESPACE$1}::${RECORD_REPRESENTATION_NAME$2}:`;
48002
- const RECORD_VIEW_ENTITY_ID_PREFIX$1 = `${API_NAMESPACE$1}::${RECORD_VIEW_ENTITY_REPRESENTATION_NAME$1}:Name:`;
48003
- const RECORD_FIELDS_KEY_JUNCTION$1 = '__fields__';
48004
- function isStoreKeyRecordId(key) {
48005
- return key.indexOf(RECORD_ID_PREFIX$1) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION$1) === -1;
48006
- }
48007
- function isStoreKeyRecordViewEntity$1(key) {
48008
- return (key.indexOf(RECORD_VIEW_ENTITY_ID_PREFIX$1) > -1 &&
48009
- key.indexOf(RECORD_FIELDS_KEY_JUNCTION$1) === -1);
48010
- }
48011
- function isStoreKeyRecordField(key) {
48012
- return key.indexOf(RECORD_ID_PREFIX$1) > -1 && key.indexOf(RECORD_FIELDS_KEY_JUNCTION$1) > -1;
48013
- }
48014
- function extractRecordIdFromStoreKey$1(key) {
48015
- if (key === undefined ||
48016
- (key.indexOf(RECORD_ID_PREFIX$1) === -1 && key.indexOf(RECORD_VIEW_ENTITY_ID_PREFIX$1) === -1)) {
48017
- return undefined;
48018
- }
48019
- const parts = key.split(':');
48020
- return parts[parts.length - 1].split('_')[0];
48021
- }
48022
- function buildRecordFieldStoreKey(recordKey, fieldName) {
48023
- return `${recordKey}${RECORD_FIELDS_KEY_JUNCTION$1}${fieldName}`;
48024
- }
48025
-
48026
- function isStoreRecordError(storeRecord) {
48027
- return storeRecord.__type === 'error';
48028
- }
48029
- function isEntryDurableRecordRepresentation(entry, key) {
48030
- // Either a DurableRecordRepresentation or StoreRecordError can live at a record key
48031
- return ((isStoreKeyRecordId(key) || isStoreKeyRecordViewEntity$1(key)) &&
48032
- entry.data.__type === undefined);
48033
- }
48034
-
48035
48200
  function serializeFieldArguments$1(argumentNodes, variables) {
48036
48201
  const mutableArgumentNodes = Object.assign([], argumentNodes);
48037
48202
  return `args__(${mutableArgumentNodes
@@ -48263,7 +48428,7 @@
48263
48428
  (x.length === 0 || (x.length > 0 && Object.prototype.hasOwnProperty.call(x, x.length - 1))));
48264
48429
  }
48265
48430
 
48266
- const { create: create$4, keys: keys$4, values: values$1, entries: entries$3, assign: assign$4 } = Object;
48431
+ const { create: create$4, keys: keys$4, values: values$2, entries: entries$3, assign: assign$4 } = Object;
48267
48432
  const { stringify: stringify$4, parse: parse$4 } = JSON;
48268
48433
  const { isArray: isArray$2$1 } = Array;
48269
48434
 
@@ -48596,7 +48761,7 @@
48596
48761
  return predicate;
48597
48762
  }
48598
48763
  else if (literal !== undefined) {
48599
- const isAvailableLiteral = values$1(DateLiteral).includes(literal);
48764
+ const isAvailableLiteral = values$2(DateLiteral).includes(literal);
48600
48765
  // eslint-disable-next-line @salesforce/lds/no-error-in-production
48601
48766
  if (!isAvailableLiteral)
48602
48767
  throw new Error(`${literal} is not a valid DateLiteral`);
@@ -49270,7 +49435,7 @@
49270
49435
  for (const join of joins) {
49271
49436
  deduped[join.alias + join.to] = join;
49272
49437
  }
49273
- return values$1(deduped);
49438
+ return values$2(deduped);
49274
49439
  }
49275
49440
  function buildJoins(config) {
49276
49441
  let sql = '';
@@ -49918,7 +50083,7 @@
49918
50083
  return previous.concat(current);
49919
50084
  }
49920
50085
  function findFieldInfo(objectInfo, fieldName) {
49921
- return values$1(objectInfo.fields).find((field) => field.apiName === fieldName ||
50086
+ return values$2(objectInfo.fields).find((field) => field.apiName === fieldName ||
49922
50087
  (field.dataType === 'Reference' && field.relationshipName === fieldName));
49923
50088
  }
49924
50089
 
@@ -49938,10 +50103,10 @@
49938
50103
  for (let i = 0, len = keys$1.length; i < len; i++) {
49939
50104
  const key = keys$1[i];
49940
50105
  const parentFields = objectInfoMap[recordType].fields;
49941
- const fieldInfo = values$1(parentFields).find(findSpanningField(key));
50106
+ const fieldInfo = values$2(parentFields).find(findSpanningField(key));
49942
50107
  if (fieldInfo && fieldInfo.referenceToInfos.length > 0) {
49943
50108
  const { apiName } = fieldInfo.referenceToInfos[0];
49944
- const parentFieldInfo = values$1(objectInfoMap[recordType].fields).find(findSpanningField(fieldInfo.apiName));
50109
+ const parentFieldInfo = values$2(objectInfoMap[recordType].fields).find(findSpanningField(fieldInfo.apiName));
49945
50110
  if (parentFieldInfo !== undefined) {
49946
50111
  const path = {
49947
50112
  leftPath: `$.fields.${parentFieldInfo.apiName}.value`,
@@ -50083,7 +50248,7 @@
50083
50248
  let baseRecord = undefined;
50084
50249
  // Concrete types for Polymorphic field
50085
50250
  const polyTypes = [];
50086
- for (const type of values$1(schema.getTypeMap())) {
50251
+ for (const type of values$2(schema.getTypeMap())) {
50087
50252
  if (type.name === 'Record') {
50088
50253
  recordInterface = type;
50089
50254
  }
@@ -50096,7 +50261,7 @@
50096
50261
  if (polyFields.find((fieldTypeName) => fieldTypeName === type.name) !== undefined) {
50097
50262
  polyTypes.push(type);
50098
50263
  }
50099
- const fields = values$1(type.getFields());
50264
+ const fields = values$2(type.getFields());
50100
50265
  // initialize the fields of current type with default behavior
50101
50266
  for (const field of fields) {
50102
50267
  field.resolve = defaultFieldResolver;
@@ -50498,26 +50663,20 @@
50498
50663
  let recordConnections = ``;
50499
50664
  const polymorphicFieldTypeNames = new Set();
50500
50665
  let typedScalars = new Set();
50501
- for (const objectInfo of values$1(objectInfos)) {
50666
+ let parentRelationshipFields = new Set();
50667
+ for (const objectInfo of values$2(objectInfos)) {
50502
50668
  const { apiName, childRelationships } = objectInfo;
50503
50669
  let fields = ``;
50504
50670
  typedScalars.add(`${apiName}_Filter`);
50505
50671
  typedScalars.add(`${apiName}_OrderBy`);
50506
- for (const childRelationship of childRelationships) {
50507
- const { childObjectApiName } = childRelationship;
50508
- // Only add the relationship if there is relevant objectinfos for it,
50509
- // otherwise we'd be defining types we cannot satisfy and aren't referenced in
50510
- // the query.
50511
- if (objectInfos[childObjectApiName] !== undefined) {
50512
- fields += `${childRelationship.relationshipName}(first: Int, where: ${childObjectApiName}_Filter, orderBy: ${childObjectApiName}_OrderBy, scope: SupportedScopes): ${childObjectApiName}Connection \n`;
50513
- typedScalars.add(`${childObjectApiName}_Filter`);
50514
- typedScalars.add(`${childObjectApiName}_OrderBy`);
50515
- }
50516
- }
50517
- for (const field of values$1(objectInfo.fields)) {
50672
+ for (const field of values$2(objectInfo.fields)) {
50518
50673
  if (!fieldsStaticallyAdded.includes(field.apiName)) {
50519
50674
  fields += `${field.apiName}: ${dataTypeToType(field.dataType, field.apiName)}\n`;
50520
50675
  }
50676
+ //handles parent relationship
50677
+ if (field.relationshipName === null) {
50678
+ continue;
50679
+ }
50521
50680
  // For spanning parent relationships with no union types
50522
50681
  if (field.referenceToInfos.length === 1) {
50523
50682
  const [relation] = field.referenceToInfos;
@@ -50525,11 +50684,13 @@
50525
50684
  // otherwise we'd be defining types we cannot satisfy and aren't referenced in
50526
50685
  // the query.
50527
50686
  if (objectInfos[relation.apiName] !== undefined) {
50687
+ parentRelationshipFields.add(field.relationshipName);
50528
50688
  fields += `${field.relationshipName}: ${relation.apiName}\n`;
50529
50689
  }
50530
50690
  // For polymorphic field, its type is 'Record' inteface. The concrete entity type name is saved for field resolving of next phase
50531
50691
  }
50532
50692
  else if (field.referenceToInfos.length > 1) {
50693
+ parentRelationshipFields.add(field.relationshipName);
50533
50694
  fields += `${field.relationshipName}: Record\n`;
50534
50695
  for (const relation of field.referenceToInfos) {
50535
50696
  if (objectInfos[relation.apiName] !== undefined) {
@@ -50538,6 +50699,20 @@
50538
50699
  }
50539
50700
  }
50540
50701
  }
50702
+ // handles child relationship
50703
+ for (const childRelationship of childRelationships) {
50704
+ const { childObjectApiName } = childRelationship;
50705
+ // Only add the relationship if there is relevant objectinfos for it,
50706
+ // otherwise we'd be defining types we cannot satisfy and aren't referenced in
50707
+ // the query.
50708
+ // If one field has both parent relationship and child relationship with the same name, the child relationship is ignored. This is how the server GQL has implemented as date of 08/07/2023
50709
+ if (objectInfos[childObjectApiName] !== undefined &&
50710
+ !parentRelationshipFields.has(childRelationship.relationshipName)) {
50711
+ fields += `${childRelationship.relationshipName}(first: Int, where: ${childObjectApiName}_Filter, orderBy: ${childObjectApiName}_OrderBy, scope: SupportedScopes): ${childObjectApiName}Connection \n`;
50712
+ typedScalars.add(`${childObjectApiName}_Filter`);
50713
+ typedScalars.add(`${childObjectApiName}_OrderBy`);
50714
+ }
50715
+ }
50541
50716
  recordQueries += `${apiName}(first: Int, where: ${apiName}_Filter, orderBy: ${apiName}_OrderBy, scope: SupportedScopes): ${apiName}Connection\n`;
50542
50717
  const isServiceAppointment = apiName === 'ServiceAppointment';
50543
50718
  recordConnections += /* GraphQL */ `
@@ -51017,7 +51192,7 @@
51017
51192
  const objectInfo = objectInfos[apiName[0]];
51018
51193
  if (!objectInfo)
51019
51194
  return false;
51020
- return values$1(objectInfo.fields).some((fieldInfo) => {
51195
+ return values$2(objectInfo.fields).some((fieldInfo) => {
51021
51196
  return (fieldInfo.apiName === 'OwnerId' &&
51022
51197
  fieldInfo.referenceToInfos.some((referenceToInfo) => {
51023
51198
  return referenceToInfo.apiName === 'User';
@@ -52268,7 +52443,7 @@
52268
52443
  * For full license text, see the LICENSE.txt file
52269
52444
  */
52270
52445
 
52271
- const { keys: keys$3, values, create: create$3, assign: assign$3, freeze: freeze$2 } = Object;
52446
+ const { keys: keys$3, values: values$1, create: create$3, assign: assign$3, freeze: freeze$2 } = Object;
52272
52447
  const { stringify: stringify$3, parse: parse$3 } = JSON;
52273
52448
  const { shift } = Array.prototype;
52274
52449
  const { isArray: isArray$1$1 } = Array;
@@ -53118,7 +53293,7 @@
53118
53293
  return;
53119
53294
  }
53120
53295
  const objectInfo = objectInfoMap[apiName];
53121
- const optionalFields = values(objectInfo.fields).map((field) => `${apiName}.${field.apiName}`);
53296
+ const optionalFields = values$1(objectInfo.fields).map((field) => `${apiName}.${field.apiName}`);
53122
53297
  await getAdapterData(this.getRecordAdapter, {
53123
53298
  recordId: referenceFieldInfo.id,
53124
53299
  optionalFields,
@@ -53137,7 +53312,7 @@
53137
53312
  const referenceToInfos = fieldInfo.referenceToInfos;
53138
53313
  const apiNames = referenceToInfos.map((referenceToInfo) => referenceToInfo.apiName);
53139
53314
  const objectInfoMap = await this.objectInfoService.getObjectInfos(apiNames);
53140
- for (const objectInfo of values(objectInfoMap)) {
53315
+ for (const objectInfo of values$1(objectInfoMap)) {
53141
53316
  const { apiName, keyPrefix } = objectInfo;
53142
53317
  if (keyPrefix !== null && id.startsWith(keyPrefix)) {
53143
53318
  return apiName;
@@ -53647,14 +53822,30 @@
53647
53822
  const operationsWithDenormedRecords = [];
53648
53823
  for (let i = 0, len = operations.length; i < len; i++) {
53649
53824
  const operation = operations[i];
53650
- if (operation.segment !== DefaultDurableSegment || operation.type !== 'setEntries') {
53651
- operationsWithDenormedRecords.push(operation);
53652
- continue;
53825
+ if (durableStore.plugin !== undefined &&
53826
+ durableStore.plugin.supportsBatchUpdates !== undefined &&
53827
+ durableStore.plugin.supportsBatchUpdates() === true) {
53828
+ if (operation.segment !== DefaultDurableSegment ||
53829
+ operation.type !== 'setEntries') {
53830
+ operationsWithDenormedRecords.push(operation);
53831
+ continue;
53832
+ }
53833
+ operationsWithDenormedRecords.push({
53834
+ ...operation,
53835
+ entries: denormalizeEntries(operation.entries),
53836
+ });
53837
+ }
53838
+ else {
53839
+ if (operation.segment !== DefaultDurableSegment ||
53840
+ operation.type === 'evictEntries') {
53841
+ operationsWithDenormedRecords.push(operation);
53842
+ continue;
53843
+ }
53844
+ operationsWithDenormedRecords.push({
53845
+ ...operation,
53846
+ entries: denormalizeEntries(operation.entries),
53847
+ });
53653
53848
  }
53654
- operationsWithDenormedRecords.push({
53655
- ...operation,
53656
- entries: denormalizeEntries(operation.entries),
53657
- });
53658
53849
  }
53659
53850
  return durableStore.batchOperations(operationsWithDenormedRecords);
53660
53851
  };
@@ -54202,6 +54393,9 @@
54202
54393
  if (!rebuildResult.errors) {
54203
54394
  rebuildResult = removeSyntheticFields(rebuildResult, config.query);
54204
54395
  }
54396
+ if (objectsDeepEqual(rebuildResult, originalSnapshot.data)) {
54397
+ return originalSnapshot;
54398
+ }
54205
54399
  // 'originalSnapshot' is the local eval snapshot subscribed. It is always in 'Fulfilled' state. This behavior would change once W-1273462(rebuild non-evaluated snapshot when the graphql local eval rebuild is triggered) is resolved.
54206
54400
  return {
54207
54401
  ...originalSnapshot,
@@ -56233,7 +56427,7 @@
56233
56427
  };
56234
56428
  }
56235
56429
 
56236
- const { keys: keys$9, create: create$7, assign: assign$7, entries } = Object;
56430
+ const { keys: keys$9, create: create$7, assign: assign$7, entries, values } = Object;
56237
56431
  const { stringify: stringify$7, parse: parse$7 } = JSON;
56238
56432
 
56239
56433
  function selectColumnsFromTableWhereKeyIn(columnNames, table, keyColumnName, whereIn) {
@@ -56267,6 +56461,22 @@
56267
56461
  }, reject);
56268
56462
  });
56269
56463
  }
56464
+ getMetadataByKeys(keys) {
56465
+ const query = selectColumnsFromTableWhereKeyIn([COLUMN_NAME_KEY$2, COLUMN_NAME_METADATA$1], this.tableName, COLUMN_NAME_KEY$2, keys);
56466
+ return new Promise((resolve, reject) => {
56467
+ this.plugin.query(query, keys, (results) => {
56468
+ resolve(results.rows.reduce((entries, row) => {
56469
+ const [key, stringifiedMetadata] = row;
56470
+ if (stringifiedMetadata !== undefined) {
56471
+ entries[key] = {
56472
+ metadata: parse$7(stringifiedMetadata),
56473
+ };
56474
+ }
56475
+ return entries;
56476
+ }, {}));
56477
+ }, reject);
56478
+ });
56479
+ }
56270
56480
  getAll() {
56271
56481
  return new Promise((resolve, reject) => {
56272
56482
  this.plugin.query(this.getAllQuery, [], (x) => {
@@ -56293,6 +56503,24 @@
56293
56503
  }, []),
56294
56504
  };
56295
56505
  }
56506
+ metadataToUpdateOperations(entries, segment) {
56507
+ return {
56508
+ type: 'update',
56509
+ table: this.tableName,
56510
+ keyColumn: COLUMN_NAME_KEY$2,
56511
+ context: {
56512
+ segment,
56513
+ type: 'setMetadata',
56514
+ },
56515
+ columns: [COLUMN_NAME_METADATA$1],
56516
+ values: keys$9(entries).reduce((values, key) => {
56517
+ const { metadata } = entries[key];
56518
+ const row = [metadata ? stringify$7(metadata) : null];
56519
+ values[key] = row;
56520
+ return values;
56521
+ }, {}),
56522
+ };
56523
+ }
56296
56524
  mapToDurableEntries(sqliteResult) {
56297
56525
  return sqliteResult.rows.reduce((entries, row) => {
56298
56526
  const [key, stringifiedData, stringifiedMetadata] = row;
@@ -56339,6 +56567,25 @@
56339
56567
  }, reject);
56340
56568
  });
56341
56569
  }
56570
+ getMetadataByKeys(keys, namespace) {
56571
+ if (namespace === undefined) {
56572
+ throw Error('LdsInternalDataTable requires namespace');
56573
+ }
56574
+ const query = selectColumnsFromTableWhereKeyInNamespaced([COLUMN_NAME_KEY$1, COLUMN_NAME_METADATA], this.tableName, COLUMN_NAME_KEY$1, keys, COLUMN_NAME_NAMESPACE);
56575
+ return new Promise((resolve, reject) => {
56576
+ this.plugin.query(query, [namespace].concat(keys), (results) => {
56577
+ resolve(results.rows.reduce((entries, row) => {
56578
+ const [key, stringifiedMetadata] = row;
56579
+ if (stringifiedMetadata !== undefined) {
56580
+ entries[key] = {
56581
+ metadata: parse$7(stringifiedMetadata),
56582
+ };
56583
+ }
56584
+ return entries;
56585
+ }, {}));
56586
+ }, reject);
56587
+ });
56588
+ }
56342
56589
  getAll(namespace) {
56343
56590
  return new Promise((resolve, reject) => {
56344
56591
  this.plugin.query(this.getAllQuery, [namespace], (x) => {
@@ -56372,6 +56619,42 @@
56372
56619
  }, []),
56373
56620
  };
56374
56621
  }
56622
+ metadataToUpdateOperations(entries, segment) {
56623
+ return {
56624
+ type: 'update',
56625
+ table: this.tableName,
56626
+ keyColumn: COLUMN_NAME_KEY$1,
56627
+ context: {
56628
+ segment,
56629
+ type: 'setMetadata',
56630
+ },
56631
+ columns: [COLUMN_NAME_METADATA],
56632
+ values: keys$9(entries).reduce((values, key) => {
56633
+ const { metadata } = entries[key];
56634
+ const row = [metadata ? stringify$7(metadata) : null];
56635
+ values[key] = row;
56636
+ return values;
56637
+ }, {}),
56638
+ };
56639
+ }
56640
+ metadataToUpdateSQLQueries(entries, segment) {
56641
+ return keys$9(entries).reduce((accu, key) => {
56642
+ const { metadata } = entries[key];
56643
+ if (metadata !== undefined) {
56644
+ accu.push({
56645
+ sql: `UPDATE ${this.tableName} SET ${COLUMN_NAME_METADATA} = ? WHERE (${COLUMN_NAME_KEY$1} IS ? AND ${COLUMN_NAME_NAMESPACE} IS ?)`,
56646
+ params: [stringify$7(metadata), key, segment],
56647
+ change: {
56648
+ ids: [key],
56649
+ segment,
56650
+ type: 'setMetadata',
56651
+ isExternalChange: false,
56652
+ },
56653
+ });
56654
+ }
56655
+ return accu;
56656
+ }, []);
56657
+ }
56375
56658
  mapToDurableEntries(sqliteResult) {
56376
56659
  return sqliteResult.rows.reduce((entries, row) => {
56377
56660
  const [key, stringifiedData, stringifiedMetadata] = row;
@@ -56408,9 +56691,16 @@
56408
56691
  });
56409
56692
  });
56410
56693
  }
56694
+ batchQuery(queries) {
56695
+ const promises = queries.map((q) => this.query(q.sql, q.params));
56696
+ return Promise.all(promises);
56697
+ }
56411
56698
  async getEntries(entryIds, segment) {
56412
56699
  return this.getTable(segment).getByKeys(entryIds, segment);
56413
56700
  }
56701
+ async getMetadata(entryIds, segment) {
56702
+ return this.getTable(segment).getMetadataByKeys(entryIds, segment);
56703
+ }
56414
56704
  getAllEntries(segment) {
56415
56705
  return this.getTable(segment).getAll(segment);
56416
56706
  }
@@ -56419,12 +56709,30 @@
56419
56709
  const upsertOperation = table.entriesToUpsertOperations(entries, segment);
56420
56710
  return this.batchOperationAsPromise([upsertOperation]);
56421
56711
  }
56712
+ setMetadata(entries, segment) {
56713
+ const table = this.getTable(segment);
56714
+ const operation = this.plugin.supportsBatchUpdates === undefined ||
56715
+ this.plugin.supportsBatchUpdates() === false
56716
+ ? table.entriesToUpsertOperations(entries, segment)
56717
+ : table.metadataToUpdateOperations(entries, segment);
56718
+ return this.batchOperationAsPromise([operation]);
56719
+ }
56422
56720
  batchOperations(operations) {
56423
56721
  const sqliteOperations = operations.reduce((acc, cur) => {
56424
56722
  if (cur.type === 'setEntries') {
56425
56723
  const table = this.getTable(cur.segment);
56426
56724
  acc.push(table.entriesToUpsertOperations(cur.entries, cur.segment));
56427
56725
  }
56726
+ else if (cur.type === 'setMetadata') {
56727
+ const table = this.getTable(cur.segment);
56728
+ if (this.plugin.supportsBatchUpdates === undefined ||
56729
+ this.plugin.supportsBatchUpdates() === false) {
56730
+ acc.push(table.entriesToUpsertOperations(cur.entries, cur.segment));
56731
+ }
56732
+ else {
56733
+ acc.push(table.metadataToUpdateOperations(cur.entries, cur.segment));
56734
+ }
56735
+ }
56428
56736
  else {
56429
56737
  acc.push(this.idsToDeleteOperation(cur.ids, cur.segment));
56430
56738
  }
@@ -56441,8 +56749,15 @@
56441
56749
  this.plugin
56442
56750
  .registerOnChangedListener(async (changes) => {
56443
56751
  const durableChanges = changes.map((c) => {
56752
+ let type = c.type === 'upsert' ? 'setEntries' : 'evictEntries';
56753
+ // if our context contains a type then set that as our main level type
56754
+ // allows us in the future of updates to specify the segment change happening
56755
+ // example being update call on metadata only or updating data
56756
+ if (c.type === 'update' && c.context.type !== undefined) {
56757
+ type = c.context.type;
56758
+ }
56444
56759
  return {
56445
- type: c.type === 'upsert' ? 'setEntries' : 'evictEntries',
56760
+ type,
56446
56761
  ids: c.keys,
56447
56762
  isExternalChange: false,
56448
56763
  segment: c.context.segment,
@@ -56509,6 +56824,10 @@
56509
56824
  }, reject);
56510
56825
  });
56511
56826
  }
56827
+ getMetadataByKeys(_keys) {
56828
+ // eslint-disable-next-line @salesforce/lds/no-error-in-production
56829
+ throw new Error(`There is no metadata in the ${this.tableName} table.`);
56830
+ }
56512
56831
  getAll() {
56513
56832
  const getAllQuery = `SELECT ${this.columnNames.join(',')} FROM ${this.tableName}`;
56514
56833
  return new Promise((resolve, reject) => {
@@ -56534,6 +56853,10 @@
56534
56853
  }, []),
56535
56854
  };
56536
56855
  }
56856
+ metadataToUpdateOperations(_entries, _segment) {
56857
+ // eslint-disable-next-line @salesforce/lds/no-error-in-production
56858
+ throw new Error(`There is no metadata in the ${this.tableName} table.`);
56859
+ }
56537
56860
  mapToDurableEntries(sqliteResult) {
56538
56861
  return sqliteResult.rows.reduce((entries, row) => {
56539
56862
  const [key, stringifiedData] = row;
@@ -57871,6 +58194,7 @@
57871
58194
  const gqlEnv = makeEnvironmentGraphqlAware(baseEnv);
57872
58195
  const durableEnv = makeDurable(gqlEnv, {
57873
58196
  durableStore: recordDenormingStore,
58197
+ enableDurableMetadataRefresh: ldsMetadataRefreshEnabled.isOpen({ fallback: false }),
57874
58198
  });
57875
58199
  getIngestRecords = durableEnv.getIngestStagingStoreRecords;
57876
58200
  getIngestMetadata = durableEnv.getIngestStagingStoreMetadata;
@@ -57969,7 +58293,7 @@
57969
58293
  id: '@salesforce/lds-network-adapter',
57970
58294
  instrument: instrument$1,
57971
58295
  });
57972
- // version: 1.233.0-8ab7ef233
58296
+ // version: 1.235.0-3790decf0
57973
58297
 
57974
58298
  const { create: create$2, keys: keys$2 } = Object;
57975
58299
  const { stringify: stringify$1, parse: parse$1 } = JSON;
@@ -76440,7 +76764,7 @@
76440
76764
  configuration: { ...configurationForGraphQLAdapters },
76441
76765
  instrument,
76442
76766
  });
76443
- // version: 1.233.0-e0ba7cc7b
76767
+ // version: 1.235.0-c252f93dc
76444
76768
 
76445
76769
  // On core the unstable adapters are re-exported with different names,
76446
76770
 
@@ -78687,7 +79011,7 @@
78687
79011
  unstable_graphQL_imperative = createImperativeAdapter(luvio, createInstrumentedAdapter(ldsAdapter, adapterMetadata), adapterMetadata);
78688
79012
  graphQLImperative = ldsAdapter;
78689
79013
  });
78690
- // version: 1.233.0-e0ba7cc7b
79014
+ // version: 1.235.0-c252f93dc
78691
79015
 
78692
79016
  var gqlApi = /*#__PURE__*/Object.freeze({
78693
79017
  __proto__: null,
@@ -79418,4 +79742,4 @@
79418
79742
  Object.defineProperty(exports, '__esModule', { value: true });
79419
79743
 
79420
79744
  }));
79421
- // version: 1.233.0-8ab7ef233
79745
+ // version: 1.235.0-3790decf0