@salesforce/lds-runtime-bridge 1.287.0-dev1 → 1.287.0-dev10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -700,7 +700,7 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
  // because we do not want some other code attempting to use the
  // in-memory values before the durable store onChanged handler
  // calls back and revives the values to in-memory
- environment.storeEvict(key);
+ environment.storeDealloc(key);
  };
  const publishStoreMetadata = function (recordId, storeMetadata) {
  validateNotDisposed();
@@ -1366,6 +1366,9 @@ class NimbusSqliteStore {
  isEvalSupported() {
  return true;
  }
+ isBatchUpdateSupported() {
+ return this.supportsBatchUpdates;
+ }
  query(sql, params) {
  return new Promise((resolve, reject) => {
  this.plugin.query(sql, params, (result) => {
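
Note on the new isBatchUpdateSupported() accessor: it surfaces the plugin's batch-update capability so the durable-store layer can decide whether metadata writes may accompany entry writes. A minimal sketch of a caller gating on it; writeEntriesAndMetadata and the fallback behavior are illustrative assumptions, while isBatchUpdateSupported(), setEntries, and setMetadata are the calls that appear in this diff.

// Hypothetical caller: only batches metadata alongside data when the
// underlying SQLite plugin reports batch-update support.
async function writeEntriesAndMetadata(sqlStore, durableStore, entries, metadata, segment) {
    const writes = [durableStore.setEntries(entries, segment)];
    if (sqlStore !== undefined && sqlStore.isBatchUpdateSupported()) {
        // Capability check mirrors the gating added in this version.
        writes.push(durableStore.setMetadata(metadata, segment));
    }
    await Promise.all(writes);
}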
@@ -3725,6 +3728,9 @@ function buildBaseSchema() {

  const { keys, values, create, assign, freeze } = Object;

+ function createLink(key) {
+ return { __ref: key };
+ }
  /**
  * Records are stored in the durable store with scalar fields denormalized. This function takes that denoramlized
  * durable store record representation and normalizes it back out into the format the the luvio store expects it
@@ -3735,26 +3741,25 @@ const { keys, values, create, assign, freeze } = Object;
  function normalizeRecordFields(key, entry) {
  const { data: record } = entry;
  const { fields, links } = record;
- const linkNames = keys(links);
+ const missingFieldLinks = keys(links);
+ const fieldNames = keys(fields);
  const normalizedFields = {};
  const returnEntries = {};
- for (let i = 0, len = linkNames.length; i < len; i++) {
- const fieldName = linkNames[i];
+ // restore fields
+ for (let i = 0, len = fieldNames.length; i < len; i++) {
+ const fieldName = fieldNames[i];
  const field = fields[fieldName];
+ const fieldKey = buildRecordFieldStoreKey(key, fieldName);
+ returnEntries[fieldKey] = { data: field };
+ normalizedFields[fieldName] = createLink(fieldKey);
+ }
+ // restore missing fields
+ for (let i = 0, len = missingFieldLinks.length; i < len; i++) {
+ const fieldName = missingFieldLinks[i];
  const link = links[fieldName];
- // field is undefined for missing links
- if (field !== undefined) {
- const fieldKey = buildRecordFieldStoreKey(key, fieldName);
- returnEntries[fieldKey] = { data: field };
- }
- // we need to restore the undefined __ref node as it is
- // lost during serialization
  if (link.isMissing === true) {
  normalizedFields[fieldName] = { ...link, __ref: undefined };
  }
- else {
- normalizedFields[fieldName] = link;
- }
  }
  returnEntries[key] = {
  data: assign(record, { fields: normalizedFields }),
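
For illustration, here is roughly what the rewritten normalizeRecordFields does to one denormalized durable entry. The record key and the key format produced by buildRecordFieldStoreKey are made up for the example; only the fields/links shapes come from the code above.

// Assumed durable shape after this change: scalar field values live under
// `fields`, and `links` only carries markers for missing fields.
const recordKey = 'UiApi::RecordRepresentation:001EXAMPLEID'; // illustrative key
const durableEntry = {
    data: {
        fields: { Name: { value: 'Acme', displayValue: null } },
        links: { Industry: { isMissing: true } },
    },
};

// Hypothetical stand-in for buildRecordFieldStoreKey (real key format may differ).
const fieldStoreKey = (key, fieldName) => `${key}__fields__${fieldName}`;

const { fields, links } = durableEntry.data;
const normalizedFields = {};
const returnEntries = {};
// restore fields: each scalar value becomes its own store entry behind a __ref link
for (const fieldName of Object.keys(fields)) {
    const fieldKey = fieldStoreKey(recordKey, fieldName);
    returnEntries[fieldKey] = { data: fields[fieldName] };
    normalizedFields[fieldName] = { __ref: fieldKey };
}
// restore missing fields: the undefined __ref is lost during serialization, so re-add it
for (const fieldName of Object.keys(links)) {
    if (links[fieldName].isMissing === true) {
        normalizedFields[fieldName] = { ...links[fieldName], __ref: undefined };
    }
}
returnEntries[recordKey] = { data: { ...durableEntry.data, fields: normalizedFields } };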
@@ -3816,7 +3821,7 @@ function buildDurableRecordRepresentation(normalizedRecord, records, pendingEntr
  }
  }
  // we want to preserve fields that are missing nodes
- if (filteredFields[fieldName] !== undefined || field.isMissing === true) {
+ if (field.isMissing === true) {
  links[fieldName] = field;
  }
  }
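
The effect of the tightened condition above is that `links` in the durable representation now only carries missing-field markers; present fields are written solely through the denormalized `fields` map. A sketch of the resulting shape, with field values invented for the example:

// Illustrative denormalized record as persisted after this change.
const durableRecord = {
    fields: {
        Name: { value: 'Acme', displayValue: null }, // present field: value inlined
    },
    links: {
        Industry: { isMissing: true }, // only missing-field markers survive here
    },
};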
@@ -3835,7 +3840,7 @@ function getDenormalizedKey(originalKey, recordId, luvio) {
  }
  return keyBuilderRecord(luvio, { recordId });
  }
- function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecords, getStoreMetadata, getStore) {
+ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecords, getStoreMetadata, getStore, sqlStore) {
  const getEntries = function (entries, segment) {
  // this HOF only inspects records in the default segment
  if (segment !== DefaultDurableSegment) {
@@ -3897,7 +3902,10 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
  });
  };
  const denormalizeEntries = function (entries) {
+ let hasEntries = false;
+ let hasMetadata = false;
  const putEntries = create(null);
+ const putMetadata = create(null);
  const keys$1 = keys(entries);
  const putRecords = {};
  const putRecordViews = {};
@@ -3940,6 +3948,7 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
  putRecords[recordId] = true;
  }
  if (isStoreRecordError(record)) {
+ hasEntries = true;
  putEntries[recordKey] = value;
  continue;
  }
@@ -3952,24 +3961,43 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
  }
  const denormalizedRecord = buildDurableRecordRepresentation(record, storeRecords, recordEntries, store);
  if (denormalizedRecord !== undefined) {
+ hasEntries = true;
  putEntries[recordKey] = {
  data: denormalizedRecord,
  metadata,
  };
+ // if undefined then it is pending
+ // we should still update metadata on pending records
+ }
+ else {
+ hasMetadata = true;
+ metadata.expirationTimestamp = metadata.ingestionTimestamp;
+ putMetadata[recordKey] = {
+ metadata,
+ };
  }
  }
  else {
+ hasEntries = true;
  putEntries[key] = value;
  }
  }
- return putEntries;
+ return { putEntries, putMetadata, hasEntries, hasMetadata };
  };
  const setEntries = function (entries, segment) {
  if (segment !== DefaultDurableSegment) {
  return durableStore.setEntries(entries, segment);
  }
- const putEntries = denormalizeEntries(entries);
- return durableStore.setEntries(putEntries, segment);
+ const { putEntries, putMetadata, hasEntries, hasMetadata } = denormalizeEntries(entries);
+ const promises = [
+ hasEntries ? durableStore.setEntries(putEntries, segment) : undefined,
+ ];
+ if (sqlStore !== undefined && sqlStore.isBatchUpdateSupported()) {
+ promises.push(hasMetadata && sqlStore !== undefined
+ ? durableStore.setMetadata(putMetadata, segment)
+ : undefined);
+ }
+ return Promise.all(promises).then(() => { });
  };
  const batchOperations = function (operations) {
  const operationsWithDenormedRecords = [];
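
A condensed sketch of the new setEntries flow shown above, for readability: denormalizeEntries now splits its output into entry writes and metadata-only writes for records whose denormalization is still pending, and the setMetadata call is only issued when a sqlStore that supports batch updates was provided. The names below mirror the diff; durableStore, sqlStore, denormalizeEntries, and DefaultDurableSegment are stand-ins for the closed-over values.

// Sketch only; not the bundled implementation.
function setEntriesSketch(entries, segment, deps) {
    const { durableStore, sqlStore, denormalizeEntries, DefaultDurableSegment } = deps;
    if (segment !== DefaultDurableSegment) {
        // non-default segments pass through untouched
        return durableStore.setEntries(entries, segment);
    }
    const { putEntries, putMetadata, hasEntries, hasMetadata } = denormalizeEntries(entries);
    const promises = [];
    if (hasEntries) {
        promises.push(durableStore.setEntries(putEntries, segment));
    }
    // Metadata-only writes (pending records) happen only when the SQL store is
    // present and supports batch updates, matching the guard in the diff.
    if (hasMetadata && sqlStore !== undefined && sqlStore.isBatchUpdateSupported()) {
        promises.push(durableStore.setMetadata(putMetadata, segment));
    }
    return Promise.all(promises).then(() => {});
}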
@@ -3986,10 +4014,20 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
  // this is determined by the plugin supporting update batch calls before it gets to this HOF.
  // so we only need to check one entry to confirm this for performance
  if (firstEntry.data !== undefined) {
+ const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
  operationsWithDenormedRecords.push({
  ...operation,
- entries: denormalizeEntries(operation.entries),
+ entries: putEntries,
  });
+ if (hasMetadata &&
+ sqlStore !== undefined &&
+ sqlStore.isBatchUpdateSupported() === true) {
+ operationsWithDenormedRecords.push({
+ ...operation,
+ entries: putMetadata,
+ type: 'setMetadata',
+ });
+ }
  }
  else {
  operationsWithDenormedRecords.push(operation);
@@ -4001,10 +4039,20 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
  operationsWithDenormedRecords.push(operation);
  continue;
  }
+ const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
  operationsWithDenormedRecords.push({
  ...operation,
- entries: denormalizeEntries(operation.entries),
+ entries: putEntries,
  });
+ if (hasMetadata &&
+ sqlStore !== undefined &&
+ sqlStore.isBatchUpdateSupported() === true) {
+ operationsWithDenormedRecords.push({
+ ...operation,
+ entries: putMetadata,
+ type: 'setMetadata',
+ });
+ }
  }
  return durableStore.batchOperations(operationsWithDenormedRecords);
  };
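
Put together, a single setEntries batch operation can now fan out into two operations when some records are still pending denormalization but carry metadata updates. An illustrative before/after of the operations array; the segment name and entry contents are placeholders, only the setEntries/setMetadata pairing comes from the diff.

// Input operation handed to batchOperations (placeholder contents).
const operation = { type: 'setEntries', segment: 'DEFAULT', entries: { /* record entries */ } };

// What the HOF forwards to durableStore.batchOperations after this change:
const forwarded = [
    { ...operation, entries: { /* putEntries: denormalized records and errors */ } },
    // appended only when hasMetadata is true and sqlStore.isBatchUpdateSupported():
    { ...operation, entries: { /* putMetadata: metadata-only updates for pending records */ }, type: 'setMetadata' },
];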
@@ -4099,4 +4147,4 @@ function ldsRuntimeBridge() {
  }

  export { ldsRuntimeBridge as default };
- // version: 1.287.0-dev1-26543bf66
+ // version: 1.287.0-dev10-0c7922050
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@salesforce/lds-runtime-bridge",
- "version": "1.287.0-dev1",
+ "version": "1.287.0-dev10",
  "license": "SEE LICENSE IN LICENSE.txt",
  "description": "LDS runtime for bridge.app.",
  "main": "dist/ldsRuntimeBridge.js",
@@ -34,17 +34,17 @@
  "release:corejar": "yarn build && ../core-build/scripts/core.js --name=lds-runtime-bridge"
  },
  "dependencies": {
- "@salesforce/lds-adapters-uiapi": "^1.287.0-dev1",
- "@salesforce/lds-instrumentation": "^1.287.0-dev1",
+ "@salesforce/lds-adapters-uiapi": "^1.287.0-dev10",
+ "@salesforce/lds-instrumentation": "^1.287.0-dev10",
  "@salesforce/user": "0.0.21",
  "o11y": "250.7.0"
  },
  "devDependencies": {
- "@salesforce/lds-drafts-adapters-uiapi": "^1.287.0-dev1",
- "@salesforce/lds-network-aura": "^1.287.0-dev1",
- "@salesforce/lds-runtime-aura": "^1.287.0-dev1",
- "@salesforce/lds-store-nimbus": "^1.287.0-dev1",
- "@salesforce/nimbus-plugin-lds": "^1.287.0-dev1",
+ "@salesforce/lds-drafts-adapters-uiapi": "^1.287.0-dev10",
+ "@salesforce/lds-network-aura": "^1.287.0-dev10",
+ "@salesforce/lds-runtime-aura": "^1.287.0-dev10",
+ "@salesforce/lds-store-nimbus": "^1.287.0-dev10",
+ "@salesforce/nimbus-plugin-lds": "^1.287.0-dev10",
  "babel-plugin-dynamic-import-node": "^2.3.3"
  },
  "luvioBundlesize": [