@salesforce/lds-runtime-mobile 1.287.0-dev1 → 1.287.0-dev11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.js +101 -41
- package/dist/types/priming/SqlitePrimingStore.d.ts +2 -2
- package/package.json +18 -18
- package/sfdc/main.js +101 -41
- package/sfdc/types/priming/SqlitePrimingStore.d.ts +2 -2
package/dist/main.js
CHANGED
@@ -40,6 +40,8 @@ import eagerEvalValidAt from '@salesforce/gate/lds.eagerEvalValidAt';
 import eagerEvalStaleWhileRevalidate from '@salesforce/gate/lds.eagerEvalStaleWhileRevalidate';
 import eagerEvalDefaultCachePolicy from '@salesforce/gate/lds.eagerEvalDefaultCachePolicy';
 import ldsPrimingGraphqlBatch from '@salesforce/gate/lds.primingGraphqlBatch';
+import aggressiveTrimGate from '@salesforce/gate/lds.aggressiveTrim';
+import aggressiveTrimLowLimitGate from '@salesforce/gate/lds.aggressiveTrimLowLimit';
 import ldsMetadataRefreshEnabled from '@salesforce/gate/lds.metadataRefreshEnabled';
 
 /**
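The two new gate imports follow the pattern of the existing lds.* gates in this bundle: each gate module exposes an isOpen({ fallback }) check that is read at runtime. A minimal sketch of how these two gates are consumed later in this same diff (the gate names and calls come from the diff; the wrapper constants are illustrative only):

    // gates stay closed unless the platform enables them
    const useLowTrimLimit = aggressiveTrimLowLimitGate.isOpen({ fallback: false }); // picks a 20000 instead of 200000 record ceiling
    const useAggressiveTrim = aggressiveTrimGate.isOpen({ fallback: false });       // installs the aggressiveTrim custom trim policy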
@@ -1189,7 +1191,7 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
         // because we do not want some other code attempting to use the
         // in-memory values before the durable store onChanged handler
         // calls back and revives the values to in-memory
-        environment.
+        environment.storeDealloc(key);
     };
     const publishStoreMetadata = function (recordId, storeMetadata) {
         validateNotDisposed();
@@ -12577,7 +12579,7 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
         const referencedRecord = referencedRecords.get(key);
         recordFields[relationshipName] = {
             displayValue: null,
-            value: createLink(key),
+            value: createLink$1(key),
         };
         // for custom objects, we select the 'Name' field
         // otherwise we check the object info for name fields.
@@ -12605,7 +12607,7 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
     }
     return { ...record, fields: recordFields };
 }
-function createLink(key) {
+function createLink$1(key) {
     return { __ref: key };
 }
 function getReferenceInfoForKey(fieldName, field, luvio, objectInfo) {
@@ -12620,7 +12622,7 @@ function getReferenceInfoForKey(fieldName, field, luvio, objectInfo) {
         referenceFieldName: relationshipName,
         field: {
             displayValue: null,
-            value: createLink(key),
+            value: createLink$1(key),
         },
     };
 }
@@ -13083,6 +13085,9 @@ function isBackdatingFieldEditable(objectInfo, backdatingFieldName, attributeNam
         !draftActionFieldNames.includes(backdatingFieldName));
 }
 
+function createLink(key) {
+    return { __ref: key };
+}
 /**
  * Records are stored in the durable store with scalar fields denormalized. This function takes that denoramlized
  * durable store record representation and normalizes it back out into the format the the luvio store expects it
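The draft-related link builder keeps its behavior but is now emitted as createLink$1, which is the kind of suffix a bundler adds when a second module-level createLink (the one introduced just above, used by normalizeRecordFields below) lands in the same output file. Both helpers produce the same Luvio reference node; the record id in the key below is purely illustrative:

    // { __ref } link node pointing at another store entry
    createLink('UiApi::RecordRepresentation:001EXAMPLEID__fields__Name');
    // -> { __ref: 'UiApi::RecordRepresentation:001EXAMPLEID__fields__Name' }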
@@ -13093,26 +13098,25 @@ function isBackdatingFieldEditable(objectInfo, backdatingFieldName, attributeNam
 function normalizeRecordFields(key, entry) {
     const { data: record } = entry;
     const { fields, links } = record;
-    const
+    const missingFieldLinks = keys$3(links);
+    const fieldNames = keys$3(fields);
     const normalizedFields = {};
     const returnEntries = {};
-
-
+    // restore fields
+    for (let i = 0, len = fieldNames.length; i < len; i++) {
+        const fieldName = fieldNames[i];
         const field = fields[fieldName];
+        const fieldKey = buildRecordFieldStoreKey(key, fieldName);
+        returnEntries[fieldKey] = { data: field };
+        normalizedFields[fieldName] = createLink(fieldKey);
+    }
+    // restore missing fields
+    for (let i = 0, len = missingFieldLinks.length; i < len; i++) {
+        const fieldName = missingFieldLinks[i];
         const link = links[fieldName];
-        // field is undefined for missing links
-        if (field !== undefined) {
-            const fieldKey = buildRecordFieldStoreKey(key, fieldName);
-            returnEntries[fieldKey] = { data: field };
-        }
-        // we need to restore the undefined __ref node as it is
-        // lost during serialization
         if (link.isMissing === true) {
             normalizedFields[fieldName] = { ...link, __ref: undefined };
         }
-        else {
-            normalizedFields[fieldName] = link;
-        }
     }
     returnEntries[key] = {
         data: assign$3(record, { fields: normalizedFields }),
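normalizeRecordFields no longer depends on the durable entry carrying a pre-built link for every present field: each entry in fields gets its own field store key and store entry, and the parent record links to it; the links map is only consulted to restore missing-field markers whose __ref: undefined is lost during serialization. A condensed sketch of the new flow (Object.keys stands in for the bundle's keys$3 alias, and metadata handling is omitted):

    function normalizeRecordFieldsSketch(key, entry) {
        const { fields, links } = entry.data;
        const returnEntries = {};
        const normalizedFields = {};
        // every denormalized field becomes its own store entry plus a __ref link
        for (const fieldName of Object.keys(fields)) {
            const fieldKey = buildRecordFieldStoreKey(key, fieldName);
            returnEntries[fieldKey] = { data: fields[fieldName] };
            normalizedFields[fieldName] = createLink(fieldKey);
        }
        // missing fields keep their marker, with the undefined __ref restored
        for (const fieldName of Object.keys(links)) {
            if (links[fieldName].isMissing === true) {
                normalizedFields[fieldName] = { ...links[fieldName], __ref: undefined };
            }
        }
        returnEntries[key] = { data: { ...entry.data, fields: normalizedFields } };
        return returnEntries;
    }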
@@ -13174,7 +13178,7 @@ function buildDurableRecordRepresentation(normalizedRecord, records, pendingEntr
             }
         }
         // we want to preserve fields that are missing nodes
-        if (
+        if (field.isMissing === true) {
             links[fieldName] = field;
         }
     }
@@ -13193,7 +13197,7 @@ function getDenormalizedKey(originalKey, recordId, luvio) {
     }
     return keyBuilderRecord(luvio, { recordId });
 }
-function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecords, getStoreMetadata, getStore) {
+function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecords, getStoreMetadata, getStore, sqlStore) {
     const getEntries = function (entries, segment) {
         // this HOF only inspects records in the default segment
         if (segment !== DefaultDurableSegment) {
@@ -13255,7 +13259,10 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
         });
     };
     const denormalizeEntries = function (entries) {
+        let hasEntries = false;
+        let hasMetadata = false;
         const putEntries = create$3(null);
+        const putMetadata = create$3(null);
         const keys$1 = keys$3(entries);
         const putRecords = {};
         const putRecordViews = {};
@@ -13298,6 +13305,7 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
                 putRecords[recordId] = true;
             }
             if (isStoreRecordError(record)) {
+                hasEntries = true;
                 putEntries[recordKey] = value;
                 continue;
             }
@@ -13310,24 +13318,43 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
                 }
                 const denormalizedRecord = buildDurableRecordRepresentation(record, storeRecords, recordEntries, store);
                 if (denormalizedRecord !== undefined) {
+                    hasEntries = true;
                     putEntries[recordKey] = {
                         data: denormalizedRecord,
                         metadata,
                     };
+                    // if undefined then it is pending
+                    // we should still update metadata on pending records
+                }
+                else {
+                    hasMetadata = true;
+                    metadata.expirationTimestamp = metadata.ingestionTimestamp;
+                    putMetadata[recordKey] = {
+                        metadata,
+                    };
                 }
             }
             else {
+                hasEntries = true;
                 putEntries[key] = value;
             }
         }
-        return putEntries;
+        return { putEntries, putMetadata, hasEntries, hasMetadata };
     };
     const setEntries = function (entries, segment) {
         if (segment !== DefaultDurableSegment) {
            return durableStore.setEntries(entries, segment);
         }
-        const putEntries = denormalizeEntries(entries);
-
+        const { putEntries, putMetadata, hasEntries, hasMetadata } = denormalizeEntries(entries);
+        const promises = [
+            hasEntries ? durableStore.setEntries(putEntries, segment) : undefined,
+        ];
+        if (sqlStore !== undefined && sqlStore.isBatchUpdateSupported()) {
+            promises.push(hasMetadata && sqlStore !== undefined
+                ? durableStore.setMetadata(putMetadata, segment)
+                : undefined);
+        }
+        return Promise.all(promises).then(() => { });
     };
     const batchOperations = function (operations) {
        const operationsWithDenormedRecords = [];
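denormalizeEntries now reports what it produced instead of returning the entry map directly: denormalized records and errors land in putEntries, while records whose denormalization is still pending get a metadata-only refresh (expirationTimestamp reset to ingestionTimestamp) collected in putMetadata, and the two has* flags tell the caller whether each write is needed. setEntries then fans out to at most two durable-store writes, guarded by the store's batch-update capability. A hedged sketch of that consuming side, reusing the names from the hunk above:

    // sketch of the new write path; shapes follow the hunk above, error handling omitted
    const { putEntries, putMetadata, hasEntries, hasMetadata } = denormalizeEntries(entries);
    const writes = [];
    if (hasEntries) {
        writes.push(durableStore.setEntries(putEntries, segment));
    }
    if (hasMetadata && sqlStore !== undefined && sqlStore.isBatchUpdateSupported()) {
        // metadata-only refresh for records whose denormalization is still pending
        writes.push(durableStore.setMetadata(putMetadata, segment));
    }
    return Promise.all(writes).then(() => undefined);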
@@ -13344,10 +13371,20 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
                 // this is determined by the plugin supporting update batch calls before it gets to this HOF.
                 // so we only need to check one entry to confirm this for performance
                 if (firstEntry.data !== undefined) {
+                    const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
                     operationsWithDenormedRecords.push({
                         ...operation,
-                        entries:
+                        entries: putEntries,
                     });
+                    if (hasMetadata &&
+                        sqlStore !== undefined &&
+                        sqlStore.isBatchUpdateSupported() === true) {
+                        operationsWithDenormedRecords.push({
+                            ...operation,
+                            entries: putMetadata,
+                            type: 'setMetadata',
+                        });
+                    }
                 }
                 else {
                     operationsWithDenormedRecords.push(operation);
@@ -13359,10 +13396,20 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
                 operationsWithDenormedRecords.push(operation);
                 continue;
             }
+            const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
             operationsWithDenormedRecords.push({
                 ...operation,
-                entries:
+                entries: putEntries,
             });
+            if (hasMetadata &&
+                sqlStore !== undefined &&
+                sqlStore.isBatchUpdateSupported() === true) {
+                operationsWithDenormedRecords.push({
+                    ...operation,
+                    entries: putMetadata,
+                    type: 'setMetadata',
+                });
+            }
         }
         return durableStore.batchOperations(operationsWithDenormedRecords);
     };
@@ -15962,9 +16009,11 @@ function enableObjectInfoCaching(env, ensureObjectInfoCached) {
         let apiName = null;
         let objectInfo;
         if (dataIsRecord(key)) {
+            incomingRecords.delete(key);
             apiName = data.apiName;
         }
         else if (dataIsObjectInfo(key)) {
+            incomingObjectInfos.delete(key);
             apiName = data.apiName;
             objectInfo = data;
         }
@@ -16485,6 +16534,9 @@ class NimbusSqliteStore {
     isEvalSupported() {
         return true;
     }
+    isBatchUpdateSupported() {
+        return this.supportsBatchUpdates;
+    }
     query(sql, params) {
         return new Promise((resolve, reject) => {
             this.plugin.query(sql, params, (result) => {
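NimbusSqliteStore now exposes its supportsBatchUpdates flag through isBatchUpdateSupported(), and that is the capability the denormalizing durable store checks before emitting the separate metadata writes shown above, both in setEntries and as an extra batch operation. The guard, condensed from the batchOperations hunks:

    // only add a metadata-only operation when the SQLite store can batch it
    if (hasMetadata && sqlStore !== undefined && sqlStore.isBatchUpdateSupported() === true) {
        operationsWithDenormedRecords.push({ ...operation, entries: putMetadata, type: 'setMetadata' });
    }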
@@ -17649,7 +17701,6 @@ class RecordLoaderGraphQL {
         }, {});
         fields['Id'] = { value: id, displayValue: null };
         fields['RecordTypeId'] = { value: recordTypeId, displayValue: null };
-        const links = this.generateFieldLinks(id, [...requestedFields, 'Id', 'RecordTypeId']);
         return {
             apiName: node[`${requiredPrefix}ApiName`],
             childRelationships: {},
@@ -17671,18 +17722,9 @@ class RecordLoaderGraphQL {
                 : null,
             weakEtag: node[`${requiredPrefix}WeakEtag`],
             fields,
-            links,
+            links: {},
         };
     }
-    generateFieldLinks(id, fields) {
-        const links = {};
-        for (const field of fields) {
-            links[field] = {
-                __ref: `UiApi::RecordRepresentation:${id}__fields__${field}`,
-            };
-        }
-        return links;
-    }
 }
 
 class RecordIngestor {
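Records assembled by RecordLoaderGraphQL no longer carry a pre-generated links map (they now ship links: {}); the removed generateFieldLinks helper had been hard-coding per-field store keys, and with the normalizeRecordFields change earlier in this diff those keys are derived from fields when the record is normalized back into the store, so pre-computing them here became redundant. The key shape the removed helper emitted, for reference:

    // key format previously emitted by generateFieldLinks for each requested field
    const fieldKey = `UiApi::RecordRepresentation:${id}__fields__${field}`;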
@@ -17913,6 +17955,20 @@ class SqlitePrimingStore {
     async writeBatch(records, overwrite) {
         const idsToPrime = new Set();
         const written = [];
+        if (overwrite === true) {
+            // if overwrite is true we need to raise change notifications so use the batchOperations
+            const operations = {};
+            for (const { record, metadata } of records) {
+                const key = keyBuilderRecord(this.getLuvio(), { recordId: record.id });
+                idsToPrime.add(record.id);
+                operations[key] = {
+                    data: record,
+                    metadata: { ...metadata, metadataVersion: DURABLE_METADATA_VERSION },
+                };
+            }
+            await this.store.setEntries(operations, DefaultDurableSegment);
+            return { written: Array.from(idsToPrime), conflicted: [], errors: [] };
+        }
         const statement = `${overwrite ? 'REPLACE' : 'INSERT or IGNORE'} INTO lds_data (key, data, metadata) VALUES ${records
             .map((_) => `(?,?,?)`)
             .join(',')} returning key;`;
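When overwrite is requested, writeBatch now bypasses the raw REPLACE statement and routes the records through the durable store's setEntries, so that onChanged notifications fire for anything already revived in memory. A trimmed sketch of that path (keyBuilderRecord, DURABLE_METADATA_VERSION, and DefaultDurableSegment are the bundle's own symbols; the return value matches the WriteResult shape declared in the .d.ts further below):

    // overwrite path: write through the durable store so change notifications are raised
    const operations = {};
    for (const { record, metadata } of records) {
        const key = keyBuilderRecord(luvio, { recordId: record.id });
        operations[key] = {
            data: record,
            metadata: { ...metadata, metadataVersion: DURABLE_METADATA_VERSION },
        };
    }
    await durableStore.setEntries(operations, DefaultDurableSegment);
    return { written: records.map(({ record }) => record.id), conflicted: [], errors: [] };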
@@ -18028,7 +18084,9 @@ function primingSessionFactory(config) {
     return instrumentPrimingSession(session);
 }
 
-const DEFAULT_MAX_RECORD_COUNT =
+const DEFAULT_MAX_RECORD_COUNT = aggressiveTrimLowLimitGate.isOpen({ fallback: false })
+    ? 20000
+    : 200000;
 const DEFAULT_MAX_BATCH_SIZE = 200;
 async function aggressiveTrim(data, deallocateFn, options = {}) {
     const maxStoreRecords = options.maxStoreRecords !== undefined ? options.maxStoreRecords : DEFAULT_MAX_RECORD_COUNT;
@@ -18139,9 +18197,11 @@ function getRuntime() {
     // user id centric record ID generator
     const { newRecordId, isGenerated } = recordIdGenerator(userId);
     // non-draft-aware base services
-
-
-
+    let storeOptions = {};
+    if (aggressiveTrimGate.isOpen({ fallback: false })) {
+        storeOptions.customTrimPolicy = aggressiveTrim;
+    }
+    const store = new InMemoryStore(storeOptions);
     lazyNetworkAdapter = platformNetworkAdapter(makeNetworkAdapterChunkRecordFields(NimbusNetworkAdapter, {
         reportChunkCandidateUrlLength: reportChunkCandidateUrlLength,
     }));
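Together the two gates control how hard the in-memory store trims: lds.aggressiveTrimLowLimit drops the default record ceiling from 200000 to 20000, and lds.aggressiveTrim opts the store into the aggressiveTrim custom trim policy. The wiring, restated from the two hunks above:

    const DEFAULT_MAX_RECORD_COUNT = aggressiveTrimLowLimitGate.isOpen({ fallback: false })
        ? 20000   // low-limit gate open: much smaller ceiling
        : 200000; // default ceiling

    const storeOptions = {};
    if (aggressiveTrimGate.isOpen({ fallback: false })) {
        // aggressiveTrim(data, deallocateFn, options) honors options.maxStoreRecords,
        // falling back to DEFAULT_MAX_RECORD_COUNT
        storeOptions.customTrimPolicy = aggressiveTrim;
    }
    const store = new InMemoryStore(storeOptions);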
@@ -18168,7 +18228,7 @@ function getRuntime() {
     let getIngestRecords;
     let getIngestMetadata;
     let getIngestStore;
-    const recordDenormingStore = makeRecordDenormalizingDurableStore(lazyLuvio, lazyBaseDurableStore, () => (getIngestRecords !== undefined ? getIngestRecords() : {}), () => (getIngestMetadata !== undefined ? getIngestMetadata() : {}), () => (getIngestStore !== undefined ? getIngestStore() : undefined));
+    const recordDenormingStore = makeRecordDenormalizingDurableStore(lazyLuvio, lazyBaseDurableStore, () => (getIngestRecords !== undefined ? getIngestRecords() : {}), () => (getIngestMetadata !== undefined ? getIngestMetadata() : {}), () => (getIngestStore !== undefined ? getIngestStore() : undefined), lazyBaseDurableStore);
     const baseEnv = new Environment(store, lazyNetworkAdapter);
     const gqlEnv = makeEnvironmentGraphqlAware(baseEnv);
     const durableEnv = makeDurable(gqlEnv, {
@@ -18281,4 +18341,4 @@ register({
 });
 
 export { O11Y_NAMESPACE_LDS_MOBILE, getRuntime, registerReportObserver, reportGraphqlQueryParseError };
-// version: 1.287.0-
+// version: 1.287.0-dev11-e0b0a8a5b2
package/dist/types/priming/SqlitePrimingStore.d.ts
CHANGED
@@ -1,10 +1,10 @@
 import type { PrimingStore, RecordWithMetadata, WriteResult } from '@salesforce/lds-priming';
 import type { Luvio } from '@luvio/engine';
-import type {
+import type { NimbusSqliteStore } from '@salesforce/lds-store-nimbus';
 export declare class SqlitePrimingStore implements PrimingStore {
     private readonly getLuvio;
     private readonly store;
-    constructor(getLuvio: () => Luvio, store:
+    constructor(getLuvio: () => Luvio, store: NimbusSqliteStore);
     readRecords(ids: string[]): Promise<RecordWithMetadata[]>;
     writeRecords(records: RecordWithMetadata[], overwrite: boolean): Promise<WriteResult>;
     private writeBatch;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@salesforce/lds-runtime-mobile",
-  "version": "1.287.0-
+  "version": "1.287.0-dev11",
   "license": "SEE LICENSE IN LICENSE.txt",
   "description": "LDS runtime for mobile/hybrid environments.",
   "main": "dist/main.js",
@@ -32,25 +32,25 @@
     "release:corejar": "yarn build && ../core-build/scripts/core.js --name=lds-runtime-mobile"
   },
   "dependencies": {
-    "@salesforce/lds-adapters-uiapi": "^1.287.0-
-    "@salesforce/lds-bindings": "^1.287.0-
-    "@salesforce/lds-instrumentation": "^1.287.0-
-    "@salesforce/lds-priming": "^1.287.0-
+    "@salesforce/lds-adapters-uiapi": "^1.287.0-dev11",
+    "@salesforce/lds-bindings": "^1.287.0-dev11",
+    "@salesforce/lds-instrumentation": "^1.287.0-dev11",
+    "@salesforce/lds-priming": "^1.287.0-dev11",
     "@salesforce/user": "0.0.21",
     "o11y": "250.7.0"
   },
   "devDependencies": {
-    "@salesforce/lds-adapters-graphql": "^1.287.0-
-    "@salesforce/lds-drafts": "^1.287.0-
-    "@salesforce/lds-drafts-adapters-uiapi": "^1.287.0-
-    "@salesforce/lds-graphql-eval": "^1.287.0-
-    "@salesforce/lds-network-adapter": "^1.287.0-
-    "@salesforce/lds-network-nimbus": "^1.287.0-
-    "@salesforce/lds-store-binary": "^1.287.0-
-    "@salesforce/lds-store-nimbus": "^1.287.0-
-    "@salesforce/lds-store-sql": "^1.287.0-
-    "@salesforce/lds-utils-adapters": "^1.287.0-
-    "@salesforce/nimbus-plugin-lds": "^1.287.0-
+    "@salesforce/lds-adapters-graphql": "^1.287.0-dev11",
+    "@salesforce/lds-drafts": "^1.287.0-dev11",
+    "@salesforce/lds-drafts-adapters-uiapi": "^1.287.0-dev11",
+    "@salesforce/lds-graphql-eval": "^1.287.0-dev11",
+    "@salesforce/lds-network-adapter": "^1.287.0-dev11",
+    "@salesforce/lds-network-nimbus": "^1.287.0-dev11",
+    "@salesforce/lds-store-binary": "^1.287.0-dev11",
+    "@salesforce/lds-store-nimbus": "^1.287.0-dev11",
+    "@salesforce/lds-store-sql": "^1.287.0-dev11",
+    "@salesforce/lds-utils-adapters": "^1.287.0-dev11",
+    "@salesforce/nimbus-plugin-lds": "^1.287.0-dev11",
     "babel-plugin-dynamic-import-node": "^2.3.3",
     "wait-for-expect": "^3.0.2"
   },
@@ -59,7 +59,7 @@
       "path": "./dist/main.js",
       "maxSize": {
         "none": "800 kB",
-        "min": "
+        "min": "400 kB",
         "compressed": "150 kB"
       }
     },
@@ -67,7 +67,7 @@
       "path": "./sfdc/main.js",
       "maxSize": {
         "none": "800 kB",
-        "min": "
+        "min": "400 kB",
         "compressed": "150 kB"
       }
     }
package/sfdc/main.js
CHANGED
@@ -40,6 +40,8 @@ import eagerEvalValidAt from '@salesforce/gate/lds.eagerEvalValidAt';
 import eagerEvalStaleWhileRevalidate from '@salesforce/gate/lds.eagerEvalStaleWhileRevalidate';
 import eagerEvalDefaultCachePolicy from '@salesforce/gate/lds.eagerEvalDefaultCachePolicy';
 import ldsPrimingGraphqlBatch from '@salesforce/gate/lds.primingGraphqlBatch';
+import aggressiveTrimGate from '@salesforce/gate/lds.aggressiveTrim';
+import aggressiveTrimLowLimitGate from '@salesforce/gate/lds.aggressiveTrimLowLimit';
 import ldsMetadataRefreshEnabled from '@salesforce/gate/lds.metadataRefreshEnabled';
 
 /**
@@ -1189,7 +1191,7 @@ function makeDurable(environment, { durableStore, instrumentation, useRevivingSt
         // because we do not want some other code attempting to use the
         // in-memory values before the durable store onChanged handler
         // calls back and revives the values to in-memory
-        environment.
+        environment.storeDealloc(key);
     };
     const publishStoreMetadata = function (recordId, storeMetadata) {
         validateNotDisposed();
@@ -12577,7 +12579,7 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
         const referencedRecord = referencedRecords.get(key);
         recordFields[relationshipName] = {
             displayValue: null,
-            value: createLink(key),
+            value: createLink$1(key),
         };
         // for custom objects, we select the 'Name' field
         // otherwise we check the object info for name fields.
@@ -12605,7 +12607,7 @@ function applyReferenceLinksToDraft(record, draftMetadata) {
     }
     return { ...record, fields: recordFields };
 }
-function createLink(key) {
+function createLink$1(key) {
     return { __ref: key };
 }
 function getReferenceInfoForKey(fieldName, field, luvio, objectInfo) {
@@ -12620,7 +12622,7 @@ function getReferenceInfoForKey(fieldName, field, luvio, objectInfo) {
         referenceFieldName: relationshipName,
         field: {
             displayValue: null,
-            value: createLink(key),
+            value: createLink$1(key),
         },
     };
 }
@@ -13083,6 +13085,9 @@ function isBackdatingFieldEditable(objectInfo, backdatingFieldName, attributeNam
         !draftActionFieldNames.includes(backdatingFieldName));
 }
 
+function createLink(key) {
+    return { __ref: key };
+}
 /**
  * Records are stored in the durable store with scalar fields denormalized. This function takes that denoramlized
  * durable store record representation and normalizes it back out into the format the the luvio store expects it
@@ -13093,26 +13098,25 @@ function isBackdatingFieldEditable(objectInfo, backdatingFieldName, attributeNam
 function normalizeRecordFields(key, entry) {
     const { data: record } = entry;
     const { fields, links } = record;
-    const
+    const missingFieldLinks = keys$3(links);
+    const fieldNames = keys$3(fields);
     const normalizedFields = {};
     const returnEntries = {};
-
-
+    // restore fields
+    for (let i = 0, len = fieldNames.length; i < len; i++) {
+        const fieldName = fieldNames[i];
         const field = fields[fieldName];
+        const fieldKey = buildRecordFieldStoreKey(key, fieldName);
+        returnEntries[fieldKey] = { data: field };
+        normalizedFields[fieldName] = createLink(fieldKey);
+    }
+    // restore missing fields
+    for (let i = 0, len = missingFieldLinks.length; i < len; i++) {
+        const fieldName = missingFieldLinks[i];
         const link = links[fieldName];
-        // field is undefined for missing links
-        if (field !== undefined) {
-            const fieldKey = buildRecordFieldStoreKey(key, fieldName);
-            returnEntries[fieldKey] = { data: field };
-        }
-        // we need to restore the undefined __ref node as it is
-        // lost during serialization
         if (link.isMissing === true) {
             normalizedFields[fieldName] = { ...link, __ref: undefined };
         }
-        else {
-            normalizedFields[fieldName] = link;
-        }
     }
     returnEntries[key] = {
         data: assign$3(record, { fields: normalizedFields }),
@@ -13174,7 +13178,7 @@ function buildDurableRecordRepresentation(normalizedRecord, records, pendingEntr
             }
         }
         // we want to preserve fields that are missing nodes
-        if (
+        if (field.isMissing === true) {
             links[fieldName] = field;
         }
     }
@@ -13193,7 +13197,7 @@ function getDenormalizedKey(originalKey, recordId, luvio) {
     }
     return keyBuilderRecord(luvio, { recordId });
 }
-function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecords, getStoreMetadata, getStore) {
+function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecords, getStoreMetadata, getStore, sqlStore) {
     const getEntries = function (entries, segment) {
         // this HOF only inspects records in the default segment
         if (segment !== DefaultDurableSegment) {
@@ -13255,7 +13259,10 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
         });
     };
     const denormalizeEntries = function (entries) {
+        let hasEntries = false;
+        let hasMetadata = false;
         const putEntries = create$3(null);
+        const putMetadata = create$3(null);
         const keys$1 = keys$3(entries);
         const putRecords = {};
         const putRecordViews = {};
@@ -13298,6 +13305,7 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
                 putRecords[recordId] = true;
             }
             if (isStoreRecordError(record)) {
+                hasEntries = true;
                 putEntries[recordKey] = value;
                 continue;
             }
@@ -13310,24 +13318,43 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
                 }
                 const denormalizedRecord = buildDurableRecordRepresentation(record, storeRecords, recordEntries, store);
                 if (denormalizedRecord !== undefined) {
+                    hasEntries = true;
                     putEntries[recordKey] = {
                         data: denormalizedRecord,
                         metadata,
                     };
+                    // if undefined then it is pending
+                    // we should still update metadata on pending records
+                }
+                else {
+                    hasMetadata = true;
+                    metadata.expirationTimestamp = metadata.ingestionTimestamp;
+                    putMetadata[recordKey] = {
+                        metadata,
+                    };
                 }
             }
             else {
+                hasEntries = true;
                 putEntries[key] = value;
             }
         }
-        return putEntries;
+        return { putEntries, putMetadata, hasEntries, hasMetadata };
    };
     const setEntries = function (entries, segment) {
         if (segment !== DefaultDurableSegment) {
            return durableStore.setEntries(entries, segment);
         }
-        const putEntries = denormalizeEntries(entries);
-
+        const { putEntries, putMetadata, hasEntries, hasMetadata } = denormalizeEntries(entries);
+        const promises = [
+            hasEntries ? durableStore.setEntries(putEntries, segment) : undefined,
+        ];
+        if (sqlStore !== undefined && sqlStore.isBatchUpdateSupported()) {
+            promises.push(hasMetadata && sqlStore !== undefined
+                ? durableStore.setMetadata(putMetadata, segment)
+                : undefined);
+        }
+        return Promise.all(promises).then(() => { });
     };
     const batchOperations = function (operations) {
        const operationsWithDenormedRecords = [];
@@ -13344,10 +13371,20 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
                 // this is determined by the plugin supporting update batch calls before it gets to this HOF.
                 // so we only need to check one entry to confirm this for performance
                 if (firstEntry.data !== undefined) {
+                    const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
                     operationsWithDenormedRecords.push({
                         ...operation,
-                        entries:
+                        entries: putEntries,
                     });
+                    if (hasMetadata &&
+                        sqlStore !== undefined &&
+                        sqlStore.isBatchUpdateSupported() === true) {
+                        operationsWithDenormedRecords.push({
+                            ...operation,
+                            entries: putMetadata,
+                            type: 'setMetadata',
+                        });
+                    }
                 }
                 else {
                     operationsWithDenormedRecords.push(operation);
@@ -13359,10 +13396,20 @@ function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecord
                 operationsWithDenormedRecords.push(operation);
                 continue;
             }
+            const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
             operationsWithDenormedRecords.push({
                 ...operation,
-                entries:
+                entries: putEntries,
             });
+            if (hasMetadata &&
+                sqlStore !== undefined &&
+                sqlStore.isBatchUpdateSupported() === true) {
+                operationsWithDenormedRecords.push({
+                    ...operation,
+                    entries: putMetadata,
+                    type: 'setMetadata',
+                });
+            }
         }
         return durableStore.batchOperations(operationsWithDenormedRecords);
     };
@@ -15962,9 +16009,11 @@ function enableObjectInfoCaching(env, ensureObjectInfoCached) {
         let apiName = null;
         let objectInfo;
         if (dataIsRecord(key)) {
+            incomingRecords.delete(key);
             apiName = data.apiName;
         }
         else if (dataIsObjectInfo(key)) {
+            incomingObjectInfos.delete(key);
             apiName = data.apiName;
             objectInfo = data;
         }
@@ -16485,6 +16534,9 @@ class NimbusSqliteStore {
     isEvalSupported() {
         return true;
     }
+    isBatchUpdateSupported() {
+        return this.supportsBatchUpdates;
+    }
     query(sql, params) {
         return new Promise((resolve, reject) => {
             this.plugin.query(sql, params, (result) => {
@@ -17649,7 +17701,6 @@ class RecordLoaderGraphQL {
         }, {});
         fields['Id'] = { value: id, displayValue: null };
         fields['RecordTypeId'] = { value: recordTypeId, displayValue: null };
-        const links = this.generateFieldLinks(id, [...requestedFields, 'Id', 'RecordTypeId']);
         return {
             apiName: node[`${requiredPrefix}ApiName`],
             childRelationships: {},
@@ -17671,18 +17722,9 @@ class RecordLoaderGraphQL {
                 : null,
             weakEtag: node[`${requiredPrefix}WeakEtag`],
             fields,
-            links,
+            links: {},
         };
     }
-    generateFieldLinks(id, fields) {
-        const links = {};
-        for (const field of fields) {
-            links[field] = {
-                __ref: `UiApi::RecordRepresentation:${id}__fields__${field}`,
-            };
-        }
-        return links;
-    }
 }
 
 class RecordIngestor {
@@ -17913,6 +17955,20 @@ class SqlitePrimingStore {
     async writeBatch(records, overwrite) {
         const idsToPrime = new Set();
         const written = [];
+        if (overwrite === true) {
+            // if overwrite is true we need to raise change notifications so use the batchOperations
+            const operations = {};
+            for (const { record, metadata } of records) {
+                const key = keyBuilderRecord(this.getLuvio(), { recordId: record.id });
+                idsToPrime.add(record.id);
+                operations[key] = {
+                    data: record,
+                    metadata: { ...metadata, metadataVersion: DURABLE_METADATA_VERSION },
+                };
+            }
+            await this.store.setEntries(operations, DefaultDurableSegment);
+            return { written: Array.from(idsToPrime), conflicted: [], errors: [] };
+        }
         const statement = `${overwrite ? 'REPLACE' : 'INSERT or IGNORE'} INTO lds_data (key, data, metadata) VALUES ${records
             .map((_) => `(?,?,?)`)
             .join(',')} returning key;`;
@@ -18028,7 +18084,9 @@ function primingSessionFactory(config) {
     return instrumentPrimingSession(session);
 }
 
-const DEFAULT_MAX_RECORD_COUNT =
+const DEFAULT_MAX_RECORD_COUNT = aggressiveTrimLowLimitGate.isOpen({ fallback: false })
+    ? 20000
+    : 200000;
 const DEFAULT_MAX_BATCH_SIZE = 200;
 async function aggressiveTrim(data, deallocateFn, options = {}) {
     const maxStoreRecords = options.maxStoreRecords !== undefined ? options.maxStoreRecords : DEFAULT_MAX_RECORD_COUNT;
@@ -18139,9 +18197,11 @@ function getRuntime() {
     // user id centric record ID generator
     const { newRecordId, isGenerated } = recordIdGenerator(userId);
     // non-draft-aware base services
-
-
-
+    let storeOptions = {};
+    if (aggressiveTrimGate.isOpen({ fallback: false })) {
+        storeOptions.customTrimPolicy = aggressiveTrim;
+    }
+    const store = new InMemoryStore(storeOptions);
     lazyNetworkAdapter = platformNetworkAdapter(makeNetworkAdapterChunkRecordFields(NimbusNetworkAdapter, {
         reportChunkCandidateUrlLength: reportChunkCandidateUrlLength,
     }));
@@ -18168,7 +18228,7 @@ function getRuntime() {
     let getIngestRecords;
     let getIngestMetadata;
     let getIngestStore;
-    const recordDenormingStore = makeRecordDenormalizingDurableStore(lazyLuvio, lazyBaseDurableStore, () => (getIngestRecords !== undefined ? getIngestRecords() : {}), () => (getIngestMetadata !== undefined ? getIngestMetadata() : {}), () => (getIngestStore !== undefined ? getIngestStore() : undefined));
+    const recordDenormingStore = makeRecordDenormalizingDurableStore(lazyLuvio, lazyBaseDurableStore, () => (getIngestRecords !== undefined ? getIngestRecords() : {}), () => (getIngestMetadata !== undefined ? getIngestMetadata() : {}), () => (getIngestStore !== undefined ? getIngestStore() : undefined), lazyBaseDurableStore);
     const baseEnv = new Environment(store, lazyNetworkAdapter);
     const gqlEnv = makeEnvironmentGraphqlAware(baseEnv);
     const durableEnv = makeDurable(gqlEnv, {
@@ -18281,4 +18341,4 @@ register({
 });
 
 export { O11Y_NAMESPACE_LDS_MOBILE, getRuntime, registerReportObserver, reportGraphqlQueryParseError };
-// version: 1.287.0-
+// version: 1.287.0-dev11-e0b0a8a5b2
package/sfdc/types/priming/SqlitePrimingStore.d.ts
CHANGED
@@ -1,10 +1,10 @@
 import type { PrimingStore, RecordWithMetadata, WriteResult } from '@salesforce/lds-priming';
 import type { Luvio } from '@luvio/engine';
-import type {
+import type { NimbusSqliteStore } from '@salesforce/lds-store-nimbus';
 export declare class SqlitePrimingStore implements PrimingStore {
     private readonly getLuvio;
     private readonly store;
-    constructor(getLuvio: () => Luvio, store:
+    constructor(getLuvio: () => Luvio, store: NimbusSqliteStore);
     readRecords(ids: string[]): Promise<RecordWithMetadata[]>;
     writeRecords(records: RecordWithMetadata[], overwrite: boolean): Promise<WriteResult>;
     private writeBatch;