@salesforce/lds-durable-records 0.1.0-dev1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE.txt
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
Terms of Use
|
|
2
|
+
|
|
3
|
+
Copyright 2022 Salesforce, Inc. All rights reserved.
|
|
4
|
+
|
|
5
|
+
These Terms of Use govern the download, installation, and/or use of this
|
|
6
|
+
software provided by Salesforce, Inc. ("Salesforce") (the "Software"), were
|
|
7
|
+
last updated on April 15, 2022, and constitute a legally binding
|
|
8
|
+
agreement between you and Salesforce. If you do not agree to these Terms of
|
|
9
|
+
Use, do not install or use the Software.
|
|
10
|
+
|
|
11
|
+
Salesforce grants you a worldwide, non-exclusive, no-charge, royalty-free
|
|
12
|
+
copyright license to reproduce, prepare derivative works of, publicly
|
|
13
|
+
display, publicly perform, sublicense, and distribute the Software and
|
|
14
|
+
derivative works subject to these Terms. These Terms shall be included in
|
|
15
|
+
all copies or substantial portions of the Software.
|
|
16
|
+
|
|
17
|
+
Subject to the limited rights expressly granted hereunder, Salesforce
|
|
18
|
+
reserves all rights, title, and interest in and to all intellectual
|
|
19
|
+
property subsisting in the Software. No rights are granted to you hereunder
|
|
20
|
+
other than as expressly set forth herein. Users residing in countries on
|
|
21
|
+
the United States Office of Foreign Assets Control sanction list, or which
|
|
22
|
+
are otherwise subject to a US export embargo, may not use the Software.
|
|
23
|
+
|
|
24
|
+
Implementation of the Software may require development work, for which you
|
|
25
|
+
are responsible. The Software may contain bugs, errors and
|
|
26
|
+
incompatibilities and is made available on an AS IS basis without support,
|
|
27
|
+
updates, or service level commitments.
|
|
28
|
+
|
|
29
|
+
Salesforce reserves the right at any time to modify, suspend, or
|
|
30
|
+
discontinue, the Software (or any part thereof) with or without notice. You
|
|
31
|
+
agree that Salesforce shall not be liable to you or to any third party for
|
|
32
|
+
any modification, suspension, or discontinuance.
|
|
33
|
+
|
|
34
|
+
You agree to defend Salesforce against any claim, demand, suit or
|
|
35
|
+
proceeding made or brought against Salesforce by a third party arising out
|
|
36
|
+
of or accruing from (a) your use of the Software, and (b) any application
|
|
37
|
+
you develop with the Software that infringes any copyright, trademark,
|
|
38
|
+
trade secret, trade dress, patent, or other intellectual property right of
|
|
39
|
+
any person or defames any person or violates their rights of publicity or
|
|
40
|
+
privacy (each a "Claim Against Salesforce"), and will indemnify Salesforce
|
|
41
|
+
from any damages, attorney fees, and costs finally awarded against
|
|
42
|
+
Salesforce as a result of, or for any amounts paid by Salesforce under a
|
|
43
|
+
settlement approved by you in writing of, a Claim Against Salesforce,
|
|
44
|
+
provided Salesforce (x) promptly gives you written notice of the Claim
|
|
45
|
+
Against Salesforce, (y) gives you sole control of the defense and
|
|
46
|
+
settlement of the Claim Against Salesforce (except that you may not settle
|
|
47
|
+
any Claim Against Salesforce unless it unconditionally releases Salesforce
|
|
48
|
+
of all liability), and (z) gives you all reasonable assistance, at your
|
|
49
|
+
expense.
|
|
50
|
+
|
|
51
|
+
WITHOUT LIMITING THE GENERALITY OF THE FOREGOING, THE SOFTWARE IS NOT
|
|
52
|
+
SUPPORTED AND IS PROVIDED "AS IS," WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
53
|
+
IMPLIED. IN NO EVENT SHALL SALESFORCE HAVE ANY LIABILITY FOR ANY DAMAGES,
|
|
54
|
+
INCLUDING, BUT NOT LIMITED TO, DIRECT, INDIRECT, SPECIAL, INCIDENTAL,
|
|
55
|
+
PUNITIVE, OR CONSEQUENTIAL DAMAGES, OR DAMAGES BASED ON LOST PROFITS, DATA,
|
|
56
|
+
OR USE, IN CONNECTION WITH THE SOFTWARE, HOWEVER CAUSED AND WHETHER IN
|
|
57
|
+
CONTRACT, TORT, OR UNDER ANY OTHER THEORY OF LIABILITY, WHETHER OR NOT YOU
|
|
58
|
+
HAVE BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
|
|
59
|
+
|
|
60
|
+
These Terms of Use shall be governed exclusively by the internal laws of
|
|
61
|
+
the State of California, without regard to its conflicts of laws
|
|
62
|
+
rules. Each party hereby consents to the exclusive jurisdiction of the
|
|
63
|
+
state and federal courts located in San Francisco County, California to
|
|
64
|
+
adjudicate any dispute arising out of or relating to these Terms of Use and
|
|
65
|
+
the download, installation, and/or use of the Software. Except as expressly
|
|
66
|
+
stated herein, these Terms of Use constitute the entire agreement between
|
|
67
|
+
the parties, and supersede all prior and contemporaneous agreements,
|
|
68
|
+
proposals, or representations, written or oral, concerning their subject
|
|
69
|
+
matter. No modification, amendment, or waiver of any provision of these
|
|
70
|
+
Terms of Use shall be effective unless it is by an update to these Terms of
|
|
71
|
+
Use that Salesforce makes available, or is in writing and signed by the
|
|
72
|
+
party against whom the modification, amendment, or waiver is to be
|
|
73
|
+
asserted.
|
|
74
|
+
|
|
75
|
+
Data Privacy: Salesforce may collect, process, and store device,
|
|
76
|
+
system, and other information related to your use of the Software. This
|
|
77
|
+
information includes, but is not limited to, IP address, user metrics, and
|
|
78
|
+
other data ("Usage Data"). Salesforce may use Usage Data for analytics,
|
|
79
|
+
product development, and marketing purposes. You acknowledge that files
|
|
80
|
+
generated in conjunction with the Software may contain sensitive or
|
|
81
|
+
confidential data, and you are solely responsible for anonymizing and
|
|
82
|
+
protecting such data.
|
|
@@ -0,0 +1,363 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Copyright (c) 2022, Salesforce, Inc.,
|
|
3
|
+
* All rights reserved.
|
|
4
|
+
* For full license text, see the LICENSE.txt file
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import { DefaultDurableSegment, DURABLE_METADATA_VERSION } from '@luvio/environments';
|
|
8
|
+
import { extractRecordIdFromStoreKey, RECORD_VIEW_ENTITY_ID_PREFIX, isStoreKeyRecordViewEntity, keyBuilderRecord, RECORD_ID_PREFIX, RECORD_FIELDS_KEY_JUNCTION } from '@salesforce/lds-adapters-uiapi';
|
|
9
|
+
|
|
10
|
+
// Local aliases for the Object statics used throughout this module.
const { keys, values, create, assign, freeze, entries } = Object;
|
|
11
|
+
|
|
12
|
+
/**
 * Builds the store key under which a single record field entry lives:
 * "<recordKey><junction><fieldName>".
 */
function buildRecordFieldStoreKey(recordKey, fieldName) {
    return recordKey + RECORD_FIELDS_KEY_JUNCTION + fieldName;
}
|
|
15
|
+
/**
 * True when the key contains a record-id prefix but no field junction,
 * i.e. it addresses a record entry rather than one of its field entries.
 */
function isStoreKeyRecordId(key) {
    return key.includes(RECORD_ID_PREFIX) && !key.includes(RECORD_FIELDS_KEY_JUNCTION);
}
|
|
18
|
+
/** Wraps a store key in a normalized-store link object ({ __ref: key }). */
function createLink(key) {
    const link = { __ref: key };
    return link;
}
|
|
21
|
+
/** True when the store entry is an error marker (tagged __type === 'error'). */
function isStoreRecordError(storeRecord) {
    const { __type } = storeRecord;
    return __type === 'error';
}
|
|
24
|
+
/**
 * True when the durable entry at `key` is a denormalized record representation.
 * A record key may hold either a DurableRecordRepresentation or a StoreRecordError;
 * errors carry a __type discriminator while record data does not.
 */
function isEntryDurableRecordRepresentation(entry, key) {
    if (!isStoreKeyRecordId(key) && !isStoreKeyRecordViewEntity(key)) {
        return false;
    }
    return entry.data.__type === undefined;
}
|
|
29
|
+
/**
 * Records are stored in the durable store with scalar fields denormalized. This function
 * takes that denormalized durable store record representation and normalizes it back out
 * into the format the luvio store expects.
 * @param key Record store key
 * @param entry Durable entry containing a denormalized record representation
 * @returns a set of entries containing the normalized record and its normalized fields
 */
function normalizeRecordFields(key, entry) {
    const record = entry.data;
    const { fields, links } = record;
    const normalizedFields = {};
    const returnEntries = {};
    // Re-emit each denormalized field as its own store entry, leaving a link in its place.
    for (const fieldName of Object.keys(fields)) {
        const field = fields[fieldName];
        const state = field.__state;
        if (state !== undefined && state.isMissing === true) {
            // Missing fields have no backing entry; restore the marker link only.
            normalizedFields[fieldName] = { isMissing: true, __ref: undefined };
        } else {
            const fieldKey = buildRecordFieldStoreKey(key, fieldName);
            returnEntries[fieldKey] = { data: field };
            normalizedFields[fieldName] = createLink(fieldKey);
        }
    }
    // Restore missing-field markers persisted on the record's link metadata.
    if (links !== undefined) {
        for (const fieldName of Object.keys(links)) {
            const link = links[fieldName];
            if (link.isMissing === true) {
                normalizedFields[fieldName] = { ...link, __ref: undefined };
            }
        }
    }
    // Emit the record itself with its fields swapped for links (mutates record in place,
    // matching the original's assign(record, { fields: ... }) behavior).
    record.fields = normalizedFields;
    returnEntries[key] = {
        data: record,
        metadata: entry.metadata,
    };
    return returnEntries;
}
|
|
69
|
+
/**
 * Transforms a record for storage in the durable store. The transformation involves
 * denormalizing scalar fields (inlining each referenced field value in place of its
 * link) and persisting link metadata to transform back into a normalized representation.
 *
 * If the record contains pending fields this will return undefined as pending records
 * do not get persisted to the durable store. There should be a refresh operation
 * outbound that will bring in the updated record.
 *
 * @param normalizedRecord Record containing normalized field links
 * @param records source of referenced record-field entries (keyed by store key)
 * @param pendingEntries entries from the in-flight write; these take precedence over `records`
 * @param store optional fallback store (readEntry) consulted when a ref is in neither
 *              `records` nor `pendingEntries`
 * @returns the denormalized record, or undefined when any field is pending
 */
function buildDurableRecordRepresentation(normalizedRecord, records, pendingEntries, store) {
    const fields = normalizedRecord.fields;
    const filteredFields = {};
    const fieldNames = keys(fields);
    for (let i = 0, len = fieldNames.length; i < len; i++) {
        const fieldName = fieldNames[i];
        const field = fields[fieldName];
        // pending fields get filtered out of the durable store
        const { pending } = field;
        if (pending === true) {
            // do not write records with pending fields to the durable store
            // there should be a refresh operation outbound that will bring in the updated record
            return undefined;
        }
        const { __ref } = field;
        if (__ref !== undefined) {
            let ref = records[__ref];
            if (pendingEntries !== undefined) {
                // If the ref was part of the pending write that takes precedence
                const pendingEntry = pendingEntries[__ref];
                if (pendingEntry !== undefined) {
                    ref = pendingEntry.data;
                }
            }
            // if field reference exists then add it to our filteredFields
            if (ref !== undefined) {
                filteredFields[fieldName] = ref;
            }
            else {
                // if we have a store to read, try to find the field there too
                // The durable ingest staging store may pass through to L1, and
                // not all fields are necessarily published every time, so it is
                // important to check L1 and not just the fields being published,
                // otherwise we risk truncating the fields on the record.
                if (store) {
                    ref = store.readEntry(__ref);
                    if (ref !== undefined) {
                        filteredFields[fieldName] = ref;
                    }
                }
            }
        }
        // we want to preserve fields that are missing nodes
        // NOTE(review): this check runs even when a ref was resolved above, so an
        // isMissing marker would overwrite the resolved value — presumably fields
        // never carry both __ref and isMissing; confirm against link producers.
        if (field.isMissing === true) {
            filteredFields[fieldName] = {
                value: undefined,
                displayValue: undefined,
                __state: { isMissing: true },
            };
        }
    }
    // Shallow-copy the record with only the resolvable/missing fields retained.
    return {
        ...normalizedRecord,
        fields: filteredFields,
    };
}
|
|
135
|
+
/**
 * Computes the durable-store key for the denormalized record form of `originalKey`.
 * Record view entities have no associated keybuilder — they get ingested as records
 * under a different key format (see the keyBuilderFromType override in
 * packages/lds-adapters-uiapi/src/raml-artifacts/types/RecordRepresentation/keyBuilderFromType.ts).
 * This will likely need to be handled when moving to structured keys.
 */
function getDenormalizedKey(originalKey, recordId, luvio) {
    const isRecordView = originalKey.startsWith(RECORD_VIEW_ENTITY_ID_PREFIX);
    return isRecordView
        ? RECORD_VIEW_ENTITY_ID_PREFIX + recordId
        : keyBuilderRecord(luvio, { recordId });
}
|
|
144
|
+
/**
 * Wraps a DurableStore so that records in the default segment are stored
 * denormalized (field values inlined) and are re-normalized back into
 * record + field entries when read. Non-default segments pass through untouched.
 *
 * @param luvio Luvio instance used for record key building
 * @param durableStore the underlying store being wrapped
 * @param getStoreRecords returns the in-memory record source used to resolve field refs
 * @param getStoreMetadata returns the in-memory metadata source
 * @param getStore returns the in-memory store (used as an L1 fallback when denormalizing)
 * @param sqlStore optional SQLite-backed store; metadata-only writes are issued only when present
 * @returns a DurableStore with getEntries/setEntries/batchOperations overridden
 */
function makeRecordDenormalizingDurableStore(luvio, durableStore, getStoreRecords, getStoreMetadata, getStore, sqlStore) {
    // Read path: map requested record/field keys to denormalized record keys,
    // fetch, then normalize records back into record + field entries.
    const getEntries = function (entries, segment) {
        // this HOF only inspects records in the default segment
        if (segment !== DefaultDurableSegment) {
            return durableStore.getEntries(entries, segment);
        }
        const { length: entriesLength } = entries;
        if (entriesLength === 0) {
            return Promise.resolve({});
        }
        // filter out record field keys
        const filteredEntryIds = [];
        // map of records to avoid requesting duplicate record keys when requesting both records and fields
        const recordEntries = {};
        const recordViewEntries = {};
        for (let i = 0, len = entriesLength; i < len; i++) {
            const id = entries[i];
            const recordId = extractRecordIdFromStoreKey(id);
            if (recordId !== undefined) {
                // Record view entities and plain records are deduped separately since
                // they map to different denormalized key formats.
                if (id.startsWith(RECORD_VIEW_ENTITY_ID_PREFIX)) {
                    if (recordViewEntries[recordId] === undefined) {
                        const key = getDenormalizedKey(id, recordId, luvio);
                        recordViewEntries[recordId] = true;
                        filteredEntryIds.push(key);
                    }
                }
                else {
                    if (recordEntries[recordId] === undefined) {
                        const key = getDenormalizedKey(id, recordId, luvio);
                        recordEntries[recordId] = true;
                        filteredEntryIds.push(key);
                    }
                }
            }
            else {
                // Non-record keys pass through unchanged.
                filteredEntryIds.push(id);
            }
        }
        // call base getEntries
        return durableStore.getEntries(filteredEntryIds, segment).then((durableEntries) => {
            if (durableEntries === undefined) {
                return undefined;
            }
            const returnEntries = create(null);
            const keys$1 = keys(durableEntries);
            for (let i = 0, len = keys$1.length; i < len; i++) {
                const key = keys$1[i];
                const value = durableEntries[key];
                if (value === undefined) {
                    continue;
                }
                // Denormalized records fan back out into a record entry plus one
                // entry per field; everything else is returned as-is.
                if (isEntryDurableRecordRepresentation(value, key)) {
                    assign(returnEntries, normalizeRecordFields(key, value));
                }
                else {
                    returnEntries[key] = value;
                }
            }
            return returnEntries;
        });
    };
    // Write-path helper: collapse record + field entries into denormalized record
    // entries. Returns the entries/metadata to persist plus flags for whether each
    // set is non-empty.
    const denormalizeEntries = function (entries) {
        let hasEntries = false;
        let hasMetadata = false;
        const putEntries = create(null);
        const putMetadata = create(null);
        const keys$1 = keys(entries);
        const putRecords = {};
        const putRecordViews = {};
        const storeRecords = getStoreRecords !== undefined ? getStoreRecords() : {};
        const storeMetadata = getStoreMetadata !== undefined ? getStoreMetadata() : {};
        const store = getStore();
        for (let i = 0, len = keys$1.length; i < len; i++) {
            const key = keys$1[i];
            let value = entries[key];
            const recordId = extractRecordIdFromStoreKey(key);
            // do not put normalized field values
            if (recordId !== undefined) {
                const isRecordView = key.startsWith(RECORD_VIEW_ENTITY_ID_PREFIX);
                // Skip if this record was already denormalized from an earlier
                // key in this batch (many field keys map to one record).
                if (isRecordView) {
                    if (putRecordViews[recordId] === true) {
                        continue;
                    }
                }
                else {
                    if (putRecords[recordId] === true) {
                        continue;
                    }
                }
                const recordKey = getDenormalizedKey(key, recordId, luvio);
                const recordEntries = entries;
                const entry = recordEntries[recordKey];
                let record = entry && entry.data;
                if (record === undefined) {
                    // Fall back to the in-memory record when the batch only has fields.
                    record = storeRecords[recordKey];
                    if (record === undefined) {
                        // fields are being published without a record for them existing,
                        // fields cannot exist standalone in the durable store
                        continue;
                    }
                }
                if (isRecordView) {
                    putRecordViews[recordId] = true;
                }
                else {
                    putRecords[recordId] = true;
                }
                // Error entries are persisted verbatim; there are no fields to inline.
                if (isStoreRecordError(record)) {
                    hasEntries = true;
                    putEntries[recordKey] = value;
                    continue;
                }
                let metadata = entry && entry.metadata;
                if (metadata === undefined) {
                    metadata = {
                        ...storeMetadata[recordKey],
                        metadataVersion: DURABLE_METADATA_VERSION,
                    };
                }
                const denormalizedRecord = buildDurableRecordRepresentation(record, storeRecords, recordEntries, store);
                if (denormalizedRecord !== undefined) {
                    hasEntries = true;
                    putEntries[recordKey] = {
                        data: denormalizedRecord,
                        metadata,
                    };
                    // if undefined then it is pending
                    // we should still update metadata on pending records
                }
                else {
                    // Pending record: persist metadata only, expiring it immediately.
                    hasMetadata = true;
                    metadata.expirationTimestamp = metadata.ingestionTimestamp;
                    putMetadata[recordKey] = {
                        metadata,
                    };
                }
            }
            else {
                // Non-record entries pass through untouched.
                hasEntries = true;
                putEntries[key] = value;
            }
        }
        return { putEntries, putMetadata, hasEntries, hasMetadata };
    };
    // Write path: denormalize, then issue setEntries and (when applicable) setMetadata.
    const setEntries = function (entries, segment) {
        if (segment !== DefaultDurableSegment) {
            return durableStore.setEntries(entries, segment);
        }
        const { putEntries, putMetadata, hasEntries, hasMetadata } = denormalizeEntries(entries);
        const promises = [
            hasEntries ? durableStore.setEntries(putEntries, segment) : undefined,
        ];
        if (sqlStore !== undefined) {
            // NOTE(review): the inner `sqlStore !== undefined` is redundant — this
            // branch is already guarded by the enclosing if.
            promises.push(hasMetadata && sqlStore !== undefined
                ? durableStore.setMetadata(putMetadata, segment)
                : undefined);
        }
        return Promise.all(promises).then(() => { });
    };
    // Batch path: rewrite each operation's entries through denormalizeEntries,
    // splitting out a metadata-only operation when needed.
    const batchOperations = function (operations) {
        const operationsWithDenormedRecords = [];
        for (let i = 0, len = operations.length; i < len; i++) {
            const operation = operations[i];
            if (operation.type === 'setMetadata') {
                // if setMetadata also contains entry data then it needs to be denormalized.
                const keys$1 = keys(operation.entries);
                // NOTE(review): a setMetadata operation with zero entries is dropped
                // entirely (nothing is pushed) — confirm this is intentional.
                if (keys$1.length > 0) {
                    const firstKey = keys$1[0];
                    // casted to any to check if data exists
                    const firstEntry = operation.entries[firstKey];
                    // it is not possible for setMetadata to contain entries with both data and no data in the same operation.
                    // this is determined by the plugin supporting update batch calls before it gets to this HOF.
                    // so we only need to check one entry to confirm this for performance
                    if (firstEntry.data !== undefined) {
                        const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
                        operationsWithDenormedRecords.push({
                            ...operation,
                            entries: putEntries,
                        });
                        if (hasMetadata && sqlStore !== undefined) {
                            operationsWithDenormedRecords.push({
                                ...operation,
                                entries: putMetadata,
                                type: 'setMetadata',
                            });
                        }
                    }
                    else {
                        operationsWithDenormedRecords.push(operation);
                    }
                }
                continue;
            }
            // Evictions and non-default segments pass through untouched.
            if (operation.segment !== DefaultDurableSegment || operation.type === 'evictEntries') {
                operationsWithDenormedRecords.push(operation);
                continue;
            }
            const { putEntries, putMetadata, hasMetadata } = denormalizeEntries(operation.entries);
            operationsWithDenormedRecords.push({
                ...operation,
                entries: putEntries,
            });
            if (hasMetadata && sqlStore !== undefined) {
                operationsWithDenormedRecords.push({
                    ...operation,
                    entries: putMetadata,
                    type: 'setMetadata',
                });
            }
        }
        return durableStore.batchOperations(operationsWithDenormedRecords);
    };
    // Prototype-chain wrapper: unlisted DurableStore methods delegate to the base store.
    return create(durableStore, {
        getEntries: { value: getEntries, writable: true },
        setEntries: { value: setEntries, writable: true },
        batchOperations: { value: batchOperations, writable: true },
    });
}
|
|
362
|
+
|
|
363
|
+
export { makeRecordDenormalizingDurableStore };
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
export { makeRecordDenormalizingDurableStore } from './makeRecordDenormalizingDurableStore';
/** Per-field flags persisted alongside a denormalized field value. */
export type DurableRecordRepresentationFieldState = {
    isMissing?: true;
    pending?: true;
};
/** A record field as stored (denormalized) in the durable store. */
export type DurableRecordRepresentationField = {
    value: ScalarFieldType | DurableStoreLink | undefined;
    displayValue: string | null | undefined;
    __state?: DurableRecordRepresentationFieldState;
};
export type DurableRecordRepresentationFields = Record<string, DurableRecordRepresentationField>;
/** Original server value retained for a drafted field; null when there was none. */
type ServerValueFieldType = {
    displayValue: string | null;
    value: ScalarFieldType;
} | null;
/** Shape of a record as persisted in the durable store, with fields denormalized in place. */
export interface DurableRecordRepresentation {
    // Draft-change bookkeeping; present only when the record has local drafts.
    drafts?: {
        created: boolean;
        edited: boolean;
        deleted: boolean;
        serverValues: Record<string, ServerValueFieldType>;
        serverRootValues: {
            recordTypeId: string | null;
            recordTypeInfo: {
                available: boolean;
                defaultRecordTypeMapping: boolean;
                master: boolean;
                name: string;
                recordTypeId: string;
            } | null;
            lastModifiedById: string | null;
            lastModifiedDate: string | null;
            systemModstamp: string | null;
        } | undefined;
        draftActionIds: string[];
        latestDraftActionId: string;
    };
    // Link metadata used to restore normalized field references (e.g. missing markers).
    links?: Record<string, DurableStoreLink>;
    apiName: string;
    childRelationships: Record<string, DurableStoreLink>;
    eTag: string;
    fields: DurableRecordRepresentationFields;
    id: string;
    lastModifiedById: string | null;
    lastModifiedDate: string | null;
    recordTypeId: string | null;
    recordTypeInfo: {
        available: boolean;
        defaultRecordTypeMapping: boolean;
        master: boolean;
        name: string;
        recordTypeId: string;
    } | null;
    systemModstamp: string | null;
    weakEtag: number;
}
export type ScalarFieldType = boolean | number | string | null;
/** Normalized-store link; __ref names the entry holding the referenced value. */
type DurableStoreLink = {
    __ref?: string;
    pending?: true;
    isMissing?: true;
};
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
// Public type declarations for the record-denormalizing durable store module.
import type { RecordSource, InMemoryStore, Luvio, StoreRecordError } from '@luvio/engine';
import type { DurableStore, DurableStoreEntry } from '@luvio/environments';
import type { NimbusSqliteStore } from '@salesforce/lds-store-nimbus';
import type { DurableRecordRepresentation } from './main';
/** Type guard: true when the store entry is an error marker rather than record data. */
export declare function isStoreRecordError(storeRecord: object): storeRecord is StoreRecordError;
/** Type guard: true when the durable entry at `key` holds a denormalized record representation. */
export declare function isEntryDurableRecordRepresentation(entry: DurableStoreEntry<any>, key: string): entry is DurableStoreEntry<DurableRecordRepresentation>;
/** Wraps a DurableStore so records are denormalized on write and re-normalized on read. */
export declare function makeRecordDenormalizingDurableStore(luvio: Luvio, durableStore: DurableStore, getStoreRecords: () => RecordSource, getStoreMetadata: () => InMemoryStore['fallbackStringKeyInMemoryStore']['metadata'], getStore: () => InMemoryStore['fallbackStringKeyInMemoryStore'] | undefined, sqlStore?: NimbusSqliteStore): DurableStore;
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
// Compiler-generated declarations for cached intrinsics (Object/JSON/Array
// statics aliased at module load). The overload shapes below are tsc emit of
// the corresponding lib.d.ts signatures — do not edit by hand.
declare const keys: {
    (o: object): string[];
    (o: {}): string[];
}, values: {
    <T>(o: {
        [s: string]: T;
    } | ArrayLike<T>): T[];
    (o: {}): any[];
}, create: {
    (o: object | null): any;
    (o: object | null, properties: PropertyDescriptorMap & ThisType<any>): any;
}, assign: {
    <T extends {}, U>(target: T, source: U): T & U;
    <T_1 extends {}, U_1, V>(target: T_1, source1: U_1, source2: V): T_1 & U_1 & V;
    <T_2 extends {}, U_2, V_1, W>(target: T_2, source1: U_2, source2: V_1, source3: W): T_2 & U_2 & V_1 & W;
    (target: object, ...sources: any[]): any;
}, freeze: {
    <T extends Function>(f: T): T;
    <T_1 extends {
        [idx: string]: object | U | null | undefined;
    }, U extends string | number | bigint | boolean | symbol>(o: T_1): Readonly<T_1>;
    <T_2>(o: T_2): Readonly<T_2>;
}, entries: {
    <T>(o: {
        [s: string]: T;
    } | ArrayLike<T>): [string, T][];
    (o: {}): [string, any][];
};
declare const stringify: {
    (value: any, replacer?: ((this: any, key: string, value: any) => any) | undefined, space?: string | number | undefined): string;
    (value: any, replacer?: (string | number)[] | null | undefined, space?: string | number | undefined): string;
}, parse: (text: string, reviver?: ((this: any, key: string, value: any) => any) | undefined) => any;
declare const shift: () => any;
declare const isArray: (arg: any) => arg is any[], from: {
    <T>(arrayLike: ArrayLike<T>): T[];
    <T_1, U>(arrayLike: ArrayLike<T_1>, mapfn: (v: T_1, k: number) => U, thisArg?: any): U[];
    <T_2>(iterable: Iterable<T_2> | ArrayLike<T_2>): T_2[];
    <T_3, U_1>(iterable: Iterable<T_3> | ArrayLike<T_3>, mapfn: (v: T_3, k: number) => U_1, thisArg?: any): U_1[];
};
export { keys as ObjectKeys, values as ObjectValues, create as ObjectCreate, assign as ObjectAssign, freeze as ObjectFreeze, entries as ObjectEntries, stringify as JSONStringify, parse as JSONParse, shift as ArrayPrototypeShift, isArray as ArrayIsArray, from as ArrayFrom, };
|
package/package.json
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@salesforce/lds-durable-records",
|
|
3
|
+
"version": "0.1.0-dev1",
|
|
4
|
+
"license": "SEE LICENSE IN LICENSE.txt",
|
|
5
|
+
"description": "LDS Record Utilities",
|
|
6
|
+
"main": "dist/ldsDurableRecords.js",
|
|
7
|
+
"module": "dist/ldsDurableRecords.js",
|
|
8
|
+
"types": "dist/types/main.d.ts",
|
|
9
|
+
"files": [
|
|
10
|
+
"dist"
|
|
11
|
+
],
|
|
12
|
+
"exports": {
|
|
13
|
+
".": {
|
|
14
|
+
"types": "./dist/types/main.d.ts",
|
|
15
|
+
"import": "./dist/ldsDurableRecords.js",
|
|
16
|
+
"default": "./dist/ldsDurableRecords.js"
|
|
17
|
+
}
|
|
18
|
+
},
|
|
19
|
+
"scripts": {
|
|
20
|
+
"prepare": "yarn build",
|
|
21
|
+
"build": "rollup --bundleConfigAsCjs --config rollup.config.js",
|
|
22
|
+
"clean": "rm -rf dist",
|
|
23
|
+
"test:unit": "jest"
|
|
24
|
+
},
|
|
25
|
+
"dependencies": {
|
|
26
|
+
"@luvio/engine": "0.158.7",
|
|
27
|
+
"@luvio/environments": "0.158.7",
|
|
28
|
+
"@salesforce/lds-adapters-uiapi": "^0.1.0-dev1"
|
|
29
|
+
},
|
|
30
|
+
"devDependencies": {
|
|
31
|
+
"@salesforce/lds-store-nimbus": "^0.1.0-dev1"
|
|
32
|
+
}
|
|
33
|
+
}
|