react-native-onyx 1.0.120 → 1.0.121
This diff shows the changes between two publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/web.development.js +118 -110
- package/dist/web.development.js.map +1 -1
- package/dist/web.min.js.map +1 -1
- package/lib/Logger.js +1 -5
- package/lib/MDTable.js +11 -14
- package/lib/Onyx.d.ts +1 -4
- package/lib/Onyx.js +237 -232
- package/lib/OnyxCache.js +12 -3
- package/lib/Str.js +1 -3
- package/lib/compose.js +6 -2
- package/lib/metrics/PerformanceUtils.js +2 -7
- package/lib/metrics/index.native.js +28 -41
- package/lib/metrics/index.web.js +4 -7
- package/lib/storage/WebStorage.js +5 -10
- package/lib/storage/__mocks__/index.js +2 -2
- package/lib/storage/providers/IDBKeyVal.js +27 -37
- package/lib/storage/providers/SQLiteStorage.js +58 -62
- package/lib/types.d.ts +1 -13
- package/lib/utils.d.ts +2 -6
- package/lib/utils.js +19 -22
- package/lib/withOnyx.d.ts +8 -32
- package/lib/withOnyx.js +37 -34
- package/package.json +6 -3
package/lib/Onyx.js
CHANGED
@@ -118,12 +118,17 @@ const getSubsetOfData = (sourceData, selector, withOnyxInstanceState) => selecto
  * @param {Object} [withOnyxInstanceState]
  * @returns {Object}
  */
-const reduceCollectionWithSelector = (collection, selector, withOnyxInstanceState) =>
-
-
+const reduceCollectionWithSelector = (collection, selector, withOnyxInstanceState) =>
+    _.reduce(
+        collection,
+        (finalCollection, item, key) => {
+            // eslint-disable-next-line no-param-reassign
+            finalCollection[key] = getSubsetOfData(item, selector, withOnyxInstanceState);

-
-},
+            return finalCollection;
+        },
+        {},
+    );

 /**
  * Get some data from the store
@@ -151,7 +156,7 @@ function get(key) {
             cache.set(key, val);
             return val;
         })
-        .catch(err => Logger.logInfo(`Unable to get item from persistent storage. Key: ${key} Error: ${err}`));
+        .catch((err) => Logger.logInfo(`Unable to get item from persistent storage. Key: ${key} Error: ${err}`));

     return cache.captureTask(taskName, promise);
 }
@@ -176,11 +181,10 @@ function getAllKeys() {
     }

     // Otherwise retrieve the keys from storage and capture a promise to aid concurrent usages
-    const promise = Storage.getAllKeys()
-        .
-
-
-        });
+    const promise = Storage.getAllKeys().then((keys) => {
+        _.each(keys, (key) => cache.addKey(key));
+        return keys;
+    });

     return cache.captureTask(taskName, promise);
 }
@@ -216,9 +220,7 @@ function isCollectionMemberKey(collectionKey, key) {
  * @return {Boolean}
  */
 function isKeyMatch(configKey, key) {
-    return isCollectionKey(configKey)
-        ? Str.startsWith(key, configKey)
-        : configKey === key;
+    return isCollectionKey(configKey) ? Str.startsWith(key, configKey) : configKey === key;
 }

 /**
@@ -230,7 +232,7 @@ function isKeyMatch(configKey, key) {
  * @returns {Boolean}
  */
 function isSafeEvictionKey(testKey) {
-    return _.some(evictionAllowList, key => isKeyMatch(key, testKey));
+    return _.some(evictionAllowList, (key) => isKeyMatch(key, testKey));
 }

 /**
@@ -252,16 +254,20 @@ function tryGetCachedValue(key, mapping = {}) {
         if (allCacheKeys.length === 0) {
             return;
         }
-        const matchingKeys = _.filter(allCacheKeys, k => k.startsWith(key));
-        const values = _.reduce(
-
-
-
-
-
-
-
-
+        const matchingKeys = _.filter(allCacheKeys, (k) => k.startsWith(key));
+        const values = _.reduce(
+            matchingKeys,
+            (finalObject, matchedKey) => {
+                const cachedValue = cache.getValue(matchedKey);
+                if (cachedValue) {
+                    // This is permissible because we're in the process of constructing the final object in a reduce function.
+                    // eslint-disable-next-line no-param-reassign
+                    finalObject[matchedKey] = cachedValue;
+                }
+                return finalObject;
+            },
+            {},
+        );

         val = values;
     }
@@ -349,17 +355,16 @@ function addToEvictionBlockList(key, connectionID) {
  * @returns {Promise}
  */
 function addAllSafeEvictionKeysToRecentlyAccessedList() {
-    return getAllKeys()
-        .
-            _.each(
-
-
-
-
-                    addLastAccessedKey(key);
-                });
+    return getAllKeys().then((keys) => {
+        _.each(evictionAllowList, (safeEvictionKey) => {
+            _.each(keys, (key) => {
+                if (!isKeyMatch(safeEvictionKey, key)) {
+                    return;
+                }
+                addLastAccessedKey(key);
             });
         });
+    });
 }

 /**
@@ -368,20 +373,22 @@ function addAllSafeEvictionKeysToRecentlyAccessedList() {
  * @returns {Object}
  */
 function getCachedCollection(collectionKey) {
-    const collectionMemberKeys = _.filter(cache.getAllKeys(), (
-
-
+    const collectionMemberKeys = _.filter(cache.getAllKeys(), (storedKey) => isCollectionMemberKey(collectionKey, storedKey));
+
+    return _.reduce(
+        collectionMemberKeys,
+        (prev, curr) => {
+            const cachedValue = cache.getValue(curr);
+            if (!cachedValue) {
+                return prev;
+            }

-
-
-        if (!cachedValue) {
+            // eslint-disable-next-line no-param-reassign
+            prev[curr] = cachedValue;
             return prev;
-        }
-
-
-        prev[curr] = cachedValue;
-        return prev;
-    }, {});
+        },
+        {},
+    );
 }

 /**
@@ -750,9 +757,7 @@ function addKeyToRecentlyAccessedIfNeeded(mapping) {
     if (mapping.withOnyxInstance && !isCollectionKey(mapping.key)) {
         // All React components subscribing to a key flagged as a safe eviction key must implement the canEvict property.
         if (_.isUndefined(mapping.canEvict)) {
-            throw new Error(
-                `Cannot subscribe to safe eviction key '${mapping.key}' without providing a canEvict value.`,
-            );
+            throw new Error(`Cannot subscribe to safe eviction key '${mapping.key}' without providing a canEvict value.`);
         }

         addLastAccessedKey(mapping.key);
@@ -767,13 +772,19 @@ function addKeyToRecentlyAccessedIfNeeded(mapping) {
  * @param {Object} mapping
 */
 function getCollectionDataAndSendAsObject(matchingKeys, mapping) {
-    Promise.all(_.map(matchingKeys, key => get(key)))
-        .then(
-
-
-
-
-
+    Promise.all(_.map(matchingKeys, (key) => get(key)))
+        .then((values) =>
+            _.reduce(
+                values,
+                (finalObject, value, i) => {
+                    // eslint-disable-next-line no-param-reassign
+                    finalObject[matchingKeys[i]] = value;
+                    return finalObject;
+                },
+                {},
+            ),
+        )
+        .then((val) => sendDataToConnection(mapping, val, undefined, true));
 }

 /**
@@ -820,11 +831,7 @@ function connect(mapping) {
             // Performance improvement
             // If the mapping is connected to an onyx key that is not a collection
             // we can skip the call to getAllKeys() and return an array with a single item
-            if (Boolean(mapping.key)
-                && typeof mapping.key === 'string'
-                && !(mapping.key.endsWith('_'))
-                && cache.storageKeys.has(mapping.key)
-            ) {
+            if (Boolean(mapping.key) && typeof mapping.key === 'string' && !mapping.key.endsWith('_') && cache.storageKeys.has(mapping.key)) {
                 return [mapping.key];
             }
             return getAllKeys();
@@ -833,7 +840,7 @@ function connect(mapping) {
             // We search all the keys in storage to see if any are a "match" for the subscriber we are connecting so that we
             // can send data back to the subscriber. Note that multiple keys can match as a subscriber could either be
             // subscribed to a "collection key" or a single key.
-            const matchingKeys = _.filter(keys, key => isKeyMatch(mapping.key, key));
+            const matchingKeys = _.filter(keys, (key) => isKeyMatch(mapping.key, key));

             // If the key being connected to does not exist we initialize the value with null. For subscribers that connected
             // directly via connect() they will simply get a null value sent to them without any information about which key matched
@@ -862,13 +869,13 @@ function connect(mapping) {

                     // We did not opt into using waitForCollectionCallback mode so the callback is called for every matching key.
                     for (let i = 0; i < matchingKeys.length; i++) {
-                        get(matchingKeys[i]).then(val => sendDataToConnection(mapping, val, matchingKeys[i], true));
+                        get(matchingKeys[i]).then((val) => sendDataToConnection(mapping, val, matchingKeys[i], true));
                     }
                     return;
                 }

                 // If we are not subscribed to a collection key then there's only a single key to send an update for.
-                get(mapping.key).then(val => sendDataToConnection(mapping, val, mapping.key, true));
+                get(mapping.key).then((val) => sendDataToConnection(mapping, val, mapping.key, true));
                 return;
             }

@@ -881,7 +888,7 @@ function connect(mapping) {
             }

             // If the subscriber is not using a collection key then we just send a single value back to the subscriber
-            get(mapping.key).then(val => sendDataToConnection(mapping, val, mapping.key, true));
+            get(mapping.key).then((val) => sendDataToConnection(mapping, val, mapping.key, true));
             return;
         }

@@ -988,13 +995,13 @@ function reportStorageQuota() {
 function evictStorageAndRetry(error, onyxMethod, ...args) {
     Logger.logInfo(`Failed to save to storage. Error: ${error}. onyxMethod: ${onyxMethod.name}`);

-    if (error && Str.startsWith(error.message,
+    if (error && Str.startsWith(error.message, "Failed to execute 'put' on 'IDBObjectStore'")) {
         Logger.logAlert('Attempted to set invalid data set in Onyx. Please ensure all data is serializable.');
         throw error;
     }

     // Find the first key that we can remove that has no subscribers in our blocklist
-    const keyForRemoval = _.find(recentlyAccessedKeys, key => !evictionBlocklist[key]);
+    const keyForRemoval = _.find(recentlyAccessedKeys, (key) => !evictionBlocklist[key]);
     if (!keyForRemoval) {
         // If we have no acceptable keys to remove then we are possibly trying to save mission critical data. If this is the case,
         // then we should stop retrying as there is not much the user can do to fix this. Instead of getting them stuck in an infinite loop we
@@ -1006,8 +1013,7 @@ function evictStorageAndRetry(error, onyxMethod, ...args) {
     // Remove the least recently viewed key that is not currently being accessed and retry.
     Logger.logInfo(`Out of storage. Evicting least recently accessed key (${keyForRemoval}) and retrying.`);
     reportStorageQuota();
-    return remove(keyForRemoval)
-        .then(() => onyxMethod(...args));
+    return remove(keyForRemoval).then(() => onyxMethod(...args));
 }

 /**
@@ -1031,7 +1037,7 @@ function broadcastUpdate(key, value, hasChanged, method) {
         cache.addToAccessedKeys(key);
     }

-    return scheduleSubscriberUpdate(key, value, subscriber => hasChanged || subscriber.initWithStoredValues === false);
+    return scheduleSubscriberUpdate(key, value, (subscriber) => hasChanged || subscriber.initWithStoredValues === false);
 }

 /**
@@ -1100,7 +1106,7 @@ function set(key, value) {
     }

     return Storage.setItem(key, valueWithoutNull)
-        .catch(error => evictStorageAndRetry(error, set, key, valueWithoutNull))
+        .catch((error) => evictStorageAndRetry(error, set, key, valueWithoutNull))
         .then(() => updatePromise);
 }

@@ -1142,17 +1148,20 @@ function multiSet(data) {
         return scheduleSubscriberUpdate(key, val);
     });

-    const keyValuePairsWithoutNull = _.filter(
-
+    const keyValuePairsWithoutNull = _.filter(
+        _.map(keyValuePairs, ([key, value]) => {
+            const valueWithoutNull = removeNullValues(key, value);

-
-
-
-
-
+            if (valueWithoutNull === null) {
+                return;
+            }
+            return [key, valueWithoutNull];
+        }),
+        Boolean,
+    );

     return Storage.multiSet(keyValuePairsWithoutNull)
-        .catch(error => evictStorageAndRetry(error, multiSet, data))
+        .catch((error) => evictStorageAndRetry(error, multiSet, data))
         .then(() => Promise.all(updatePromises));
 }

@@ -1174,8 +1183,7 @@ function applyMerge(existingValue, changes, shouldRemoveNullObjectValues) {

     if (_.some(changes, _.isObject)) {
         // Object values are then merged one after the other
-        return _.reduce(changes, (modifiedData, change) => utils.fastMerge(modifiedData, change, shouldRemoveNullObjectValues),
-            existingValue || {});
+        return _.reduce(changes, (modifiedData, change) => utils.fastMerge(modifiedData, change, shouldRemoveNullObjectValues), existingValue || {});
     }

     // If we have anything else we can't merge it so we'll
@@ -1227,57 +1235,55 @@ function merge(key, changes) {
     }
     mergeQueue[key] = [changes];

-    mergeQueuePromise[key] = get(key)
-
-
-
-
-        let batchedChanges = applyMerge(undefined, mergeQueue[key], false);
-
-        // The presence of a `null` in the merge queue instructs us to drop the existing value.
-        // In this case, we can't simply merge the batched changes with the existing value, because then the null in the merge queue would have no effect
-        const shouldOverwriteExistingValue = _.includes(mergeQueue[key], null);
+    mergeQueuePromise[key] = get(key).then((existingValue) => {
+        try {
+            // We first only merge the changes, so we can provide these to the native implementation (SQLite uses only delta changes in "JSON_PATCH" to merge)
+            // We don't want to remove null values from the "batchedChanges", because SQLite uses them to remove keys from storage natively.
+            let batchedChanges = applyMerge(undefined, mergeQueue[key], false);

-
-
-
+            // The presence of a `null` in the merge queue instructs us to drop the existing value.
+            // In this case, we can't simply merge the batched changes with the existing value, because then the null in the merge queue would have no effect
+            const shouldOverwriteExistingValue = _.includes(mergeQueue[key], null);

-
-
-
-            return;
-        }
+            // Clean up the write queue, so we don't apply these changes again
+            delete mergeQueue[key];
+            delete mergeQueuePromise[key];

-
-
-
-
-
-        // On native platforms we use SQLite which utilises JSON_PATCH to merge changes.
-        // JSON_PATCH generally removes null values from the stored object.
-        // When there is no existing value though, SQLite will just insert the changes as a new value and thus the null values won't be removed.
-        // Therefore we need to remove null values from the `batchedChanges` which are sent to the SQLite, if no existing value is present.
-        if (!existingValue) {
-            batchedChanges = applyMerge(undefined, [batchedChanges], true);
-        }
+            // If the batched changes equal null, we want to remove the key from storage, to reduce storage size
+            if (_.isNull(batchedChanges)) {
+                remove(key);
+                return;
+            }

-
+            // After that we merge the batched changes with the existing value
+            // We can remove null values from the "modifiedData", because "null" implicates that the user wants to remove a value from storage.
+            // The "modifiedData" will be directly "set" in storage instead of being merged
+            const modifiedData = shouldOverwriteExistingValue ? batchedChanges : applyMerge(existingValue, [batchedChanges], true);
+
+            // On native platforms we use SQLite which utilises JSON_PATCH to merge changes.
+            // JSON_PATCH generally removes null values from the stored object.
+            // When there is no existing value though, SQLite will just insert the changes as a new value and thus the null values won't be removed.
+            // Therefore we need to remove null values from the `batchedChanges` which are sent to the SQLite, if no existing value is present.
+            if (!existingValue) {
+                batchedChanges = applyMerge(undefined, [batchedChanges], true);
+            }

-
-        const updatePromise = broadcastUpdate(key, modifiedData, hasChanged, 'merge');
+            const hasChanged = cache.hasValueChanged(key, modifiedData);

-
-
-        return updatePromise;
-    }
+            // This approach prioritizes fast UI changes without waiting for data to be stored in device storage.
+            const updatePromise = broadcastUpdate(key, modifiedData, hasChanged, 'merge');

-
-
-
-        Logger.logAlert(`An error occurred while applying merge for key: ${key}, Error: ${error}`);
-        return Promise.resolve();
+            // If the value has not changed, calling Storage.setItem() would be redundant and a waste of performance, so return early instead.
+            if (!hasChanged || isClearing) {
+                return updatePromise;
             }
-
+
+            return Storage.mergeItem(key, batchedChanges, modifiedData).then(() => updatePromise);
+        } catch (error) {
+            Logger.logAlert(`An error occurred while applying merge for key: ${key}, Error: ${error}`);
+            return Promise.resolve();
+        }
+    });

     return mergeQueuePromise[key];
 }
@@ -1288,14 +1294,13 @@ function merge(key, changes) {
  * @returns {Promise}
  */
 function initializeWithDefaultKeyStates() {
-    return Storage.multiGet(_.keys(defaultKeyStates))
-        .
-            const asObject = _.object(pairs);
+    return Storage.multiGet(_.keys(defaultKeyStates)).then((pairs) => {
+        const asObject = _.object(pairs);

-
-
-
-
+        const merged = utils.fastMerge(asObject, defaultKeyStates);
+        cache.merge(merged);
+        _.each(merged, (val, key) => keyChanged(key, val));
+    });
 }

 /**
@@ -1332,70 +1337,71 @@ function clear(keysToPreserve = []) {

     isClearing = true;

-    return getAllKeys()
-
-
-
-
-
-
-
-
-
-
-            _.
-
-
-
-
-
-
-
-
-
-
-
-
-
-                        if (collectionKey) {
-
-                            keyValuesToResetAsCollection[collectionKey] = {};
-                        }
-                        keyValuesToResetAsCollection[collectionKey][key] = newValue;
-                    } else {
-                        keyValuesToResetIndividually[key] = newValue;
+    return getAllKeys().then((keys) => {
+        const keysToBeClearedFromStorage = [];
+        const keyValuesToResetAsCollection = {};
+        const keyValuesToResetIndividually = {};
+
+        // The only keys that should not be cleared are:
+        // 1. Anything specifically passed in keysToPreserve (because some keys like language preferences, offline
+        // status, or activeClients need to remain in Onyx even when signed out)
+        // 2. Any keys with a default state (because they need to remain in Onyx as their default, and setting them
+        // to null would cause unknown behavior)
+        _.each(keys, (key) => {
+            const isKeyToPreserve = _.contains(keysToPreserve, key);
+            const isDefaultKey = _.has(defaultKeyStates, key);
+
+            // If the key is being removed or reset to default:
+            // 1. Update it in the cache
+            // 2. Figure out whether it is a collection key or not,
+            // since collection key subscribers need to be updated differently
+            if (!isKeyToPreserve) {
+                const oldValue = cache.getValue(key);
+                const newValue = _.get(defaultKeyStates, key, null);
+                if (newValue !== oldValue) {
+                    cache.set(key, newValue);
+                    const collectionKey = key.substring(0, key.indexOf('_') + 1);
+                    if (collectionKey) {
+                        if (!keyValuesToResetAsCollection[collectionKey]) {
+                            keyValuesToResetAsCollection[collectionKey] = {};
                         }
+                        keyValuesToResetAsCollection[collectionKey][key] = newValue;
+                    } else {
+                        keyValuesToResetIndividually[key] = newValue;
                     }
                 }
+            }

-
-
-
+            if (isKeyToPreserve || isDefaultKey) {
+                return;
+            }

-
-
-
+            // If it isn't preserved and doesn't have a default, we'll remove it
+            keysToBeClearedFromStorage.push(key);
+        });

-
+        const updatePromises = [];

-
-
-
-
-
-
-
+        // Notify the subscribers for each key/value group so they can receive the new values
+        _.each(keyValuesToResetIndividually, (value, key) => {
+            updatePromises.push(scheduleSubscriberUpdate(key, value));
+        });
+        _.each(keyValuesToResetAsCollection, (value, key) => {
+            updatePromises.push(scheduleNotifyCollectionSubscribers(key, value));
+        });

-
+        const defaultKeyValuePairs = _.pairs(_.omit(defaultKeyStates, keysToPreserve));

-
-
-
+        // Remove only the items that we want cleared from storage, and reset others to default
+        _.each(keysToBeClearedFromStorage, (key) => cache.drop(key));
+        return Storage.removeItems(keysToBeClearedFromStorage)
+            .then(() => Storage.multiSet(defaultKeyValuePairs))
+            .then(() => {
                 isClearing = false;
                 Broadcast.sendMessage({type: METHOD.CLEAR, keysToPreserve});
                 return Promise.all(updatePromises);
             });
-
+    });
 }

 /**
@@ -1438,49 +1444,48 @@ function mergeCollection(collectionKey, collection) {
         return Promise.resolve();
     }

-    return getAllKeys()
-
-
-
-        .
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            }
-
-            if (keyValuePairsForNewCollection.length > 0) {
-                promises.push(Storage.multiSet(keyValuePairsForNewCollection));
-            }
+    return getAllKeys().then((persistedKeys) => {
+        // Split to keys that exist in storage and keys that don't
+        const [existingKeys, newKeys] = _.chain(collection)
+            .pick((value, key) => {
+                if (_.isNull(value)) {
+                    remove(key);
+                    return false;
+                }
+                return true;
+            })
+            .keys()
+            .partition((key) => persistedKeys.includes(key))
+            .value();
+
+        const existingKeyCollection = _.pick(collection, existingKeys);
+        const newCollection = _.pick(collection, newKeys);
+        const keyValuePairsForExistingCollection = prepareKeyValuePairsForStorage(existingKeyCollection);
+        const keyValuePairsForNewCollection = prepareKeyValuePairsForStorage(newCollection);
+
+        const promises = [];
+
+        // New keys will be added via multiSet while existing keys will be updated using multiMerge
+        // This is because setting a key that doesn't exist yet with multiMerge will throw errors
+        if (keyValuePairsForExistingCollection.length > 0) {
+            promises.push(Storage.multiMerge(keyValuePairsForExistingCollection));
+        }

-
-
-
-                cache.merge(collection);
-                return scheduleNotifyCollectionSubscribers(collectionKey, collection);
-            });
+        if (keyValuePairsForNewCollection.length > 0) {
+            promises.push(Storage.multiSet(keyValuePairsForNewCollection));
+        }

-
-
-
+        // Prefill cache if necessary by calling get() on any existing keys and then merge original data to cache
+        // and update all subscribers
+        const promiseUpdate = Promise.all(_.map(existingKeys, get)).then(() => {
+            cache.merge(collection);
+            return scheduleNotifyCollectionSubscribers(collectionKey, collection);
         });
+
+        return Promise.all(promises)
+            .catch((error) => evictStorageAndRetry(error, mergeCollection, collection))
+            .then(() => promiseUpdate);
+    });
 }

 /**
@@ -1530,7 +1535,7 @@ function update(data) {
         }
     });

-    return clearPromise.then(() => Promise.all(_.map(promises, p => p())));
+    return clearPromise.then(() => Promise.all(_.map(promises, (p) => p())));
 }

 /**
@@ -1646,10 +1651,14 @@ function init({
     // We need the value of the collection keys later for checking if a
     // key is a collection. We store it in a map for faster lookup.
     const collectionValues = _.values(keys.COLLECTION);
-    onyxCollectionKeyMap = _.reduce(
-
-
-
+    onyxCollectionKeyMap = _.reduce(
+        collectionValues,
+        (acc, val) => {
+            acc.set(val, true);
+            return acc;
+        },
+        new Map(),
+    );

     // Set our default key states to use when initializing and clearing Onyx data
     defaultKeyStates = initialKeyStates;
@@ -1658,11 +1667,7 @@ function init({
     evictionAllowList = safeEvictionKeys;

     // Initialize all of our keys with data provided then give green light to any pending connections
-    Promise.all([
-        addAllSafeEvictionKeysToRecentlyAccessedList(),
-        initializeWithDefaultKeyStates(),
-    ])
-        .then(deferredInitTask.resolve);
+    Promise.all([addAllSafeEvictionKeysToRecentlyAccessedList(), initializeWithDefaultKeyStates()]).then(deferredInitTask.resolve);

     if (shouldSyncMultipleInstances && _.isFunction(Storage.keepInstancesSync)) {
         Storage.keepInstancesSync((key, value) => {