react-native-onyx 3.0.27 → 3.0.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/GlobalSettings.js +2 -1
- package/dist/Onyx.js +28 -25
- package/dist/OnyxCache.js +10 -10
- package/dist/OnyxConnectionManager.js +9 -6
- package/dist/OnyxUtils.js +54 -51
- package/dist/dependencies/PerformanceProxy/index.native.js +0 -1
- package/dist/logMessages.js +1 -3
- package/dist/storage/InstanceSync/index.web.js +2 -2
- package/dist/storage/providers/MemoryOnlyProvider.js +2 -3
- package/dist/storage/providers/NoopProvider.js +1 -1
- package/dist/types.d.ts +1 -3
- package/dist/utils.js +8 -9
- package/package.json +11 -10
package/dist/GlobalSettings.js
CHANGED
|
@@ -17,7 +17,8 @@ function addGlobalSettingsChangeListener(listener) {
|
|
|
17
17
|
};
|
|
18
18
|
}
|
|
19
19
|
function notifyListeners() {
|
|
20
|
-
|
|
20
|
+
for (const listener of listeners)
|
|
21
|
+
listener(globalSettings);
|
|
21
22
|
}
|
|
22
23
|
function setPerformanceMetricsEnabled(enabled) {
|
|
23
24
|
globalSettings.enablePerformanceMetrics = enabled;
|
package/dist/Onyx.js
CHANGED
|
@@ -286,6 +286,7 @@ function clear(keysToPreserve = []) {
|
|
|
286
286
|
const initialKeys = Object.keys(defaultKeyStates);
|
|
287
287
|
const promise = OnyxUtils_1.default.getAllKeys()
|
|
288
288
|
.then((cachedKeys) => {
|
|
289
|
+
var _a;
|
|
289
290
|
OnyxCache_1.default.clearNullishStorageKeys();
|
|
290
291
|
const keysToBeClearedFromStorage = [];
|
|
291
292
|
const keyValuesToResetAsCollection = {};
|
|
@@ -297,8 +298,7 @@ function clear(keysToPreserve = []) {
|
|
|
297
298
|
// 2. Any keys with a default state (because they need to remain in Onyx as their default, and setting them
|
|
298
299
|
// to null would cause unknown behavior)
|
|
299
300
|
// 2.1 However, if a default key was explicitly set to null, we need to reset it to the default value
|
|
300
|
-
|
|
301
|
-
var _a;
|
|
301
|
+
for (const key of allKeys) {
|
|
302
302
|
const isKeyToPreserve = keysToPreserve.includes(key);
|
|
303
303
|
const isDefaultKey = key in defaultKeyStates;
|
|
304
304
|
// If the key is being removed or reset to default:
|
|
@@ -330,19 +330,19 @@ function clear(keysToPreserve = []) {
|
|
|
330
330
|
}
|
|
331
331
|
}
|
|
332
332
|
if (isKeyToPreserve || isDefaultKey) {
|
|
333
|
-
|
|
333
|
+
continue;
|
|
334
334
|
}
|
|
335
335
|
// If it isn't preserved and doesn't have a default, we'll remove it
|
|
336
336
|
keysToBeClearedFromStorage.push(key);
|
|
337
|
-
}
|
|
337
|
+
}
|
|
338
338
|
const updatePromises = [];
|
|
339
339
|
// Notify the subscribers for each key/value group so they can receive the new values
|
|
340
|
-
|
|
340
|
+
for (const [key, value] of Object.entries(keyValuesToResetIndividually)) {
|
|
341
341
|
updatePromises.push(OnyxUtils_1.default.scheduleSubscriberUpdate(key, value));
|
|
342
|
-
}
|
|
343
|
-
|
|
342
|
+
}
|
|
343
|
+
for (const [key, value] of Object.entries(keyValuesToResetAsCollection)) {
|
|
344
344
|
updatePromises.push(OnyxUtils_1.default.scheduleNotifyCollectionSubscribers(key, value));
|
|
345
|
-
}
|
|
345
|
+
}
|
|
346
346
|
const defaultKeyValuePairs = Object.entries(Object.keys(defaultKeyStates)
|
|
347
347
|
.filter((key) => !keysToPreserve.includes(key))
|
|
348
348
|
.reduce((obj, key) => {
|
|
@@ -351,7 +351,8 @@ function clear(keysToPreserve = []) {
|
|
|
351
351
|
return obj;
|
|
352
352
|
}, {}));
|
|
353
353
|
// Remove only the items that we want cleared from storage, and reset others to default
|
|
354
|
-
|
|
354
|
+
for (const key of keysToBeClearedFromStorage)
|
|
355
|
+
OnyxCache_1.default.drop(key);
|
|
355
356
|
return storage_1.default.removeItems(keysToBeClearedFromStorage)
|
|
356
357
|
.then(() => OnyxConnectionManager_1.default.refreshSessionID())
|
|
357
358
|
.then(() => storage_1.default.multiSet(defaultKeyValuePairs))
|
|
@@ -371,7 +372,7 @@ function clear(keysToPreserve = []) {
|
|
|
371
372
|
*/
|
|
372
373
|
function update(data) {
|
|
373
374
|
// First, validate the Onyx object is in the format we expect
|
|
374
|
-
|
|
375
|
+
for (const { onyxMethod, key, value } of data) {
|
|
375
376
|
if (!Object.values(OnyxUtils_1.default.METHOD).includes(onyxMethod)) {
|
|
376
377
|
throw new Error(`Invalid onyxMethod ${onyxMethod} in Onyx update.`);
|
|
377
378
|
}
|
|
@@ -384,7 +385,7 @@ function update(data) {
|
|
|
384
385
|
else if (onyxMethod !== OnyxUtils_1.default.METHOD.CLEAR && typeof key !== 'string') {
|
|
385
386
|
throw new Error(`Invalid ${typeof key} key provided in Onyx update. Onyx key must be of type string.`);
|
|
386
387
|
}
|
|
387
|
-
}
|
|
388
|
+
}
|
|
388
389
|
// The queue of operations within a single `update` call in the format of <item key - list of operations updating the item>.
|
|
389
390
|
// This allows us to batch the operations per item and merge them into one operation in the order they were requested.
|
|
390
391
|
const updateQueue = {};
|
|
@@ -408,7 +409,7 @@ function update(data) {
|
|
|
408
409
|
};
|
|
409
410
|
const promises = [];
|
|
410
411
|
let clearPromise = Promise.resolve();
|
|
411
|
-
|
|
412
|
+
for (const { onyxMethod, key, value } of data) {
|
|
412
413
|
const handlers = {
|
|
413
414
|
[OnyxUtils_1.default.METHOD.SET]: enqueueSetOperation,
|
|
414
415
|
[OnyxUtils_1.default.METHOD.MERGE]: enqueueMergeOperation,
|
|
@@ -422,26 +423,30 @@ function update(data) {
|
|
|
422
423
|
const collectionKeys = Object.keys(collection);
|
|
423
424
|
if (OnyxUtils_1.default.doAllCollectionItemsBelongToSameParent(key, collectionKeys)) {
|
|
424
425
|
const mergedCollection = collection;
|
|
425
|
-
|
|
426
|
+
for (const collectionKey of collectionKeys)
|
|
427
|
+
enqueueMergeOperation(collectionKey, mergedCollection[collectionKey]);
|
|
426
428
|
}
|
|
427
429
|
},
|
|
428
430
|
[OnyxUtils_1.default.METHOD.SET_COLLECTION]: (k, v) => promises.push(() => setCollection(k, v)),
|
|
429
|
-
[OnyxUtils_1.default.METHOD.MULTI_SET]: (k, v) =>
|
|
431
|
+
[OnyxUtils_1.default.METHOD.MULTI_SET]: (k, v) => {
|
|
432
|
+
for (const [entryKey, entryValue] of Object.entries(v))
|
|
433
|
+
enqueueSetOperation(entryKey, entryValue);
|
|
434
|
+
},
|
|
430
435
|
[OnyxUtils_1.default.METHOD.CLEAR]: () => {
|
|
431
436
|
clearPromise = clear();
|
|
432
437
|
},
|
|
433
438
|
};
|
|
434
439
|
handlers[onyxMethod](key, value);
|
|
435
|
-
}
|
|
440
|
+
}
|
|
436
441
|
// Group all the collection-related keys and update each collection in a single `mergeCollection` call.
|
|
437
442
|
// This is needed to prevent multiple `mergeCollection` calls for the same collection and `merge` calls for the individual items of the said collection.
|
|
438
443
|
// This way, we ensure there is no race condition in the queued updates of the same key.
|
|
439
|
-
OnyxUtils_1.default.getCollectionKeys()
|
|
444
|
+
for (const collectionKey of OnyxUtils_1.default.getCollectionKeys()) {
|
|
440
445
|
const collectionItemKeys = Object.keys(updateQueue).filter((key) => OnyxUtils_1.default.isKeyMatch(collectionKey, key));
|
|
441
446
|
if (collectionItemKeys.length <= 1) {
|
|
442
447
|
// If there are no items of this collection in the updateQueue, we should skip it.
|
|
443
448
|
// If there is only one item, we should update it individually, therefore retain it in the updateQueue.
|
|
444
|
-
|
|
449
|
+
continue;
|
|
445
450
|
}
|
|
446
451
|
const batchedCollectionUpdates = collectionItemKeys.reduce((queue, key) => {
|
|
447
452
|
const operations = updateQueue[key];
|
|
@@ -477,17 +482,17 @@ function update(data) {
|
|
|
477
482
|
if (!utils_1.default.isEmptyObject(batchedCollectionUpdates.set)) {
|
|
478
483
|
promises.push(() => OnyxUtils_1.default.partialSetCollection({ collectionKey, collection: batchedCollectionUpdates.set }));
|
|
479
484
|
}
|
|
480
|
-
}
|
|
481
|
-
|
|
485
|
+
}
|
|
486
|
+
for (const [key, operations] of Object.entries(updateQueue)) {
|
|
482
487
|
if (operations[0] === null) {
|
|
483
488
|
const batchedChanges = OnyxUtils_1.default.mergeChanges(operations).result;
|
|
484
489
|
promises.push(() => set(key, batchedChanges));
|
|
485
|
-
|
|
490
|
+
continue;
|
|
486
491
|
}
|
|
487
|
-
|
|
492
|
+
for (const operation of operations) {
|
|
488
493
|
promises.push(() => merge(key, operation));
|
|
489
|
-
}
|
|
490
|
-
}
|
|
494
|
+
}
|
|
495
|
+
}
|
|
491
496
|
const snapshotPromises = OnyxUtils_1.default.updateSnapshots(data, merge);
|
|
492
497
|
// We need to run the snapshot updates before the other updates so the snapshot data can be updated before the loading state in the snapshot
|
|
493
498
|
const finalPromises = snapshotPromises.concat(promises);
|
|
@@ -526,7 +531,6 @@ const Onyx = {
|
|
|
526
531
|
};
|
|
527
532
|
function applyDecorators() {
|
|
528
533
|
// We are reassigning the functions directly so that internal function calls are also decorated
|
|
529
|
-
/* eslint-disable rulesdir/prefer-actions-set-data */
|
|
530
534
|
// @ts-expect-error Reassign
|
|
531
535
|
connect = (0, metrics_1.default)(connect, 'Onyx.connect');
|
|
532
536
|
// @ts-expect-error Reassign
|
|
@@ -543,6 +547,5 @@ function applyDecorators() {
|
|
|
543
547
|
update = (0, metrics_1.default)(update, 'Onyx.update');
|
|
544
548
|
// @ts-expect-error Reassign
|
|
545
549
|
clear = (0, metrics_1.default)(clear, 'Onyx.clear');
|
|
546
|
-
/* eslint-enable rulesdir/prefer-actions-set-data */
|
|
547
550
|
}
|
|
548
551
|
exports.default = Onyx;
|
package/dist/OnyxCache.js
CHANGED
|
@@ -179,7 +179,7 @@ class OnyxCache {
|
|
|
179
179
|
shouldRemoveNestedNulls: true,
|
|
180
180
|
objectRemovalMode: 'replace',
|
|
181
181
|
}).result);
|
|
182
|
-
|
|
182
|
+
for (const [key, value] of Object.entries(data)) {
|
|
183
183
|
this.addKey(key);
|
|
184
184
|
this.addToAccessedKeys(key);
|
|
185
185
|
const collectionKey = this.getCollectionKey(key);
|
|
@@ -200,7 +200,7 @@ class OnyxCache {
|
|
|
200
200
|
this.collectionData[collectionKey][key] = this.storageMap[key];
|
|
201
201
|
}
|
|
202
202
|
}
|
|
203
|
-
}
|
|
203
|
+
}
|
|
204
204
|
}
|
|
205
205
|
/**
|
|
206
206
|
* Check whether the given task is already running
|
|
@@ -332,14 +332,14 @@ class OnyxCache {
|
|
|
332
332
|
*/
|
|
333
333
|
addEvictableKeysToRecentlyAccessedList(isCollectionKeyFn, getAllKeysFn) {
|
|
334
334
|
return getAllKeysFn().then((keys) => {
|
|
335
|
-
this.evictionAllowList
|
|
336
|
-
|
|
335
|
+
for (const evictableKey of this.evictionAllowList) {
|
|
336
|
+
for (const key of keys) {
|
|
337
337
|
if (!this.isKeyMatch(evictableKey, key)) {
|
|
338
|
-
|
|
338
|
+
continue;
|
|
339
339
|
}
|
|
340
340
|
this.addLastAccessedKey(key, isCollectionKeyFn(key));
|
|
341
|
-
}
|
|
342
|
-
}
|
|
341
|
+
}
|
|
342
|
+
}
|
|
343
343
|
});
|
|
344
344
|
}
|
|
345
345
|
/**
|
|
@@ -359,12 +359,12 @@ class OnyxCache {
|
|
|
359
359
|
setCollectionKeys(collectionKeys) {
|
|
360
360
|
this.collectionKeys = collectionKeys;
|
|
361
361
|
// Initialize collection data for existing collection keys
|
|
362
|
-
|
|
362
|
+
for (const collectionKey of collectionKeys) {
|
|
363
363
|
if (this.collectionData[collectionKey]) {
|
|
364
|
-
|
|
364
|
+
continue;
|
|
365
365
|
}
|
|
366
366
|
this.collectionData[collectionKey] = {};
|
|
367
|
-
}
|
|
367
|
+
}
|
|
368
368
|
}
|
|
369
369
|
/**
|
|
370
370
|
* Check if a key is a collection key
|
|
@@ -81,14 +81,17 @@ class OnyxConnectionManager {
|
|
|
81
81
|
*/
|
|
82
82
|
fireCallbacks(connectionID) {
|
|
83
83
|
const connection = this.connectionsMap.get(connectionID);
|
|
84
|
-
|
|
84
|
+
if (!connection) {
|
|
85
|
+
return;
|
|
86
|
+
}
|
|
87
|
+
for (const callback of connection.callbacks.values()) {
|
|
85
88
|
if (connection.waitForCollectionCallback) {
|
|
86
89
|
callback(connection.cachedCallbackValue, connection.cachedCallbackKey, connection.sourceValue);
|
|
87
90
|
}
|
|
88
91
|
else {
|
|
89
92
|
callback(connection.cachedCallbackValue, connection.cachedCallbackKey);
|
|
90
93
|
}
|
|
91
|
-
}
|
|
94
|
+
}
|
|
92
95
|
}
|
|
93
96
|
/**
|
|
94
97
|
* Connects to an Onyx key given the options passed and listens to its changes.
|
|
@@ -168,12 +171,12 @@ class OnyxConnectionManager {
|
|
|
168
171
|
* Disconnect all subscribers from Onyx.
|
|
169
172
|
*/
|
|
170
173
|
disconnectAll() {
|
|
171
|
-
this.connectionsMap.
|
|
174
|
+
for (const [connectionID, connectionMetadata] of this.connectionsMap.entries()) {
|
|
172
175
|
OnyxUtils_1.default.unsubscribeFromKey(connectionMetadata.subscriptionID);
|
|
173
|
-
connectionMetadata.callbacks.
|
|
176
|
+
for (const callbackID of connectionMetadata.callbacks.keys()) {
|
|
174
177
|
this.removeFromEvictionBlockList({ id: connectionID, callbackID });
|
|
175
|
-
}
|
|
176
|
-
}
|
|
178
|
+
}
|
|
179
|
+
}
|
|
177
180
|
this.connectionsMap.clear();
|
|
178
181
|
// Clear snapshot cache when all connections are disconnected
|
|
179
182
|
OnyxSnapshotCache_1.default.clear();
|
package/dist/OnyxUtils.js
CHANGED
|
@@ -37,7 +37,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
37
37
|
};
|
|
38
38
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
39
|
exports.clearOnyxUtilsInternals = clearOnyxUtilsInternals;
|
|
40
|
-
/* eslint-disable no-continue */
|
|
41
40
|
const fast_equals_1 = require("fast-equals");
|
|
42
41
|
const pick_1 = __importDefault(require("lodash/pick"));
|
|
43
42
|
const underscore_1 = __importDefault(require("underscore"));
|
|
@@ -185,9 +184,9 @@ function maybeFlushBatchUpdates() {
|
|
|
185
184
|
batchUpdatesQueue = [];
|
|
186
185
|
batchUpdatesPromise = null;
|
|
187
186
|
(0, batch_1.default)(() => {
|
|
188
|
-
|
|
187
|
+
for (const applyUpdates of updatesCopy) {
|
|
189
188
|
applyUpdates();
|
|
190
|
-
}
|
|
189
|
+
}
|
|
191
190
|
});
|
|
192
191
|
resolve();
|
|
193
192
|
}, 0);
|
|
@@ -265,11 +264,11 @@ function multiGet(keys) {
|
|
|
265
264
|
*
|
|
266
265
|
* These missingKeys will be later used to multiGet the data from the storage.
|
|
267
266
|
*/
|
|
268
|
-
|
|
267
|
+
for (const key of keys) {
|
|
269
268
|
const cacheValue = OnyxCache_1.default.get(key);
|
|
270
269
|
if (cacheValue) {
|
|
271
270
|
dataMap.set(key, cacheValue);
|
|
272
|
-
|
|
271
|
+
continue;
|
|
273
272
|
}
|
|
274
273
|
const pendingKey = `${OnyxCache_1.TASK.GET}:${key}`;
|
|
275
274
|
if (OnyxCache_1.default.hasPendingTask(pendingKey)) {
|
|
@@ -279,13 +278,13 @@ function multiGet(keys) {
|
|
|
279
278
|
else {
|
|
280
279
|
missingKeys.push(key);
|
|
281
280
|
}
|
|
282
|
-
}
|
|
281
|
+
}
|
|
283
282
|
return (Promise.all(pendingTasks)
|
|
284
283
|
// Wait for all the pending tasks to resolve and then add the data to the data map.
|
|
285
284
|
.then((values) => {
|
|
286
|
-
values.
|
|
285
|
+
for (const [index, value] of values.entries()) {
|
|
287
286
|
dataMap.set(pendingKeys[index], value);
|
|
288
|
-
}
|
|
287
|
+
}
|
|
289
288
|
return Promise.resolve();
|
|
290
289
|
})
|
|
291
290
|
// Get the missing keys using multiGet from the storage.
|
|
@@ -302,13 +301,13 @@ function multiGet(keys) {
|
|
|
302
301
|
}
|
|
303
302
|
// temp object is used to merge the missing data into the cache
|
|
304
303
|
const temp = {};
|
|
305
|
-
|
|
304
|
+
for (const [key, value] of values) {
|
|
306
305
|
if (skippableCollectionMemberIDs.size) {
|
|
307
306
|
try {
|
|
308
307
|
const [, collectionMemberID] = splitCollectionMemberKey(key);
|
|
309
308
|
if (skippableCollectionMemberIDs.has(collectionMemberID)) {
|
|
310
309
|
// The key is a skippable one, so we skip this iteration.
|
|
311
|
-
|
|
310
|
+
continue;
|
|
312
311
|
}
|
|
313
312
|
}
|
|
314
313
|
catch (e) {
|
|
@@ -317,7 +316,7 @@ function multiGet(keys) {
|
|
|
317
316
|
}
|
|
318
317
|
dataMap.set(key, value);
|
|
319
318
|
temp[key] = value;
|
|
320
|
-
}
|
|
319
|
+
}
|
|
321
320
|
OnyxCache_1.default.merge(temp);
|
|
322
321
|
return dataMap;
|
|
323
322
|
}));
|
|
@@ -489,14 +488,14 @@ function getCachedCollection(collectionKey, collectionMemberKeys) {
|
|
|
489
488
|
// If we have specific member keys, filter the collection
|
|
490
489
|
if (collectionMemberKeys) {
|
|
491
490
|
const filteredCollection = {};
|
|
492
|
-
|
|
491
|
+
for (const key of collectionMemberKeys) {
|
|
493
492
|
if (collectionData[key] !== undefined) {
|
|
494
493
|
filteredCollection[key] = collectionData[key];
|
|
495
494
|
}
|
|
496
495
|
else if (OnyxCache_1.default.hasNullishStorageKey(key)) {
|
|
497
496
|
filteredCollection[key] = OnyxCache_1.default.get(key);
|
|
498
497
|
}
|
|
499
|
-
}
|
|
498
|
+
}
|
|
500
499
|
return filteredCollection;
|
|
501
500
|
}
|
|
502
501
|
// Return a copy to avoid mutations affecting the cache
|
|
@@ -505,19 +504,19 @@ function getCachedCollection(collectionKey, collectionMemberKeys) {
|
|
|
505
504
|
// Fallback to original implementation if collection data not available
|
|
506
505
|
const collection = {};
|
|
507
506
|
// forEach exists on both Set and Array
|
|
508
|
-
|
|
507
|
+
for (const key of allKeys) {
|
|
509
508
|
// If we don't have collectionMemberKeys array then we have to check whether a key is a collection member key.
|
|
510
509
|
// Because in that case the keys will be coming from `cache.getAllKeys()` and we need to filter out the keys that
|
|
511
510
|
// are not part of the collection.
|
|
512
511
|
if (!collectionMemberKeys && !isCollectionMemberKey(collectionKey, key)) {
|
|
513
|
-
|
|
512
|
+
continue;
|
|
514
513
|
}
|
|
515
514
|
const cachedValue = OnyxCache_1.default.get(key);
|
|
516
515
|
if (cachedValue === undefined && !OnyxCache_1.default.hasNullishStorageKey(key)) {
|
|
517
|
-
|
|
516
|
+
continue;
|
|
518
517
|
}
|
|
519
518
|
collection[key] = OnyxCache_1.default.get(key);
|
|
520
|
-
}
|
|
519
|
+
}
|
|
521
520
|
return collection;
|
|
522
521
|
}
|
|
523
522
|
/**
|
|
@@ -699,7 +698,7 @@ function addKeyToRecentlyAccessedIfNeeded(key) {
|
|
|
699
698
|
function getCollectionDataAndSendAsObject(matchingKeys, mapping) {
|
|
700
699
|
multiGet(matchingKeys).then((dataMap) => {
|
|
701
700
|
const data = Object.fromEntries(dataMap.entries());
|
|
702
|
-
sendDataToConnection(mapping, data,
|
|
701
|
+
sendDataToConnection(mapping, data, mapping.key);
|
|
703
702
|
});
|
|
704
703
|
}
|
|
705
704
|
/**
|
|
@@ -807,16 +806,16 @@ function hasPendingMergeForKey(key) {
|
|
|
807
806
|
*/
|
|
808
807
|
function prepareKeyValuePairsForStorage(data, shouldRemoveNestedNulls, replaceNullPatches, isProcessingCollectionUpdate) {
|
|
809
808
|
const pairs = [];
|
|
810
|
-
|
|
809
|
+
for (const [key, value] of Object.entries(data)) {
|
|
811
810
|
if (value === null) {
|
|
812
811
|
remove(key, isProcessingCollectionUpdate);
|
|
813
|
-
|
|
812
|
+
continue;
|
|
814
813
|
}
|
|
815
814
|
const valueWithoutNestedNullValues = (shouldRemoveNestedNulls !== null && shouldRemoveNestedNulls !== void 0 ? shouldRemoveNestedNulls : true) ? utils_1.default.removeNestedNullValues(value) : value;
|
|
816
815
|
if (valueWithoutNestedNullValues !== undefined) {
|
|
817
816
|
pairs.push([key, valueWithoutNestedNullValues, replaceNullPatches === null || replaceNullPatches === void 0 ? void 0 : replaceNullPatches[key]]);
|
|
818
817
|
}
|
|
819
|
-
}
|
|
818
|
+
}
|
|
820
819
|
return pairs;
|
|
821
820
|
}
|
|
822
821
|
/**
|
|
@@ -878,7 +877,8 @@ function initializeWithDefaultKeyStates() {
|
|
|
878
877
|
shouldRemoveNestedNulls: true,
|
|
879
878
|
}).result;
|
|
880
879
|
OnyxCache_1.default.merge(merged !== null && merged !== void 0 ? merged : {});
|
|
881
|
-
Object.entries(merged !== null && merged !== void 0 ? merged : {})
|
|
880
|
+
for (const [key, value] of Object.entries(merged !== null && merged !== void 0 ? merged : {}))
|
|
881
|
+
keyChanged(key, value);
|
|
882
882
|
});
|
|
883
883
|
}
|
|
884
884
|
/**
|
|
@@ -892,16 +892,16 @@ function isValidNonEmptyCollectionForMerge(collection) {
|
|
|
892
892
|
*/
|
|
893
893
|
function doAllCollectionItemsBelongToSameParent(collectionKey, collectionKeys) {
|
|
894
894
|
let hasCollectionKeyCheckFailed = false;
|
|
895
|
-
|
|
895
|
+
for (const dataKey of collectionKeys) {
|
|
896
896
|
if (isKeyMatch(collectionKey, dataKey)) {
|
|
897
|
-
|
|
897
|
+
continue;
|
|
898
898
|
}
|
|
899
899
|
if (process.env.NODE_ENV === 'development') {
|
|
900
900
|
throw new Error(`Provided collection doesn't have all its data belonging to the same parent. CollectionKey: ${collectionKey}, DataKey: ${dataKey}`);
|
|
901
901
|
}
|
|
902
902
|
hasCollectionKeyCheckFailed = true;
|
|
903
903
|
Logger.logAlert(`Provided collection doesn't have all its data belonging to the same parent. CollectionKey: ${collectionKey}, DataKey: ${dataKey}`);
|
|
904
|
-
}
|
|
904
|
+
}
|
|
905
905
|
return !hasCollectionKeyCheckFailed;
|
|
906
906
|
}
|
|
907
907
|
/**
|
|
@@ -947,12 +947,12 @@ function subscribeToKey(connectOptions) {
|
|
|
947
947
|
}
|
|
948
948
|
else {
|
|
949
949
|
// Collection case - need to iterate through all keys to find matches (O(n))
|
|
950
|
-
|
|
950
|
+
for (const key of keys) {
|
|
951
951
|
if (!isKeyMatch(mapping.key, key)) {
|
|
952
|
-
|
|
952
|
+
continue;
|
|
953
953
|
}
|
|
954
954
|
matchingKeys.push(key);
|
|
955
|
-
}
|
|
955
|
+
}
|
|
956
956
|
}
|
|
957
957
|
// If the key being connected to does not exist we initialize the value with null. For subscribers that connected
|
|
958
958
|
// directly via connect() they will simply get a null value sent to them without any information about which key matched
|
|
@@ -961,9 +961,10 @@ function subscribeToKey(connectOptions) {
|
|
|
961
961
|
if (mapping.key) {
|
|
962
962
|
OnyxCache_1.default.addNullishStorageKey(mapping.key);
|
|
963
963
|
}
|
|
964
|
+
const matchedKey = isCollectionKey(mapping.key) && mapping.waitForCollectionCallback ? mapping.key : undefined;
|
|
964
965
|
// Here we cannot use batching because the nullish value is expected to be set immediately for default props
|
|
965
966
|
// or they will be undefined.
|
|
966
|
-
sendDataToConnection(mapping, null,
|
|
967
|
+
sendDataToConnection(mapping, null, matchedKey);
|
|
967
968
|
return;
|
|
968
969
|
}
|
|
969
970
|
// When using a callback subscriber we will either trigger the provided callback for each key we find or combine all values
|
|
@@ -977,9 +978,9 @@ function subscribeToKey(connectOptions) {
|
|
|
977
978
|
}
|
|
978
979
|
// We did not opt into using waitForCollectionCallback mode so the callback is called for every matching key.
|
|
979
980
|
multiGet(matchingKeys).then((values) => {
|
|
980
|
-
values.
|
|
981
|
+
for (const [key, val] of values.entries()) {
|
|
981
982
|
sendDataToConnection(mapping, val, key);
|
|
982
|
-
}
|
|
983
|
+
}
|
|
983
984
|
});
|
|
984
985
|
return;
|
|
985
986
|
}
|
|
@@ -1011,42 +1012,42 @@ function updateSnapshots(data, mergeFn) {
|
|
|
1011
1012
|
return [];
|
|
1012
1013
|
const promises = [];
|
|
1013
1014
|
const snapshotCollection = getCachedCollection(snapshotCollectionKey);
|
|
1014
|
-
|
|
1015
|
+
for (const [snapshotEntryKey, snapshotEntryValue] of Object.entries(snapshotCollection)) {
|
|
1015
1016
|
// Snapshots may not be present in cache. We don't know how to update them so we skip.
|
|
1016
1017
|
if (!snapshotEntryValue) {
|
|
1017
|
-
|
|
1018
|
+
continue;
|
|
1018
1019
|
}
|
|
1019
1020
|
let updatedData = {};
|
|
1020
|
-
|
|
1021
|
+
for (const { key, value } of data) {
|
|
1021
1022
|
// snapshots are normal keys so we want to skip update if they are written to Onyx
|
|
1022
1023
|
if (isCollectionMemberKey(snapshotCollectionKey, key)) {
|
|
1023
|
-
|
|
1024
|
+
continue;
|
|
1024
1025
|
}
|
|
1025
1026
|
if (typeof snapshotEntryValue !== 'object' || !('data' in snapshotEntryValue)) {
|
|
1026
|
-
|
|
1027
|
+
continue;
|
|
1027
1028
|
}
|
|
1028
1029
|
const snapshotData = snapshotEntryValue.data;
|
|
1029
1030
|
if (!snapshotData || !snapshotData[key]) {
|
|
1030
|
-
|
|
1031
|
+
continue;
|
|
1031
1032
|
}
|
|
1032
1033
|
if (Array.isArray(value) || Array.isArray(snapshotData[key])) {
|
|
1033
1034
|
updatedData[key] = value || [];
|
|
1034
|
-
|
|
1035
|
+
continue;
|
|
1035
1036
|
}
|
|
1036
1037
|
if (value === null) {
|
|
1037
1038
|
updatedData[key] = value;
|
|
1038
|
-
|
|
1039
|
+
continue;
|
|
1039
1040
|
}
|
|
1040
1041
|
const oldValue = updatedData[key] || {};
|
|
1041
1042
|
const newValue = (0, pick_1.default)(value, Object.keys(snapshotData[key]));
|
|
1042
1043
|
updatedData = Object.assign(Object.assign({}, updatedData), { [key]: Object.assign(oldValue, newValue) });
|
|
1043
|
-
}
|
|
1044
|
+
}
|
|
1044
1045
|
// Skip the update if there's no data to be merged
|
|
1045
1046
|
if (utils_1.default.isEmptyObject(updatedData)) {
|
|
1046
|
-
|
|
1047
|
+
continue;
|
|
1047
1048
|
}
|
|
1048
1049
|
promises.push(() => mergeFn(snapshotEntryKey, { data: updatedData }));
|
|
1049
|
-
}
|
|
1050
|
+
}
|
|
1050
1051
|
return promises;
|
|
1051
1052
|
}
|
|
1052
1053
|
/**
|
|
@@ -1200,18 +1201,19 @@ function setCollectionWithRetry({ collectionKey, collection }, retryAttempt) {
|
|
|
1200
1201
|
resultCollectionKeys = Object.keys(resultCollection);
|
|
1201
1202
|
return OnyxUtils.getAllKeys().then((persistedKeys) => {
|
|
1202
1203
|
const mutableCollection = Object.assign({}, resultCollection);
|
|
1203
|
-
|
|
1204
|
+
for (const key of persistedKeys) {
|
|
1204
1205
|
if (!key.startsWith(collectionKey)) {
|
|
1205
|
-
|
|
1206
|
+
continue;
|
|
1206
1207
|
}
|
|
1207
1208
|
if (resultCollectionKeys.includes(key)) {
|
|
1208
|
-
|
|
1209
|
+
continue;
|
|
1209
1210
|
}
|
|
1210
1211
|
mutableCollection[key] = null;
|
|
1211
|
-
}
|
|
1212
|
+
}
|
|
1212
1213
|
const keyValuePairs = OnyxUtils.prepareKeyValuePairsForStorage(mutableCollection, true, undefined, true);
|
|
1213
1214
|
const previousCollection = OnyxUtils.getCachedCollection(collectionKey);
|
|
1214
|
-
|
|
1215
|
+
for (const [key, value] of keyValuePairs)
|
|
1216
|
+
OnyxCache_1.default.set(key, value);
|
|
1215
1217
|
const updatePromise = OnyxUtils.scheduleNotifyCollectionSubscribers(collectionKey, mutableCollection, previousCollection);
|
|
1216
1218
|
return storage_1.default.multiSet(keyValuePairs)
|
|
1217
1219
|
.catch((error) => OnyxUtils.retryOperation(error, setCollectionWithRetry, { collectionKey, collection }, retryAttempt))
|
|
@@ -1285,12 +1287,12 @@ function mergeCollectionWithPatches({ collectionKey, collection, mergeReplaceNul
|
|
|
1285
1287
|
return obj;
|
|
1286
1288
|
}, {});
|
|
1287
1289
|
const newCollection = {};
|
|
1288
|
-
|
|
1290
|
+
for (const key of keys) {
|
|
1289
1291
|
if (persistedKeys.has(key)) {
|
|
1290
|
-
|
|
1292
|
+
continue;
|
|
1291
1293
|
}
|
|
1292
1294
|
newCollection[key] = resultCollection[key];
|
|
1293
|
-
}
|
|
1295
|
+
}
|
|
1294
1296
|
// When (multi-)merging the values with the existing values in storage,
|
|
1295
1297
|
// we don't want to remove nested null values from the data that we pass to the storage layer,
|
|
1296
1298
|
// because the storage layer uses them to remove nested keys from storage natively.
|
|
@@ -1367,7 +1369,8 @@ function partialSetCollection({ collectionKey, collection }, retryAttempt) {
|
|
|
1367
1369
|
const existingKeys = resultCollectionKeys.filter((key) => persistedKeys.has(key));
|
|
1368
1370
|
const previousCollection = getCachedCollection(collectionKey, existingKeys);
|
|
1369
1371
|
const keyValuePairs = prepareKeyValuePairsForStorage(mutableCollection, true, undefined, true);
|
|
1370
|
-
|
|
1372
|
+
for (const [key, value] of keyValuePairs)
|
|
1373
|
+
OnyxCache_1.default.set(key, value);
|
|
1371
1374
|
const updatePromise = scheduleNotifyCollectionSubscribers(collectionKey, mutableCollection, previousCollection);
|
|
1372
1375
|
return storage_1.default.multiSet(keyValuePairs)
|
|
1373
1376
|
.catch((error) => retryOperation(error, partialSetCollection, { collectionKey, collection }, retryAttempt))
|
|
@@ -3,7 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
const ModuleProxy_1 = require("../ModuleProxy");
|
|
4
4
|
const PerformanceProxy = (0, ModuleProxy_1.createModuleProxy)(() => {
|
|
5
5
|
try {
|
|
6
|
-
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
|
7
6
|
return require('react-native-performance').default;
|
|
8
7
|
}
|
|
9
8
|
catch (_a) {
|
package/dist/logMessages.js
CHANGED
|
@@ -1,8 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
const logMessages = {
|
|
4
|
-
incompatibleUpdateAlert: (key, operation, existingValueType, newValueType) => {
|
|
5
|
-
return `Warning: Trying to apply "${operation}" with ${newValueType !== null && newValueType !== void 0 ? newValueType : 'unknown'} type to ${existingValueType !== null && existingValueType !== void 0 ? existingValueType : 'unknown'} type in the key "${key}"`;
|
|
6
|
-
},
|
|
4
|
+
incompatibleUpdateAlert: (key, operation, existingValueType, newValueType) => `Warning: Trying to apply "${operation}" with ${newValueType !== null && newValueType !== void 0 ? newValueType : 'unknown'} type to ${existingValueType !== null && existingValueType !== void 0 ? existingValueType : 'unknown'} type in the key "${key}"`,
|
|
7
5
|
};
|
|
8
6
|
exports.default = logMessages;
|
|
@@ -14,9 +14,9 @@ function raiseStorageSyncEvent(onyxKey) {
|
|
|
14
14
|
global.localStorage.removeItem(SYNC_ONYX);
|
|
15
15
|
}
|
|
16
16
|
function raiseStorageSyncManyKeysEvent(onyxKeys) {
|
|
17
|
-
|
|
17
|
+
for (const onyxKey of onyxKeys) {
|
|
18
18
|
raiseStorageSyncEvent(onyxKey);
|
|
19
|
-
}
|
|
19
|
+
}
|
|
20
20
|
}
|
|
21
21
|
let storage = NoopProvider_1.default;
|
|
22
22
|
const InstanceSync = {
|
|
@@ -6,7 +6,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
6
6
|
exports.setMockStore = exports.mockStore = exports.mockSet = void 0;
|
|
7
7
|
const underscore_1 = __importDefault(require("underscore"));
|
|
8
8
|
const utils_1 = __importDefault(require("../../utils"));
|
|
9
|
-
// eslint-disable-next-line import/no-mutable-exports
|
|
10
9
|
const storeInternal = {};
|
|
11
10
|
exports.mockStore = storeInternal;
|
|
12
11
|
const setInternal = (key, value) => {
|
|
@@ -70,14 +69,14 @@ const provider = {
|
|
|
70
69
|
* This function also removes all nested null values from an object.
|
|
71
70
|
*/
|
|
72
71
|
multiMerge(pairs) {
|
|
73
|
-
|
|
72
|
+
for (const [key, value] of pairs) {
|
|
74
73
|
const existingValue = provider.store[key];
|
|
75
74
|
const newValue = utils_1.default.fastMerge(existingValue, value, {
|
|
76
75
|
shouldRemoveNestedNulls: true,
|
|
77
76
|
objectRemovalMode: 'replace',
|
|
78
77
|
}).result;
|
|
79
78
|
set(key, newValue);
|
|
80
|
-
}
|
|
79
|
+
}
|
|
81
80
|
return Promise.resolve();
|
|
82
81
|
},
|
|
83
82
|
/**
|
package/dist/types.d.ts
CHANGED
|
@@ -370,9 +370,7 @@ type GenericFunction = (...args: any[]) => any;
|
|
|
370
370
|
* Represents a record where the key is a collection member key and the value is a list of
|
|
371
371
|
* tuples that we'll use to replace the nested objects of that collection member record with something else.
|
|
372
372
|
*/
|
|
373
|
-
type MultiMergeReplaceNullPatches =
|
|
374
|
-
[TKey in OnyxKey]: FastMergeReplaceNullPatch[];
|
|
375
|
-
};
|
|
373
|
+
type MultiMergeReplaceNullPatches = Record<OnyxKey, FastMergeReplaceNullPatch[]>;
|
|
376
374
|
/**
|
|
377
375
|
* Represents a combination of Merge and Set operations that should be executed in Onyx
|
|
378
376
|
*/
|
package/dist/utils.js
CHANGED
|
@@ -44,32 +44,32 @@ function mergeObject(target, source, options, metadata, basePath) {
|
|
|
44
44
|
// If "shouldRemoveNestedNulls" is true, we want to remove null values from the merged object
|
|
45
45
|
// and therefore we need to omit keys where either the source or target value is null.
|
|
46
46
|
if (targetObject) {
|
|
47
|
-
Object.keys(targetObject)
|
|
47
|
+
for (const key of Object.keys(targetObject)) {
|
|
48
48
|
const targetProperty = targetObject === null || targetObject === void 0 ? void 0 : targetObject[key];
|
|
49
49
|
const sourceProperty = source === null || source === void 0 ? void 0 : source[key];
|
|
50
50
|
// If "shouldRemoveNestedNulls" is true, we want to remove (nested) null values from the merged object.
|
|
51
51
|
// If either the source or target value is null, we want to omit the key from the merged object.
|
|
52
52
|
const shouldOmitNullishProperty = options.shouldRemoveNestedNulls && (targetProperty === null || sourceProperty === null);
|
|
53
53
|
if (targetProperty === undefined || shouldOmitNullishProperty) {
|
|
54
|
-
|
|
54
|
+
continue;
|
|
55
55
|
}
|
|
56
56
|
destination[key] = targetProperty;
|
|
57
|
-
}
|
|
57
|
+
}
|
|
58
58
|
}
|
|
59
59
|
// After copying over all keys from the target object, we want to merge the source object into the destination object.
|
|
60
|
-
Object.keys(source)
|
|
60
|
+
for (const key of Object.keys(source)) {
|
|
61
61
|
let targetProperty = targetObject === null || targetObject === void 0 ? void 0 : targetObject[key];
|
|
62
62
|
const sourceProperty = source === null || source === void 0 ? void 0 : source[key];
|
|
63
63
|
// If "shouldRemoveNestedNulls" is true, we want to remove (nested) null values from the merged object.
|
|
64
64
|
// If the source value is null, we want to omit the key from the merged object.
|
|
65
65
|
const shouldOmitNullishProperty = options.shouldRemoveNestedNulls && sourceProperty === null;
|
|
66
66
|
if (sourceProperty === undefined || shouldOmitNullishProperty) {
|
|
67
|
-
|
|
67
|
+
continue;
|
|
68
68
|
}
|
|
69
69
|
// If the source value is not a mergable object, we need to set the key directly.
|
|
70
70
|
if (!isMergeableObject(sourceProperty)) {
|
|
71
71
|
destination[key] = sourceProperty;
|
|
72
|
-
|
|
72
|
+
continue;
|
|
73
73
|
}
|
|
74
74
|
// If "shouldMarkRemovedObjects" is enabled and the previous merge change (targetProperty) is null,
|
|
75
75
|
// it means we want to fully replace this object when merging the batched changes with the Onyx value.
|
|
@@ -88,10 +88,10 @@ function mergeObject(target, source, options, metadata, basePath) {
|
|
|
88
88
|
const sourcePropertyWithoutMark = Object.assign({}, sourceProperty);
|
|
89
89
|
delete sourcePropertyWithoutMark.ONYX_INTERNALS__REPLACE_OBJECT_MARK;
|
|
90
90
|
destination[key] = sourcePropertyWithoutMark;
|
|
91
|
-
|
|
91
|
+
continue;
|
|
92
92
|
}
|
|
93
93
|
destination[key] = fastMerge(targetProperty, sourceProperty, options, metadata, [...basePath, key]).result;
|
|
94
|
-
}
|
|
94
|
+
}
|
|
95
95
|
return destination;
|
|
96
96
|
}
|
|
97
97
|
/** Checks whether the given object is an object and not null/undefined. */
|
|
@@ -119,7 +119,6 @@ function removeNestedNullValues(value) {
|
|
|
119
119
|
for (const key in value) {
|
|
120
120
|
const propertyValue = value[key];
|
|
121
121
|
if (propertyValue === null || propertyValue === undefined) {
|
|
122
|
-
// eslint-disable-next-line no-continue
|
|
123
122
|
continue;
|
|
124
123
|
}
|
|
125
124
|
if (typeof propertyValue === 'object' && !Array.isArray(propertyValue)) {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "react-native-onyx",
|
|
3
|
-
"version": "3.0.27",
|
|
3
|
+
"version": "3.0.29",
|
|
4
4
|
"author": "Expensify, Inc.",
|
|
5
5
|
"homepage": "https://expensify.com",
|
|
6
6
|
"description": "State management for React Native",
|
|
@@ -54,7 +54,7 @@
|
|
|
54
54
|
"devDependencies": {
|
|
55
55
|
"@actions/core": "^1.10.1",
|
|
56
56
|
"@jest/globals": "^29.7.0",
|
|
57
|
-
"@lwc/eslint-plugin-lwc": "^
|
|
57
|
+
"@lwc/eslint-plugin-lwc": "^3.3.0",
|
|
58
58
|
"@ngneat/falso": "^7.3.0",
|
|
59
59
|
"@react-native-community/eslint-config": "^3.2.0",
|
|
60
60
|
"@react-native/babel-preset": "0.76.3",
|
|
@@ -68,16 +68,17 @@
|
|
|
68
68
|
"@types/react-dom": "^18.2.18",
|
|
69
69
|
"@types/react-native": "^0.70.0",
|
|
70
70
|
"@types/underscore": "^1.11.15",
|
|
71
|
-
"@typescript-eslint/eslint-plugin": "^
|
|
72
|
-
"@typescript-eslint/parser": "^
|
|
71
|
+
"@typescript-eslint/eslint-plugin": "^8.51.0",
|
|
72
|
+
"@typescript-eslint/parser": "^8.51.0",
|
|
73
73
|
"@vercel/ncc": "0.38.1",
|
|
74
74
|
"date-fns": "^4.1.0",
|
|
75
|
-
"eslint": "^
|
|
76
|
-
"eslint-config-expensify": "^2.0.
|
|
77
|
-
"eslint-config-prettier": "^
|
|
78
|
-
"eslint-
|
|
79
|
-
"eslint-plugin-
|
|
80
|
-
"eslint-plugin-
|
|
75
|
+
"eslint": "^9.39.2",
|
|
76
|
+
"eslint-config-expensify": "^2.0.102",
|
|
77
|
+
"eslint-config-prettier": "^9.1.0",
|
|
78
|
+
"eslint-import-resolver-typescript": "^4.4.4",
|
|
79
|
+
"eslint-plugin-import": "^2.31.0",
|
|
80
|
+
"eslint-plugin-jsx-a11y": "^6.10.2",
|
|
81
|
+
"eslint-plugin-react": "^7.37.5",
|
|
81
82
|
"fake-indexeddb": "^6.2.5",
|
|
82
83
|
"idb-keyval": "^6.2.1",
|
|
83
84
|
"jest": "^29.7.0",
|