react-native-onyx 1.0.120 → 1.0.122
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/web.development.js +118 -110
- package/dist/web.development.js.map +1 -1
- package/dist/web.min.js.map +1 -1
- package/lib/Logger.js +1 -5
- package/lib/MDTable.js +11 -14
- package/lib/Onyx.d.ts +1 -4
- package/lib/Onyx.js +237 -232
- package/lib/OnyxCache.js +12 -3
- package/lib/Str.js +1 -3
- package/lib/compose.js +6 -2
- package/lib/metrics/PerformanceUtils.js +2 -7
- package/lib/metrics/index.native.js +28 -41
- package/lib/metrics/index.web.js +4 -7
- package/lib/storage/WebStorage.js +5 -10
- package/lib/storage/__mocks__/index.js +2 -2
- package/lib/storage/providers/IDBKeyVal.js +27 -37
- package/lib/storage/providers/SQLiteStorage.js +58 -62
- package/lib/types.d.ts +1 -13
- package/lib/utils.d.ts +2 -6
- package/lib/utils.js +19 -22
- package/lib/withOnyx.d.ts +8 -32
- package/lib/withOnyx.js +37 -34
- package/package.json +5 -3
package/lib/OnyxCache.js
CHANGED

@@ -43,8 +43,17 @@ class OnyxCache {
         // bind all public methods to prevent problems with `this`
         _.bindAll(
             this,
-            'getAllKeys',
-            '
+            'getAllKeys',
+            'getValue',
+            'hasCacheForKey',
+            'addKey',
+            'set',
+            'drop',
+            'merge',
+            'hasPendingTask',
+            'getTaskPromise',
+            'captureTask',
+            'removeLeastRecentlyUsedKeys',
             'setRecentKeysLimit',
         );
     }

@@ -126,7 +135,7 @@ class OnyxCache {
         const storageKeys = this.getAllKeys();
         const mergedKeys = _.keys(data);
         this.storageKeys = new Set([...storageKeys, ...mergedKeys]);
-        _.each(mergedKeys, key => this.addToAccessedKeys(key));
+        _.each(mergedKeys, (key) => this.addToAccessedKeys(key));
     }

     /**
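
The only behavioural piece in this file is the widened `_.bindAll` list: every public cache method is bound to the instance up front so it can be handed around as a bare callback without losing `this`. A minimal sketch of that underscore pattern, using an illustrative class rather than the package's `OnyxCache`:

```js
import _ from 'underscore';

class SimpleCache {
    constructor() {
        this.storage = new Map();

        // Without bindAll, `getValue` would lose `this` when detached below.
        _.bindAll(this, 'getValue', 'set');
    }

    getValue(key) {
        return this.storage.get(key);
    }

    set(key, value) {
        this.storage.set(key, value);
    }
}

const cache = new SimpleCache();
cache.set('session', {authToken: 'abc'});

// Safe to hand the method out on its own, e.g. as a promise callback.
const read = cache.getValue;
console.log(read('session')); // {authToken: 'abc'}
```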
package/lib/Str.js
CHANGED

@@ -8,9 +8,7 @@ import _ from 'underscore';
  * @return {Boolean} Returns true if the haystack starts with the needle.
  */
 function startsWith(haystack, needle) {
-    return _.isString(haystack)
-        && _.isString(needle)
-        && haystack.startsWith(needle);
+    return _.isString(haystack) && _.isString(needle) && haystack.startsWith(needle);
 }

 /**
package/lib/compose.js
CHANGED

@@ -18,7 +18,7 @@
  */
 export default function compose(...funcs) {
     if (funcs.length === 0) {
-        return arg => arg;
+        return (arg) => arg;
     }

     if (funcs.length === 1) {

@@ -26,5 +26,9 @@ export default function compose(...funcs) {
     }

     // eslint-disable-next-line rulesdir/prefer-underscore-method
-    return funcs.reduce(
+    return funcs.reduce(
+        (a, b) =>
+            (...args) =>
+                a(b(...args)),
+    );
 }
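
The reformatted `reduce` above is the standard right-to-left function composition; only the formatting changed. A short usage sketch (the deep import path and the `addOne`/`double` helpers are illustrative):

```js
import compose from 'react-native-onyx/lib/compose';

const addOne = (x) => x + 1;
const double = (x) => x * 2;

// compose(f, g)(x) === f(g(x)): `double` runs first, then `addOne`.
const doubleThenAddOne = compose(addOne, double);

console.log(doubleThenAddOne(5)); // 11
console.log(compose()(5)); // 5 (zero functions -> identity)
```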
package/lib/metrics/PerformanceUtils.js
CHANGED

@@ -26,9 +26,7 @@ function diffObject(object, base) {
             }

             // eslint-disable-next-line no-param-reassign
-            result[key] =
-                ? changes(value, comparisonObject[key])
-                : value;
+            result[key] = _.isObject(value) && _.isObject(comparisonObject[key]) ? changes(value, comparisonObject[key]) : value;
         });
     }
     return changes(object, base);

@@ -62,7 +60,4 @@ function logSetStateCall(mapping, previousValue, newValue, caller, keyThatChange
     console.debug(`[Onyx-Debug] ${mapping.displayName} setState() called. Subscribed to key '${mapping.key}' (${caller})`, logParams);
 }

-export {
-    logSetStateCall,
-    setShouldDebugSetState,
-};
+export {logSetStateCall, setShouldDebugSetState};
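
For context, the rebuilt ternary is the heart of `diffObject`: a key lands in the result only when it differs from the comparison object, and nested objects are diffed recursively, which is what feeds the `[Onyx-Debug]` setState log. A self-contained sketch of that idea in plain JavaScript (a simplified stand-in, not the package's implementation):

```js
function diffObject(object, base) {
    const result = {};
    Object.keys(object).forEach((key) => {
        const value = object[key];
        const baseValue = base[key];

        // Simplification: JSON comparison instead of a proper deep-equal check.
        if (JSON.stringify(value) === JSON.stringify(baseValue)) {
            return; // unchanged keys are skipped
        }

        const bothObjects = value !== null && typeof value === 'object' && baseValue !== null && typeof baseValue === 'object';
        result[key] = bothObjects ? diffObject(value, baseValue) : value;
    });
    return result;
}

const previous = {login: 'a@b.com', isLoading: true, nested: {count: 1, label: 'x'}};
const next = {login: 'a@b.com', isLoading: false, nested: {count: 2, label: 'x'}};

console.log(diffObject(next, previous)); // {isLoading: false, nested: {count: 2}}
```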
package/lib/metrics/index.native.js
CHANGED

@@ -47,9 +47,9 @@ function decorateWithMetrics(func, alias = func.name) {
         const originalPromise = func.apply(this, args);

         /*
-
-
-
+         * Then handlers added here are not affecting the original promise
+         * They create a separate chain that's not exposed (returned) to the original caller
+         * */
         originalPromise
             .then((result) => {
                 measureMarkToNow(mark, {result});

@@ -84,26 +84,29 @@ function sum(list, prop) {
  */
 function getMetrics() {
     const summaries = _.chain(performance.getEntriesByType('measure'))
-        .filter(entry => entry.detail && decoratedAliases.has(entry.detail.alias))
-        .groupBy(entry => entry.detail.alias)
+        .filter((entry) => entry.detail && decoratedAliases.has(entry.detail.alias))
+        .groupBy((entry) => entry.detail.alias)
         .map((calls, methodName) => {
             const total = sum(calls, 'duration');
-            const avg =
+            const avg = total / calls.length || 0;
             const max = _.max(calls, 'duration').duration || 0;
             const min = _.min(calls, 'duration').duration || 0;

             // Latest complete call (by end time) for all the calls made to the current method
-            const lastCall = _.max(calls, call => call.startTime + call.duration);
+            const lastCall = _.max(calls, (call) => call.startTime + call.duration);

-            return [
+            return [
                 methodName,
-
-
-
-
-
-
-
+                {
+                    methodName,
+                    total,
+                    max,
+                    min,
+                    avg,
+                    lastCall,
+                    calls,
+                },
+            ];
         })
         .object() // Create a map like methodName -> StatSummary
         .value();

@@ -111,10 +114,7 @@ function getMetrics() {
     const totalTime = sum(_.values(summaries), 'total');

     // Latest complete call (by end time) of all methods up to this point
-    const lastCompleteCall = _.max(
-        _.values(summaries),
-        summary => summary.lastCall.startTime + summary.lastCall.duration,
-    ).lastCall;
+    const lastCompleteCall = _.max(_.values(summaries), (summary) => summary.lastCall.startTime + summary.lastCall.duration).lastCall;

     return {
         totalTime,

@@ -177,7 +177,7 @@ function printMetrics({raw = false, format = 'console', methods} = {}) {
     const methodNames = _.isArray(methods) ? methods : _.keys(summaries);

     const methodCallTables = _.chain(methodNames)
-        .filter(methodName => summaries[methodName] && summaries[methodName].avg > 0)
+        .filter((methodName) => summaries[methodName] && summaries[methodName].avg > 0)
         .map((methodName) => {
             const {calls, ...methodStats} = summaries[methodName];
             tableSummary.addRow(

@@ -186,7 +186,7 @@ function printMetrics({raw = false, format = 'console', methods} = {}) {
                 toDuration(methodStats.max, raw),
                 toDuration(methodStats.min, raw),
                 toDuration(methodStats.avg, raw),
-                toDuration(
+                toDuration(methodStats.lastCall.startTime + methodStats.lastCall.duration - timeOrigin, raw),
                 calls.length,
             );

@@ -194,12 +194,12 @@ function printMetrics({raw = false, format = 'console', methods} = {}) {
                 title: methodName,
                 heading: ['start time', 'end time', 'duration', 'args'],
                 leftAlignedCols: [3],
-                rows: _.map(calls, call =>
+                rows: _.map(calls, (call) => [
                     toDuration(call.startTime - performance.timeOrigin, raw),
-                    toDuration(
+                    toDuration(call.startTime + call.duration - timeOrigin, raw),
                     toDuration(call.duration, raw),
                     _.map(call.detail.args, String).join(', ').slice(0, 60), // Restrict cell width to 60 chars max
-                ])
+                ]),
             });
         })
         .value();

@@ -219,17 +219,9 @@ function printMetrics({raw = false, format = 'console', methods} = {}) {
         }).join('\n\n');
     }

-    const lastComplete = lastCompleteCall && toDuration(
-        (lastCompleteCall.startTime + lastCompleteCall.duration) - timeOrigin, raw,
-    );
+    const lastComplete = lastCompleteCall && toDuration(lastCompleteCall.startTime + lastCompleteCall.duration - timeOrigin, raw);

-    const mainOutput = [
-        '### Onyx Benchmark',
-        ` - Total: ${toDuration(totalTime, raw)}`,
-        ` - Last call finished at: ${lastComplete || 'N/A'}`,
-        '',
-        tableSummary.toString(),
-    ];
+    const mainOutput = ['### Onyx Benchmark', ` - Total: ${toDuration(totalTime, raw)}`, ` - Last call finished at: ${lastComplete || 'N/A'}`, '', tableSummary.toString()];

     /* eslint-disable no-console */
     console.info(mainOutput.join('\n'));

@@ -248,7 +240,7 @@ function resetMetrics() {
     const {summaries} = getMetrics();

     _.chain(summaries)
-        .map(summary => summary.calls)
+        .map((summary) => summary.calls)
         .flatten()
         .each((measure) => {
             performance.clearMarks(measure.detail.alias);

@@ -256,9 +248,4 @@ function resetMetrics() {
         });
 }

-export {
-    decorateWithMetrics,
-    getMetrics,
-    resetMetrics,
-    printMetrics,
-};
+export {decorateWithMetrics, getMetrics, resetMetrics, printMetrics};
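
On native, `decorateWithMetrics` wraps a promise-returning function in `performance.mark`/`performance.measure` calls, and `getMetrics`/`printMetrics` aggregate those measures into the per-method summaries seen above. A rough usage sketch of that exported API; the deep import path (resolved by React Native's platform extensions) and the `fetchReport` target are illustrative assumptions:

```js
import {decorateWithMetrics, getMetrics, printMetrics, resetMetrics} from 'react-native-onyx/lib/metrics';

// Hypothetical async work we want to profile.
function fetchReport(id) {
    return Promise.resolve({id, total: 42});
}

// Wrap it; each call now records a performance measure under the alias.
const fetchReportWithMetrics = decorateWithMetrics(fetchReport, 'fetchReport');

Promise.all([fetchReportWithMetrics(1), fetchReportWithMetrics(2)]).then(() => {
    const {totalTime, summaries} = getMetrics();
    console.log(totalTime, summaries.fetchReport.avg);

    printMetrics(); // logs the "### Onyx Benchmark" tables assembled above
    resetMetrics(); // clears the recorded marks and measures
});
```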
package/lib/metrics/index.web.js
CHANGED

@@ -1,13 +1,10 @@
 // For web-only implementations of Onyx, this module will just be a no-op

-function decorateWithMetrics(func) {
+function decorateWithMetrics(func) {
+    return func;
+}
 function getMetrics() {}
 function printMetrics() {}
 function resetMetrics() {}

-export {
-    decorateWithMetrics,
-    getMetrics,
-    resetMetrics,
-    printMetrics,
-};
+export {decorateWithMetrics, getMetrics, resetMetrics, printMetrics};
package/lib/storage/WebStorage.js
CHANGED

@@ -31,17 +31,13 @@ const webStorage = {
      */
     keepInstancesSync(onStorageKeyChanged) {
         // Override set, remove and clear to raise storage events that we intercept in other tabs
-        this.setItem = (key, value) => Storage.setItem(key, value)
-            .then(() => raiseStorageSyncEvent(key));
+        this.setItem = (key, value) => Storage.setItem(key, value).then(() => raiseStorageSyncEvent(key));

-        this.removeItem = key => Storage.removeItem(key)
-            .then(() => raiseStorageSyncEvent(key));
+        this.removeItem = (key) => Storage.removeItem(key).then(() => raiseStorageSyncEvent(key));

-        this.removeItems = keys => Storage.removeItems(keys)
-            .then(() => raiseStorageSyncManyKeysEvent(keys));
+        this.removeItems = (keys) => Storage.removeItems(keys).then(() => raiseStorageSyncManyKeysEvent(keys));

-        this.mergeItem = (key, batchedChanges, modifiedData) => Storage.mergeItem(key, batchedChanges, modifiedData)
-            .then(() => raiseStorageSyncEvent(key));
+        this.mergeItem = (key, batchedChanges, modifiedData) => Storage.mergeItem(key, batchedChanges, modifiedData).then(() => raiseStorageSyncEvent(key));

         // If we just call Storage.clear other tabs will have no idea which keys were available previously
         // so that they can call keysChanged for them. That's why we iterate over every key and raise a storage sync

@@ -70,8 +66,7 @@ const webStorage = {
             }

             const onyxKey = event.newValue;
-            Storage.getItem(onyxKey)
-                .then(value => onStorageKeyChanged(onyxKey, value));
+            Storage.getItem(onyxKey).then((value) => onStorageKeyChanged(onyxKey, value));
         });
     },
 };
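
`keepInstancesSync` works by raising a `storage` event after every write so other tabs can re-read the changed Onyx key; the collapsed one-liners above don't change that flow. A minimal sketch of the underlying browser mechanism (the `SYNC_EVENT_KEY` name and the logging are illustrative, not the package's code):

```js
const SYNC_EVENT_KEY = '__onyx_sync__';

// Writer tab: after persisting a value, "raise" a storage event by touching a
// dedicated localStorage key whose value is the Onyx key that changed.
function raiseStorageSyncEvent(onyxKey) {
    window.localStorage.setItem(SYNC_EVENT_KEY, onyxKey);
    window.localStorage.removeItem(SYNC_EVENT_KEY);
}

// Other tabs: the 'storage' event only fires for changes made by *other* tabs,
// so each listener can re-read the key from the shared storage provider.
window.addEventListener('storage', (event) => {
    if (event.key !== SYNC_EVENT_KEY || !event.newValue) {
        return;
    }
    const onyxKey = event.newValue;
    console.log(`Key changed in another tab: ${onyxKey}`);
});
```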
package/lib/storage/__mocks__/index.js
CHANGED

@@ -14,13 +14,13 @@ const idbKeyvalMock = {
     },
     multiSet(pairs) {
         const setPromises = _.map(pairs, ([key, value]) => this.setItem(key, value));
-        return new Promise(resolve => Promise.all(setPromises).then(() => resolve(storageMapInternal)));
+        return new Promise((resolve) => Promise.all(setPromises).then(() => resolve(storageMapInternal)));
     },
     getItem(key) {
         return Promise.resolve(storageMapInternal[key]);
     },
     multiGet(keys) {
-        const getPromises = _.map(keys, key => new Promise(resolve => this.getItem(key).then(value => resolve([key, value]))));
+        const getPromises = _.map(keys, (key) => new Promise((resolve) => this.getItem(key).then((value) => resolve([key, value]))));
         return Promise.all(getPromises);
     },
     multiMerge(pairs) {
package/lib/storage/providers/IDBKeyVal.js
CHANGED

@@ -1,15 +1,4 @@
-import {
-    set,
-    keys,
-    getMany,
-    setMany,
-    get,
-    clear,
-    del,
-    delMany,
-    createStore,
-    promisifyRequest,
-} from 'idb-keyval';
+import {set, keys, getMany, setMany, get, clear, del, delMany, createStore, promisifyRequest} from 'idb-keyval';
 import _ from 'underscore';
 import utils from '../../utils';

@@ -38,8 +27,7 @@ const provider = {
      * @param {String[]} keysParam
      * @return {Promise<Array<[key, value]>>}
      */
-    multiGet: keysParam => getMany(keysParam, getCustomStore())
-        .then(values => _.map(values, (value, index) => [keysParam[index], value])),
+    multiGet: (keysParam) => getMany(keysParam, getCustomStore()).then((values) => _.map(values, (value, index) => [keysParam[index], value])),

     /**
      * Multiple merging of existing and new values in a batch

@@ -47,21 +35,22 @@ const provider = {
      * This function also removes all nested null values from an object.
      * @return {Promise<void>}
      */
-    multiMerge: pairs
-
-
-
-
-
-
-
-        const
-
-
+    multiMerge: (pairs) =>
+        getCustomStore()('readwrite', (store) => {
+            // Note: we are using the manual store transaction here, to fit the read and update
+            // of the items in one transaction to achieve best performance.
+
+            const getValues = Promise.all(_.map(pairs, ([key]) => promisifyRequest(store.get(key))));
+
+            return getValues.then((values) => {
+                const upsertMany = _.map(pairs, ([key, value], index) => {
+                    const prev = values[index];
+                    const newValue = utils.fastMerge(prev, value);
+                    return promisifyRequest(store.put(newValue, key));
+                });
+                return Promise.all(upsertMany);
             });
-
-        });
-    }),
+        }),

     /**
      * Merging an existing value with a new one

@@ -80,7 +69,7 @@ const provider = {
      * @param {Array<[key, value]>} pairs
      * @return {Promise<void>}
      */
-    multiSet: pairs => setMany(pairs, getCustomStore()),
+    multiSet: (pairs) => setMany(pairs, getCustomStore()),

     /**
      * Clear everything from storage and also stops the SyncQueue from adding anything more to storage

@@ -102,17 +91,17 @@ const provider = {
      * @param {String} key
      * @return {Promise<*>}
      */
-    getItem: key =>
-
-
-
+    getItem: (key) =>
+        get(key, getCustomStore())
+            // idb-keyval returns undefined for missing items, but this needs to return null so that idb-keyval does the same thing as SQLiteStorage.
+            .then((val) => (val === undefined ? null : val)),

     /**
      * Remove given key and it's value from storage
      * @param {String} key
      * @returns {Promise<void>}
      */
-    removeItem: key => del(key, getCustomStore()),
+    removeItem: (key) => del(key, getCustomStore()),

     /**
      * Remove given keys and their values from storage

@@ -120,7 +109,7 @@ const provider = {
      * @param {Array} keysParam
      * @returns {Promise}
      */
-    removeItems: keysParam => delMany(keysParam, getCustomStore()),
+    removeItems: (keysParam) => delMany(keysParam, getCustomStore()),

     /**
      * Gets the total bytes of the database file

@@ -131,8 +120,9 @@ const provider = {
             throw new Error('StorageManager browser API unavailable');
         }

-        return window.navigator.storage
-            .
+        return window.navigator.storage
+            .estimate()
+            .then((value) => ({
                 bytesUsed: value.usage,
                 bytesRemaining: value.quota - value.usage,
             }))
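
The rebuilt `multiMerge` is the interesting part: it opens one `'readwrite'` transaction from the idb-keyval custom store, reads the previous values, merges the changes, and writes everything back, so a batch merge costs a single transaction. A standalone sketch of that idb-keyval pattern; the database/store names and the shallow merge are illustrative simplifications (the package merges with `utils.fastMerge`):

```js
import {createStore, promisifyRequest} from 'idb-keyval';

const customStore = createStore('OnyxDemoDB', 'keyvaluepairs');

// Merge several key/value pairs in one readwrite transaction: read the old
// values, shallow-merge the changes, and write everything back.
function multiMerge(pairs) {
    return customStore('readwrite', (store) => {
        const reads = Promise.all(pairs.map(([key]) => promisifyRequest(store.get(key))));
        return reads.then((previousValues) =>
            Promise.all(
                pairs.map(([key, changes], index) => {
                    const merged = {...(previousValues[index] || {}), ...changes};
                    return promisifyRequest(store.put(merged, key));
                }),
            ),
        );
    });
}

multiMerge([
    ['session', {token: 'abc'}],
    ['account', {email: 'a@b.com'}],
]).then(() => console.log('merged'));
```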
package/lib/storage/providers/SQLiteStorage.js
CHANGED

@@ -19,10 +19,10 @@ db.execute('PRAGMA journal_mode=WAL;');

 const provider = {
     /**
-
-
-
-
+     * Get the value of a given key or return `null` if it's not available in storage
+     * @param {String} key
+     * @return {Promise<*>}
+     */
     getItem(key) {
         return db.executeAsync('SELECT record_key, valueJSON FROM keyvaluepairs WHERE record_key = ?;', [key]).then(({rows}) => {
             if (rows.length === 0) {

@@ -34,41 +34,37 @@ const provider = {
     },

     /**
-
-
-
-
+     * Get multiple key-value pairs for the given array of keys in a batch
+     * @param {String[]} keys
+     * @return {Promise<Array<[key, value]>>}
+     */
     multiGet(keys) {
         const placeholders = _.map(keys, () => '?').join(',');
         const command = `SELECT record_key, valueJSON FROM keyvaluepairs WHERE record_key IN (${placeholders});`;
-        return db.executeAsync(command, keys)
-
-
-
-
-            });
+        return db.executeAsync(command, keys).then(({rows}) => {
+            // eslint-disable-next-line no-underscore-dangle
+            const result = _.map(rows._array, (row) => [row.record_key, JSON.parse(row.valueJSON)]);
+            return result;
+        });
     },

     /**
-
-
-
-
-
+     * Sets the value for a given key. The only requirement is that the value should be serializable to JSON string
+     * @param {String} key
+     * @param {*} value
+     * @return {Promise<void>}
+     */
     setItem(key, value) {
         return db.executeAsync('REPLACE INTO keyvaluepairs (record_key, valueJSON) VALUES (?, ?);', [key, JSON.stringify(value)]);
     },

     /**
-
-
-
-
+     * Stores multiple key-value pairs in a batch
+     * @param {Array<[key, value]>} pairs
+     * @return {Promise<void>}
+     */
     multiSet(pairs) {
-        const stringifiedPairs = _.map(pairs, pair => [
-            pair[0],
-            JSON.stringify(_.isUndefined(pair[1]) ? null : pair[1]),
-        ]);
+        const stringifiedPairs = _.map(pairs, (pair) => [pair[0], JSON.stringify(_.isUndefined(pair[1]) ? null : pair[1])]);
         if (_.isEmpty(stringifiedPairs)) {
             return Promise.resolve();
         }

@@ -76,10 +72,10 @@ const provider = {
     },

     /**
-
-
-
-
+     * Multiple merging of existing and new values in a batch
+     * @param {Array<[key, value]>} pairs
+     * @return {Promise<void>}
+     */
     multiMerge(pairs) {
         // Note: We use `ON CONFLICT DO UPDATE` here instead of `INSERT OR REPLACE INTO`
         // so the new JSON value is merged into the old one if there's an existing value

@@ -89,7 +85,7 @@ const provider = {
                 SET valueJSON = JSON_PATCH(valueJSON, JSON(:value));
         `;

-        const nonNullishPairs = _.filter(pairs, pair => !_.isUndefined(pair[1]));
+        const nonNullishPairs = _.filter(pairs, (pair) => !_.isUndefined(pair[1]));
         const queryArguments = _.map(nonNullishPairs, (pair) => {
             const value = JSON.stringify(pair[1]);
             return [pair[0], value];

@@ -99,31 +95,32 @@ const provider = {
     },

     /**
-
-
-
-
-
+     * Merges an existing value with a new one by leveraging JSON_PATCH
+     * @param {String} key
+     * @param {*} changes - the delta for a specific key
+     * @return {Promise<void>}
+     */
     mergeItem(key, changes) {
         return this.multiMerge([[key, changes]]);
     },

     /**
-
-
-
-    getAllKeys: () =>
-
-
-
-
+     * Returns all keys available in storage
+     * @returns {Promise<String[]>}
+     */
+    getAllKeys: () =>
+        db.executeAsync('SELECT record_key FROM keyvaluepairs;').then(({rows}) => {
+            // eslint-disable-next-line no-underscore-dangle
+            const result = _.map(rows._array, (row) => row.record_key);
+            return result;
+        }),

     /**
-
-
-
-
-    removeItem: key => db.executeAsync('DELETE FROM keyvaluepairs WHERE record_key = ?;', [key]),
+     * Removes given key and it's value from storage
+     * @param {String} key
+     * @returns {Promise<void>}
+     */
+    removeItem: (key) => db.executeAsync('DELETE FROM keyvaluepairs WHERE record_key = ?;', [key]),

     /**
      * Removes given keys and their values from storage

@@ -138,9 +135,9 @@ const provider = {
     },

     /**
-
-
-
+     * Clears absolutely everything from storage
+     * @returns {Promise<void>}
+     */
     clear: () => db.executeAsync('DELETE FROM keyvaluepairs;', []),

     /**

@@ -153,15 +150,14 @@ const provider = {
      * @returns {Promise}
      */
     getDatabaseSize() {
-        return Promise.all([db.executeAsync('PRAGMA page_size;'), db.executeAsync('PRAGMA page_count;'), getFreeDiskStorage()])
-            .
-
-
-
-
-
-
-            });
+        return Promise.all([db.executeAsync('PRAGMA page_size;'), db.executeAsync('PRAGMA page_count;'), getFreeDiskStorage()]).then(([pageSizeResult, pageCountResult, bytesRemaining]) => {
+            const pageSize = pageSizeResult.rows.item(0).page_size;
+            const pageCount = pageCountResult.rows.item(0).page_count;
+            return {
+                bytesUsed: pageSize * pageCount,
+                bytesRemaining,
+            };
+        });
     },

     /**
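
The restored comments above hinge on SQLite's `JSON_PATCH`, which applies the RFC 7396 merge-patch algorithm: objects merge recursively, scalars overwrite, and a `null` in the patch deletes the key. Expressed in plain JavaScript for illustration (not code from the package):

```js
// What JSON_PATCH(stored, changes) does to a stored record, in plain JS.
function jsonPatchLike(stored, changes) {
    if (changes === null || typeof changes !== 'object' || Array.isArray(changes)) {
        return changes; // non-object patches replace the stored value outright
    }
    const result = stored !== null && typeof stored === 'object' && !Array.isArray(stored) ? {...stored} : {};
    Object.keys(changes).forEach((key) => {
        if (changes[key] === null) {
            delete result[key]; // nulls remove keys, mirroring JSON_PATCH
        } else {
            result[key] = jsonPatchLike(result[key], changes[key]);
        }
    });
    return result;
}

const stored = {name: 'Report A', draft: true, meta: {owner: 1}};
const changes = {draft: null, meta: {owner: 2}};

console.log(jsonPatchLike(stored, changes)); // {name: 'Report A', meta: {owner: 2}}
```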
package/lib/types.d.ts
CHANGED

@@ -221,16 +221,4 @@ type NullishObjectDeep<ObjectType extends object> = {
     [KeyType in keyof ObjectType]?: NullishDeep<ObjectType[KeyType]> | null;
 };

-export {
-    CollectionKey,
-    CollectionKeyBase,
-    CustomTypeOptions,
-    DeepRecord,
-    Key,
-    KeyValueMapping,
-    OnyxCollection,
-    OnyxEntry,
-    OnyxKey,
-    Selector,
-    NullishDeep,
-};
+export {CollectionKey, CollectionKeyBase, CustomTypeOptions, DeepRecord, Key, KeyValueMapping, OnyxCollection, OnyxEntry, OnyxKey, Selector, NullishDeep};
package/lib/utils.d.ts
CHANGED

@@ -5,10 +5,6 @@
  * On native, when merging an existing value with new changes, SQLite will use JSON_PATCH, which removes top-level nullish values.
  * To be consistent with the behaviour for merge, we'll also want to remove null values for "set" operations.
  */
-declare function fastMerge<T>(
-    target: T,
-    source: T,
-    shouldRemoveNullObjectValues: boolean = true
-): T;
+declare function fastMerge<T>(target: T, source: T, shouldRemoveNullObjectValues: boolean = true): T;

-export default {
+export default {fastMerge};