@dereekb/firebase 12.6.21 → 13.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/index.cjs.js +53 -2239
- package/index.esm.js +34 -2216
- package/package.json +21 -15
- package/src/lib/client/storage/driver.accessor.d.ts +1 -1
- package/src/lib/common/firestore/query/iterator.d.ts +0 -4
- package/src/lib/common/firestore/snapshot/snapshot.field.d.ts +0 -6
- package/src/lib/common/model/model.service.d.ts +1 -1
- package/src/lib/common/storage/driver/accessor.d.ts +1 -1
- package/src/lib/common/storage/types.d.ts +3 -3
- package/src/lib/model/notification/notification.api.d.ts +1 -1
- package/src/lib/model/notification/notification.config.d.ts +5 -5
- package/src/lib/model/notification/notification.d.ts +15 -15
- package/src/lib/model/notification/notification.details.d.ts +0 -4
- package/src/lib/model/notification/notification.item.d.ts +1 -1
- package/src/lib/model/storagefile/storagefile.api.d.ts +4 -4
- package/src/lib/model/storagefile/storagefile.d.ts +7 -7
- package/src/lib/model/storagefile/storagefile.task.d.ts +1 -13
- package/src/lib/model/system/system.d.ts +2 -2
- package/test/index.cjs.js +4043 -0
- package/test/index.esm.js +3957 -0
- package/test/package.json +27 -9
- package/test/src/lib/client/firebase.authorized.d.ts +2 -2
- package/test/src/lib/client/firebase.d.ts +4 -3
- package/test/src/lib/client/firestore.mock.item.fixture.authorized.d.ts +2 -2
- package/test/src/lib/common/firebase.instance.d.ts +7 -3
- package/test/src/lib/common/firestore/firestore.instance.d.ts +7 -3
- package/test/src/lib/common/mock/mock.item.collection.fixture.d.ts +5 -2
- package/test/src/lib/common/mock/mock.item.storage.fixture.d.ts +4 -4
- package/test/src/lib/common/storage/storage.instance.d.ts +7 -3
- package/test/CHANGELOG.md +0 -2114
- package/test/README.md +0 -11
- package/test/src/index.js +0 -5
- package/test/src/index.js.map +0 -1
- package/test/src/lib/client/firebase.authorized.js +0 -35
- package/test/src/lib/client/firebase.authorized.js.map +0 -1
- package/test/src/lib/client/firebase.js +0 -125
- package/test/src/lib/client/firebase.js.map +0 -1
- package/test/src/lib/client/firestore.mock.item.fixture.authorized.js +0 -19
- package/test/src/lib/client/firestore.mock.item.fixture.authorized.js.map +0 -1
- package/test/src/lib/client/index.js +0 -7
- package/test/src/lib/client/index.js.map +0 -1
- package/test/src/lib/common/firebase.instance.js +0 -35
- package/test/src/lib/common/firebase.instance.js.map +0 -1
- package/test/src/lib/common/firestore/firestore.instance.js +0 -24
- package/test/src/lib/common/firestore/firestore.instance.js.map +0 -1
- package/test/src/lib/common/firestore/firestore.js +0 -67
- package/test/src/lib/common/firestore/firestore.js.map +0 -1
- package/test/src/lib/common/firestore/index.js +0 -9
- package/test/src/lib/common/firestore/index.js.map +0 -1
- package/test/src/lib/common/firestore/test.driver.accessor.js +0 -767
- package/test/src/lib/common/firestore/test.driver.accessor.js.map +0 -1
- package/test/src/lib/common/firestore/test.driver.query.js +0 -1361
- package/test/src/lib/common/firestore/test.driver.query.js.map +0 -1
- package/test/src/lib/common/firestore/test.iterator.js +0 -221
- package/test/src/lib/common/firestore/test.iterator.js.map +0 -1
- package/test/src/lib/common/index.js +0 -8
- package/test/src/lib/common/index.js.map +0 -1
- package/test/src/lib/common/mock/index.js +0 -10
- package/test/src/lib/common/mock/index.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.collection.fixture.js +0 -64
- package/test/src/lib/common/mock/mock.item.collection.fixture.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.id.js +0 -3
- package/test/src/lib/common/mock/mock.item.id.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.js +0 -339
- package/test/src/lib/common/mock/mock.item.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.query.js +0 -33
- package/test/src/lib/common/mock/mock.item.query.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.service.js +0 -77
- package/test/src/lib/common/mock/mock.item.service.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.storage.fixture.js +0 -40
- package/test/src/lib/common/mock/mock.item.storage.fixture.js.map +0 -1
- package/test/src/lib/common/storage/index.js +0 -7
- package/test/src/lib/common/storage/index.js.map +0 -1
- package/test/src/lib/common/storage/storage.instance.js +0 -24
- package/test/src/lib/common/storage/storage.instance.js.map +0 -1
- package/test/src/lib/common/storage/storage.js +0 -37
- package/test/src/lib/common/storage/storage.js.map +0 -1
- package/test/src/lib/common/storage/test.driver.accessor.js +0 -669
- package/test/src/lib/common/storage/test.driver.accessor.js.map +0 -1
- package/test/src/lib/index.js +0 -6
- package/test/src/lib/index.js.map +0 -1
- /package/{index.cjs.d.ts → index.d.ts} +0 -0
- /package/{index.esm.d.ts → test/index.d.ts} +0 -0
|
@@ -0,0 +1,4043 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
require('core-js/modules/es.iterator.constructor.js');
|
|
4
|
+
require('core-js/modules/es.iterator.for-each.js');
|
|
5
|
+
require('core-js/modules/es.iterator.map.js');
|
|
6
|
+
require('core-js/modules/es.map.get-or-insert.js');
|
|
7
|
+
require('core-js/modules/es.map.get-or-insert-computed.js');
|
|
8
|
+
var util = require('@dereekb/util');
|
|
9
|
+
var test = require('@dereekb/util/test');
|
|
10
|
+
var rulesUnitTesting = require('@firebase/rules-unit-testing');
|
|
11
|
+
var firebase = require('@dereekb/firebase');
|
|
12
|
+
var firestore = require('firebase/firestore');
|
|
13
|
+
require('core-js/modules/es.set.difference.v2.js');
|
|
14
|
+
require('core-js/modules/es.set.symmetric-difference.v2.js');
|
|
15
|
+
require('core-js/modules/es.set.union.v2.js');
|
|
16
|
+
var rxjs$1 = require('rxjs');
|
|
17
|
+
var rxjs = require('@dereekb/rxjs');
|
|
18
|
+
require('core-js/modules/es.iterator.filter.js');
|
|
19
|
+
var dateFns = require('date-fns');
|
|
20
|
+
var date = require('@dereekb/date');
|
|
21
|
+
require('core-js/modules/es.iterator.find.js');
|
|
22
|
+
require('core-js/modules/es.typed-array.with.js');
|
|
23
|
+
require('core-js/modules/es.uint8-array.set-from-base64.js');
|
|
24
|
+
require('core-js/modules/es.uint8-array.set-from-hex.js');
|
|
25
|
+
require('core-js/modules/es.uint8-array.to-base64.js');
|
|
26
|
+
require('core-js/modules/es.uint8-array.to-hex.js');
|
|
27
|
+
var stream = require('stream');
|
|
28
|
+
var fs = require('fs');
|
|
29
|
+
|
|
30
|
+
/**
 * Wraps a firestore accessor driver so every collection path is "fuzzed":
 * prefixed with a timestamp, a random number, and an incrementing key. This
 * isolates collections between test runs. A given input path always resolves
 * to the same fuzzed path for the lifetime of the returned driver.
 *
 * @param driver - the accessor driver to wrap (collection/subcollection/collectionGroup are overridden)
 * @returns the wrapped driver plus fuzzing helpers (getFuzzedCollectionsNameMap, initWithCollectionNames, fuzzedPathForPath)
 */
function makeTestingFirestoreAccesorDriver(driver) {
  const startTime = new Date().getTime();
  const pathToFuzzedPath = new Map();
  let nextFuzzKey = 0;

  const { collection, subcollection, collectionGroup } = driver;

  // Returns the stable fuzzed path for a path, creating and caching it on first use.
  const fuzzedPathForPath = (path) => {
    let fuzzed = pathToFuzzedPath.get(path);

    if (!fuzzed) {
      const random = Math.ceil(Math.random() * 9999) % 9999;
      nextFuzzKey += 1;
      fuzzed = `${startTime}_${random}_${path}_${nextFuzzKey}`;
      pathToFuzzedPath.set(path, fuzzed);
    }

    return fuzzed;
  };

  const fuzzedCollection = (f, path) => collection(f, fuzzedPathForPath(path));

  const fuzzedSubcollection = (document, path, ...pathSegments) => {
    const fuzzedSegments = pathSegments.map((segment) => fuzzedPathForPath(segment));
    return subcollection(document, fuzzedPathForPath(path), ...fuzzedSegments);
  };

  const fuzzedCollectionGroup = (f, collectionId) => collectionGroup(f, fuzzedPathForPath(collectionId));

  // Pre-registers fuzzed paths for the given names and returns the name map.
  const initWithCollectionNames = (collectionNames) => {
    collectionNames.forEach((name) => fuzzedPathForPath(name));
    return pathToFuzzedPath;
  };

  return {
    ...driver,
    collection: fuzzedCollection,
    collectionGroup: fuzzedCollectionGroup,
    subcollection: fuzzedSubcollection,
    getFuzzedCollectionsNameMap: () => pathToFuzzedPath,
    initWithCollectionNames,
    fuzzedPathForPath
  };
}
|
|
76
|
+
/**
 * Extends the input drivers to generate new drivers for a testing environment.
 * The accessor driver is replaced with the path-fuzzing testing variant and
 * the driver type is marked 'testing'.
 *
 * @param drivers - FirestoreDrivers to extend
 * @returns a copy of the drivers configured for testing
 */
function makeTestingFirestoreDrivers(drivers) {
  const firestoreAccessorDriver = makeTestingFirestoreAccesorDriver(drivers.firestoreAccessorDriver);

  return {
    ...drivers,
    firestoreDriverType: 'testing',
    firestoreAccessorDriver
  };
}
|
|
89
|
+
/**
 * Clears every fuzzed collection registered on the context's testing accessor
 * driver by invoking clearCollection(name, fuzzedPath) for each known entry.
 *
 * @param context - testing firestore context whose accessor driver tracks fuzzed collection names
 * @param clearCollection - async callback invoked once per (name, fuzzedPath) pair
 */
async function clearTestFirestoreContextCollections(context, clearCollection) {
  const fuzzedNameMap = context.drivers.firestoreAccessorDriver.getFuzzedCollectionsNameMap();
  const namePairs = Array.from(fuzzedNameMap.entries());
  await util.performAsyncTasks(namePairs, ([name, fuzzyPath]) => clearCollection(name, fuzzyPath));
}
|
|
94
|
+
|
|
95
|
+
// Monotonic suffix used to keep generated test bucket names unique within the process.
let bucketTestNameKey = 0;

/**
 * Wraps a storage accessor driver for testing. When the driver has no default
 * bucket getter (and useTestDefaultBucket is not false), or useTestDefaultBucket
 * is explicitly true, a unique test bucket name is generated lazily and cached.
 *
 * @param driver - the storage accessor driver to wrap
 * @param config - optional config; `useTestDefaultBucket` controls bucket generation
 * @returns the wrapped driver with a possibly-replaced getDefaultBucket
 */
function makeTestingFirebaseStorageAccesorDriver(driver, config) {
  const { useTestDefaultBucket } = config ?? {};

  // The default bucket is only used if another bucket is not input.
  const shouldGenerateTestBucket = (!driver.getDefaultBucket && useTestDefaultBucket !== false) || useTestDefaultBucket === true;

  const getDefaultBucket = shouldGenerateTestBucket
    ? util.cachedGetter(() => {
        const now = new Date().getTime();
        const random = Math.ceil(Math.random() * 999999) % 999999;
        bucketTestNameKey += 1;
        return `test-bucket-${now}-${random}-${bucketTestNameKey}`;
      })
    : driver.getDefaultBucket;

  return {
    ...driver,
    getDefaultBucket
  };
}
|
|
113
|
+
/**
 * Extends the input drivers to generate new drivers for a testing environment.
 * The storage accessor driver is replaced with the testing variant and the
 * driver type is marked 'testing'.
 *
 * @param drivers - FirebaseStorageDrivers to extend
 * @param config - optional config forwarded to the testing accessor driver
 * @returns a copy of the drivers configured for testing
 */
function makeTestingFirebaseStorageDrivers(drivers, config) {
  const storageAccessorDriver = makeTestingFirebaseStorageAccesorDriver(drivers.storageAccessorDriver, config);

  return {
    ...drivers,
    storageDriverType: 'testing',
    storageAccessorDriver
  };
}
|
|
126
|
+
|
|
127
|
+
/**
 * Pairs a firestore context with a storage context for tests and exposes the
 * underlying firestore/storage instances of each.
 */
class TestFirebaseInstance {
  /**
   * @param firestoreContext - context exposing a `firestore` member
   * @param storageContext - context exposing a `storage` member
   */
  constructor(firestoreContext, storageContext) {
    this.firestoreContext = firestoreContext;
    this.storageContext = storageContext;
  }

  // The firestore instance of the firestore context.
  get firestore() {
    return this.firestoreContext.firestore;
  }

  // The storage instance of the storage context.
  get storage() {
    return this.storageContext.storage;
  }
}
|
|
141
|
+
/**
 * Test context fixture that proxies the firestore/storage members of its
 * TestFirebaseInstance (`this.instance` is provided by the base fixture).
 */
class TestFirebaseContextFixture extends test.AbstractTestContextFixture {
  // Firestore context of the current instance.
  get firestoreContext() {
    return this.instance.firestoreContext;
  }

  // Firestore of the current instance.
  get firestore() {
    return this.instance.firestore;
  }

  // Storage context of the current instance.
  get storageContext() {
    return this.instance.storageContext;
  }

  // Storage of the current instance.
  get storage() {
    return this.instance.storage;
  }
}
|
|
155
|
+
|
|
156
|
+
/**
 * Builds a rules-test firestore context: the normal firestore context created
 * from the rules test context's firestore instance, extended with the drivers
 * and the rules-unit-testing environment/context references.
 *
 * @param drivers - testing firestore drivers
 * @param rulesTestEnvironment - the RulesTestEnvironment
 * @param rulesTestContext - the RulesTestContext providing the firestore instance
 */
function makeRulesTestFirestoreContext(drivers, rulesTestEnvironment, rulesTestContext) {
  const baseContext = firebase.firestoreContextFactory(drivers)(rulesTestContext.firestore());

  return {
    ...baseContext,
    drivers,
    rulesTestContext,
    rulesTestEnvironment
  };
}
|
|
165
|
+
/**
 * Builds a rules-test storage context: the normal storage context created from
 * the rules test context's storage instance, extended with the drivers and the
 * rules-unit-testing environment/context references.
 *
 * @param drivers - testing storage drivers
 * @param rulesTestEnvironment - the RulesTestEnvironment
 * @param rulesTestContext - the RulesTestContext providing the storage instance
 */
function makeRulesTestFirebaseStorageContext(drivers, rulesTestEnvironment, rulesTestContext) {
  const baseContext = firebase.firebaseStorageContextFactory(drivers)(rulesTestContext.storage());

  return {
    ...baseContext,
    drivers,
    rulesTestContext,
    rulesTestEnvironment
  };
}
|
|
174
|
+
/**
 * TestFirebaseInstance implementation backed by @firebase/rules-unit-testing.
 * The firestore/storage contexts are built lazily on first access and cached.
 */
class RulesUnitTestTestFirebaseInstance {
  /**
   * @param drivers - testing firestore + storage drivers
   * @param rulesTestEnvironment - the RulesTestEnvironment
   * @param rulesTestContext - the RulesTestContext for this instance
   */
  constructor(drivers, rulesTestEnvironment, rulesTestContext) {
    this.drivers = drivers;
    this.rulesTestEnvironment = rulesTestEnvironment;
    this.rulesTestContext = rulesTestContext;
    // Lazily-created, cached context getters.
    this._firestoreContext = util.cachedGetter(() => makeRulesTestFirestoreContext(this.drivers, this.rulesTestEnvironment, this.rulesTestContext));
    this._storageContext = util.cachedGetter(() => makeRulesTestFirebaseStorageContext(this.drivers, this.rulesTestEnvironment, this.rulesTestContext));
  }

  get firestoreContext() {
    return this._firestoreContext();
  }

  get storageContext() {
    return this._storageContext();
  }

  get firestore() {
    return this.firestoreContext.firestore;
  }

  get storage() {
    return this.storageContext.storage;
  }
}
|
|
198
|
+
// Fixture used by firebaseRulesUnitTestBuilder; inherits all proxy getters from TestFirebaseContextFixture.
class RulesUnitTestFirebaseTestingContextFixture extends TestFirebaseContextFixture {}
|
|
199
|
+
/**
 * A TestContextBuilderFunction for building firebase test context factories using @firebase/firebase and @firebase/rules-unit-testing. This means CLIENT TESTING ONLY. For server testing, look at @dereekb/firestore-server.
 *
 * This can be used to easily build a testing context that sets up RulesTestEnvironment for tests that sets itself up and tears itself down.
 */
const firebaseRulesUnitTestBuilder = test.testContextBuilder({
  // Normalizes the optional input into a complete config object.
  buildConfig: (input) => {
    const config = {
      testEnvironment: input?.testEnvironment ?? {},
      rulesContext: input?.rulesContext
    };
    return config;
  },
  buildFixture: () => new RulesUnitTestFirebaseTestingContextFixture(),
  setupInstance: async (config) => {
    const drivers = {
      ...makeTestingFirestoreDrivers(firebase.firebaseFirestoreClientDrivers()),
      ...makeTestingFirebaseStorageDrivers(firebase.firebaseStorageClientDrivers(), {
        useTestDefaultBucket: true
      })
    };
    let testEnvironment = config.testEnvironment;
    if (config.testEnvironment.collectionNames) {
      // FIX: capture the fuzzed-name map and forward it to the rules rewriter.
      // Previously the map returned by initWithCollectionNames() was discarded
      // and the rewriter was invoked without it.
      const fuzzedCollectionNames = drivers.firestoreAccessorDriver.initWithCollectionNames(config.testEnvironment.collectionNames);
      testEnvironment = {
        ...testEnvironment,
        firestore: rewriteEmulatorConfigRulesForFuzzedCollectionNames(testEnvironment.firestore, fuzzedCollectionNames)
      };
    }
    // FIX: initialize with the (possibly rewritten) local testEnvironment.
    // Previously config.testEnvironment was passed, silently discarding the
    // rewritten firestore rules built above.
    const rulesTestEnv = await rulesUnitTesting.initializeTestEnvironment(testEnvironment);
    const rulesTestContext = rulesTestContextForConfig(rulesTestEnv, config.rulesContext);
    return new RulesUnitTestTestFirebaseInstance(drivers, rulesTestEnv, rulesTestContext);
  },
  teardownInstance: async (instance, config) => {
    // Clean up the emulator environment; log and rethrow on failure so the
    // test run surfaces the problem.
    await instance.rulesTestEnvironment.cleanup().catch((e) => {
      console.warn('firebaseRulesUnitTestBuilder(): Failed to cleanup rules test environment', e);
      throw e;
    });
  }
});
|
|
239
|
+
// MARK: Internal
/**
 * Creates the RulesTestContext for the given config: authenticated as
 * config.userId (with optional token options) when a config is provided,
 * otherwise an unauthenticated context.
 *
 * @param rulesTestEnv - the RulesTestEnvironment
 * @param testingRulesConfig - optional auth config ({ userId, tokenOptions })
 */
function rulesTestContextForConfig(rulesTestEnv, testingRulesConfig) {
  return testingRulesConfig != null
    ? rulesTestEnv.authenticatedContext(testingRulesConfig.userId, testingRulesConfig.tokenOptions ?? undefined)
    : rulesTestEnv.unauthenticatedContext();
}
|
|
249
|
+
/**
 * Returns a copy of the emulator config whose rules have been rewritten for
 * the fuzzed collection names. The input config is never mutated; configs
 * without rules (or undefined configs) are returned unchanged.
 *
 * @param config - firestore emulator config (may be undefined)
 * @param fuzzedCollectionNamesMap - map of collection name -> fuzzed name
 */
function rewriteEmulatorConfigRulesForFuzzedCollectionNames(config, fuzzedCollectionNamesMap) {
  if (config && config.rules) {
    // FIX: forward fuzzedCollectionNamesMap to the rewriter. Previously the
    // parameter was accepted here but dropped, so the rewriter never received
    // the name mapping.
    return {
      ...config,
      rules: rewriteRulesForFuzzedCollectionNames(config.rules, fuzzedCollectionNamesMap)
    };
  }
  return config;
}
|
|
258
|
+
/**
 * Rewrites firestore security rules to reference fuzzed collection names.
 *
 * NOTE(review): currently a passthrough — the regex-based rewrite described in
 * the TODO below is not implemented yet, so the rules are returned unchanged
 * and fuzzedCollectionNamesMap is unused.
 *
 * @param rules - the rules source text
 * @param fuzzedCollectionNamesMap - map of collection name -> fuzzed name (unused for now)
 * @returns the rules, currently unchanged
 */
function rewriteRulesForFuzzedCollectionNames(rules, fuzzedCollectionNamesMap) {
  // TODO: rewrite the rules using regex matching/replacement.
  return rules;
}
|
|
262
|
+
// MARK: Utility
/**
 * Registers beforeAll/afterAll hooks that lower the firestore log level to
 * 'error' for the duration of the current test suite, restoring 'warn' after.
 * Call from within a describe block.
 */
function changeFirestoreLogLevelBeforeAndAfterTests() {
  beforeAll(() => {
    firestore.setLogLevel('error');
  });
  afterAll(() => {
    firestore.setLogLevel('warn');
  });
}
|
|
267
|
+
|
|
268
|
+
/**
 * User id used for the authenticated rules context of authorizedFirebaseFactory.
 */
const TESTING_AUTHORIZED_FIREBASE_USER_ID = '0';

/**
 * Test context factory with fully-open firestore and storage rules, running as
 * the authorized test user above.
 */
const authorizedFirebaseFactory = firebaseRulesUnitTestBuilder({
  testEnvironment: {
    firestore: {
      rules: `
      rules_version = '2';
      service cloud.firestore {
        match /databases/{database}/documents {
          match /{document=**} {
            allow read, write: if true;
          }
        }
      }
      `
    },
    storage: {
      rules: `
      rules_version = '2';
      service firebase.storage {
        match /b/{bucket}/o {
          match /{allPaths=**} {
            allow read, write: if true;
          }
        }
      }
      `
    }
  },
  rulesContext: {
    userId: TESTING_AUTHORIZED_FIREBASE_USER_ID
  }
});
|
|
300
|
+
|
|
301
|
+
// MARK: Mock Item
/**
 * Model identity for MockItem ('mockItem' / 'mi').
 */
const mockItemIdentity = firebase.firestoreModelIdentity('mockItem', 'mi');

/**
 * FirestoreDocument for MockItem.
 */
class MockItemDocument extends firebase.AbstractFirestoreDocument {
  get modelIdentity() {
    return mockItemIdentity;
  }
}

/**
 * Used to build a FirestoreDataConverter. Fields are configured via configuration. See the SnapshotConverterFunctions for more info.
 */
const mockItemConverter = firebase.snapshotConverterFunctions({
  fields: {
    value: firebase.optionalFirestoreString(),
    tags: firebase.optionalFirestoreArray(),
    date: firebase.optionalFirestoreDate(),
    number: firebase.optionalFirestoreNumber(),
    test: firebase.firestoreBoolean({ default: true })
  }
});

/**
 * Resolves the mockItem collection reference from the context.
 *
 * @param context - firestore context
 * @returns the CollectionReference for mockItem
 */
function mockItemCollectionReference(context) {
  return context.collection(mockItemIdentity.collectionName);
}

/**
 * Used to build a mockItemCollection from a firestore instance with a converter setup.
 *
 * @param firestoreContext - firestore context
 * @returns the MockItem FirestoreCollection
 */
function mockItemFirestoreCollection(firestoreContext) {
  return firestoreContext.firestoreCollection({
    converter: mockItemConverter,
    modelIdentity: mockItemIdentity,
    collection: mockItemCollectionReference(firestoreContext),
    makeDocument: (accessor, documentAccessor) => new MockItemDocument(accessor, documentAccessor),
    firestoreContext
  });
}
|
|
340
|
+
// MARK: MockItemPrivate
/**
 * Model identity for MockItemPrivate, a child of MockItem ('mockItemPrivate' / 'mip').
 */
const mockItemPrivateIdentity = firebase.firestoreModelIdentity(mockItemIdentity, 'mockItemPrivate', 'mip');

// Compass-direction flags used by the settings dencoder below (compiled TS enum output).
exports.MockItemSettingsItemEnum = void 0;
(function (MockItemSettingsItemEnum) {
  MockItemSettingsItemEnum[MockItemSettingsItemEnum["NORTH"] = 0] = "NORTH";
  MockItemSettingsItemEnum[MockItemSettingsItemEnum["SOUTH"] = 1] = "SOUTH";
  MockItemSettingsItemEnum[MockItemSettingsItemEnum["EAST"] = 2] = "EAST";
  MockItemSettingsItemEnum[MockItemSettingsItemEnum["WEST"] = 3] = "WEST";
})(exports.MockItemSettingsItemEnum || (exports.MockItemSettingsItemEnum = {}));

/**
 * Bitwise dencoder that packs the optional north/south/east/west boolean flags
 * into a Set of MockItemSettingsItemEnum values, and unpacks them back into an
 * object with only the truthy flags present.
 */
const mockItemSettingsItemDencoder = util.bitwiseObjectDencoder({
  maxIndex: 4,
  toSetFunction: (settings) => {
    const flags = new Set();
    if (settings.north) {
      flags.add(exports.MockItemSettingsItemEnum.NORTH);
    }
    if (settings.south) {
      flags.add(exports.MockItemSettingsItemEnum.SOUTH);
    }
    if (settings.east) {
      flags.add(exports.MockItemSettingsItemEnum.EAST);
    }
    if (settings.west) {
      flags.add(exports.MockItemSettingsItemEnum.WEST);
    }
    return flags;
  },
  fromSetFunction: (flags) => {
    const settings = {};
    if (flags.has(exports.MockItemSettingsItemEnum.NORTH)) {
      settings.north = true;
    }
    if (flags.has(exports.MockItemSettingsItemEnum.SOUTH)) {
      settings.south = true;
    }
    if (flags.has(exports.MockItemSettingsItemEnum.EAST)) {
      settings.east = true;
    }
    if (flags.has(exports.MockItemSettingsItemEnum.WEST)) {
      settings.west = true;
    }
    return settings;
  }
});
|
|
384
|
+
/**
 * FirestoreDocument for MockItemPrivate.
 */
class MockItemPrivateDocument extends firebase.AbstractFirestoreDocument {
  get modelIdentity() {
    return mockItemPrivateIdentity;
  }
}

/**
 * Used to build a FirestoreDataConverter. Fields are configured via configuration. See the SnapshotConverterFunctions for more info.
 */
const mockItemPrivateConverter = firebase.snapshotConverterFunctions({
  fieldConversions: util.modelFieldConversions({
    num: firebase.firestoreNumber({ default: 0, defaultBeforeSave: 0 }),
    comments: firebase.optionalFirestoreString(),
    values: firebase.firestoreUniqueStringArray(),
    settings: firebase.firestoreBitwiseObjectMap({ dencoder: mockItemSettingsItemDencoder }),
    createdAt: firebase.firestoreDate({ saveDefaultAsNow: true })
  })
});

/**
 * Creates a factory that resolves a parent document's mockItemPrivate
 * subcollection reference.
 *
 * @param context - firestore context
 */
function mockItemPrivateCollectionReferenceFactory(context) {
  return (parent) => context.subcollection(parent.documentRef, mockItemPrivateIdentity.collectionName);
}

/**
 * Creates a factory that builds the single-item MockItemPrivate collection for
 * a given parent MockItem document.
 *
 * @param firestoreContext - firestore context
 */
function mockItemPrivateFirestoreCollection(firestoreContext) {
  const makeCollectionRef = mockItemPrivateCollectionReferenceFactory(firestoreContext);
  return (parent) =>
    firestoreContext.singleItemFirestoreCollection({
      modelIdentity: mockItemPrivateIdentity,
      converter: mockItemPrivateConverter,
      collection: makeCollectionRef(parent),
      makeDocument: (accessor, documentAccessor) => new MockItemPrivateDocument(accessor, documentAccessor),
      firestoreContext,
      parent
    });
}

/**
 * Resolves the mockItemPrivate collection-group reference from the context.
 */
function mockItemPrivateCollectionReference(context) {
  return context.collectionGroup(mockItemPrivateIdentity.collectionName);
}

/**
 * Builds the MockItemPrivate collection group for the given context.
 */
function mockItemPrivateFirestoreCollectionGroup(firestoreContext) {
  return firestoreContext.firestoreCollectionGroup({
    modelIdentity: mockItemPrivateIdentity,
    converter: mockItemPrivateConverter,
    queryLike: mockItemPrivateCollectionReference(firestoreContext),
    makeDocument: (accessor, documentAccessor) => new MockItemPrivateDocument(accessor, documentAccessor),
    firestoreContext
  });
}
|
|
447
|
+
// MARK: MockItemUser
/**
 * Model identity for MockItemUser, a child of MockItem ('mockItemUser' / 'miu').
 */
const mockItemUserIdentity = firebase.firestoreModelIdentity(mockItemIdentity, 'mockItemUser', 'miu');

/**
 * FirestoreDocument for MockItemUser.
 */
class MockItemUserDocument extends firebase.AbstractFirestoreDocument {
  get modelIdentity() {
    return mockItemUserIdentity;
  }
}

/**
 * Firestore collection path name.
 */
const mockItemUserCollectionName = 'mockItemUser';

// Fixed identifier used by tests for the mock user document.
const mockItemUserIdentifier = '0';

/**
 * Used to build a FirestoreDataConverter. Fields are configured via configuration. See the SnapshotConverterFunctions for more info.
 */
const mockItemUserConverter = firebase.snapshotConverterFunctions({
  fieldConversions: util.modelFieldConversions({
    uid: firebase.firestoreUID(),
    name: firebase.firestoreString()
  })
});

/**
 * Creates a factory that resolves a parent document's mockItemUser
 * subcollection reference.
 *
 * @param context - firestore context
 */
function mockItemUserCollectionReferenceFactory(context) {
  return (parent) => context.subcollection(parent.documentRef, mockItemUserCollectionName);
}

// Shared accessor factory that copies user-related data on access.
const mockItemUserAccessorFactory = firebase.copyUserRelatedDataAccessorFactoryFunction();

/**
 * Creates a factory that builds the MockItemUser collection for a given parent
 * MockItem document.
 *
 * @param firestoreContext - firestore context
 */
function mockItemUserFirestoreCollection(firestoreContext) {
  const makeCollectionRef = mockItemUserCollectionReferenceFactory(firestoreContext);
  return (parent) =>
    firestoreContext.firestoreCollectionWithParent({
      modelIdentity: mockItemUserIdentity,
      converter: mockItemUserConverter,
      collection: makeCollectionRef(parent),
      accessorFactory: mockItemUserAccessorFactory,
      makeDocument: (accessor, documentAccessor) => new MockItemUserDocument(accessor, documentAccessor),
      firestoreContext,
      parent
    });
}

/**
 * Resolves the mockItemUser collection-group reference from the context.
 */
function mockItemUserCollectionReference(context) {
  return context.collectionGroup(mockItemUserCollectionName);
}

/**
 * Builds the MockItemUser collection group for the given context.
 */
function mockItemUserFirestoreCollectionGroup(firestoreContext) {
  return firestoreContext.firestoreCollectionGroup({
    modelIdentity: mockItemUserIdentity,
    converter: mockItemUserConverter,
    queryLike: mockItemUserCollectionReference(firestoreContext),
    accessorFactory: mockItemUserAccessorFactory,
    makeDocument: (accessor, documentAccessor) => new MockItemUserDocument(accessor, documentAccessor),
    firestoreContext
  });
}
|
|
510
|
+
// MARK: MockItemSubItem
/**
 * Model identity for MockItemSubItem, a child of MockItem ('mockItemSub' / 'misi').
 */
const mockItemSubItemIdentity = firebase.firestoreModelIdentity(mockItemIdentity, 'mockItemSub', 'misi');

/**
 * FirestoreDocument for MockItemSubItem.
 */
class MockItemSubItemDocument extends firebase.AbstractFirestoreDocumentWithParent {
  get modelIdentity() {
    return mockItemSubItemIdentity;
  }
}

/**
 * Used to build a FirestoreDataConverter. Fields are configured via configuration. See the SnapshotConverterFunctions for more info.
 */
const mockItemSubItemConverter = firebase.snapshotConverterFunctions({
  fields: {
    value: firebase.optionalFirestoreNumber()
  }
});

/**
 * Creates a factory that resolves a parent document's mockItemSub
 * subcollection reference.
 *
 * @param context - firestore context
 */
function mockItemSubItemCollectionReferenceFactory(context) {
  return (parent) => context.subcollection(parent.documentRef, mockItemSubItemIdentity.collectionName);
}

/**
 * Creates a factory that builds the MockItemSubItem collection for a given
 * parent MockItem document.
 *
 * @param firestoreContext - firestore context
 */
function mockItemSubItemFirestoreCollection(firestoreContext) {
  const makeCollectionRef = mockItemSubItemCollectionReferenceFactory(firestoreContext);
  return (parent) =>
    firestoreContext.firestoreCollectionWithParent({
      modelIdentity: mockItemSubItemIdentity,
      converter: mockItemSubItemConverter,
      collection: makeCollectionRef(parent),
      makeDocument: (accessor, documentAccessor) => new MockItemSubItemDocument(accessor, documentAccessor),
      firestoreContext,
      parent
    });
}

/**
 * Resolves the mockItemSub collection-group reference from the context.
 */
function mockItemSubItemCollectionReference(context) {
  return context.collectionGroup(mockItemSubItemIdentity.collectionName);
}

/**
 * Builds the MockItemSubItem collection group for the given context.
 */
function mockItemSubItemFirestoreCollectionGroup(firestoreContext) {
  return firestoreContext.firestoreCollectionGroup({
    modelIdentity: mockItemSubItemIdentity,
    converter: mockItemSubItemConverter,
    queryLike: mockItemSubItemCollectionReference(firestoreContext),
    makeDocument: (accessor, documentAccessor) => new MockItemSubItemDocument(accessor, documentAccessor),
    firestoreContext
  });
}
|
|
558
|
+
// MARK: Sub-Sub Item
/**
 * Model identity for MockItemSubItemDeep, a child of MockItemSubItem
 * ('mockItemSubItemDeep' / 'misid').
 */
const mockItemSubItemDeepIdentity = firebase.firestoreModelIdentity(mockItemSubItemIdentity, 'mockItemSubItemDeep', 'misid');

/**
 * FirestoreDocument for MockItemSubItemDeep.
 */
class MockItemSubItemDeepDocument extends firebase.AbstractFirestoreDocumentWithParent {
  get modelIdentity() {
    return mockItemSubItemDeepIdentity;
  }
}

/**
 * Used to build a FirestoreDataConverter. Fields are configured via configuration. See the SnapshotConverterFunctions for more info.
 */
const mockItemSubItemDeepConverter = firebase.snapshotConverterFunctions({
  fields: {
    value: firebase.optionalFirestoreNumber()
  }
});

/**
 * Creates a factory that resolves a parent document's mockItemSubItemDeep
 * subcollection reference.
 *
 * @param context - firestore context
 */
function mockItemSubItemDeepCollectionReferenceFactory(context) {
  return (parent) => context.subcollection(parent.documentRef, mockItemSubItemDeepIdentity.collectionName);
}

/**
 * Creates a factory that builds the MockItemSubItemDeep collection for a given
 * parent MockItemSubItem document.
 *
 * @param firestoreContext - firestore context
 */
function mockItemSubItemDeepFirestoreCollection(firestoreContext) {
  const makeCollectionRef = mockItemSubItemDeepCollectionReferenceFactory(firestoreContext);
  return (parent) =>
    firestoreContext.firestoreCollectionWithParent({
      modelIdentity: mockItemSubItemDeepIdentity,
      converter: mockItemSubItemDeepConverter,
      collection: makeCollectionRef(parent),
      makeDocument: (accessor, documentAccessor) => new MockItemSubItemDeepDocument(accessor, documentAccessor),
      firestoreContext,
      parent
    });
}

/**
 * Resolves the mockItemSubItemDeep collection-group reference from the context.
 */
function mockItemSubItemDeepCollectionReference(context) {
  return context.collectionGroup(mockItemSubItemDeepIdentity.collectionName);
}

/**
 * Builds the MockItemSubItemDeep collection group for the given context.
 */
function mockItemSubItemDeepFirestoreCollectionGroup(firestoreContext) {
  return firestoreContext.firestoreCollectionGroup({
    modelIdentity: mockItemSubItemDeepIdentity,
    converter: mockItemSubItemDeepConverter,
    queryLike: mockItemSubItemDeepCollectionReference(firestoreContext),
    makeDocument: (accessor, documentAccessor) => new MockItemSubItemDeepDocument(accessor, documentAccessor),
    firestoreContext
  });
}
|
|
606
|
+
// MARK: Mock System Item
// Type key under which the mock system-state data is registered below.
const MOCK_SYSTEM_STATE_TYPE = 'mockitemsystemstate';
/**
 * Converter for the stored data of the mock system-state type.
 */
const mockItemSystemDataConverter = firebase.firestoreSubObject({
  objectField: {
    fields: {
      // NOTE(review): despite the name, "lat" is declared as a date field;
      // saveDefaultAsNow stores the current time when no value is set — confirm intent.
      lat: firebase.firestoreDate({
        saveDefaultAsNow: true
      })
    }
  }
});
|
|
617
|
+
// Maps each system-state type key to the converter used for its stored data.
const mockItemSystemStateStoredDataConverterMap = {
  [MOCK_SYSTEM_STATE_TYPE]: mockItemSystemDataConverter
};
|
|
620
|
+
|
|
621
|
+
// MARK: Collections
// Marker class for the collections bundle; instances are plain objects produced
// by makeMockItemCollections() below (transpiled from a TypeScript class/type).
class MockItemCollections {}
|
|
623
|
+
/**
 * Builds the full set of mock item collections, collection factories and
 * collection groups for the given firestore context.
 *
 * @param firestoreContext - firestore context the collections are bound to
 * @returns a MockItemCollections-shaped object
 */
function makeMockItemCollections(firestoreContext) {
  return {
    mockItemCollection: mockItemFirestoreCollection(firestoreContext),
    // "...Factory" entries take a parent document and return the subcollection for it
    mockItemPrivateCollectionFactory: mockItemPrivateFirestoreCollection(firestoreContext),
    mockItemPrivateCollectionGroup: mockItemPrivateFirestoreCollectionGroup(firestoreContext),
    mockItemUserCollectionFactory: mockItemUserFirestoreCollection(firestoreContext),
    mockItemUserCollectionGroup: mockItemUserFirestoreCollectionGroup(firestoreContext),
    mockItemSubItemCollectionFactory: mockItemSubItemFirestoreCollection(firestoreContext),
    mockItemSubItemCollectionGroup: mockItemSubItemFirestoreCollectionGroup(firestoreContext),
    mockItemSubItemDeepCollectionFactory: mockItemSubItemDeepFirestoreCollection(firestoreContext),
    mockItemSubItemDeepCollectionGroup: mockItemSubItemDeepFirestoreCollectionGroup(firestoreContext),
    mockItemSystemStateCollection: firebase.systemStateFirestoreCollection(firestoreContext, mockItemSystemStateStoredDataConverterMap)
  };
}
|
|
637
|
+
// MARK: Models
/**
 * Model service factory for MockItem.
 * Grants { read: true } by default; tests can override via context.rolesToReturn.
 */
const mockItemFirebaseModelServiceFactory = firebase.firebaseModelServiceFactory({
  roleMapForModel: (output, context, model) => context.rolesToReturn ?? { read: true },
  getFirestoreCollection: (c) => c.app.mockItemCollection
});
|
|
647
|
+
/**
 * Model service factory for MockItemPrivate.
 * Grants { read: true } by default; tests can override via context.rolesToReturn.
 */
const mockItemPrivateFirebaseModelServiceFactory = firebase.firebaseModelServiceFactory({
  roleMapForModel: (output, context, model) => context.rolesToReturn ?? { read: true },
  getFirestoreCollection: (c) => c.app.mockItemPrivateCollectionGroup
});
|
|
656
|
+
/**
 * Model service factory for MockItemUser.
 * By default only the owning user (document id equal to the auth uid) may read;
 * tests can override via context.rolesToReturn.
 */
const mockItemUserFirebaseModelServiceFactory = firebase.firebaseModelServiceFactory({
  roleMapForModel: (output, context, model) => {
    const isOwnerUser = context.auth?.uid === model.documentRef.id;
    return context.rolesToReturn ?? { read: isOwnerUser };
  },
  getFirestoreCollection: (c) => c.app.mockItemUserCollectionGroup
});
|
|
666
|
+
/**
 * Model service factory for MockItemSubItem.
 * Grants { read: true } by default; tests can override via context.rolesToReturn.
 */
const mockItemSubItemFirebaseModelServiceFactory = firebase.firebaseModelServiceFactory({
  roleMapForModel: (output, context, model) => context.rolesToReturn ?? { read: true },
  getFirestoreCollection: (c) => c.app.mockItemSubItemCollectionGroup
});
|
|
675
|
+
/**
 * Model service factory for MockItemSubItemDeep.
 * Grants { read: true } by default; tests can override via context.rolesToReturn.
 */
const mockItemSubItemDeepFirebaseModelServiceFactory = firebase.firebaseModelServiceFactory({
  roleMapForModel: (output, context, model) => context.rolesToReturn ?? { read: true },
  getFirestoreCollection: (c) => c.app.mockItemSubItemDeepCollectionGroup
});
|
|
684
|
+
/**
 * Model service factory for the mock SystemState model.
 * Access is restricted: roles come from grantFullAccessIfAdmin(), so only a
 * sys-admin context receives any access.
 */
const mockItemSystemStateFirebaseModelServiceFactory = firebase.firebaseModelServiceFactory({
  roleMapForModel: (output, context, model) => firebase.grantFullAccessIfAdmin(context), // only sys-admin allowed
  getFirestoreCollection: (c) => c.app.mockItemSystemStateCollection
});
|
|
690
|
+
// Registry of model service factories, keyed by model type name.
const MOCK_FIREBASE_MODEL_SERVICE_FACTORIES = {
  systemState: mockItemSystemStateFirebaseModelServiceFactory,
  mockItem: mockItemFirebaseModelServiceFactory,
  mockItemPrivate: mockItemPrivateFirebaseModelServiceFactory,
  mockItemUser: mockItemUserFirebaseModelServiceFactory,
  mockItemSub: mockItemSubItemFirebaseModelServiceFactory,
  mockItemSubItemDeep: mockItemSubItemDeepFirebaseModelServiceFactory
};
// Combined model service built from the registry; used by tests to resolve
// services and role maps per model type.
const mockFirebaseModelServices = firebase.firebaseModelsService(MOCK_FIREBASE_MODEL_SERVICE_FACTORIES);
|
|
699
|
+
|
|
700
|
+
// MARK: Test Item Testing Fixture
/**
 * Per-test instance created by MockItemCollectionFixture.
 *
 * Builds the mock item collections from the wrapped fixture's firestore context
 * and exposes them through convenience getters.
 */
class MockItemCollectionFixtureInstance {
  // Underlying CollectionReference for MockItem.
  get collection() {
    return this.mockItemCollection.collection;
  }
  /**
   * @deprecated Use mockItemCollection instead.
   */
  get firestoreCollection() {
    return this.collections.mockItemCollection;
  }
  get mockItemCollection() {
    return this.collections.mockItemCollection;
  }
  // Factory: parent MockItem document -> MockItemPrivate subcollection.
  get mockItemPrivateCollection() {
    return this.collections.mockItemPrivateCollectionFactory;
  }
  // Factory: parent MockItem document -> MockItemSubItem subcollection.
  get mockItemSubItemCollection() {
    return this.collections.mockItemSubItemCollectionFactory;
  }
  get mockItemSubItemCollectionGroup() {
    return this.collections.mockItemSubItemCollectionGroup;
  }
  // Factory: parent MockItem document -> MockItemUser subcollection.
  get mockItemUserCollection() {
    return this.collections.mockItemUserCollectionFactory;
  }
  get mockItemUserCollectionGroup() {
    return this.collections.mockItemUserCollectionGroup;
  }
  // Factory: parent MockItem document -> MockItemSubItemDeep subcollection.
  get mockItemSubItemDeepCollection() {
    return this.collections.mockItemSubItemDeepCollectionFactory;
  }
  get mockItemSubItemDeepCollectionGroup() {
    return this.collections.mockItemSubItemDeepCollectionGroup;
  }
  get mockItemSystemState() {
    return this.collections.mockItemSystemStateCollection;
  }
  /**
   * @param fixture - wrapped fixture whose parent exposes the firestoreContext
   */
  constructor(fixture) {
    // `= void 0` lines are transpiled class-field initializers; real values follow.
    this.fixture = void 0;
    this.collections = void 0;
    this.fixture = fixture;
    this.collections = makeMockItemCollections(fixture.parent.firestoreContext);
  }
}
|
|
745
|
+
/**
 * Used to expose a CollectionReference to MockItem for simple tests.
 *
 * The per-test instance is a MockItemCollectionFixtureInstance (see
 * testWithMockItemCollectionFixture below).
 */
class MockItemCollectionFixture extends test.AbstractWrappedFixtureWithInstance {}
|
|
749
|
+
/**
 * Creates a test-context factory that wraps a firebase fixture in a
 * MockItemCollectionFixture and creates a fresh MockItemCollectionFixtureInstance per test.
 *
 * @param config - currently unused (reserved for future setup/teardown hooks)
 */
function testWithMockItemCollectionFixture(config) {
  return test.instanceWrapTestContextFactory({
    wrapFixture: fixture => new MockItemCollectionFixture(fixture),
    makeInstance: wrap => new MockItemCollectionFixtureInstance(wrap),
    teardownInstance: instance => {}
    // TODO(FUTURE): Utilize config here using the setup/teardown later if needed.
  });
}
|
|
757
|
+
|
|
758
|
+
// MARK: Test Item Testing Fixture
/**
 * Per-test instance created by MockItemStorageFixture; proxies the storage
 * accessors from the wrapped fixture's parent.
 */
class MockItemStorageFixtureInstance {
  /**
   * @param fixture - wrapped fixture whose parent exposes storage/storageContext
   */
  constructor(fixture) {
    this.fixture = void 0; // transpiled class-field initializer
    this.fixture = fixture;
  }
  get storage() {
    return this.fixture.parent.storage;
  }
  get storageContext() {
    return this.fixture.parent.storageContext;
  }
}
|
|
771
|
+
/**
 * Used to expose the mock Storage/StorageContext for simple tests.
 * (Previous doc mentioning a CollectionReference appeared copied from
 * MockItemCollectionFixture; this fixture only proxies storage.)
 */
class MockItemStorageFixture extends test.AbstractWrappedFixtureWithInstance {
  get storage() {
    return this.instance.storage;
  }
  get storageContext() {
    return this.instance.storageContext;
  }
}
|
|
782
|
+
/**
 * Creates a test-context factory that wraps a firebase fixture in a
 * MockItemStorageFixture and creates a fresh MockItemStorageFixtureInstance per test.
 *
 * @param config - currently unused (reserved for future setup/teardown hooks)
 */
function testWithMockItemStorageFixture(config) {
  return test.instanceWrapTestContextFactory({
    wrapFixture: fixture => new MockItemStorageFixture(fixture),
    makeInstance: wrap => new MockItemStorageFixtureInstance(wrap),
    teardownInstance: instance => {}
    // TODO(FUTURE): Utilize config here using the setup/teardown later if needed.
  });
}
|
|
790
|
+
|
|
791
|
+
/**
 * Convenience mock instance for collection tests within an authorized firebase context.
 *
 * Uses @firebase/firestore. This is ONLY for the client.
 */
const authorizedTestWithMockItemCollection = testWithMockItemCollectionFixture()(authorizedFirebaseFactory);
|
|
797
|
+
/**
 * Convenience mock instance for storage tests within an authorized firebase context.
 *
 * Uses @firebase/storage. This is ONLY for the client.
 */
const authorizedTestWithMockItemStorage = testWithMockItemStorageFixture()(authorizedFirebaseFactory);
|
|
803
|
+
|
|
804
|
+
/**
 * Wraps a firestore context for tests and exposes its firestore instance.
 */
class TestFirestoreInstance {
  /**
   * @param firestoreContext - context whose .firestore is exposed
   */
  constructor(firestoreContext) {
    this.firestoreContext = void 0; // transpiled class-field initializer
    this.firestoreContext = firestoreContext;
  }
  get firestore() {
    return this.firestoreContext.firestore;
  }
}
|
|
813
|
+
/**
 * Test-context fixture exposing the instance's firestore and firestoreContext.
 * The instance is expected to be a TestFirestoreInstance (or compatible).
 */
class TestFirestoreContextFixture extends test.AbstractTestContextFixture {
  get firestore() {
    return this.instance.firestore;
  }
  get firestoreContext() {
    return this.instance.firestoreContext;
  }
}
|
|
821
|
+
|
|
822
|
+
/**
 * Query constraint matching MockItems whose "value" field equals the given value.
 */
function mockItemWithValue(expectedValue) {
  return firebase.where('value', '==', expectedValue);
}
|
|
825
|
+
/**
 * Query constraint matching MockItems whose "test" field equals the given value.
 * (Parameter renamed from `test` to avoid shadowing the imported `test` module.)
 */
function mockItemWithTestValue(testValue) {
  return firebase.where('test', '==', testValue);
}
|
|
828
|
+
/**
 * This sorts all fields by their document ID, then filters in between two specific document id paths in order to only return values between a specific path.
 *
 * Visual Example:
 *
 * /a/b/c/c/a
 * /a/b/c/d/A
 * /a/b/c/d/B
 * /a/b/c/d/C
 * /a/b/c/e/a
 *
 * From:
 * https://medium.com/firebase-developers/how-to-query-collections-in-firestore-under-a-certain-path-6a0d686cebd2
 *
 * @param mockItem - parent MockItem document under which child MockItemSubItemDeep documents are matched
 * @returns query constraints produced by firebase.allChildDocumentsUnderParent
 */
function allChildMockItemSubItemDeepsWithinMockItem(mockItem) {
  return firebase.allChildDocumentsUnderParent(mockItem);
}
|
|
848
|
+
|
|
849
|
+
/**
|
|
850
|
+
* Describes accessor driver tests, using a MockItemCollectionFixture.
|
|
851
|
+
*
|
|
852
|
+
* @param f
|
|
853
|
+
*/
|
|
854
|
+
/**
 * Describes accessor driver tests, using a MockItemCollectionFixture.
 *
 * Exercises document CRUD, increment(), arrayUpdate(), subcollections
 * (MockItemUser, MockItemPrivate, MockItemSubItem) and document-accessor
 * key/id loading against the fixture's firestore context.
 *
 * @param f - MockItemCollectionFixture under test
 */
function describeFirestoreAccessorDriverTests(f) {
  describe('FirestoreAccessorDriver', () => {
    const testDocumentCount = 5;
    let mockItemFirestoreDocumentAccessor;
    let items;
    beforeEach(async () => {
      mockItemFirestoreDocumentAccessor = f.instance.firestoreCollection.documentAccessor();
      // seed the collection with testDocumentCount MockItem documents
      items = await firebase.makeDocuments(f.instance.firestoreCollection.documentAccessor(), {
        count: testDocumentCount,
        init: i => {
          return {
            value: `${i}`,
            test: true,
            string: ''
          };
        }
      });
    });
    describe('MockItem', () => {
      let itemDocument;
      beforeEach(() => {
        itemDocument = items[0];
        itemDocument.accessor; // touches the accessor getter; no other visible effect
      });
      describe('accessors', () => {
        // shared accessor contract tests for a plain MockItem document
        describeFirestoreDocumentAccessorTests(() => ({
          context: f.parent.firestoreContext,
          firestoreDocument: () => itemDocument,
          dataForFirstOfTwoUpdates: () => ({
            test: true,
            tags: ['a']
          }),
          dataForUpdate: () => ({
            test: false
          }),
          hasRemainingDataFromFirstOfTwoUpdate: data => (data.tags?.length || 0) > 0 && data.tags?.[0] === 'a',
          hasDataFromUpdate: data => data.test === false,
          loadDocumentForTransaction: (transaction, ref) => f.instance.firestoreCollection.documentAccessorForTransaction(transaction).loadDocument(ref),
          loadDocumentForWriteBatch: (writeBatch, ref) => f.instance.firestoreCollection.documentAccessorForWriteBatch(writeBatch).loadDocument(ref)
        }));
        describe('increment()', () => {
          it(`should increase the item's value`, async () => {
            let data = await itemDocument.snapshotData();
            expect(data?.number).toBe(undefined);
            const update = {
              number: 3
            };
            await itemDocument.increment(update);
            data = await itemDocument.snapshotData();
            expect(data?.number).toBe(update.number);
            // increment again
            await itemDocument.increment(update);
            data = await itemDocument.snapshotData();
            expect(data?.number).toBe(update.number * 2);
          });
          it(`should decrease the item's value`, async () => {
            let data = await itemDocument.snapshotData();
            expect(data?.number).toBe(undefined);
            const update = {
              number: -3
            };
            await itemDocument.increment(update);
            data = await itemDocument.snapshotData();
            expect(data?.number).toBe(update.number);
            // increment again
            await itemDocument.increment(update);
            data = await itemDocument.snapshotData();
            expect(data?.number).toBe(update.number * 2);
          });
          it(`should increase and decrease the item's value`, async () => {
            let data = await itemDocument.snapshotData();
            expect(data?.number).toBe(undefined);
            const update = {
              number: 3
            };
            await itemDocument.increment(update);
            const update2 = {
              number: -6
            };
            await itemDocument.increment(update2);
            data = await itemDocument.snapshotData();
            expect(data?.number).toBe(update.number + update2.number);
          });
          describe('in transaction', () => {
            it(`should increase the item's value`, async () => {
              const update = {
                number: 3
              };
              await f.parent.firestoreContext.runTransaction(async transaction => {
                const itemDocumentInTransaction = await f.instance.firestoreCollection.documentAccessorForTransaction(transaction).loadDocumentForId(itemDocument.id);
                const data = await itemDocumentInTransaction.snapshotData();
                expect(data?.number).toBe(undefined);
                await itemDocumentInTransaction.increment(update);
              });
              const result = await itemDocument.snapshotData();
              expect(result?.number).toBe(update.number);
            });
          });
          describe('in write batch', () => {
            it(`should increase the item's value`, async () => {
              const update = {
                number: 3
              };
              const writeBatch = f.parent.firestoreContext.batch();
              const itemDocumentForWriteBatch = await f.instance.firestoreCollection.documentAccessorForWriteBatch(writeBatch).loadDocumentForId(itemDocument.id);
              await itemDocumentForWriteBatch.increment(update);
              await writeBatch.commit(); // writes only land on commit
              const result = await itemDocument.snapshotData();
              expect(result?.number).toBe(update.number);
            });
          });
        });
        describe('arrayUpdate()', () => {
          describe('union', () => {
            it('should add to the array', async () => {
              await itemDocument.accessor.update({
                tags: ['a']
              });
              await itemDocument.arrayUpdate({
                union: {
                  tags: ['b', 'c']
                }
              });
              const result = await itemDocument.snapshotData();
              expect(result?.tags).toEqual(['a', 'b', 'c']);
            });
          });
          describe('remove', () => {
            it('should remove from the array', async () => {
              await itemDocument.accessor.update({
                tags: ['a', 'b', 'c']
              });
              await itemDocument.arrayUpdate({
                remove: {
                  tags: ['a', 'b']
                }
              });
              const result = await itemDocument.snapshotData();
              expect(result?.tags).toEqual(['c']);
            });
          });
        });
      });
      describe('Subcollections', () => {
        describe('singleItemFirestoreCollection (MockItemUser)', () => {
          let testUserId;
          let mockItemUserFirestoreCollection;
          let itemUserDataDocument;
          beforeEach(() => {
            // random uid per test to avoid collisions between tests
            testUserId = 'userid' + Math.ceil(Math.random() * 100000);
            mockItemUserFirestoreCollection = f.instance.collections.mockItemUserCollectionFactory(itemDocument);
            itemUserDataDocument = mockItemUserFirestoreCollection.documentAccessor().loadDocumentForId(testUserId);
            itemUserDataDocument.accessor; // touches the accessor getter; no other visible effect
          });
          describe('create()', () => {
            describe('mockItemUserAccessorFactory usage', () => {
              it('should copy the documents identifier to the uid field on create.', async () => {
                await itemUserDataDocument.accessor.create({
                  uid: '',
                  // the mockItemUserAccessorFactory silently enforces the uid to be the same as the document.
                  name: 'hello'
                });
                const snapshot = await itemUserDataDocument.accessor.get();
                expect(snapshot.data()?.uid).toBe(testUserId);
              });
            });
          });
          describe('set()', () => {
            describe('mockItemUserAccessorFactory usage', () => {
              it('should copy the documents identifier to the uid field on set.', async () => {
                await itemUserDataDocument.accessor.set({
                  uid: '',
                  // the mockItemUserAccessorFactory silently enforces the uid to be the same as the document.
                  name: 'hello'
                });
                const snapshot = await itemUserDataDocument.accessor.get();
                expect(snapshot.data()?.uid).toBe(testUserId);
              });
            });
          });
        });
        describe('singleItemFirestoreCollection (MockItemPrivate)', () => {
          let mockItemPrivateFirestoreCollection;
          let itemPrivateDataDocument;
          let privateDataAccessor;
          let privateSub;
          beforeEach(() => {
            mockItemPrivateFirestoreCollection = f.instance.collections.mockItemPrivateCollectionFactory(itemDocument);
            itemPrivateDataDocument = mockItemPrivateFirestoreCollection.loadDocument();
            privateDataAccessor = itemPrivateDataDocument.accessor;
            privateSub = new rxjs.SubscriptionObject(); // cleaned up in afterEach
          });
          afterEach(() => {
            privateSub.destroy();
          });
          describe('singleItemFirestoreCollection accessor', () => {
            it('should implement FirestoreSingleDocumentAccessor', () => {
              expect(mockItemPrivateFirestoreCollection.singleItemIdentifier).toBeDefined();
              expect(mockItemPrivateFirestoreCollection.documentRef).toBeDefined();
              expect(mockItemPrivateFirestoreCollection.loadDocument).toBeDefined();
              expect(mockItemPrivateFirestoreCollection.loadDocumentForTransaction).toBeDefined();
              expect(mockItemPrivateFirestoreCollection.loadDocumentForWriteBatch).toBeDefined();
            });
          });
          describe('get()', () => {
            it('should read that data using the configured converter', async () => {
              await itemPrivateDataDocument.accessor.set({
                values: null
              });
              const dataWithoutConverter = (await itemPrivateDataDocument.accessor.getWithConverter(null)).data();
              expect(dataWithoutConverter).toBeDefined();
              expect(dataWithoutConverter.values).toBeNull();
              // converter on client, _converter on server
              expect(itemPrivateDataDocument.documentRef.converter ?? itemPrivateDataDocument.documentRef._converter).toBeDefined();
              const data = await itemPrivateDataDocument.snapshotData();
              expect(data?.values).toBeDefined();
              expect(data?.values).not.toBeNull(); // should not be null due to the snapshot converter config
            });
          });
          describe('getWithConverter()', () => {
            it('should get the results with the input converter', async () => {
              await itemPrivateDataDocument.accessor.set({
                values: null
              });
              const data = await itemPrivateDataDocument.snapshotData();
              expect(data?.values).toBeDefined();
              const dataWithoutConverter = (await itemPrivateDataDocument.accessor.getWithConverter(null)).data();
              expect(dataWithoutConverter).toBeDefined();
              expect(dataWithoutConverter.values).toBeNull();
            });
            it('should get the results with the input converter with a type', async () => {
              await itemPrivateDataDocument.accessor.set({
                values: null
              });
              const data = await itemPrivateDataDocument.snapshotData();
              expect(data?.values).toBeDefined();
              const converter = mockItemConverter;
              // NOTE(review): variable name is misleading — a converter IS passed here.
              const dataWithoutConverter = await itemPrivateDataDocument.accessor.getWithConverter(converter);
              expect(dataWithoutConverter).toBeDefined();
            });
          });
          describe('update()', () => {
            test.itShouldFail('if the item does not exist', async () => {
              const exists = await itemPrivateDataDocument.accessor.exists();
              expect(exists).toBe(false);
              await test.expectFail(() => itemPrivateDataDocument.update({
                createdAt: new Date()
              }));
            });
            it('should update the item if it exist', async () => {
              await itemPrivateDataDocument.create({
                createdAt: new Date(),
                num: 0,
                values: [],
                settings: {
                  test: {
                    north: true,
                    south: true
                  }
                }
              });
              const newDate = new Date(0);
              const exists = await itemPrivateDataDocument.accessor.exists();
              expect(exists).toBe(true);
              await itemPrivateDataDocument.update({
                createdAt: newDate
              });
              const data = await itemPrivateDataDocument.snapshotData();
              expect(data?.createdAt.getTime()).toBe(newDate.getTime());
              // check was not modified
              expect(data?.settings['test'].north).toBe(true);
              expect(data?.settings['test'].south).toBe(true);
              expect(data?.settings['test'].east).toBeUndefined();
              expect(data?.settings['test'].west).toBeUndefined();
            });
          });
          describe('set()', () => {
            it('should create the item', async () => {
              let exists = await privateDataAccessor.exists();
              expect(exists).toBe(false);
              const createdAt = new Date();
              const settings = {
                test: {
                  north: true
                }
              };
              await privateDataAccessor.set({
                values: [],
                num: 0,
                createdAt,
                settings
              });
              exists = await privateDataAccessor.exists();
              expect(exists).toBe(true);
              const getResult = await privateDataAccessor.get();
              const data = getResult.data();
              expect(data).toBeDefined();
              expect(data?.num).toBe(0);
              expect(data?.values).toEqual([]);
              expect(data?.createdAt).toBeInstanceOf(Date);
              expect(data?.createdAt.toISOString()).toBe(createdAt.toISOString());
              expect(data?.settings).toEqual(settings);
            });
          });
          describe('with item', () => {
            beforeEach(async () => {
              await privateDataAccessor.set({
                num: 0,
                values: [],
                createdAt: new Date(),
                settings: {}
              });
            });
            describe('increment()', () => {
              it(`should increase the item's value`, async () => {
                let data = await itemPrivateDataDocument.snapshotData();
                expect(data?.num).toBe(0);
                const update = {
                  num: 3
                };
                await itemPrivateDataDocument.increment(update);
                data = await itemPrivateDataDocument.snapshotData();
                expect(data?.num).toBe(update.num);
              });
            });
            describe('accessors', () => {
              const TEST_COMMENTS = 'test';
              // shared accessor contract tests for the single private document
              describeFirestoreDocumentAccessorTests(() => ({
                context: f.parent.firestoreContext,
                firestoreDocument: () => itemPrivateDataDocument,
                dataForFirstOfTwoUpdates: () => ({
                  comments: 'not_test_comments',
                  values: ['a']
                }),
                hasRemainingDataFromFirstOfTwoUpdate: data => data.values.length > 0 && data.values[0] === 'a',
                dataForUpdate: () => ({
                  comments: TEST_COMMENTS
                }),
                hasDataFromUpdate: data => data.comments === TEST_COMMENTS,
                loadDocumentForTransaction: (transaction, ref) => mockItemPrivateFirestoreCollection.loadDocumentForTransaction(transaction),
                loadDocumentForWriteBatch: (writeBatch, ref) => mockItemPrivateFirestoreCollection.loadDocumentForWriteBatch(writeBatch)
              }));
            });
          });
        });
        describe('MockItemSubItem', () => {
          let subItemDocument;
          beforeEach(async () => {
            subItemDocument = f.instance.collections.mockItemSubItemCollectionFactory(itemDocument).documentAccessor().newDocument();
            await subItemDocument.accessor.set({
              value: 0
            });
          });
          describe('firestoreCollectionWithParent (MockItemSubItem)', () => {
            let mockItemSubItemFirestoreCollection;
            beforeEach(() => {
              mockItemSubItemFirestoreCollection = f.instance.collections.mockItemSubItemCollectionFactory(itemDocument);
            });
            describe('with item', () => {
              describe('accessors', () => {
                const TEST_VALUE = 1234;
                describeFirestoreDocumentAccessorTests(() => ({
                  context: f.parent.firestoreContext,
                  firestoreDocument: () => subItemDocument,
                  dataForFirstOfTwoUpdates: () => ({
                    value: TEST_VALUE - 10
                  }),
                  dataForUpdate: () => ({
                    value: TEST_VALUE
                  }),
                  hasDataFromUpdate: data => data.value === TEST_VALUE,
                  loadDocumentForTransaction: (transaction, ref) => mockItemSubItemFirestoreCollection.documentAccessorForTransaction(transaction).loadDocument(ref),
                  loadDocumentForWriteBatch: (writeBatch, ref) => mockItemSubItemFirestoreCollection.documentAccessorForWriteBatch(writeBatch).loadDocument(ref)
                }));
              });
            });
          });
          describe('firestoreCollectionGroup (MockItemSubItem)', () => {
            let mockItemSubItemFirestoreCollectionGroup;
            beforeEach(() => {
              mockItemSubItemFirestoreCollectionGroup = f.instance.collections.mockItemSubItemCollectionGroup;
            });
            describe('with item', () => {
              describe('accessors', () => {
                const TEST_VALUE = 1234;
                describeFirestoreDocumentAccessorTests(() => ({
                  context: f.parent.firestoreContext,
                  firestoreDocument: () => subItemDocument,
                  dataForFirstOfTwoUpdates: () => ({
                    value: TEST_VALUE - 10
                  }),
                  dataForUpdate: () => ({
                    value: TEST_VALUE
                  }),
                  hasDataFromUpdate: data => data.value === TEST_VALUE,
                  loadDocumentForTransaction: (transaction, ref) => mockItemSubItemFirestoreCollectionGroup.documentAccessorForTransaction(transaction).loadDocument(ref),
                  loadDocumentForWriteBatch: (writeBatch, ref) => mockItemSubItemFirestoreCollectionGroup.documentAccessorForWriteBatch(writeBatch).loadDocument(ref)
                }));
              });
            });
          });
        });
      });
    });
    describe('documentAccessor()', () => {
      describe('loadDocumentForKey()', () => {
        it('should load an existing document from the path.', async () => {
          const document = mockItemFirestoreDocumentAccessor.loadDocumentForKey(items[0].key);
          const exists = await document.accessor.exists();
          expect(exists).toBe(true);
        });
        // NOTE(review): the expectFail() results below are not awaited/returned —
        // presumably loadDocumentForKey/Id throw synchronously; confirm.
        test.itShouldFail('if the path is invalid (points to collection)', () => {
          test.expectFail(() => {
            mockItemFirestoreDocumentAccessor.loadDocumentForKey('path');
          });
        });
        test.itShouldFail('if the path points to a different type/collection', () => {
          test.expectFail(() => {
            mockItemFirestoreDocumentAccessor.loadDocumentForKey('path/id');
          });
        });
        test.itShouldFail('if the path is empty.', () => {
          test.expectFail(() => {
            mockItemFirestoreDocumentAccessor.loadDocumentForKey('');
          });
        });
        test.itShouldFail('if the path is undefined.', () => {
          test.expectFail(() => {
            mockItemFirestoreDocumentAccessor.loadDocumentForKey(undefined);
          });
        });
        test.itShouldFail('if the path is null.', () => {
          test.expectFail(() => {
            mockItemFirestoreDocumentAccessor.loadDocumentForKey(null);
          });
        });
      });
      describe('loadDocumentForId()', () => {
        it('should return a document with the given id.', () => {
          const document = mockItemFirestoreDocumentAccessor.loadDocumentForId('id');
          expect(document).toBeDefined();
        });
        test.itShouldFail('if the id is empty.', () => {
          test.expectFail(() => {
            mockItemFirestoreDocumentAccessor.loadDocumentForId('');
          });
        });
        test.itShouldFail('if the id is undefined.', () => {
          test.expectFail(() => {
            mockItemFirestoreDocumentAccessor.loadDocumentForId(undefined);
          });
        });
      });
    });
  });
}
|
|
1310
|
+
/**
 * Registers a shared Jest test suite that exercises a FirestoreDocument and its
 * underlying accessor (snapshot/create/update/set/delete, transactions, write
 * batches, and snapshot streams).
 *
 * @param init - factory invoked in `beforeEach` that returns the test context `c`.
 *   From the usage below, `c` provides: `firestoreDocument()`, `dataForUpdate()`,
 *   `hasDataFromUpdate(data)`, `dataForFirstOfTwoUpdates()`, an optional
 *   `hasRemainingDataFromFirstOfTwoUpdate(data)`, `context` (with
 *   `runTransaction()` and `batch()`), `loadDocumentForTransaction()`, and
 *   `loadDocumentForWriteBatch()`. Presumably a MockItem test fixture context —
 *   confirm against callers.
 */
function describeFirestoreDocumentAccessorTests(init) {
  // Test state, re-created for every test via beforeEach below.
  let c; // test context returned by init()
  let sub; // SubscriptionObject used to clean up stream subscriptions
  let firestoreDocument; // document under test, provided by the context
  let accessor; // the document's low-level accessor
  beforeEach(() => {
    sub = new rxjs.SubscriptionObject();
    c = init();
    firestoreDocument = c.firestoreDocument();
    accessor = firestoreDocument.accessor;
  });
  afterEach(() => {
    // release any subscription created during the test
    sub.destroy();
  });
  // Standalone utility functions that operate on documents/snapshots.
  describe('utilities', () => {
    describe('getDocumentSnapshotPairs()', () => {
      it('should return the document and snapshot pairs for the input.', async () => {
        const pairs = await firebase.getDocumentSnapshotPairs([firestoreDocument]);
        expect(pairs.length).toBe(1);
        expect(pairs[0]).toBeDefined();
        expect(pairs[0].document).toBe(firestoreDocument);
        expect(pairs[0].snapshot).toBeDefined();
        expect(pairs[0].snapshot.data()).toBeDefined();
      });
    });
    describe('useDocumentSnapshot()', () => {
      it(`should use the input document value if it exists`, async () => {
        const exists = await firestoreDocument.exists();
        expect(exists).toBe(true);
        let snapshotUsed = false;
        await firebase.useDocumentSnapshot(firestoreDocument, snapshot => {
          expect(snapshot).toBeDefined();
          snapshotUsed = true;
        });
        expect(snapshotUsed).toBe(true);
      });
      it(`should not use the input undefined value`, async () => {
        // the use function must not be invoked when no document is given
        let snapshotUsed = false;
        await firebase.useDocumentSnapshot(undefined, snapshot => {
          expect(snapshot).toBeDefined();
          snapshotUsed = true;
        });
        expect(snapshotUsed).toBe(false);
      });
    });
    describe('useDocumentSnapshotData()', () => {
      it(`should use the input document's snapshot data if it exists`, async () => {
        const exists = await firestoreDocument.exists();
        expect(exists).toBe(true);
        let snapshotUsed = false;
        await firebase.useDocumentSnapshotData(firestoreDocument, data => {
          expect(data).toBeDefined();
          snapshotUsed = true;
        });
        expect(snapshotUsed).toBe(true);
      });
    });
  });
  // High-level FirestoreDocument API (wraps the accessor).
  describe('AbstractFirestoreDocument', () => {
    describe('snapshot()', () => {
      it('should return the snapshot.', async () => {
        const snapshot = await firestoreDocument.snapshot();
        expect(snapshot).toBeDefined();
      });
    });
    describe('snapshotData()', () => {
      it('should return the snapshot data if the model exists.', async () => {
        const exists = await firestoreDocument.exists();
        expect(exists).toBe(true);
        const data = await firestoreDocument.snapshotData();
        expect(data).toBeDefined();
      });
      it('should return the undefined if the model does not exist.', async () => {
        await accessor.delete();
        const exists = await firestoreDocument.exists();
        expect(exists).toBe(false);
        const data = await firestoreDocument.snapshotData();
        expect(data).toBeUndefined();
      });
    });
    describe('create()', () => {
      it('should create the document if it does not exist.', async () => {
        // capture data first, then delete so create() can re-create it
        const snapshot = await firestoreDocument.snapshot();
        await accessor.delete();
        let exists = await firestoreDocument.exists();
        expect(exists).toBe(false);
        await firestoreDocument.create(snapshot.data());
        exists = await firestoreDocument.exists();
        expect(exists).toBe(true);
      });
      test.itShouldFail('if the document exists.', async () => {
        const snapshot = await firestoreDocument.snapshot();
        const exists = await firestoreDocument.exists();
        expect(exists).toBe(true);
        await test.expectFail(() => firestoreDocument.create(snapshot.data()));
      });
    });
    describe('update()', () => {
      it('should update the data if the document exists.', async () => {
        const data = c.dataForUpdate();
        await firestoreDocument.update(data);
        const snapshot = await firestoreDocument.snapshot();
        expect(c.hasDataFromUpdate(snapshot.data())).toBe(true);
      });
      test.itShouldFail('if the document does not exist.', async () => {
        await accessor.delete();
        const snapshot = await firestoreDocument.snapshot();
        expect(snapshot.data()).toBe(undefined);
        const exists = await firestoreDocument.exists();
        expect(exists).toBe(false);
        await test.expectFail(() => firestoreDocument.update(c.dataForUpdate()));
      });
      it('should not throw an error if the input update data is empty.', async () => {
        // NOTE: differs from accessor.update({}), which is expected to fail below
        await firestoreDocument.update({});
      });
    });
    describe('transaction', () => {
      describe('stream$', () => {
        it('should not cause the transaction to fail if the document is loaded after changes have begun.', async () => {
          await c.context.runTransaction(async transaction => {
            const transactionDocument = await c.loadDocumentForTransaction(transaction, firestoreDocument.documentRef);
            const currentData = await transactionDocument.snapshotData();
            expect(currentData).toBeDefined();
            const data = c.dataForUpdate();
            await transactionDocument.update(data);
            // stream$ and data$ do not call stream() until called directly.
            const secondLoading = await c.loadDocumentForTransaction(transaction, firestoreDocument.documentRef);
            expect(secondLoading).toBeDefined();
          });
        });
        test.itShouldFail('if stream$ is called after an update has occured in the transaction', async () => {
          // Firestore transactions require all reads before writes, so reading
          // stream$ after update() must reject the transaction.
          await test.expectFail(() => c.context.runTransaction(async transaction => {
            const transactionDocument = await c.loadDocumentForTransaction(transaction, firestoreDocument.documentRef);
            const currentData = await transactionDocument.snapshotData();
            expect(currentData).toBeDefined();
            const data = c.dataForUpdate();
            await transactionDocument.update(data);
            // read the stream using a promise so the error is captured
            await rxjs$1.firstValueFrom(c.loadDocumentForTransaction(transaction, firestoreDocument.documentRef).stream$);
          }));
        });
      });
      describe('update()', () => {
        it('should update the data if the document exists.', async () => {
          await c.context.runTransaction(async transaction => {
            const transactionDocument = await c.loadDocumentForTransaction(transaction, firestoreDocument.documentRef);
            const currentData = await transactionDocument.snapshotData();
            expect(currentData).toBeDefined();
            const data = c.dataForUpdate();
            await transactionDocument.update(data);
          });
          // verify outside the transaction that the write was committed
          const snapshot = await firestoreDocument.snapshot();
          expect(c.hasDataFromUpdate(snapshot.data())).toBe(true);
        });
        describe('multiple updates', () => {
          it('should merge the updates together and override the values from the first update that are defined in the second update', async () => {
            await c.context.runTransaction(async transaction => {
              const transactionDocument = await c.loadDocumentForTransaction(transaction, firestoreDocument.documentRef);
              const currentData = await transactionDocument.snapshotData();
              expect(currentData).toBeDefined();
              const firstData = c.dataForFirstOfTwoUpdates();
              await transactionDocument.update(firstData);
              const data = c.dataForUpdate();
              await transactionDocument.update(data);
            });
            const snapshot = await firestoreDocument.snapshot();
            expect(c.hasDataFromUpdate(snapshot.data())).toBe(true);
            // optional hook: only some fixtures define leftover-data checks
            if (c.hasRemainingDataFromFirstOfTwoUpdate != null) {
              expect(c.hasRemainingDataFromFirstOfTwoUpdate(snapshot.data())).toBe(true);
            }
          });
        });
      });
    });
    describe('write batch', () => {
      describe('update()', () => {
        it('should update the data if the document exists.', async () => {
          const batch = c.context.batch();
          const batchDocument = await c.loadDocumentForWriteBatch(batch, firestoreDocument.documentRef);
          const data = c.dataForUpdate();
          await batchDocument.update(data);
          // batched writes are only visible after commit()
          await batch.commit();
          const snapshot = await firestoreDocument.snapshot();
          expect(c.hasDataFromUpdate(snapshot.data())).toBe(true);
        });
      });
    });
  });
  // Low-level accessor API (direct snapshot/CRUD operations).
  describe('accessor', () => {
    describe('stream()', () => {
      it('should return a snapshot stream', async () => {
        const result = await accessor.stream();
        expect(result).toBeDefined();
      });
      it('should emit values on updates from the observable.', test.callbackTest(done => {
        let count = 0;
        sub.subscription = accessor.stream().subscribe(item => {
          count += 1;
          if (count === 1) {
            // first emission: original data, before the update below
            expect(c.hasDataFromUpdate(item.data())).toBe(false);
          } else if (count === 2) {
            // second emission: reflects the update
            expect(c.hasDataFromUpdate(item.data())).toBe(true);
            done();
          }
        });
        // delay the update so the initial snapshot arrives first
        setTimeout(() => {
          accessor.update(c.dataForUpdate());
        }, 100);
      }));
      describe('in transition context', () => {
        let runTransaction;
        beforeEach(() => {
          runTransaction = c.context.runTransaction;
        });
        it('should return the first emitted value (observable completes immediately)', async () => {
          await runTransaction(async transaction => {
            const transactionItemDocument = c.loadDocumentForTransaction(transaction, accessor.documentRef);
            // load the value
            const value = await rxjs$1.firstValueFrom(transactionItemDocument.accessor.stream());
            expect(value).toBeDefined();
            // set to make the transaction valid
            await transactionItemDocument.accessor.set({
              value: 0
            }, {
              merge: true
            });
            return value;
          });
        });
      });
      describe('in batch context', () => {
        it('should return the first emitted value (observable completes immediately)', async () => {
          const writeBatch = c.context.batch();
          const batchItemDocument = c.loadDocumentForWriteBatch(writeBatch, accessor.documentRef);
          // load the value
          const value = await rxjs$1.firstValueFrom(batchItemDocument.accessor.stream());
          expect(value).toBeDefined();
          // set to make the batch changes valid
          await batchItemDocument.accessor.set({
            value: 0
          }, {
            merge: true
          });
          // commit the changes
          await writeBatch.commit();
        });
      });
    });
    describe('create()', () => {
      it('should create the document if it does not exist.', async () => {
        const snapshot = await accessor.get();
        await accessor.delete();
        let exists = await accessor.exists();
        expect(exists).toBe(false);
        await accessor.create(snapshot.data());
        exists = await accessor.exists();
        expect(exists).toBe(true);
      });
      test.itShouldFail('if the document exists.', async () => {
        const snapshot = await accessor.get();
        const exists = await accessor.exists();
        expect(exists).toBe(true);
        await test.expectFail(() => accessor.create(snapshot.data()));
      });
    });
    describe('get()', () => {
      it('should return a snapshot', async () => {
        const result = await accessor.get();
        expect(result).toBeDefined();
        expect(result.id).toBeDefined();
      });
    });
    describe('exists()', () => {
      it('should return true if the document exists', async () => {
        const exists = await accessor.exists();
        expect(exists).toBe(true);
      });
      it('should return false if the document does not exist', async () => {
        await accessor.delete();
        const exists = await accessor.exists();
        expect(exists).toBe(false);
      });
    });
    describe('update()', () => {
      it('should update the data if the document exists.', async () => {
        const data = c.dataForUpdate();
        await accessor.update(data);
        const snapshot = await accessor.get();
        expect(c.hasDataFromUpdate(snapshot.data())).toBe(true);
      });
      test.itShouldFail('if the document does not exist.', async () => {
        await accessor.delete();
        const snapshot = await accessor.get();
        expect(snapshot.data()).toBe(undefined);
        const exists = await accessor.exists();
        expect(exists).toBe(false);
        await test.expectFail(() => accessor.update(c.dataForUpdate()));
      });
      test.itShouldFail('if the input is an empty object.', async () => {
        // contrast with firestoreDocument.update({}), which succeeds above
        await test.expectFail(() => accessor.update({}));
      });
      // TODO(TEST): test that update does not call the converter when setting values.
    });
    describe('set()', () => {
      it('should create the object if it does not exist.', async () => {
        // set() upserts: deleting first proves it creates rather than updates
        await accessor.delete();
        let exists = await accessor.exists();
        expect(exists).toBe(false);
        const data = c.dataForUpdate();
        await accessor.set(data);
        exists = await accessor.exists();
        expect(exists).toBe(true);
        const snapshot = await accessor.get();
        expect(c.hasDataFromUpdate(snapshot.data())).toBe(true);
      });
      it('should update the data on the document for fields that are not undefined.', async () => {
        const data = c.dataForUpdate();
        await accessor.set(data);
        const snapshot = await accessor.get();
        expect(c.hasDataFromUpdate(snapshot.data())).toBe(true);
      });
      describe('merge=true', () => {
        it('should update the data if the document exists.', async () => {
          const data = c.dataForUpdate();
          await accessor.set(data, {
            merge: true
          });
          const snapshot = await accessor.get();
          expect(c.hasDataFromUpdate(snapshot.data())).toBe(true);
        });
        it('should succeed if the document does not exist.', async () => {
          // unlike update(), merge-set on a missing document creates it
          await accessor.delete();
          let snapshot = await accessor.get();
          expect(snapshot.data()).toBe(undefined);
          const exists = await accessor.exists();
          expect(exists).toBe(false);
          await accessor.set(c.dataForUpdate(), {
            merge: true
          });
          snapshot = await accessor.get();
          expect(c.hasDataFromUpdate(snapshot.data())).toBe(true);
        });
      });
      // TODO(TEST): test that set calls the converter when setting values.
    });
    describe('delete()', () => {
      it('should delete the document.', async () => {
        await accessor.delete();
        const snapshot = await accessor.get();
        expect(snapshot.data()).toBe(undefined);
        const exists = await accessor.exists();
        expect(exists).toBe(false);
      });
    });
  });
}
|
|
1666
|
+
|
|
1667
|
+
/**
 * Describes query driver tests, using a MockItemCollectionFixture.
 *
 * @param f - the MockItemCollectionFixture whose Firestore collection instance is exercised by these tests
 */
|
|
1672
|
+
function describeFirestoreQueryDriverTests(f) {
|
|
1673
|
+
describe('FirestoreQueryDriver', () => {
|
|
1674
|
+
const testDocumentCount = 5;
|
|
1675
|
+
let items;
|
|
1676
|
+
const startDate = dateFns.addDays(dateFns.startOfDay(new Date()), 1);
|
|
1677
|
+
const EVEN_TAG = 'even';
|
|
1678
|
+
const ODD_TAG = 'odd';
|
|
1679
|
+
beforeEach(async () => {
|
|
1680
|
+
items = await firebase.makeDocuments(f.instance.firestoreCollection.documentAccessor(), {
|
|
1681
|
+
count: testDocumentCount,
|
|
1682
|
+
init: i => {
|
|
1683
|
+
return {
|
|
1684
|
+
value: `${i}`,
|
|
1685
|
+
number: i,
|
|
1686
|
+
date: dateFns.addHours(startDate, i),
|
|
1687
|
+
tags: [`${i}`, `${util.isEvenNumber(i) ? EVEN_TAG : ODD_TAG}`],
|
|
1688
|
+
test: true
|
|
1689
|
+
};
|
|
1690
|
+
}
|
|
1691
|
+
});
|
|
1692
|
+
});
|
|
1693
|
+
describe('firestoreIdBatchVerifierFactory()', () => {
|
|
1694
|
+
const mockItemIdBatchVerifier = firebase.firestoreIdBatchVerifierFactory({
|
|
1695
|
+
readKeys: x => [x.id],
|
|
1696
|
+
fieldToQuery: '_id'
|
|
1697
|
+
});
|
|
1698
|
+
it('should query on the id field.', async () => {
|
|
1699
|
+
const takenIds = items.map(x => x.id);
|
|
1700
|
+
const result = await f.instance.mockItemCollection.queryDocument(firebase.whereDocumentId('in', takenIds)).getDocs();
|
|
1701
|
+
expect(result).toBeDefined();
|
|
1702
|
+
expect(result.length).toBe(takenIds.length);
|
|
1703
|
+
expect(result.map(x => x.id)).toContain(takenIds[0]);
|
|
1704
|
+
});
|
|
1705
|
+
it('should return ids that are not taken.', async () => {
|
|
1706
|
+
const takenIds = items.map(x => x.id);
|
|
1707
|
+
const idFactory = util.arrayFactory(util.mapGetter(util.randomNumberFactory(10000000), x => `test-id-${x}`));
|
|
1708
|
+
const random = util.randomFromArrayFactory(takenIds);
|
|
1709
|
+
const factory = util.idBatchFactory({
|
|
1710
|
+
verifier: mockItemIdBatchVerifier(f.instance.mockItemCollection),
|
|
1711
|
+
factory: count => {
|
|
1712
|
+
const ids = [random(), ...idFactory(count)];
|
|
1713
|
+
return ids;
|
|
1714
|
+
}
|
|
1715
|
+
});
|
|
1716
|
+
const idsToMake = 30;
|
|
1717
|
+
const result = await factory(idsToMake);
|
|
1718
|
+
expect(result).toBeDefined();
|
|
1719
|
+
expect(util.unique(result).length).toBe(idsToMake);
|
|
1720
|
+
expect(util.unique(result, takenIds).length).toBe(idsToMake);
|
|
1721
|
+
});
|
|
1722
|
+
});
|
|
1723
|
+
describe('mockItemUser', () => {
|
|
1724
|
+
let testUserId;
|
|
1725
|
+
let allMockUserItems;
|
|
1726
|
+
beforeEach(async () => {
|
|
1727
|
+
testUserId = 'userid' + Math.ceil(Math.random() * 100000);
|
|
1728
|
+
const results = await Promise.all(items.map(parent => firebase.makeDocuments(f.instance.mockItemUserCollection(parent).documentAccessor(), {
|
|
1729
|
+
count: 1,
|
|
1730
|
+
newDocument: x => x.loadDocumentForId(testUserId),
|
|
1731
|
+
init: i => {
|
|
1732
|
+
return {
|
|
1733
|
+
uid: '',
|
|
1734
|
+
name: `name ${i}`
|
|
1735
|
+
};
|
|
1736
|
+
}
|
|
1737
|
+
})));
|
|
1738
|
+
allMockUserItems = results.flat();
|
|
1739
|
+
});
|
|
1740
|
+
describe('utils', () => {
|
|
1741
|
+
describe('iterate load firestore utilities', () => {
|
|
1742
|
+
describe('loadAllFirestoreDocumentSnapshotPairs()', () => {
|
|
1743
|
+
it('should iterate batches of snapshot pairs.', async () => {
|
|
1744
|
+
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
1745
|
+
const mockUserItemsVisited = new Set();
|
|
1746
|
+
const result = await firebase.loadAllFirestoreDocumentSnapshotPairs({
|
|
1747
|
+
documentAccessor,
|
|
1748
|
+
iterateSnapshotPairsBatch: async x => {
|
|
1749
|
+
x.forEach(y => mockUserItemsVisited.add(y.document.key));
|
|
1750
|
+
const pair = x[0];
|
|
1751
|
+
expect(pair.data).toBeDefined();
|
|
1752
|
+
expect(pair.snapshot).toBeDefined();
|
|
1753
|
+
expect(pair.document).toBeDefined();
|
|
1754
|
+
},
|
|
1755
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
1756
|
+
constraintsFactory: [] // no constraints
|
|
1757
|
+
});
|
|
1758
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
1759
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
1760
|
+
expect(result.snapshotPairs.length).toBe(allMockUserItems.length);
|
|
1761
|
+
expect(result.snapshotPairs[0].data).toBeDefined();
|
|
1762
|
+
expect(result.snapshotPairs[0].document).toBeDefined();
|
|
1763
|
+
expect(result.snapshotPairs[0].snapshot).toBeDefined();
|
|
1764
|
+
});
|
|
1765
|
+
});
|
|
1766
|
+
describe('loadAllFirestoreDocumentSnapshot()', () => {
|
|
1767
|
+
it('should iterate batches of snapshot pairs.', async () => {
|
|
1768
|
+
const mockUserItemsVisited = new Set();
|
|
1769
|
+
const result = await firebase.loadAllFirestoreDocumentSnapshot({
|
|
1770
|
+
iterateSnapshotsForCheckpoint: async x => {
|
|
1771
|
+
x.forEach(y => mockUserItemsVisited.add(y.ref.path));
|
|
1772
|
+
const snapshot = x[0];
|
|
1773
|
+
expect(snapshot.ref).toBeDefined();
|
|
1774
|
+
expect(snapshot.data()).toBeDefined();
|
|
1775
|
+
},
|
|
1776
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
1777
|
+
constraintsFactory: [] // no constraints
|
|
1778
|
+
});
|
|
1779
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
1780
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
1781
|
+
expect(result.snapshots.length).toBe(allMockUserItems.length);
|
|
1782
|
+
expect(result.snapshots[0].ref).toBeDefined();
|
|
1783
|
+
expect(result.snapshots[0].data()).toBeDefined();
|
|
1784
|
+
});
|
|
1785
|
+
});
|
|
1786
|
+
});
|
|
1787
|
+
describe('iterate firestore utilities', () => {
|
|
1788
|
+
describe('iterateFirestoreDocumentSnapshotPairs()', () => {
|
|
1789
|
+
it('should iterate across all mock users by each snapshot pair.', async () => {
|
|
1790
|
+
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
1791
|
+
const mockUserItemsVisited = new Set();
|
|
1792
|
+
const batchSize = 2;
|
|
1793
|
+
const result = await firebase.iterateFirestoreDocumentSnapshotPairs({
|
|
1794
|
+
batchSize,
|
|
1795
|
+
handleRepeatCursor: false,
|
|
1796
|
+
// exit immediately if the cursor is visited again
|
|
1797
|
+
filterCheckpointSnapshots: async x => {
|
|
1798
|
+
return x;
|
|
1799
|
+
},
|
|
1800
|
+
iterateSnapshotPair: async x => {
|
|
1801
|
+
expect(x.data).toBeDefined();
|
|
1802
|
+
expect(x.snapshot).toBeDefined();
|
|
1803
|
+
expect(x.document).toBeDefined();
|
|
1804
|
+
const key = x.document.key;
|
|
1805
|
+
if (mockUserItemsVisited.has(key)) {
|
|
1806
|
+
throw new Error('encountered repeat key');
|
|
1807
|
+
} else {
|
|
1808
|
+
mockUserItemsVisited.add(key);
|
|
1809
|
+
}
|
|
1810
|
+
},
|
|
1811
|
+
useCheckpointResult: async x => {
|
|
1812
|
+
if (x.docSnapshots.length > 0) {
|
|
1813
|
+
expect(x.results[0].snapshots.length).toBeLessThanOrEqual(batchSize);
|
|
1814
|
+
}
|
|
1815
|
+
},
|
|
1816
|
+
documentAccessor,
|
|
1817
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
1818
|
+
constraintsFactory: [] // no constraints
|
|
1819
|
+
});
|
|
1820
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
1821
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
1822
|
+
});
|
|
1823
|
+
describe('1 item exists', () => {
|
|
1824
|
+
let onlyItem;
|
|
1825
|
+
beforeEach(async () => {
|
|
1826
|
+
onlyItem = allMockUserItems.pop();
|
|
1827
|
+
await Promise.all(allMockUserItems.map(x => x.accessor.delete()));
|
|
1828
|
+
allMockUserItems = [onlyItem];
|
|
1829
|
+
});
|
|
1830
|
+
it('should iterate the single item', async () => {
|
|
1831
|
+
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
1832
|
+
const mockUserItemsVisited = new Set();
|
|
1833
|
+
expect(allMockUserItems).toHaveLength(1);
|
|
1834
|
+
const result = await firebase.iterateFirestoreDocumentSnapshotPairs({
|
|
1835
|
+
iterateSnapshotPair: async x => {
|
|
1836
|
+
expect(x.data).toBeDefined();
|
|
1837
|
+
expect(x.snapshot).toBeDefined();
|
|
1838
|
+
expect(x.document).toBeDefined();
|
|
1839
|
+
const key = x.document.key;
|
|
1840
|
+
if (mockUserItemsVisited.has(key)) {
|
|
1841
|
+
throw new Error('encountered repeat key');
|
|
1842
|
+
} else {
|
|
1843
|
+
mockUserItemsVisited.add(key);
|
|
1844
|
+
}
|
|
1845
|
+
},
|
|
1846
|
+
documentAccessor,
|
|
1847
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
1848
|
+
batchSize: null,
|
|
1849
|
+
limitPerCheckpoint: 200,
|
|
1850
|
+
totalSnapshotsLimit: 100,
|
|
1851
|
+
performTasksConfig: {
|
|
1852
|
+
maxParallelTasks: 20
|
|
1853
|
+
},
|
|
1854
|
+
constraintsFactory: [] // no constraints
|
|
1855
|
+
});
|
|
1856
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
1857
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
1858
|
+
});
|
|
1859
|
+
// TODO(TEST): Case where a document was visited twice via iteration after it was updated. Assumed
|
|
1860
|
+
// to occur when the updated item matches an "or" case or other value when using "in". Cannot
|
|
1861
|
+
// reproduce at the moment.
|
|
1862
|
+
/*
|
|
1863
|
+
describe('scenario', () => {
|
|
1864
|
+
it('should visit the item twice if it is updated and matches a different filter', async () => {
|
|
1865
|
+
const onlyItemValue = await onlyItem.snapshotData() as MockItemUser;
|
|
1866
|
+
const nameToChangeTo = `${onlyItemValue.name}-changed`;
|
|
1867
|
+
const namesToFilter = [onlyItemValue.name, nameToChangeTo];
|
|
1868
|
+
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
1869
|
+
const mockUserItemsVisited = new Set<MockItemUserKey>();
|
|
1870
|
+
let updates = 0;
|
|
1871
|
+
expect(allMockUserItems).toHaveLength(1);
|
|
1872
|
+
const result = await iterateFirestoreDocumentSnapshotPairs({
|
|
1873
|
+
iterateSnapshotPair: async (x) => {
|
|
1874
|
+
expect(x.data).toBeDefined();
|
|
1875
|
+
expect(x.snapshot).toBeDefined();
|
|
1876
|
+
expect(x.document).toBeDefined();
|
|
1877
|
+
await x.document.update({ name: nameToChangeTo });
|
|
1878
|
+
updates += 1;
|
|
1879
|
+
const key = x.document.key;
|
|
1880
|
+
mockUserItemsVisited.add(key);
|
|
1881
|
+
},
|
|
1882
|
+
documentAccessor,
|
|
1883
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
1884
|
+
batchSize: null,
|
|
1885
|
+
limitPerCheckpoint: 200,
|
|
1886
|
+
totalSnapshotsLimit: 100,
|
|
1887
|
+
performTasksConfig: {
|
|
1888
|
+
maxParallelTasks: 20
|
|
1889
|
+
},
|
|
1890
|
+
constraintsFactory: () => [where<MockItemUser>('name', 'in', namesToFilter)],
|
|
1891
|
+
});
|
|
1892
|
+
expect(updates).toBe(2);
|
|
1893
|
+
expect(result.totalSnapshotsVisited).toBe(2);
|
|
1894
|
+
expect(mockUserItemsVisited.size).toBe(1);
|
|
1895
|
+
});
|
|
1896
|
+
});
|
|
1897
|
+
*/
|
|
1898
|
+
});
|
|
1899
|
+
describe('0 items exists', () => {
|
|
1900
|
+
beforeEach(async () => {
|
|
1901
|
+
await Promise.all(allMockUserItems.map(x => x.accessor.delete()));
|
|
1902
|
+
});
|
|
1903
|
+
it('should iterate no items', async () => {
|
|
1904
|
+
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
1905
|
+
const mockUserItemsVisited = new Set();
|
|
1906
|
+
const result = await firebase.iterateFirestoreDocumentSnapshotPairs({
|
|
1907
|
+
iterateSnapshotPair: async x => {
|
|
1908
|
+
expect(x.data).toBeDefined();
|
|
1909
|
+
expect(x.snapshot).toBeDefined();
|
|
1910
|
+
expect(x.document).toBeDefined();
|
|
1911
|
+
const key = x.document.key;
|
|
1912
|
+
if (mockUserItemsVisited.has(key)) {
|
|
1913
|
+
throw new Error('encountered repeat key');
|
|
1914
|
+
} else {
|
|
1915
|
+
mockUserItemsVisited.add(key);
|
|
1916
|
+
}
|
|
1917
|
+
},
|
|
1918
|
+
documentAccessor,
|
|
1919
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
1920
|
+
constraintsFactory: [] // no constraints
|
|
1921
|
+
});
|
|
1922
|
+
expect(result.totalSnapshotsVisited).toBe(0);
|
|
1923
|
+
expect(mockUserItemsVisited.size).toBe(0);
|
|
1924
|
+
});
|
|
1925
|
+
});
|
|
1926
|
+
});
|
|
1927
|
+
describe('iterateFirestoreDocumentSnapshots()', () => {
|
|
1928
|
+
it('should iterate across all mock users by each snapshot.', async () => {
|
|
1929
|
+
f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
1930
|
+
const mockUserItemsVisited = new Set();
|
|
1931
|
+
const batchSize = 2;
|
|
1932
|
+
const result = await firebase.iterateFirestoreDocumentSnapshots({
|
|
1933
|
+
batchSize,
|
|
1934
|
+
iterateSnapshot: async x => {
|
|
1935
|
+
const key = x.ref.path;
|
|
1936
|
+
if (mockUserItemsVisited.has(key)) {
|
|
1937
|
+
throw new Error('encountered repeat key');
|
|
1938
|
+
} else {
|
|
1939
|
+
mockUserItemsVisited.add(key);
|
|
1940
|
+
}
|
|
1941
|
+
},
|
|
1942
|
+
useCheckpointResult: async x => {
|
|
1943
|
+
if (x.docSnapshots.length > 0) {
|
|
1944
|
+
expect(x.results[0].snapshots.length).toBeLessThanOrEqual(batchSize);
|
|
1945
|
+
}
|
|
1946
|
+
},
|
|
1947
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
1948
|
+
constraintsFactory: [] // no constraints
|
|
1949
|
+
});
|
|
1950
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
1951
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
1952
|
+
});
|
|
1953
|
+
});
|
|
1954
|
+
describe('iterateFirestoreDocumentSnapshotPairBatches()', () => {
|
|
1955
|
+
it('should iterate batches of snapshot pairs.', async () => {
|
|
1956
|
+
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
1957
|
+
const mockUserItemsVisited = new Set();
|
|
1958
|
+
const batchSize = 2;
|
|
1959
|
+
const result = await firebase.iterateFirestoreDocumentSnapshotPairBatches({
|
|
1960
|
+
documentAccessor,
|
|
1961
|
+
batchSize,
|
|
1962
|
+
// use specific batch size
|
|
1963
|
+
iterateSnapshotPairsBatch: async x => {
|
|
1964
|
+
expect(x.length).toBeLessThanOrEqual(batchSize);
|
|
1965
|
+
const pair = x[0];
|
|
1966
|
+
expect(pair.data).toBeDefined();
|
|
1967
|
+
expect(pair.snapshot).toBeDefined();
|
|
1968
|
+
expect(pair.document).toBeDefined();
|
|
1969
|
+
},
|
|
1970
|
+
useCheckpointResult: async x => {
|
|
1971
|
+
x.docSnapshots.forEach(y => mockUserItemsVisited.add(y.ref.path));
|
|
1972
|
+
},
|
|
1973
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
1974
|
+
constraintsFactory: [] // no constraints
|
|
1975
|
+
});
|
|
1976
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
1977
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
1978
|
+
});
|
|
1979
|
+
});
|
|
1980
|
+
describe('iterateFirestoreDocumentSnapshotBatches()', () => {
|
|
1981
|
+
it('should iterate batches of snapshots.', async () => {
|
|
1982
|
+
const mockUserItemsVisited = new Set();
|
|
1983
|
+
const batchSize = 2;
|
|
1984
|
+
const result = await firebase.iterateFirestoreDocumentSnapshotBatches({
|
|
1985
|
+
batchSize,
|
|
1986
|
+
// use specific batch size
|
|
1987
|
+
iterateSnapshotBatch: async x => {
|
|
1988
|
+
expect(x.length).toBeLessThanOrEqual(batchSize);
|
|
1989
|
+
},
|
|
1990
|
+
useCheckpointResult: async x => {
|
|
1991
|
+
x.docSnapshots.forEach(y => mockUserItemsVisited.add(y.ref.path));
|
|
1992
|
+
},
|
|
1993
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
1994
|
+
constraintsFactory: [] // no constraints
|
|
1995
|
+
});
|
|
1996
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
1997
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
1998
|
+
});
|
|
1999
|
+
describe('limitPerCheckpoint', () => {
|
|
2000
|
+
describe('limitPerCheckpoint = 0', () => {
|
|
2001
|
+
it('should not iterate any batches', async () => {
|
|
2002
|
+
const result = await firebase.iterateFirestoreDocumentSnapshotBatches({
|
|
2003
|
+
limitPerCheckpoint: 0,
|
|
2004
|
+
iterateSnapshotBatch: async x => {
|
|
2005
|
+
expect(x.length).toBe(0);
|
|
2006
|
+
},
|
|
2007
|
+
useCheckpointResult: async x => {
|
|
2008
|
+
expect(x.docSnapshots.length).toBe(0);
|
|
2009
|
+
},
|
|
2010
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
2011
|
+
constraintsFactory: [] // no constraints
|
|
2012
|
+
});
|
|
2013
|
+
expect(result.totalSnapshotsVisited).toBe(0);
|
|
2014
|
+
expect(result.totalSnapshotsLimitReached).toBe(true);
|
|
2015
|
+
});
|
|
2016
|
+
});
|
|
2017
|
+
});
|
|
2018
|
+
describe('maxParallelCheckpoints>1', () => {
|
|
2019
|
+
it('should process the checkpoints in parallel.', async () => {
|
|
2020
|
+
const mockUserItemsVisited = new Set();
|
|
2021
|
+
const batchSize = 1;
|
|
2022
|
+
const maxParallelCheckpoints = 4;
|
|
2023
|
+
let currentRunningTasks = 0;
|
|
2024
|
+
let maxRunningTasks = 0;
|
|
2025
|
+
const result = await firebase.iterateFirestoreDocumentSnapshotBatches({
|
|
2026
|
+
batchSize,
|
|
2027
|
+
// use specific batch size
|
|
2028
|
+
limitPerCheckpoint: 1,
|
|
2029
|
+
maxParallelCheckpoints,
|
|
2030
|
+
// do four checkpoints in parallel
|
|
2031
|
+
iterateSnapshotBatch: async (x, batchIndex) => {
|
|
2032
|
+
currentRunningTasks += 1;
|
|
2033
|
+
await util.waitForMs(1000);
|
|
2034
|
+
maxRunningTasks = Math.max(maxRunningTasks, currentRunningTasks);
|
|
2035
|
+
currentRunningTasks -= 1;
|
|
2036
|
+
},
|
|
2037
|
+
useCheckpointResult: async x => {
|
|
2038
|
+
x.docSnapshots.forEach(y => mockUserItemsVisited.add(y.ref.path));
|
|
2039
|
+
},
|
|
2040
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
2041
|
+
constraintsFactory: [] // no constraints
|
|
2042
|
+
});
|
|
2043
|
+
expect(maxRunningTasks).toBe(maxParallelCheckpoints);
|
|
2044
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
2045
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
2046
|
+
});
|
|
2047
|
+
});
|
|
2048
|
+
describe('batchSize=null', () => {
|
|
2049
|
+
it('should iterate with a single batch', async () => {
|
|
2050
|
+
const mockUserItemsVisited = new Set();
|
|
2051
|
+
const batchSize = null;
|
|
2052
|
+
const result = await firebase.iterateFirestoreDocumentSnapshotBatches({
|
|
2053
|
+
batchSize,
|
|
2054
|
+
// use specific batch size
|
|
2055
|
+
iterateSnapshotBatch: async x => {
|
|
2056
|
+
expect(x.length).toBe(allMockUserItems.length);
|
|
2057
|
+
},
|
|
2058
|
+
useCheckpointResult: async x => {
|
|
2059
|
+
x.docSnapshots.forEach(y => mockUserItemsVisited.add(y.ref.path));
|
|
2060
|
+
},
|
|
2061
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
2062
|
+
constraintsFactory: [] // no constraints
|
|
2063
|
+
});
|
|
2064
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
2065
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
2066
|
+
});
|
|
2067
|
+
});
|
|
2068
|
+
describe('batchSizeForSnapshots: () => null', () => {
|
|
2069
|
+
it('should iterate with a single batch', async () => {
|
|
2070
|
+
const mockUserItemsVisited = new Set();
|
|
2071
|
+
const result = await firebase.iterateFirestoreDocumentSnapshotBatches({
|
|
2072
|
+
batchSizeForSnapshots: () => null,
|
|
2073
|
+
iterateSnapshotBatch: async x => {
|
|
2074
|
+
expect(x.length).toBe(allMockUserItems.length);
|
|
2075
|
+
},
|
|
2076
|
+
useCheckpointResult: async x => {
|
|
2077
|
+
x.docSnapshots.forEach(y => mockUserItemsVisited.add(y.ref.path));
|
|
2078
|
+
},
|
|
2079
|
+
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
2080
|
+
constraintsFactory: [] // no constraints
|
|
2081
|
+
});
|
|
2082
|
+
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
2083
|
+
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
2084
|
+
});
|
|
2085
|
+
});
|
|
2086
|
+
});
|
|
2087
|
+
});
|
|
2088
|
+
});
|
|
2089
|
+
describe('collection group', () => {
|
|
2090
|
+
describe('query', () => {
|
|
2091
|
+
describe('constraints', () => {
|
|
2092
|
+
describe('where', () => {
|
|
2093
|
+
it('should return the documents matching the input uid', async () => {
|
|
2094
|
+
const result = await f.instance.mockItemUserCollectionGroup.query(firebase.where('uid', '==', testUserId)).getDocs();
|
|
2095
|
+
expect(result.docs.length).toBe(testDocumentCount);
|
|
2096
|
+
result.docs.forEach(x => {
|
|
2097
|
+
expect(x.data().uid).toBe(testUserId);
|
|
2098
|
+
});
|
|
2099
|
+
});
|
|
2100
|
+
});
|
|
2101
|
+
});
|
|
2102
|
+
});
|
|
2103
|
+
});
|
|
2104
|
+
});
|
|
2105
|
+
describe('nested items', () => {
|
|
2106
|
+
const subItemCountPerItem = 2;
|
|
2107
|
+
const totalSubItemsCount = subItemCountPerItem * testDocumentCount;
|
|
2108
|
+
let parentA;
|
|
2109
|
+
let querySubItems;
|
|
2110
|
+
let allSubItems;
|
|
2111
|
+
beforeEach(async () => {
|
|
2112
|
+
querySubItems = f.instance.mockItemSubItemCollectionGroup.query;
|
|
2113
|
+
parentA = items[0];
|
|
2114
|
+
const results = await Promise.all(items.map(parent => firebase.makeDocuments(f.instance.mockItemSubItemCollection(parent).documentAccessor(), {
|
|
2115
|
+
count: subItemCountPerItem,
|
|
2116
|
+
init: i => {
|
|
2117
|
+
return {
|
|
2118
|
+
value: i
|
|
2119
|
+
};
|
|
2120
|
+
}
|
|
2121
|
+
})));
|
|
2122
|
+
allSubItems = results.flat();
|
|
2123
|
+
});
|
|
2124
|
+
describe('sub sub item', () => {
|
|
2125
|
+
const deepSubItemCountPerItem = 1;
|
|
2126
|
+
const totalDeepSubItemsPerMockItem = subItemCountPerItem * deepSubItemCountPerItem;
|
|
2127
|
+
let queryDeepSubItems;
|
|
2128
|
+
beforeEach(async () => {
|
|
2129
|
+
queryDeepSubItems = f.instance.mockItemSubItemDeepCollectionGroup.query;
|
|
2130
|
+
allSubItems[0];
|
|
2131
|
+
const results = await Promise.all(allSubItems.map(parent => firebase.makeDocuments(f.instance.mockItemSubItemDeepCollection(parent).documentAccessor(), {
|
|
2132
|
+
count: deepSubItemCountPerItem,
|
|
2133
|
+
init: i => {
|
|
2134
|
+
return {
|
|
2135
|
+
value: i
|
|
2136
|
+
};
|
|
2137
|
+
}
|
|
2138
|
+
})));
|
|
2139
|
+
results.flat();
|
|
2140
|
+
});
|
|
2141
|
+
// tests querying for all nested items under a parent
|
|
2142
|
+
it('querying for only items belonging to mock item parentA', async () => {
|
|
2143
|
+
const result = await queryDeepSubItems(allChildMockItemSubItemDeepsWithinMockItem(parentA.documentRef)).getDocs();
|
|
2144
|
+
expect(result.docs.length).toBe(totalDeepSubItemsPerMockItem);
|
|
2145
|
+
result.docs.forEach(x => expect(x.ref.parent?.parent?.parent?.parent?.path).toBe(parentA.documentRef.path));
|
|
2146
|
+
});
|
|
2147
|
+
// TODO(TEST): Add tests for allChildDocumentsUnderRelativePath
|
|
2148
|
+
});
|
|
2149
|
+
describe('sub item', () => {
|
|
2150
|
+
describe('collection group', () => {
|
|
2151
|
+
describe('query', () => {
|
|
2152
|
+
it('should return sub items', async () => {
|
|
2153
|
+
const result = await querySubItems().getDocs();
|
|
2154
|
+
expect(result.docs.length).toBe(totalSubItemsCount);
|
|
2155
|
+
});
|
|
2156
|
+
describe('constraints', () => {
|
|
2157
|
+
describe('where', () => {
|
|
2158
|
+
it('should return the documents matching the query.', async () => {
|
|
2159
|
+
const value = 0;
|
|
2160
|
+
const result = await querySubItems(firebase.where('value', '==', value)).getDocs();
|
|
2161
|
+
expect(result.docs.length).toBe(testDocumentCount);
|
|
2162
|
+
expect(result.docs[0].data().value).toBe(value);
|
|
2163
|
+
const ref = result.docs[0].ref;
|
|
2164
|
+
expect(ref).toBeDefined();
|
|
2165
|
+
expect(ref.parent).toBeDefined();
|
|
2166
|
+
});
|
|
2167
|
+
});
|
|
2168
|
+
describe('whereDocumentId', () => {
|
|
2169
|
+
test.itShouldFail('to query on collection groups.', async () => {
|
|
2170
|
+
// https://stackoverflow.com/questions/56149601/firestore-collection-group-query-on-documentid
|
|
2171
|
+
const targetId = 'targetid';
|
|
2172
|
+
/*
|
|
2173
|
+
const results = await Promise.all(
|
|
2174
|
+
allSubItems.map((parent: MockItemSubItemDocument) =>
|
|
2175
|
+
makeDocuments(f.instance.mockItemSubItemDeepCollection(parent).documentAccessor(), {
|
|
2176
|
+
count: 1,
|
|
2177
|
+
newDocument: (x) => x.loadDocumentForId(targetId),
|
|
2178
|
+
init: (i) => {
|
|
2179
|
+
return {
|
|
2180
|
+
value: i
|
|
2181
|
+
};
|
|
2182
|
+
}
|
|
2183
|
+
})
|
|
2184
|
+
)
|
|
2185
|
+
);
|
|
2186
|
+
*/
|
|
2187
|
+
await test.expectFail(() => querySubItems(firebase.whereDocumentId('==', targetId)).getDocs());
|
|
2188
|
+
});
|
|
2189
|
+
});
|
|
2190
|
+
});
|
|
2191
|
+
describe('streamDocs()', () => {
|
|
2192
|
+
let sub;
|
|
2193
|
+
beforeEach(() => {
|
|
2194
|
+
sub = new rxjs.SubscriptionObject();
|
|
2195
|
+
});
|
|
2196
|
+
afterEach(() => {
|
|
2197
|
+
sub.destroy();
|
|
2198
|
+
});
|
|
2199
|
+
it('should emit when the query results update (an item is added).', test.callbackTest(done => {
|
|
2200
|
+
const itemsToAdd = 1;
|
|
2201
|
+
let addCompleted = false;
|
|
2202
|
+
let addSeen = false;
|
|
2203
|
+
function tryComplete() {
|
|
2204
|
+
if (addSeen && addCompleted) {
|
|
2205
|
+
done();
|
|
2206
|
+
}
|
|
2207
|
+
}
|
|
2208
|
+
sub.subscription = querySubItems().streamDocs().pipe(rxjs$1.filter(x => x.docs.length > allSubItems.length)).subscribe(results => {
|
|
2209
|
+
addSeen = true;
|
|
2210
|
+
expect(results.docs.length).toBe(allSubItems.length + itemsToAdd);
|
|
2211
|
+
tryComplete();
|
|
2212
|
+
});
|
|
2213
|
+
// add one item
|
|
2214
|
+
firebase.makeDocuments(f.instance.mockItemSubItemCollection(parentA).documentAccessor(), {
|
|
2215
|
+
count: itemsToAdd,
|
|
2216
|
+
init: i => {
|
|
2217
|
+
return {
|
|
2218
|
+
value: i
|
|
2219
|
+
};
|
|
2220
|
+
}
|
|
2221
|
+
}).then(() => {
|
|
2222
|
+
addCompleted = true;
|
|
2223
|
+
tryComplete();
|
|
2224
|
+
});
|
|
2225
|
+
}));
|
|
2226
|
+
it('should emit when the query results update (an item is removed).', test.callbackTest(done => {
|
|
2227
|
+
const itemsToRemove = 1;
|
|
2228
|
+
let deleteCompleted = false;
|
|
2229
|
+
let deleteSeen = false;
|
|
2230
|
+
function tryComplete() {
|
|
2231
|
+
if (deleteSeen && deleteCompleted) {
|
|
2232
|
+
done();
|
|
2233
|
+
}
|
|
2234
|
+
}
|
|
2235
|
+
sub.subscription = querySubItems().streamDocs().pipe(rxjs$1.filter(x => x.docs.length < allSubItems.length)).subscribe(results => {
|
|
2236
|
+
deleteSeen = true;
|
|
2237
|
+
expect(results.docs.length).toBe(allSubItems.length - itemsToRemove);
|
|
2238
|
+
tryComplete();
|
|
2239
|
+
});
|
|
2240
|
+
allSubItems[0].accessor.exists().then(exists => {
|
|
2241
|
+
expect(exists).toBe(true);
|
|
2242
|
+
// remove one item
|
|
2243
|
+
return allSubItems[0].accessor.delete().then(() => {
|
|
2244
|
+
deleteCompleted = true;
|
|
2245
|
+
tryComplete();
|
|
2246
|
+
});
|
|
2247
|
+
});
|
|
2248
|
+
}));
|
|
2249
|
+
});
|
|
2250
|
+
});
|
|
2251
|
+
});
|
|
2252
|
+
});
|
|
2253
|
+
});
|
|
2254
|
+
describe('queryDocument', () => {
|
|
2255
|
+
let queryDocument;
|
|
2256
|
+
beforeEach(async () => {
|
|
2257
|
+
queryDocument = f.instance.firestoreCollection.queryDocument;
|
|
2258
|
+
});
|
|
2259
|
+
describe('filter()', () => {
|
|
2260
|
+
it('should apply the filter to the query', async () => {
|
|
2261
|
+
const results = await queryDocument().filter(firebase.where('tags', 'array-contains', EVEN_TAG)).getDocSnapshotDataPairs();
|
|
2262
|
+
expect(results).toBeDefined();
|
|
2263
|
+
results.forEach(result => {
|
|
2264
|
+
expect(result.data).toBeDefined();
|
|
2265
|
+
expect(result.data?.tags).toContain(EVEN_TAG);
|
|
2266
|
+
expect(result.document).toBeDefined();
|
|
2267
|
+
expect(result.document instanceof MockItemDocument).toBe(true);
|
|
2268
|
+
expect(result.snapshot).toBeDefined();
|
|
2269
|
+
expect(result.snapshot.data()).toBeDefined();
|
|
2270
|
+
expect(result.snapshot.ref).toBeDefined();
|
|
2271
|
+
expect(result.snapshot.id).toBe(result.document.id);
|
|
2272
|
+
});
|
|
2273
|
+
});
|
|
2274
|
+
it('should add more filters to the existing query', async () => {
|
|
2275
|
+
const results = await queryDocument().filter(firebase.where('tags', 'array-contains', EVEN_TAG)).filter(firebase.where('number', '>=', 4)).getDocSnapshotDataPairs();
|
|
2276
|
+
expect(results).toBeDefined();
|
|
2277
|
+
expect(results.length).toBe(1);
|
|
2278
|
+
results.forEach(result => {
|
|
2279
|
+
expect(result.data).toBeDefined();
|
|
2280
|
+
expect(result.data?.tags).toContain(EVEN_TAG);
|
|
2281
|
+
expect(result.data?.number).toBeGreaterThanOrEqual(4);
|
|
2282
|
+
expect(result.document).toBeDefined();
|
|
2283
|
+
expect(result.document instanceof MockItemDocument).toBe(true);
|
|
2284
|
+
expect(result.snapshot).toBeDefined();
|
|
2285
|
+
expect(result.snapshot.data()).toBeDefined();
|
|
2286
|
+
expect(result.snapshot.ref).toBeDefined();
|
|
2287
|
+
expect(result.snapshot.id).toBe(result.document.id);
|
|
2288
|
+
});
|
|
2289
|
+
});
|
|
2290
|
+
});
|
|
2291
|
+
describe('getFirstDocSnapshotDataPair()', () => {
|
|
2292
|
+
it('should return undefined if the query contains nothing', async () => {
|
|
2293
|
+
const result = await queryDocument(firebase.where('value', '==', '_DOES_NOT_EXIST_')).getFirstDocSnapshotDataPair();
|
|
2294
|
+
expect(result).not.toBeDefined();
|
|
2295
|
+
});
|
|
2296
|
+
it('should return the first doc that matches if it exists', async () => {
|
|
2297
|
+
const result = await queryDocument().getFirstDocSnapshotDataPair();
|
|
2298
|
+
expect(result).toBeDefined();
|
|
2299
|
+
expect(result.data).toBeDefined();
|
|
2300
|
+
expect(result.document).toBeDefined();
|
|
2301
|
+
expect(result.document instanceof MockItemDocument).toBe(true);
|
|
2302
|
+
expect(result.snapshot).toBeDefined();
|
|
2303
|
+
expect(result.snapshot.data()).toBeDefined();
|
|
2304
|
+
expect(result.snapshot.ref).toBeDefined();
|
|
2305
|
+
expect(result.snapshot.id).toBe(result.document.id);
|
|
2306
|
+
});
|
|
2307
|
+
});
|
|
2308
|
+
describe('getDocSnapshotDataPairs()', () => {
|
|
2309
|
+
it('should return an empty array if the query returns nothing', async () => {
|
|
2310
|
+
const result = await queryDocument(firebase.where('value', '==', '_DOES_NOT_EXIST_')).getDocSnapshotDataPairs();
|
|
2311
|
+
expect(result).toBeDefined();
|
|
2312
|
+
expect(result.length).toBe(0);
|
|
2313
|
+
});
|
|
2314
|
+
it('should return the matching results', async () => {
|
|
2315
|
+
const results = await queryDocument().getDocSnapshotDataPairs();
|
|
2316
|
+
expect(results).toBeDefined();
|
|
2317
|
+
expect(results.length).toBeGreaterThan(0);
|
|
2318
|
+
results.forEach(result => {
|
|
2319
|
+
expect(result).toBeDefined();
|
|
2320
|
+
expect(result.data).toBeDefined();
|
|
2321
|
+
expect(result.document).toBeDefined();
|
|
2322
|
+
expect(result.document instanceof MockItemDocument).toBe(true);
|
|
2323
|
+
expect(result.snapshot).toBeDefined();
|
|
2324
|
+
expect(result.snapshot.data()).toBeDefined();
|
|
2325
|
+
expect(result.snapshot.ref).toBeDefined();
|
|
2326
|
+
expect(result.snapshot.id).toBe(result.document.id);
|
|
2327
|
+
});
|
|
2328
|
+
});
|
|
2329
|
+
});
|
|
2330
|
+
describe('streamDocs()', () => {
|
|
2331
|
+
let sub;
|
|
2332
|
+
beforeEach(() => {
|
|
2333
|
+
sub = new rxjs.SubscriptionObject();
|
|
2334
|
+
});
|
|
2335
|
+
afterEach(() => {
|
|
2336
|
+
sub.destroy();
|
|
2337
|
+
});
|
|
2338
|
+
it('should emit when the query results update (an item is added).', test.callbackTest(done => {
|
|
2339
|
+
const itemsToAdd = 1;
|
|
2340
|
+
let addCompleted = false;
|
|
2341
|
+
let addSeen = false;
|
|
2342
|
+
function tryComplete() {
|
|
2343
|
+
if (addSeen && addCompleted) {
|
|
2344
|
+
done();
|
|
2345
|
+
}
|
|
2346
|
+
}
|
|
2347
|
+
sub.subscription = queryDocument().streamDocs().pipe(rxjs$1.filter(documents => documents.length > items.length)).subscribe(documents => {
|
|
2348
|
+
addSeen = true;
|
|
2349
|
+
expect(documents.length).toBe(items.length + itemsToAdd);
|
|
2350
|
+
tryComplete();
|
|
2351
|
+
});
|
|
2352
|
+
// add one item
|
|
2353
|
+
util.waitForMs(10).then(() => firebase.makeDocuments(f.instance.firestoreCollection.documentAccessor(), {
|
|
2354
|
+
count: itemsToAdd,
|
|
2355
|
+
init: i => {
|
|
2356
|
+
return {
|
|
2357
|
+
value: `${i + items.length}`,
|
|
2358
|
+
test: true
|
|
2359
|
+
};
|
|
2360
|
+
}
|
|
2361
|
+
}).then(() => {
|
|
2362
|
+
addCompleted = true;
|
|
2363
|
+
tryComplete();
|
|
2364
|
+
}));
|
|
2365
|
+
}));
|
|
2366
|
+
it('should emit when the query results update (an item is removed).', test.callbackTest(done => {
|
|
2367
|
+
const itemsToRemove = 1;
|
|
2368
|
+
let deleteCompleted = false;
|
|
2369
|
+
let deleteSeen = false;
|
|
2370
|
+
function tryComplete() {
|
|
2371
|
+
if (deleteSeen && deleteCompleted) {
|
|
2372
|
+
done();
|
|
2373
|
+
}
|
|
2374
|
+
}
|
|
2375
|
+
sub.subscription = queryDocument().streamDocs().pipe(rxjs$1.skip(1)).subscribe(documents => {
|
|
2376
|
+
deleteSeen = true;
|
|
2377
|
+
expect(documents.length).toBe(items.length - itemsToRemove);
|
|
2378
|
+
tryComplete();
|
|
2379
|
+
});
|
|
2380
|
+
util.waitForMs(10).then(() => items[0].exists().then(exists => {
|
|
2381
|
+
expect(exists).toBe(true);
|
|
2382
|
+
// remove one item
|
|
2383
|
+
return items[0].accessor.delete().then(() => {
|
|
2384
|
+
deleteCompleted = true;
|
|
2385
|
+
tryComplete();
|
|
2386
|
+
});
|
|
2387
|
+
}));
|
|
2388
|
+
}));
|
|
2389
|
+
});
|
|
2390
|
+
describe('streamDocSnapshotDataPairs()', () => {
|
|
2391
|
+
let sub;
|
|
2392
|
+
beforeEach(() => {
|
|
2393
|
+
sub = new rxjs.SubscriptionObject();
|
|
2394
|
+
});
|
|
2395
|
+
afterEach(() => {
|
|
2396
|
+
sub.destroy();
|
|
2397
|
+
});
|
|
2398
|
+
it('should emit when the query results update (an item is added).', test.callbackTest(done => {
|
|
2399
|
+
const itemsToAdd = 1;
|
|
2400
|
+
let addCompleted = false;
|
|
2401
|
+
let addSeen = false;
|
|
2402
|
+
function tryComplete() {
|
|
2403
|
+
if (addSeen && addCompleted) {
|
|
2404
|
+
done();
|
|
2405
|
+
}
|
|
2406
|
+
}
|
|
2407
|
+
sub.subscription = queryDocument().streamDocSnapshotDataPairs().pipe(rxjs$1.filter(documents => documents.length > items.length)).subscribe(documents => {
|
|
2408
|
+
addSeen = true;
|
|
2409
|
+
expect(documents.length).toBe(items.length + itemsToAdd);
|
|
2410
|
+
documents.forEach(x => {
|
|
2411
|
+
// validate each document returned
|
|
2412
|
+
expect(x.data).toBeDefined();
|
|
2413
|
+
expect(x.document).toBeDefined();
|
|
2414
|
+
expect(x.document instanceof MockItemDocument).toBe(true);
|
|
2415
|
+
expect(x.snapshot).toBeDefined();
|
|
2416
|
+
expect(x.snapshot.data()).toBeDefined();
|
|
2417
|
+
expect(x.snapshot.ref).toBeDefined();
|
|
2418
|
+
expect(x.snapshot.id).toBe(x.document.id);
|
|
2419
|
+
});
|
|
2420
|
+
tryComplete();
|
|
2421
|
+
});
|
|
2422
|
+
// add one item
|
|
2423
|
+
util.waitForMs(10).then(() => firebase.makeDocuments(f.instance.firestoreCollection.documentAccessor(), {
|
|
2424
|
+
count: itemsToAdd,
|
|
2425
|
+
init: i => {
|
|
2426
|
+
return {
|
|
2427
|
+
value: `${i + items.length}`,
|
|
2428
|
+
test: true
|
|
2429
|
+
};
|
|
2430
|
+
}
|
|
2431
|
+
}).then(() => {
|
|
2432
|
+
addCompleted = true;
|
|
2433
|
+
tryComplete();
|
|
2434
|
+
}));
|
|
2435
|
+
}));
|
|
2436
|
+
it('should emit when the query results update (an item is removed).', test.callbackTest(done => {
|
|
2437
|
+
const itemsToRemove = 1;
|
|
2438
|
+
let deleteCompleted = false;
|
|
2439
|
+
let deleteSeen = false;
|
|
2440
|
+
function tryComplete() {
|
|
2441
|
+
if (deleteSeen && deleteCompleted) {
|
|
2442
|
+
done();
|
|
2443
|
+
}
|
|
2444
|
+
}
|
|
2445
|
+
sub.subscription = queryDocument().streamDocs().pipe(rxjs$1.skip(1)).subscribe(documents => {
|
|
2446
|
+
deleteSeen = true;
|
|
2447
|
+
expect(documents.length).toBe(items.length - itemsToRemove);
|
|
2448
|
+
tryComplete();
|
|
2449
|
+
});
|
|
2450
|
+
util.waitForMs(10).then(() => items[0].exists().then(exists => {
|
|
2451
|
+
expect(exists).toBe(true);
|
|
2452
|
+
// remove one item
|
|
2453
|
+
return items[0].accessor.delete().then(() => {
|
|
2454
|
+
deleteCompleted = true;
|
|
2455
|
+
tryComplete();
|
|
2456
|
+
});
|
|
2457
|
+
}));
|
|
2458
|
+
}));
|
|
2459
|
+
});
|
|
2460
|
+
});
|
|
2461
|
+
describe('query', () => {
|
|
2462
|
+
let query;
|
|
2463
|
+
beforeEach(async () => {
|
|
2464
|
+
query = f.instance.firestoreCollection.query;
|
|
2465
|
+
});
|
|
2466
|
+
describe('streamDocs()', () => {
|
|
2467
|
+
let sub;
|
|
2468
|
+
beforeEach(() => {
|
|
2469
|
+
sub = new rxjs.SubscriptionObject();
|
|
2470
|
+
});
|
|
2471
|
+
afterEach(() => {
|
|
2472
|
+
sub.destroy();
|
|
2473
|
+
});
|
|
2474
|
+
it('should emit when the query results update (an item is added).', test.callbackTest(done => {
|
|
2475
|
+
const itemsToAdd = 1;
|
|
2476
|
+
let addCompleted = false;
|
|
2477
|
+
let addSeen = false;
|
|
2478
|
+
function tryComplete() {
|
|
2479
|
+
if (addSeen && addCompleted) {
|
|
2480
|
+
done();
|
|
2481
|
+
}
|
|
2482
|
+
}
|
|
2483
|
+
sub.subscription = query().streamDocs().pipe(rxjs$1.filter(x => x.docs.length > items.length)).subscribe(results => {
|
|
2484
|
+
addSeen = true;
|
|
2485
|
+
expect(results.docs.length).toBe(items.length + itemsToAdd);
|
|
2486
|
+
tryComplete();
|
|
2487
|
+
});
|
|
2488
|
+
// add one item
|
|
2489
|
+
util.waitForMs(10).then(() => firebase.makeDocuments(f.instance.firestoreCollection.documentAccessor(), {
|
|
2490
|
+
count: itemsToAdd,
|
|
2491
|
+
init: i => {
|
|
2492
|
+
return {
|
|
2493
|
+
value: `${i + items.length}`,
|
|
2494
|
+
test: true
|
|
2495
|
+
};
|
|
2496
|
+
}
|
|
2497
|
+
}).then(() => {
|
|
2498
|
+
addCompleted = true;
|
|
2499
|
+
tryComplete();
|
|
2500
|
+
}));
|
|
2501
|
+
}));
|
|
2502
|
+
it('should emit when the query results update (an item is removed).', test.callbackTest(done => {
|
|
2503
|
+
const itemsToRemove = 1;
|
|
2504
|
+
let deleteCompleted = false;
|
|
2505
|
+
let deleteSeen = false;
|
|
2506
|
+
function tryComplete() {
|
|
2507
|
+
if (deleteSeen && deleteCompleted) {
|
|
2508
|
+
done();
|
|
2509
|
+
}
|
|
2510
|
+
}
|
|
2511
|
+
sub.subscription = query().streamDocs().pipe(rxjs$1.skip(1)).subscribe(results => {
|
|
2512
|
+
deleteSeen = true;
|
|
2513
|
+
expect(results.docs.length).toBe(items.length - itemsToRemove);
|
|
2514
|
+
tryComplete();
|
|
2515
|
+
});
|
|
2516
|
+
util.waitForMs(10).then(() => items[0].accessor.exists().then(exists => {
|
|
2517
|
+
expect(exists).toBe(true);
|
|
2518
|
+
// remove one item
|
|
2519
|
+
return items[0].accessor.delete().then(() => {
|
|
2520
|
+
deleteCompleted = true;
|
|
2521
|
+
tryComplete();
|
|
2522
|
+
});
|
|
2523
|
+
}));
|
|
2524
|
+
}));
|
|
2525
|
+
});
|
|
2526
|
+
describe('constraint', () => {
|
|
2527
|
+
describe('limit', () => {
|
|
2528
|
+
it('should limit the number of items returned.', async () => {
|
|
2529
|
+
const limitCount = 2;
|
|
2530
|
+
const unlimited = await query().getDocs();
|
|
2531
|
+
expect(unlimited.docs.length).toBe(testDocumentCount);
|
|
2532
|
+
const result = await query(firebase.limit(limitCount)).getDocs();
|
|
2533
|
+
expect(result.docs.length).toBe(limitCount);
|
|
2534
|
+
});
|
|
2535
|
+
it('should limit the streamed results.', test.callbackTest(done => {
|
|
2536
|
+
const limitCount = 2;
|
|
2537
|
+
const resultObs = query(firebase.limit(limitCount)).streamDocs();
|
|
2538
|
+
rxjs$1.from(resultObs).pipe(rxjs$1.first()).subscribe(results => {
|
|
2539
|
+
expect(results.docs.length).toBe(limitCount);
|
|
2540
|
+
done();
|
|
2541
|
+
});
|
|
2542
|
+
}));
|
|
2543
|
+
it('should limit the number of items counted.', async () => {
|
|
2544
|
+
const limitCount = 2;
|
|
2545
|
+
const unlimited = await query().countDocs();
|
|
2546
|
+
expect(unlimited).toBe(testDocumentCount);
|
|
2547
|
+
const result = await query(firebase.limit(limitCount)).countDocs();
|
|
2548
|
+
expect(result).toBe(limitCount);
|
|
2549
|
+
});
|
|
2550
|
+
});
|
|
2551
|
+
describe('limitToLast', () => {
|
|
2552
|
+
it('should limit the number of items returned.', async () => {
|
|
2553
|
+
const limitCount = 2;
|
|
2554
|
+
const unlimited = await query().getDocs();
|
|
2555
|
+
expect(unlimited.docs.length).toBe(testDocumentCount);
|
|
2556
|
+
const result = await query(firebase.orderBy('value'), firebase.limitToLast(limitCount)).getDocs();
|
|
2557
|
+
expect(result.docs.length).toBe(limitCount);
|
|
2558
|
+
});
|
|
2559
|
+
it('the results should be returned from the end of the list. The results are still in the same order as requested.', async () => {
|
|
2560
|
+
const limitCount = 2;
|
|
2561
|
+
const result = await query(firebase.orderBy('value', 'asc'), firebase.limitToLast(limitCount)).getDocs();
|
|
2562
|
+
expect(result.docs.length).toBe(limitCount);
|
|
2563
|
+
expect(result.docs[0].data().value).toBe('3');
|
|
2564
|
+
expect(result.docs[1].data().value).toBe('4');
|
|
2565
|
+
});
|
|
2566
|
+
test.itShouldFail('if orderby is not provided.', async () => {
|
|
2567
|
+
const limitCount = 2;
|
|
2568
|
+
const unlimited = await query().getDocs();
|
|
2569
|
+
expect(unlimited.docs.length).toBe(testDocumentCount);
|
|
2570
|
+
await test.expectFail(() => query(firebase.limitToLast(limitCount)).getDocs());
|
|
2571
|
+
});
|
|
2572
|
+
it('should stream results.', test.callbackTest(done => {
|
|
2573
|
+
const limitCount = 2;
|
|
2574
|
+
const resultObs = query(firebase.orderBy('value'), firebase.limitToLast(limitCount)).streamDocs();
|
|
2575
|
+
rxjs$1.from(resultObs).pipe(rxjs$1.first()).subscribe(results => {
|
|
2576
|
+
expect(results.docs.length).toBe(limitCount);
|
|
2577
|
+
done();
|
|
2578
|
+
});
|
|
2579
|
+
}));
|
|
2580
|
+
it('should limit the number of items counted.', async () => {
|
|
2581
|
+
const limitCount = 2;
|
|
2582
|
+
const unlimited = await query().countDocs();
|
|
2583
|
+
expect(unlimited).toBe(testDocumentCount);
|
|
2584
|
+
const result = await query(firebase.orderBy('value'), firebase.limitToLast(limitCount)).countDocs();
|
|
2585
|
+
expect(result).toBe(limitCount);
|
|
2586
|
+
});
|
|
2587
|
+
});
|
|
2588
|
+
describe('orderBy', () => {
|
|
2589
|
+
it('should return values sorted in ascending order.', async () => {
|
|
2590
|
+
const results = await query(firebase.orderBy('value', 'asc')).getDocs();
|
|
2591
|
+
expect(results.docs[0].data().value).toBe('0');
|
|
2592
|
+
});
|
|
2593
|
+
it('should return values sorted in descending order.', async () => {
|
|
2594
|
+
const results = await query(firebase.orderBy('value', 'desc')).getDocs();
|
|
2595
|
+
expect(results.docs[0].data().value).toBe(`${items.length - 1}`);
|
|
2596
|
+
});
|
|
2597
|
+
});
|
|
2598
|
+
describe('where', () => {
|
|
2599
|
+
describe('==', () => {
|
|
2600
|
+
it('should return the documents matching the query.', async () => {
|
|
2601
|
+
const value = '0';
|
|
2602
|
+
const result = await query(firebase.where('value', '==', value)).getDocs();
|
|
2603
|
+
expect(result.docs.length).toBe(1);
|
|
2604
|
+
expect(result.docs[0].data().value).toBe(value);
|
|
2605
|
+
});
|
|
2606
|
+
it('should return the count of the documents matching the query.', async () => {
|
|
2607
|
+
const value = '0';
|
|
2608
|
+
const result = await query(firebase.where('value', '==', value)).countDocs();
|
|
2609
|
+
expect(result).toBe(1);
|
|
2610
|
+
});
|
|
2611
|
+
});
|
|
2612
|
+
describe('in', () => {
|
|
2613
|
+
it('should return the documents with any of the input values.', async () => {
|
|
2614
|
+
const targetValue = ['0', '1', '2'];
|
|
2615
|
+
const result = await query(firebase.where('value', 'in', targetValue)).getDocs();
|
|
2616
|
+
expect(result.docs.length).toBe(3);
|
|
2617
|
+
const values = result.docs.map(x => x.data().value);
|
|
2618
|
+
expect(values).toContain('0');
|
|
2619
|
+
expect(values).toContain('1');
|
|
2620
|
+
expect(values).toContain('2');
|
|
2621
|
+
});
|
|
2622
|
+
it('should return the count of documents with any of the input values.', async () => {
|
|
2623
|
+
const targetValue = ['0', '1', '2'];
|
|
2624
|
+
const result = await query(firebase.where('value', 'in', targetValue)).countDocs();
|
|
2625
|
+
expect(result).toBe(3);
|
|
2626
|
+
});
|
|
2627
|
+
});
|
|
2628
|
+
describe('not-in', () => {
|
|
2629
|
+
it('should return the documents that do not contain any of the input values.', async () => {
|
|
2630
|
+
const targetValue = ['0', '1', '2'];
|
|
2631
|
+
const result = await query(firebase.where('value', 'not-in', targetValue)).getDocs();
|
|
2632
|
+
expect(result.docs.length).toBe(2);
|
|
2633
|
+
const values = result.docs.map(x => x.data().value);
|
|
2634
|
+
expect(values).not.toContain('0');
|
|
2635
|
+
expect(values).not.toContain('1');
|
|
2636
|
+
expect(values).not.toContain('2');
|
|
2637
|
+
expect(values).toContain('3');
|
|
2638
|
+
expect(values).toContain('4');
|
|
2639
|
+
});
|
|
2640
|
+
it('should return the count of documents that do not contain any of the input values.', async () => {
|
|
2641
|
+
const targetValue = ['0', '1', '2'];
|
|
2642
|
+
const result = await query(firebase.where('value', 'not-in', targetValue)).countDocs();
|
|
2643
|
+
expect(result).toBe(2);
|
|
2644
|
+
});
|
|
2645
|
+
});
|
|
2646
|
+
describe('searching array values', () => {
|
|
2647
|
+
describe('in', () => {
|
|
2648
|
+
it('should return the documents with arrays that only have the given values.', async () => {
|
|
2649
|
+
// NOTE: we pass an array to match exactly
|
|
2650
|
+
const targetValue = [['0', 'even']];
|
|
2651
|
+
const result = await query(firebase.where('tags', 'in', targetValue)).getDocs();
|
|
2652
|
+
expect(result.docs.length).toBe(1);
|
|
2653
|
+
expect(result.docs[0].data().value).toBe('0');
|
|
2654
|
+
});
|
|
2655
|
+
it('should not return the document with arrays that have more than the requested values.', async () => {
|
|
2656
|
+
const targetValue = [['0']];
|
|
2657
|
+
const result = await query(firebase.where('tags', 'in', targetValue)).getDocs();
|
|
2658
|
+
expect(result.docs.length).toBe(0);
|
|
2659
|
+
});
|
|
2660
|
+
it('should return the count of documents with arrays that only have the given values.', async () => {
|
|
2661
|
+
// NOTE: we pass an array to match exactly
|
|
2662
|
+
const targetValue = [['0', 'even']];
|
|
2663
|
+
const result = await query(firebase.where('tags', 'in', targetValue)).countDocs();
|
|
2664
|
+
expect(result).toBe(1);
|
|
2665
|
+
});
|
|
2666
|
+
});
|
|
2667
|
+
describe('array-contains', () => {
|
|
2668
|
+
it('should return the documents that contain the given value.', async () => {
|
|
2669
|
+
const targetValue = '0';
|
|
2670
|
+
const result = await query(firebase.where('tags', 'array-contains', targetValue)).getDocs();
|
|
2671
|
+
expect(result.docs.length).toBe(1);
|
|
2672
|
+
expect(result.docs[0].data().value).toBe('0');
|
|
2673
|
+
});
|
|
2674
|
+
test.itShouldFail('if an array is passed to where with array-contains', async () => {
|
|
2675
|
+
const targetValues = ['0', 'even'];
|
|
2676
|
+
await test.expectFail(() => query(firebase.where('tags', 'array-contains', targetValues)).getDocs());
|
|
2677
|
+
});
|
|
2678
|
+
});
|
|
2679
|
+
describe('array-contains-any', () => {
|
|
2680
|
+
it('should return the documents that contain the given value, even if it is not passed as an array.', async () => {
|
|
2681
|
+
const targetValues = 'even';
|
|
2682
|
+
const result = await query(firebase.where('tags', 'array-contains-any', targetValues)).getDocs();
|
|
2683
|
+
expect(result.docs.length).toBe(Math.floor(testDocumentCount / 2) + 1);
|
|
2684
|
+
result.docs.forEach(x => {
|
|
2685
|
+
expect(util.isEvenNumber(Number(x.data().value)));
|
|
2686
|
+
});
|
|
2687
|
+
});
|
|
2688
|
+
it('should return the documents that contain any of the given values.', async () => {
|
|
2689
|
+
const targetValues = ['0', 'even'];
|
|
2690
|
+
const result = await query(firebase.where('tags', 'array-contains-any', targetValues)).getDocs();
|
|
2691
|
+
expect(result.docs.length).toBe(Math.floor(testDocumentCount / 2) + 1);
|
|
2692
|
+
result.docs.forEach(x => {
|
|
2693
|
+
expect(util.isEvenNumber(Number(x.data().value)));
|
|
2694
|
+
});
|
|
2695
|
+
});
|
|
2696
|
+
});
|
|
2697
|
+
});
|
|
2698
|
+
describe('Compound Queries', () => {
|
|
2699
|
+
describe('Searching Strings', () => {
|
|
2700
|
+
/*
|
|
2701
|
+
Create models that have model key like string values for prefix searching.
|
|
2702
|
+
*/
|
|
2703
|
+
const evenPrefix = mockItemIdentity.collectionType + '/';
|
|
2704
|
+
const oddPrefix = mockItemIdentity.collectionType + 'd' + '/'; // similar, but not quite the same
|
|
2705
|
+
const expectedNumberOfEvenValues = Math.ceil(testDocumentCount / 2);
|
|
2706
|
+
beforeEach(async () => {
|
|
2707
|
+
items = await firebase.makeDocuments(f.instance.firestoreCollection.documentAccessor(), {
|
|
2708
|
+
count: testDocumentCount,
|
|
2709
|
+
init: i => {
|
|
2710
|
+
const isEven = util.isEvenNumber(i);
|
|
2711
|
+
const prefix = isEven ? evenPrefix : oddPrefix;
|
|
2712
|
+
return {
|
|
2713
|
+
value: `${prefix}${i}`,
|
|
2714
|
+
date: new Date(),
|
|
2715
|
+
tags: [],
|
|
2716
|
+
test: true
|
|
2717
|
+
};
|
|
2718
|
+
}
|
|
2719
|
+
});
|
|
2720
|
+
});
|
|
2721
|
+
describe('whereStringHasRootIdentityModelKey()', () => {
|
|
2722
|
+
it('should return only models with searched prefix', async () => {
|
|
2723
|
+
const result = await query(firebase.whereStringHasRootIdentityModelKey('value', mockItemIdentity)).getDocs();
|
|
2724
|
+
const values = result.docs.map(x => x.data().value);
|
|
2725
|
+
values.forEach(x => {
|
|
2726
|
+
expect(x.startsWith(evenPrefix));
|
|
2727
|
+
});
|
|
2728
|
+
expect(result.docs.length).toBe(expectedNumberOfEvenValues);
|
|
2729
|
+
});
|
|
2730
|
+
it('should return the count of only models with searched prefix', async () => {
|
|
2731
|
+
const result = await query(firebase.whereStringHasRootIdentityModelKey('value', mockItemIdentity)).countDocs();
|
|
2732
|
+
expect(result).toBe(expectedNumberOfEvenValues);
|
|
2733
|
+
});
|
|
2734
|
+
});
|
|
2735
|
+
describe('whereStringValueHasPrefix()', () => {
|
|
2736
|
+
it('should return only models with searched prefix', async () => {
|
|
2737
|
+
const result = await query(firebase.whereStringValueHasPrefix('value', evenPrefix)).getDocs();
|
|
2738
|
+
const values = result.docs.map(x => x.data().value);
|
|
2739
|
+
values.forEach(x => {
|
|
2740
|
+
expect(x.startsWith(evenPrefix));
|
|
2741
|
+
});
|
|
2742
|
+
expect(result.docs.length).toBe(expectedNumberOfEvenValues);
|
|
2743
|
+
});
|
|
2744
|
+
});
|
|
2745
|
+
});
|
|
2746
|
+
/**
|
|
2747
|
+
* Since we choose to store dates as strings, we can compare ranges of dates.
|
|
2748
|
+
*/
|
|
2749
|
+
describe('Searching Date Strings', () => {
|
|
2750
|
+
describe('whereDateIsAfterWithSort()', () => {
|
|
2751
|
+
it('should return models with dates after the input.', async () => {
|
|
2752
|
+
const startHoursLater = 2;
|
|
2753
|
+
const start = dateFns.addHours(startDate, startHoursLater);
|
|
2754
|
+
const result = await query(firebase.whereDateIsAfterWithSort('date', start)).getDocs();
|
|
2755
|
+
expect(result.docs.length).toBe(startHoursLater);
|
|
2756
|
+
// ascending order by default
|
|
2757
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(dateFns.addHours(start, 1).toISOString());
|
|
2758
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(start, 2).toISOString());
|
|
2759
|
+
});
|
|
2760
|
+
it('should return models with dates after the input in descending order.', async () => {
|
|
2761
|
+
const startHoursLater = 2;
|
|
2762
|
+
const start = dateFns.addHours(startDate, startHoursLater);
|
|
2763
|
+
const result = await query(firebase.whereDateIsAfterWithSort('date', start, 'desc')).getDocs();
|
|
2764
|
+
expect(result.docs.length).toBe(startHoursLater);
|
|
2765
|
+
// check descending order
|
|
2766
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(dateFns.addHours(start, 2).toISOString());
|
|
2767
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(start, 1).toISOString());
|
|
2768
|
+
});
|
|
2769
|
+
});
|
|
2770
|
+
describe('whereDateIsBeforeWithSort()', () => {
|
|
2771
|
+
it('should return models with dates before the input.', async () => {
|
|
2772
|
+
const startHoursLater = 2;
|
|
2773
|
+
const endDate = dateFns.addHours(startDate, startHoursLater);
|
|
2774
|
+
const result = await query(firebase.whereDateIsBeforeWithSort('date', endDate)).getDocs();
|
|
2775
|
+
expect(result.docs.length).toBe(startHoursLater);
|
|
2776
|
+
// descending order by default
|
|
2777
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(dateFns.addHours(endDate, -1).toISOString());
|
|
2778
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(endDate, -2).toISOString());
|
|
2779
|
+
});
|
|
2780
|
+
it('should return models with dates before the input in ascending order.', async () => {
|
|
2781
|
+
const startHoursLater = 2;
|
|
2782
|
+
const endDate = dateFns.addHours(startDate, startHoursLater);
|
|
2783
|
+
const result = await query(firebase.whereDateIsBeforeWithSort('date', endDate, 'asc')).getDocs();
|
|
2784
|
+
expect(result.docs.length).toBe(startHoursLater);
|
|
2785
|
+
// check ascending order
|
|
2786
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(dateFns.addHours(endDate, -2).toISOString());
|
|
2787
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(endDate, -1).toISOString());
|
|
2788
|
+
});
|
|
2789
|
+
});
|
|
2790
|
+
describe('whereDateIsOnOrAfterWithSort()', () => {
|
|
2791
|
+
it('should return models with dates after the input.', async () => {
|
|
2792
|
+
const startHoursLater = 2;
|
|
2793
|
+
const start = dateFns.addHours(startDate, startHoursLater);
|
|
2794
|
+
const result = await query(firebase.whereDateIsOnOrAfterWithSort('date', start)).getDocs();
|
|
2795
|
+
expect(result.docs.length).toBe(3);
|
|
2796
|
+
// ascending order by default
|
|
2797
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(dateFns.addHours(start, 0).toISOString());
|
|
2798
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(start, 1).toISOString());
|
|
2799
|
+
expect(result.docs[2].data().date?.toISOString()).toBe(dateFns.addHours(start, 2).toISOString());
|
|
2800
|
+
});
|
|
2801
|
+
it('should return models with dates after the input in descending order.', async () => {
|
|
2802
|
+
const startHoursLater = 2;
|
|
2803
|
+
const start = dateFns.addHours(startDate, startHoursLater);
|
|
2804
|
+
const result = await query(firebase.whereDateIsOnOrAfterWithSort('date', start, 'desc')).getDocs();
|
|
2805
|
+
expect(result.docs.length).toBe(3);
|
|
2806
|
+
// check descending order
|
|
2807
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(dateFns.addHours(start, 2).toISOString());
|
|
2808
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(start, 1).toISOString());
|
|
2809
|
+
expect(result.docs[2].data().date?.toISOString()).toBe(dateFns.addHours(start, 0).toISOString());
|
|
2810
|
+
});
|
|
2811
|
+
});
|
|
2812
|
+
describe('whereDateIsOnOrBeforeWithSort()', () => {
|
|
2813
|
+
it('should return models with dates before the input.', async () => {
|
|
2814
|
+
const startHoursLater = 2;
|
|
2815
|
+
const endDate = dateFns.addHours(startDate, startHoursLater);
|
|
2816
|
+
const result = await query(firebase.whereDateIsOnOrBeforeWithSort('date', endDate)).getDocs();
|
|
2817
|
+
expect(result.docs.length).toBe(3);
|
|
2818
|
+
// descending order by default
|
|
2819
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(dateFns.addHours(endDate, 0).toISOString());
|
|
2820
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(endDate, -1).toISOString());
|
|
2821
|
+
expect(result.docs[2].data().date?.toISOString()).toBe(dateFns.addHours(endDate, -2).toISOString());
|
|
2822
|
+
});
|
|
2823
|
+
it('should return models with dates before the input in ascending order.', async () => {
|
|
2824
|
+
const startHoursLater = 2;
|
|
2825
|
+
const endDate = dateFns.addHours(startDate, startHoursLater);
|
|
2826
|
+
const result = await query(firebase.whereDateIsOnOrBeforeWithSort('date', endDate, 'asc')).getDocs();
|
|
2827
|
+
expect(result.docs.length).toBe(3);
|
|
2828
|
+
// check ascending order
|
|
2829
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(dateFns.addHours(endDate, -2).toISOString());
|
|
2830
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(endDate, -1).toISOString());
|
|
2831
|
+
expect(result.docs[2].data().date?.toISOString()).toBe(dateFns.addHours(endDate, 0).toISOString());
|
|
2832
|
+
});
|
|
2833
|
+
});
|
|
2834
|
+
describe('whereDateIsInRange()', () => {
|
|
2835
|
+
it('should return the date values within the given range.', async () => {
|
|
2836
|
+
const startHoursLater = 1;
|
|
2837
|
+
const totalHoursInRange = 2;
|
|
2838
|
+
const start = dateFns.addHours(startDate, startHoursLater);
|
|
2839
|
+
const result = await query(firebase.whereDateIsInRange('date', {
|
|
2840
|
+
date: start,
|
|
2841
|
+
distance: totalHoursInRange - 1,
|
|
2842
|
+
type: date.DateRangeType.HOURS_RANGE
|
|
2843
|
+
})).getDocs();
|
|
2844
|
+
expect(result.docs.length).toBe(totalHoursInRange);
|
|
2845
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(start.toISOString());
|
|
2846
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(start, 1).toISOString());
|
|
2847
|
+
});
|
|
2848
|
+
});
|
|
2849
|
+
describe('whereDateIsBetween()', () => {
|
|
2850
|
+
it('should return the date values within the given range.', async () => {
|
|
2851
|
+
const startHoursLater = 1;
|
|
2852
|
+
const hoursRange = 2;
|
|
2853
|
+
const start = dateFns.addHours(startDate, startHoursLater);
|
|
2854
|
+
const end = dateFns.addHours(start, hoursRange);
|
|
2855
|
+
const result = await query(firebase.whereDateIsBetween('date', {
|
|
2856
|
+
start,
|
|
2857
|
+
end
|
|
2858
|
+
})).getDocs();
|
|
2859
|
+
expect(result.docs.length).toBe(hoursRange);
|
|
2860
|
+
expect(result.docs[0].data().date?.toISOString()).toBe(start.toISOString());
|
|
2861
|
+
expect(result.docs[1].data().date?.toISOString()).toBe(dateFns.addHours(start, 1).toISOString());
|
|
2862
|
+
});
|
|
2863
|
+
describe('with searching array value', () => {
|
|
2864
|
+
it('should search the date range and values that are tagged even.', async () => {
|
|
2865
|
+
const targetTag = 'even';
|
|
2866
|
+
const startHoursLater = 1;
|
|
2867
|
+
const hoursRange = 2;
|
|
2868
|
+
const start = dateFns.addHours(startDate, startHoursLater);
|
|
2869
|
+
const end = dateFns.addHours(start, hoursRange);
|
|
2870
|
+
const result = await query([
|
|
2871
|
+
// filter by dates first
|
|
2872
|
+
...firebase.whereDateIsBetween('date', {
|
|
2873
|
+
start,
|
|
2874
|
+
end
|
|
2875
|
+
}),
|
|
2876
|
+
// only allow even items
|
|
2877
|
+
firebase.where('tags', 'array-contains-any', targetTag)]).getDocs();
|
|
2878
|
+
expect(result.docs.length).toBe(1);
|
|
2879
|
+
const onlyResultData = result.docs[0].data();
|
|
2880
|
+
expect(onlyResultData.date?.toISOString()).toBe(dateFns.addHours(start, 1).toISOString());
|
|
2881
|
+
expect(onlyResultData.tags).toContain(targetTag);
|
|
2882
|
+
});
|
|
2883
|
+
});
|
|
2884
|
+
});
|
|
2885
|
+
});
|
|
2886
|
+
});
|
|
2887
|
+
});
|
|
2888
|
+
describe('whereDocumentId', () => {
|
|
2889
|
+
it('should return the documents matching the query.', async () => {
|
|
2890
|
+
const targetId = items[0].id;
|
|
2891
|
+
const result = await query(firebase.whereDocumentId('==', targetId)).getDocs();
|
|
2892
|
+
expect(result.docs.length).toBe(1);
|
|
2893
|
+
expect(result.docs[0].id).toBe(targetId);
|
|
2894
|
+
});
|
|
2895
|
+
});
|
|
2896
|
+
describe('startAt', () => {
|
|
2897
|
+
it('should return values starting from the specified startAt document.', async () => {
|
|
2898
|
+
const limitCount = 2;
|
|
2899
|
+
const firstQuery = query(firebase.limit(limitCount));
|
|
2900
|
+
const first = await firstQuery.getDocs();
|
|
2901
|
+
expect(first.docs.length).toBe(limitCount);
|
|
2902
|
+
const second = await firstQuery.filter(firebase.startAt(first.docs[1])).getDocs();
|
|
2903
|
+
expect(second.docs.length).toBe(limitCount);
|
|
2904
|
+
expect(second.docs[0].id).toBe(first.docs[1].id);
|
|
2905
|
+
});
|
|
2906
|
+
it('should return the count of values starting from the specified startAt document.', async () => {
|
|
2907
|
+
const limitCount = 2;
|
|
2908
|
+
const firstQuery = query(firebase.limit(limitCount));
|
|
2909
|
+
const first = await firstQuery.getDocs();
|
|
2910
|
+
expect(first.docs.length).toBe(limitCount);
|
|
2911
|
+
// NOTE: startAt with count requires an orderBy to be set.
|
|
2912
|
+
const secondCount = await firstQuery.filter(firebase.orderByDocumentId(), firebase.startAt(first.docs[1])).countDocs();
|
|
2913
|
+
expect(secondCount).toBe(limitCount);
|
|
2914
|
+
});
|
|
2915
|
+
});
|
|
2916
|
+
describe('startAtValue', () => {
|
|
2917
|
+
it('should return values starting from the specified startAt path.', async () => {
|
|
2918
|
+
const limitCount = testDocumentCount;
|
|
2919
|
+
const firstQuery = query(firebase.orderBy('value'), firebase.limit(limitCount));
|
|
2920
|
+
const first = await firstQuery.getDocs();
|
|
2921
|
+
expect(first.docs.length).toBe(limitCount);
|
|
2922
|
+
const indexToStartAt = 3;
|
|
2923
|
+
const docToStartAt = first.docs[indexToStartAt];
|
|
2924
|
+
const docToStartAtValue = docToStartAt.data().value;
|
|
2925
|
+
const second = await firstQuery.filter(firebase.startAtValue(docToStartAtValue)).getDocs();
|
|
2926
|
+
expect(second.docs.length).toBe(limitCount - indexToStartAt);
|
|
2927
|
+
expect(second.docs[0].id).toBe(docToStartAt.id);
|
|
2928
|
+
});
|
|
2929
|
+
});
|
|
2930
|
+
describe('startAfter', () => {
|
|
2931
|
+
it('should return values starting after the specified startAt point.', async () => {
|
|
2932
|
+
const limitCount = 3;
|
|
2933
|
+
const firstQuery = query(firebase.limit(limitCount));
|
|
2934
|
+
const first = await firstQuery.getDocs();
|
|
2935
|
+
expect(first.docs.length).toBe(limitCount);
|
|
2936
|
+
const startAfterDoc = first.docs[1];
|
|
2937
|
+
const expectedFirstDoc = first.docs[2];
|
|
2938
|
+
const second = await firstQuery.filter(firebase.startAfter(startAfterDoc)).getDocs();
|
|
2939
|
+
expect(second.docs.length).toBe(limitCount);
|
|
2940
|
+
expect(second.docs[0].id).toBe(expectedFirstDoc.id);
|
|
2941
|
+
});
|
|
2942
|
+
});
|
|
2943
|
+
describe('endAt', () => {
|
|
2944
|
+
it('should return values ending with the specified endAt point (inclusive).', async () => {
|
|
2945
|
+
const limitCount = 2;
|
|
2946
|
+
const firstQuery = query(firebase.limit(limitCount));
|
|
2947
|
+
const first = await firstQuery.getDocs();
|
|
2948
|
+
expect(first.docs.length).toBe(limitCount);
|
|
2949
|
+
const second = await firstQuery.filter(firebase.endAt(first.docs[0])).getDocs();
|
|
2950
|
+
expect(second.docs.length).toBe(limitCount - 1);
|
|
2951
|
+
expect(second.docs[0].id).toBe(first.docs[0].id);
|
|
2952
|
+
});
|
|
2953
|
+
});
|
|
2954
|
+
describe('endAtValue', () => {
|
|
2955
|
+
it('should return values starting from the specified startAt path.', async () => {
|
|
2956
|
+
const limitCount = testDocumentCount;
|
|
2957
|
+
const firstQuery = query(firebase.orderBy('value'), firebase.limit(limitCount));
|
|
2958
|
+
const first = await firstQuery.getDocs();
|
|
2959
|
+
expect(first.docs.length).toBe(limitCount);
|
|
2960
|
+
const indexToEndAt = 2;
|
|
2961
|
+
const docToEndAt = first.docs[indexToEndAt];
|
|
2962
|
+
const docToEndAtValue = docToEndAt.data().value;
|
|
2963
|
+
const second = await firstQuery.filter(firebase.endAtValue(docToEndAtValue)).getDocs();
|
|
2964
|
+
expect(second.docs.length).toBe(indexToEndAt + 1);
|
|
2965
|
+
expect(second.docs[second.docs.length - 1].id).toBe(docToEndAt.id);
|
|
2966
|
+
});
|
|
2967
|
+
});
|
|
2968
|
+
describe('endBefore', () => {
|
|
2969
|
+
it('should return values ending with the specified endBefore point (exclusive).', async () => {
|
|
2970
|
+
const limitCount = 2;
|
|
2971
|
+
const firstQuery = query(firebase.limit(limitCount));
|
|
2972
|
+
const first = await firstQuery.getDocs();
|
|
2973
|
+
expect(first.docs.length).toBe(limitCount);
|
|
2974
|
+
const second = await firstQuery.filter(firebase.endBefore(first.docs[1])).getDocs();
|
|
2975
|
+
expect(second.docs.length).toBe(limitCount - 1);
|
|
2976
|
+
expect(second.docs[0].id).toBe(first.docs[0].id);
|
|
2977
|
+
});
|
|
2978
|
+
});
|
|
2979
|
+
});
|
|
2980
|
+
});
|
|
2981
|
+
});
|
|
2982
|
+
}
|
|
2983
|
+
|
|
2984
|
+
/**
 * Describes firestore item page iteration tests, using a MockItemCollectionFixture.
 *
 * Exercises the page-iteration factory exposed by the fixture's firestoreCollection:
 * filter inputs (limit, constraints), pagination state observables, and the
 * snapshot/item accumulator utilities.
 *
 * NOTE(review): in this bundle `rxjs` appears to be the @dereekb/rxjs utilities
 * namespace (SubscriptionObject, accumulator helpers) while `rxjs$1` is the rxjs
 * library itself (filter, first, switchMap, from) — confirm against the bundle imports.
 *
 * @param f fixture providing `instance.firestoreCollection`
 */
function describeFirestoreIterationTests(f) {
  describe('firestoreItemPageIteration', () => {
    const testDocumentCount = 10;
    let firestoreIteration; // factory for creating a page iteration over the collection
    let items; // documents seeded into the collection before each test
    let sub; // subscription holder; destroyed after each test to avoid leaks
    beforeEach(async () => {
      firestoreIteration = f.instance.firestoreCollection.firestoreIteration;
      // Seed the collection with testDocumentCount documents (values "0".."9").
      items = await firebase.makeDocuments(f.instance.firestoreCollection.documentAccessor(), {
        count: testDocumentCount,
        init: i => {
          return {
            value: `${i}`,
            test: true
          };
        }
      });
      sub = new rxjs.SubscriptionObject();
    });
    afterEach(() => {
      sub.destroy();
    });
    describe('filter', () => {
      describe('limit', () => {
        it('should use the input limit for page size.', test.callbackTest(done => {
          const limit = 4;
          const iteration = firestoreIteration({
            limit
          });
          // The first emitted state should contain exactly one page of `limit` results.
          sub.subscription = iteration.latestState$.subscribe(x => {
            const results = x.value;
            expect(results.length).toBe(limit);
            done();
          });
        }));
      });
      describe('constraint', () => {
        it('should use the constraints', test.callbackTest(done => {
          // Constrain the iteration to documents whose value is '0' (only items[0] matches).
          const iteration = firestoreIteration({
            constraints: mockItemWithValue('0')
          });
          sub.subscription = iteration.latestState$.subscribe(x => {
            const results = x.value;
            expect(results.length).toBe(1);
            expect(results[0].id).toBe(items[0].documentRef.id);
            done();
          });
        }));
      });
    });
    describe('pagination', () => {
      const limit = 4;
      let iteration; // shared iteration under test; destroyed after each test
      beforeEach(() => {
        iteration = firestoreIteration({
          limit
        });
      });
      afterEach(() => {
        iteration.destroy();
      });
      describe('latestState$', () => {
        it('should load the first state when subscribed to for the first time.', test.callbackTest(done => {
          sub.subscription = iteration.latestState$.subscribe(latestState => {
            const page = latestState.page;
            expect(page).toBe(0);
            const values = latestState.value;
            expect(values.length).toBe(limit);
            done();
          });
        }));
      });
      describe('currentState$', () => {
        it('should load the first items when subscribed to for the first time.', test.callbackTest(done => {
          // currentState$ may emit before a value is loaded; wait for a state that has one.
          sub.subscription = iteration.currentState$.pipe(rxjs$1.filter(x => Boolean(x.value))).subscribe(currentState => {
            const page = currentState.page;
            expect(page).toBe(0);
            const values = currentState.value;
            expect(values.length).toBe(limit);
            done();
          });
        }));
      });
      describe('nextPage()', () => {
        it('should load the next page and return when the page has finished loading.', test.callbackTest(done => {
          // First call loads page 0; the second call should resolve once page 1 is loaded.
          iteration.nextPage().then(() => {
            const nextPageResult = rxjs$1.from(iteration.nextPage());
            sub.subscription = nextPageResult.pipe(rxjs$1.switchMap(x => iteration.currentState$)).subscribe(latestState => {
              const page = latestState.page;
              expect(page).toBe(1);
              const values = latestState.value;
              expect(values.length).toBe(limit);
              done();
            });
          });
        }));
      });
      describe('with accumulator', () => {
        let accumulatorSub; // separate subscription holder for accumulator observables
        beforeEach(() => {
          accumulatorSub = new rxjs.SubscriptionObject();
        });
        afterEach(() => {
          accumulatorSub.destroy();
        });
        describe('firebaseQuerySnapshotAccumulator()', () => {
          let accumulator;
          beforeEach(() => {
            accumulator = firebase.firebaseQuerySnapshotAccumulator(iteration);
          });
          it('should accumulate values from the query.', () => {
            // todo
          });
          describe('flattenAccumulatorResultItemArray()', () => {
            it(`should aggregate the array of results into a single array.`, test.callbackTest(done => {
              const pagesToLoad = 2;
              // load up to page 2
              rxjs.iteratorNextPageUntilPage(iteration, pagesToLoad).then(page => {
                expect(page).toBe(pagesToLoad - 1);
                const obs = rxjs.flattenAccumulatorResultItemArray(accumulator);
                accumulatorSub.subscription = obs.pipe(rxjs$1.first()).subscribe(values => {
                  expect(values.length).toBe(pagesToLoad * limit);
                  expect(util.arrayContainsDuplicateValue(values.map(x => x.id))).toBe(false);
                  // should not be a query snapshot; snapshot accumulator values expose a ref
                  expect(values[0].ref).toBeDefined();
                  done();
                });
              });
            }));
          });
          describe('accumulatorCurrentPageListLoadingState()', () => {
            it('should return a loading state for the current page.', test.callbackTest(done => {
              const obs = rxjs.accumulatorCurrentPageListLoadingState(accumulator);
              accumulatorSub.subscription = obs.pipe(rxjs$1.filter(x => !x.loading)).subscribe(state => {
                const value = state.value;
                expect(rxjs.isLoadingStateFinishedLoading(state)).toBe(true);
                expect(value).toBeDefined();
                // current-page state keeps results grouped per page (array of arrays)
                expect(Array.isArray(value)).toBe(true);
                expect(Array.isArray(value[0])).toBe(true);
                done();
              });
            }));
          });
        });
        describe('firebaseQueryItemAccumulator()', () => {
          let itemAccumulator;
          beforeEach(() => {
            itemAccumulator = firebase.firebaseQueryItemAccumulator(iteration);
          });
          describe('flattenAccumulatorResultItemArray()', () => {
            it(`should aggregate the array of results into a single array.`, test.callbackTest(done => {
              const pagesToLoad = 2;
              // load up to page 2
              rxjs.iteratorNextPageUntilPage(iteration, pagesToLoad).then(page => {
                expect(page).toBe(pagesToLoad - 1);
                const obs = rxjs.flattenAccumulatorResultItemArray(itemAccumulator);
                accumulatorSub.subscription = obs.pipe(rxjs$1.first()).subscribe(values => {
                  expect(values.length).toBe(pagesToLoad * limit);
                  expect(util.arrayContainsDuplicateValue(values.map(x => x.id))).toBe(false);
                  done();
                });
              });
            }));
          });
          describe('flattenAccumulatorResultItemArray()', () => {
            it(`should aggregate the array of results into a single array of the items.`, test.callbackTest(done => {
              const pagesToLoad = 2;
              // load up to page 2
              rxjs.iteratorNextPageUntilPage(iteration, pagesToLoad).then(page => {
                expect(page).toBe(pagesToLoad - 1);
                const obs = rxjs.flattenAccumulatorResultItemArray(itemAccumulator);
                accumulatorSub.subscription = obs.pipe(rxjs$1.first()).subscribe(values => {
                  expect(values.length).toBe(pagesToLoad * limit);
                  expect(util.arrayContainsDuplicateValue(values.map(x => x.id))).toBe(false);
                  // should not be a query snapshot; item accumulator values are plain items
                  expect(values[0].ref).not.toBeDefined();
                  done();
                });
              });
            }));
          });
          describe('accumulatorFlattenPageListLoadingState()', () => {
            it('should return a loading state for the current page with all items in a single array.', test.callbackTest(done => {
              const obs = rxjs.accumulatorFlattenPageListLoadingState(itemAccumulator);
              accumulatorSub.subscription = obs.pipe(rxjs$1.filter(x => !x.loading)).subscribe(state => {
                const value = state.value;
                expect(rxjs.isLoadingStateFinishedLoading(state)).toBe(true);
                expect(value).toBeDefined();
                // flattened state merges all pages into a single flat array
                expect(Array.isArray(value)).toBe(true);
                expect(Array.isArray(value[0])).toBe(false);
                done();
              });
            }));
          });
          describe('accumulatorCurrentPageListLoadingState()', () => {
            it('should return a loading state for the current page.', test.callbackTest(done => {
              const obs = rxjs.accumulatorCurrentPageListLoadingState(itemAccumulator);
              accumulatorSub.subscription = obs.pipe(rxjs$1.filter(x => !x.loading)).subscribe(state => {
                const value = state.value;
                expect(rxjs.isLoadingStateFinishedLoading(state)).toBe(true);
                expect(value).toBeDefined();
                // current-page state keeps results grouped per page (array of arrays)
                expect(Array.isArray(value)).toBe(true);
                expect(Array.isArray(value[0])).toBe(true);
                done();
              });
            }));
          });
        });
      });
    });
  });
}
|
|
3201
|
+
|
|
3202
|
+
/**
 * Test instance wrapping a firebase storage context.
 *
 * Holds the provided storage context and exposes its underlying storage.
 */
class TestFirebaseStorageInstance {
  /**
   * @param storageContext the storage context this instance wraps
   */
  constructor(storageContext) {
    // NOTE: the previous build emitted a redundant `this.storageContext = void 0;`
    // dead store (transpiled class-field artifact) that was immediately overwritten.
    this.storageContext = storageContext;
  }
  /**
   * The storage of the wrapped storage context.
   */
  get storage() {
    return this.storageContext.storage;
  }
}
|
|
3211
|
+
/**
 * Test context fixture for firebase storage tests.
 *
 * Delegates to the underlying instance (a TestFirebaseStorageInstance-like
 * object) for both the storage context and its storage.
 */
class TestFirebaseStorageContextFixture extends test.AbstractTestContextFixture {
  /**
   * The storage context of the underlying instance.
   */
  get storageContext() {
    const { storageContext } = this.instance;
    return storageContext;
  }
  /**
   * The storage of the underlying instance.
   */
  get storage() {
    const { storage } = this.instance;
    return storage;
  }
}
|
|
3219
|
+
|
|
3220
|
+
/**
 * Describes Firebase Storage accessor driver tests, using a storage context fixture
 * (TestFirebaseStorageContextFixture).
 *
 * @param f
 */
|
|
3225
|
+
function describeFirebaseStorageAccessorDriverTests(f) {
|
|
3226
|
+
describe('FirebaseStorageAccessor', () => {
|
|
3227
|
+
describe('file()', () => {
|
|
3228
|
+
const secondBucket = 'second-bucket';
|
|
3229
|
+
const doesNotExistFilePath = 'test.png';
|
|
3230
|
+
let doesNotExistFile;
|
|
3231
|
+
const existsFilePath = 'test/exists.txt';
|
|
3232
|
+
const existsFileContent = 'Hello! \ud83d\ude0a';
|
|
3233
|
+
const existsFileContentType = 'text/plain';
|
|
3234
|
+
let existsFile;
|
|
3235
|
+
let secondBucketTarget;
|
|
3236
|
+
beforeEach(async () => {
|
|
3237
|
+
doesNotExistFile = f.storageContext.file(doesNotExistFilePath);
|
|
3238
|
+
existsFile = f.storageContext.file(existsFilePath);
|
|
3239
|
+
await existsFile.upload(existsFileContent, {
|
|
3240
|
+
stringFormat: 'raw',
|
|
3241
|
+
contentType: existsFileContentType
|
|
3242
|
+
}); // re-upload for each test
|
|
3243
|
+
// delete the does not exist file and second bucket target if it exists
|
|
3244
|
+
await doesNotExistFile.delete().catch(() => null);
|
|
3245
|
+
secondBucketTarget = f.storageContext.file({
|
|
3246
|
+
bucketId: secondBucket,
|
|
3247
|
+
pathString: doesNotExistFilePath
|
|
3248
|
+
});
|
|
3249
|
+
await secondBucketTarget.delete().catch(() => null);
|
|
3250
|
+
});
|
|
3251
|
+
describe('uploading', () => {
|
|
3252
|
+
let uploadFile;
|
|
3253
|
+
beforeEach(() => {
|
|
3254
|
+
uploadFile = f.storageContext.file('upload.txt');
|
|
3255
|
+
});
|
|
3256
|
+
describe('upload()', () => {
|
|
3257
|
+
describe('string types', () => {
|
|
3258
|
+
test.itShouldFail('if stringFormat is not defined in the options', async () => {
|
|
3259
|
+
const contentType = 'text/plain';
|
|
3260
|
+
const data = existsFileContent;
|
|
3261
|
+
await test.expectFail(() => uploadFile.upload(data, {
|
|
3262
|
+
contentType
|
|
3263
|
+
}));
|
|
3264
|
+
});
|
|
3265
|
+
it('should upload a raw UTF-16 string.', async () => {
|
|
3266
|
+
const contentType = 'text/plain';
|
|
3267
|
+
const data = existsFileContent;
|
|
3268
|
+
await uploadFile.upload(data, {
|
|
3269
|
+
stringFormat: 'raw',
|
|
3270
|
+
contentType
|
|
3271
|
+
});
|
|
3272
|
+
const metadata = await uploadFile.getMetadata();
|
|
3273
|
+
expect(metadata.contentType).toBe(contentType);
|
|
3274
|
+
const result = await uploadFile.getBytes();
|
|
3275
|
+
expect(result).toBeDefined();
|
|
3276
|
+
const decoded = Buffer.from(result).toString('utf-8');
|
|
3277
|
+
expect(decoded).toBe(data);
|
|
3278
|
+
});
|
|
3279
|
+
it('should upload a base64 string.', async () => {
|
|
3280
|
+
const bytes = await existsFile.getBytes();
|
|
3281
|
+
const data = Buffer.from(bytes).toString('base64');
|
|
3282
|
+
const contentType = 'text/plain';
|
|
3283
|
+
await uploadFile.upload(data, {
|
|
3284
|
+
stringFormat: 'base64',
|
|
3285
|
+
contentType
|
|
3286
|
+
});
|
|
3287
|
+
const metadata = await uploadFile.getMetadata();
|
|
3288
|
+
expect(metadata.contentType).toBe(contentType);
|
|
3289
|
+
const result = await uploadFile.getBytes();
|
|
3290
|
+
expect(result).toBeDefined();
|
|
3291
|
+
const decoded = Buffer.from(result).toString('utf-8');
|
|
3292
|
+
expect(decoded).toBe(existsFileContent);
|
|
3293
|
+
});
|
|
3294
|
+
it('should upload a base64url string.', async () => {
|
|
3295
|
+
const bytes = await existsFile.getBytes();
|
|
3296
|
+
const data = Buffer.from(bytes).toString('base64url');
|
|
3297
|
+
const contentType = 'text/plain';
|
|
3298
|
+
await uploadFile.upload(data, {
|
|
3299
|
+
stringFormat: 'base64url',
|
|
3300
|
+
contentType
|
|
3301
|
+
});
|
|
3302
|
+
const metadata = await uploadFile.getMetadata();
|
|
3303
|
+
expect(metadata.contentType).toBe(contentType);
|
|
3304
|
+
const result = await uploadFile.getBytes();
|
|
3305
|
+
expect(result).toBeDefined();
|
|
3306
|
+
const decoded = Buffer.from(result).toString('utf-8');
|
|
3307
|
+
expect(decoded).toBe(existsFileContent);
|
|
3308
|
+
});
|
|
3309
|
+
});
|
|
3310
|
+
describe('data types', () => {
|
|
3311
|
+
// NOTE: We can really only test how a NodeJS environment will behave here.
|
|
3312
|
+
it('should upload a Uint8Array', async () => {
|
|
3313
|
+
const dataBuffer = Buffer.from(existsFileContent, 'utf-8');
|
|
3314
|
+
const data = new Uint8Array(dataBuffer);
|
|
3315
|
+
const contentType = 'text/plain';
|
|
3316
|
+
await uploadFile.upload(data, {
|
|
3317
|
+
contentType
|
|
3318
|
+
});
|
|
3319
|
+
const metadata = await uploadFile.getMetadata();
|
|
3320
|
+
expect(metadata.contentType).toBe(contentType);
|
|
3321
|
+
});
|
|
3322
|
+
it('should upload a Buffer', async () => {
|
|
3323
|
+
const buffer = Buffer.from(existsFileContent, 'utf-8');
|
|
3324
|
+
const contentType = 'text/plain';
|
|
3325
|
+
await uploadFile.upload(buffer, {
|
|
3326
|
+
contentType
|
|
3327
|
+
});
|
|
3328
|
+
const metadata = await uploadFile.getMetadata();
|
|
3329
|
+
expect(metadata.contentType).toBe(contentType);
|
|
3330
|
+
});
|
|
3331
|
+
it('should upload a Blob', async () => {
|
|
3332
|
+
const buffer = Buffer.from(existsFileContent, 'utf-8');
|
|
3333
|
+
const data = new Uint8Array(buffer);
|
|
3334
|
+
const blob = data.buffer; // blob-like
|
|
3335
|
+
const contentType = 'text/plain';
|
|
3336
|
+
await uploadFile.upload(blob, {
|
|
3337
|
+
contentType
|
|
3338
|
+
});
|
|
3339
|
+
const metadata = await uploadFile.getMetadata();
|
|
3340
|
+
expect(metadata.contentType).toBe(contentType);
|
|
3341
|
+
});
|
|
3342
|
+
// NOTE: File extends Blob, so above test should cover it ok.
|
|
3343
|
+
});
|
|
3344
|
+
// TODO(TEST): Test uploading other types.
|
|
3345
|
+
describe('custom metadata', () => {
|
|
3346
|
+
it('should upload custom metadata via customMetadata', async () => {
|
|
3347
|
+
const customMetadataKey = 'x-amz-meta-custom-key';
|
|
3348
|
+
const customMetadataValue = 'custom-value';
|
|
3349
|
+
const customMetadata = {
|
|
3350
|
+
[customMetadataKey]: customMetadataValue
|
|
3351
|
+
};
|
|
3352
|
+
const contentType = 'text/plain';
|
|
3353
|
+
const data = existsFileContent;
|
|
3354
|
+
await uploadFile.upload(data, {
|
|
3355
|
+
stringFormat: 'raw',
|
|
3356
|
+
contentType,
|
|
3357
|
+
customMetadata
|
|
3358
|
+
});
|
|
3359
|
+
const metadata = await uploadFile.getMetadata();
|
|
3360
|
+
expect(metadata.customMetadata).toEqual(customMetadata);
|
|
3361
|
+
});
|
|
3362
|
+
it('should upload custom metadata via metadata', async () => {
|
|
3363
|
+
const customMetadataKey = 'x-amz-meta-custom-key';
|
|
3364
|
+
const customMetadataValue = 'custom-value';
|
|
3365
|
+
const customMetadata = {
|
|
3366
|
+
[customMetadataKey]: customMetadataValue
|
|
3367
|
+
};
|
|
3368
|
+
const contentType = 'text/plain';
|
|
3369
|
+
const data = existsFileContent;
|
|
3370
|
+
await uploadFile.upload(data, {
|
|
3371
|
+
stringFormat: 'raw',
|
|
3372
|
+
contentType,
|
|
3373
|
+
metadata: {
|
|
3374
|
+
customMetadata
|
|
3375
|
+
}
|
|
3376
|
+
});
|
|
3377
|
+
const metadata = await uploadFile.getMetadata();
|
|
3378
|
+
expect(metadata.customMetadata).toEqual(customMetadata);
|
|
3379
|
+
});
|
|
3380
|
+
it('should upload the merged custom metadatas', async () => {
|
|
3381
|
+
const customMetadataAKey = 'x-amz-meta-custom-key';
|
|
3382
|
+
const customMetadataAValue = '1';
|
|
3383
|
+
const customMetadataBKey = 'x-axx-meta-custom-key';
|
|
3384
|
+
const customMetadataBValue = 'true';
|
|
3385
|
+
const customMetadataA = {
|
|
3386
|
+
[customMetadataAKey]: customMetadataAValue
|
|
3387
|
+
};
|
|
3388
|
+
const customMetadataB = {
|
|
3389
|
+
[customMetadataBKey]: customMetadataBValue
|
|
3390
|
+
};
|
|
3391
|
+
const contentType = 'text/plain';
|
|
3392
|
+
const data = existsFileContent;
|
|
3393
|
+
await uploadFile.upload(data, {
|
|
3394
|
+
stringFormat: 'raw',
|
|
3395
|
+
contentType,
|
|
3396
|
+
customMetadata: customMetadataA,
|
|
3397
|
+
metadata: {
|
|
3398
|
+
customMetadata: customMetadataB
|
|
3399
|
+
}
|
|
3400
|
+
});
|
|
3401
|
+
const metadata = await uploadFile.getMetadata();
|
|
3402
|
+
expect(metadata.customMetadata).toEqual({
|
|
3403
|
+
...customMetadataA,
|
|
3404
|
+
...customMetadataB
|
|
3405
|
+
});
|
|
3406
|
+
});
|
|
3407
|
+
});
|
|
3408
|
+
});
|
|
3409
|
+
describe('uploadStream()', () => {
|
|
3410
|
+
it('should upload a string using a WritableStream', async () => {
|
|
3411
|
+
if (uploadFile.uploadStream != null) {
|
|
3412
|
+
const contentType = 'text/plain';
|
|
3413
|
+
const data = existsFileContent;
|
|
3414
|
+
const stream = uploadFile.uploadStream();
|
|
3415
|
+
await util.useCallback(cb => stream.write(data, 'utf-8', cb));
|
|
3416
|
+
await util.useCallback(cb => stream.end(cb));
|
|
3417
|
+
const exists = await uploadFile.exists();
|
|
3418
|
+
expect(exists).toBe(true);
|
|
3419
|
+
const metadata = await uploadFile.getMetadata();
|
|
3420
|
+
expect(metadata.contentType).toBe(contentType);
|
|
3421
|
+
const result = await uploadFile.getBytes();
|
|
3422
|
+
expect(result).toBeDefined();
|
|
3423
|
+
const decoded = Buffer.from(result).toString('utf-8');
|
|
3424
|
+
expect(decoded).toBe(data);
|
|
3425
|
+
}
|
|
3426
|
+
});
|
|
3427
|
+
it('should upload a string using a stream using a WritableStream', async () => {
|
|
3428
|
+
if (uploadFile.uploadStream != null) {
|
|
3429
|
+
const myText = 'This is a test string.';
|
|
3430
|
+
// Create a readable stream from the string
|
|
3431
|
+
const readableStream = stream.Readable.from(myText, {
|
|
3432
|
+
encoding: 'utf-8'
|
|
3433
|
+
});
|
|
3434
|
+
await firebase.uploadFileWithStream(uploadFile, readableStream);
|
|
3435
|
+
const exists = await uploadFile.exists();
|
|
3436
|
+
expect(exists).toBe(true);
|
|
3437
|
+
const metadata = await uploadFile.getMetadata();
|
|
3438
|
+
expect(metadata.contentType).toBe('text/plain');
|
|
3439
|
+
const result = await uploadFile.getBytes();
|
|
3440
|
+
expect(result).toBeDefined();
|
|
3441
|
+
const decoded = Buffer.from(result).toString('utf-8');
|
|
3442
|
+
expect(decoded).toBe(myText);
|
|
3443
|
+
}
|
|
3444
|
+
});
|
|
3445
|
+
it('should upload a png using a stream using a WritableStream', async () => {
|
|
3446
|
+
if (uploadFile.uploadStream != null) {
|
|
3447
|
+
const testFilePath = `${__dirname}/assets/testpng.png`;
|
|
3448
|
+
const contentType = 'image/png';
|
|
3449
|
+
const testFileStream = fs.createReadStream(testFilePath, {});
|
|
3450
|
+
await firebase.uploadFileWithStream(uploadFile, testFileStream, {
|
|
3451
|
+
contentType
|
|
3452
|
+
});
|
|
3453
|
+
const exists = await uploadFile.exists();
|
|
3454
|
+
expect(exists).toBe(true);
|
|
3455
|
+
const metadata = await uploadFile.getMetadata();
|
|
3456
|
+
expect(metadata.contentType).toBe(contentType);
|
|
3457
|
+
const result = await uploadFile.getBytes();
|
|
3458
|
+
expect(result).toBeDefined();
|
|
3459
|
+
}
|
|
3460
|
+
});
|
|
3461
|
+
});
|
|
3462
|
+
});
|
|
3463
|
+
describe('copy()', () => {
|
|
3464
|
+
it('should copy the file to a new location in the same bucket.', async () => {
|
|
3465
|
+
if (existsFile.copy != null) {
|
|
3466
|
+
const exists = await doesNotExistFile.exists();
|
|
3467
|
+
expect(exists).toBe(false);
|
|
3468
|
+
const targetPath = doesNotExistFile.storagePath;
|
|
3469
|
+
const result = await existsFile.copy(targetPath);
|
|
3470
|
+
expect(result.storagePath.pathString).toBe(targetPath.pathString);
|
|
3471
|
+
const doesNotExistFileExists = await doesNotExistFile.exists();
|
|
3472
|
+
expect(doesNotExistFileExists).toBe(true);
|
|
3473
|
+
const existsStillExists = await existsFile.exists();
|
|
3474
|
+
expect(existsStillExists).toBe(true); // original still exists
|
|
3475
|
+
}
|
|
3476
|
+
});
|
|
3477
|
+
it('should copy the file to a new location to a different bucket.', async () => {
|
|
3478
|
+
if (existsFile.copy != null) {
|
|
3479
|
+
const secondBucket = {
|
|
3480
|
+
bucketId: 'second-bucket',
|
|
3481
|
+
pathString: secondBucketTarget.storagePath.pathString
|
|
3482
|
+
};
|
|
3483
|
+
const targetFile = f.storageContext.file(secondBucket);
|
|
3484
|
+
const exists = await targetFile.exists();
|
|
3485
|
+
expect(exists).toBe(false);
|
|
3486
|
+
const targetPath = targetFile.storagePath;
|
|
3487
|
+
const result = await existsFile.copy(targetPath);
|
|
3488
|
+
expect(result.storagePath.pathString).toBe(targetPath.pathString);
|
|
3489
|
+
const targetFileExists = await targetFile.exists();
|
|
3490
|
+
expect(targetFileExists).toBe(true);
|
|
3491
|
+
const doesNotExistExists = await doesNotExistFile.exists();
|
|
3492
|
+
expect(doesNotExistExists).toBe(false); // on a different bucket
|
|
3493
|
+
const existsStillExists = await existsFile.exists();
|
|
3494
|
+
expect(existsStillExists).toBe(true); // original still exists
|
|
3495
|
+
}
|
|
3496
|
+
});
|
|
3497
|
+
});
|
|
3498
|
+
describe('move()', () => {
|
|
3499
|
+
it('should move the file to a new location in the same bucket.', async () => {
|
|
3500
|
+
if (existsFile.move != null) {
|
|
3501
|
+
const exists = await doesNotExistFile.exists();
|
|
3502
|
+
expect(exists).toBe(false);
|
|
3503
|
+
const targetPath = doesNotExistFile.storagePath;
|
|
3504
|
+
const result = await existsFile.move(targetPath);
|
|
3505
|
+
expect(result.storagePath.pathString).toBe(targetPath.pathString);
|
|
3506
|
+
const doesNotExistExists = await doesNotExistFile.exists();
|
|
3507
|
+
expect(doesNotExistExists).toBe(true);
|
|
3508
|
+
const existsStillExists = await existsFile.exists();
|
|
3509
|
+
expect(existsStillExists).toBe(false); // check was moved
|
|
3510
|
+
}
|
|
3511
|
+
});
|
|
3512
|
+
it('should move the file to a new location to a different bucket.', async () => {
|
|
3513
|
+
if (existsFile.move != null) {
|
|
3514
|
+
const secondBucket = {
|
|
3515
|
+
bucketId: 'second-bucket',
|
|
3516
|
+
pathString: doesNotExistFile.storagePath.pathString
|
|
3517
|
+
};
|
|
3518
|
+
const targetFile = f.storageContext.file(secondBucket);
|
|
3519
|
+
const exists = await targetFile.exists();
|
|
3520
|
+
expect(exists).toBe(false);
|
|
3521
|
+
const targetPath = targetFile.storagePath;
|
|
3522
|
+
const result = await existsFile.move(targetPath);
|
|
3523
|
+
expect(result.storagePath.pathString).toBe(targetPath.pathString);
|
|
3524
|
+
const targetFileExists = await targetFile.exists();
|
|
3525
|
+
expect(targetFileExists).toBe(true);
|
|
3526
|
+
const doesNotExistStillDoesNotExists = await doesNotExistFile.exists();
|
|
3527
|
+
expect(doesNotExistStillDoesNotExists).toBe(false);
|
|
3528
|
+
const existsStillExists = await existsFile.exists();
|
|
3529
|
+
expect(existsStillExists).toBe(false); // check was moved
|
|
3530
|
+
}
|
|
3531
|
+
});
|
|
3532
|
+
});
|
|
3533
|
+
describe('exists()', () => {
|
|
3534
|
+
it('should return true if the file exists.', async () => {
|
|
3535
|
+
const result = await existsFile.exists();
|
|
3536
|
+
expect(result).toBe(true);
|
|
3537
|
+
});
|
|
3538
|
+
it('should return false if the file exists.', async () => {
|
|
3539
|
+
const result = await doesNotExistFile.exists();
|
|
3540
|
+
expect(result).toBe(false);
|
|
3541
|
+
});
|
|
3542
|
+
});
|
|
3543
|
+
describe('getMetadata()', () => {
|
|
3544
|
+
test.itShouldFail('if the file does not exist.', async () => {
|
|
3545
|
+
await test.expectFail(() => doesNotExistFile.getMetadata());
|
|
3546
|
+
});
|
|
3547
|
+
it('should return the metadata.', async () => {
|
|
3548
|
+
const result = await existsFile.getMetadata();
|
|
3549
|
+
expect(result.bucket).toBe(existsFile.storagePath.bucketId);
|
|
3550
|
+
expect(result.fullPath).toBe(existsFilePath);
|
|
3551
|
+
expect(typeof result.size).toBe('number');
|
|
3552
|
+
expect(result.size).toBeGreaterThan(0);
|
|
3553
|
+
expect(result.contentType).toBe(existsFileContentType);
|
|
3554
|
+
expect(result).toBeDefined();
|
|
3555
|
+
});
|
|
3556
|
+
});
|
|
3557
|
+
describe('setMetadata()', () => {
|
|
3558
|
+
test.itShouldFail('if the file does not exist.', async () => {
|
|
3559
|
+
await test.expectFail(() => doesNotExistFile.setMetadata({}));
|
|
3560
|
+
});
|
|
3561
|
+
it('should replace the content type field.', async () => {
|
|
3562
|
+
const currentMetadata = await existsFile.getMetadata();
|
|
3563
|
+
expect(currentMetadata.contentType).toBe(existsFileContentType);
|
|
3564
|
+
const nextContentType = 'application/json';
|
|
3565
|
+
const result = await existsFile.setMetadata({
|
|
3566
|
+
contentType: nextContentType
|
|
3567
|
+
});
|
|
3568
|
+
expect(result.contentType).toBe(nextContentType);
|
|
3569
|
+
const updatedMetadata = await existsFile.getMetadata();
|
|
3570
|
+
expect(updatedMetadata.contentType).toBe(nextContentType);
|
|
3571
|
+
});
|
|
3572
|
+
it('should replace the metadata for only the provided fields.', async () => {
|
|
3573
|
+
const currentMetadata = await existsFile.getMetadata();
|
|
3574
|
+
expect(currentMetadata.contentType).toBe(existsFileContentType);
|
|
3575
|
+
const customMetadataA = {
|
|
3576
|
+
foo: 'bar'
|
|
3577
|
+
};
|
|
3578
|
+
const result = await existsFile.setMetadata({
|
|
3579
|
+
contentType: undefined,
|
|
3580
|
+
// should not change
|
|
3581
|
+
customMetadata: customMetadataA
|
|
3582
|
+
});
|
|
3583
|
+
expect(result.contentType).toBe(existsFileContentType);
|
|
3584
|
+
expect(result.customMetadata).toEqual(customMetadataA);
|
|
3585
|
+
const updatedMetadata = await existsFile.getMetadata();
|
|
3586
|
+
expect(updatedMetadata.contentType).toBe(existsFileContentType);
|
|
3587
|
+
expect(updatedMetadata.customMetadata).toEqual(customMetadataA);
|
|
3588
|
+
// update again. All custom metadata is replaced
|
|
3589
|
+
const customMetadataB = {
|
|
3590
|
+
foo: 'baz'
|
|
3591
|
+
};
|
|
3592
|
+
const result2 = await existsFile.setMetadata({
|
|
3593
|
+
customMetadata: customMetadataB
|
|
3594
|
+
});
|
|
3595
|
+
expect(result2.contentType).toBe(existsFileContentType);
|
|
3596
|
+
expect(result2.customMetadata).toEqual(customMetadataB);
|
|
3597
|
+
const updatedMetadata2 = await existsFile.getMetadata();
|
|
3598
|
+
expect(updatedMetadata2.contentType).toBe(existsFileContentType);
|
|
3599
|
+
expect(updatedMetadata2.customMetadata).toEqual(customMetadataB);
|
|
3600
|
+
});
|
|
3601
|
+
});
|
|
3602
|
+
describe('getBytes()', () => {
|
|
3603
|
+
test.itShouldFail('if the file does not exist.', async () => {
|
|
3604
|
+
await test.expectFail(() => doesNotExistFile.getBytes());
|
|
3605
|
+
});
|
|
3606
|
+
it('should download the file.', async () => {
|
|
3607
|
+
const result = await existsFile.getBytes();
|
|
3608
|
+
expect(result).toBeDefined();
|
|
3609
|
+
const decoded = Buffer.from(result).toString('utf-8');
|
|
3610
|
+
expect(decoded).toBe(existsFileContent);
|
|
3611
|
+
});
|
|
3612
|
+
describe('with maxDownloadSizeBytes configuration', () => {
|
|
3613
|
+
it('should download up to the maxDownloadSizeBytes number of bytes', async () => {
|
|
3614
|
+
const charactersToTake = 5;
|
|
3615
|
+
const result = await existsFile.getBytes(charactersToTake); // each normal utf-8 character is 1 byte
|
|
3616
|
+
expect(result).toBeDefined();
|
|
3617
|
+
const decoded = Buffer.from(result).toString('utf-8');
|
|
3618
|
+
expect(decoded).toBe(existsFileContent.substring(0, charactersToTake));
|
|
3619
|
+
});
|
|
3620
|
+
});
|
|
3621
|
+
});
|
|
3622
|
+
describe('getStream()', () => {
|
|
3623
|
+
it('should download the file.', async () => {
|
|
3624
|
+
if (existsFile.getStream != null) {
|
|
3625
|
+
// only test if the driver/file has getStream available
|
|
3626
|
+
const stream = existsFile.getStream();
|
|
3627
|
+
expect(stream).toBeDefined();
|
|
3628
|
+
const buffer = await util.readableStreamToBuffer(stream);
|
|
3629
|
+
const decoded = buffer.toString('utf-8');
|
|
3630
|
+
expect(decoded).toBe(existsFileContent);
|
|
3631
|
+
}
|
|
3632
|
+
});
|
|
3633
|
+
});
|
|
3634
|
+
describe('getDownloadUrl()', () => {
|
|
3635
|
+
test.itShouldFail('if the file does not exist.', async () => {
|
|
3636
|
+
const doesNotExistFileExists = await doesNotExistFile.exists();
|
|
3637
|
+
expect(doesNotExistFileExists).toBe(false);
|
|
3638
|
+
await test.expectFail(() => doesNotExistFile.getDownloadUrl());
|
|
3639
|
+
});
|
|
3640
|
+
it('should return the download url.', async () => {
|
|
3641
|
+
const result = await existsFile.getDownloadUrl();
|
|
3642
|
+
expect(result).toBeDefined();
|
|
3643
|
+
expect(typeof result).toBe('string');
|
|
3644
|
+
});
|
|
3645
|
+
});
|
|
3646
|
+
// Cannot be tested, will throw "Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information."
|
|
3647
|
+
describe('getSignedUrl()', () => {
|
|
3648
|
+
it('should return the signed read url.', async () => {
|
|
3649
|
+
if (existsFile.getSignedUrl) {
|
|
3650
|
+
const result = await existsFile.getSignedUrl({});
|
|
3651
|
+
expect(result).toBeDefined();
|
|
3652
|
+
expect(typeof result).toBe('string');
|
|
3653
|
+
}
|
|
3654
|
+
});
|
|
3655
|
+
});
|
|
3656
|
+
describe('makePublic()', () => {
|
|
3657
|
+
beforeEach(async () => {
|
|
3658
|
+
await existsFile.delete();
|
|
3659
|
+
await existsFile.upload(existsFileContent, {
|
|
3660
|
+
stringFormat: 'raw',
|
|
3661
|
+
contentType: existsFileContentType
|
|
3662
|
+
}); // re-upload for each test
|
|
3663
|
+
});
|
|
3664
|
+
it('should make the file public.', async () => {
|
|
3665
|
+
if (existsFile.makePublic && existsFile.isPublic && existsFile.getAcls) {
|
|
3666
|
+
// TODO: firestore emulator files seem to always be public and ACLs do not change?
|
|
3667
|
+
// let isPublic = await existsFile.isPublic();
|
|
3668
|
+
// expect(isPublic).toBe(false);
|
|
3669
|
+
// TODO: Not implemented in the emulator properly either
|
|
3670
|
+
// const acls = await existsFile.getAcls();
|
|
3671
|
+
// console.log({ acls });
|
|
3672
|
+
await existsFile.makePublic(true);
|
|
3673
|
+
// TODO: doesn't really test it properly since true is always returned by the emulator...
|
|
3674
|
+
const isPublic = await existsFile.isPublic();
|
|
3675
|
+
expect(isPublic).toBe(true);
|
|
3676
|
+
// TODO: Not implemented in the emulator
|
|
3677
|
+
// await existsFile.makePublic(false);
|
|
3678
|
+
// isPublic = await existsFile.isPublic();
|
|
3679
|
+
// expect(isPublic).toBe(false);
|
|
3680
|
+
}
|
|
3681
|
+
});
|
|
3682
|
+
});
|
|
3683
|
+
// TODO: getAcls() and related functions cannot be tested in the emulator currently
|
|
3684
|
+
describe('delete()', () => {
|
|
3685
|
+
test.itShouldFail('if the file does not exist.', async () => {
|
|
3686
|
+
await test.expectFail(() => doesNotExistFile.delete());
|
|
3687
|
+
});
|
|
3688
|
+
it('should delete the file at the path.', async () => {
|
|
3689
|
+
await existsFile.delete();
|
|
3690
|
+
const result = await existsFile.exists();
|
|
3691
|
+
expect(result).toBe(false);
|
|
3692
|
+
});
|
|
3693
|
+
describe('ignoreNotFound=true', () => {
|
|
3694
|
+
it('should not throw an error if the file does not exist.', async () => {
|
|
3695
|
+
await doesNotExistFile.delete({
|
|
3696
|
+
ignoreNotFound: true
|
|
3697
|
+
});
|
|
3698
|
+
});
|
|
3699
|
+
});
|
|
3700
|
+
});
|
|
3701
|
+
});
|
|
3702
|
+
describe('folder()', () => {
|
|
3703
|
+
const doesNotExistFolderPath = '/doesnotexist/';
|
|
3704
|
+
let doesNotExistFolder;
|
|
3705
|
+
const existsFolderPath = '/test/two/';
|
|
3706
|
+
let existsFolder;
|
|
3707
|
+
const existsFileName = 'exists.txt';
|
|
3708
|
+
const existsFilePath = existsFolderPath + existsFileName;
|
|
3709
|
+
const existsFileContent = 'Hello! \ud83d\ude0a';
|
|
3710
|
+
let existsFile;
|
|
3711
|
+
beforeEach(async () => {
|
|
3712
|
+
doesNotExistFolder = f.storageContext.folder(doesNotExistFolderPath);
|
|
3713
|
+
existsFolder = f.storageContext.folder(existsFolderPath);
|
|
3714
|
+
existsFile = f.storageContext.file(existsFilePath);
|
|
3715
|
+
await existsFile.upload(existsFileContent, {
|
|
3716
|
+
stringFormat: 'raw',
|
|
3717
|
+
contentType: 'text/plain'
|
|
3718
|
+
});
|
|
3719
|
+
});
|
|
3720
|
+
describe('exists()', () => {
|
|
3721
|
+
it('should return false if there are no items in the folder.', async () => {
|
|
3722
|
+
const exists = await doesNotExistFolder.exists();
|
|
3723
|
+
expect(exists).toBe(false);
|
|
3724
|
+
});
|
|
3725
|
+
it('should return true if there are items in the folder.', async () => {
|
|
3726
|
+
const exists = await existsFolder.exists();
|
|
3727
|
+
expect(exists).toBe(true);
|
|
3728
|
+
});
|
|
3729
|
+
});
|
|
3730
|
+
describe('list()', () => {
|
|
3731
|
+
const existsBFileName = 'a.txt';
|
|
3732
|
+
const existsBFilePath = existsFolderPath + existsBFileName;
|
|
3733
|
+
const existsCFolderPath = existsFolderPath + 'c/';
|
|
3734
|
+
const existsCFilePath = existsCFolderPath + 'c.txt';
|
|
3735
|
+
const otherFolderPath = '/other/';
|
|
3736
|
+
const otherFolderFilePath = otherFolderPath + 'other.txt';
|
|
3737
|
+
beforeEach(async () => {
|
|
3738
|
+
await f.storageContext.file(existsBFilePath).upload(existsFileContent, {
|
|
3739
|
+
stringFormat: 'raw',
|
|
3740
|
+
contentType: 'text/plain'
|
|
3741
|
+
});
|
|
3742
|
+
await f.storageContext.file(existsCFilePath).upload(existsFileContent, {
|
|
3743
|
+
stringFormat: 'raw',
|
|
3744
|
+
contentType: 'text/plain'
|
|
3745
|
+
});
|
|
3746
|
+
await f.storageContext.file(otherFolderFilePath).upload(existsFileContent, {
|
|
3747
|
+
stringFormat: 'raw',
|
|
3748
|
+
contentType: 'text/plain'
|
|
3749
|
+
});
|
|
3750
|
+
});
|
|
3751
|
+
describe('options', () => {
|
|
3752
|
+
describe('listAll', () => {
|
|
3753
|
+
describe('=false/unset', () => {
|
|
3754
|
+
it('should list all the direct files and folders that exist on the test path.', async () => {
|
|
3755
|
+
const result = await existsFolder.list();
|
|
3756
|
+
expect(result).toBeDefined();
|
|
3757
|
+
const files = result.files();
|
|
3758
|
+
expect(files.length).toBe(2);
|
|
3759
|
+
const fileNames = new Set(files.map(x => x.name));
|
|
3760
|
+
expect(fileNames).toContain(existsFileName);
|
|
3761
|
+
expect(fileNames).toContain(existsBFileName);
|
|
3762
|
+
const folders = result.folders();
|
|
3763
|
+
expect(folders.length).toBe(1);
|
|
3764
|
+
const folderNames = new Set(folders.map(x => x.name));
|
|
3765
|
+
expect(folderNames).toContain('c');
|
|
3766
|
+
});
|
|
3767
|
+
it('should list all the direct folders that exist at the root.', async () => {
|
|
3768
|
+
const rootFolder = await f.storageContext.folder('/');
|
|
3769
|
+
const result = await rootFolder.list();
|
|
3770
|
+
expect(result).toBeDefined();
|
|
3771
|
+
const files = result.files();
|
|
3772
|
+
expect(files.length).toBe(0); // files are under /test/ and /other/
|
|
3773
|
+
const folders = result.folders();
|
|
3774
|
+
expect(folders.length).toBe(2);
|
|
3775
|
+
const names = new Set(folders.map(x => x.name));
|
|
3776
|
+
expect(names).toContain('test');
|
|
3777
|
+
expect(names).toContain('other');
|
|
3778
|
+
});
|
|
3779
|
+
});
|
|
3780
|
+
describe('=true', () => {
|
|
3781
|
+
it('should list all files and folders that exist on the test path.', async () => {
|
|
3782
|
+
const result = await existsFolder.list({
|
|
3783
|
+
includeNestedResults: true
|
|
3784
|
+
});
|
|
3785
|
+
expect(result).toBeDefined();
|
|
3786
|
+
const files = result.files();
|
|
3787
|
+
expect(files.length).toBe(3);
|
|
3788
|
+
const filePaths = new Set(files.map(x => `${util.SLASH_PATH_SEPARATOR}${x.storagePath.pathString}`));
|
|
3789
|
+
expect(filePaths).toContain(existsFilePath);
|
|
3790
|
+
expect(filePaths).toContain(existsBFilePath);
|
|
3791
|
+
expect(filePaths).toContain(existsCFilePath);
|
|
3792
|
+
expect(filePaths).not.toContain(otherFolderFilePath);
|
|
3793
|
+
// folders are not counted/returned
|
|
3794
|
+
const folders = result.folders();
|
|
3795
|
+
expect(folders.length).toBe(0);
|
|
3796
|
+
});
|
|
3797
|
+
it('should list all the folders that exist at the root.', async () => {
|
|
3798
|
+
const rootFolder = await f.storageContext.folder('/');
|
|
3799
|
+
const result = await rootFolder.list({
|
|
3800
|
+
includeNestedResults: true
|
|
3801
|
+
});
|
|
3802
|
+
expect(result).toBeDefined();
|
|
3803
|
+
const files = result.files();
|
|
3804
|
+
expect(files.length).toBe(4); // all created files
|
|
3805
|
+
const folders = result.folders();
|
|
3806
|
+
expect(folders.length).toBe(0);
|
|
3807
|
+
});
|
|
3808
|
+
describe('maxResults', () => {
|
|
3809
|
+
it('should limit the number of results returned.', async () => {
|
|
3810
|
+
const rootFolder = await f.storageContext.folder('/');
|
|
3811
|
+
const limit = 2;
|
|
3812
|
+
const result = await rootFolder.list({
|
|
3813
|
+
includeNestedResults: true,
|
|
3814
|
+
maxResults: limit
|
|
3815
|
+
});
|
|
3816
|
+
expect(result).toBeDefined();
|
|
3817
|
+
if (f.storageContext.drivers.storageAccessorDriver.type === 'server') {
|
|
3818
|
+
// Currently only the server can properly limit the number of results returned.
|
|
3819
|
+
// The client-side will limit the results somewhat, but if folders are returned then it will return the results of those folders as well.
|
|
3820
|
+
const files = result.files();
|
|
3821
|
+
expect(files.length).toBe(limit);
|
|
3822
|
+
const nextPage = await result.next();
|
|
3823
|
+
const nextPageFiles = nextPage.files();
|
|
3824
|
+
expect(nextPageFiles.length).toBe(limit);
|
|
3825
|
+
}
|
|
3826
|
+
const folders = result.folders();
|
|
3827
|
+
expect(folders.length).toBe(0);
|
|
3828
|
+
});
|
|
3829
|
+
});
|
|
3830
|
+
});
|
|
3831
|
+
});
|
|
3832
|
+
});
|
|
3833
|
+
describe('file()', () => {
|
|
3834
|
+
it('should return the file for the result.', async () => {
|
|
3835
|
+
const result = await existsFolder.list();
|
|
3836
|
+
expect(result).toBeDefined();
|
|
3837
|
+
const files = result.files();
|
|
3838
|
+
const fileResult = files.find(x => x.name === existsFileName);
|
|
3839
|
+
const file = fileResult.file();
|
|
3840
|
+
const exists = await file.exists();
|
|
3841
|
+
expect(exists).toBe(true);
|
|
3842
|
+
});
|
|
3843
|
+
});
|
|
3844
|
+
describe('folder()', () => {
|
|
3845
|
+
it('should return the folder for the result.', async () => {
|
|
3846
|
+
const rootFolder = await f.storageContext.folder('/');
|
|
3847
|
+
const result = await rootFolder.list();
|
|
3848
|
+
expect(result).toBeDefined();
|
|
3849
|
+
const folders = result.folders();
|
|
3850
|
+
const folderResult = folders.find(x => x.name === 'test');
|
|
3851
|
+
const folder = folderResult.folder();
|
|
3852
|
+
const exists = await folder.exists();
|
|
3853
|
+
expect(exists).toBe(true);
|
|
3854
|
+
});
|
|
3855
|
+
});
|
|
3856
|
+
describe('next()', () => {
|
|
3857
|
+
it('should return the next set of results.', async () => {
|
|
3858
|
+
const maxResults = 1;
|
|
3859
|
+
const rootFolder = await f.storageContext.folder(existsFolderPath);
|
|
3860
|
+
const result = await rootFolder.list({
|
|
3861
|
+
maxResults
|
|
3862
|
+
});
|
|
3863
|
+
expect(result).toBeDefined();
|
|
3864
|
+
const files = result.files();
|
|
3865
|
+
expect(files.length).toBe(maxResults);
|
|
3866
|
+
const next = await result.next();
|
|
3867
|
+
expect(next).toBeDefined();
|
|
3868
|
+
const nextFiles = next.files();
|
|
3869
|
+
expect(nextFiles.length).toBe(maxResults);
|
|
3870
|
+
expect(nextFiles[0].storagePath.pathString).not.toBe(files[0].storagePath.pathString);
|
|
3871
|
+
expect(next.hasNext).toBe(false);
|
|
3872
|
+
});
|
|
3873
|
+
test.itShouldFail('if next() is called and hasNext was false.', async () => {
|
|
3874
|
+
const rootFolder = await f.storageContext.folder(existsFolderPath);
|
|
3875
|
+
const result = await rootFolder.list({});
|
|
3876
|
+
expect(result.hasNext).toBe(false);
|
|
3877
|
+
await test.expectFail(() => result.next());
|
|
3878
|
+
});
|
|
3879
|
+
});
|
|
3880
|
+
describe('maxResults', () => {
|
|
3881
|
+
it('should respect the max results.', async () => {
|
|
3882
|
+
const maxResults = 1;
|
|
3883
|
+
const rootFolder = await f.storageContext.folder(existsFolderPath);
|
|
3884
|
+
const result = await rootFolder.list({
|
|
3885
|
+
maxResults
|
|
3886
|
+
});
|
|
3887
|
+
expect(result).toBeDefined();
|
|
3888
|
+
const files = result.files();
|
|
3889
|
+
expect(files.length).toBe(maxResults);
|
|
3890
|
+
const folders = result.folders();
|
|
3891
|
+
expect(folders.length).toBe(1);
|
|
3892
|
+
const names = new Set(folders.map(x => x.name));
|
|
3893
|
+
expect(names).toContain('c');
|
|
3894
|
+
});
|
|
3895
|
+
it('prefixes/folders are unaffected by maxResults.', async () => {
|
|
3896
|
+
const maxResults = 1;
|
|
3897
|
+
const rootFolder = await f.storageContext.folder('/');
|
|
3898
|
+
const result = await rootFolder.list({
|
|
3899
|
+
maxResults
|
|
3900
|
+
});
|
|
3901
|
+
expect(result).toBeDefined();
|
|
3902
|
+
const files = result.files();
|
|
3903
|
+
expect(files.length).toBe(0); // files are under /test/ and /other/
|
|
3904
|
+
const folders = result.folders();
|
|
3905
|
+
expect(folders.length).toBe(2);
|
|
3906
|
+
const names = new Set(folders.map(x => x.name));
|
|
3907
|
+
expect(names).toContain('test');
|
|
3908
|
+
expect(names).toContain('other');
|
|
3909
|
+
});
|
|
3910
|
+
});
|
|
3911
|
+
// Tests for the storage list-file utility helpers.
describe('utilities', () => {
  describe('iterateStorageListFilesByEachFile()', () => {
    // Builds the absolute-style path string ("/a/b/c") for a visited file so it can be
    // compared against the expected path constants declared by the surrounding fixture.
    const fullPathOf = (file) => `${util.SLASH_PATH_SEPARATOR}${file.storagePath.pathString}`;

    it('should iterate through all the files in the current folder one at a time', async () => {
      const seenFiles = [];

      const result = await firebase.iterateStorageListFilesByEachFile({
        folder: existsFolder,
        readItemsFromPageResult: (page) => page.result.files(),
        iterateEachPageItem: async (file) => {
          seenFiles.push(file);
        }
      });

      const seenPaths = seenFiles.map(fullPathOf);

      // Only the two files directly inside the folder are visited; nested and sibling-folder files are not.
      expect(seenPaths).toContain(existsFilePath);
      expect(seenPaths).toContain(existsBFilePath);
      expect(seenPaths).not.toContain(existsCFilePath);
      expect(seenPaths).not.toContain(otherFolderFilePath);

      expect(result).toBeDefined();
      expect(result.totalItemsLoaded).toBe(2);
      expect(result.totalItemsVisited).toBe(seenFiles.length);
    });

    describe('includeNestedResults=true', () => {
      it('should iterate through all the files and nested files under the current folder one at a time', async () => {
        const seenFiles = [];

        const result = await firebase.iterateStorageListFilesByEachFile({
          folder: existsFolder,
          includeNestedResults: true,
          readItemsFromPageResult: (page) => page.result.files(),
          iterateEachPageItem: async (file) => {
            seenFiles.push(file);
          }
        });

        const seenPaths = seenFiles.map(fullPathOf);

        expect(result).toBeDefined();
        expect(result.totalItemsLoaded).toBe(3);
        expect(result.totalItemsVisited).toBe(seenFiles.length);

        // With nesting enabled the nested file is now included, but files in unrelated folders still are not.
        expect(seenPaths).toContain(existsFilePath);
        expect(seenPaths).toContain(existsBFilePath);
        expect(seenPaths).toContain(existsCFilePath);
        expect(seenPaths).not.toContain(otherFolderFilePath);
      });
    });
  });
});
|
|
3954
|
+
});
|
|
3955
|
+
});
|
|
3956
|
+
});
|
|
3957
|
+
}
|
|
3958
|
+
|
|
3959
|
+
// Public CommonJS surface of this testing bundle. Every exported name matches its
// local binding, so the exports are attached in one shorthand Object.assign call;
// property order matches the original per-line `exports.X = X` assignments.
Object.assign(exports, {
  MOCK_FIREBASE_MODEL_SERVICE_FACTORIES,
  MOCK_SYSTEM_STATE_TYPE,
  MockItemCollectionFixture,
  MockItemCollectionFixtureInstance,
  MockItemCollections,
  MockItemDocument,
  MockItemPrivateDocument,
  MockItemStorageFixture,
  MockItemStorageFixtureInstance,
  MockItemSubItemDeepDocument,
  MockItemSubItemDocument,
  MockItemUserDocument,
  RulesUnitTestFirebaseTestingContextFixture,
  RulesUnitTestTestFirebaseInstance,
  TESTING_AUTHORIZED_FIREBASE_USER_ID,
  TestFirebaseContextFixture,
  TestFirebaseInstance,
  TestFirebaseStorageContextFixture,
  TestFirebaseStorageInstance,
  TestFirestoreContextFixture,
  TestFirestoreInstance,
  allChildMockItemSubItemDeepsWithinMockItem,
  authorizedFirebaseFactory,
  authorizedTestWithMockItemCollection,
  authorizedTestWithMockItemStorage,
  changeFirestoreLogLevelBeforeAndAfterTests,
  clearTestFirestoreContextCollections,
  describeFirebaseStorageAccessorDriverTests,
  describeFirestoreAccessorDriverTests,
  describeFirestoreDocumentAccessorTests,
  describeFirestoreIterationTests,
  describeFirestoreQueryDriverTests,
  firebaseRulesUnitTestBuilder,
  makeMockItemCollections,
  makeRulesTestFirebaseStorageContext,
  makeRulesTestFirestoreContext,
  makeTestingFirebaseStorageAccesorDriver,
  makeTestingFirebaseStorageDrivers,
  makeTestingFirestoreAccesorDriver,
  makeTestingFirestoreDrivers,
  mockFirebaseModelServices,
  mockItemCollectionReference,
  mockItemConverter,
  mockItemFirebaseModelServiceFactory,
  mockItemFirestoreCollection,
  mockItemIdentity,
  mockItemPrivateCollectionReference,
  mockItemPrivateCollectionReferenceFactory,
  mockItemPrivateConverter,
  mockItemPrivateFirebaseModelServiceFactory,
  mockItemPrivateFirestoreCollection,
  mockItemPrivateFirestoreCollectionGroup,
  mockItemPrivateIdentity,
  mockItemSettingsItemDencoder,
  mockItemSubItemCollectionReference,
  mockItemSubItemCollectionReferenceFactory,
  mockItemSubItemConverter,
  mockItemSubItemDeepCollectionReference,
  mockItemSubItemDeepCollectionReferenceFactory,
  mockItemSubItemDeepConverter,
  mockItemSubItemDeepFirebaseModelServiceFactory,
  mockItemSubItemDeepFirestoreCollection,
  mockItemSubItemDeepFirestoreCollectionGroup,
  mockItemSubItemDeepIdentity,
  mockItemSubItemFirebaseModelServiceFactory,
  mockItemSubItemFirestoreCollection,
  mockItemSubItemFirestoreCollectionGroup,
  mockItemSubItemIdentity,
  mockItemSystemDataConverter,
  mockItemSystemStateFirebaseModelServiceFactory,
  mockItemSystemStateStoredDataConverterMap,
  mockItemUserAccessorFactory,
  mockItemUserCollectionName,
  mockItemUserCollectionReference,
  mockItemUserCollectionReferenceFactory,
  mockItemUserConverter,
  mockItemUserFirebaseModelServiceFactory,
  mockItemUserFirestoreCollection,
  mockItemUserFirestoreCollectionGroup,
  mockItemUserIdentifier,
  mockItemUserIdentity,
  mockItemWithTestValue,
  mockItemWithValue,
  testWithMockItemCollectionFixture,
  testWithMockItemStorageFixture
});
|