@dereekb/firebase 12.6.21 → 13.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/index.cjs.js +53 -2239
- package/index.esm.js +34 -2216
- package/package.json +21 -15
- package/src/lib/client/storage/driver.accessor.d.ts +1 -1
- package/src/lib/common/firestore/query/iterator.d.ts +0 -4
- package/src/lib/common/firestore/snapshot/snapshot.field.d.ts +0 -6
- package/src/lib/common/model/model.service.d.ts +1 -1
- package/src/lib/common/storage/driver/accessor.d.ts +1 -1
- package/src/lib/common/storage/types.d.ts +3 -3
- package/src/lib/model/notification/notification.api.d.ts +1 -1
- package/src/lib/model/notification/notification.config.d.ts +5 -5
- package/src/lib/model/notification/notification.d.ts +15 -15
- package/src/lib/model/notification/notification.details.d.ts +0 -4
- package/src/lib/model/notification/notification.item.d.ts +1 -1
- package/src/lib/model/storagefile/storagefile.api.d.ts +4 -4
- package/src/lib/model/storagefile/storagefile.d.ts +7 -7
- package/src/lib/model/storagefile/storagefile.task.d.ts +1 -13
- package/src/lib/model/system/system.d.ts +2 -2
- package/test/index.cjs.js +4043 -0
- package/test/index.esm.js +3957 -0
- package/test/package.json +27 -9
- package/test/src/lib/client/firebase.authorized.d.ts +2 -2
- package/test/src/lib/client/firebase.d.ts +4 -3
- package/test/src/lib/client/firestore.mock.item.fixture.authorized.d.ts +2 -2
- package/test/src/lib/common/firebase.instance.d.ts +7 -3
- package/test/src/lib/common/firestore/firestore.instance.d.ts +7 -3
- package/test/src/lib/common/mock/mock.item.collection.fixture.d.ts +5 -2
- package/test/src/lib/common/mock/mock.item.storage.fixture.d.ts +4 -4
- package/test/src/lib/common/storage/storage.instance.d.ts +7 -3
- package/test/CHANGELOG.md +0 -2114
- package/test/README.md +0 -11
- package/test/src/index.js +0 -5
- package/test/src/index.js.map +0 -1
- package/test/src/lib/client/firebase.authorized.js +0 -35
- package/test/src/lib/client/firebase.authorized.js.map +0 -1
- package/test/src/lib/client/firebase.js +0 -125
- package/test/src/lib/client/firebase.js.map +0 -1
- package/test/src/lib/client/firestore.mock.item.fixture.authorized.js +0 -19
- package/test/src/lib/client/firestore.mock.item.fixture.authorized.js.map +0 -1
- package/test/src/lib/client/index.js +0 -7
- package/test/src/lib/client/index.js.map +0 -1
- package/test/src/lib/common/firebase.instance.js +0 -35
- package/test/src/lib/common/firebase.instance.js.map +0 -1
- package/test/src/lib/common/firestore/firestore.instance.js +0 -24
- package/test/src/lib/common/firestore/firestore.instance.js.map +0 -1
- package/test/src/lib/common/firestore/firestore.js +0 -67
- package/test/src/lib/common/firestore/firestore.js.map +0 -1
- package/test/src/lib/common/firestore/index.js +0 -9
- package/test/src/lib/common/firestore/index.js.map +0 -1
- package/test/src/lib/common/firestore/test.driver.accessor.js +0 -767
- package/test/src/lib/common/firestore/test.driver.accessor.js.map +0 -1
- package/test/src/lib/common/firestore/test.driver.query.js +0 -1361
- package/test/src/lib/common/firestore/test.driver.query.js.map +0 -1
- package/test/src/lib/common/firestore/test.iterator.js +0 -221
- package/test/src/lib/common/firestore/test.iterator.js.map +0 -1
- package/test/src/lib/common/index.js +0 -8
- package/test/src/lib/common/index.js.map +0 -1
- package/test/src/lib/common/mock/index.js +0 -10
- package/test/src/lib/common/mock/index.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.collection.fixture.js +0 -64
- package/test/src/lib/common/mock/mock.item.collection.fixture.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.id.js +0 -3
- package/test/src/lib/common/mock/mock.item.id.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.js +0 -339
- package/test/src/lib/common/mock/mock.item.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.query.js +0 -33
- package/test/src/lib/common/mock/mock.item.query.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.service.js +0 -77
- package/test/src/lib/common/mock/mock.item.service.js.map +0 -1
- package/test/src/lib/common/mock/mock.item.storage.fixture.js +0 -40
- package/test/src/lib/common/mock/mock.item.storage.fixture.js.map +0 -1
- package/test/src/lib/common/storage/index.js +0 -7
- package/test/src/lib/common/storage/index.js.map +0 -1
- package/test/src/lib/common/storage/storage.instance.js +0 -24
- package/test/src/lib/common/storage/storage.instance.js.map +0 -1
- package/test/src/lib/common/storage/storage.js +0 -37
- package/test/src/lib/common/storage/storage.js.map +0 -1
- package/test/src/lib/common/storage/test.driver.accessor.js +0 -669
- package/test/src/lib/common/storage/test.driver.accessor.js.map +0 -1
- package/test/src/lib/index.js +0 -6
- package/test/src/lib/index.js.map +0 -1
- /package/{index.cjs.d.ts → index.d.ts} +0 -0
- /package/{index.esm.d.ts → test/index.d.ts} +0 -0
|
@@ -1,1361 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.describeFirestoreQueryDriverTests = describeFirestoreQueryDriverTests;
|
|
4
|
-
const date_fns_1 = require("date-fns");
|
|
5
|
-
const test_1 = require("@dereekb/util/test");
|
|
6
|
-
const rxjs_1 = require("@dereekb/rxjs");
|
|
7
|
-
const rxjs_2 = require("rxjs");
|
|
8
|
-
const firebase_1 = require("@dereekb/firebase");
|
|
9
|
-
const mock_1 = require("../mock");
|
|
10
|
-
const util_1 = require("@dereekb/util");
|
|
11
|
-
const date_1 = require("@dereekb/date");
|
|
12
|
-
/**
|
|
13
|
-
* Describes query driver tests, using a MockItemCollectionFixture.
|
|
14
|
-
*
|
|
15
|
-
* @param f
|
|
16
|
-
*/
|
|
17
|
-
function describeFirestoreQueryDriverTests(f) {
|
|
18
|
-
describe('FirestoreQueryDriver', () => {
|
|
19
|
-
const testDocumentCount = 5;
|
|
20
|
-
let items;
|
|
21
|
-
const startDate = (0, date_fns_1.addDays)((0, date_fns_1.startOfDay)(new Date()), 1);
|
|
22
|
-
const EVEN_TAG = 'even';
|
|
23
|
-
const ODD_TAG = 'odd';
|
|
24
|
-
beforeEach(async () => {
|
|
25
|
-
items = await (0, firebase_1.makeDocuments)(f.instance.firestoreCollection.documentAccessor(), {
|
|
26
|
-
count: testDocumentCount,
|
|
27
|
-
init: (i) => {
|
|
28
|
-
return {
|
|
29
|
-
value: `${i}`,
|
|
30
|
-
number: i,
|
|
31
|
-
date: (0, date_fns_1.addHours)(startDate, i),
|
|
32
|
-
tags: [`${i}`, `${(0, util_1.isEvenNumber)(i) ? EVEN_TAG : ODD_TAG}`],
|
|
33
|
-
test: true
|
|
34
|
-
};
|
|
35
|
-
}
|
|
36
|
-
});
|
|
37
|
-
});
|
|
38
|
-
describe('firestoreIdBatchVerifierFactory()', () => {
|
|
39
|
-
const mockItemIdBatchVerifier = (0, firebase_1.firestoreIdBatchVerifierFactory)({
|
|
40
|
-
readKeys: (x) => [x.id],
|
|
41
|
-
fieldToQuery: '_id'
|
|
42
|
-
});
|
|
43
|
-
it('should query on the id field.', async () => {
|
|
44
|
-
const takenIds = items.map((x) => x.id);
|
|
45
|
-
const result = await f.instance.mockItemCollection.queryDocument((0, firebase_1.whereDocumentId)('in', takenIds)).getDocs();
|
|
46
|
-
expect(result).toBeDefined();
|
|
47
|
-
expect(result.length).toBe(takenIds.length);
|
|
48
|
-
expect(result.map((x) => x.id)).toContain(takenIds[0]);
|
|
49
|
-
});
|
|
50
|
-
it('should return ids that are not taken.', async () => {
|
|
51
|
-
const takenIds = items.map((x) => x.id);
|
|
52
|
-
const idFactory = (0, util_1.arrayFactory)((0, util_1.mapGetter)((0, util_1.randomNumberFactory)(10000000), (x) => `test-id-${x}`));
|
|
53
|
-
const random = (0, util_1.randomFromArrayFactory)(takenIds);
|
|
54
|
-
const factory = (0, util_1.idBatchFactory)({
|
|
55
|
-
verifier: mockItemIdBatchVerifier(f.instance.mockItemCollection),
|
|
56
|
-
factory: (count) => {
|
|
57
|
-
const ids = [random(), ...idFactory(count)];
|
|
58
|
-
return ids;
|
|
59
|
-
}
|
|
60
|
-
});
|
|
61
|
-
const idsToMake = 30;
|
|
62
|
-
const result = await factory(idsToMake);
|
|
63
|
-
expect(result).toBeDefined();
|
|
64
|
-
expect((0, util_1.unique)(result).length).toBe(idsToMake);
|
|
65
|
-
expect((0, util_1.unique)(result, takenIds).length).toBe(idsToMake);
|
|
66
|
-
});
|
|
67
|
-
});
|
|
68
|
-
describe('mockItemUser', () => {
|
|
69
|
-
let testUserId;
|
|
70
|
-
let allMockUserItems;
|
|
71
|
-
beforeEach(async () => {
|
|
72
|
-
testUserId = 'userid' + Math.ceil(Math.random() * 100000);
|
|
73
|
-
const results = await Promise.all(items.map((parent) => (0, firebase_1.makeDocuments)(f.instance.mockItemUserCollection(parent).documentAccessor(), {
|
|
74
|
-
count: 1,
|
|
75
|
-
newDocument: (x) => x.loadDocumentForId(testUserId),
|
|
76
|
-
init: (i) => {
|
|
77
|
-
return {
|
|
78
|
-
uid: '',
|
|
79
|
-
name: `name ${i}`
|
|
80
|
-
};
|
|
81
|
-
}
|
|
82
|
-
})));
|
|
83
|
-
allMockUserItems = results.flat();
|
|
84
|
-
});
|
|
85
|
-
describe('utils', () => {
|
|
86
|
-
describe('iterate load firestore utilities', () => {
|
|
87
|
-
describe('loadAllFirestoreDocumentSnapshotPairs()', () => {
|
|
88
|
-
it('should iterate batches of snapshot pairs.', async () => {
|
|
89
|
-
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
90
|
-
const mockUserItemsVisited = new Set();
|
|
91
|
-
const result = await (0, firebase_1.loadAllFirestoreDocumentSnapshotPairs)({
|
|
92
|
-
documentAccessor,
|
|
93
|
-
iterateSnapshotPairsBatch: async (x) => {
|
|
94
|
-
x.forEach((y) => mockUserItemsVisited.add(y.document.key));
|
|
95
|
-
const pair = x[0];
|
|
96
|
-
expect(pair.data).toBeDefined();
|
|
97
|
-
expect(pair.snapshot).toBeDefined();
|
|
98
|
-
expect(pair.document).toBeDefined();
|
|
99
|
-
},
|
|
100
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
101
|
-
constraintsFactory: [] // no constraints
|
|
102
|
-
});
|
|
103
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
104
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
105
|
-
expect(result.snapshotPairs.length).toBe(allMockUserItems.length);
|
|
106
|
-
expect(result.snapshotPairs[0].data).toBeDefined();
|
|
107
|
-
expect(result.snapshotPairs[0].document).toBeDefined();
|
|
108
|
-
expect(result.snapshotPairs[0].snapshot).toBeDefined();
|
|
109
|
-
});
|
|
110
|
-
});
|
|
111
|
-
describe('loadAllFirestoreDocumentSnapshot()', () => {
|
|
112
|
-
it('should iterate batches of snapshot pairs.', async () => {
|
|
113
|
-
const mockUserItemsVisited = new Set();
|
|
114
|
-
const result = await (0, firebase_1.loadAllFirestoreDocumentSnapshot)({
|
|
115
|
-
iterateSnapshotsForCheckpoint: async (x) => {
|
|
116
|
-
x.forEach((y) => mockUserItemsVisited.add(y.ref.path));
|
|
117
|
-
const snapshot = x[0];
|
|
118
|
-
expect(snapshot.ref).toBeDefined();
|
|
119
|
-
expect(snapshot.data()).toBeDefined();
|
|
120
|
-
},
|
|
121
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
122
|
-
constraintsFactory: [] // no constraints
|
|
123
|
-
});
|
|
124
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
125
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
126
|
-
expect(result.snapshots.length).toBe(allMockUserItems.length);
|
|
127
|
-
expect(result.snapshots[0].ref).toBeDefined();
|
|
128
|
-
expect(result.snapshots[0].data()).toBeDefined();
|
|
129
|
-
});
|
|
130
|
-
});
|
|
131
|
-
});
|
|
132
|
-
describe('iterate firestore utilities', () => {
|
|
133
|
-
describe('iterateFirestoreDocumentSnapshotPairs()', () => {
|
|
134
|
-
it('should iterate across all mock users by each snapshot pair.', async () => {
|
|
135
|
-
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
136
|
-
const mockUserItemsVisited = new Set();
|
|
137
|
-
const batchSize = 2;
|
|
138
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshotPairs)({
|
|
139
|
-
batchSize,
|
|
140
|
-
handleRepeatCursor: false, // exit immediately if the cursor is visited again
|
|
141
|
-
filterCheckpointSnapshots: async (x) => {
|
|
142
|
-
return x;
|
|
143
|
-
},
|
|
144
|
-
iterateSnapshotPair: async (x) => {
|
|
145
|
-
expect(x.data).toBeDefined();
|
|
146
|
-
expect(x.snapshot).toBeDefined();
|
|
147
|
-
expect(x.document).toBeDefined();
|
|
148
|
-
const key = x.document.key;
|
|
149
|
-
if (mockUserItemsVisited.has(key)) {
|
|
150
|
-
throw new Error('encountered repeat key');
|
|
151
|
-
}
|
|
152
|
-
else {
|
|
153
|
-
mockUserItemsVisited.add(key);
|
|
154
|
-
}
|
|
155
|
-
},
|
|
156
|
-
useCheckpointResult: async (x) => {
|
|
157
|
-
if (x.docSnapshots.length > 0) {
|
|
158
|
-
expect(x.results[0].snapshots.length).toBeLessThanOrEqual(batchSize);
|
|
159
|
-
}
|
|
160
|
-
},
|
|
161
|
-
documentAccessor,
|
|
162
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
163
|
-
constraintsFactory: [] // no constraints
|
|
164
|
-
});
|
|
165
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
166
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
167
|
-
});
|
|
168
|
-
describe('1 item exists', () => {
|
|
169
|
-
let onlyItem;
|
|
170
|
-
beforeEach(async () => {
|
|
171
|
-
onlyItem = allMockUserItems.pop();
|
|
172
|
-
await Promise.all(allMockUserItems.map((x) => x.accessor.delete()));
|
|
173
|
-
allMockUserItems = [onlyItem];
|
|
174
|
-
});
|
|
175
|
-
it('should iterate the single item', async () => {
|
|
176
|
-
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
177
|
-
const mockUserItemsVisited = new Set();
|
|
178
|
-
expect(allMockUserItems).toHaveLength(1);
|
|
179
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshotPairs)({
|
|
180
|
-
iterateSnapshotPair: async (x) => {
|
|
181
|
-
expect(x.data).toBeDefined();
|
|
182
|
-
expect(x.snapshot).toBeDefined();
|
|
183
|
-
expect(x.document).toBeDefined();
|
|
184
|
-
const key = x.document.key;
|
|
185
|
-
if (mockUserItemsVisited.has(key)) {
|
|
186
|
-
throw new Error('encountered repeat key');
|
|
187
|
-
}
|
|
188
|
-
else {
|
|
189
|
-
mockUserItemsVisited.add(key);
|
|
190
|
-
}
|
|
191
|
-
},
|
|
192
|
-
documentAccessor,
|
|
193
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
194
|
-
batchSize: null,
|
|
195
|
-
limitPerCheckpoint: 200,
|
|
196
|
-
totalSnapshotsLimit: 100,
|
|
197
|
-
performTasksConfig: {
|
|
198
|
-
maxParallelTasks: 20
|
|
199
|
-
},
|
|
200
|
-
constraintsFactory: [] // no constraints
|
|
201
|
-
});
|
|
202
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
203
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
204
|
-
});
|
|
205
|
-
// TODO(TEST): Case where a document was visited twice via iteration after it was updated. Assumed
|
|
206
|
-
// to occur when the updated item matches an "or" case or other value when using "in". Cannot
|
|
207
|
-
// reproduce at the moment.
|
|
208
|
-
/*
|
|
209
|
-
describe('scenario', () => {
|
|
210
|
-
|
|
211
|
-
it('should visit the item twice if it is updated and matches a different filter', async () => {
|
|
212
|
-
const onlyItemValue = await onlyItem.snapshotData() as MockItemUser;
|
|
213
|
-
const nameToChangeTo = `${onlyItemValue.name}-changed`;
|
|
214
|
-
const namesToFilter = [onlyItemValue.name, nameToChangeTo];
|
|
215
|
-
|
|
216
|
-
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
217
|
-
const mockUserItemsVisited = new Set<MockItemUserKey>();
|
|
218
|
-
let updates = 0;
|
|
219
|
-
|
|
220
|
-
expect(allMockUserItems).toHaveLength(1);
|
|
221
|
-
|
|
222
|
-
const result = await iterateFirestoreDocumentSnapshotPairs({
|
|
223
|
-
iterateSnapshotPair: async (x) => {
|
|
224
|
-
expect(x.data).toBeDefined();
|
|
225
|
-
expect(x.snapshot).toBeDefined();
|
|
226
|
-
expect(x.document).toBeDefined();
|
|
227
|
-
|
|
228
|
-
await x.document.update({ name: nameToChangeTo });
|
|
229
|
-
updates += 1;
|
|
230
|
-
|
|
231
|
-
const key = x.document.key;
|
|
232
|
-
mockUserItemsVisited.add(key);
|
|
233
|
-
},
|
|
234
|
-
documentAccessor,
|
|
235
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
236
|
-
batchSize: null,
|
|
237
|
-
limitPerCheckpoint: 200,
|
|
238
|
-
totalSnapshotsLimit: 100,
|
|
239
|
-
performTasksConfig: {
|
|
240
|
-
maxParallelTasks: 20
|
|
241
|
-
},
|
|
242
|
-
constraintsFactory: () => [where<MockItemUser>('name', 'in', namesToFilter)],
|
|
243
|
-
});
|
|
244
|
-
|
|
245
|
-
expect(updates).toBe(2);
|
|
246
|
-
expect(result.totalSnapshotsVisited).toBe(2);
|
|
247
|
-
expect(mockUserItemsVisited.size).toBe(1);
|
|
248
|
-
});
|
|
249
|
-
|
|
250
|
-
});
|
|
251
|
-
*/
|
|
252
|
-
});
|
|
253
|
-
describe('0 items exists', () => {
|
|
254
|
-
beforeEach(async () => {
|
|
255
|
-
await Promise.all(allMockUserItems.map((x) => x.accessor.delete()));
|
|
256
|
-
});
|
|
257
|
-
it('should iterate no items', async () => {
|
|
258
|
-
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
259
|
-
const mockUserItemsVisited = new Set();
|
|
260
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshotPairs)({
|
|
261
|
-
iterateSnapshotPair: async (x) => {
|
|
262
|
-
expect(x.data).toBeDefined();
|
|
263
|
-
expect(x.snapshot).toBeDefined();
|
|
264
|
-
expect(x.document).toBeDefined();
|
|
265
|
-
const key = x.document.key;
|
|
266
|
-
if (mockUserItemsVisited.has(key)) {
|
|
267
|
-
throw new Error('encountered repeat key');
|
|
268
|
-
}
|
|
269
|
-
else {
|
|
270
|
-
mockUserItemsVisited.add(key);
|
|
271
|
-
}
|
|
272
|
-
},
|
|
273
|
-
documentAccessor,
|
|
274
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
275
|
-
constraintsFactory: [] // no constraints
|
|
276
|
-
});
|
|
277
|
-
expect(result.totalSnapshotsVisited).toBe(0);
|
|
278
|
-
expect(mockUserItemsVisited.size).toBe(0);
|
|
279
|
-
});
|
|
280
|
-
});
|
|
281
|
-
});
|
|
282
|
-
describe('iterateFirestoreDocumentSnapshots()', () => {
|
|
283
|
-
it('should iterate across all mock users by each snapshot.', async () => {
|
|
284
|
-
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
285
|
-
const mockUserItemsVisited = new Set();
|
|
286
|
-
const batchSize = 2;
|
|
287
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshots)({
|
|
288
|
-
batchSize,
|
|
289
|
-
iterateSnapshot: async (x) => {
|
|
290
|
-
const key = x.ref.path;
|
|
291
|
-
if (mockUserItemsVisited.has(key)) {
|
|
292
|
-
throw new Error('encountered repeat key');
|
|
293
|
-
}
|
|
294
|
-
else {
|
|
295
|
-
mockUserItemsVisited.add(key);
|
|
296
|
-
}
|
|
297
|
-
},
|
|
298
|
-
useCheckpointResult: async (x) => {
|
|
299
|
-
if (x.docSnapshots.length > 0) {
|
|
300
|
-
expect(x.results[0].snapshots.length).toBeLessThanOrEqual(batchSize);
|
|
301
|
-
}
|
|
302
|
-
},
|
|
303
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
304
|
-
constraintsFactory: [] // no constraints
|
|
305
|
-
});
|
|
306
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
307
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
308
|
-
});
|
|
309
|
-
});
|
|
310
|
-
describe('iterateFirestoreDocumentSnapshotPairBatches()', () => {
|
|
311
|
-
it('should iterate batches of snapshot pairs.', async () => {
|
|
312
|
-
const documentAccessor = f.instance.mockItemUserCollectionGroup.documentAccessor();
|
|
313
|
-
const mockUserItemsVisited = new Set();
|
|
314
|
-
const batchSize = 2;
|
|
315
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshotPairBatches)({
|
|
316
|
-
documentAccessor,
|
|
317
|
-
batchSize, // use specific batch size
|
|
318
|
-
iterateSnapshotPairsBatch: async (x) => {
|
|
319
|
-
expect(x.length).toBeLessThanOrEqual(batchSize);
|
|
320
|
-
const pair = x[0];
|
|
321
|
-
expect(pair.data).toBeDefined();
|
|
322
|
-
expect(pair.snapshot).toBeDefined();
|
|
323
|
-
expect(pair.document).toBeDefined();
|
|
324
|
-
},
|
|
325
|
-
useCheckpointResult: async (x) => {
|
|
326
|
-
x.docSnapshots.forEach((y) => mockUserItemsVisited.add(y.ref.path));
|
|
327
|
-
},
|
|
328
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
329
|
-
constraintsFactory: [] // no constraints
|
|
330
|
-
});
|
|
331
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
332
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
333
|
-
});
|
|
334
|
-
});
|
|
335
|
-
describe('iterateFirestoreDocumentSnapshotBatches()', () => {
|
|
336
|
-
it('should iterate batches of snapshots.', async () => {
|
|
337
|
-
const mockUserItemsVisited = new Set();
|
|
338
|
-
const batchSize = 2;
|
|
339
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshotBatches)({
|
|
340
|
-
batchSize, // use specific batch size
|
|
341
|
-
iterateSnapshotBatch: async (x) => {
|
|
342
|
-
expect(x.length).toBeLessThanOrEqual(batchSize);
|
|
343
|
-
},
|
|
344
|
-
useCheckpointResult: async (x) => {
|
|
345
|
-
x.docSnapshots.forEach((y) => mockUserItemsVisited.add(y.ref.path));
|
|
346
|
-
},
|
|
347
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
348
|
-
constraintsFactory: [] // no constraints
|
|
349
|
-
});
|
|
350
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
351
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
352
|
-
});
|
|
353
|
-
describe('limitPerCheckpoint', () => {
|
|
354
|
-
describe('limitPerCheckpoint = 0', () => {
|
|
355
|
-
it('should not iterate any batches', async () => {
|
|
356
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshotBatches)({
|
|
357
|
-
limitPerCheckpoint: 0,
|
|
358
|
-
iterateSnapshotBatch: async (x) => {
|
|
359
|
-
expect(x.length).toBe(0);
|
|
360
|
-
},
|
|
361
|
-
useCheckpointResult: async (x) => {
|
|
362
|
-
expect(x.docSnapshots.length).toBe(0);
|
|
363
|
-
},
|
|
364
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
365
|
-
constraintsFactory: [] // no constraints
|
|
366
|
-
});
|
|
367
|
-
expect(result.totalSnapshotsVisited).toBe(0);
|
|
368
|
-
expect(result.totalSnapshotsLimitReached).toBe(true);
|
|
369
|
-
});
|
|
370
|
-
});
|
|
371
|
-
});
|
|
372
|
-
describe('maxParallelCheckpoints>1', () => {
|
|
373
|
-
it('should process the checkpoints in parallel.', async () => {
|
|
374
|
-
const mockUserItemsVisited = new Set();
|
|
375
|
-
const batchSize = 1;
|
|
376
|
-
const maxParallelCheckpoints = 4;
|
|
377
|
-
let currentRunningTasks = 0;
|
|
378
|
-
let maxRunningTasks = 0;
|
|
379
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshotBatches)({
|
|
380
|
-
batchSize, // use specific batch size
|
|
381
|
-
limitPerCheckpoint: 1,
|
|
382
|
-
maxParallelCheckpoints, // do four checkpoints in parallel
|
|
383
|
-
iterateSnapshotBatch: async (x, batchIndex) => {
|
|
384
|
-
currentRunningTasks += 1;
|
|
385
|
-
await (0, util_1.waitForMs)(1000);
|
|
386
|
-
maxRunningTasks = Math.max(maxRunningTasks, currentRunningTasks);
|
|
387
|
-
currentRunningTasks -= 1;
|
|
388
|
-
},
|
|
389
|
-
useCheckpointResult: async (x) => {
|
|
390
|
-
x.docSnapshots.forEach((y) => mockUserItemsVisited.add(y.ref.path));
|
|
391
|
-
},
|
|
392
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
393
|
-
constraintsFactory: [] // no constraints
|
|
394
|
-
});
|
|
395
|
-
expect(maxRunningTasks).toBe(maxParallelCheckpoints);
|
|
396
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
397
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
398
|
-
});
|
|
399
|
-
});
|
|
400
|
-
describe('batchSize=null', () => {
|
|
401
|
-
it('should iterate with a single batch', async () => {
|
|
402
|
-
const mockUserItemsVisited = new Set();
|
|
403
|
-
const batchSize = null;
|
|
404
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshotBatches)({
|
|
405
|
-
batchSize, // use specific batch size
|
|
406
|
-
iterateSnapshotBatch: async (x) => {
|
|
407
|
-
expect(x.length).toBe(allMockUserItems.length);
|
|
408
|
-
},
|
|
409
|
-
useCheckpointResult: async (x) => {
|
|
410
|
-
x.docSnapshots.forEach((y) => mockUserItemsVisited.add(y.ref.path));
|
|
411
|
-
},
|
|
412
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
413
|
-
constraintsFactory: [] // no constraints
|
|
414
|
-
});
|
|
415
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
416
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
417
|
-
});
|
|
418
|
-
});
|
|
419
|
-
describe('batchSizeForSnapshots: () => null', () => {
|
|
420
|
-
it('should iterate with a single batch', async () => {
|
|
421
|
-
const mockUserItemsVisited = new Set();
|
|
422
|
-
const result = await (0, firebase_1.iterateFirestoreDocumentSnapshotBatches)({
|
|
423
|
-
batchSizeForSnapshots: () => null,
|
|
424
|
-
iterateSnapshotBatch: async (x) => {
|
|
425
|
-
expect(x.length).toBe(allMockUserItems.length);
|
|
426
|
-
},
|
|
427
|
-
useCheckpointResult: async (x) => {
|
|
428
|
-
x.docSnapshots.forEach((y) => mockUserItemsVisited.add(y.ref.path));
|
|
429
|
-
},
|
|
430
|
-
queryFactory: f.instance.mockItemUserCollectionGroup,
|
|
431
|
-
constraintsFactory: [] // no constraints
|
|
432
|
-
});
|
|
433
|
-
expect(result.totalSnapshotsVisited).toBe(allMockUserItems.length);
|
|
434
|
-
expect(mockUserItemsVisited.size).toBe(allMockUserItems.length);
|
|
435
|
-
});
|
|
436
|
-
});
|
|
437
|
-
});
|
|
438
|
-
});
|
|
439
|
-
});
|
|
440
|
-
describe('collection group', () => {
|
|
441
|
-
describe('query', () => {
|
|
442
|
-
describe('constraints', () => {
|
|
443
|
-
describe('where', () => {
|
|
444
|
-
it('should return the documents matching the input uid', async () => {
|
|
445
|
-
const result = await f.instance.mockItemUserCollectionGroup.query((0, firebase_1.where)('uid', '==', testUserId)).getDocs();
|
|
446
|
-
expect(result.docs.length).toBe(testDocumentCount);
|
|
447
|
-
result.docs.forEach((x) => {
|
|
448
|
-
expect(x.data().uid).toBe(testUserId);
|
|
449
|
-
});
|
|
450
|
-
});
|
|
451
|
-
});
|
|
452
|
-
});
|
|
453
|
-
});
|
|
454
|
-
});
|
|
455
|
-
});
|
|
456
|
-
describe('nested items', () => {
|
|
457
|
-
const subItemCountPerItem = 2;
|
|
458
|
-
const totalSubItemsCount = subItemCountPerItem * testDocumentCount;
|
|
459
|
-
let parentA;
|
|
460
|
-
let querySubItems;
|
|
461
|
-
let allSubItems;
|
|
462
|
-
beforeEach(async () => {
|
|
463
|
-
querySubItems = f.instance.mockItemSubItemCollectionGroup.query;
|
|
464
|
-
parentA = items[0];
|
|
465
|
-
const results = await Promise.all(items.map((parent) => (0, firebase_1.makeDocuments)(f.instance.mockItemSubItemCollection(parent).documentAccessor(), {
|
|
466
|
-
count: subItemCountPerItem,
|
|
467
|
-
init: (i) => {
|
|
468
|
-
return {
|
|
469
|
-
value: i
|
|
470
|
-
};
|
|
471
|
-
}
|
|
472
|
-
})));
|
|
473
|
-
allSubItems = results.flat();
|
|
474
|
-
});
|
|
475
|
-
describe('sub sub item', () => {
|
|
476
|
-
const deepSubItemCountPerItem = 1;
|
|
477
|
-
const totalDeepSubItemsCount = deepSubItemCountPerItem * totalSubItemsCount;
|
|
478
|
-
const totalDeepSubItemsPerMockItem = subItemCountPerItem * deepSubItemCountPerItem;
|
|
479
|
-
let deepSubItemParentA;
|
|
480
|
-
let queryDeepSubItems;
|
|
481
|
-
let allDeepSubItems;
|
|
482
|
-
beforeEach(async () => {
|
|
483
|
-
queryDeepSubItems = f.instance.mockItemSubItemDeepCollectionGroup.query;
|
|
484
|
-
deepSubItemParentA = allSubItems[0];
|
|
485
|
-
const results = await Promise.all(allSubItems.map((parent) => (0, firebase_1.makeDocuments)(f.instance.mockItemSubItemDeepCollection(parent).documentAccessor(), {
|
|
486
|
-
count: deepSubItemCountPerItem,
|
|
487
|
-
init: (i) => {
|
|
488
|
-
return {
|
|
489
|
-
value: i
|
|
490
|
-
};
|
|
491
|
-
}
|
|
492
|
-
})));
|
|
493
|
-
allDeepSubItems = results.flat();
|
|
494
|
-
});
|
|
495
|
-
// tests querying for all nested items under a parent
|
|
496
|
-
it('querying for only items belonging to mock item parentA', async () => {
|
|
497
|
-
const result = await queryDeepSubItems((0, mock_1.allChildMockItemSubItemDeepsWithinMockItem)(parentA.documentRef)).getDocs();
|
|
498
|
-
expect(result.docs.length).toBe(totalDeepSubItemsPerMockItem);
|
|
499
|
-
result.docs.forEach((x) => expect(x.ref.parent?.parent?.parent?.parent?.path).toBe(parentA.documentRef.path));
|
|
500
|
-
});
|
|
501
|
-
// TODO(TEST): Add tests for allChildDocumentsUnderRelativePath
|
|
502
|
-
});
|
|
503
|
-
describe('sub item', () => {
|
|
504
|
-
describe('collection group', () => {
|
|
505
|
-
describe('query', () => {
|
|
506
|
-
it('should return sub items', async () => {
|
|
507
|
-
const result = await querySubItems().getDocs();
|
|
508
|
-
expect(result.docs.length).toBe(totalSubItemsCount);
|
|
509
|
-
});
|
|
510
|
-
describe('constraints', () => {
|
|
511
|
-
describe('where', () => {
|
|
512
|
-
it('should return the documents matching the query.', async () => {
|
|
513
|
-
const value = 0;
|
|
514
|
-
const result = await querySubItems((0, firebase_1.where)('value', '==', value)).getDocs();
|
|
515
|
-
expect(result.docs.length).toBe(testDocumentCount);
|
|
516
|
-
expect(result.docs[0].data().value).toBe(value);
|
|
517
|
-
const ref = result.docs[0].ref;
|
|
518
|
-
expect(ref).toBeDefined();
|
|
519
|
-
expect(ref.parent).toBeDefined();
|
|
520
|
-
});
|
|
521
|
-
});
|
|
522
|
-
describe('whereDocumentId', () => {
|
|
523
|
-
(0, test_1.itShouldFail)('to query on collection groups.', async () => {
|
|
524
|
-
// https://stackoverflow.com/questions/56149601/firestore-collection-group-query-on-documentid
|
|
525
|
-
const targetId = 'targetid';
|
|
526
|
-
/*
|
|
527
|
-
const results = await Promise.all(
|
|
528
|
-
allSubItems.map((parent: MockItemSubItemDocument) =>
|
|
529
|
-
makeDocuments(f.instance.mockItemSubItemDeepCollection(parent).documentAccessor(), {
|
|
530
|
-
count: 1,
|
|
531
|
-
newDocument: (x) => x.loadDocumentForId(targetId),
|
|
532
|
-
init: (i) => {
|
|
533
|
-
return {
|
|
534
|
-
value: i
|
|
535
|
-
};
|
|
536
|
-
}
|
|
537
|
-
})
|
|
538
|
-
)
|
|
539
|
-
);
|
|
540
|
-
*/
|
|
541
|
-
await (0, test_1.expectFail)(() => querySubItems((0, firebase_1.whereDocumentId)('==', targetId)).getDocs());
|
|
542
|
-
});
|
|
543
|
-
});
|
|
544
|
-
});
|
|
545
|
-
describe('streamDocs()', () => {
|
|
546
|
-
let sub;
|
|
547
|
-
beforeEach(() => {
|
|
548
|
-
sub = new rxjs_1.SubscriptionObject();
|
|
549
|
-
});
|
|
550
|
-
afterEach(() => {
|
|
551
|
-
sub.destroy();
|
|
552
|
-
});
|
|
553
|
-
it('should emit when the query results update (an item is added).', (done) => {
|
|
554
|
-
const itemsToAdd = 1;
|
|
555
|
-
let addCompleted = false;
|
|
556
|
-
let addSeen = false;
|
|
557
|
-
function tryComplete() {
|
|
558
|
-
if (addSeen && addCompleted) {
|
|
559
|
-
done();
|
|
560
|
-
}
|
|
561
|
-
}
|
|
562
|
-
sub.subscription = querySubItems()
|
|
563
|
-
.streamDocs()
|
|
564
|
-
.pipe((0, rxjs_2.filter)((x) => x.docs.length > allSubItems.length))
|
|
565
|
-
.subscribe((results) => {
|
|
566
|
-
addSeen = true;
|
|
567
|
-
expect(results.docs.length).toBe(allSubItems.length + itemsToAdd);
|
|
568
|
-
tryComplete();
|
|
569
|
-
});
|
|
570
|
-
// add one item
|
|
571
|
-
(0, firebase_1.makeDocuments)(f.instance.mockItemSubItemCollection(parentA).documentAccessor(), {
|
|
572
|
-
count: itemsToAdd,
|
|
573
|
-
init: (i) => {
|
|
574
|
-
return {
|
|
575
|
-
value: i
|
|
576
|
-
};
|
|
577
|
-
}
|
|
578
|
-
}).then(() => {
|
|
579
|
-
addCompleted = true;
|
|
580
|
-
tryComplete();
|
|
581
|
-
});
|
|
582
|
-
});
|
|
583
|
-
it('should emit when the query results update (an item is removed).', (done) => {
|
|
584
|
-
const itemsToRemove = 1;
|
|
585
|
-
let deleteCompleted = false;
|
|
586
|
-
let deleteSeen = false;
|
|
587
|
-
function tryComplete() {
|
|
588
|
-
if (deleteSeen && deleteCompleted) {
|
|
589
|
-
done();
|
|
590
|
-
}
|
|
591
|
-
}
|
|
592
|
-
sub.subscription = querySubItems()
|
|
593
|
-
.streamDocs()
|
|
594
|
-
.pipe((0, rxjs_2.filter)((x) => x.docs.length < allSubItems.length))
|
|
595
|
-
.subscribe((results) => {
|
|
596
|
-
deleteSeen = true;
|
|
597
|
-
expect(results.docs.length).toBe(allSubItems.length - itemsToRemove);
|
|
598
|
-
tryComplete();
|
|
599
|
-
});
|
|
600
|
-
allSubItems[0].accessor.exists().then((exists) => {
|
|
601
|
-
expect(exists).toBe(true);
|
|
602
|
-
// remove one item
|
|
603
|
-
return allSubItems[0].accessor.delete().then(() => {
|
|
604
|
-
deleteCompleted = true;
|
|
605
|
-
tryComplete();
|
|
606
|
-
});
|
|
607
|
-
});
|
|
608
|
-
});
|
|
609
|
-
});
|
|
610
|
-
});
|
|
611
|
-
});
|
|
612
|
-
});
|
|
613
|
-
});
|
|
614
|
-
describe('queryDocument', () => {
|
|
615
|
-
let queryDocument;
|
|
616
|
-
beforeEach(async () => {
|
|
617
|
-
queryDocument = f.instance.firestoreCollection.queryDocument;
|
|
618
|
-
});
|
|
619
|
-
describe('filter()', () => {
|
|
620
|
-
it('should apply the filter to the query', async () => {
|
|
621
|
-
const results = (await queryDocument()
|
|
622
|
-
.filter((0, firebase_1.where)('tags', 'array-contains', EVEN_TAG))
|
|
623
|
-
.getDocSnapshotDataPairs());
|
|
624
|
-
expect(results).toBeDefined();
|
|
625
|
-
results.forEach((result) => {
|
|
626
|
-
expect(result.data).toBeDefined();
|
|
627
|
-
expect(result.data?.tags).toContain(EVEN_TAG);
|
|
628
|
-
expect(result.document).toBeDefined();
|
|
629
|
-
expect(result.document instanceof mock_1.MockItemDocument).toBe(true);
|
|
630
|
-
expect(result.snapshot).toBeDefined();
|
|
631
|
-
expect(result.snapshot.data()).toBeDefined();
|
|
632
|
-
expect(result.snapshot.ref).toBeDefined();
|
|
633
|
-
expect(result.snapshot.id).toBe(result.document.id);
|
|
634
|
-
});
|
|
635
|
-
});
|
|
636
|
-
it('should add more filters to the existing query', async () => {
|
|
637
|
-
const results = (await queryDocument()
|
|
638
|
-
.filter((0, firebase_1.where)('tags', 'array-contains', EVEN_TAG))
|
|
639
|
-
.filter((0, firebase_1.where)('number', '>=', 4))
|
|
640
|
-
.getDocSnapshotDataPairs());
|
|
641
|
-
expect(results).toBeDefined();
|
|
642
|
-
expect(results.length).toBe(1);
|
|
643
|
-
results.forEach((result) => {
|
|
644
|
-
expect(result.data).toBeDefined();
|
|
645
|
-
expect(result.data?.tags).toContain(EVEN_TAG);
|
|
646
|
-
expect(result.data?.number).toBeGreaterThanOrEqual(4);
|
|
647
|
-
expect(result.document).toBeDefined();
|
|
648
|
-
expect(result.document instanceof mock_1.MockItemDocument).toBe(true);
|
|
649
|
-
expect(result.snapshot).toBeDefined();
|
|
650
|
-
expect(result.snapshot.data()).toBeDefined();
|
|
651
|
-
expect(result.snapshot.ref).toBeDefined();
|
|
652
|
-
expect(result.snapshot.id).toBe(result.document.id);
|
|
653
|
-
});
|
|
654
|
-
});
|
|
655
|
-
});
|
|
656
|
-
describe('getFirstDocSnapshotDataPair()', () => {
|
|
657
|
-
it('should return undefined if the query contains nothing', async () => {
|
|
658
|
-
const result = (await queryDocument((0, firebase_1.where)('value', '==', '_DOES_NOT_EXIST_')).getFirstDocSnapshotDataPair());
|
|
659
|
-
expect(result).not.toBeDefined();
|
|
660
|
-
});
|
|
661
|
-
it('should return the first doc that matches if it exists', async () => {
|
|
662
|
-
const result = (await queryDocument().getFirstDocSnapshotDataPair());
|
|
663
|
-
expect(result).toBeDefined();
|
|
664
|
-
expect(result.data).toBeDefined();
|
|
665
|
-
expect(result.document).toBeDefined();
|
|
666
|
-
expect(result.document instanceof mock_1.MockItemDocument).toBe(true);
|
|
667
|
-
expect(result.snapshot).toBeDefined();
|
|
668
|
-
expect(result.snapshot.data()).toBeDefined();
|
|
669
|
-
expect(result.snapshot.ref).toBeDefined();
|
|
670
|
-
expect(result.snapshot.id).toBe(result.document.id);
|
|
671
|
-
});
|
|
672
|
-
});
|
|
673
|
-
describe('getDocSnapshotDataPairs()', () => {
|
|
674
|
-
it('should return an empty array if the query returns nothing', async () => {
|
|
675
|
-
const result = await queryDocument((0, firebase_1.where)('value', '==', '_DOES_NOT_EXIST_')).getDocSnapshotDataPairs();
|
|
676
|
-
expect(result).toBeDefined();
|
|
677
|
-
expect(result.length).toBe(0);
|
|
678
|
-
});
|
|
679
|
-
it('should return the matching results', async () => {
|
|
680
|
-
const results = await queryDocument().getDocSnapshotDataPairs();
|
|
681
|
-
expect(results).toBeDefined();
|
|
682
|
-
expect(results.length).toBeGreaterThan(0);
|
|
683
|
-
results.forEach((result) => {
|
|
684
|
-
expect(result).toBeDefined();
|
|
685
|
-
expect(result.data).toBeDefined();
|
|
686
|
-
expect(result.document).toBeDefined();
|
|
687
|
-
expect(result.document instanceof mock_1.MockItemDocument).toBe(true);
|
|
688
|
-
expect(result.snapshot).toBeDefined();
|
|
689
|
-
expect(result.snapshot.data()).toBeDefined();
|
|
690
|
-
expect(result.snapshot.ref).toBeDefined();
|
|
691
|
-
expect(result.snapshot.id).toBe(result.document.id);
|
|
692
|
-
});
|
|
693
|
-
});
|
|
694
|
-
});
|
|
695
|
-
describe('streamDocs()', () => {
|
|
696
|
-
let sub;
|
|
697
|
-
beforeEach(() => {
|
|
698
|
-
sub = new rxjs_1.SubscriptionObject();
|
|
699
|
-
});
|
|
700
|
-
afterEach(() => {
|
|
701
|
-
sub.destroy();
|
|
702
|
-
});
|
|
703
|
-
it('should emit when the query results update (an item is added).', (done) => {
|
|
704
|
-
const itemsToAdd = 1;
|
|
705
|
-
let addCompleted = false;
|
|
706
|
-
let addSeen = false;
|
|
707
|
-
function tryComplete() {
|
|
708
|
-
if (addSeen && addCompleted) {
|
|
709
|
-
done();
|
|
710
|
-
}
|
|
711
|
-
}
|
|
712
|
-
sub.subscription = queryDocument()
|
|
713
|
-
.streamDocs()
|
|
714
|
-
.pipe((0, rxjs_2.filter)((documents) => documents.length > items.length))
|
|
715
|
-
.subscribe((documents) => {
|
|
716
|
-
addSeen = true;
|
|
717
|
-
expect(documents.length).toBe(items.length + itemsToAdd);
|
|
718
|
-
tryComplete();
|
|
719
|
-
});
|
|
720
|
-
// add one item
|
|
721
|
-
(0, util_1.waitForMs)(10).then(() => (0, firebase_1.makeDocuments)(f.instance.firestoreCollection.documentAccessor(), {
|
|
722
|
-
count: itemsToAdd,
|
|
723
|
-
init: (i) => {
|
|
724
|
-
return {
|
|
725
|
-
value: `${i + items.length}`,
|
|
726
|
-
test: true
|
|
727
|
-
};
|
|
728
|
-
}
|
|
729
|
-
}).then(() => {
|
|
730
|
-
addCompleted = true;
|
|
731
|
-
tryComplete();
|
|
732
|
-
}));
|
|
733
|
-
});
|
|
734
|
-
it('should emit when the query results update (an item is removed).', (done) => {
|
|
735
|
-
const itemsToRemove = 1;
|
|
736
|
-
let deleteCompleted = false;
|
|
737
|
-
let deleteSeen = false;
|
|
738
|
-
function tryComplete() {
|
|
739
|
-
if (deleteSeen && deleteCompleted) {
|
|
740
|
-
done();
|
|
741
|
-
}
|
|
742
|
-
}
|
|
743
|
-
sub.subscription = queryDocument()
|
|
744
|
-
.streamDocs()
|
|
745
|
-
.pipe((0, rxjs_2.skip)(1))
|
|
746
|
-
.subscribe((documents) => {
|
|
747
|
-
deleteSeen = true;
|
|
748
|
-
expect(documents.length).toBe(items.length - itemsToRemove);
|
|
749
|
-
tryComplete();
|
|
750
|
-
});
|
|
751
|
-
(0, util_1.waitForMs)(10).then(() => items[0].exists().then((exists) => {
|
|
752
|
-
expect(exists).toBe(true);
|
|
753
|
-
// remove one item
|
|
754
|
-
return items[0].accessor.delete().then(() => {
|
|
755
|
-
deleteCompleted = true;
|
|
756
|
-
tryComplete();
|
|
757
|
-
});
|
|
758
|
-
}));
|
|
759
|
-
});
|
|
760
|
-
});
|
|
761
|
-
describe('streamDocSnapshotDataPairs()', () => {
|
|
762
|
-
let sub;
|
|
763
|
-
beforeEach(() => {
|
|
764
|
-
sub = new rxjs_1.SubscriptionObject();
|
|
765
|
-
});
|
|
766
|
-
afterEach(() => {
|
|
767
|
-
sub.destroy();
|
|
768
|
-
});
|
|
769
|
-
it('should emit when the query results update (an item is added).', (done) => {
|
|
770
|
-
const itemsToAdd = 1;
|
|
771
|
-
let addCompleted = false;
|
|
772
|
-
let addSeen = false;
|
|
773
|
-
function tryComplete() {
|
|
774
|
-
if (addSeen && addCompleted) {
|
|
775
|
-
done();
|
|
776
|
-
}
|
|
777
|
-
}
|
|
778
|
-
sub.subscription = queryDocument()
|
|
779
|
-
.streamDocSnapshotDataPairs()
|
|
780
|
-
.pipe((0, rxjs_2.filter)((documents) => documents.length > items.length))
|
|
781
|
-
.subscribe((documents) => {
|
|
782
|
-
addSeen = true;
|
|
783
|
-
expect(documents.length).toBe(items.length + itemsToAdd);
|
|
784
|
-
documents.forEach((x) => {
|
|
785
|
-
// validate each document returned
|
|
786
|
-
expect(x.data).toBeDefined();
|
|
787
|
-
expect(x.document).toBeDefined();
|
|
788
|
-
expect(x.document instanceof mock_1.MockItemDocument).toBe(true);
|
|
789
|
-
expect(x.snapshot).toBeDefined();
|
|
790
|
-
expect(x.snapshot.data()).toBeDefined();
|
|
791
|
-
expect(x.snapshot.ref).toBeDefined();
|
|
792
|
-
expect(x.snapshot.id).toBe(x.document.id);
|
|
793
|
-
});
|
|
794
|
-
tryComplete();
|
|
795
|
-
});
|
|
796
|
-
// add one item
|
|
797
|
-
(0, util_1.waitForMs)(10).then(() => (0, firebase_1.makeDocuments)(f.instance.firestoreCollection.documentAccessor(), {
|
|
798
|
-
count: itemsToAdd,
|
|
799
|
-
init: (i) => {
|
|
800
|
-
return {
|
|
801
|
-
value: `${i + items.length}`,
|
|
802
|
-
test: true
|
|
803
|
-
};
|
|
804
|
-
}
|
|
805
|
-
}).then(() => {
|
|
806
|
-
addCompleted = true;
|
|
807
|
-
tryComplete();
|
|
808
|
-
}));
|
|
809
|
-
});
|
|
810
|
-
it('should emit when the query results update (an item is removed).', (done) => {
|
|
811
|
-
const itemsToRemove = 1;
|
|
812
|
-
let deleteCompleted = false;
|
|
813
|
-
let deleteSeen = false;
|
|
814
|
-
function tryComplete() {
|
|
815
|
-
if (deleteSeen && deleteCompleted) {
|
|
816
|
-
done();
|
|
817
|
-
}
|
|
818
|
-
}
|
|
819
|
-
sub.subscription = queryDocument()
|
|
820
|
-
.streamDocs()
|
|
821
|
-
.pipe((0, rxjs_2.skip)(1))
|
|
822
|
-
.subscribe((documents) => {
|
|
823
|
-
deleteSeen = true;
|
|
824
|
-
expect(documents.length).toBe(items.length - itemsToRemove);
|
|
825
|
-
tryComplete();
|
|
826
|
-
});
|
|
827
|
-
(0, util_1.waitForMs)(10).then(() => items[0].exists().then((exists) => {
|
|
828
|
-
expect(exists).toBe(true);
|
|
829
|
-
// remove one item
|
|
830
|
-
return items[0].accessor.delete().then(() => {
|
|
831
|
-
deleteCompleted = true;
|
|
832
|
-
tryComplete();
|
|
833
|
-
});
|
|
834
|
-
}));
|
|
835
|
-
});
|
|
836
|
-
});
|
|
837
|
-
});
|
|
838
|
-
describe('query', () => {
|
|
839
|
-
let query;
|
|
840
|
-
beforeEach(async () => {
|
|
841
|
-
query = f.instance.firestoreCollection.query;
|
|
842
|
-
});
|
|
843
|
-
describe('streamDocs()', () => {
|
|
844
|
-
let sub;
|
|
845
|
-
beforeEach(() => {
|
|
846
|
-
sub = new rxjs_1.SubscriptionObject();
|
|
847
|
-
});
|
|
848
|
-
afterEach(() => {
|
|
849
|
-
sub.destroy();
|
|
850
|
-
});
|
|
851
|
-
it('should emit when the query results update (an item is added).', (done) => {
|
|
852
|
-
const itemsToAdd = 1;
|
|
853
|
-
let addCompleted = false;
|
|
854
|
-
let addSeen = false;
|
|
855
|
-
function tryComplete() {
|
|
856
|
-
if (addSeen && addCompleted) {
|
|
857
|
-
done();
|
|
858
|
-
}
|
|
859
|
-
}
|
|
860
|
-
sub.subscription = query()
|
|
861
|
-
.streamDocs()
|
|
862
|
-
.pipe((0, rxjs_2.filter)((x) => x.docs.length > items.length))
|
|
863
|
-
.subscribe((results) => {
|
|
864
|
-
addSeen = true;
|
|
865
|
-
expect(results.docs.length).toBe(items.length + itemsToAdd);
|
|
866
|
-
tryComplete();
|
|
867
|
-
});
|
|
868
|
-
// add one item
|
|
869
|
-
(0, util_1.waitForMs)(10).then(() => (0, firebase_1.makeDocuments)(f.instance.firestoreCollection.documentAccessor(), {
|
|
870
|
-
count: itemsToAdd,
|
|
871
|
-
init: (i) => {
|
|
872
|
-
return {
|
|
873
|
-
value: `${i + items.length}`,
|
|
874
|
-
test: true
|
|
875
|
-
};
|
|
876
|
-
}
|
|
877
|
-
}).then(() => {
|
|
878
|
-
addCompleted = true;
|
|
879
|
-
tryComplete();
|
|
880
|
-
}));
|
|
881
|
-
});
|
|
882
|
-
it('should emit when the query results update (an item is removed).', (done) => {
|
|
883
|
-
const itemsToRemove = 1;
|
|
884
|
-
let deleteCompleted = false;
|
|
885
|
-
let deleteSeen = false;
|
|
886
|
-
function tryComplete() {
|
|
887
|
-
if (deleteSeen && deleteCompleted) {
|
|
888
|
-
done();
|
|
889
|
-
}
|
|
890
|
-
}
|
|
891
|
-
sub.subscription = query()
|
|
892
|
-
.streamDocs()
|
|
893
|
-
.pipe((0, rxjs_2.skip)(1))
|
|
894
|
-
.subscribe((results) => {
|
|
895
|
-
deleteSeen = true;
|
|
896
|
-
expect(results.docs.length).toBe(items.length - itemsToRemove);
|
|
897
|
-
tryComplete();
|
|
898
|
-
});
|
|
899
|
-
(0, util_1.waitForMs)(10).then(() => items[0].accessor.exists().then((exists) => {
|
|
900
|
-
expect(exists).toBe(true);
|
|
901
|
-
// remove one item
|
|
902
|
-
return items[0].accessor.delete().then(() => {
|
|
903
|
-
deleteCompleted = true;
|
|
904
|
-
tryComplete();
|
|
905
|
-
});
|
|
906
|
-
}));
|
|
907
|
-
});
|
|
908
|
-
});
|
|
909
|
-
describe('constraint', () => {
|
|
910
|
-
describe('limit', () => {
|
|
911
|
-
it('should limit the number of items returned.', async () => {
|
|
912
|
-
const limitCount = 2;
|
|
913
|
-
const unlimited = await query().getDocs();
|
|
914
|
-
expect(unlimited.docs.length).toBe(testDocumentCount);
|
|
915
|
-
const result = await query((0, firebase_1.limit)(limitCount)).getDocs();
|
|
916
|
-
expect(result.docs.length).toBe(limitCount);
|
|
917
|
-
});
|
|
918
|
-
it('should limit the streamed results.', (done) => {
|
|
919
|
-
const limitCount = 2;
|
|
920
|
-
const resultObs = query((0, firebase_1.limit)(limitCount)).streamDocs();
|
|
921
|
-
(0, rxjs_2.from)(resultObs)
|
|
922
|
-
.pipe((0, rxjs_2.first)())
|
|
923
|
-
.subscribe((results) => {
|
|
924
|
-
expect(results.docs.length).toBe(limitCount);
|
|
925
|
-
done();
|
|
926
|
-
});
|
|
927
|
-
});
|
|
928
|
-
it('should limit the number of items counted.', async () => {
|
|
929
|
-
const limitCount = 2;
|
|
930
|
-
const unlimited = await query().countDocs();
|
|
931
|
-
expect(unlimited).toBe(testDocumentCount);
|
|
932
|
-
const result = await query((0, firebase_1.limit)(limitCount)).countDocs();
|
|
933
|
-
expect(result).toBe(limitCount);
|
|
934
|
-
});
|
|
935
|
-
});
|
|
936
|
-
describe('limitToLast', () => {
|
|
937
|
-
it('should limit the number of items returned.', async () => {
|
|
938
|
-
const limitCount = 2;
|
|
939
|
-
const unlimited = await query().getDocs();
|
|
940
|
-
expect(unlimited.docs.length).toBe(testDocumentCount);
|
|
941
|
-
const result = await query((0, firebase_1.orderBy)('value'), (0, firebase_1.limitToLast)(limitCount)).getDocs();
|
|
942
|
-
expect(result.docs.length).toBe(limitCount);
|
|
943
|
-
});
|
|
944
|
-
it('the results should be returned from the end of the list. The results are still in the same order as requested.', async () => {
|
|
945
|
-
const limitCount = 2;
|
|
946
|
-
const result = await query((0, firebase_1.orderBy)('value', 'asc'), (0, firebase_1.limitToLast)(limitCount)).getDocs();
|
|
947
|
-
expect(result.docs.length).toBe(limitCount);
|
|
948
|
-
expect(result.docs[0].data().value).toBe('3');
|
|
949
|
-
expect(result.docs[1].data().value).toBe('4');
|
|
950
|
-
});
|
|
951
|
-
(0, test_1.itShouldFail)('if orderby is not provided.', async () => {
|
|
952
|
-
const limitCount = 2;
|
|
953
|
-
const unlimited = await query().getDocs();
|
|
954
|
-
expect(unlimited.docs.length).toBe(testDocumentCount);
|
|
955
|
-
await (0, test_1.expectFail)(() => query((0, firebase_1.limitToLast)(limitCount)).getDocs());
|
|
956
|
-
});
|
|
957
|
-
it('should stream results.', (done) => {
|
|
958
|
-
const limitCount = 2;
|
|
959
|
-
const resultObs = query((0, firebase_1.orderBy)('value'), (0, firebase_1.limitToLast)(limitCount)).streamDocs();
|
|
960
|
-
(0, rxjs_2.from)(resultObs)
|
|
961
|
-
.pipe((0, rxjs_2.first)())
|
|
962
|
-
.subscribe((results) => {
|
|
963
|
-
expect(results.docs.length).toBe(limitCount);
|
|
964
|
-
done();
|
|
965
|
-
});
|
|
966
|
-
});
|
|
967
|
-
it('should limit the number of items counted.', async () => {
|
|
968
|
-
const limitCount = 2;
|
|
969
|
-
const unlimited = await query().countDocs();
|
|
970
|
-
expect(unlimited).toBe(testDocumentCount);
|
|
971
|
-
const result = await query((0, firebase_1.orderBy)('value'), (0, firebase_1.limitToLast)(limitCount)).countDocs();
|
|
972
|
-
expect(result).toBe(limitCount);
|
|
973
|
-
});
|
|
974
|
-
});
|
|
975
|
-
describe('orderBy', () => {
|
|
976
|
-
it('should return values sorted in ascending order.', async () => {
|
|
977
|
-
const results = await query((0, firebase_1.orderBy)('value', 'asc')).getDocs();
|
|
978
|
-
expect(results.docs[0].data().value).toBe('0');
|
|
979
|
-
});
|
|
980
|
-
it('should return values sorted in descending order.', async () => {
|
|
981
|
-
const results = await query((0, firebase_1.orderBy)('value', 'desc')).getDocs();
|
|
982
|
-
expect(results.docs[0].data().value).toBe(`${items.length - 1}`);
|
|
983
|
-
});
|
|
984
|
-
});
|
|
985
|
-
describe('where', () => {
|
|
986
|
-
describe('==', () => {
|
|
987
|
-
it('should return the documents matching the query.', async () => {
|
|
988
|
-
const value = '0';
|
|
989
|
-
const result = await query((0, firebase_1.where)('value', '==', value)).getDocs();
|
|
990
|
-
expect(result.docs.length).toBe(1);
|
|
991
|
-
expect(result.docs[0].data().value).toBe(value);
|
|
992
|
-
});
|
|
993
|
-
it('should return the count of the documents matching the query.', async () => {
|
|
994
|
-
const value = '0';
|
|
995
|
-
const result = await query((0, firebase_1.where)('value', '==', value)).countDocs();
|
|
996
|
-
expect(result).toBe(1);
|
|
997
|
-
});
|
|
998
|
-
});
|
|
999
|
-
describe('in', () => {
|
|
1000
|
-
it('should return the documents with any of the input values.', async () => {
|
|
1001
|
-
const targetValue = ['0', '1', '2'];
|
|
1002
|
-
const result = await query((0, firebase_1.where)('value', 'in', targetValue)).getDocs();
|
|
1003
|
-
expect(result.docs.length).toBe(3);
|
|
1004
|
-
const values = result.docs.map((x) => x.data().value);
|
|
1005
|
-
expect(values).toContain('0');
|
|
1006
|
-
expect(values).toContain('1');
|
|
1007
|
-
expect(values).toContain('2');
|
|
1008
|
-
});
|
|
1009
|
-
it('should return the count of documents with any of the input values.', async () => {
|
|
1010
|
-
const targetValue = ['0', '1', '2'];
|
|
1011
|
-
const result = await query((0, firebase_1.where)('value', 'in', targetValue)).countDocs();
|
|
1012
|
-
expect(result).toBe(3);
|
|
1013
|
-
});
|
|
1014
|
-
});
|
|
1015
|
-
describe('not-in', () => {
|
|
1016
|
-
it('should return the documents that do not contain any of the input values.', async () => {
|
|
1017
|
-
const targetValue = ['0', '1', '2'];
|
|
1018
|
-
const result = await query((0, firebase_1.where)('value', 'not-in', targetValue)).getDocs();
|
|
1019
|
-
expect(result.docs.length).toBe(2);
|
|
1020
|
-
const values = result.docs.map((x) => x.data().value);
|
|
1021
|
-
expect(values).not.toContain('0');
|
|
1022
|
-
expect(values).not.toContain('1');
|
|
1023
|
-
expect(values).not.toContain('2');
|
|
1024
|
-
expect(values).toContain('3');
|
|
1025
|
-
expect(values).toContain('4');
|
|
1026
|
-
});
|
|
1027
|
-
it('should return the count of documents that do not contain any of the input values.', async () => {
|
|
1028
|
-
const targetValue = ['0', '1', '2'];
|
|
1029
|
-
const result = await query((0, firebase_1.where)('value', 'not-in', targetValue)).countDocs();
|
|
1030
|
-
expect(result).toBe(2);
|
|
1031
|
-
});
|
|
1032
|
-
});
|
|
1033
|
-
describe('searching array values', () => {
|
|
1034
|
-
describe('in', () => {
|
|
1035
|
-
it('should return the documents with arrays that only have the given values.', async () => {
|
|
1036
|
-
// NOTE: we pass an array to match exactly
|
|
1037
|
-
const targetValue = [['0', 'even']];
|
|
1038
|
-
const result = await query((0, firebase_1.where)('tags', 'in', targetValue)).getDocs();
|
|
1039
|
-
expect(result.docs.length).toBe(1);
|
|
1040
|
-
expect(result.docs[0].data().value).toBe('0');
|
|
1041
|
-
});
|
|
1042
|
-
it('should not return the document with arrays that have more than the requested values.', async () => {
|
|
1043
|
-
const targetValue = [['0']];
|
|
1044
|
-
const result = await query((0, firebase_1.where)('tags', 'in', targetValue)).getDocs();
|
|
1045
|
-
expect(result.docs.length).toBe(0);
|
|
1046
|
-
});
|
|
1047
|
-
it('should return the count of documents with arrays that only have the given values.', async () => {
|
|
1048
|
-
// NOTE: we pass an array to match exactly
|
|
1049
|
-
const targetValue = [['0', 'even']];
|
|
1050
|
-
const result = await query((0, firebase_1.where)('tags', 'in', targetValue)).countDocs();
|
|
1051
|
-
expect(result).toBe(1);
|
|
1052
|
-
});
|
|
1053
|
-
});
|
|
1054
|
-
describe('array-contains', () => {
|
|
1055
|
-
it('should return the documents that contain the given value.', async () => {
|
|
1056
|
-
const targetValue = '0';
|
|
1057
|
-
const result = await query((0, firebase_1.where)('tags', 'array-contains', targetValue)).getDocs();
|
|
1058
|
-
expect(result.docs.length).toBe(1);
|
|
1059
|
-
expect(result.docs[0].data().value).toBe('0');
|
|
1060
|
-
});
|
|
1061
|
-
(0, test_1.itShouldFail)('if an array is passed to where with array-contains', async () => {
|
|
1062
|
-
const targetValues = ['0', 'even'];
|
|
1063
|
-
await (0, test_1.expectFail)(() => query((0, firebase_1.where)('tags', 'array-contains', targetValues)).getDocs());
|
|
1064
|
-
});
|
|
1065
|
-
});
|
|
1066
|
-
describe('array-contains-any', () => {
|
|
1067
|
-
it('should return the documents that contain the given value, even if it is not passed as an array.', async () => {
|
|
1068
|
-
const targetValues = 'even';
|
|
1069
|
-
const result = await query((0, firebase_1.where)('tags', 'array-contains-any', targetValues)).getDocs();
|
|
1070
|
-
expect(result.docs.length).toBe(Math.floor(testDocumentCount / 2) + 1);
|
|
1071
|
-
result.docs.forEach((x) => {
|
|
1072
|
-
expect((0, util_1.isEvenNumber)(Number(x.data().value)));
|
|
1073
|
-
});
|
|
1074
|
-
});
|
|
1075
|
-
it('should return the documents that contain any of the given values.', async () => {
|
|
1076
|
-
const targetValues = ['0', 'even'];
|
|
1077
|
-
const result = await query((0, firebase_1.where)('tags', 'array-contains-any', targetValues)).getDocs();
|
|
1078
|
-
expect(result.docs.length).toBe(Math.floor(testDocumentCount / 2) + 1);
|
|
1079
|
-
result.docs.forEach((x) => {
|
|
1080
|
-
expect((0, util_1.isEvenNumber)(Number(x.data().value)));
|
|
1081
|
-
});
|
|
1082
|
-
});
|
|
1083
|
-
});
|
|
1084
|
-
});
|
|
1085
|
-
describe('Compound Queries', () => {
|
|
1086
|
-
describe('Searching Strings', () => {
|
|
1087
|
-
/*
|
|
1088
|
-
Create models that have model key like string values for prefix searching.
|
|
1089
|
-
*/
|
|
1090
|
-
const evenPrefix = mock_1.mockItemIdentity.collectionType + '/';
|
|
1091
|
-
const oddPrefix = mock_1.mockItemIdentity.collectionType + 'd' + '/'; // similar, but not quite the same
|
|
1092
|
-
const expectedNumberOfEvenValues = Math.ceil(testDocumentCount / 2);
|
|
1093
|
-
beforeEach(async () => {
|
|
1094
|
-
items = await (0, firebase_1.makeDocuments)(f.instance.firestoreCollection.documentAccessor(), {
|
|
1095
|
-
count: testDocumentCount,
|
|
1096
|
-
init: (i) => {
|
|
1097
|
-
const isEven = (0, util_1.isEvenNumber)(i);
|
|
1098
|
-
const prefix = isEven ? evenPrefix : oddPrefix;
|
|
1099
|
-
return {
|
|
1100
|
-
value: `${prefix}${i}`,
|
|
1101
|
-
date: new Date(),
|
|
1102
|
-
tags: [],
|
|
1103
|
-
test: true
|
|
1104
|
-
};
|
|
1105
|
-
}
|
|
1106
|
-
});
|
|
1107
|
-
});
|
|
1108
|
-
describe('whereStringHasRootIdentityModelKey()', () => {
|
|
1109
|
-
it('should return only models with searched prefix', async () => {
|
|
1110
|
-
const result = await query((0, firebase_1.whereStringHasRootIdentityModelKey)('value', mock_1.mockItemIdentity)).getDocs();
|
|
1111
|
-
const values = result.docs.map((x) => x.data().value);
|
|
1112
|
-
values.forEach((x) => {
|
|
1113
|
-
expect(x.startsWith(evenPrefix));
|
|
1114
|
-
});
|
|
1115
|
-
expect(result.docs.length).toBe(expectedNumberOfEvenValues);
|
|
1116
|
-
});
|
|
1117
|
-
it('should return the count of only models with searched prefix', async () => {
|
|
1118
|
-
const result = await query((0, firebase_1.whereStringHasRootIdentityModelKey)('value', mock_1.mockItemIdentity)).countDocs();
|
|
1119
|
-
expect(result).toBe(expectedNumberOfEvenValues);
|
|
1120
|
-
});
|
|
1121
|
-
});
|
|
1122
|
-
describe('whereStringValueHasPrefix()', () => {
|
|
1123
|
-
it('should return only models with searched prefix', async () => {
|
|
1124
|
-
const result = await query((0, firebase_1.whereStringValueHasPrefix)('value', evenPrefix)).getDocs();
|
|
1125
|
-
const values = result.docs.map((x) => x.data().value);
|
|
1126
|
-
values.forEach((x) => {
|
|
1127
|
-
expect(x.startsWith(evenPrefix));
|
|
1128
|
-
});
|
|
1129
|
-
expect(result.docs.length).toBe(expectedNumberOfEvenValues);
|
|
1130
|
-
});
|
|
1131
|
-
});
|
|
1132
|
-
});
|
|
1133
|
-
/**
|
|
1134
|
-
* Since we choose to store dates as strings, we can compare ranges of dates.
|
|
1135
|
-
*/
|
|
1136
|
-
describe('Searching Date Strings', () => {
|
|
1137
|
-
describe('whereDateIsAfterWithSort()', () => {
|
|
1138
|
-
it('should return models with dates after the input.', async () => {
|
|
1139
|
-
const startHoursLater = 2;
|
|
1140
|
-
const start = (0, date_fns_1.addHours)(startDate, startHoursLater);
|
|
1141
|
-
const result = await query((0, firebase_1.whereDateIsAfterWithSort)('date', start)).getDocs();
|
|
1142
|
-
expect(result.docs.length).toBe(startHoursLater);
|
|
1143
|
-
// ascending order by default
|
|
1144
|
-
expect(result.docs[0].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 1).toISOString());
|
|
1145
|
-
expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 2).toISOString());
|
|
1146
|
-
});
|
|
1147
|
-
it('should return models with dates after the input in descending order.', async () => {
|
|
1148
|
-
const startHoursLater = 2;
|
|
1149
|
-
const start = (0, date_fns_1.addHours)(startDate, startHoursLater);
|
|
1150
|
-
const result = await query((0, firebase_1.whereDateIsAfterWithSort)('date', start, 'desc')).getDocs();
|
|
1151
|
-
expect(result.docs.length).toBe(startHoursLater);
|
|
1152
|
-
// check descending order
|
|
1153
|
-
expect(result.docs[0].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 2).toISOString());
|
|
1154
|
-
expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 1).toISOString());
|
|
1155
|
-
});
|
|
1156
|
-
});
|
|
1157
|
-
describe('whereDateIsBeforeWithSort()', () => {
|
|
1158
|
-
it('should return models with dates before the input.', async () => {
|
|
1159
|
-
const startHoursLater = 2;
|
|
1160
|
-
const endDate = (0, date_fns_1.addHours)(startDate, startHoursLater);
|
|
1161
|
-
const result = await query((0, firebase_1.whereDateIsBeforeWithSort)('date', endDate)).getDocs();
|
|
1162
|
-
expect(result.docs.length).toBe(startHoursLater);
|
|
1163
|
-
// descending order by default
|
|
1164
|
-
expect(result.docs[0].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, -1).toISOString());
|
|
1165
|
-
expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, -2).toISOString());
|
|
1166
|
-
});
|
|
1167
|
-
it('should return models with dates before the input in ascending order.', async () => {
|
|
1168
|
-
const startHoursLater = 2;
|
|
1169
|
-
const endDate = (0, date_fns_1.addHours)(startDate, startHoursLater);
|
|
1170
|
-
const result = await query((0, firebase_1.whereDateIsBeforeWithSort)('date', endDate, 'asc')).getDocs();
|
|
1171
|
-
expect(result.docs.length).toBe(startHoursLater);
|
|
1172
|
-
// check ascending order
|
|
1173
|
-
expect(result.docs[0].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, -2).toISOString());
|
|
1174
|
-
expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, -1).toISOString());
|
|
1175
|
-
});
|
|
1176
|
-
});
|
|
1177
|
-
describe('whereDateIsOnOrAfterWithSort()', () => {
|
|
1178
|
-
it('should return models with dates after the input.', async () => {
|
|
1179
|
-
const startHoursLater = 2;
|
|
1180
|
-
const start = (0, date_fns_1.addHours)(startDate, startHoursLater);
|
|
1181
|
-
const result = await query((0, firebase_1.whereDateIsOnOrAfterWithSort)('date', start)).getDocs();
|
|
1182
|
-
expect(result.docs.length).toBe(3);
|
|
1183
|
-
// ascending order by default
|
|
1184
|
-
expect(result.docs[0].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 0).toISOString());
|
|
1185
|
-
expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 1).toISOString());
|
|
1186
|
-
expect(result.docs[2].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 2).toISOString());
|
|
1187
|
-
});
|
|
1188
|
-
it('should return models with dates after the input in descending order.', async () => {
|
|
1189
|
-
const startHoursLater = 2;
|
|
1190
|
-
const start = (0, date_fns_1.addHours)(startDate, startHoursLater);
|
|
1191
|
-
const result = await query((0, firebase_1.whereDateIsOnOrAfterWithSort)('date', start, 'desc')).getDocs();
|
|
1192
|
-
expect(result.docs.length).toBe(3);
|
|
1193
|
-
// check descending order
|
|
1194
|
-
expect(result.docs[0].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 2).toISOString());
|
|
1195
|
-
expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 1).toISOString());
|
|
1196
|
-
expect(result.docs[2].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 0).toISOString());
|
|
1197
|
-
});
|
|
1198
|
-
});
|
|
1199
|
-
describe('whereDateIsOnOrBeforeWithSort()', () => {
|
|
1200
|
-
it('should return models with dates before the input.', async () => {
|
|
1201
|
-
const startHoursLater = 2;
|
|
1202
|
-
const endDate = (0, date_fns_1.addHours)(startDate, startHoursLater);
|
|
1203
|
-
const result = await query((0, firebase_1.whereDateIsOnOrBeforeWithSort)('date', endDate)).getDocs();
|
|
1204
|
-
expect(result.docs.length).toBe(3);
|
|
1205
|
-
// descending order by default
|
|
1206
|
-
expect(result.docs[0].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, 0).toISOString());
|
|
1207
|
-
expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, -1).toISOString());
|
|
1208
|
-
expect(result.docs[2].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, -2).toISOString());
|
|
1209
|
-
});
|
|
1210
|
-
it('should return models with dates before the input in ascending order.', async () => {
|
|
1211
|
-
const startHoursLater = 2;
|
|
1212
|
-
const endDate = (0, date_fns_1.addHours)(startDate, startHoursLater);
|
|
1213
|
-
const result = await query((0, firebase_1.whereDateIsOnOrBeforeWithSort)('date', endDate, 'asc')).getDocs();
|
|
1214
|
-
expect(result.docs.length).toBe(3);
|
|
1215
|
-
// check ascending order
|
|
1216
|
-
expect(result.docs[0].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, -2).toISOString());
|
|
1217
|
-
expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, -1).toISOString());
|
|
1218
|
-
expect(result.docs[2].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(endDate, 0).toISOString());
|
|
1219
|
-
});
|
|
1220
|
-
});
|
|
1221
|
-
describe('whereDateIsInRange()', () => {
    it('should return the date values within the given range.', async () => {
        const startHoursLater = 1;
        const totalHoursInRange = 2;
        const rangeStart = (0, date_fns_1.addHours)(startDate, startHoursLater);
        // distance appears to be inclusive of the starting hour (2 docs back for distance 1) — hence the - 1
        const rangeFilter = { date: rangeStart, distance: totalHoursInRange - 1, type: date_1.DateRangeType.HOURS_RANGE };
        const result = await query((0, firebase_1.whereDateIsInRange)('date', rangeFilter)).getDocs();
        expect(result.docs.length).toBe(totalHoursInRange);
        expect(result.docs[0].data().date?.toISOString()).toBe(rangeStart.toISOString());
        expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(rangeStart, 1).toISOString());
    });
});
|
|
1232
|
-
describe('whereDateIsBetween()', () => {
    it('should return the date values within the given range.', async () => {
        const startHoursLater = 1;
        const hoursRange = 2;
        const start = (0, date_fns_1.addHours)(startDate, startHoursLater);
        const end = (0, date_fns_1.addHours)(start, hoursRange);
        const result = await query((0, firebase_1.whereDateIsBetween)('date', { start, end })).getDocs();
        expect(result.docs.length).toBe(hoursRange);
        expect(result.docs[0].data().date?.toISOString()).toBe(start.toISOString());
        expect(result.docs[1].data().date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 1).toISOString());
    });
    describe('with searching array value', () => {
        it('should search the date range and values that are tagged even.', async () => {
            const targetTag = 'even';
            const startHoursLater = 1;
            const hoursRange = 2;
            const start = (0, date_fns_1.addHours)(startDate, startHoursLater);
            const end = (0, date_fns_1.addHours)(start, hoursRange);
            const result = await query([
                // filter by dates first
                ...(0, firebase_1.whereDateIsBetween)('date', { start, end }),
                // only allow even items
                // FIX: 'array-contains-any' requires an ARRAY of candidate values;
                // passing the bare string is rejected by Firestore.
                (0, firebase_1.where)('tags', 'array-contains-any', [targetTag])
            ]).getDocs();
            expect(result.docs.length).toBe(1);
            const onlyResultData = result.docs[0].data();
            expect(onlyResultData.date?.toISOString()).toBe((0, date_fns_1.addHours)(start, 1).toISOString());
            expect(onlyResultData.tags).toContain(targetTag);
        });
    });
});
|
|
1263
|
-
});
|
|
1264
|
-
});
|
|
1265
|
-
});
|
|
1266
|
-
describe('whereDocumentId', () => {
    it('should return the documents matching the query.', async () => {
        // look up the first seeded item by its document id
        const targetId = items[0].id;
        const matches = await query((0, firebase_1.whereDocumentId)('==', targetId)).getDocs();
        expect(matches.docs.length).toBe(1);
        expect(matches.docs[0].id).toBe(targetId);
    });
});
|
|
1274
|
-
describe('startAt', () => {
    const { limit, orderByDocumentId, startAt } = firebase_1;
    it('should return values starting from the specified startAt document.', async () => {
        const limitCount = 2;
        const firstQuery = query(limit(limitCount));
        const first = await firstQuery.getDocs();
        expect(first.docs.length).toBe(limitCount);
        // startAt is inclusive: the cursor document leads the next page
        const cursorDoc = first.docs[1];
        const second = await firstQuery.filter(startAt(cursorDoc)).getDocs();
        expect(second.docs.length).toBe(limitCount);
        expect(second.docs[0].id).toBe(cursorDoc.id);
    });
    it('should return the count of values starting from the specified startAt document.', async () => {
        const limitCount = 2;
        const firstQuery = query(limit(limitCount));
        const first = await firstQuery.getDocs();
        expect(first.docs.length).toBe(limitCount);
        // NOTE: startAt with count requires an orderBy to be set.
        const secondCount = await firstQuery.filter(orderByDocumentId(), startAt(first.docs[1])).countDocs();
        expect(secondCount).toBe(limitCount);
    });
});
|
|
1294
|
-
describe('startAtValue', () => {
    // FIX: the description previously said "startAt path" — the cursor here is a
    // field VALUE (ordered by 'value'), not a document path.
    it('should return values starting from the specified startAt value.', async () => {
        const limitCount = testDocumentCount;
        const firstQuery = query((0, firebase_1.orderBy)('value'), (0, firebase_1.limit)(limitCount));
        const first = await firstQuery.getDocs();
        expect(first.docs.length).toBe(limitCount);
        const indexToStartAt = 3;
        const docToStartAt = first.docs[indexToStartAt];
        const docToStartAtValue = docToStartAt.data().value;
        // startAtValue is inclusive of the document holding the cursor value
        const second = await firstQuery.filter((0, firebase_1.startAtValue)(docToStartAtValue)).getDocs();
        expect(second.docs.length).toBe(limitCount - indexToStartAt);
        expect(second.docs[0].id).toBe(docToStartAt.id);
    });
});
|
|
1308
|
-
describe('startAfter', () => {
    it('should return values starting after the specified startAt point.', async () => {
        const limitCount = 3;
        const pageQuery = query((0, firebase_1.limit)(limitCount));
        const firstPage = await pageQuery.getDocs();
        expect(firstPage.docs.length).toBe(limitCount);
        // startAfter is exclusive, so the next page begins at the document after the cursor
        const cursorDoc = firstPage.docs[1];
        const expectedFirstDoc = firstPage.docs[2];
        const secondPage = await pageQuery.filter((0, firebase_1.startAfter)(cursorDoc)).getDocs();
        expect(secondPage.docs.length).toBe(limitCount);
        expect(secondPage.docs[0].id).toBe(expectedFirstDoc.id);
    });
});
|
|
1321
|
-
describe('endAt', () => {
    it('should return values ending with the specified endAt point (inclusive).', async () => {
        const limitCount = 2;
        const baseQuery = query((0, firebase_1.limit)(limitCount));
        const firstPage = await baseQuery.getDocs();
        expect(firstPage.docs.length).toBe(limitCount);
        // endAt is inclusive: ending at the first doc yields exactly that doc
        const trimmed = await baseQuery.filter((0, firebase_1.endAt)(firstPage.docs[0])).getDocs();
        expect(trimmed.docs.length).toBe(limitCount - 1);
        expect(trimmed.docs[0].id).toBe(firstPage.docs[0].id);
    });
});
|
|
1332
|
-
describe('endAtValue', () => {
    // FIX: the description was copy-pasted from the startAtValue suite
    // ("should return values starting from the specified startAt path.") and did
    // not describe this test, which cursors on an inclusive endAt field value.
    it('should return values ending at the specified endAt value.', async () => {
        const limitCount = testDocumentCount;
        const firstQuery = query((0, firebase_1.orderBy)('value'), (0, firebase_1.limit)(limitCount));
        const first = await firstQuery.getDocs();
        expect(first.docs.length).toBe(limitCount);
        const indexToEndAt = 2;
        const docToEndAt = first.docs[indexToEndAt];
        const docToEndAtValue = docToEndAt.data().value;
        // endAtValue is inclusive of the document holding the cursor value
        const second = await firstQuery.filter((0, firebase_1.endAtValue)(docToEndAtValue)).getDocs();
        expect(second.docs.length).toBe(indexToEndAt + 1);
        expect(second.docs[second.docs.length - 1].id).toBe(docToEndAt.id);
    });
});
|
|
1346
|
-
describe('endBefore', () => {
    it('should return values ending with the specified endBefore point (exclusive).', async () => {
        const limitCount = 2;
        const pageQuery = query((0, firebase_1.limit)(limitCount));
        const firstPage = await pageQuery.getDocs();
        expect(firstPage.docs.length).toBe(limitCount);
        // endBefore is exclusive: stopping before doc[1] leaves only doc[0]
        const trimmed = await pageQuery.filter((0, firebase_1.endBefore)(firstPage.docs[1])).getDocs();
        expect(trimmed.docs.length).toBe(limitCount - 1);
        expect(trimmed.docs[0].id).toBe(firstPage.docs[0].id);
    });
});
|
|
1357
|
-
});
|
|
1358
|
-
});
|
|
1359
|
-
});
|
|
1360
|
-
}
|
|
1361
|
-
//# sourceMappingURL=test.driver.query.js.map
|