@powersync/service-module-mongodb 0.11.0 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +30 -0
- package/LICENSE +3 -3
- package/dist/replication/ChangeStream.d.ts +5 -3
- package/dist/replication/ChangeStream.js +11 -2
- package/dist/replication/ChangeStream.js.map +1 -1
- package/dist/replication/MongoRelation.d.ts +3 -3
- package/dist/replication/MongoRelation.js +14 -13
- package/dist/replication/MongoRelation.js.map +1 -1
- package/dist/replication/replication-utils.js +3 -3
- package/dist/replication/replication-utils.js.map +1 -1
- package/package.json +11 -11
- package/src/replication/ChangeStream.ts +14 -4
- package/src/replication/MongoRelation.ts +26 -16
- package/src/replication/replication-utils.ts +3 -3
- package/test/src/mongo_test.test.ts +58 -16
- package/tsconfig.tsbuildinfo +1 -1
@@ -52,7 +52,7 @@ export async function checkSourceConfiguration(connectionManager: MongoManager):
     const fullName = `${db.databaseName}.${CHECKPOINTS_COLLECTION}`;
     throw new ServiceError(
       ErrorCode.PSYNC_S1307,
-      `MongoDB user does not have the required ${missingCheckpointActions.map((a) => `"${a}"`).join(', ')}
+      `MongoDB user does not have the required ${missingCheckpointActions.map((a) => `"${a}"`).join(', ')} privilege(s) on "${fullName}".`
     );
   }

@@ -62,14 +62,14 @@ export async function checkSourceConfiguration(connectionManager: MongoManager):
     if (!anyCollectionActions.has('collMod')) {
       throw new ServiceError(
         ErrorCode.PSYNC_S1307,
-        `MongoDB user does not have the required "collMod"
+        `MongoDB user does not have the required "collMod" privilege on "${db.databaseName}", required for "post_images: auto_configure".`
       );
     }
   }
   if (!anyCollectionActions.has('listCollections')) {
     throw new ServiceError(
       ErrorCode.PSYNC_S1307,
-      `MongoDB user does not have the required "listCollections"
+      `MongoDB user does not have the required "listCollections" privilege on "${db.databaseName}".`
     );
   }
 } else {
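The two hunks above only sharpen the error text; the underlying requirement is unchanged: the replication user needs the named actions ("collMod", "listCollections", plus whatever checkpoint-collection actions are reported in `missingCheckpointActions`). Below is a minimal sketch of granting such actions through a custom role with the MongoDB Node driver. The role, user, database, and checkpoints-collection names are placeholders, and the exact action list should be taken from the PSYNC_S1307 error itself.

```ts
import { MongoClient } from 'mongodb';

// Sketch only: create a role carrying the actions referenced by the checks above,
// then grant it to the replication user. All names below are placeholders.
async function grantPowerSyncPrivileges(uri: string) {
  const client = new MongoClient(uri);
  try {
    const admin = client.db('admin');
    await admin.command({
      createRole: 'powersync_replication',
      privileges: [
        // Database-level actions named in the "collMod" / "listCollections" checks.
        { resource: { db: 'my_database', collection: '' }, actions: ['listCollections', 'collMod'] },
        // Checkpoint-collection actions; the collection name and action list here are
        // assumptions — use whatever the error message reports as missing.
        {
          resource: { db: 'my_database', collection: '_powersync_checkpoints' },
          actions: ['find', 'insert', 'update', 'changeStream']
        }
      ],
      roles: []
    });
    await admin.command({
      grantRolesToUser: 'powersync_user',
      roles: [{ role: 'powersync_replication', db: 'admin' }]
    });
  } finally {
    await client.close();
  }
}
```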
@@ -1,5 +1,11 @@
 import { mongo } from '@powersync/lib-service-mongodb';
-import {
+import {
+  applyRowContext,
+  CompatibilityContext,
+  CompatibilityEdition,
+  SqliteInputRow,
+  SqlSyncRules
+} from '@powersync/service-sync-rules';
 import { describe, expect, test } from 'vitest';

 import { MongoRouteAPIAdapter } from '@module/api/MongoRouteAPIAdapter.js';
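The remaining test hunks follow from this import change: replicated rows are handled as SqliteInputRow values and only become concrete SQLite values once applyRowContext is applied under a chosen CompatibilityContext. A minimal sketch of that pattern, reusing only names visible in this diff (the `row` value is a stand-in for a row produced by the replication stream):

```ts
import {
  applyRowContext,
  CompatibilityContext,
  CompatibilityEdition,
  SqliteInputRow
} from '@powersync/service-sync-rules';

// Stand-in for a row produced by replication (e.g. via ChangeStream.getQueryData(...)).
declare const row: SqliteInputRow;

// Values in the legacy format, as asserted by checkResults()/checkResultsNested() below.
const legacy = applyRowContext(row, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);

// Values under the newer sync-streams edition, as used in the new 'date format' test.
const current = applyRowContext(row, new CompatibilityContext(CompatibilityEdition.SYNC_STREAMS));
```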
@@ -138,8 +144,10 @@ describe('mongo data types', () => {
     ]);
   }

-  function checkResults(transformed:
-
+  function checkResults(transformed: SqliteInputRow[]) {
+    const sqliteValue = transformed.map((e) => applyRowContext(e, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY));
+
+    expect(sqliteValue[0]).toMatchObject({
       _id: 1n,
       text: 'text',
       uuid: 'baeb2514-4c57-436d-b3cc-c1256211656d',
@@ -152,17 +160,17 @@ describe('mongo data types', () => {
       null: null,
       decimal: '3.14'
     });
-    expect(
+    expect(sqliteValue[1]).toMatchObject({
       _id: 2n,
       nested: '{"test":"thing"}'
     });

-    expect(
+    expect(sqliteValue[2]).toMatchObject({
       _id: 3n,
       date: '2023-03-06 13:47:00.000Z'
     });

-    expect(
+    expect(sqliteValue[3]).toMatchObject({
       _id: 4n,
       objectId: '66e834cc91d805df11fa0ecb',
       timestamp: 1958505087099n,
@@ -177,9 +185,9 @@ describe('mongo data types', () => {
     });

     // This must specifically be null, and not undefined.
-    expect(
+    expect(sqliteValue[4].undefined).toBeNull();

-    expect(
+    expect(sqliteValue[5]).toMatchObject({
       _id: 6n,
       int4: -1n,
       int8: -9007199254740993n,
@@ -188,8 +196,10 @@ describe('mongo data types', () => {
     });
   }

-  function checkResultsNested(transformed:
-
+  function checkResultsNested(transformed: SqliteInputRow[]) {
+    const sqliteValue = transformed.map((e) => applyRowContext(e, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY));
+
+    expect(sqliteValue[0]).toMatchObject({
       _id: 1n,
       text: `["text"]`,
       uuid: '["baeb2514-4c57-436d-b3cc-c1256211656d"]',
@@ -204,22 +214,22 @@ describe('mongo data types', () => {

     // Note: Depending on to what extent we use the original postgres value, the whitespace may change, and order may change.
     // We do expect that decimals and big numbers are preserved.
-    expect(
+    expect(sqliteValue[1]).toMatchObject({
       _id: 2n,
       nested: '[{"test":"thing"}]'
     });

-    expect(
+    expect(sqliteValue[2]).toMatchObject({
       _id: 3n,
       date: '["2023-03-06 13:47:00.000Z"]'
     });

-    expect(
+    expect(sqliteValue[3]).toMatchObject({
       _id: 5n,
       undefined: '[null]'
     });

-    expect(
+    expect(sqliteValue[4]).toMatchObject({
       _id: 6n,
       int4: '[-1]',
       int8: '[-9007199254740993]',
@@ -227,7 +237,7 @@ describe('mongo data types', () => {
       decimal: '["-3.14"]'
     });

-    expect(
+    expect(sqliteValue[5]).toMatchObject({
       _id: 10n,
       objectId: '["66e834cc91d805df11fa0ecb"]',
       timestamp: '[1958505087099]',
@@ -522,13 +532,45 @@ bucket_definitions:
       errors: []
     });
   });
+
+  test('date format', async () => {
+    const { db, client } = await connectMongoData();
+    const collection = db.collection('test_data');
+    try {
+      await setupTable(db);
+      await collection.insertOne({
+        fraction: new Date('2023-03-06 15:47:01.123+02'),
+        noFraction: new Date('2023-03-06 15:47:01+02')
+      });
+
+      const rawResults = await db
+        .collection('test_data')
+        .find({}, { sort: { _id: 1 } })
+        .toArray();
+      const [row] = [...ChangeStream.getQueryData(rawResults)];
+
+      const oldFormat = applyRowContext(row, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
+      expect(oldFormat).toMatchObject({
+        fraction: '2023-03-06 13:47:01.123Z',
+        noFraction: '2023-03-06 13:47:01.000Z'
+      });
+
+      const newFormat = applyRowContext(row, new CompatibilityContext(CompatibilityEdition.SYNC_STREAMS));
+      expect(newFormat).toMatchObject({
+        fraction: '2023-03-06T13:47:01.123Z',
+        noFraction: '2023-03-06T13:47:01.000Z'
+      });
+    } finally {
+      await client.close();
+    }
+  });
 });

 /**
  * Return all the inserts from the first transaction in the replication stream.
  */
 async function getReplicationTx(replicationStream: mongo.ChangeStream, count: number) {
-  let transformed:
+  let transformed: SqliteInputRow[] = [];
   for await (const doc of replicationStream) {
     // Specifically filter out map_input / map_output collections
     if (!(doc as any)?.ns?.coll?.startsWith('test_data')) {