@powersync/service-module-mongodb 0.1.8 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/CHANGELOG.md +30 -0
  2. package/dist/api/MongoRouteAPIAdapter.d.ts +1 -1
  3. package/dist/api/MongoRouteAPIAdapter.js +6 -5
  4. package/dist/api/MongoRouteAPIAdapter.js.map +1 -1
  5. package/dist/index.d.ts +3 -0
  6. package/dist/index.js +3 -0
  7. package/dist/index.js.map +1 -1
  8. package/dist/module/MongoModule.d.ts +0 -1
  9. package/dist/module/MongoModule.js +4 -6
  10. package/dist/module/MongoModule.js.map +1 -1
  11. package/dist/replication/ChangeStream.d.ts +1 -1
  12. package/dist/replication/ChangeStream.js +3 -3
  13. package/dist/replication/ChangeStream.js.map +1 -1
  14. package/dist/replication/ChangeStreamReplicationJob.js +2 -2
  15. package/dist/replication/ChangeStreamReplicationJob.js.map +1 -1
  16. package/dist/replication/MongoManager.d.ts +1 -1
  17. package/dist/replication/MongoManager.js +1 -1
  18. package/dist/replication/MongoManager.js.map +1 -1
  19. package/dist/replication/MongoRelation.d.ts +1 -1
  20. package/dist/replication/MongoRelation.js +16 -6
  21. package/dist/replication/MongoRelation.js.map +1 -1
  22. package/dist/types/types.d.ts +16 -19
  23. package/dist/types/types.js +4 -21
  24. package/dist/types/types.js.map +1 -1
  25. package/package.json +11 -9
  26. package/src/api/MongoRouteAPIAdapter.ts +7 -6
  27. package/src/index.ts +3 -0
  28. package/src/module/MongoModule.ts +4 -7
  29. package/src/replication/ChangeStream.ts +3 -3
  30. package/src/replication/ChangeStreamReplicationJob.ts +3 -4
  31. package/src/replication/MongoManager.ts +2 -1
  32. package/src/replication/MongoRelation.ts +16 -7
  33. package/src/types/types.ts +8 -27
  34. package/test/src/change_stream.test.ts +38 -38
  35. package/test/src/change_stream_utils.ts +7 -6
  36. package/test/src/env.ts +1 -0
  37. package/test/src/mongo_test.test.ts +74 -14
  38. package/test/src/setup.ts +4 -1
  39. package/test/src/slow_tests.test.ts +11 -16
  40. package/test/src/util.ts +7 -27
  41. package/test/tsconfig.json +3 -0
  42. package/tsconfig.json +6 -0
  43. package/tsconfig.tsbuildinfo +1 -1
  44. package/vitest.config.ts +1 -1
@@ -1,13 +1,14 @@
1
+ import * as lib_mongo from '@powersync/lib-service-mongodb';
2
+ import { mongo } from '@powersync/lib-service-mongodb';
1
3
  import { api, ParseSyncRulesOptions, SourceTable } from '@powersync/service-core';
2
- import * as mongo from 'mongodb';
3
-
4
4
  import * as sync_rules from '@powersync/service-sync-rules';
5
5
  import * as service_types from '@powersync/service-types';
6
+
6
7
  import { MongoManager } from '../replication/MongoManager.js';
7
8
  import { constructAfterRecord, createCheckpoint } from '../replication/MongoRelation.js';
9
+ import { CHECKPOINTS_COLLECTION } from '../replication/replication-utils.js';
8
10
  import * as types from '../types/types.js';
9
11
  import { escapeRegExp } from '../utils.js';
10
- import { CHECKPOINTS_COLLECTION } from '../replication/replication-utils.js';
11
12
 
12
13
  export class MongoRouteAPIAdapter implements api.RouteAPI {
13
14
  protected client: mongo.MongoClient;
@@ -45,7 +46,7 @@ export class MongoRouteAPIAdapter implements api.RouteAPI {
45
46
  async getConnectionStatus(): Promise<service_types.ConnectionStatusV2> {
46
47
  const base = {
47
48
  id: this.config.id,
48
- uri: types.baseUri(this.config)
49
+ uri: lib_mongo.baseUri(this.config)
49
50
  };
50
51
 
51
52
  try {
@@ -225,7 +226,7 @@ export class MongoRouteAPIAdapter implements api.RouteAPI {
225
226
  try {
226
227
  collections = await this.client.db(db.name).listCollections().toArray();
227
228
  } catch (e) {
228
- if (e instanceof mongo.MongoServerError && e.codeName == 'Unauthorized') {
229
+ if (lib_mongo.isMongoServerError(e) && e.codeName == 'Unauthorized') {
229
230
  // Ignore databases we're not authorized to query
230
231
  return null;
231
232
  }
@@ -267,7 +268,7 @@ export class MongoRouteAPIAdapter implements api.RouteAPI {
267
268
  });
268
269
  }
269
270
  } catch (e) {
270
- if (e instanceof mongo.MongoServerError && e.codeName == 'Unauthorized') {
271
+ if (lib_mongo.isMongoServerError(e) && e.codeName == 'Unauthorized') {
271
272
  // Ignore collections we're not authorized to query
272
273
  continue;
273
274
  }
package/src/index.ts CHANGED
@@ -1 +1,4 @@
1
+ export * from './api/MongoRouteAPIAdapter.js';
1
2
  export * from './module/MongoModule.js';
3
+ export * from './replication/replication-index.js';
4
+ export * from './types/types.js';
@@ -1,25 +1,22 @@
1
+ import * as lib_mongo from '@powersync/lib-service-mongodb';
1
2
  import { api, ConfigurationFileSyncRulesProvider, replication, system, TearDownOptions } from '@powersync/service-core';
2
3
  import { MongoRouteAPIAdapter } from '../api/MongoRouteAPIAdapter.js';
4
+ import { ChangeStreamReplicator } from '../replication/ChangeStreamReplicator.js';
3
5
  import { ConnectionManagerFactory } from '../replication/ConnectionManagerFactory.js';
4
6
  import { MongoErrorRateLimiter } from '../replication/MongoErrorRateLimiter.js';
5
- import { ChangeStreamReplicator } from '../replication/ChangeStreamReplicator.js';
6
- import * as types from '../types/types.js';
7
7
  import { MongoManager } from '../replication/MongoManager.js';
8
8
  import { checkSourceConfiguration } from '../replication/replication-utils.js';
9
+ import * as types from '../types/types.js';
9
10
 
10
11
  export class MongoModule extends replication.ReplicationModule<types.MongoConnectionConfig> {
11
12
  constructor() {
12
13
  super({
13
14
  name: 'MongoDB',
14
- type: types.MONGO_CONNECTION_TYPE,
15
+ type: lib_mongo.MONGO_CONNECTION_TYPE,
15
16
  configSchema: types.MongoConnectionConfig
16
17
  });
17
18
  }
18
19
 
19
- async initialize(context: system.ServiceContextContainer): Promise<void> {
20
- await super.initialize(context);
21
- }
22
-
23
20
  protected createRouteAPIAdapter(): api.RouteAPI {
24
21
  return new MongoRouteAPIAdapter(this.resolveConfig(this.decodedConfig!));
25
22
  }
@@ -1,7 +1,9 @@
1
+ import { mongo } from '@powersync/lib-service-mongodb';
1
2
  import { container, logger } from '@powersync/lib-services-framework';
2
3
  import { Metrics, SaveOperationTag, SourceEntityDescriptor, SourceTable, storage } from '@powersync/service-core';
3
4
  import { DatabaseInputRow, SqliteRow, SqlSyncRules, TablePattern } from '@powersync/service-sync-rules';
4
- import * as mongo from 'mongodb';
5
+ import { PostImagesOption } from '../types/types.js';
6
+ import { escapeRegExp } from '../utils.js';
5
7
  import { MongoManager } from './MongoManager.js';
6
8
  import {
7
9
  constructAfterRecord,
@@ -10,9 +12,7 @@ import {
10
12
  getMongoRelation,
11
13
  mongoLsnToTimestamp
12
14
  } from './MongoRelation.js';
13
- import { escapeRegExp } from '../utils.js';
14
15
  import { CHECKPOINTS_COLLECTION } from './replication-utils.js';
15
- import { PostImagesOption } from '../types/types.js';
16
16
 
17
17
  export const ZERO_LSN = '0000000000000000';
18
18
 
@@ -1,10 +1,9 @@
1
+ import { mongo } from '@powersync/lib-service-mongodb';
1
2
  import { container } from '@powersync/lib-services-framework';
2
- import { ChangeStreamInvalidatedError, ChangeStream } from './ChangeStream.js';
3
-
4
3
  import { replication } from '@powersync/service-core';
5
- import { ConnectionManagerFactory } from './ConnectionManagerFactory.js';
6
4
 
7
- import * as mongo from 'mongodb';
5
+ import { ChangeStream, ChangeStreamInvalidatedError } from './ChangeStream.js';
6
+ import { ConnectionManagerFactory } from './ConnectionManagerFactory.js';
8
7
 
9
8
  export interface ChangeStreamReplicationJobOptions extends replication.AbstractReplicationJobOptions {
10
9
  connectionFactory: ConnectionManagerFactory;
@@ -1,4 +1,5 @@
1
- import * as mongo from 'mongodb';
1
+ import { mongo } from '@powersync/lib-service-mongodb';
2
+
2
3
  import { NormalizedMongoConnectionConfig } from '../types/types.js';
3
4
 
4
5
  export class MongoManager {
@@ -1,7 +1,8 @@
1
+ import { mongo } from '@powersync/lib-service-mongodb';
1
2
  import { storage } from '@powersync/service-core';
2
- import { SqliteRow, SqliteValue, toSyncRulesRow } from '@powersync/service-sync-rules';
3
- import * as mongo from 'mongodb';
4
3
  import { JSONBig, JsonContainer } from '@powersync/service-jsonbig';
4
+ import { SqliteRow, SqliteValue } from '@powersync/service-sync-rules';
5
+
5
6
  import { CHECKPOINTS_COLLECTION } from './replication-utils.js';
6
7
 
7
8
  export function getMongoRelation(source: mongo.ChangeStreamNameSpace): storage.SourceEntityDescriptor {
@@ -38,9 +39,12 @@ export function constructAfterRecord(document: mongo.Document): SqliteRow {
38
39
 
39
40
  export function toMongoSyncRulesValue(data: any): SqliteValue {
40
41
  const autoBigNum = true;
41
- if (data == null) {
42
- // null or undefined
43
- return data;
42
+ if (data === null) {
43
+ return null;
44
+ } else if (typeof data == 'undefined') {
45
+ // We consider `undefined` in top-level fields as missing replicated value,
46
+ // so use null instead.
47
+ return null;
44
48
  } else if (typeof data == 'string') {
45
49
  return data;
46
50
  } else if (typeof data == 'number') {
@@ -95,8 +99,13 @@ function filterJsonData(data: any, depth = 0): any {
95
99
  // This is primarily to prevent infinite recursion
96
100
  throw new Error(`json nested object depth exceeds the limit of ${DEPTH_LIMIT}`);
97
101
  }
98
- if (data == null) {
99
- return data; // null or undefined
102
+ if (data === null) {
103
+ return data;
104
+ } else if (typeof data == 'undefined') {
105
+ // For nested data, keep as undefined.
106
+ // In arrays, this is converted to null.
107
+ // In objects, the key is excluded.
108
+ return undefined;
100
109
  } else if (typeof data == 'string') {
101
110
  return data;
102
111
  } else if (typeof data == 'number') {
@@ -1,9 +1,7 @@
1
- import { normalizeMongoConfig } from '@powersync/service-core';
1
+ import * as lib_mongo from '@powersync/lib-service-mongodb/types';
2
2
  import * as service_types from '@powersync/service-types';
3
3
  import * as t from 'ts-codec';
4
4
 
5
- export const MONGO_CONNECTION_TYPE = 'mongodb' as const;
6
-
7
5
  export enum PostImagesOption {
8
6
  /**
9
7
  * Use fullDocument: updateLookup on the changeStream.
@@ -53,18 +51,9 @@ export interface NormalizedMongoConnectionConfig {
53
51
  postImages: PostImagesOption;
54
52
  }
55
53
 
56
- export const MongoConnectionConfig = service_types.configFile.DataSourceConfig.and(
54
+ export const MongoConnectionConfig = lib_mongo.BaseMongoConfig.and(service_types.configFile.DataSourceConfig).and(
57
55
  t.object({
58
- type: t.literal(MONGO_CONNECTION_TYPE),
59
- /** Unique identifier for the connection - optional when a single connection is present. */
60
- id: t.string.optional(),
61
- /** Tag used as reference in sync rules. Defaults to "default". Does not have to be unique. */
62
- tag: t.string.optional(),
63
- uri: t.string,
64
- username: t.string.optional(),
65
- password: t.string.optional(),
66
- database: t.string.optional(),
67
-
56
+ // Replication specific settings
68
57
  post_images: t.literal('off').or(t.literal('auto_configure')).or(t.literal('read_only')).optional()
69
58
  })
70
59
  );
@@ -72,20 +61,21 @@ export const MongoConnectionConfig = service_types.configFile.DataSourceConfig.a
72
61
  /**
73
62
  * Config input specified when starting services
74
63
  */
75
- export type MongoConnectionConfig = t.Decoded<typeof MongoConnectionConfig>;
64
+ export type MongoConnectionConfig = t.Encoded<typeof MongoConnectionConfig>;
65
+ export type MongoConnectionConfigDecoded = t.Decoded<typeof MongoConnectionConfig>;
76
66
 
77
67
  /**
78
68
  * Resolved version of {@link MongoConnectionConfig}
79
69
  */
80
- export type ResolvedConnectionConfig = MongoConnectionConfig & NormalizedMongoConnectionConfig;
70
+ export type ResolvedConnectionConfig = MongoConnectionConfigDecoded & NormalizedMongoConnectionConfig;
81
71
 
82
72
  /**
83
73
  * Validate and normalize connection options.
84
74
  *
85
75
  * Returns destructured options.
86
76
  */
87
- export function normalizeConnectionConfig(options: MongoConnectionConfig): NormalizedMongoConnectionConfig {
88
- const base = normalizeMongoConfig(options);
77
+ export function normalizeConnectionConfig(options: MongoConnectionConfigDecoded): NormalizedMongoConnectionConfig {
78
+ const base = lib_mongo.normalizeMongoConfig(options);
89
79
 
90
80
  return {
91
81
  ...base,
@@ -94,12 +84,3 @@ export function normalizeConnectionConfig(options: MongoConnectionConfig): Norma
94
84
  postImages: (options.post_images as PostImagesOption | undefined) ?? PostImagesOption.OFF
95
85
  };
96
86
  }
97
-
98
- /**
99
- * Construct a mongodb URI, without username, password or ssl options.
100
- *
101
- * Only contains hostname, port, database.
102
- */
103
- export function baseUri(options: NormalizedMongoConnectionConfig) {
104
- return options.uri;
105
- }
@@ -1,14 +1,14 @@
1
- import { putOp, removeOp } from '@core-tests/stream_utils.js';
2
- import { MONGO_STORAGE_FACTORY } from '@core-tests/util.js';
3
- import { BucketStorageFactory } from '@powersync/service-core';
4
1
  import * as crypto from 'crypto';
5
- import * as mongo from 'mongodb';
6
2
  import { setTimeout } from 'node:timers/promises';
7
3
  import { describe, expect, test, vi } from 'vitest';
8
- import { ChangeStreamTestContext } from './change_stream_utils.js';
9
- import { PostImagesOption } from '@module/types/types.js';
10
4
 
11
- type StorageFactory = () => Promise<BucketStorageFactory>;
5
+ import { mongo } from '@powersync/lib-service-mongodb';
6
+ import { storage } from '@powersync/service-core';
7
+ import { test_utils } from '@powersync/service-core-tests';
8
+
9
+ import { PostImagesOption } from '@module/types/types.js';
10
+ import { ChangeStreamTestContext } from './change_stream_utils.js';
11
+ import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';
12
12
 
13
13
  const BASIC_SYNC_RULES = `
14
14
  bucket_definitions:
@@ -18,10 +18,10 @@ bucket_definitions:
18
18
  `;
19
19
 
20
20
  describe('change stream - mongodb', { timeout: 20_000 }, function () {
21
- defineChangeStreamTests(MONGO_STORAGE_FACTORY);
21
+ defineChangeStreamTests(INITIALIZED_MONGO_STORAGE_FACTORY);
22
22
  });
23
23
 
24
- function defineChangeStreamTests(factory: StorageFactory) {
24
+ function defineChangeStreamTests(factory: storage.TestStorageFactory) {
25
25
  test('replicating basic values', async () => {
26
26
  await using context = await ChangeStreamTestContext.open(factory);
27
27
  const { db } = context;
@@ -52,10 +52,10 @@ bucket_definitions:
52
52
  const data = await context.getBucketData('global[]');
53
53
 
54
54
  expect(data).toMatchObject([
55
- putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
56
- putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n }),
57
- putOp('test_data', { id: test_id.toHexString(), description: 'test3' }),
58
- removeOp('test_data', test_id.toHexString())
55
+ test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
56
+ test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n }),
57
+ test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test3' }),
58
+ test_utils.removeOp('test_data', test_id.toHexString())
59
59
  ]);
60
60
  });
61
61
 
@@ -86,8 +86,8 @@ bucket_definitions:
86
86
  const data = await context.getBucketData('global[]');
87
87
 
88
88
  expect(data).toMatchObject([
89
- putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
90
- putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n })
89
+ test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
90
+ test_utils.putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n })
91
91
  ]);
92
92
  });
93
93
 
@@ -125,11 +125,11 @@ bucket_definitions:
125
125
  const data = await context.getBucketData('global[]');
126
126
 
127
127
  expect(data).toMatchObject([
128
- putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
128
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
129
129
  // fullDocument is not available at the point this is replicated, resulting in it treated as a remove
130
- removeOp('test_data', test_id!.toHexString()),
131
- putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
132
- removeOp('test_data', test_id!.toHexString())
130
+ test_utils.removeOp('test_data', test_id!.toHexString()),
131
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
132
+ test_utils.removeOp('test_data', test_id!.toHexString())
133
133
  ]);
134
134
  });
135
135
 
@@ -171,11 +171,11 @@ bucket_definitions:
171
171
  const data = await context.getBucketData('global[]');
172
172
 
173
173
  expect(data).toMatchObject([
174
- putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
174
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
175
175
  // The postImage helps us get this data
176
- putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
177
- putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
178
- removeOp('test_data', test_id!.toHexString())
176
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
177
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
178
+ test_utils.removeOp('test_data', test_id!.toHexString())
179
179
  ]);
180
180
  });
181
181
 
@@ -216,11 +216,11 @@ bucket_definitions:
216
216
  const data = await context.getBucketData('global[]');
217
217
 
218
218
  expect(data).toMatchObject([
219
- putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
219
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
220
220
  // The postImage helps us get this data
221
- putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
222
- putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
223
- removeOp('test_data', test_id!.toHexString())
221
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
222
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
223
+ test_utils.removeOp('test_data', test_id!.toHexString())
224
224
  ]);
225
225
  });
226
226
 
@@ -244,7 +244,7 @@ bucket_definitions:
244
244
 
245
245
  const data = await context.getBucketData('global[]');
246
246
 
247
- expect(data).toMatchObject([putOp('test_DATA', { id: test_id, description: 'test1' })]);
247
+ expect(data).toMatchObject([test_utils.putOp('test_DATA', { id: test_id, description: 'test1' })]);
248
248
  });
249
249
 
250
250
  test('replicating large values', async () => {
@@ -270,10 +270,10 @@ bucket_definitions:
270
270
 
271
271
  const data = await context.getBucketData('global[]');
272
272
  expect(data.slice(0, 1)).toMatchObject([
273
- putOp('test_data', { id: test_id.toHexString(), name: 'test1', description: largeDescription })
273
+ test_utils.putOp('test_data', { id: test_id.toHexString(), name: 'test1', description: largeDescription })
274
274
  ]);
275
275
  expect(data.slice(1)).toMatchObject([
276
- putOp('test_data', { id: test_id.toHexString(), name: 'test2', description: largeDescription })
276
+ test_utils.putOp('test_data', { id: test_id.toHexString(), name: 'test2', description: largeDescription })
277
277
  ]);
278
278
  });
279
279
 
@@ -302,8 +302,8 @@ bucket_definitions:
302
302
  const data = await context.getBucketData('global[]');
303
303
 
304
304
  expect(data).toMatchObject([
305
- putOp('test_data', { id: test_id, description: 'test1' }),
306
- removeOp('test_data', test_id)
305
+ test_utils.putOp('test_data', { id: test_id, description: 'test1' }),
306
+ test_utils.removeOp('test_data', test_id)
307
307
  ]);
308
308
  });
309
309
 
@@ -330,9 +330,9 @@ bucket_definitions:
330
330
  const data = await context.getBucketData('global[]');
331
331
 
332
332
  expect(data).toMatchObject([
333
- putOp('test_data1', { id: test_id, description: 'test1' }),
334
- removeOp('test_data1', test_id),
335
- putOp('test_data2', { id: test_id, description: 'test1' })
333
+ test_utils.putOp('test_data1', { id: test_id, description: 'test1' }),
334
+ test_utils.removeOp('test_data1', test_id),
335
+ test_utils.putOp('test_data2', { id: test_id, description: 'test1' })
336
336
  ]);
337
337
  });
338
338
 
@@ -349,7 +349,7 @@ bucket_definitions:
349
349
  context.startStreaming();
350
350
 
351
351
  const data = await context.getBucketData('global[]');
352
- expect(data).toMatchObject([putOp('test_data', { id: test_id, description: 'test1' })]);
352
+ expect(data).toMatchObject([test_utils.putOp('test_data', { id: test_id, description: 'test1' })]);
353
353
  });
354
354
 
355
355
  test('large record', async () => {
@@ -446,8 +446,8 @@ bucket_definitions:
446
446
 
447
447
  const data = await context.getBucketData('global[]');
448
448
  expect(data).toMatchObject([
449
- putOp('test_data', { id: test_id!.toHexString(), description: 'test1' }),
450
- putOp('test_data', { id: test_id!.toHexString(), description: 'test2' })
449
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test1' }),
450
+ test_utils.putOp('test_data', { id: test_id!.toHexString(), description: 'test2' })
451
451
  ]);
452
452
  });
453
453
 
@@ -1,13 +1,14 @@
1
+ import { mongo } from '@powersync/lib-service-mongodb';
1
2
  import { ActiveCheckpoint, BucketStorageFactory, OpId, SyncRulesBucketStorage } from '@powersync/service-core';
3
+ import { test_utils } from '@powersync/service-core-tests';
2
4
 
3
- import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js';
4
- import { fromAsync } from '@core-tests/stream_utils.js';
5
- import { MongoManager } from '@module/replication/MongoManager.js';
6
5
  import { ChangeStream, ChangeStreamOptions } from '@module/replication/ChangeStream.js';
7
- import * as mongo from 'mongodb';
6
+ import { MongoManager } from '@module/replication/MongoManager.js';
8
7
  import { createCheckpoint } from '@module/replication/MongoRelation.js';
9
8
  import { NormalizedMongoConnectionConfig } from '@module/types/types.js';
10
9
 
10
+ import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js';
11
+
11
12
  export class ChangeStreamTestContext {
12
13
  private _walStream?: ChangeStream;
13
14
  private abortController = new AbortController();
@@ -102,7 +103,7 @@ export class ChangeStreamTestContext {
102
103
  async getBucketsDataBatch(buckets: Record<string, string>, options?: { timeout?: number }) {
103
104
  let checkpoint = await this.getCheckpoint(options);
104
105
  const map = new Map<string, string>(Object.entries(buckets));
105
- return fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
106
+ return test_utils.fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
106
107
  }
107
108
 
108
109
  async getBucketData(
@@ -117,7 +118,7 @@ export class ChangeStreamTestContext {
117
118
  limit: options?.limit,
118
119
  chunkLimitBytes: options?.chunkLimitBytes
119
120
  });
120
- const batches = await fromAsync(batch);
121
+ const batches = await test_utils.fromAsync(batch);
121
122
  return batches[0]?.batch.data ?? [];
122
123
  }
123
124
 
package/test/src/env.ts CHANGED
@@ -1,6 +1,7 @@
1
1
  import { utils } from '@powersync/lib-services-framework';
2
2
 
3
3
  export const env = utils.collectEnvironmentVariables({
4
+ MONGO_TEST_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test'),
4
5
  MONGO_TEST_DATA_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test_data'),
5
6
  CI: utils.type.boolean.default('false'),
6
7
  SLOW_TESTS: utils.type.boolean.default('false')
@@ -1,11 +1,12 @@
1
+ import { mongo } from '@powersync/lib-service-mongodb';
2
+ import { SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules';
3
+ import { describe, expect, test } from 'vitest';
4
+
1
5
  import { MongoRouteAPIAdapter } from '@module/api/MongoRouteAPIAdapter.js';
2
6
  import { ChangeStream } from '@module/replication/ChangeStream.js';
3
7
  import { constructAfterRecord } from '@module/replication/MongoRelation.js';
4
- import { SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules';
5
- import * as mongo from 'mongodb';
6
- import { describe, expect, test } from 'vitest';
7
- import { clearTestDb, connectMongoData, TEST_CONNECTION_OPTIONS } from './util.js';
8
8
  import { PostImagesOption } from '@module/types/types.js';
9
+ import { clearTestDb, connectMongoData, TEST_CONNECTION_OPTIONS } from './util.js';
9
10
 
10
11
  describe('mongo data types', () => {
11
12
  async function setupTable(db: mongo.Db) {
@@ -45,12 +46,52 @@ describe('mongo data types', () => {
45
46
  mongo.ObjectId.createFromHexString('66e834cc91d805df11fa0ecb'),
46
47
  'mydb',
47
48
  { foo: 'bar' }
48
- ),
49
- undefined: undefined
49
+ )
50
50
  }
51
51
  ]);
52
52
  }
53
53
 
54
+ async function insertUndefined(db: mongo.Db, collection: string, array?: boolean) {
55
+ // MongoDB has deprecated the `undefined` value, making it really
56
+ // difficult to insert one into the database.
57
+ // mapReduce is also deprecated, but it's one way to still generate
58
+ // the value.
59
+ const mapInput = db.collection('map_input');
60
+ await mapInput.insertOne({ test: 'test' });
61
+ const fin = array ? `return { result: [undefined] }` : `return { result: undefined }`;
62
+ await db.command({
63
+ mapReduce: 'map_input',
64
+ map: new mongo.Code(`function () {
65
+ // We only need to emit once for a single result:
66
+ emit(5, {});
67
+ }`),
68
+ reduce: new mongo.Code(`function (key, values) {
69
+ // Return an object whose property is explicitly set to undefined
70
+ return undefined;
71
+ }`),
72
+ finalize: new mongo.Code(`function (key, reducedVal) {
73
+ ${fin};
74
+ }`),
75
+ out: { merge: 'map_output' }
76
+ });
77
+
78
+ await db
79
+ .collection('map_output')
80
+ .aggregate([
81
+ { $set: { undefined: '$value.result' } },
82
+ { $project: { undefined: 1 } },
83
+ {
84
+ $merge: {
85
+ into: collection
86
+ }
87
+ }
88
+ ])
89
+ .toArray();
90
+
91
+ await mapInput.drop();
92
+ await db.collection('map_output').drop();
93
+ }
94
+
54
95
  async function insertNested(collection: mongo.Collection) {
55
96
  await collection.insertMany([
56
97
  {
@@ -118,9 +159,11 @@ describe('mongo data types', () => {
118
159
  js: '{"code":"testcode","scope":null}',
119
160
  js2: '{"code":"testcode","scope":{"foo":"bar"}}',
120
161
  pointer: '{"collection":"mycollection","oid":"66e834cc91d805df11fa0ecb","fields":{}}',
121
- pointer2: '{"collection":"mycollection","oid":"66e834cc91d805df11fa0ecb","db":"mydb","fields":{"foo":"bar"}}',
122
- undefined: null
162
+ pointer2: '{"collection":"mycollection","oid":"66e834cc91d805df11fa0ecb","db":"mydb","fields":{"foo":"bar"}}'
123
163
  });
164
+
165
+ // This must specifically be null, and not undefined.
166
+ expect(transformed[4].undefined).toBeNull();
124
167
  }
125
168
 
126
169
  function checkResultsNested(transformed: Record<string, any>[]) {
@@ -158,20 +201,27 @@ describe('mongo data types', () => {
158
201
  js: '[{"code":"testcode","scope":null}]',
159
202
  pointer: '[{"collection":"mycollection","oid":"66e834cc91d805df11fa0ecb","fields":{}}]',
160
203
  minKey: '[null]',
161
- maxKey: '[null]',
204
+ maxKey: '[null]'
205
+ });
206
+
207
+ expect(transformed[4]).toMatchObject({
162
208
  undefined: '[null]'
163
209
  });
164
210
  }
165
211
 
166
212
  test('test direct queries', async () => {
167
213
  const { db, client } = await connectMongoData();
214
+
168
215
  const collection = db.collection('test_data');
169
216
  try {
170
217
  await setupTable(db);
171
-
172
218
  await insert(collection);
219
+ await insertUndefined(db, 'test_data');
173
220
 
174
- const transformed = [...ChangeStream.getQueryData(await db.collection('test_data').find().toArray())];
221
+ const rawResults = await db.collection('test_data').find().toArray();
222
+ // It is tricky to save "undefined" with mongo, so we check that it succeeded.
223
+ expect(rawResults[4].undefined).toBeUndefined();
224
+ const transformed = [...ChangeStream.getQueryData(rawResults)];
175
225
 
176
226
  checkResults(transformed);
177
227
  } finally {
@@ -186,8 +236,11 @@ describe('mongo data types', () => {
186
236
  await setupTable(db);
187
237
 
188
238
  await insertNested(collection);
239
+ await insertUndefined(db, 'test_data_arrays', true);
189
240
 
190
- const transformed = [...ChangeStream.getQueryData(await db.collection('test_data_arrays').find().toArray())];
241
+ const rawResults = await db.collection('test_data_arrays').find().toArray();
242
+ expect(rawResults[4].undefined).toEqual([undefined]);
243
+ const transformed = [...ChangeStream.getQueryData(rawResults)];
191
244
 
192
245
  checkResultsNested(transformed);
193
246
  } finally {
@@ -212,8 +265,9 @@ describe('mongo data types', () => {
212
265
  await stream.tryNext();
213
266
 
214
267
  await insert(collection);
268
+ await insertUndefined(db, 'test_data');
215
269
 
216
- const transformed = await getReplicationTx(stream, 4);
270
+ const transformed = await getReplicationTx(stream, 5);
217
271
 
218
272
  checkResults(transformed);
219
273
  } finally {
@@ -236,8 +290,9 @@ describe('mongo data types', () => {
236
290
  await stream.tryNext();
237
291
 
238
292
  await insertNested(collection);
293
+ await insertUndefined(db, 'test_data_arrays', true);
239
294
 
240
- const transformed = await getReplicationTx(stream, 4);
295
+ const transformed = await getReplicationTx(stream, 5);
241
296
 
242
297
  checkResultsNested(transformed);
243
298
  } finally {
@@ -256,6 +311,7 @@ describe('mongo data types', () => {
256
311
  const collection = db.collection('test_data');
257
312
  await setupTable(db);
258
313
  await insert(collection);
314
+ await insertUndefined(db, 'test_data');
259
315
 
260
316
  const schema = await adapter.getConnectionSchema();
261
317
  const dbSchema = schema.filter((s) => s.name == TEST_CONNECTION_OPTIONS.database)[0];
@@ -440,6 +496,10 @@ bucket_definitions:
440
496
  async function getReplicationTx(replicationStream: mongo.ChangeStream, count: number) {
441
497
  let transformed: SqliteRow[] = [];
442
498
  for await (const doc of replicationStream) {
499
+ // Specifically filter out map_input / map_output collections
500
+ if (!(doc as any)?.ns?.coll?.startsWith('test_data')) {
501
+ continue;
502
+ }
443
503
  transformed.push(constructAfterRecord((doc as any).fullDocument));
444
504
  if (transformed.length == count) {
445
505
  break;
package/test/src/setup.ts CHANGED
@@ -1,7 +1,10 @@
1
1
  import { container } from '@powersync/lib-services-framework';
2
+ import { test_utils } from '@powersync/service-core-tests';
2
3
  import { beforeAll } from 'vitest';
3
4
 
4
- beforeAll(() => {
5
+ beforeAll(async () => {
5
6
  // Executes for every test file
6
7
  container.registerDefaults();
8
+
9
+ await test_utils.initMetrics();
7
10
  });