@powersync/service-module-mysql 0.0.0-dev-20241128134723 → 0.0.0-dev-20241219091224

This diff shows the content of publicly available package versions as released to their respective public registries. It is provided for informational purposes only and reflects the changes between those published versions.
package/CHANGELOG.md CHANGED
@@ -1,12 +1,65 @@
 # @powersync/service-module-mysql
 
-## 0.0.0-dev-20241128134723
+## 0.0.0-dev-20241219091224
 
 ### Patch Changes
 
-- Updated dependencies [74548e4]
-  - @powersync/service-core@0.0.0-dev-20241128134723
-  - @powersync/service-types@0.0.0-dev-20241128134723
+- Updated dependencies [697d44b]
+- Updated dependencies [697d44b]
+- Updated dependencies [a66be3b]
+- Updated dependencies [697d44b]
+  - @powersync/service-core@0.0.0-dev-20241219091224
+  - @powersync/lib-services-framework@0.0.0-dev-20241219091224
+  - @powersync/service-sync-rules@0.0.0-dev-20241219091224
+
+## 0.1.6
+
+### Patch Changes
+
+- Updated dependencies [320e646]
+- Updated dependencies [e3a9343]
+  - @powersync/service-core@0.12.2
+
+## 0.1.5
+
+### Patch Changes
+
+- Updated dependencies [889ac46]
+  - @powersync/service-core@0.12.1
+
+## 0.1.4
+
+### Patch Changes
+
+- Updated dependencies [ebc62ff]
+  - @powersync/service-core@0.12.0
+  - @powersync/service-types@0.5.0
+
+## 0.1.3
+
+### Patch Changes
+
+- Updated dependencies [62e97f3]
+- Updated dependencies [a235c9f]
+- Updated dependencies [8c6ce90]
+  - @powersync/service-core@0.11.0
+  - @powersync/service-sync-rules@0.22.0
+
+## 0.1.2
+
+### Patch Changes
+
+- Updated dependencies [2a4f020]
+  - @powersync/service-core@0.10.1
+
+## 0.1.1
+
+### Patch Changes
+
+- Updated dependencies [2c18ad2]
+- Updated dependencies [35c267f]
+  - @powersync/service-core@0.10.0
+  - @powersync/service-types@0.4.0
 
 ## 0.1.0
 
package/package.json CHANGED
@@ -2,7 +2,7 @@
   "name": "@powersync/service-module-mysql",
   "repository": "https://github.com/powersync-ja/powersync-service",
   "types": "dist/index.d.ts",
-  "version": "0.0.0-dev-20241128134723",
+  "version": "0.0.0-dev-20241219091224",
   "license": "FSL-1.1-Apache-2.0",
   "main": "dist/index.js",
   "type": "module",
@@ -26,19 +26,21 @@
     "semver": "^7.5.4",
     "async": "^3.2.4",
     "mysql2": "^3.11.0",
-    "ts-codec": "^1.2.2",
+    "ts-codec": "^1.3.0",
     "uri-js": "^4.4.1",
     "uuid": "^9.0.1",
-    "@powersync/lib-services-framework": "0.2.0",
-    "@powersync/service-core": "0.0.0-dev-20241128134723",
-    "@powersync/service-sync-rules": "0.21.0",
-    "@powersync/service-types": "0.0.0-dev-20241128134723",
+    "@powersync/lib-services-framework": "0.0.0-dev-20241219091224",
+    "@powersync/service-core": "0.0.0-dev-20241219091224",
+    "@powersync/service-sync-rules": "0.0.0-dev-20241219091224",
+    "@powersync/service-types": "0.5.0",
     "@powersync/service-jsonbig": "0.17.10"
   },
   "devDependencies": {
     "@types/semver": "^7.5.4",
     "@types/async": "^3.2.24",
-    "@types/uuid": "^9.0.4"
+    "@types/uuid": "^9.0.4",
+    "@powersync/service-core-tests": "0.0.0-dev-20241219091224",
+    "@powersync/service-module-mongodb": "0.0.0-dev-20241219091224"
   },
   "scripts": {
     "build": "tsc -b",
@@ -1,9 +1,9 @@
-import { putOp, removeOp } from '@core-tests/stream_utils.js';
-import { MONGO_STORAGE_FACTORY } from '@core-tests/util.js';
 import { BucketStorageFactory, Metrics } from '@powersync/service-core';
+import { test_utils } from '@powersync/service-core-tests';
+import { v4 as uuid } from 'uuid';
 import { describe, expect, test } from 'vitest';
 import { binlogStreamTest } from './BinlogStreamUtils.js';
-import { v4 as uuid } from 'uuid';
+import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';
 
 type StorageFactory = () => Promise<BucketStorageFactory>;
 
@@ -17,7 +17,7 @@ bucket_definitions:
 describe(
   ' Binlog stream - mongodb',
   function () {
-    defineBinlogStreamTests(MONGO_STORAGE_FACTORY);
+    defineBinlogStreamTests(INITIALIZED_MONGO_STORAGE_FACTORY);
   },
   { timeout: 20_000 }
 );
@@ -49,7 +49,7 @@ function defineBinlogStreamTests(factory: StorageFactory) {
     );
     const data = await context.getBucketData('global[]');
 
-    expect(data).toMatchObject([putOp('test_data', { id: testId, description: 'test1', num: 1152921504606846976n })]);
+    expect(data).toMatchObject([test_utils.putOp('test_data', { id: testId, description: 'test1', num: 1152921504606846976n })]);
     const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
     const endTxCount =
       (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
@@ -85,7 +85,7 @@ function defineBinlogStreamTests(factory: StorageFactory) {
 
     const data = await context.getBucketData('global[]');
 
-    expect(data).toMatchObject([putOp('test_DATA', { id: testId, description: 'test1' })]);
+    expect(data).toMatchObject([test_utils.putOp('test_DATA', { id: testId, description: 'test1' })]);
     const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
     const endTxCount =
       (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
@@ -161,14 +161,14 @@ function defineBinlogStreamTests(factory: StorageFactory) {
     const data = await context.getBucketData('global[]');
     expect(data).toMatchObject([
      // Initial insert
-      putOp('test_data', { id: testId1, description: 'test1' }),
+      test_utils.putOp('test_data', { id: testId1, description: 'test1' }),
      // Update id, then description
-      removeOp('test_data', testId1),
-      putOp('test_data', { id: testId2, description: 'test2a' }),
-      putOp('test_data', { id: testId2, description: 'test2b' }),
+      test_utils.removeOp('test_data', testId1),
+      test_utils.putOp('test_data', { id: testId2, description: 'test2a' }),
+      test_utils.putOp('test_data', { id: testId2, description: 'test2b' }),
      // Re-use old id
-      putOp('test_data', { id: testId1, description: 'test1b' }),
-      putOp('test_data', { id: testId1, description: 'test1c' })
+      test_utils.putOp('test_data', { id: testId1, description: 'test1b' }),
+      test_utils.putOp('test_data', { id: testId1, description: 'test1c' })
     ]);
   })
 );
@@ -187,7 +187,7 @@ function defineBinlogStreamTests(factory: StorageFactory) {
     await context.replicateSnapshot();
 
     const data = await context.getBucketData('global[]');
-    expect(data).toMatchObject([putOp('test_data', { id: testId, description: 'test1' })]);
+    expect(data).toMatchObject([test_utils.putOp('test_data', { id: testId, description: 'test1' })]);
   })
 );
 
@@ -215,7 +215,7 @@ function defineBinlogStreamTests(factory: StorageFactory) {
 
     const data = await context.getBucketData('global[]');
     expect(data).toMatchObject([
-      putOp('test_data', {
+      test_utils.putOp('test_data', {
        id: testId,
        description: 'testDates',
        date: `2023-03-06`,
@@ -257,7 +257,7 @@ function defineBinlogStreamTests(factory: StorageFactory) {
 
     const data = await context.getBucketData('global[]');
     expect(data).toMatchObject([
-      putOp('test_data', {
+      test_utils.putOp('test_data', {
        id: testId,
        description: 'testDates',
        date: `2023-03-06`,
@@ -1,3 +1,7 @@
+import { readExecutedGtid } from '@module/common/read-executed-gtid.js';
+import { BinLogStream, BinLogStreamOptions } from '@module/replication/BinLogStream.js';
+import { MySQLConnectionManager } from '@module/replication/MySQLConnectionManager.js';
+import { logger } from '@powersync/lib-services-framework';
 import {
   ActiveCheckpoint,
   BucketStorageFactory,
@@ -5,13 +9,9 @@ import {
   OplogEntry,
   SyncRulesBucketStorage
 } from '@powersync/service-core';
-import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js';
-import { fromAsync } from '@core-tests/stream_utils.js';
-import { BinLogStream, BinLogStreamOptions } from '@module/replication/BinLogStream.js';
-import { MySQLConnectionManager } from '@module/replication/MySQLConnectionManager.js';
+import { test_utils } from '@powersync/service-core-tests';
 import mysqlPromise from 'mysql2/promise';
-import { readExecutedGtid } from '@module/common/read-executed-gtid.js';
-import { logger } from '@powersync/lib-services-framework';
+import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js';
 
 /**
  * Tests operating on the binlog stream need to configure the stream and manage asynchronous
@@ -113,14 +113,14 @@ export class BinlogStreamTestContext {
   async getBucketsDataBatch(buckets: Record<string, string>, options?: { timeout?: number }) {
     const checkpoint = await this.getCheckpoint(options);
     const map = new Map<string, string>(Object.entries(buckets));
-    return fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
+    return test_utils.fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
   }
 
   async getBucketData(bucket: string, start = '0', options?: { timeout?: number }): Promise<OplogEntry[]> {
     const checkpoint = await this.getCheckpoint(options);
     const map = new Map<string, string>([[bucket, start]]);
     const batch = this.storage!.getBucketDataBatch(checkpoint, map);
-    const batches = await fromAsync(batch);
+    const batches = await test_utils.fromAsync(batch);
     return batches[0]?.batch.data ?? [];
   }
 }
package/test/src/env.ts CHANGED
@@ -2,6 +2,7 @@ import { utils } from '@powersync/lib-services-framework';
 
 export const env = utils.collectEnvironmentVariables({
   MYSQL_TEST_URI: utils.type.string.default('mysql://root:mypassword@localhost:3306/mydatabase'),
+  MONGO_TEST_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test'),
   CI: utils.type.boolean.default('false'),
   SLOW_TESTS: utils.type.boolean.default('false')
 });
package/test/src/util.ts CHANGED
@@ -1,9 +1,9 @@
 import * as types from '@module/types/types.js';
-import { BucketStorageFactory, Metrics, MongoBucketStorage } from '@powersync/service-core';
-import { env } from './env.js';
-import mysqlPromise from 'mysql2/promise';
-import { connectMongo } from '@core-tests/util.js';
 import { getMySQLVersion, isVersionAtLeast } from '@module/utils/mysql-utils.js';
+import { BucketStorageFactory, Metrics } from '@powersync/service-core';
+import * as mongo_module from '@powersync/service-module-mongodb';
+import mysqlPromise from 'mysql2/promise';
+import { env } from './env.js';
 
 export const TEST_URI = env.MYSQL_TEST_URI;
 
@@ -32,9 +32,20 @@ export const INITIALIZED_MONGO_STORAGE_FACTORY: StorageFactory = async () => {
 
   await db.clear();
 
-  return new MongoBucketStorage(db, { slot_name_prefix: 'test_' });
+  return new mongo_module.storage.MongoBucketStorage(db, { slot_name_prefix: 'test_' });
 };
 
+export async function connectMongo() {
+  // Short timeout for tests, to fail fast when the server is not available.
+  // Slightly longer timeouts for CI, to avoid arbitrary test failures
+  const client = mongo_module.storage.createMongoClient(env.MONGO_TEST_URL, {
+    connectTimeoutMS: env.CI ? 15_000 : 5_000,
+    socketTimeoutMS: env.CI ? 15_000 : 5_000,
+    serverSelectionTimeoutMS: env.CI ? 15_000 : 2_500
+  });
+  return new mongo_module.storage.PowerSyncMongo(client);
+}
+
 export async function clearTestDb(connection: mysqlPromise.Connection) {
   const version = await getMySQLVersion(connection);
   if (isVersionAtLeast(version, '8.4.0')) {
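
With these changes, the MySQL test utilities obtain their MongoDB-backed bucket storage entirely through @powersync/service-module-mongodb: connectMongo() builds a PowerSyncMongo client from MONGO_TEST_URL with short test timeouts, and INITIALIZED_MONGO_STORAGE_FACTORY clears that database and returns a MongoBucketStorage. A sketch of how a test might exercise the factory, for illustration only (assumes vitest and a MongoDB instance reachable at MONGO_TEST_URL):

// Illustrative only, not part of the published diff.
import { describe, expect, test } from 'vitest';
import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';

describe('mongo storage factory', () => {
  test('returns a cleared MongoBucketStorage', async () => {
    // The factory connects via connectMongo(), clears the test database,
    // and wraps it in mongo_module.storage.MongoBucketStorage.
    const storage = await INITIALIZED_MONGO_STORAGE_FACTORY();
    expect(storage).toBeDefined();
  });
});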