@powersync/service-module-mysql 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +28 -0
- package/dist/common/mysql-to-sqlite.js +39 -9
- package/dist/common/mysql-to-sqlite.js.map +1 -1
- package/dist/replication/BinLogStream.js +63 -33
- package/dist/replication/BinLogStream.js.map +1 -1
- package/dist/replication/MySQLConnectionManager.js +9 -1
- package/dist/replication/MySQLConnectionManager.js.map +1 -1
- package/dist/utils/mysql-utils.d.ts +2 -0
- package/dist/utils/mysql-utils.js +3 -0
- package/dist/utils/mysql-utils.js.map +1 -1
- package/package.json +9 -7
- package/src/common/mysql-to-sqlite.ts +35 -9
- package/src/replication/BinLogStream.ts +74 -35
- package/src/replication/MySQLConnectionManager.ts +8 -1
- package/src/replication/zongji/zongji.d.ts +10 -0
- package/src/utils/mysql-utils.ts +5 -0
- package/test/src/BinLogStream.test.ts +257 -230
- package/test/src/BinlogStreamUtils.ts +48 -29
- package/test/src/env.ts +1 -0
- package/test/src/mysql-to-sqlite.test.ts +36 -19
- package/test/src/setup.ts +3 -1
- package/test/src/util.ts +6 -25
- package/tsconfig.tsbuildinfo +1 -1
package/test/src/BinlogStreamUtils.ts CHANGED

@@ -1,44 +1,26 @@
+import { readExecutedGtid } from '@module/common/read-executed-gtid.js';
+import { BinLogStream, BinLogStreamOptions } from '@module/replication/BinLogStream.js';
+import { MySQLConnectionManager } from '@module/replication/MySQLConnectionManager.js';
+import { logger } from '@powersync/lib-services-framework';
 import {
   ActiveCheckpoint,
   BucketStorageFactory,
   OpId,
   OplogEntry,
+  storage,
   SyncRulesBucketStorage
 } from '@powersync/service-core';
-import {
-import { fromAsync } from '@core-tests/stream_utils.js';
-import { BinLogStream, BinLogStreamOptions } from '@module/replication/BinLogStream.js';
-import { MySQLConnectionManager } from '@module/replication/MySQLConnectionManager.js';
+import { test_utils } from '@powersync/service-core-tests';
 import mysqlPromise from 'mysql2/promise';
-import {
-import { logger } from '@powersync/lib-services-framework';
+import { clearTestDb, TEST_CONNECTION_OPTIONS } from './util.js';
 
 /**
  * Tests operating on the binlog stream need to configure the stream and manage asynchronous
  * replication, which gets a little tricky.
  *
- * This wraps
+ * This wraps all the context required for testing, and tears it down afterward
+ * by using `await using`.
  */
-export function binlogStreamTest(
-  factory: () => Promise<BucketStorageFactory>,
-  test: (context: BinlogStreamTestContext) => Promise<void>
-): () => Promise<void> {
-  return async () => {
-    const f = await factory();
-    const connectionManager = new MySQLConnectionManager(TEST_CONNECTION_OPTIONS, {});
-
-    const connection = await connectionManager.getConnection();
-    await clearTestDb(connection);
-    connection.release();
-    const context = new BinlogStreamTestContext(f, connectionManager);
-    try {
-      await test(context);
-    } finally {
-      await context.dispose();
-    }
-  };
-}
-
 export class BinlogStreamTestContext {
   private _binlogStream?: BinLogStream;
   private abortController = new AbortController();
@@ -46,6 +28,18 @@ export class BinlogStreamTestContext {
   public storage?: SyncRulesBucketStorage;
   private replicationDone = false;
 
+  static async open(factory: storage.TestStorageFactory, options?: { doNotClear?: boolean }) {
+    const f = await factory({ doNotClear: options?.doNotClear });
+    const connectionManager = new MySQLConnectionManager(TEST_CONNECTION_OPTIONS, {});
+
+    if (!options?.doNotClear) {
+      const connection = await connectionManager.getConnection();
+      await clearTestDb(connection);
+      connection.release();
+    }
+    return new BinlogStreamTestContext(f, connectionManager);
+  }
+
   constructor(
     public factory: BucketStorageFactory,
     public connectionManager: MySQLConnectionManager
@@ -57,6 +51,10 @@ export class BinlogStreamTestContext {
     await this.connectionManager.end();
   }
 
+  [Symbol.asyncDispose]() {
+    return this.dispose();
+  }
+
   get connectionTag() {
     return this.connectionManager.connectionTag;
   }
@@ -67,6 +65,27 @@ export class BinlogStreamTestContext {
     return this.storage!;
   }
 
+  async loadNextSyncRules() {
+    const syncRules = await this.factory.getNextSyncRulesContent();
+    if (syncRules == null) {
+      throw new Error(`Next sync rules not available`);
+    }
+
+    this.storage = this.factory.getInstance(syncRules);
+    return this.storage!;
+  }
+
+  async loadActiveSyncRules() {
+    const syncRules = await this.factory.getActiveSyncRulesContent();
+    if (syncRules == null) {
+      throw new Error(`Active sync rules not available`);
+    }
+
+    this.storage = this.factory.getInstance(syncRules);
+    this.replicationDone = true;
+    return this.storage!;
+  }
+
   get binlogStream(): BinLogStream {
     if (this.storage == null) {
       throw new Error('updateSyncRules() first');
@@ -113,14 +132,14 @@ export class BinlogStreamTestContext {
   async getBucketsDataBatch(buckets: Record<string, string>, options?: { timeout?: number }) {
     const checkpoint = await this.getCheckpoint(options);
     const map = new Map<string, string>(Object.entries(buckets));
-    return fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
+    return test_utils.fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
   }
 
   async getBucketData(bucket: string, start = '0', options?: { timeout?: number }): Promise<OplogEntry[]> {
     const checkpoint = await this.getCheckpoint(options);
     const map = new Map<string, string>([[bucket, start]]);
     const batch = this.storage!.getBucketDataBatch(checkpoint, map);
-    const batches = await fromAsync(batch);
+    const batches = await test_utils.fromAsync(batch);
     return batches[0]?.batch.data ?? [];
   }
 }
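The removed `binlogStreamTest()` wrapper is replaced by the static `BinlogStreamTestContext.open()` factory plus a `[Symbol.asyncDispose]()` hook, so teardown now runs through explicit resource management instead of a try/finally in the harness. A minimal sketch of how a test might consume this (the test name, sync-rules placeholder, and bucket name are illustrative, not taken from the package):

```ts
import { test } from 'vitest';
import { BinlogStreamTestContext } from './BinlogStreamUtils.js';
import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';

test('replicates into a bucket', async () => {
  // `await using` invokes [Symbol.asyncDispose]() -> dispose() when the block exits,
  // even if an assertion throws, replacing the old try/finally wrapper.
  await using context = await BinlogStreamTestContext.open(INITIALIZED_MONGO_STORAGE_FACTORY);

  // e.g. await context.updateSyncRules(/* sync rules YAML */);
  //      ...write rows via context.connectionManager, start replication,
  //      then assert on await context.getBucketData('global[]')...
});
```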
package/test/src/env.ts CHANGED

@@ -2,6 +2,7 @@ import { utils } from '@powersync/lib-services-framework';
 
 export const env = utils.collectEnvironmentVariables({
   MYSQL_TEST_URI: utils.type.string.default('mysql://root:mypassword@localhost:3306/mydatabase'),
+  MONGO_TEST_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test'),
   CI: utils.type.boolean.default('false'),
   SLOW_TESTS: utils.type.boolean.default('false')
 });
package/test/src/mysql-to-sqlite.test.ts CHANGED

@@ -200,6 +200,7 @@ INSERT INTO test_data (
 
   test('Date types mappings', async () => {
     await setupTable();
+    // Timezone offset is set on the pool to +00:00
     await connectionManager.query(`
     INSERT INTO test_data(date_col, datetime_col, timestamp_col, time_col, year_col)
     VALUES('2023-03-06', '2023-03-06 15:47', '2023-03-06 15:47', '15:47:00', '2023');
@@ -222,23 +223,40 @@
   test('Date types edge cases mappings', async () => {
     await setupTable();
 
-    await connectionManager.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    const connection = await connectionManager.getConnection();
+    try {
+      // Disable strict mode, to allow dates such as '2024-00-00'.
+      await connection.query(`SET SESSION sql_mode=''`);
+      await connection.query(`SET SESSION time_zone='+00:00'`);
+
+      await connection.query(`INSERT INTO test_data(timestamp_col) VALUES('1970-01-01 00:00:01')`);
+      await connection.query(`INSERT INTO test_data(timestamp_col) VALUES('2038-01-19 03:14:07.499')`);
+      await connection.query(`INSERT INTO test_data(datetime_col) VALUES('1000-01-01 00:00:00')`);
+      await connection.query(`INSERT INTO test_data(datetime_col) VALUES('9999-12-31 23:59:59.499')`);
+      await connection.query(`INSERT INTO test_data(datetime_col) VALUES('0000-00-00 00:00:00')`);
+      await connection.query(`INSERT INTO test_data(datetime_col) VALUES('2024-00-00 00:00:00')`);
+      // TODO: This has a mismatch between querying directly and with Zongji.
+      // await connection.query(`INSERT INTO test_data(date_col) VALUES('2024-00-00')`);
+
+      const expectedResults = [
+        { timestamp_col: '1970-01-01T00:00:01.000Z' },
+        { timestamp_col: '2038-01-19T03:14:07.499Z' },
+        { datetime_col: '1000-01-01T00:00:00.000Z' },
+        { datetime_col: '9999-12-31T23:59:59.499Z' },
+        { datetime_col: null },
+        { datetime_col: null }
+        // { date_col: '2023-11-30' } or { date_col: null }?
+      ];
+
+      const databaseRows = await getDatabaseRows(connectionManager, 'test_data');
+      const replicatedRows = await getReplicatedRows(expectedResults.length);
+
+      for (let i = 0; i < expectedResults.length; i++) {
+        expect(databaseRows[i]).toMatchObject(expectedResults[i]);
+        expect(replicatedRows[i]).toMatchObject(expectedResults[i]);
+      }
+    } finally {
+      connection.release;
     }
   });
 
@@ -282,8 +300,7 @@ async function getReplicatedRows(expectedTransactionsCount?: number): Promise<Sq
   const zongji = new ZongJi({
     host: TEST_CONNECTION_OPTIONS.hostname,
     user: TEST_CONNECTION_OPTIONS.username,
-    password: TEST_CONNECTION_OPTIONS.password,
-    timeZone: 'Z' // Ensure no auto timezone manipulation of the dates occur
+    password: TEST_CONNECTION_OPTIONS.password
   });
 
   const completionPromise = new Promise<SqliteRow[]>((resolve, reject) => {
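The date tests above rely on two layers of time-zone pinning: the pool created by `MySQLConnectionManager` is configured for `+00:00` (per the new comment in 'Date types mappings'), and the edge-case test additionally forces the session to UTC and clears `sql_mode` so zero dates such as `'2024-00-00'` are accepted. A rough sketch of the equivalent setup with plain mysql2 (the connection details are illustrative, not the package's configuration code):

```ts
import mysqlPromise from 'mysql2/promise';

const pool = mysqlPromise.createPool({
  host: 'localhost',
  user: 'root',
  password: 'mypassword',
  database: 'mydatabase',
  timezone: '+00:00' // mirrors the pool-level offset the test comment refers to
});

const connection = await pool.getConnection();
try {
  // Session-level overrides used by the edge-case test:
  await connection.query(`SET SESSION sql_mode=''`); // allow zero dates like '0000-00-00'
  await connection.query(`SET SESSION time_zone='+00:00'`); // keep DATETIME/TIMESTAMP values in UTC
} finally {
  connection.release();
}
```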
package/test/src/setup.ts CHANGED

@@ -1,7 +1,9 @@
 import { container } from '@powersync/lib-services-framework';
+import { test_utils } from '@powersync/service-core-tests';
 import { beforeAll } from 'vitest';
 
-beforeAll(() => {
+beforeAll(async () => {
   // Executes for every test file
   container.registerDefaults();
+  await test_utils.initMetrics();
 });
CHANGED
|
@@ -1,9 +1,8 @@
|
|
|
1
1
|
import * as types from '@module/types/types.js';
|
|
2
|
-
import { BucketStorageFactory, Metrics, MongoBucketStorage } from '@powersync/service-core';
|
|
3
|
-
import { env } from './env.js';
|
|
4
|
-
import mysqlPromise from 'mysql2/promise';
|
|
5
|
-
import { connectMongo } from '@core-tests/util.js';
|
|
6
2
|
import { getMySQLVersion, isVersionAtLeast } from '@module/utils/mysql-utils.js';
|
|
3
|
+
import * as mongo_storage from '@powersync/service-module-mongodb-storage';
|
|
4
|
+
import mysqlPromise from 'mysql2/promise';
|
|
5
|
+
import { env } from './env.js';
|
|
7
6
|
|
|
8
7
|
export const TEST_URI = env.MYSQL_TEST_URI;
|
|
9
8
|
|
|
@@ -12,28 +11,10 @@ export const TEST_CONNECTION_OPTIONS = types.normalizeConnectionConfig({
|
|
|
12
11
|
uri: TEST_URI
|
|
13
12
|
});
|
|
14
13
|
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
powersync_instance_id: 'test',
|
|
19
|
-
internal_metrics_endpoint: 'unused.for.tests.com'
|
|
14
|
+
export const INITIALIZED_MONGO_STORAGE_FACTORY = mongo_storage.MongoTestStorageFactoryGenerator({
|
|
15
|
+
url: env.MONGO_TEST_URL,
|
|
16
|
+
isCI: env.CI
|
|
20
17
|
});
|
|
21
|
-
Metrics.getInstance().resetCounters();
|
|
22
|
-
|
|
23
|
-
export type StorageFactory = () => Promise<BucketStorageFactory>;
|
|
24
|
-
|
|
25
|
-
export const INITIALIZED_MONGO_STORAGE_FACTORY: StorageFactory = async () => {
|
|
26
|
-
const db = await connectMongo();
|
|
27
|
-
|
|
28
|
-
// None of the tests insert data into this collection, so it was never created
|
|
29
|
-
if (!(await db.db.listCollections({ name: db.bucket_parameters.collectionName }).hasNext())) {
|
|
30
|
-
await db.db.createCollection('bucket_parameters');
|
|
31
|
-
}
|
|
32
|
-
|
|
33
|
-
await db.clear();
|
|
34
|
-
|
|
35
|
-
return new MongoBucketStorage(db, { slot_name_prefix: 'test_' });
|
|
36
|
-
};
|
|
37
18
|
|
|
38
19
|
export async function clearTestDb(connection: mysqlPromise.Connection) {
|
|
39
20
|
const version = await getMySQLVersion(connection);
|