@powersync/service-module-mysql 0.1.6 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -62,7 +62,14 @@ export class MySQLConnectionManager {
62
62
  * @param params
63
63
  */
64
64
  async query(query: string, params?: any[]): Promise<[RowDataPacket[], FieldPacket[]]> {
65
- return this.promisePool.query<RowDataPacket[]>(query, params);
65
+ let connection: mysqlPromise.PoolConnection | undefined;
66
+ try {
67
+ connection = await this.promisePool.getConnection();
68
+ await connection.query(`SET time_zone = '+00:00'`);
69
+ return connection.query<RowDataPacket[]>(query, params);
70
+ } finally {
71
+ connection?.release();
72
+ }
66
73
  }
67
74
 
68
75
  /**
@@ -3,6 +3,7 @@ import mysql from 'mysql2';
3
3
  import mysqlPromise from 'mysql2/promise';
4
4
  import * as types from '../types/types.js';
5
5
  import { coerce, gte } from 'semver';
6
+ import { SourceTable } from '@powersync/service-core';
6
7
 
7
8
  export type RetriedQueryOptions = {
8
9
  connection: mysqlPromise.Connection;
@@ -82,3 +83,7 @@ export function isVersionAtLeast(version: string, minimumVersion: string): boole
82
83
 
83
84
  return gte(coercedVersion!, coercedMinimumVersion!, { loose: true });
84
85
  }
86
+
87
+ export function escapeMysqlTableName(table: SourceTable): string {
88
+ return `\`${table.schema.replaceAll('`', '``')}\`.\`${table.table.replaceAll('`', '``')}\``;
89
+ }
@@ -1,11 +1,9 @@
1
1
  import { putOp, removeOp } from '@core-tests/stream_utils.js';
2
- import { MONGO_STORAGE_FACTORY } from '@core-tests/util.js';
3
- import { BucketStorageFactory, Metrics } from '@powersync/service-core';
4
- import { describe, expect, test } from 'vitest';
5
- import { binlogStreamTest } from './BinlogStreamUtils.js';
2
+ import { MONGO_STORAGE_FACTORY, StorageFactory } from '@core-tests/util.js';
3
+ import { Metrics } from '@powersync/service-core';
6
4
  import { v4 as uuid } from 'uuid';
7
-
8
- type StorageFactory = () => Promise<BucketStorageFactory>;
5
+ import { describe, expect, test } from 'vitest';
6
+ import { BinlogStreamTestContext } from './BinlogStreamUtils.js';
9
7
 
10
8
  const BASIC_SYNC_RULES = `
11
9
  bucket_definitions:
@@ -14,92 +12,81 @@ bucket_definitions:
14
12
  - SELECT id, description FROM "test_data"
15
13
  `;
16
14
 
17
- describe(
18
- ' Binlog stream - mongodb',
19
- function () {
20
- defineBinlogStreamTests(MONGO_STORAGE_FACTORY);
21
- },
22
- { timeout: 20_000 }
23
- );
15
+ describe('Binlog stream - mongodb', { timeout: 20_000 }, function () {
16
+ defineBinlogStreamTests(MONGO_STORAGE_FACTORY);
17
+ });
24
18
 
25
19
  function defineBinlogStreamTests(factory: StorageFactory) {
26
- test(
27
- 'Replicate basic values',
28
- binlogStreamTest(factory, async (context) => {
29
- const { connectionManager } = context;
30
- await context.updateSyncRules(`
20
+ test('Replicate basic values', async () => {
21
+ await using context = await BinlogStreamTestContext.open(factory);
22
+ const { connectionManager } = context;
23
+ await context.updateSyncRules(`
31
24
  bucket_definitions:
32
25
  global:
33
26
  data:
34
27
  - SELECT id, description, num FROM "test_data"`);
35
28
 
36
- await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, num BIGINT)`);
37
-
38
- await context.replicateSnapshot();
39
-
40
- const startRowCount =
41
- (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
42
- const startTxCount =
43
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
44
-
45
- context.startStreaming();
46
- const testId = uuid();
47
- await connectionManager.query(
48
- `INSERT INTO test_data(id, description, num) VALUES('${testId}', 'test1', 1152921504606846976)`
49
- );
50
- const data = await context.getBucketData('global[]');
51
-
52
- expect(data).toMatchObject([putOp('test_data', { id: testId, description: 'test1', num: 1152921504606846976n })]);
53
- const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
54
- const endTxCount =
55
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
56
- expect(endRowCount - startRowCount).toEqual(1);
57
- expect(endTxCount - startTxCount).toEqual(1);
58
- })
59
- );
60
-
61
- test(
62
- 'replicating case sensitive table',
63
- binlogStreamTest(factory, async (context) => {
64
- const { connectionManager } = context;
65
- await context.updateSyncRules(`
29
+ await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, num BIGINT)`);
30
+
31
+ await context.replicateSnapshot();
32
+
33
+ const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
34
+ const startTxCount =
35
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
36
+
37
+ context.startStreaming();
38
+ const testId = uuid();
39
+ await connectionManager.query(
40
+ `INSERT INTO test_data(id, description, num) VALUES('${testId}', 'test1', 1152921504606846976)`
41
+ );
42
+ const data = await context.getBucketData('global[]');
43
+
44
+ expect(data).toMatchObject([putOp('test_data', { id: testId, description: 'test1', num: 1152921504606846976n })]);
45
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
46
+ const endTxCount =
47
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
48
+ expect(endRowCount - startRowCount).toEqual(1);
49
+ expect(endTxCount - startTxCount).toEqual(1);
50
+ });
51
+
52
+ test('replicating case sensitive table', async () => {
53
+ await using context = await BinlogStreamTestContext.open(factory);
54
+ const { connectionManager } = context;
55
+ await context.updateSyncRules(`
66
56
  bucket_definitions:
67
57
  global:
68
58
  data:
69
59
  - SELECT id, description FROM "test_DATA"
70
60
  `);
71
61
 
72
- await connectionManager.query(`CREATE TABLE test_DATA (id CHAR(36) PRIMARY KEY, description text)`);
62
+ await connectionManager.query(`CREATE TABLE test_DATA (id CHAR(36) PRIMARY KEY, description text)`);
73
63
 
74
- await context.replicateSnapshot();
64
+ await context.replicateSnapshot();
75
65
 
76
- const startRowCount =
77
- (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
78
- const startTxCount =
79
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
66
+ const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
67
+ const startTxCount =
68
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
80
69
 
81
- context.startStreaming();
70
+ context.startStreaming();
82
71
 
83
- const testId = uuid();
84
- await connectionManager.query(`INSERT INTO test_DATA(id, description) VALUES('${testId}','test1')`);
72
+ const testId = uuid();
73
+ await connectionManager.query(`INSERT INTO test_DATA(id, description) VALUES('${testId}','test1')`);
85
74
 
86
- const data = await context.getBucketData('global[]');
75
+ const data = await context.getBucketData('global[]');
76
+
77
+ expect(data).toMatchObject([putOp('test_DATA', { id: testId, description: 'test1' })]);
78
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
79
+ const endTxCount =
80
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
81
+ expect(endRowCount - startRowCount).toEqual(1);
82
+ expect(endTxCount - startTxCount).toEqual(1);
83
+ });
87
84
 
88
- expect(data).toMatchObject([putOp('test_DATA', { id: testId, description: 'test1' })]);
89
- const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
90
- const endTxCount =
91
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
92
- expect(endRowCount - startRowCount).toEqual(1);
93
- expect(endTxCount - startTxCount).toEqual(1);
94
- })
95
- );
96
-
97
- // TODO: Not supported yet
98
- // test(
99
- // 'replicating TRUNCATE',
100
- // binlogStreamTest(factory, async (context) => {
101
- // const { connectionManager } = context;
102
- // const syncRuleContent = `
85
+ // TODO: Not supported yet
86
+ // test('replicating TRUNCATE', async () => {
87
+ // await using context = await BinlogStreamTestContext.create(factory);
88
+ // const { connectionManager } = context;
89
+ // const syncRuleContent = `
103
90
  // bucket_definitions:
104
91
  // global:
105
92
  // data:
@@ -108,199 +95,235 @@ function defineBinlogStreamTests(factory: StorageFactory) {
108
95
  // parameters: SELECT id FROM test_data WHERE id = token_parameters.user_id
109
96
  // data: []
110
97
  // `;
111
- // await context.updateSyncRules(syncRuleContent);
112
- // await connectionManager.query(`DROP TABLE IF EXISTS test_data`);
113
- // await connectionManager.query(
114
- // `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`
115
- // );
116
- //
117
- // await context.replicateSnapshot();
118
- // context.startStreaming();
119
- //
120
- // const [{ test_id }] = pgwireRows(
121
- // await connectionManager.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
122
- // );
123
- // await connectionManager.query(`TRUNCATE test_data`);
124
- //
125
- // const data = await context.getBucketData('global[]');
126
- //
127
- // expect(data).toMatchObject([
128
- // putOp('test_data', { id: test_id, description: 'test1' }),
129
- // removeOp('test_data', test_id)
130
- // ]);
131
- // })
98
+ // await context.updateSyncRules(syncRuleContent);
99
+ // await connectionManager.query(`DROP TABLE IF EXISTS test_data`);
100
+ // await connectionManager.query(
101
+ // `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`
132
102
  // );
133
103
 
134
- test(
135
- 'replicating changing primary key',
136
- binlogStreamTest(factory, async (context) => {
137
- const { connectionManager } = context;
138
- await context.updateSyncRules(BASIC_SYNC_RULES);
139
-
140
- await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description text)`);
141
-
142
- await context.replicateSnapshot();
143
- context.startStreaming();
144
-
145
- const testId1 = uuid();
146
- await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId1}','test1')`);
147
-
148
- const testId2 = uuid();
149
- await connectionManager.query(
150
- `UPDATE test_data SET id = '${testId2}', description = 'test2a' WHERE id = '${testId1}'`
151
- );
152
-
153
- // This update may fail replicating with:
154
- // Error: Update on missing record public.test_data:074a601e-fc78-4c33-a15d-f89fdd4af31d :: {"g":1,"t":"651e9fbe9fec6155895057ec","k":"1a0b34da-fb8c-5e6f-8421-d7a3c5d4df4f"}
155
- await connectionManager.query(`UPDATE test_data SET description = 'test2b' WHERE id = '${testId2}'`);
104
+ // await context.replicateSnapshot();
105
+ // context.startStreaming();
156
106
 
157
- // Re-use old id again
158
- await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId1}', 'test1b')`);
159
- await connectionManager.query(`UPDATE test_data SET description = 'test1c' WHERE id = '${testId1}'`);
160
-
161
- const data = await context.getBucketData('global[]');
162
- expect(data).toMatchObject([
163
- // Initial insert
164
- putOp('test_data', { id: testId1, description: 'test1' }),
165
- // Update id, then description
166
- removeOp('test_data', testId1),
167
- putOp('test_data', { id: testId2, description: 'test2a' }),
168
- putOp('test_data', { id: testId2, description: 'test2b' }),
169
- // Re-use old id
170
- putOp('test_data', { id: testId1, description: 'test1b' }),
171
- putOp('test_data', { id: testId1, description: 'test1c' })
172
- ]);
173
- })
174
- );
175
-
176
- test(
177
- 'initial sync',
178
- binlogStreamTest(factory, async (context) => {
179
- const { connectionManager } = context;
180
- await context.updateSyncRules(BASIC_SYNC_RULES);
181
-
182
- await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description text)`);
183
-
184
- const testId = uuid();
185
- await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId}','test1')`);
186
-
187
- await context.replicateSnapshot();
188
-
189
- const data = await context.getBucketData('global[]');
190
- expect(data).toMatchObject([putOp('test_data', { id: testId, description: 'test1' })]);
191
- })
192
- );
193
-
194
- test(
195
- 'snapshot with date values',
196
- binlogStreamTest(factory, async (context) => {
197
- const { connectionManager } = context;
198
- await context.updateSyncRules(`
107
+ // const [{ test_id }] = pgwireRows(
108
+ // await connectionManager.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
109
+ // );
110
+ // await connectionManager.query(`TRUNCATE test_data`);
111
+
112
+ // const data = await context.getBucketData('global[]');
113
+
114
+ // expect(data).toMatchObject([
115
+ // putOp('test_data', { id: test_id, description: 'test1' }),
116
+ // removeOp('test_data', test_id)
117
+ // ]);
118
+ // });
119
+
120
+ test('replicating changing primary key', async () => {
121
+ await using context = await BinlogStreamTestContext.open(factory);
122
+ const { connectionManager } = context;
123
+ await context.updateSyncRules(BASIC_SYNC_RULES);
124
+
125
+ await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description text)`);
126
+
127
+ await context.replicateSnapshot();
128
+ context.startStreaming();
129
+
130
+ const testId1 = uuid();
131
+ await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId1}','test1')`);
132
+
133
+ const testId2 = uuid();
134
+ await connectionManager.query(
135
+ `UPDATE test_data SET id = '${testId2}', description = 'test2a' WHERE id = '${testId1}'`
136
+ );
137
+
138
+ // This update may fail replicating with:
139
+ // Error: Update on missing record public.test_data:074a601e-fc78-4c33-a15d-f89fdd4af31d :: {"g":1,"t":"651e9fbe9fec6155895057ec","k":"1a0b34da-fb8c-5e6f-8421-d7a3c5d4df4f"}
140
+ await connectionManager.query(`UPDATE test_data SET description = 'test2b' WHERE id = '${testId2}'`);
141
+
142
+ // Re-use old id again
143
+ await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId1}', 'test1b')`);
144
+ await connectionManager.query(`UPDATE test_data SET description = 'test1c' WHERE id = '${testId1}'`);
145
+
146
+ const data = await context.getBucketData('global[]');
147
+ expect(data).toMatchObject([
148
+ // Initial insert
149
+ putOp('test_data', { id: testId1, description: 'test1' }),
150
+ // Update id, then description
151
+ removeOp('test_data', testId1),
152
+ putOp('test_data', { id: testId2, description: 'test2a' }),
153
+ putOp('test_data', { id: testId2, description: 'test2b' }),
154
+ // Re-use old id
155
+ putOp('test_data', { id: testId1, description: 'test1b' }),
156
+ putOp('test_data', { id: testId1, description: 'test1c' })
157
+ ]);
158
+ });
159
+
160
+ test('initial sync', async () => {
161
+ await using context = await BinlogStreamTestContext.open(factory);
162
+ const { connectionManager } = context;
163
+ await context.updateSyncRules(BASIC_SYNC_RULES);
164
+
165
+ await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description text)`);
166
+
167
+ const testId = uuid();
168
+ await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId}','test1')`);
169
+
170
+ await context.replicateSnapshot();
171
+
172
+ const data = await context.getBucketData('global[]');
173
+ expect(data).toMatchObject([putOp('test_data', { id: testId, description: 'test1' })]);
174
+ });
175
+
176
+ test('snapshot with date values', async () => {
177
+ await using context = await BinlogStreamTestContext.open(factory);
178
+ const { connectionManager } = context;
179
+ await context.updateSyncRules(`
199
180
  bucket_definitions:
200
181
  global:
201
182
  data:
202
183
  - SELECT * FROM "test_data"
203
184
  `);
204
185
 
205
- await connectionManager.query(
206
- `CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, date DATE, datetime DATETIME, timestamp TIMESTAMP)`
207
- );
186
+ await connectionManager.query(
187
+ `CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, date DATE, datetime DATETIME, timestamp TIMESTAMP)`
188
+ );
208
189
 
209
- const testId = uuid();
210
- await connectionManager.query(`
190
+ const testId = uuid();
191
+ await connectionManager.query(`
211
192
  INSERT INTO test_data(id, description, date, datetime, timestamp) VALUES('${testId}','testDates', '2023-03-06', '2023-03-06 15:47', '2023-03-06 15:47')
212
193
  `);
213
194
 
214
- await context.replicateSnapshot();
215
-
216
- const data = await context.getBucketData('global[]');
217
- expect(data).toMatchObject([
218
- putOp('test_data', {
219
- id: testId,
220
- description: 'testDates',
221
- date: `2023-03-06`,
222
- datetime: '2023-03-06T15:47:00.000Z',
223
- timestamp: '2023-03-06T15:47:00.000Z'
224
- })
225
- ]);
226
- })
227
- );
228
-
229
- test(
230
- 'replication with date values',
231
- binlogStreamTest(factory, async (context) => {
232
- const { connectionManager } = context;
233
- await context.updateSyncRules(`
195
+ await context.replicateSnapshot();
196
+
197
+ const data = await context.getBucketData('global[]');
198
+ expect(data).toMatchObject([
199
+ putOp('test_data', {
200
+ id: testId,
201
+ description: 'testDates',
202
+ date: `2023-03-06`,
203
+ datetime: '2023-03-06T15:47:00.000Z',
204
+ timestamp: '2023-03-06T15:47:00.000Z'
205
+ })
206
+ ]);
207
+ });
208
+
209
+ test('replication with date values', async () => {
210
+ await using context = await BinlogStreamTestContext.open(factory);
211
+ const { connectionManager } = context;
212
+ await context.updateSyncRules(`
234
213
  bucket_definitions:
235
214
  global:
236
215
  data:
237
216
  - SELECT * FROM "test_data"
238
217
  `);
239
218
 
240
- await connectionManager.query(
241
- `CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, date DATE, datetime DATETIME, timestamp TIMESTAMP)`
242
- );
219
+ await connectionManager.query(
220
+ `CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, date DATE, datetime DATETIME NULL, timestamp TIMESTAMP NULL)`
221
+ );
243
222
 
244
- await context.replicateSnapshot();
223
+ await context.replicateSnapshot();
245
224
 
246
- const startRowCount =
247
- (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
248
- const startTxCount =
249
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
225
+ const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
226
+ const startTxCount =
227
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
250
228
 
251
- context.startStreaming();
229
+ context.startStreaming();
252
230
 
253
- const testId = uuid();
254
- await connectionManager.query(`
231
+ const testId = uuid();
232
+ await connectionManager.query(`
255
233
  INSERT INTO test_data(id, description, date, datetime, timestamp) VALUES('${testId}','testDates', '2023-03-06', '2023-03-06 15:47', '2023-03-06 15:47')
256
234
  `);
235
+ await connectionManager.query(`UPDATE test_data SET description = ? WHERE id = ?`, ['testUpdated', testId]);
236
+
237
+ const data = await context.getBucketData('global[]');
238
+ expect(data).toMatchObject([
239
+ putOp('test_data', {
240
+ id: testId,
241
+ description: 'testDates',
242
+ date: `2023-03-06`,
243
+ datetime: '2023-03-06T15:47:00.000Z',
244
+ timestamp: '2023-03-06T15:47:00.000Z'
245
+ }),
246
+ putOp('test_data', {
247
+ id: testId,
248
+ description: 'testUpdated',
249
+ date: `2023-03-06`,
250
+ datetime: '2023-03-06T15:47:00.000Z',
251
+ timestamp: '2023-03-06T15:47:00.000Z'
252
+ })
253
+ ]);
254
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
255
+ const endTxCount =
256
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
257
+ expect(endRowCount - startRowCount).toEqual(2);
258
+ expect(endTxCount - startTxCount).toEqual(2);
259
+ });
260
+
261
+ test('table not in sync rules', async () => {
262
+ await using context = await BinlogStreamTestContext.open(factory);
263
+ const { connectionManager } = context;
264
+ await context.updateSyncRules(BASIC_SYNC_RULES);
265
+
266
+ await connectionManager.query(`CREATE TABLE test_donotsync (id CHAR(36) PRIMARY KEY, description text)`);
267
+
268
+ await context.replicateSnapshot();
269
+
270
+ const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
271
+ const startTxCount =
272
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
273
+
274
+ context.startStreaming();
275
+
276
+ await connectionManager.query(`INSERT INTO test_donotsync(id, description) VALUES('${uuid()}','test1')`);
277
+ const data = await context.getBucketData('global[]');
278
+
279
+ expect(data).toMatchObject([]);
280
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
281
+ const endTxCount =
282
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
283
+
284
+ // There was a transaction, but we should not replicate any actual data
285
+ expect(endRowCount - startRowCount).toEqual(0);
286
+ expect(endTxCount - startTxCount).toEqual(1);
287
+ });
288
+
289
+ test('Resume replication', async () => {
290
+ const testId1 = uuid();
291
+ const testId2 = uuid();
292
+ {
293
+ await using context = await BinlogStreamTestContext.open(factory);
294
+ const { connectionManager } = context;
295
+ await context.updateSyncRules(`
296
+ bucket_definitions:
297
+ global:
298
+ data:
299
+ - SELECT id, description, num FROM "test_data"`);
300
+
301
+ await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, num BIGINT)`);
257
302
 
303
+ await context.replicateSnapshot();
304
+ context.startStreaming();
305
+ await connectionManager.query(
306
+ `INSERT INTO test_data(id, description, num) VALUES('${testId1}', 'test1', 1152921504606846976)`
307
+ );
258
308
  const data = await context.getBucketData('global[]');
259
309
  expect(data).toMatchObject([
260
- putOp('test_data', {
261
- id: testId,
262
- description: 'testDates',
263
- date: `2023-03-06`,
264
- datetime: '2023-03-06T15:47:00.000Z',
265
- timestamp: '2023-03-06T15:47:00.000Z'
266
- })
310
+ putOp('test_data', { id: testId1, description: 'test1', num: 1152921504606846976n })
267
311
  ]);
268
- const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
269
- const endTxCount =
270
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
271
- expect(endRowCount - startRowCount).toEqual(1);
272
- expect(endTxCount - startTxCount).toEqual(1);
273
- })
274
- );
275
-
276
- test(
277
- 'table not in sync rules',
278
- binlogStreamTest(factory, async (context) => {
312
+ }
313
+ {
314
+ await using context = await BinlogStreamTestContext.open(factory, { doNotClear: true });
279
315
  const { connectionManager } = context;
280
- await context.updateSyncRules(BASIC_SYNC_RULES);
281
-
282
- await connectionManager.query(`CREATE TABLE test_donotsync (id CHAR(36) PRIMARY KEY, description text)`);
283
-
316
+ await context.loadActiveSyncRules();
317
+ // Does not actually do a snapshot again - just does the required initialization.
284
318
  await context.replicateSnapshot();
285
-
286
- const startRowCount =
287
- (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
288
- const startTxCount =
289
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
290
-
291
319
  context.startStreaming();
292
-
293
- await connectionManager.query(`INSERT INTO test_donotsync(id, description) VALUES('${uuid()}','test1')`);
320
+ await connectionManager.query(`INSERT INTO test_data(id, description, num) VALUES('${testId2}', 'test2', 0)`);
294
321
  const data = await context.getBucketData('global[]');
295
322
 
296
- expect(data).toMatchObject([]);
297
- const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
298
- const endTxCount =
299
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
300
-
301
- // There was a transaction, but we should not replicate any actual data
302
- expect(endRowCount - startRowCount).toEqual(0);
303
- expect(endTxCount - startTxCount).toEqual(1);
304
- })
305
- );
323
+ expect(data).toMatchObject([
324
+ putOp('test_data', { id: testId1, description: 'test1', num: 1152921504606846976n }),
325
+ putOp('test_data', { id: testId2, description: 'test2', num: 0n })
326
+ ]);
327
+ }
328
+ });
306
329
  }