@powersync/service-module-mysql 0.0.0-dev-20241219145106 → 0.0.0-dev-20250102111825

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registries.
@@ -1,12 +1,10 @@
- import { BucketStorageFactory, Metrics } from '@powersync/service-core';
- import { test_utils } from '@powersync/service-core-tests';
+ import { Metrics } from '@powersync/service-core';
+ import { putOp, removeOp, StorageFactory } from '@powersync/service-core-tests';
  import { v4 as uuid } from 'uuid';
  import { describe, expect, test } from 'vitest';
- import { binlogStreamTest } from './BinlogStreamUtils.js';
+ import { BinlogStreamTestContext } from './BinlogStreamUtils.js';
  import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';

- type StorageFactory = () => Promise<BucketStorageFactory>;
-
  const BASIC_SYNC_RULES = `
  bucket_definitions:
  global:
@@ -23,83 +21,76 @@ describe(
  );

  function defineBinlogStreamTests(factory: StorageFactory) {
- test(
- 'Replicate basic values',
- binlogStreamTest(factory, async (context) => {
- const { connectionManager } = context;
- await context.updateSyncRules(`
+ test('Replicate basic values', async () => {
+ await using context = await BinlogStreamTestContext.open(factory);
+ const { connectionManager } = context;
+ await context.updateSyncRules(`
  bucket_definitions:
  global:
  data:
  - SELECT id, description, num FROM "test_data"`);

- await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, num BIGINT)`);
-
- await context.replicateSnapshot();
-
- const startRowCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const startTxCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
-
- context.startStreaming();
- const testId = uuid();
- await connectionManager.query(
- `INSERT INTO test_data(id, description, num) VALUES('${testId}', 'test1', 1152921504606846976)`
- );
- const data = await context.getBucketData('global[]');
-
- expect(data).toMatchObject([test_utils.putOp('test_data', { id: testId, description: 'test1', num: 1152921504606846976n })]);
- const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const endTxCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
- expect(endRowCount - startRowCount).toEqual(1);
- expect(endTxCount - startTxCount).toEqual(1);
- })
- );
-
- test(
- 'replicating case sensitive table',
- binlogStreamTest(factory, async (context) => {
- const { connectionManager } = context;
- await context.updateSyncRules(`
+ await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, num BIGINT)`);
+
+ await context.replicateSnapshot();
+
+ const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const startTxCount =
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+
+ context.startStreaming();
+ const testId = uuid();
+ await connectionManager.query(
+ `INSERT INTO test_data(id, description, num) VALUES('${testId}', 'test1', 1152921504606846976)`
+ );
+ const data = await context.getBucketData('global[]');
+
+ expect(data).toMatchObject([putOp('test_data', { id: testId, description: 'test1', num: 1152921504606846976n })]);
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const endTxCount =
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ expect(endRowCount - startRowCount).toEqual(1);
+ expect(endTxCount - startTxCount).toEqual(1);
+ });
+
+ test('replicating case sensitive table', async () => {
+ await using context = await BinlogStreamTestContext.open(factory);
+ const { connectionManager } = context;
+ await context.updateSyncRules(`
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "test_DATA"
  `);

- await connectionManager.query(`CREATE TABLE test_DATA (id CHAR(36) PRIMARY KEY, description text)`);
+ await connectionManager.query(`CREATE TABLE test_DATA (id CHAR(36) PRIMARY KEY, description text)`);

- await context.replicateSnapshot();
+ await context.replicateSnapshot();

- const startRowCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const startTxCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const startTxCount =
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

- context.startStreaming();
+ context.startStreaming();

- const testId = uuid();
- await connectionManager.query(`INSERT INTO test_DATA(id, description) VALUES('${testId}','test1')`);
+ const testId = uuid();
+ await connectionManager.query(`INSERT INTO test_DATA(id, description) VALUES('${testId}','test1')`);

- const data = await context.getBucketData('global[]');
+ const data = await context.getBucketData('global[]');

- expect(data).toMatchObject([test_utils.putOp('test_DATA', { id: testId, description: 'test1' })]);
- const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const endTxCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
- expect(endRowCount - startRowCount).toEqual(1);
- expect(endTxCount - startTxCount).toEqual(1);
- })
- );
-
- // TODO: Not supported yet
- // test(
- // 'replicating TRUNCATE',
- // binlogStreamTest(factory, async (context) => {
- // const { connectionManager } = context;
- // const syncRuleContent = `
+ expect(data).toMatchObject([putOp('test_DATA', { id: testId, description: 'test1' })]);
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const endTxCount =
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ expect(endRowCount - startRowCount).toEqual(1);
+ expect(endTxCount - startTxCount).toEqual(1);
+ });
+
+ // TODO: Not supported yet
+ // test('replicating TRUNCATE', async () => {
+ // await using context = await BinlogStreamTestContext.create(factory);
+ // const { connectionManager } = context;
+ // const syncRuleContent = `
  // bucket_definitions:
  // global:
  // data:
@@ -108,199 +99,235 @@ function defineBinlogStreamTests(factory: StorageFactory) {
  // parameters: SELECT id FROM test_data WHERE id = token_parameters.user_id
  // data: []
  // `;
- // await context.updateSyncRules(syncRuleContent);
- // await connectionManager.query(`DROP TABLE IF EXISTS test_data`);
- // await connectionManager.query(
- // `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`
- // );
- //
- // await context.replicateSnapshot();
- // context.startStreaming();
- //
- // const [{ test_id }] = pgwireRows(
- // await connectionManager.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
- // );
- // await connectionManager.query(`TRUNCATE test_data`);
- //
- // const data = await context.getBucketData('global[]');
- //
- // expect(data).toMatchObject([
- // putOp('test_data', { id: test_id, description: 'test1' }),
- // removeOp('test_data', test_id)
- // ]);
- // })
+ // await context.updateSyncRules(syncRuleContent);
+ // await connectionManager.query(`DROP TABLE IF EXISTS test_data`);
+ // await connectionManager.query(
+ // `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`
  // );

- test(
- 'replicating changing primary key',
- binlogStreamTest(factory, async (context) => {
- const { connectionManager } = context;
- await context.updateSyncRules(BASIC_SYNC_RULES);
-
- await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description text)`);
-
- await context.replicateSnapshot();
- context.startStreaming();
-
- const testId1 = uuid();
- await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId1}','test1')`);
-
- const testId2 = uuid();
- await connectionManager.query(
- `UPDATE test_data SET id = '${testId2}', description = 'test2a' WHERE id = '${testId1}'`
- );
-
- // This update may fail replicating with:
- // Error: Update on missing record public.test_data:074a601e-fc78-4c33-a15d-f89fdd4af31d :: {"g":1,"t":"651e9fbe9fec6155895057ec","k":"1a0b34da-fb8c-5e6f-8421-d7a3c5d4df4f"}
- await connectionManager.query(`UPDATE test_data SET description = 'test2b' WHERE id = '${testId2}'`);
-
- // Re-use old id again
- await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId1}', 'test1b')`);
- await connectionManager.query(`UPDATE test_data SET description = 'test1c' WHERE id = '${testId1}'`);
-
- const data = await context.getBucketData('global[]');
- expect(data).toMatchObject([
- // Initial insert
- test_utils.putOp('test_data', { id: testId1, description: 'test1' }),
- // Update id, then description
- test_utils.removeOp('test_data', testId1),
- test_utils.putOp('test_data', { id: testId2, description: 'test2a' }),
- test_utils.putOp('test_data', { id: testId2, description: 'test2b' }),
- // Re-use old id
- test_utils.putOp('test_data', { id: testId1, description: 'test1b' }),
- test_utils.putOp('test_data', { id: testId1, description: 'test1c' })
- ]);
- })
- );
-
- test(
- 'initial sync',
- binlogStreamTest(factory, async (context) => {
- const { connectionManager } = context;
- await context.updateSyncRules(BASIC_SYNC_RULES);
-
- await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description text)`);
-
- const testId = uuid();
- await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId}','test1')`);
-
- await context.replicateSnapshot();
+ // await context.replicateSnapshot();
+ // context.startStreaming();

- const data = await context.getBucketData('global[]');
- expect(data).toMatchObject([test_utils.putOp('test_data', { id: testId, description: 'test1' })]);
- })
- );
-
- test(
- 'snapshot with date values',
- binlogStreamTest(factory, async (context) => {
- const { connectionManager } = context;
- await context.updateSyncRules(`
+ // const [{ test_id }] = pgwireRows(
+ // await connectionManager.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
+ // );
+ // await connectionManager.query(`TRUNCATE test_data`);
+
+ // const data = await context.getBucketData('global[]');
+
+ // expect(data).toMatchObject([
+ // putOp('test_data', { id: test_id, description: 'test1' }),
+ // removeOp('test_data', test_id)
+ // ]);
+ // });
+
+ test('replicating changing primary key', async () => {
+ await using context = await BinlogStreamTestContext.open(factory);
+ const { connectionManager } = context;
+ await context.updateSyncRules(BASIC_SYNC_RULES);
+
+ await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description text)`);
+
+ await context.replicateSnapshot();
+ context.startStreaming();
+
+ const testId1 = uuid();
+ await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId1}','test1')`);
+
+ const testId2 = uuid();
+ await connectionManager.query(
+ `UPDATE test_data SET id = '${testId2}', description = 'test2a' WHERE id = '${testId1}'`
+ );
+
+ // This update may fail replicating with:
+ // Error: Update on missing record public.test_data:074a601e-fc78-4c33-a15d-f89fdd4af31d :: {"g":1,"t":"651e9fbe9fec6155895057ec","k":"1a0b34da-fb8c-5e6f-8421-d7a3c5d4df4f"}
+ await connectionManager.query(`UPDATE test_data SET description = 'test2b' WHERE id = '${testId2}'`);
+
+ // Re-use old id again
+ await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId1}', 'test1b')`);
+ await connectionManager.query(`UPDATE test_data SET description = 'test1c' WHERE id = '${testId1}'`);
+
+ const data = await context.getBucketData('global[]');
+ expect(data).toMatchObject([
+ // Initial insert
+ putOp('test_data', { id: testId1, description: 'test1' }),
+ // Update id, then description
+ removeOp('test_data', testId1),
+ putOp('test_data', { id: testId2, description: 'test2a' }),
+ putOp('test_data', { id: testId2, description: 'test2b' }),
+ // Re-use old id
+ putOp('test_data', { id: testId1, description: 'test1b' }),
+ putOp('test_data', { id: testId1, description: 'test1c' })
+ ]);
+ });
+
+ test('initial sync', async () => {
+ await using context = await BinlogStreamTestContext.open(factory);
+ const { connectionManager } = context;
+ await context.updateSyncRules(BASIC_SYNC_RULES);
+
+ await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description text)`);
+
+ const testId = uuid();
+ await connectionManager.query(`INSERT INTO test_data(id, description) VALUES('${testId}','test1')`);
+
+ await context.replicateSnapshot();
+
+ const data = await context.getBucketData('global[]');
+ expect(data).toMatchObject([putOp('test_data', { id: testId, description: 'test1' })]);
+ });
+
+ test('snapshot with date values', async () => {
+ await using context = await BinlogStreamTestContext.open(factory);
+ const { connectionManager } = context;
+ await context.updateSyncRules(`
  bucket_definitions:
  global:
  data:
  - SELECT * FROM "test_data"
  `);

- await connectionManager.query(
- `CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, date DATE, datetime DATETIME, timestamp TIMESTAMP)`
- );
+ await connectionManager.query(
+ `CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, date DATE, datetime DATETIME, timestamp TIMESTAMP)`
+ );

- const testId = uuid();
- await connectionManager.query(`
+ const testId = uuid();
+ await connectionManager.query(`
  INSERT INTO test_data(id, description, date, datetime, timestamp) VALUES('${testId}','testDates', '2023-03-06', '2023-03-06 15:47', '2023-03-06 15:47')
  `);

- await context.replicateSnapshot();
-
- const data = await context.getBucketData('global[]');
- expect(data).toMatchObject([
- test_utils.putOp('test_data', {
- id: testId,
- description: 'testDates',
- date: `2023-03-06`,
- datetime: '2023-03-06T15:47:00.000Z',
- timestamp: '2023-03-06T15:47:00.000Z'
- })
- ]);
- })
- );
-
- test(
- 'replication with date values',
- binlogStreamTest(factory, async (context) => {
- const { connectionManager } = context;
- await context.updateSyncRules(`
+ await context.replicateSnapshot();
+
+ const data = await context.getBucketData('global[]');
+ expect(data).toMatchObject([
+ putOp('test_data', {
+ id: testId,
+ description: 'testDates',
+ date: `2023-03-06`,
+ datetime: '2023-03-06T15:47:00.000Z',
+ timestamp: '2023-03-06T15:47:00.000Z'
+ })
+ ]);
+ });
+
+ test('replication with date values', async () => {
+ await using context = await BinlogStreamTestContext.open(factory);
+ const { connectionManager } = context;
+ await context.updateSyncRules(`
  bucket_definitions:
  global:
  data:
  - SELECT * FROM "test_data"
  `);

- await connectionManager.query(
- `CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, date DATE, datetime DATETIME, timestamp TIMESTAMP)`
- );
+ await connectionManager.query(
+ `CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, date DATE, datetime DATETIME NULL, timestamp TIMESTAMP NULL)`
+ );

- await context.replicateSnapshot();
+ await context.replicateSnapshot();

- const startRowCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const startTxCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const startTxCount =
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

- context.startStreaming();
+ context.startStreaming();

- const testId = uuid();
- await connectionManager.query(`
+ const testId = uuid();
+ await connectionManager.query(`
  INSERT INTO test_data(id, description, date, datetime, timestamp) VALUES('${testId}','testDates', '2023-03-06', '2023-03-06 15:47', '2023-03-06 15:47')
  `);
+ await connectionManager.query(`UPDATE test_data SET description = ? WHERE id = ?`, ['testUpdated', testId]);
+
+ const data = await context.getBucketData('global[]');
+ expect(data).toMatchObject([
+ putOp('test_data', {
+ id: testId,
+ description: 'testDates',
+ date: `2023-03-06`,
+ datetime: '2023-03-06T15:47:00.000Z',
+ timestamp: '2023-03-06T15:47:00.000Z'
+ }),
+ putOp('test_data', {
+ id: testId,
+ description: 'testUpdated',
+ date: `2023-03-06`,
+ datetime: '2023-03-06T15:47:00.000Z',
+ timestamp: '2023-03-06T15:47:00.000Z'
+ })
+ ]);
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const endTxCount =
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ expect(endRowCount - startRowCount).toEqual(2);
+ expect(endTxCount - startTxCount).toEqual(2);
+ });
+
+ test('table not in sync rules', async () => {
+ await using context = await BinlogStreamTestContext.open(factory);
+ const { connectionManager } = context;
+ await context.updateSyncRules(BASIC_SYNC_RULES);
+
+ await connectionManager.query(`CREATE TABLE test_donotsync (id CHAR(36) PRIMARY KEY, description text)`);
+
+ await context.replicateSnapshot();
+
+ const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const startTxCount =
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+
+ context.startStreaming();
+
+ await connectionManager.query(`INSERT INTO test_donotsync(id, description) VALUES('${uuid()}','test1')`);
+ const data = await context.getBucketData('global[]');
+
+ expect(data).toMatchObject([]);
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const endTxCount =
+ (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+
+ // There was a transaction, but we should not replicate any actual data
+ expect(endRowCount - startRowCount).toEqual(0);
+ expect(endTxCount - startTxCount).toEqual(1);
+ });
+
+ test('Resume replication', async () => {
+ const testId1 = uuid();
+ const testId2 = uuid();
+ {
+ await using context = await BinlogStreamTestContext.open(factory);
+ const { connectionManager } = context;
+ await context.updateSyncRules(`
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description, num FROM "test_data"`);

+ await connectionManager.query(`CREATE TABLE test_data (id CHAR(36) PRIMARY KEY, description TEXT, num BIGINT)`);
+
+ await context.replicateSnapshot();
+ context.startStreaming();
+ await connectionManager.query(
+ `INSERT INTO test_data(id, description, num) VALUES('${testId1}', 'test1', 1152921504606846976)`
+ );
  const data = await context.getBucketData('global[]');
  expect(data).toMatchObject([
- test_utils.putOp('test_data', {
- id: testId,
- description: 'testDates',
- date: `2023-03-06`,
- datetime: '2023-03-06T15:47:00.000Z',
- timestamp: '2023-03-06T15:47:00.000Z'
- })
+ putOp('test_data', { id: testId1, description: 'test1', num: 1152921504606846976n })
  ]);
- const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const endTxCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
- expect(endRowCount - startRowCount).toEqual(1);
- expect(endTxCount - startTxCount).toEqual(1);
- })
- );
-
- test(
- 'table not in sync rules',
- binlogStreamTest(factory, async (context) => {
+ }
+ {
+ await using context = await BinlogStreamTestContext.open(factory, { doNotClear: true });
  const { connectionManager } = context;
- await context.updateSyncRules(BASIC_SYNC_RULES);
-
- await connectionManager.query(`CREATE TABLE test_donotsync (id CHAR(36) PRIMARY KEY, description text)`);
-
+ await context.loadActiveSyncRules();
+ // Does not actually do a snapshot again - just does the required intialization.
  await context.replicateSnapshot();
-
- const startRowCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const startTxCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
-
  context.startStreaming();
-
- await connectionManager.query(`INSERT INTO test_donotsync(id, description) VALUES('${uuid()}','test1')`);
+ await connectionManager.query(`INSERT INTO test_data(id, description, num) VALUES('${testId2}', 'test2', 0)`);
  const data = await context.getBucketData('global[]');

- expect(data).toMatchObject([]);
- const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const endTxCount =
- (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
-
- // There was a transaction, but we should not replicate any actual data
- expect(endRowCount - startRowCount).toEqual(0);
- expect(endTxCount - startTxCount).toEqual(1);
- })
- );
+ expect(data).toMatchObject([
+ putOp('test_data', { id: testId1, description: 'test1', num: 1152921504606846976n }),
+ putOp('test_data', { id: testId2, description: 'test2', num: 0n })
+ ]);
+ }
+ });
  }
@@ -9,36 +9,17 @@ import {
  OplogEntry,
  SyncRulesBucketStorage
  } from '@powersync/service-core';
- import { test_utils } from '@powersync/service-core-tests';
+ import { StorageFactory, test_utils } from '@powersync/service-core-tests';
  import mysqlPromise from 'mysql2/promise';
- import { TEST_CONNECTION_OPTIONS, clearTestDb } from './util.js';
+ import { clearTestDb, TEST_CONNECTION_OPTIONS } from './util.js';

  /**
  * Tests operating on the binlog stream need to configure the stream and manage asynchronous
  * replication, which gets a little tricky.
  *
- * This wraps a test in a function that configures all the context, and tears it down afterward.
+ * This wraps all the context required for testing, and tears it down afterward
+ * by using `await using`.
  */
- export function binlogStreamTest(
- factory: () => Promise<BucketStorageFactory>,
- test: (context: BinlogStreamTestContext) => Promise<void>
- ): () => Promise<void> {
- return async () => {
- const f = await factory();
- const connectionManager = new MySQLConnectionManager(TEST_CONNECTION_OPTIONS, {});
-
- const connection = await connectionManager.getConnection();
- await clearTestDb(connection);
- connection.release();
- const context = new BinlogStreamTestContext(f, connectionManager);
- try {
- await test(context);
- } finally {
- await context.dispose();
- }
- };
- }
-
  export class BinlogStreamTestContext {
  private _binlogStream?: BinLogStream;
  private abortController = new AbortController();
@@ -46,6 +27,18 @@ export class BinlogStreamTestContext {
  public storage?: SyncRulesBucketStorage;
  private replicationDone = false;

+ static async open(factory: StorageFactory, options?: { doNotClear?: boolean }) {
+ const f = await factory({ doNotClear: options?.doNotClear });
+ const connectionManager = new MySQLConnectionManager(TEST_CONNECTION_OPTIONS, {});
+
+ if (!options?.doNotClear) {
+ const connection = await connectionManager.getConnection();
+ await clearTestDb(connection);
+ connection.release();
+ }
+ return new BinlogStreamTestContext(f, connectionManager);
+ }
+
  constructor(
  public factory: BucketStorageFactory,
  public connectionManager: MySQLConnectionManager
@@ -57,6 +50,10 @@ export class BinlogStreamTestContext {
  await this.connectionManager.end();
  }

+ [Symbol.asyncDispose]() {
+ return this.dispose();
+ }
+
  get connectionTag() {
  return this.connectionManager.connectionTag;
  }
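
Note on the hunks above: the test context now relies on JavaScript explicit resource management. `await using` binds a value whose `[Symbol.asyncDispose]()` method is awaited automatically when the enclosing scope exits, even if the test body throws, which is what replaces the try/finally in the removed `binlogStreamTest` wrapper. A minimal sketch of the mechanism (not code from this package; assumes TypeScript 5.2+ with the `esnext` lib and a runtime or polyfill that defines `Symbol.asyncDispose`; `TempResource` is a hypothetical class used only for illustration):

    // Hypothetical resource used only to illustrate `await using`.
    class TempResource implements AsyncDisposable {
      async [Symbol.asyncDispose](): Promise<void> {
        // Awaited automatically when the binding below goes out of scope,
        // including when the block exits via an exception.
        console.log('cleaned up');
      }
    }

    async function run(): Promise<void> {
      {
        await using resource = new TempResource();
        // ... use the resource ...
      } // <- [Symbol.asyncDispose]() is awaited here
    }

This is why BinlogStreamTestContext only needs the small `[Symbol.asyncDispose]()` method delegating to `dispose()`, and each test can open it with `await using context = await BinlogStreamTestContext.open(factory)`.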
@@ -67,6 +64,27 @@ export class BinlogStreamTestContext {
  return this.storage!;
  }

+ async loadNextSyncRules() {
+ const syncRules = await this.factory.getNextSyncRulesContent();
+ if (syncRules == null) {
+ throw new Error(`Next sync rules not available`);
+ }
+
+ this.storage = this.factory.getInstance(syncRules);
+ return this.storage!;
+ }
+
+ async loadActiveSyncRules() {
+ const syncRules = await this.factory.getActiveSyncRulesContent();
+ if (syncRules == null) {
+ throw new Error(`Active sync rules not available`);
+ }
+
+ this.storage = this.factory.getInstance(syncRules);
+ this.replicationDone = true;
+ return this.storage!;
+ }
+
  get binlogStream(): BinLogStream {
  if (this.storage == null) {
  throw new Error('updateSyncRules() first');
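
For context on the new 'Resume replication' test: the first block replicates and streams against a fresh database, and the second block re-opens a context with `{ doNotClear: true }` and calls `loadActiveSyncRules()` so it reuses the sync rules and storage persisted by the first block instead of creating new ones; `replicateSnapshot()` then only performs initialization and streaming resumes from the stored binlog position. A condensed restatement of the test shown in the diff above (not additional API):

    // First context: snapshot, stream, and persist state.
    {
      await using context = await BinlogStreamTestContext.open(factory);
      await context.updateSyncRules(BASIC_SYNC_RULES);
      await context.replicateSnapshot();
      context.startStreaming();
      // ... write rows and assert on context.getBucketData('global[]') ...
    } // disposed here; sync rules and replication state remain in storage

    // Second context: resume against the same storage without clearing the test database.
    {
      await using context = await BinlogStreamTestContext.open(factory, { doNotClear: true });
      await context.loadActiveSyncRules();
      await context.replicateSnapshot(); // initialization only, no second snapshot
      context.startStreaming();
      // ... new writes are replicated from the saved binlog position ...
    }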