@powersync/service-module-postgres 0.0.4 → 0.2.0
This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +31 -0
- package/dist/api/PostgresRouteAPIAdapter.d.ts +6 -2
- package/dist/api/PostgresRouteAPIAdapter.js +24 -10
- package/dist/api/PostgresRouteAPIAdapter.js.map +1 -1
- package/dist/module/PostgresModule.js +9 -2
- package/dist/module/PostgresModule.js.map +1 -1
- package/dist/replication/PgManager.js +15 -1
- package/dist/replication/PgManager.js.map +1 -1
- package/dist/replication/WalStream.d.ts +9 -2
- package/dist/replication/WalStream.js +185 -151
- package/dist/replication/WalStream.js.map +1 -1
- package/package.json +5 -5
- package/src/api/PostgresRouteAPIAdapter.ts +31 -12
- package/src/module/PostgresModule.ts +11 -2
- package/src/replication/PgManager.ts +19 -1
- package/src/replication/WalStream.ts +205 -165
- package/test/src/large_batch.test.ts +268 -148
- package/test/src/schema_changes.test.ts +562 -513
- package/test/src/slow_tests.test.ts +2 -1
- package/test/src/util.ts +3 -1
- package/test/src/validation.test.ts +45 -48
- package/test/src/wal_stream.test.ts +224 -249
- package/test/src/wal_stream_utils.ts +61 -22
- package/tsconfig.tsbuildinfo +1 -1
- package/test/src/__snapshots__/pg_test.test.ts.snap +0 -256
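
Most of the churn in this release is in the WAL-stream tests: the old `walStreamTest(factory, async (context) => { ... })` wrapper has been replaced by a `WalStreamTestContext` (exported from `wal_stream_utils.ts`) that each test opens itself and cleans up via TypeScript's explicit resource management (`await using`). As a minimal sketch of that pattern, assuming a hypothetical `ExampleContext` rather than the real `WalStreamTestContext` internals:

```ts
import { test } from 'vitest';

// Minimal sketch of the `await using` disposal pattern the refactored tests
// rely on (requires TypeScript 5.2+ with the "esnext.disposable" lib).
// ExampleContext is illustrative; the real WalStreamTestContext lives in
// package/test/src/wal_stream_utils.ts.
class ExampleContext implements AsyncDisposable {
  static async open(): Promise<ExampleContext> {
    // Acquire resources here: database pool, bucket storage, etc.
    return new ExampleContext();
  }

  async [Symbol.asyncDispose](): Promise<void> {
    // Runs automatically when the enclosing `await using` scope exits,
    // even if the test body throws; this replaces the wrapper's cleanup.
  }
}

test('example', async () => {
  await using context = await ExampleContext.open();
  // ... test body; `context` is disposed when the test returns or throws.
});
```

This removes the per-test callback wrapper while still guaranteeing that pools and replication streams are torn down, as is visible throughout the diff below.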
package/test/src/wal_stream.test.ts (unrecoverable removed lines are elided as `…`):

```diff
@@ -4,7 +4,7 @@ import { BucketStorageFactory, Metrics } from '@powersync/service-core';
 import { pgwireRows } from '@powersync/service-jpgwire';
 import * as crypto from 'crypto';
 import { describe, expect, test } from 'vitest';
-import {
+import { WalStreamTestContext } from './wal_stream_utils.js';
 
 type StorageFactory = () => Promise<BucketStorageFactory>;
 
@@ -15,141 +15,126 @@ bucket_definitions:
       - SELECT id, description FROM "test_data"
 `;
 
-describe(
-  'wal stream - mongodb',
-  function () {
-    defineWalStreamTests(MONGO_STORAGE_FACTORY);
-  },
-  { timeout: 20_000 }
-);
+describe('wal stream - mongodb', { timeout: 20_000 }, function () {
+  defineWalStreamTests(MONGO_STORAGE_FACTORY);
+});
 
 function defineWalStreamTests(factory: StorageFactory) {
-  test(
-    'replicating basic values',
-    walStreamTest(factory, async (context) => {
-      const { pool } = context;
-      await context.updateSyncRules(`
+  test('replicating basic values', async () => {
+    await using context = await WalStreamTestContext.open(factory);
+    const { pool } = context;
+    await context.updateSyncRules(`
 bucket_definitions:
   global:
     data:
      - SELECT id, description, num FROM "test_data"`);
 
- …
+    await pool.query(`DROP TABLE IF EXISTS test_data`);
+    await pool.query(
+      `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text, num int8)`
+    );
+
+    await context.replicateSnapshot();
+
+    const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+    const startTxCount =
+      (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+
+    context.startStreaming();
+
+    const [{ test_id }] = pgwireRows(
       await pool.query(
-        `
-      )
- …
-      const data = await context.getBucketData('global[]');
- …
-      expect(data).toMatchObject([
-        putOp('test_data', { id: test_id, description: 'test1', num: 1152921504606846976n })
-      ]);
-      const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
-      const endTxCount =
-        (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
-      expect(endRowCount - startRowCount).toEqual(1);
-      expect(endTxCount - startTxCount).toEqual(1);
-    })
-  );
- …
-  test(
-    'replicating case sensitive table',
-    walStreamTest(factory, async (context) => {
-      const { pool } = context;
-      await context.updateSyncRules(`
+        `INSERT INTO test_data(description, num) VALUES('test1', 1152921504606846976) returning id as test_id`
+      )
+    );
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([putOp('test_data', { id: test_id, description: 'test1', num: 1152921504606846976n })]);
+    const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+    const endTxCount =
+      (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+    expect(endRowCount - startRowCount).toEqual(1);
+    expect(endTxCount - startTxCount).toEqual(1);
+  });
+
+  test('replicating case sensitive table', async () => {
+    await using context = await WalStreamTestContext.open(factory);
+    const { pool } = context;
+    await context.updateSyncRules(`
 bucket_definitions:
   global:
    data:
      - SELECT id, description FROM "test_DATA"
 `);
 
- …
+    await pool.query(`DROP TABLE IF EXISTS "test_DATA"`);
+    await pool.query(`CREATE TABLE "test_DATA"(id uuid primary key default uuid_generate_v4(), description text)`);
 
- …
+    await context.replicateSnapshot();
 
- …
-        (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+    const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+    const startTxCount =
+      (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
 
- …
+    context.startStreaming();
 
- …
+    const [{ test_id }] = pgwireRows(
+      await pool.query(`INSERT INTO "test_DATA"(description) VALUES('test1') returning id as test_id`)
+    );
 
- …
+    const data = await context.getBucketData('global[]');
 
- …
-  );
+    expect(data).toMatchObject([putOp('test_DATA', { id: test_id, description: 'test1' })]);
+    const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+    const endTxCount =
+      (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+    expect(endRowCount - startRowCount).toEqual(1);
+    expect(endTxCount - startTxCount).toEqual(1);
+  });
 
-  test(
- …
-      await context.updateSyncRules(`
+  test('replicating TOAST values', async () => {
+    await using context = await WalStreamTestContext.open(factory);
+    const { pool } = context;
+    await context.updateSyncRules(`
 bucket_definitions:
   global:
    data:
      - SELECT id, name, description FROM "test_data"
 `);
 
- …
-      const { pool } = context;
-      const syncRuleContent = `
+    await pool.query(`DROP TABLE IF EXISTS test_data`);
+    await pool.query(
+      `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), name text, description text)`
+    );
+
+    await context.replicateSnapshot();
+    context.startStreaming();
+
+    // Must be > 8kb after compression
+    const largeDescription = crypto.randomBytes(20_000).toString('hex');
+    const [{ test_id }] = pgwireRows(
+      await pool.query({
+        statement: `INSERT INTO test_data(name, description) VALUES('test1', $1) returning id as test_id`,
+        params: [{ type: 'varchar', value: largeDescription }]
+      })
+    );
+
+    await pool.query(`UPDATE test_data SET name = 'test2' WHERE id = '${test_id}'`);
+
+    const data = await context.getBucketData('global[]');
+    expect(data.slice(0, 1)).toMatchObject([
+      putOp('test_data', { id: test_id, name: 'test1', description: largeDescription })
+    ]);
+    expect(data.slice(1)).toMatchObject([
+      putOp('test_data', { id: test_id, name: 'test2', description: largeDescription })
+    ]);
+  });
+
+  test('replicating TRUNCATE', async () => {
+    await using context = await WalStreamTestContext.open(factory);
+    const { pool } = context;
+    const syncRuleContent = `
 bucket_definitions:
   global:
    data:
@@ -158,162 +143,152 @@ bucket_definitions:
     parameters: SELECT id FROM test_data WHERE id = token_parameters.user_id
     data: []
 `;
- …
-      await pool.query(`
- …
-      );
- …
-  test(
-    'record too large',
-    walStreamTest(factory, async (context) => {
-      await context.updateSyncRules(`bucket_definitions:
+    await context.updateSyncRules(syncRuleContent);
+    await pool.query(`DROP TABLE IF EXISTS test_data`);
+    await pool.query(`CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`);
+
+    await context.replicateSnapshot();
+    context.startStreaming();
+
+    const [{ test_id }] = pgwireRows(
+      await pool.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
+    );
+    await pool.query(`TRUNCATE test_data`);
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([
+      putOp('test_data', { id: test_id, description: 'test1' }),
+      removeOp('test_data', test_id)
+    ]);
+  });
+
+  test('replicating changing primary key', async () => {
+    await using context = await WalStreamTestContext.open(factory);
+    const { pool } = context;
+    await context.updateSyncRules(BASIC_SYNC_RULES);
+    await pool.query(`DROP TABLE IF EXISTS test_data`);
+    await pool.query(`CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`);
+
+    await context.replicateSnapshot();
+    context.startStreaming();
+
+    const [{ test_id }] = pgwireRows(
+      await pool.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
+    );
+
+    const [{ test_id: test_id2 }] = pgwireRows(
+      await pool.query(
+        `UPDATE test_data SET id = uuid_generate_v4(), description = 'test2a' WHERE id = '${test_id}' returning id as test_id`
+      )
+    );
+
+    // This update may fail replicating with:
+    // Error: Update on missing record public.test_data:074a601e-fc78-4c33-a15d-f89fdd4af31d :: {"g":1,"t":"651e9fbe9fec6155895057ec","k":"1a0b34da-fb8c-5e6f-8421-d7a3c5d4df4f"}
+    await pool.query(`UPDATE test_data SET description = 'test2b' WHERE id = '${test_id2}'`);
+
+    // Re-use old id again
+    await pool.query(`INSERT INTO test_data(id, description) VALUES('${test_id}', 'test1b')`);
+    await pool.query(`UPDATE test_data SET description = 'test1c' WHERE id = '${test_id}'`);
+
+    const data = await context.getBucketData('global[]');
+    expect(data).toMatchObject([
+      // Initial insert
+      putOp('test_data', { id: test_id, description: 'test1' }),
+      // Update id, then description
+      removeOp('test_data', test_id),
+      putOp('test_data', { id: test_id2, description: 'test2a' }),
+      putOp('test_data', { id: test_id2, description: 'test2b' }),
+      // Re-use old id
+      putOp('test_data', { id: test_id, description: 'test1b' }),
+      putOp('test_data', { id: test_id, description: 'test1c' })
+    ]);
+  });
+
+  test('initial sync', async () => {
+    await using context = await WalStreamTestContext.open(factory);
+    const { pool } = context;
+    await context.updateSyncRules(BASIC_SYNC_RULES);
+
+    await pool.query(`DROP TABLE IF EXISTS test_data`);
+    await pool.query(`CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`);
+
+    const [{ test_id }] = pgwireRows(
+      await pool.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
+    );
+
+    await context.replicateSnapshot();
+    context.startStreaming();
+
+    const data = await context.getBucketData('global[]');
+    expect(data).toMatchObject([putOp('test_data', { id: test_id, description: 'test1' })]);
+  });
+
+  test('record too large', async () => {
+    await using context = await WalStreamTestContext.open(factory);
+    await context.updateSyncRules(`bucket_definitions:
   global:
     data:
      - SELECT id, description, other FROM "test_data"`);
- …
+    const { pool } = context;
 
- …
+    await pool.query(`CREATE TABLE test_data(id text primary key, description text, other text)`);
 
- …
+    await context.replicateSnapshot();
 
- …
+    // 4MB
+    const largeDescription = crypto.randomBytes(2_000_000).toString('hex');
+    // 18MB
+    const tooLargeDescription = crypto.randomBytes(9_000_000).toString('hex');
 
- …
+    await pool.query({
+      statement: `INSERT INTO test_data(id, description, other) VALUES('t1', $1, 'foo')`,
+      params: [{ type: 'varchar', value: tooLargeDescription }]
+    });
+    await pool.query({
+      statement: `UPDATE test_data SET description = $1 WHERE id = 't1'`,
+      params: [{ type: 'varchar', value: largeDescription }]
+    });
 
- …
+    context.startStreaming();
 
- …
-  );
+    const data = await context.getBucketData('global[]');
+    expect(data.length).toEqual(1);
+    const row = JSON.parse(data[0].data as string);
+    delete row.description;
+    expect(row).toEqual({ id: 't1', other: 'foo' });
+    delete data[0].data;
+    expect(data[0]).toMatchObject({ object_id: 't1', object_type: 'test_data', op: 'PUT', op_id: '1' });
+  });
 
-  test(
- …
-      await context.updateSyncRules(BASIC_SYNC_RULES);
+  test('table not in sync rules', async () => {
+    await using context = await WalStreamTestContext.open(factory);
+    const { pool } = context;
+    await context.updateSyncRules(BASIC_SYNC_RULES);
 
- …
+    await pool.query(`CREATE TABLE test_donotsync(id uuid primary key default uuid_generate_v4(), description text)`);
 
- …
+    await context.replicateSnapshot();
 
- …
-        (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+    const startRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+    const startTxCount =
+      (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
 
- …
+    context.startStreaming();
 
- …
+    const [{ test_id }] = pgwireRows(
+      await pool.query(`INSERT INTO test_donotsync(description) VALUES('test1') returning id as test_id`)
+    );
 
- …
+    const data = await context.getBucketData('global[]');
 
- …
+    expect(data).toMatchObject([]);
+    const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+    const endTxCount =
+      (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
 
- …
-  );
+    // There was a transaction, but we should not replicate any actual data
+    expect(endRowCount - startRowCount).toEqual(0);
+    expect(endTxCount - startTxCount).toEqual(1);
+  });
 }
```
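
A detail worth noting in the refactored tests: replication counts are asserted as deltas of the `powersync_rows_replicated_total` and `powersync_transactions_replicated_total` metrics captured before streaming starts, so each test is insensitive to whatever earlier tests replicated. Isolated as a sketch (the `Metrics` calls are the ones appearing in the diff above; the helper name is hypothetical):

```ts
import { expect } from 'vitest';
import { Metrics } from '@powersync/service-core';

// Hypothetical helper isolating the delta-based metric assertion used above.
async function expectRowsReplicated(expected: number, run: () => Promise<void>) {
  const before = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
  // Perform the replicated writes and wait for them to stream through.
  await run();
  const after = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
  expect(after - before).toEqual(expected);
}
```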