@powersync/service-module-postgres 0.0.0-dev-20240918092408
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/LICENSE +67 -0
- package/README.md +3 -0
- package/dist/api/PostgresRouteAPIAdapter.d.ts +22 -0
- package/dist/api/PostgresRouteAPIAdapter.js +273 -0
- package/dist/api/PostgresRouteAPIAdapter.js.map +1 -0
- package/dist/auth/SupabaseKeyCollector.d.ts +22 -0
- package/dist/auth/SupabaseKeyCollector.js +64 -0
- package/dist/auth/SupabaseKeyCollector.js.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +4 -0
- package/dist/index.js.map +1 -0
- package/dist/module/PostgresModule.d.ts +14 -0
- package/dist/module/PostgresModule.js +108 -0
- package/dist/module/PostgresModule.js.map +1 -0
- package/dist/replication/ConnectionManagerFactory.d.ts +10 -0
- package/dist/replication/ConnectionManagerFactory.js +21 -0
- package/dist/replication/ConnectionManagerFactory.js.map +1 -0
- package/dist/replication/PgManager.d.ts +25 -0
- package/dist/replication/PgManager.js +60 -0
- package/dist/replication/PgManager.js.map +1 -0
- package/dist/replication/PgRelation.d.ts +6 -0
- package/dist/replication/PgRelation.js +27 -0
- package/dist/replication/PgRelation.js.map +1 -0
- package/dist/replication/PostgresErrorRateLimiter.d.ts +11 -0
- package/dist/replication/PostgresErrorRateLimiter.js +43 -0
- package/dist/replication/PostgresErrorRateLimiter.js.map +1 -0
- package/dist/replication/WalStream.d.ts +53 -0
- package/dist/replication/WalStream.js +536 -0
- package/dist/replication/WalStream.js.map +1 -0
- package/dist/replication/WalStreamReplicationJob.d.ts +27 -0
- package/dist/replication/WalStreamReplicationJob.js +131 -0
- package/dist/replication/WalStreamReplicationJob.js.map +1 -0
- package/dist/replication/WalStreamReplicator.d.ts +13 -0
- package/dist/replication/WalStreamReplicator.js +36 -0
- package/dist/replication/WalStreamReplicator.js.map +1 -0
- package/dist/replication/replication-index.d.ts +5 -0
- package/dist/replication/replication-index.js +6 -0
- package/dist/replication/replication-index.js.map +1 -0
- package/dist/replication/replication-utils.d.ts +32 -0
- package/dist/replication/replication-utils.js +272 -0
- package/dist/replication/replication-utils.js.map +1 -0
- package/dist/types/types.d.ts +76 -0
- package/dist/types/types.js +110 -0
- package/dist/types/types.js.map +1 -0
- package/dist/utils/migration_lib.d.ts +11 -0
- package/dist/utils/migration_lib.js +64 -0
- package/dist/utils/migration_lib.js.map +1 -0
- package/dist/utils/pgwire_utils.d.ts +16 -0
- package/dist/utils/pgwire_utils.js +70 -0
- package/dist/utils/pgwire_utils.js.map +1 -0
- package/dist/utils/populate_test_data.d.ts +8 -0
- package/dist/utils/populate_test_data.js +65 -0
- package/dist/utils/populate_test_data.js.map +1 -0
- package/package.json +49 -0
- package/src/api/PostgresRouteAPIAdapter.ts +307 -0
- package/src/auth/SupabaseKeyCollector.ts +70 -0
- package/src/index.ts +5 -0
- package/src/module/PostgresModule.ts +122 -0
- package/src/replication/ConnectionManagerFactory.ts +28 -0
- package/src/replication/PgManager.ts +70 -0
- package/src/replication/PgRelation.ts +31 -0
- package/src/replication/PostgresErrorRateLimiter.ts +44 -0
- package/src/replication/WalStream.ts +639 -0
- package/src/replication/WalStreamReplicationJob.ts +142 -0
- package/src/replication/WalStreamReplicator.ts +45 -0
- package/src/replication/replication-index.ts +5 -0
- package/src/replication/replication-utils.ts +329 -0
- package/src/types/types.ts +159 -0
- package/src/utils/migration_lib.ts +79 -0
- package/src/utils/pgwire_utils.ts +73 -0
- package/src/utils/populate_test_data.ts +77 -0
- package/test/src/__snapshots__/pg_test.test.ts.snap +256 -0
- package/test/src/env.ts +7 -0
- package/test/src/large_batch.test.ts +195 -0
- package/test/src/pg_test.test.ts +450 -0
- package/test/src/schema_changes.test.ts +543 -0
- package/test/src/setup.ts +7 -0
- package/test/src/slow_tests.test.ts +335 -0
- package/test/src/util.ts +105 -0
- package/test/src/validation.test.ts +64 -0
- package/test/src/wal_stream.test.ts +319 -0
- package/test/src/wal_stream_utils.ts +121 -0
- package/test/tsconfig.json +28 -0
- package/tsconfig.json +31 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/vitest.config.ts +9 -0
|
@@ -0,0 +1,319 @@
|
|
|
1
|
+
import { putOp, removeOp } from '@core-tests/stream_utils.js';
|
|
2
|
+
import { MONGO_STORAGE_FACTORY } from '@core-tests/util.js';
|
|
3
|
+
import { BucketStorageFactory, Metrics } from '@powersync/service-core';
|
|
4
|
+
import { pgwireRows } from '@powersync/service-jpgwire';
|
|
5
|
+
import * as crypto from 'crypto';
|
|
6
|
+
import { describe, expect, test } from 'vitest';
|
|
7
|
+
import { walStreamTest } from './wal_stream_utils.js';
|
|
8
|
+
|
|
9
|
+
type StorageFactory = () => Promise<BucketStorageFactory>;
|
|
10
|
+
|
|
11
|
+
const BASIC_SYNC_RULES = `
|
|
12
|
+
bucket_definitions:
|
|
13
|
+
global:
|
|
14
|
+
data:
|
|
15
|
+
- SELECT id, description FROM "test_data"
|
|
16
|
+
`;
|
|
17
|
+
|
|
18
|
+
describe(
|
|
19
|
+
'wal stream - mongodb',
|
|
20
|
+
function () {
|
|
21
|
+
defineWalStreamTests(MONGO_STORAGE_FACTORY);
|
|
22
|
+
},
|
|
23
|
+
{ timeout: 20_000 }
|
|
24
|
+
);
|
|
25
|
+
|
|
26
|
+
/**
 * Defines the suite of WAL-stream replication tests against the given bucket
 * storage factory.
 *
 * Each test uses walStreamTest() to get a fresh database + storage context,
 * replicates an initial snapshot, starts streaming logical replication
 * changes, then asserts on the resulting bucket data (and in some tests on
 * the replication metric deltas).
 */
function defineWalStreamTests(factory: StorageFactory) {
  test(
    'replicating basic values',
    walStreamTest(factory, async (context) => {
      const { pool } = context;
      await context.updateSyncRules(`
bucket_definitions:
  global:
    data:
      - SELECT id, description, num FROM "test_data"`);

      await pool.query(`DROP TABLE IF EXISTS test_data`);
      await pool.query(
        `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text, num int8)`
      );

      await context.replicateSnapshot();

      // Metric counters are process-global; capture starting values so the
      // assertions below can check deltas instead of absolute values.
      const startRowCount =
        (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
      const startTxCount =
        (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

      context.startStreaming();

      const [{ test_id }] = pgwireRows(
        await pool.query(
          `INSERT INTO test_data(description, num) VALUES('test1', 1152921504606846976) returning id as test_id`
        )
      );

      const data = await context.getBucketData('global[]');

      // int8 (64-bit) values must round-trip as BigInt (note the `n` suffix),
      // not lose precision as a JS number.
      expect(data).toMatchObject([
        putOp('test_data', { id: test_id, description: 'test1', num: 1152921504606846976n })
      ]);
      const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
      const endTxCount =
        (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
      // Exactly one row in exactly one transaction was replicated.
      expect(endRowCount - startRowCount).toEqual(1);
      expect(endTxCount - startTxCount).toEqual(1);
    })
  );

  test(
    'replicating case sensitive table',
    walStreamTest(factory, async (context) => {
      const { pool } = context;
      // Mixed-case table name must be quoted in sync rules and SQL alike.
      await context.updateSyncRules(`
bucket_definitions:
  global:
    data:
      - SELECT id, description FROM "test_DATA"
`);

      await pool.query(`DROP TABLE IF EXISTS "test_DATA"`);
      await pool.query(`CREATE TABLE "test_DATA"(id uuid primary key default uuid_generate_v4(), description text)`);

      await context.replicateSnapshot();

      const startRowCount =
        (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
      const startTxCount =
        (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

      context.startStreaming();

      const [{ test_id }] = pgwireRows(
        await pool.query(`INSERT INTO "test_DATA"(description) VALUES('test1') returning id as test_id`)
      );

      const data = await context.getBucketData('global[]');

      // The bucket data must preserve the original (case-sensitive) table name.
      expect(data).toMatchObject([putOp('test_DATA', { id: test_id, description: 'test1' })]);
      const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
      const endTxCount =
        (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
      expect(endRowCount - startRowCount).toEqual(1);
      expect(endTxCount - startTxCount).toEqual(1);
    })
  );

  test(
    'replicating TOAST values',
    walStreamTest(factory, async (context) => {
      const { pool } = context;
      await context.updateSyncRules(`
bucket_definitions:
  global:
    data:
      - SELECT id, name, description FROM "test_data"
`);

      await pool.query(`DROP TABLE IF EXISTS test_data`);
      await pool.query(
        `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), name text, description text)`
      );

      await context.replicateSnapshot();
      context.startStreaming();

      // Must be > 8kb after compression
      const largeDescription = crypto.randomBytes(20_000).toString('hex');
      const [{ test_id }] = pgwireRows(
        await pool.query({
          statement: `INSERT INTO test_data(name, description) VALUES('test1', $1) returning id as test_id`,
          params: [{ type: 'varchar', value: largeDescription }]
        })
      );

      // Update only `name`; the TOASTed `description` is unchanged, so the WAL
      // record for the update does not carry it.
      await pool.query(`UPDATE test_data SET name = 'test2' WHERE id = '${test_id}'`);

      const data = await context.getBucketData('global[]');
      expect(data.slice(0, 1)).toMatchObject([
        putOp('test_data', { id: test_id, name: 'test1', description: largeDescription })
      ]);
      // The replicated update must still contain the full description.
      expect(data.slice(1)).toMatchObject([
        putOp('test_data', { id: test_id, name: 'test2', description: largeDescription })
      ]);
    })
  );

  test(
    'replicating TRUNCATE',
    walStreamTest(factory, async (context) => {
      const { pool } = context;
      // Includes a parameter query on the same table to cover TRUNCATE handling
      // for parameter lookups as well as data buckets.
      const syncRuleContent = `
bucket_definitions:
  global:
    data:
      - SELECT id, description FROM "test_data"
  by_test_data:
    parameters: SELECT id FROM test_data WHERE id = token_parameters.user_id
    data: []
`;
      await context.updateSyncRules(syncRuleContent);
      await pool.query(`DROP TABLE IF EXISTS test_data`);
      await pool.query(`CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`);

      await context.replicateSnapshot();
      context.startStreaming();

      const [{ test_id }] = pgwireRows(
        await pool.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
      );
      await pool.query(`TRUNCATE test_data`);

      const data = await context.getBucketData('global[]');

      // TRUNCATE must produce a REMOVE operation for the previously-put row.
      expect(data).toMatchObject([
        putOp('test_data', { id: test_id, description: 'test1' }),
        removeOp('test_data', test_id)
      ]);
    })
  );

  test(
    'replicating changing primary key',
    walStreamTest(factory, async (context) => {
      const { pool } = context;
      await context.updateSyncRules(BASIC_SYNC_RULES);
      await pool.query(`DROP TABLE IF EXISTS test_data`);
      await pool.query(`CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`);

      await context.replicateSnapshot();
      context.startStreaming();

      const [{ test_id }] = pgwireRows(
        await pool.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
      );

      // Change the primary key itself; this must replicate as remove(old) + put(new).
      const [{ test_id: test_id2 }] = pgwireRows(
        await pool.query(
          `UPDATE test_data SET id = uuid_generate_v4(), description = 'test2a' WHERE id = '${test_id}' returning id as test_id`
        )
      );

      // This update may fail replicating with:
      // Error: Update on missing record public.test_data:074a601e-fc78-4c33-a15d-f89fdd4af31d :: {"g":1,"t":"651e9fbe9fec6155895057ec","k":"1a0b34da-fb8c-5e6f-8421-d7a3c5d4df4f"}
      await pool.query(`UPDATE test_data SET description = 'test2b' WHERE id = '${test_id2}'`);

      // Re-use old id again
      await pool.query(`INSERT INTO test_data(id, description) VALUES('${test_id}', 'test1b')`);
      await pool.query(`UPDATE test_data SET description = 'test1c' WHERE id = '${test_id}'`);

      const data = await context.getBucketData('global[]');
      expect(data).toMatchObject([
        // Initial insert
        putOp('test_data', { id: test_id, description: 'test1' }),
        // Update id, then description
        removeOp('test_data', test_id),
        putOp('test_data', { id: test_id2, description: 'test2a' }),
        putOp('test_data', { id: test_id2, description: 'test2b' }),
        // Re-use old id
        putOp('test_data', { id: test_id, description: 'test1b' }),
        putOp('test_data', { id: test_id, description: 'test1c' })
      ]);
    })
  );

  test(
    'initial sync',
    walStreamTest(factory, async (context) => {
      const { pool } = context;
      await context.updateSyncRules(BASIC_SYNC_RULES);

      await pool.query(`DROP TABLE IF EXISTS test_data`);
      await pool.query(`CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`);

      // Row inserted BEFORE the snapshot; it must arrive via the initial
      // snapshot rather than the streaming phase.
      const [{ test_id }] = pgwireRows(
        await pool.query(`INSERT INTO test_data(description) VALUES('test1') returning id as test_id`)
      );

      await context.replicateSnapshot();
      context.startStreaming();

      const data = await context.getBucketData('global[]');
      expect(data).toMatchObject([putOp('test_data', { id: test_id, description: 'test1' })]);
    })
  );

  test(
    'record too large',
    walStreamTest(factory, async (context) => {
      await context.updateSyncRules(`bucket_definitions:
  global:
    data:
      - SELECT id, description, other FROM "test_data"`);
      const { pool } = context;

      await pool.query(`CREATE TABLE test_data(id text primary key, description text, other text)`);

      await context.replicateSnapshot();

      // 4MB
      const largeDescription = crypto.randomBytes(2_000_000).toString('hex');
      // 18MB
      const tooLargeDescription = crypto.randomBytes(9_000_000).toString('hex');

      await pool.query({
        statement: `INSERT INTO test_data(id, description, other) VALUES('t1', $1, 'foo')`,
        params: [{ type: 'varchar', value: tooLargeDescription }]
      });
      await pool.query({
        statement: `UPDATE test_data SET description = $1 WHERE id = 't1'`,
        params: [{ type: 'varchar', value: largeDescription }]
      });

      context.startStreaming();

      const data = await context.getBucketData('global[]');
      // Only a single operation is expected for the record (op_id '1' below).
      expect(data.length).toEqual(1);
      const row = JSON.parse(data[0].data as string);
      // Compare everything except the large description payload.
      delete row.description;
      expect(row).toEqual({ id: 't1', other: 'foo' });
      delete data[0].data;
      expect(data[0]).toMatchObject({ object_id: 't1', object_type: 'test_data', op: 'PUT', op_id: '1' });
    })
  );

  test(
    'table not in sync rules',
    walStreamTest(factory, async (context) => {
      const { pool } = context;
      await context.updateSyncRules(BASIC_SYNC_RULES);

      await pool.query(`CREATE TABLE test_donotsync(id uuid primary key default uuid_generate_v4(), description text)`);

      await context.replicateSnapshot();

      const startRowCount =
        (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
      const startTxCount =
        (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

      context.startStreaming();

      const [{ test_id }] = pgwireRows(
        await pool.query(`INSERT INTO test_donotsync(description) VALUES('test1') returning id as test_id`)
      );

      const data = await context.getBucketData('global[]');

      expect(data).toMatchObject([]);
      const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
      const endTxCount =
        (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

      // There was a transaction, but we should not replicate any actual data
      expect(endRowCount - startRowCount).toEqual(0);
      expect(endTxCount - startTxCount).toEqual(1);
    })
  );
}
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
import { BucketStorageFactory, SyncRulesBucketStorage } from '@powersync/service-core';
|
|
2
|
+
import * as pgwire from '@powersync/service-jpgwire';
|
|
3
|
+
import { TEST_CONNECTION_OPTIONS, clearTestDb, getClientCheckpoint } from './util.js';
|
|
4
|
+
import { WalStream, WalStreamOptions, PUBLICATION_NAME } from '@module/replication/WalStream.js';
|
|
5
|
+
import { fromAsync } from '@core-tests/stream_utils.js';
|
|
6
|
+
import { PgManager } from '@module/replication/PgManager.js';
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Tests operating on the wal stream need to configure the stream and manage asynchronous
|
|
10
|
+
* replication, which gets a little tricky.
|
|
11
|
+
*
|
|
12
|
+
* This wraps a test in a function that configures all the context, and tears it down afterwards.
|
|
13
|
+
*/
|
|
14
|
+
export function walStreamTest(
|
|
15
|
+
factory: () => Promise<BucketStorageFactory>,
|
|
16
|
+
test: (context: WalStreamTestContext) => Promise<void>
|
|
17
|
+
): () => Promise<void> {
|
|
18
|
+
return async () => {
|
|
19
|
+
const f = await factory();
|
|
20
|
+
const connectionManager = new PgManager(TEST_CONNECTION_OPTIONS, {});
|
|
21
|
+
|
|
22
|
+
await clearTestDb(connectionManager.pool);
|
|
23
|
+
const context = new WalStreamTestContext(f, connectionManager);
|
|
24
|
+
try {
|
|
25
|
+
await test(context);
|
|
26
|
+
} finally {
|
|
27
|
+
await context.dispose();
|
|
28
|
+
}
|
|
29
|
+
};
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export class WalStreamTestContext {
|
|
33
|
+
private _walStream?: WalStream;
|
|
34
|
+
private abortController = new AbortController();
|
|
35
|
+
private streamPromise?: Promise<void>;
|
|
36
|
+
public storage?: SyncRulesBucketStorage;
|
|
37
|
+
private replicationConnection?: pgwire.PgConnection;
|
|
38
|
+
|
|
39
|
+
constructor(public factory: BucketStorageFactory, public connectionManager: PgManager) {}
|
|
40
|
+
|
|
41
|
+
async dispose() {
|
|
42
|
+
this.abortController.abort();
|
|
43
|
+
await this.streamPromise;
|
|
44
|
+
await this.connectionManager.destroy();
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
get pool() {
|
|
48
|
+
return this.connectionManager.pool;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
get connectionTag() {
|
|
52
|
+
return this.connectionManager.connectionTag;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
get publicationName() {
|
|
56
|
+
return PUBLICATION_NAME;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
async updateSyncRules(content: string) {
|
|
60
|
+
const syncRules = await this.factory.updateSyncRules({ content: content });
|
|
61
|
+
this.storage = this.factory.getInstance(syncRules);
|
|
62
|
+
return this.storage!;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
get walStream() {
|
|
66
|
+
if (this.storage == null) {
|
|
67
|
+
throw new Error('updateSyncRules() first');
|
|
68
|
+
}
|
|
69
|
+
if (this._walStream) {
|
|
70
|
+
return this._walStream;
|
|
71
|
+
}
|
|
72
|
+
const options: WalStreamOptions = {
|
|
73
|
+
storage: this.storage,
|
|
74
|
+
connections: this.connectionManager,
|
|
75
|
+
abort_signal: this.abortController.signal
|
|
76
|
+
};
|
|
77
|
+
this._walStream = new WalStream(options);
|
|
78
|
+
return this._walStream!;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
async replicateSnapshot() {
|
|
82
|
+
this.replicationConnection = await this.connectionManager.replicationConnection();
|
|
83
|
+
await this.walStream.initReplication(this.replicationConnection);
|
|
84
|
+
await this.storage!.autoActivate();
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
startStreaming() {
|
|
88
|
+
if (this.replicationConnection == null) {
|
|
89
|
+
throw new Error('Call replicateSnapshot() before startStreaming()');
|
|
90
|
+
}
|
|
91
|
+
this.streamPromise = this.walStream.streamChanges(this.replicationConnection!);
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
async getCheckpoint(options?: { timeout?: number }) {
|
|
95
|
+
let checkpoint = await Promise.race([
|
|
96
|
+
getClientCheckpoint(this.pool, this.factory, { timeout: options?.timeout ?? 15_000 }),
|
|
97
|
+
this.streamPromise
|
|
98
|
+
]);
|
|
99
|
+
if (typeof checkpoint == undefined) {
|
|
100
|
+
// This indicates an issue with the test setup - streamingPromise completed instead
|
|
101
|
+
// of getClientCheckpoint()
|
|
102
|
+
throw new Error('Test failure - streamingPromise completed');
|
|
103
|
+
}
|
|
104
|
+
return checkpoint as string;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
async getBucketsDataBatch(buckets: Record<string, string>, options?: { timeout?: number }) {
|
|
108
|
+
let checkpoint = await this.getCheckpoint(options);
|
|
109
|
+
const map = new Map<string, string>(Object.entries(buckets));
|
|
110
|
+
return fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
async getBucketData(bucket: string, start?: string, options?: { timeout?: number }) {
|
|
114
|
+
start ??= '0';
|
|
115
|
+
let checkpoint = await this.getCheckpoint(options);
|
|
116
|
+
const map = new Map<string, string>([[bucket, start]]);
|
|
117
|
+
const batch = this.storage!.getBucketDataBatch(checkpoint, map);
|
|
118
|
+
const batches = await fromAsync(batch);
|
|
119
|
+
return batches[0]?.batch.data ?? [];
|
|
120
|
+
}
|
|
121
|
+
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
{
|
|
2
|
+
"extends": "../../../tsconfig.base.json",
|
|
3
|
+
"compilerOptions": {
|
|
4
|
+
"rootDir": "src",
|
|
5
|
+
"baseUrl": "./",
|
|
6
|
+
"noEmit": true,
|
|
7
|
+
"esModuleInterop": true,
|
|
8
|
+
"skipLibCheck": true,
|
|
9
|
+
"sourceMap": true,
|
|
10
|
+
"paths": {
|
|
11
|
+
"@/*": ["../../../packages/service-core/src/*"],
|
|
12
|
+
"@module/*": ["../src/*"],
|
|
13
|
+
"@core-tests/*": ["../../../packages/service-core/test/src/*"]
|
|
14
|
+
}
|
|
15
|
+
},
|
|
16
|
+
"include": ["src"],
|
|
17
|
+
"references": [
|
|
18
|
+
{
|
|
19
|
+
"path": "../"
|
|
20
|
+
},
|
|
21
|
+
{
|
|
22
|
+
"path": "../../../packages/service-core/test"
|
|
23
|
+
},
|
|
24
|
+
{
|
|
25
|
+
"path": "../../../packages/service-core/"
|
|
26
|
+
}
|
|
27
|
+
]
|
|
28
|
+
}
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
{
|
|
2
|
+
"extends": "../../tsconfig.base.json",
|
|
3
|
+
"compilerOptions": {
|
|
4
|
+
"rootDir": "src",
|
|
5
|
+
"outDir": "dist",
|
|
6
|
+
"esModuleInterop": true,
|
|
7
|
+
"skipLibCheck": true,
|
|
8
|
+
"sourceMap": true
|
|
9
|
+
},
|
|
10
|
+
"include": ["src"],
|
|
11
|
+
"references": [
|
|
12
|
+
{
|
|
13
|
+
"path": "../../packages/types"
|
|
14
|
+
},
|
|
15
|
+
{
|
|
16
|
+
"path": "../../packages/jsonbig"
|
|
17
|
+
},
|
|
18
|
+
{
|
|
19
|
+
"path": "../../packages/jpgwire"
|
|
20
|
+
},
|
|
21
|
+
{
|
|
22
|
+
"path": "../../packages/sync-rules"
|
|
23
|
+
},
|
|
24
|
+
{
|
|
25
|
+
"path": "../../packages/service-core"
|
|
26
|
+
},
|
|
27
|
+
{
|
|
28
|
+
"path": "../../libs/lib-services"
|
|
29
|
+
}
|
|
30
|
+
]
|
|
31
|
+
}
|