@powersync/service-module-postgres 0.19.3 → 0.19.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/dist/api/PostgresRouteAPIAdapter.d.ts +1 -1
  2. package/dist/api/PostgresRouteAPIAdapter.js +63 -72
  3. package/dist/api/PostgresRouteAPIAdapter.js.map +1 -1
  4. package/dist/module/PostgresModule.js.map +1 -1
  5. package/dist/replication/MissingReplicationSlotError.d.ts +41 -0
  6. package/dist/replication/MissingReplicationSlotError.js +33 -0
  7. package/dist/replication/MissingReplicationSlotError.js.map +1 -0
  8. package/dist/replication/PostgresErrorRateLimiter.js +1 -1
  9. package/dist/replication/PostgresErrorRateLimiter.js.map +1 -1
  10. package/dist/replication/SnapshotQuery.js +2 -2
  11. package/dist/replication/SnapshotQuery.js.map +1 -1
  12. package/dist/replication/WalStream.d.ts +35 -3
  13. package/dist/replication/WalStream.js +135 -9
  14. package/dist/replication/WalStream.js.map +1 -1
  15. package/dist/replication/WalStreamReplicationJob.js +6 -3
  16. package/dist/replication/WalStreamReplicationJob.js.map +1 -1
  17. package/dist/replication/replication-index.d.ts +3 -1
  18. package/dist/replication/replication-index.js +3 -1
  19. package/dist/replication/replication-index.js.map +1 -1
  20. package/dist/replication/replication-utils.d.ts +3 -11
  21. package/dist/replication/replication-utils.js +101 -164
  22. package/dist/replication/replication-utils.js.map +1 -1
  23. package/dist/replication/wal-budget-utils.d.ts +23 -0
  24. package/dist/replication/wal-budget-utils.js +57 -0
  25. package/dist/replication/wal-budget-utils.js.map +1 -0
  26. package/dist/types/registry.js +1 -1
  27. package/dist/types/registry.js.map +1 -1
  28. package/package.json +15 -11
  29. package/sql/check-source-configuration.plpgsql +13 -0
  30. package/sql/debug-tables-info-batched.plpgsql +230 -0
  31. package/CHANGELOG.md +0 -858
  32. package/src/api/PostgresRouteAPIAdapter.ts +0 -356
  33. package/src/index.ts +0 -1
  34. package/src/module/PostgresModule.ts +0 -122
  35. package/src/replication/ConnectionManagerFactory.ts +0 -33
  36. package/src/replication/PgManager.ts +0 -122
  37. package/src/replication/PgRelation.ts +0 -41
  38. package/src/replication/PostgresErrorRateLimiter.ts +0 -48
  39. package/src/replication/SnapshotQuery.ts +0 -213
  40. package/src/replication/WalStream.ts +0 -1137
  41. package/src/replication/WalStreamReplicationJob.ts +0 -138
  42. package/src/replication/WalStreamReplicator.ts +0 -53
  43. package/src/replication/replication-index.ts +0 -5
  44. package/src/replication/replication-utils.ts +0 -398
  45. package/src/types/registry.ts +0 -275
  46. package/src/types/resolver.ts +0 -227
  47. package/src/types/types.ts +0 -44
  48. package/src/utils/application-name.ts +0 -8
  49. package/src/utils/migration_lib.ts +0 -80
  50. package/src/utils/populate_test_data.ts +0 -37
  51. package/src/utils/populate_test_data_worker.ts +0 -53
  52. package/src/utils/postgres_version.ts +0 -8
  53. package/test/src/checkpoints.test.ts +0 -86
  54. package/test/src/chunked_snapshots.test.ts +0 -161
  55. package/test/src/env.ts +0 -11
  56. package/test/src/large_batch.test.ts +0 -241
  57. package/test/src/pg_test.test.ts +0 -729
  58. package/test/src/resuming_snapshots.test.ts +0 -160
  59. package/test/src/route_api_adapter.test.ts +0 -62
  60. package/test/src/schema_changes.test.ts +0 -655
  61. package/test/src/setup.ts +0 -12
  62. package/test/src/slow_tests.test.ts +0 -519
  63. package/test/src/storage_combination.test.ts +0 -35
  64. package/test/src/types/registry.test.ts +0 -149
  65. package/test/src/util.ts +0 -151
  66. package/test/src/validation.test.ts +0 -63
  67. package/test/src/wal_stream.test.ts +0 -607
  68. package/test/src/wal_stream_utils.ts +0 -284
  69. package/test/tsconfig.json +0 -27
  70. package/tsconfig.json +0 -34
  71. package/tsconfig.tsbuildinfo +0 -1
  72. package/vitest.config.ts +0 -3
@@ -1,729 +0,0 @@
1
- import { WalStream } from '@module/replication/WalStream.js';
2
- import { PostgresTypeResolver } from '@module/types/resolver.js';
3
- import * as dns from 'node:dns';
4
- import type { LookupFunction } from 'node:net';
5
-
6
- import * as pgwire from '@powersync/service-jpgwire';
7
- import {
8
- applyRowContext,
9
- CompatibilityContext,
10
- CompatibilityEdition,
11
- DateTimeValue,
12
- SqliteInputRow,
13
- TimeValue,
14
- TimeValuePrecision
15
- } from '@powersync/service-sync-rules';
16
- import { describe, expect, Mock, test, vi } from 'vitest';
17
- import { clearTestDb, connectPgPool, connectPgWire, TEST_CONNECTION_OPTIONS, TEST_URI } from './util.js';
18
-
19
- describe('connection options', () => {
20
- test('uses custom lookup', async () => {
21
- const lookup: Mock<LookupFunction> = vi.fn((hostname, options, cb) => {
22
- expect(hostname).toStrictEqual('powersynctest.example.org');
23
- dns.lookup('localhost', options, cb);
24
- });
25
-
26
- const db = await pgwire.connectPgWire({
27
- ...TEST_CONNECTION_OPTIONS,
28
- hostname: 'powersynctest.example.org',
29
- lookup
30
- });
31
- expect(lookup).toHaveBeenCalled();
32
-
33
- try {
34
- await db.query('SELECT 1');
35
- } finally {
36
- await db.end();
37
- }
38
- });
39
- });
40
-
41
- describe('pg data types', () => {
42
- async function setupTable(db: pgwire.PgClient) {
43
- await clearTestDb(db);
44
- await db.query(`CREATE TABLE test_data(
45
- id serial primary key,
46
- text text,
47
- uuid uuid,
48
- varchar varchar(255),
49
- bool bool,
50
- bytea bytea,
51
- int2 int2,
52
- int4 int4,
53
- int8 int8,
54
- float4 float4,
55
- float8 float8,
56
- numeric numeric, -- same as decimal
57
- json json,
58
- jsonb jsonb,
59
- pg_lsn pg_lsn,
60
- date date,
61
- time time,
62
- timestamp timestamp,
63
- timestamptz timestamptz,
64
- interval interval,
65
- macaddr macaddr,
66
- inet inet,
67
- oid oid
68
- )`);
69
-
70
- await db.query(`DROP TABLE IF EXISTS test_data_arrays`);
71
- await db.query(`CREATE TABLE test_data_arrays(
72
- id serial primary key,
73
- text text[],
74
- uuid uuid[],
75
- varchar varchar(255)[],
76
- bool bool[],
77
- bytea bytea[],
78
- int2 int2[],
79
- int4 int4[],
80
- int8 int8[],
81
- float4 float4[],
82
- float8 float8[],
83
- numeric numeric[], -- same as decimal
84
- json json[],
85
- jsonb jsonb[],
86
- pg_lsn pg_lsn[],
87
- date date[],
88
- time time[],
89
- timestamp timestamp[],
90
- timestamptz timestamptz[],
91
- interval interval[],
92
- macaddr macaddr[],
93
- inet inet[],
94
- oid oid[],
95
- multidimensional text[][] -- same as text[]
96
- )`);
97
- }
98
-
99
- async function insert(db: pgwire.PgClient) {
100
- await db.query(`
101
- INSERT INTO test_data(id, text, uuid, varchar, bool, bytea, int2, int4, int8, numeric, float4, float8)
102
- VALUES(1, 'text', 'baeb2514-4c57-436d-b3cc-c1256211656d', 'varchar', true, 'test', 1000, 1000000, 9007199254740993, 18014398509481982123, 3.14, 314);
103
-
104
- INSERT INTO test_data(id, json, jsonb)
105
- VALUES(2, '{"test": "thing" }', '{"test": "thing" }');
106
-
107
- INSERT INTO test_data(id, date, time, timestamp, timestamptz)
108
- VALUES(3, '2023-03-06', '15:47', '2023-03-06 15:47', '2023-03-06 15:47+02');
109
-
110
- INSERT INTO test_data(id, pg_lsn, interval, macaddr, inet, oid)
111
- VALUES(4, '016/B374D848', '1 hour', '00:00:5e:00:53:af', '127.0.0.1', 1007);
112
-
113
- INSERT INTO test_data(id, date, time, timestamp, timestamptz)
114
- VALUES(5, '-infinity'::date, 'allballs'::time, '-infinity'::timestamp, '-infinity'::timestamptz);
115
-
116
- INSERT INTO test_data(id, timestamp, timestamptz)
117
- VALUES(6, 'epoch'::timestamp, 'epoch'::timestamptz);
118
-
119
- INSERT INTO test_data(id, timestamp, timestamptz)
120
- VALUES(7, 'infinity'::timestamp, 'infinity'::timestamptz);
121
-
122
- INSERT INTO test_data(id, timestamptz)
123
- VALUES(8, '0022-02-03 12:13:14+03'::timestamptz);
124
-
125
- INSERT INTO test_data(id, timestamptz)
126
- VALUES(9, '10022-02-03 12:13:14+03'::timestamptz);
127
- `);
128
- }
129
-
130
- async function insertArrays(db: pgwire.PgClient) {
131
- await db.query(`
132
- INSERT INTO test_data_arrays(id, text, uuid, varchar, bool, bytea, int2, int4, int8, numeric)
133
- VALUES(1, ARRAY['text', '}te][xt{"'], '{"baeb2514-4c57-436d-b3cc-c1256211656d"}', '{"varchar"}', '{true}', '{"test"}', '{1000}', '{1000000}', '{9007199254740993}', '{18014398509481982123}');
134
-
135
- INSERT INTO test_data_arrays(id, json, jsonb)
136
- VALUES(2, ARRAY['{"test": "thing"}' :: json, '{"test": "}te][xt{"}' :: json], ARRAY['{"test": "thing", "foo": 5.0, "bignum": 18014398509481982123, "bool":true}' :: jsonb]);
137
-
138
- INSERT INTO test_data_arrays(id, date, time, timestamp, timestamptz)
139
- VALUES(3, ARRAY['2023-03-06'::date], ARRAY['15:47'::time], ARRAY['2023-03-06 15:47'::timestamp], ARRAY['2023-03-06 15:47+02'::timestamptz, '2023-03-06 15:47:00.123450+02'::timestamptz]);
140
-
141
- INSERT INTO test_data_arrays(id, pg_lsn, interval, macaddr, inet, oid)
142
- VALUES(4, ARRAY['016/B374D848'::pg_lsn], ARRAY['1 hour'::interval], ARRAY['00:00:5e:00:53:af'::macaddr], ARRAY['127.0.0.1'::inet], ARRAY[1007::oid]);
143
-
144
- -- Empty arrays
145
- INSERT INTO test_data_arrays(id, text, uuid, varchar, bool, bytea, int2, int4, int8, numeric)
146
- VALUES(5, ARRAY[]::text[], ARRAY[]::uuid[], ARRAY[]::varchar[], ARRAY[]::bool[], ARRAY[]::bytea[], ARRAY[]::int2[], ARRAY[]::int4[], ARRAY[]::int8[], ARRAY[]::numeric[]);
147
-
148
- -- Two-dimentional array
149
- INSERT INTO test_data_arrays(id, multidimensional)
150
- VALUES(6, ARRAY[['one', 1], ['two', 2], ['three', Null]]::TEXT[]);
151
-
152
- -- Empty array
153
- INSERT INTO test_data_arrays(id, multidimensional)
154
- VALUES(7, ARRAY[[], [], []]::TEXT[]);
155
-
156
- -- Empty array
157
- INSERT INTO test_data_arrays(id, multidimensional)
158
- VALUES(8, ARRAY[]::TEXT[]);
159
-
160
- -- Array with only null
161
- INSERT INTO test_data_arrays(id, multidimensional)
162
- VALUES(9, ARRAY[NULL]::TEXT[]);
163
-
164
- -- Array with 'null'
165
- INSERT INTO test_data_arrays(id, multidimensional)
166
- VALUES(10, ARRAY['null']::TEXT[]);
167
- `);
168
- }
169
-
170
- function checkResults(transformed: Record<string, any>[]) {
171
- expect(transformed[0]).toMatchObject({
172
- id: 1n,
173
- text: 'text',
174
- uuid: 'baeb2514-4c57-436d-b3cc-c1256211656d',
175
- varchar: 'varchar',
176
- bool: 1n,
177
- bytea: new Uint8Array([116, 101, 115, 116]),
178
- int2: 1000n,
179
- int4: 1000000n,
180
- int8: 9007199254740993n,
181
- float4: 3.14,
182
- float8: 314,
183
- numeric: '18014398509481982123'
184
- });
185
- expect(transformed[1]).toMatchObject({
186
- id: 2n,
187
- json: '{"test": "thing" }', // Whitespace preserved
188
- jsonb: '{"test": "thing"}' // Whitespace according to pg JSON conventions
189
- });
190
-
191
- expect(transformed[2]).toMatchObject({
192
- id: 3n,
193
- date: '2023-03-06',
194
- time: TimeValue.parse('15:47:00', pgwire.postgresTimeOptions),
195
- timestamp: new DateTimeValue('2023-03-06T15:47:00.000000', '2023-03-06 15:47:00', pgwire.postgresTimeOptions),
196
- timestamptz: new DateTimeValue('2023-03-06T13:47:00Z', '2023-03-06 13:47:00Z', pgwire.postgresTimeOptions)
197
- });
198
-
199
- expect(transformed[3]).toMatchObject({
200
- id: 4n,
201
- pg_lsn: '00000016/B374D848',
202
- interval: '01:00:00',
203
- macaddr: '00:00:5e:00:53:af',
204
- inet: '127.0.0.1',
205
- oid: 1007n
206
- });
207
-
208
- expect(transformed[4]).toMatchObject({
209
- id: 5n,
210
- date: '0000-01-01',
211
- time: TimeValue.parse('00:00:00', pgwire.postgresTimeOptions),
212
- timestamp: new DateTimeValue('0000-01-01T00:00:00', undefined, pgwire.postgresTimeOptions),
213
- timestamptz: new DateTimeValue('0000-01-01T00:00:00Z', undefined, pgwire.postgresTimeOptions)
214
- });
215
-
216
- expect(transformed[5]).toMatchObject({
217
- id: 6n,
218
- timestamp: new DateTimeValue('1970-01-01T00:00:00.000000', '1970-01-01 00:00:00', pgwire.postgresTimeOptions),
219
- timestamptz: new DateTimeValue('1970-01-01T00:00:00Z', '1970-01-01 00:00:00Z', pgwire.postgresTimeOptions)
220
- });
221
-
222
- expect(transformed[6]).toMatchObject({
223
- id: 7n,
224
- timestamp: new DateTimeValue('9999-12-31T23:59:59', undefined, pgwire.postgresTimeOptions),
225
- timestamptz: new DateTimeValue('9999-12-31T23:59:59Z', undefined, pgwire.postgresTimeOptions)
226
- });
227
-
228
- expect(transformed[7]).toMatchObject({
229
- id: 8n,
230
- timestamptz: new DateTimeValue('0022-02-03T09:13:14Z', '0022-02-03 09:13:14Z', pgwire.postgresTimeOptions)
231
- });
232
-
233
- expect(transformed[8]).toMatchObject({
234
- id: 9n,
235
- // 10022-02-03 12:13:14+03 - out of range of both our date parsing logic, and sqlite's date functions
236
- // We can consider just preserving the source string as an alternative if this causes issues.
237
- timestamptz: null
238
- });
239
- }
240
-
241
- function checkResultArrays(transformed: Record<string, any>[]) {
242
- expect(transformed[0]).toMatchObject({
243
- id: 1n,
244
- text: `["text","}te][xt{\\""]`,
245
- uuid: '["baeb2514-4c57-436d-b3cc-c1256211656d"]',
246
- varchar: '["varchar"]',
247
- bool: '[1]',
248
- bytea: '[null]',
249
- int2: '[1000]',
250
- int4: '[1000000]',
251
- int8: `[9007199254740993]`,
252
- numeric: `["18014398509481982123"]`
253
- });
254
-
255
- // Note: Depending on to what extent we use the original postgres value, the whitespace may change, and order may change.
256
- // We do expect that decimals and big numbers are preserved.
257
- expect(transformed[1]).toMatchObject({
258
- id: 2n,
259
- // Expected output after a serialize + parse cycle:
260
- // json: `[{"test":"thing"},{"test":"}te][xt{"}]`,
261
- // jsonb: `[{"foo":5.0,"bool":true,"test":"thing","bignum":18014398509481982123}]`
262
- // Expected using direct PG values:
263
- json: `[{"test": "thing"},{"test": "}te][xt{"}]`,
264
- jsonb: `[{"foo": 5.0, "bool": true, "test": "thing", "bignum": 18014398509481982123}]`
265
- });
266
-
267
- expect(transformed[2]).toMatchObject({
268
- id: 3n,
269
- date: `["2023-03-06"]`,
270
- time: `["15:47:00"]`,
271
- timestamp: '["2023-03-06 15:47:00"]',
272
- timestamptz: '["2023-03-06 13:47:00Z","2023-03-06 13:47:00.12345Z"]'
273
- });
274
-
275
- expect(transformed[3]).toMatchObject({
276
- id: 4n,
277
- pg_lsn: `["00000016/B374D848"]`,
278
- interval: `["01:00:00"]`,
279
- macaddr: `["00:00:5e:00:53:af"]`,
280
- inet: `["127.0.0.1"]`,
281
- oid: `[1007]`
282
- });
283
-
284
- expect(transformed[4]).toMatchObject({
285
- id: 5n,
286
- text: '[]',
287
- uuid: '[]',
288
- varchar: '[]',
289
- bool: '[]',
290
- bytea: '[]',
291
- int2: '[]',
292
- int4: '[]',
293
- int8: '[]',
294
- numeric: '[]'
295
- });
296
-
297
- expect(transformed[5]).toMatchObject({
298
- id: 6n,
299
- multidimensional: '[["one","1"],["two","2"],["three",null]]'
300
- });
301
-
302
- expect(transformed[6]).toMatchObject({
303
- id: 7n,
304
- multidimensional: '[]'
305
- });
306
-
307
- expect(transformed[7]).toMatchObject({
308
- id: 8n,
309
- multidimensional: '[]'
310
- });
311
-
312
- expect(transformed[8]).toMatchObject({
313
- id: 9n,
314
- multidimensional: '[null]'
315
- });
316
-
317
- expect(transformed[9]).toMatchObject({
318
- id: 10n,
319
- multidimensional: '["null"]'
320
- });
321
- }
322
-
323
- test('test direct queries', async () => {
324
- const db = await connectPgPool();
325
- try {
326
- await setupTable(db);
327
-
328
- await insert(db);
329
-
330
- const transformed = await queryAll(db, `SELECT * FROM test_data ORDER BY id`);
331
- checkResults(transformed);
332
- } finally {
333
- await db.end();
334
- }
335
- });
336
-
337
- test('test direct queries - parameterized', async () => {
338
- // Parameterized queries may use a different underlying protocol,
339
- // so we make sure it also gets the same results.
340
- const db = await connectPgPool();
341
- try {
342
- await setupTable(db);
343
-
344
- await insert(db);
345
-
346
- const raw = await db.query({
347
- statement: `SELECT * FROM test_data WHERE $1 ORDER BY id`,
348
- params: [{ type: 'bool', value: true }]
349
- });
350
- const transformed = await interpretResults(db, raw);
351
- checkResults(transformed);
352
- } finally {
353
- await db.end();
354
- }
355
- });
356
-
357
- test('test direct queries - arrays', async () => {
358
- const db = await connectPgPool();
359
- try {
360
- await setupTable(db);
361
-
362
- await insertArrays(db);
363
-
364
- const transformed = (await queryAll(db, `SELECT * FROM test_data_arrays ORDER BY id`)).map((e) =>
365
- applyRowContext(e, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY)
366
- );
367
-
368
- checkResultArrays(transformed);
369
- } finally {
370
- await db.end();
371
- }
372
- });
373
-
374
- test('test replication', async () => {
375
- const db = await connectPgPool();
376
- try {
377
- await setupTable(db);
378
-
379
- const slotName = 'test_slot';
380
-
381
- await db.query({
382
- statement: 'SELECT pg_drop_replication_slot(slot_name) FROM pg_replication_slots WHERE slot_name = $1',
383
- params: [{ type: 'varchar', value: slotName }]
384
- });
385
-
386
- await db.query({
387
- statement: `SELECT slot_name, lsn FROM pg_catalog.pg_create_logical_replication_slot($1, 'pgoutput')`,
388
- params: [{ type: 'varchar', value: slotName }]
389
- });
390
-
391
- await insert(db);
392
-
393
- const pg: pgwire.PgConnection = await pgwire.pgconnect({ replication: 'database' }, TEST_URI);
394
- const replicationStream = await pg.logicalReplication({
395
- slot: slotName,
396
- options: {
397
- proto_version: '1',
398
- publication_names: 'powersync'
399
- }
400
- });
401
-
402
- const transformed = await getReplicationTx(db, replicationStream);
403
- await pg.end();
404
-
405
- checkResults(transformed);
406
- } finally {
407
- await db.end();
408
- }
409
- });
410
-
411
- test('test replication - arrays', async () => {
412
- const db = await connectPgPool();
413
- try {
414
- await setupTable(db);
415
-
416
- const slotName = 'test_slot';
417
-
418
- await db.query({
419
- statement: 'SELECT pg_drop_replication_slot(slot_name) FROM pg_replication_slots WHERE slot_name = $1',
420
- params: [{ type: 'varchar', value: slotName }]
421
- });
422
-
423
- await db.query({
424
- statement: `SELECT slot_name, lsn FROM pg_catalog.pg_create_logical_replication_slot($1, 'pgoutput')`,
425
- params: [{ type: 'varchar', value: slotName }]
426
- });
427
-
428
- await insertArrays(db);
429
-
430
- const pg: pgwire.PgConnection = await pgwire.pgconnect({ replication: 'database' }, TEST_URI);
431
- const replicationStream = await pg.logicalReplication({
432
- slot: slotName,
433
- options: {
434
- proto_version: '1',
435
- publication_names: 'powersync'
436
- }
437
- });
438
-
439
- const transformed = await getReplicationTx(db, replicationStream);
440
- await pg.end();
441
-
442
- checkResultArrays(transformed.map((e) => applyRowContext(e, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY)));
443
- } finally {
444
- await db.end();
445
- }
446
- });
447
-
448
- test('schema', async function () {
449
- const db = await connectPgWire();
450
-
451
- await setupTable(db);
452
-
453
- // TODO need a test for adapter
454
- // const schema = await api.getConnectionsSchema(db);
455
- // expect(schema).toMatchSnapshot();
456
- });
457
-
458
- test('date formats', async () => {
459
- const db = await connectPgPool();
460
- try {
461
- await setupTable(db);
462
-
463
- await db.query(`
464
- INSERT INTO test_data(id, time, timestamp, timestamptz) VALUES (1, '17:42:01.12', '2023-03-06 15:47:12.4', '2023-03-06 15:47+02');
465
- `);
466
-
467
- const [row] = await queryAll(db, `SELECT time, timestamp, timestamptz FROM test_data`);
468
-
469
- const oldFormat = applyRowContext(row, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
470
- expect(oldFormat).toMatchObject({
471
- time: '17:42:01.12',
472
- timestamp: '2023-03-06 15:47:12.4',
473
- timestamptz: '2023-03-06 13:47:00Z'
474
- });
475
-
476
- const newFormat = applyRowContext(row, new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS }));
477
- expect(newFormat).toMatchObject({
478
- time: '17:42:01.120000',
479
- timestamp: '2023-03-06T15:47:12.400000',
480
- timestamptz: '2023-03-06T13:47:00.000000Z'
481
- });
482
-
483
- const reducedPrecisionFormat = applyRowContext(
484
- row,
485
- new CompatibilityContext({
486
- edition: CompatibilityEdition.SYNC_STREAMS,
487
- maxTimeValuePrecision: TimeValuePrecision.milliseconds
488
- })
489
- );
490
- expect(reducedPrecisionFormat).toMatchObject({
491
- time: '17:42:01.120',
492
- timestamp: '2023-03-06T15:47:12.400',
493
- timestamptz: '2023-03-06T13:47:00.000Z'
494
- });
495
- } finally {
496
- await db.end();
497
- }
498
- });
499
-
500
- test('test replication - custom types', async () => {
501
- const db = await connectPgPool();
502
- try {
503
- await clearTestDb(db);
504
- await db.query(`CREATE DOMAIN rating_value AS FLOAT CHECK (VALUE BETWEEN 0 AND 5);`);
505
- await db.query(`CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')`);
506
- await db.query(`CREATE TYPE composite AS (foo rating_value[], bar TEXT, mood mood);`);
507
- await db.query(`CREATE TYPE nested_composite AS (a BOOLEAN, b composite);`);
508
-
509
- await db.query(`CREATE TABLE test_custom(
510
- id serial primary key,
511
- rating rating_value,
512
- composite composite,
513
- nested_composite nested_composite,
514
- boxes box[],
515
- mood mood,
516
- moods mood[]
517
- );`);
518
-
519
- const slotName = 'test_slot';
520
-
521
- await db.query({
522
- statement: 'SELECT pg_drop_replication_slot(slot_name) FROM pg_replication_slots WHERE slot_name = $1',
523
- params: [{ type: 'varchar', value: slotName }]
524
- });
525
-
526
- await db.query({
527
- statement: `SELECT slot_name, lsn FROM pg_catalog.pg_create_logical_replication_slot($1, 'pgoutput')`,
528
- params: [{ type: 'varchar', value: slotName }]
529
- });
530
-
531
- await db.query(`
532
- INSERT INTO test_custom
533
- (rating, composite, nested_composite, boxes, mood, moods)
534
- VALUES (
535
- 1,
536
- (ARRAY[2,3], 'bar', 'sad'::mood),
537
- (TRUE, (ARRAY[2,3], 'bar', 'sad'::mood)),
538
- ARRAY[box(point '(1,2)', point '(3,4)'), box(point '(5, 6)', point '(7,8)')],
539
- 'happy',
540
- ARRAY['sad'::mood, 'happy'::mood]
541
- );
542
- `);
543
-
544
- const pg: pgwire.PgConnection = await pgwire.pgconnect({ replication: 'database' }, TEST_URI);
545
- const replicationStream = await pg.logicalReplication({
546
- slot: slotName,
547
- options: {
548
- proto_version: '1',
549
- publication_names: 'powersync'
550
- }
551
- });
552
-
553
- const [transformed] = await getReplicationTx(db, replicationStream);
554
- const [queried] = await queryAll(db, `SELECT * FROM test_custom`);
555
- await pg.end();
556
-
557
- const oldFormatStreamed = applyRowContext(transformed, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
558
- expect(oldFormatStreamed).toMatchObject({
559
- rating: '1',
560
- composite: '("{2,3}",bar,sad)',
561
- nested_composite: '(t,"(""{2,3}"",bar,sad)")',
562
- boxes: '["(3","4)","(1","2);(7","8)","(5","6)"]',
563
- mood: 'happy',
564
- moods: '{sad,happy}'
565
- });
566
-
567
- const oldFormatQueried = applyRowContext(queried, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
568
- expect(oldFormatQueried).toMatchObject({
569
- rating: 1,
570
- composite: '("{2,3}",bar,sad)',
571
- nested_composite: '(t,"(""{2,3}"",bar,sad)")',
572
- boxes: '["(3","4)","(1","2);(7","8)","(5","6)"]',
573
- mood: 'happy',
574
- moods: '{sad,happy}'
575
- });
576
-
577
- const newFormatStreamed = applyRowContext(
578
- transformed,
579
- new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS })
580
- );
581
- expect(newFormatStreamed).toMatchObject({
582
- rating: 1,
583
- composite: '{"foo":[2.0,3.0],"bar":"bar","mood":"sad"}',
584
- nested_composite: '{"a":1,"b":{"foo":[2.0,3.0],"bar":"bar","mood":"sad"}}',
585
- boxes: JSON.stringify(['(3,4),(1,2)', '(7,8),(5,6)']),
586
- mood: 'happy',
587
- moods: '["sad","happy"]'
588
- });
589
-
590
- const newFormatQueried = applyRowContext(
591
- queried,
592
- new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS })
593
- );
594
- expect(newFormatQueried).toMatchObject({
595
- rating: 1,
596
- composite: '{"foo":[2.0,3.0],"bar":"bar","mood":"sad"}',
597
- nested_composite: '{"a":1,"b":{"foo":[2.0,3.0],"bar":"bar","mood":"sad"}}',
598
- boxes: JSON.stringify(['(3,4),(1,2)', '(7,8),(5,6)']),
599
- mood: 'happy',
600
- moods: '["sad","happy"]'
601
- });
602
- } finally {
603
- await db.end();
604
- }
605
- });
606
-
607
- test('test replication - multiranges', async () => {
608
- const db = await connectPgPool();
609
-
610
- if (!(await new PostgresTypeResolver(db).supportsMultiRanges())) {
611
- // This test requires Postgres 14 or later.
612
- return;
613
- }
614
-
615
- try {
616
- await clearTestDb(db);
617
-
618
- await db.query(`CREATE TABLE test_custom(
619
- id serial primary key,
620
- ranges int4multirange[]
621
- );`);
622
-
623
- const slotName = 'test_slot';
624
-
625
- await db.query({
626
- statement: 'SELECT pg_drop_replication_slot(slot_name) FROM pg_replication_slots WHERE slot_name = $1',
627
- params: [{ type: 'varchar', value: slotName }]
628
- });
629
-
630
- await db.query({
631
- statement: `SELECT slot_name, lsn FROM pg_catalog.pg_create_logical_replication_slot($1, 'pgoutput')`,
632
- params: [{ type: 'varchar', value: slotName }]
633
- });
634
-
635
- await db.query(`
636
- INSERT INTO test_custom
637
- (ranges)
638
- VALUES (
639
- ARRAY[int4multirange(int4range(2, 4), int4range(5, 7, '(]'))]::int4multirange[]
640
- );
641
- `);
642
-
643
- const pg: pgwire.PgConnection = await pgwire.pgconnect({ replication: 'database' }, TEST_URI);
644
- const replicationStream = await pg.logicalReplication({
645
- slot: slotName,
646
- options: {
647
- proto_version: '1',
648
- publication_names: 'powersync'
649
- }
650
- });
651
-
652
- const [transformed] = await getReplicationTx(db, replicationStream);
653
- const [queried] = await queryAll(db, `SELECT ranges FROM test_custom`);
654
- await pg.end();
655
-
656
- const oldFormatStreamed = applyRowContext(transformed, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
657
- expect(oldFormatStreamed).toMatchObject({
658
- ranges: '{"{[2,4),[6,8)}"}'
659
- });
660
- const oldFormatQueried = applyRowContext(queried, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
661
- expect(oldFormatQueried).toMatchObject({
662
- ranges: '{"{[2,4),[6,8)}"}'
663
- });
664
-
665
- const newFormatStreamed = applyRowContext(
666
- transformed,
667
- new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS })
668
- );
669
- expect(newFormatStreamed).toMatchObject({
670
- ranges: JSON.stringify([
671
- [
672
- { lower: 2, upper: 4, lower_exclusive: 0, upper_exclusive: 1 },
673
- { lower: 6, upper: 8, lower_exclusive: 0, upper_exclusive: 1 }
674
- ]
675
- ])
676
- });
677
-
678
- const newFormatQueried = applyRowContext(
679
- queried,
680
- new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS })
681
- );
682
- expect(newFormatQueried).toMatchObject({
683
- ranges: JSON.stringify([
684
- [
685
- { lower: 2, upper: 4, lower_exclusive: 0, upper_exclusive: 1 },
686
- { lower: 6, upper: 8, lower_exclusive: 0, upper_exclusive: 1 }
687
- ]
688
- ])
689
- });
690
- } finally {
691
- await db.end();
692
- }
693
- });
694
- });
695
-
696
- /**
697
- * Return all the inserts from the first transaction in the replication stream.
698
- */
699
- async function getReplicationTx(db: pgwire.PgClient, replicationStream: pgwire.ReplicationStream) {
700
- const typeCache = new PostgresTypeResolver(db);
701
- await typeCache.fetchTypesForSchema();
702
-
703
- let transformed: SqliteInputRow[] = [];
704
- for await (const batch of replicationStream.pgoutputDecode()) {
705
- for (const msg of batch.messages) {
706
- if (msg.tag == 'insert') {
707
- transformed.push(typeCache.constructAfterRecord(msg));
708
- } else if (msg.tag == 'commit') {
709
- return transformed;
710
- }
711
- }
712
- }
713
- return transformed;
714
- }
715
-
716
- /**
717
- * Simulates what WalStream does for initial snapshots.
718
- */
719
- async function queryAll(db: pgwire.PgClient, sql: string) {
720
- const raw = await db.query(sql);
721
- return await interpretResults(db, raw);
722
- }
723
-
724
- async function interpretResults(db: pgwire.PgClient, results: pgwire.PgResult) {
725
- const typeCache = new PostgresTypeResolver(db);
726
- await typeCache.fetchTypesForSchema();
727
-
728
- return results.rows.map((row) => WalStream.decodeRow(row, typeCache));
729
- }