@powersync/service-module-postgres 0.16.15 → 0.16.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/CHANGELOG.md +16 -0
  2. package/dist/api/PostgresRouteAPIAdapter.js +3 -2
  3. package/dist/api/PostgresRouteAPIAdapter.js.map +1 -1
  4. package/dist/replication/SnapshotQuery.js +4 -3
  5. package/dist/replication/SnapshotQuery.js.map +1 -1
  6. package/dist/replication/WalStream.d.ts +3 -2
  7. package/dist/replication/WalStream.js +23 -35
  8. package/dist/replication/WalStream.js.map +1 -1
  9. package/dist/replication/replication-utils.js +5 -5
  10. package/dist/replication/replication-utils.js.map +1 -1
  11. package/dist/types/registry.js +1 -2
  12. package/dist/types/registry.js.map +1 -1
  13. package/dist/types/resolver.d.ts +0 -1
  14. package/dist/types/resolver.js +9 -14
  15. package/dist/types/resolver.js.map +1 -1
  16. package/dist/utils/migration_lib.js +1 -1
  17. package/dist/utils/migration_lib.js.map +1 -1
  18. package/dist/utils/postgres_version.js +1 -1
  19. package/dist/utils/postgres_version.js.map +1 -1
  20. package/package.json +10 -10
  21. package/src/api/PostgresRouteAPIAdapter.ts +3 -2
  22. package/src/replication/SnapshotQuery.ts +7 -3
  23. package/src/replication/WalStream.ts +31 -34
  24. package/src/replication/replication-utils.ts +5 -5
  25. package/src/types/registry.ts +1 -4
  26. package/src/types/resolver.ts +10 -14
  27. package/src/utils/migration_lib.ts +1 -1
  28. package/src/utils/postgres_version.ts +1 -1
  29. package/test/src/pg_test.test.ts +153 -61
  30. package/test/src/resuming_snapshots.test.ts +12 -6
  31. package/test/src/slow_tests.test.ts +2 -2
  32. package/test/src/types/registry.test.ts +1 -1
  33. package/test/src/wal_stream_utils.ts +1 -1
  34. package/tsconfig.tsbuildinfo +1 -1
package/test/src/pg_test.test.ts
@@ -1,3 +1,6 @@
+ import type { LookupFunction } from 'node:net';
+ import * as dns from 'node:dns';
+
  import * as pgwire from '@powersync/service-jpgwire';
  import {
    applyRowContext,
@@ -5,13 +8,35 @@ import {
    SqliteInputRow,
    DateTimeValue,
    TimeValue,
-   CompatibilityEdition
+   CompatibilityEdition,
+   TimeValuePrecision
  } from '@powersync/service-sync-rules';
- import { describe, expect, test } from 'vitest';
- import { clearTestDb, connectPgPool, connectPgWire, TEST_URI } from './util.js';
+ import { describe, expect, Mock, test, vi } from 'vitest';
+ import { clearTestDb, connectPgPool, connectPgWire, TEST_CONNECTION_OPTIONS, TEST_URI } from './util.js';
  import { WalStream } from '@module/replication/WalStream.js';
  import { PostgresTypeResolver } from '@module/types/resolver.js';
- import { CustomTypeRegistry } from '@module/types/registry.js';
+
+ describe('connection options', () => {
+   test('uses custom lookup', async () => {
+     const lookup: Mock<LookupFunction> = vi.fn((hostname, options, cb) => {
+       expect(hostname).toStrictEqual('powersynctest.example.org');
+       dns.lookup('localhost', options, cb);
+     });
+
+     const db = await pgwire.connectPgWire({
+       ...TEST_CONNECTION_OPTIONS,
+       hostname: 'powersynctest.example.org',
+       lookup
+     });
+     expect(lookup).toHaveBeenCalled();
+
+     try {
+       await db.query('SELECT 1');
+     } finally {
+       await db.end();
+     }
+   });
+ });
 
  describe('pg data types', () => {
    async function setupTable(db: pgwire.PgClient) {
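
The lookup option exercised above is typed as Node's LookupFunction from node:net, so any resolver with the dns.lookup callback signature can be plugged in. A minimal sketch of how a caller might use it, assuming the same connection-options shape as the test; the pinnedLookup helper below is hypothetical, for illustration only:

    import * as dns from 'node:dns';
    import type { LookupFunction } from 'node:net';

    // Hypothetical helper: resolve one pinned hostname to a fixed target,
    // deferring to the default dns.lookup for everything else.
    function pinnedLookup(pinnedHost: string, target: string): LookupFunction {
      return (hostname, options, callback) =>
        dns.lookup(hostname === pinnedHost ? target : hostname, options, callback);
    }

    // const db = await pgwire.connectPgWire({
    //   ...connectionOptions,
    //   hostname: 'db.internal.example',
    //   lookup: pinnedLookup('db.internal.example', 'localhost')
    // });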
@@ -166,9 +191,9 @@ VALUES(10, ARRAY['null']::TEXT[]);
  expect(transformed[2]).toMatchObject({
    id: 3n,
    date: '2023-03-06',
-   time: new TimeValue('15:47:00'),
-   timestamp: new DateTimeValue('2023-03-06T15:47:00.000000', '2023-03-06 15:47:00'),
-   timestamptz: new DateTimeValue('2023-03-06T13:47:00.000000Z', '2023-03-06 13:47:00Z')
+   time: TimeValue.parse('15:47:00', pgwire.postgresTimeOptions),
+   timestamp: new DateTimeValue('2023-03-06T15:47:00.000000', '2023-03-06 15:47:00', pgwire.postgresTimeOptions),
+   timestamptz: new DateTimeValue('2023-03-06T13:47:00Z', '2023-03-06 13:47:00Z', pgwire.postgresTimeOptions)
  });
 
  expect(transformed[3]).toMatchObject({
@@ -183,26 +208,26 @@ VALUES(10, ARRAY['null']::TEXT[]);
  expect(transformed[4]).toMatchObject({
    id: 5n,
    date: '0000-01-01',
-   time: new TimeValue('00:00:00'),
-   timestamp: new DateTimeValue('0000-01-01T00:00:00'),
-   timestamptz: new DateTimeValue('0000-01-01T00:00:00Z')
+   time: TimeValue.parse('00:00:00', pgwire.postgresTimeOptions),
+   timestamp: new DateTimeValue('0000-01-01T00:00:00', undefined, pgwire.postgresTimeOptions),
+   timestamptz: new DateTimeValue('0000-01-01T00:00:00Z', undefined, pgwire.postgresTimeOptions)
  });
 
  expect(transformed[5]).toMatchObject({
    id: 6n,
-   timestamp: new DateTimeValue('1970-01-01T00:00:00.000000', '1970-01-01 00:00:00'),
-   timestamptz: new DateTimeValue('1970-01-01T00:00:00.000000Z', '1970-01-01 00:00:00Z')
+   timestamp: new DateTimeValue('1970-01-01T00:00:00.000000', '1970-01-01 00:00:00', pgwire.postgresTimeOptions),
+   timestamptz: new DateTimeValue('1970-01-01T00:00:00Z', '1970-01-01 00:00:00Z', pgwire.postgresTimeOptions)
  });
 
  expect(transformed[6]).toMatchObject({
    id: 7n,
-   timestamp: new DateTimeValue('9999-12-31T23:59:59'),
-   timestamptz: new DateTimeValue('9999-12-31T23:59:59Z')
+   timestamp: new DateTimeValue('9999-12-31T23:59:59', undefined, pgwire.postgresTimeOptions),
+   timestamptz: new DateTimeValue('9999-12-31T23:59:59Z', undefined, pgwire.postgresTimeOptions)
  });
 
  expect(transformed[7]).toMatchObject({
    id: 8n,
-   timestamptz: new DateTimeValue('0022-02-03T09:13:14.000000Z', '0022-02-03 09:13:14Z')
+   timestamptz: new DateTimeValue('0022-02-03T09:13:14Z', '0022-02-03 09:13:14Z', pgwire.postgresTimeOptions)
  });
 
  expect(transformed[8]).toMatchObject({
@@ -302,10 +327,7 @@ VALUES(10, ARRAY['null']::TEXT[]);
 
    await insert(db);
 
-   const transformed = [
-     ...WalStream.getQueryData(pgwire.pgwireRows(await db.query(`SELECT * FROM test_data ORDER BY id`)))
-   ];
-
+   const transformed = await queryAll(db, `SELECT * FROM test_data ORDER BY id`);
    checkResults(transformed);
  } finally {
    await db.end();
@@ -321,17 +343,11 @@ VALUES(10, ARRAY['null']::TEXT[]);
 
    await insert(db);
 
-   const transformed = [
-     ...WalStream.getQueryData(
-       pgwire.pgwireRows(
-         await db.query({
-           statement: `SELECT * FROM test_data WHERE $1 ORDER BY id`,
-           params: [{ type: 'bool', value: true }]
-         })
-       )
-     )
-   ];
-
+   const raw = await db.query({
+     statement: `SELECT * FROM test_data WHERE $1 ORDER BY id`,
+     params: [{ type: 'bool', value: true }]
+   });
+   const transformed = await interpretResults(db, raw);
    checkResults(transformed);
  } finally {
    await db.end();
@@ -345,9 +361,9 @@ VALUES(10, ARRAY['null']::TEXT[]);
 
    await insertArrays(db);
 
-   const transformed = [
-     ...WalStream.getQueryData(pgwire.pgwireRows(await db.query(`SELECT * FROM test_data_arrays ORDER BY id`)))
-   ].map((e) => applyRowContext(e, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY));
+   const transformed = (await queryAll(db, `SELECT * FROM test_data_arrays ORDER BY id`)).map((e) =>
+     applyRowContext(e, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY)
+   );
 
    checkResultArrays(transformed);
  } finally {
@@ -440,7 +456,7 @@ VALUES(10, ARRAY['null']::TEXT[]);
  });
 
  test('date formats', async () => {
-   const db = await connectPgWire();
+   const db = await connectPgPool();
    try {
      await setupTable(db);
 
@@ -448,11 +464,7 @@ VALUES(10, ARRAY['null']::TEXT[]);
  INSERT INTO test_data(id, time, timestamp, timestamptz) VALUES (1, '17:42:01.12', '2023-03-06 15:47:12.4', '2023-03-06 15:47+02');
    `);
 
-   const [row] = [
-     ...WalStream.getQueryData(
-       pgwire.pgwireRows(await db.query(`SELECT time, timestamp, timestamptz FROM test_data`))
-     )
-   ];
+   const [row] = await queryAll(db, `SELECT time, timestamp, timestamptz FROM test_data`);
 
    const oldFormat = applyRowContext(row, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
    expect(oldFormat).toMatchObject({
@@ -461,12 +473,25 @@ INSERT INTO test_data(id, time, timestamp, timestamptz) VALUES (1, '17:42:01.12'
      timestamptz: '2023-03-06 13:47:00Z'
    });
 
-   const newFormat = applyRowContext(row, new CompatibilityContext(CompatibilityEdition.SYNC_STREAMS));
+   const newFormat = applyRowContext(row, new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS }));
    expect(newFormat).toMatchObject({
      time: '17:42:01.120000',
      timestamp: '2023-03-06T15:47:12.400000',
      timestamptz: '2023-03-06T13:47:00.000000Z'
    });
+
+   const reducedPrecisionFormat = applyRowContext(
+     row,
+     new CompatibilityContext({
+       edition: CompatibilityEdition.SYNC_STREAMS,
+       maxTimeValuePrecision: TimeValuePrecision.milliseconds
+     })
+   );
+   expect(reducedPrecisionFormat).toMatchObject({
+     time: '17:42:01.120',
+     timestamp: '2023-03-06T15:47:12.400',
+     timestamptz: '2023-03-06T13:47:00.000Z'
+   });
  } finally {
    await db.end();
  }
@@ -477,9 +502,9 @@ INSERT INTO test_data(id, time, timestamp, timestamptz) VALUES (1, '17:42:01.12'
  try {
    await clearTestDb(db);
    await db.query(`CREATE DOMAIN rating_value AS FLOAT CHECK (VALUE BETWEEN 0 AND 5);`);
-   await db.query(`CREATE TYPE composite AS (foo rating_value[], bar TEXT);`);
-   await db.query(`CREATE TYPE nested_composite AS (a BOOLEAN, b composite);`);
    await db.query(`CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')`);
+   await db.query(`CREATE TYPE composite AS (foo rating_value[], bar TEXT, mood mood);`);
+   await db.query(`CREATE TYPE nested_composite AS (a BOOLEAN, b composite);`);
 
    await db.query(`CREATE TABLE test_custom(
      id serial primary key,
@@ -487,7 +512,8 @@ INSERT INTO test_data(id, time, timestamp, timestamptz) VALUES (1, '17:42:01.12'
      composite composite,
      nested_composite nested_composite,
      boxes box[],
-     mood mood
+     mood mood,
+     moods mood[]
    );`);
 
    const slotName = 'test_slot';
@@ -504,13 +530,14 @@ INSERT INTO test_data(id, time, timestamp, timestamptz) VALUES (1, '17:42:01.12'
 
    await db.query(`
      INSERT INTO test_custom
-       (rating, composite, nested_composite, boxes, mood)
+       (rating, composite, nested_composite, boxes, mood, moods)
      VALUES (
        1,
-       (ARRAY[2,3], 'bar'),
-       (TRUE, (ARRAY[2,3], 'bar')),
+       (ARRAY[2,3], 'bar', 'sad'::mood),
+       (TRUE, (ARRAY[2,3], 'bar', 'sad'::mood)),
        ARRAY[box(point '(1,2)', point '(3,4)'), box(point '(5, 6)', point '(7,8)')],
-       'happy'
+       'happy',
+       ARRAY['sad'::mood, 'happy'::mood]
      );
    `);
 
@@ -524,24 +551,53 @@ INSERT INTO test_data(id, time, timestamp, timestamptz) VALUES (1, '17:42:01.12'
    });
 
    const [transformed] = await getReplicationTx(db, replicationStream);
+   const [queried] = await queryAll(db, `SELECT * FROM test_custom`);
    await pg.end();
 
-   const oldFormat = applyRowContext(transformed, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
-   expect(oldFormat).toMatchObject({
+   const oldFormatStreamed = applyRowContext(transformed, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
+   expect(oldFormatStreamed).toMatchObject({
      rating: '1',
-     composite: '("{2,3}",bar)',
-     nested_composite: '(t,"(""{2,3}"",bar)")',
+     composite: '("{2,3}",bar,sad)',
+     nested_composite: '(t,"(""{2,3}"",bar,sad)")',
      boxes: '["(3","4)","(1","2);(7","8)","(5","6)"]',
-     mood: 'happy'
+     mood: 'happy',
+     moods: '{sad,happy}'
    });
 
-   const newFormat = applyRowContext(transformed, new CompatibilityContext(CompatibilityEdition.SYNC_STREAMS));
-   expect(newFormat).toMatchObject({
+   const oldFormatQueried = applyRowContext(queried, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
+   expect(oldFormatQueried).toMatchObject({
+     rating: 1,
+     composite: '("{2,3}",bar,sad)',
+     nested_composite: '(t,"(""{2,3}"",bar,sad)")',
+     boxes: '["(3","4)","(1","2);(7","8)","(5","6)"]',
+     mood: 'happy',
+     moods: '{sad,happy}'
+   });
+
+   const newFormatStreamed = applyRowContext(
+     transformed,
+     new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS })
+   );
+   expect(newFormatStreamed).toMatchObject({
      rating: 1,
-     composite: '{"foo":[2.0,3.0],"bar":"bar"}',
-     nested_composite: '{"a":1,"b":{"foo":[2.0,3.0],"bar":"bar"}}',
+     composite: '{"foo":[2.0,3.0],"bar":"bar","mood":"sad"}',
+     nested_composite: '{"a":1,"b":{"foo":[2.0,3.0],"bar":"bar","mood":"sad"}}',
      boxes: JSON.stringify(['(3,4),(1,2)', '(7,8),(5,6)']),
-     mood: 'happy'
+     mood: 'happy',
+     moods: '["sad","happy"]'
+   });
+
+   const newFormatQueried = applyRowContext(
+     queried,
+     new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS })
+   );
+   expect(newFormatQueried).toMatchObject({
+     rating: 1,
+     composite: '{"foo":[2.0,3.0],"bar":"bar","mood":"sad"}',
+     nested_composite: '{"a":1,"b":{"foo":[2.0,3.0],"bar":"bar","mood":"sad"}}',
+     boxes: JSON.stringify(['(3,4),(1,2)', '(7,8),(5,6)']),
+     mood: 'happy',
+     moods: '["sad","happy"]'
    });
  } finally {
    await db.end();
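
One detail worth noting in the expectations above: in the backwards-compatible format, the nested composite uses Postgres text syntax, where an inner composite embedded as a field is wrapped in double quotes and has its own double quotes doubled. A quick check of that escaping (plain illustration, not package code):

    // '("{2,3}",bar,sad)' embedded as a field of the outer row becomes
    // quoted, with its inner double quotes doubled:
    const inner = '("{2,3}",bar,sad)';
    const asField = '"' + inner.replace(/"/g, '""') + '"';
    const outer = '(t,' + asField + ')';
    // outer === '(t,"(""{2,3}"",bar,sad)")'  -- matches nested_composite above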
@@ -594,15 +650,36 @@ INSERT INTO test_data(id, time, timestamp, timestamptz) VALUES (1, '17:42:01.12'
    });
 
    const [transformed] = await getReplicationTx(db, replicationStream);
+   const [queried] = await queryAll(db, `SELECT ranges FROM test_custom`);
    await pg.end();
 
-   const oldFormat = applyRowContext(transformed, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
-   expect(oldFormat).toMatchObject({
+   const oldFormatStreamed = applyRowContext(transformed, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
+   expect(oldFormatStreamed).toMatchObject({
+     ranges: '{"{[2,4),[6,8)}"}'
+   });
+   const oldFormatQueried = applyRowContext(queried, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
+   expect(oldFormatQueried).toMatchObject({
      ranges: '{"{[2,4),[6,8)}"}'
    });
 
-   const newFormat = applyRowContext(transformed, new CompatibilityContext(CompatibilityEdition.SYNC_STREAMS));
-   expect(newFormat).toMatchObject({
+   const newFormatStreamed = applyRowContext(
+     transformed,
+     new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS })
+   );
+   expect(newFormatStreamed).toMatchObject({
+     ranges: JSON.stringify([
+       [
+         { lower: 2, upper: 4, lower_exclusive: 0, upper_exclusive: 1 },
+         { lower: 6, upper: 8, lower_exclusive: 0, upper_exclusive: 1 }
+       ]
+     ])
+   });
+
+   const newFormatQueried = applyRowContext(
+     queried,
+     new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS })
+   );
+   expect(newFormatQueried).toMatchObject({
      ranges: JSON.stringify([
        [
          { lower: 2, upper: 4, lower_exclusive: 0, upper_exclusive: 1 },
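
For reference, the JSON shape asserted here maps Postgres range syntax onto explicit bound flags: '[2,4)' has an inclusive lower bound and an exclusive upper bound, hence lower_exclusive: 0 and upper_exclusive: 1 (SQLite-style integer booleans). A type sketch of that shape as implied by the test (names are illustrative, not exported by the package):

    // Shape implied by the expectations above (illustration only):
    type RangeBoundsJson = {
      lower: number;
      upper: number;
      lower_exclusive: 0 | 1; // '[' => 0, '(' => 1
      upper_exclusive: 0 | 1; // ']' => 0, ')' => 1
    };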
@@ -635,3 +712,18 @@ async function getReplicationTx(db: pgwire.PgClient, replicationStream: pgwire.R
    }
    return transformed;
  }
+
+ /**
+  * Simulates what WalStream does for initial snapshots.
+  */
+ async function queryAll(db: pgwire.PgClient, sql: string) {
+   const raw = await db.query(sql);
+   return await interpretResults(db, raw);
+ }
+
+ async function interpretResults(db: pgwire.PgClient, results: pgwire.PgResult) {
+   const typeCache = new PostgresTypeResolver(db);
+   await typeCache.fetchTypesForSchema();
+
+   return results.rows.map((row) => WalStream.decodeRow(row, typeCache));
+ }
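
These helpers replace the removed WalStream.getQueryData / pgwire.pgwireRows pipeline: rows are now decoded through a PostgresTypeResolver, so custom types (domains, composites, enums, ranges) resolve the same way in ad-hoc queries as in the initial snapshot. A hedged usage sketch, reusing the names defined above:

    // Sketch: decode an ad-hoc query the way the snapshot path does.
    const db = await pgwire.connectPgWire(TEST_CONNECTION_OPTIONS);
    try {
      const rows = await queryAll(db, `SELECT * FROM test_custom`);
      // Each row is a decoded input row; pass it through applyRowContext
      // with a CompatibilityContext to get the client-facing format.
    } finally {
      await db.end();
    }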
package/test/src/resuming_snapshots.test.ts
@@ -89,16 +89,16 @@ async function testResumingReplication(factory: TestStorageFactory, stopAfter: n
 
  // This delete should be using one of the ids already replicated
  const {
-   rows: [[id1]]
+   rows: [delete1]
  } = await context2.pool.query(`DELETE FROM test_data2 WHERE id = (SELECT id FROM test_data2 LIMIT 1) RETURNING id`);
  // This update should also be using one of the ids already replicated
  const {
-   rows: [[id2]]
+   rows: [delete2]
  } = await context2.pool.query(
    `UPDATE test_data2 SET description = 'update1' WHERE id = (SELECT id FROM test_data2 LIMIT 1) RETURNING id`
  );
  const {
-   rows: [[id3]]
+   rows: [delete3]
  } = await context2.pool.query(`INSERT INTO test_data2(description) SELECT 'insert1' RETURNING id`);
 
  await context2.loadNextSyncRules();
@@ -107,9 +107,15 @@
  context2.startStreaming();
  const data = await context2.getBucketData('global[]', undefined, {});
 
- const deletedRowOps = data.filter((row) => row.object_type == 'test_data2' && row.object_id === String(id1));
- const updatedRowOps = data.filter((row) => row.object_type == 'test_data2' && row.object_id === String(id2));
- const insertedRowOps = data.filter((row) => row.object_type == 'test_data2' && row.object_id === String(id3));
+ const deletedRowOps = data.filter(
+   (row) => row.object_type == 'test_data2' && row.object_id === String(delete1.decodeWithoutCustomTypes(0))
+ );
+ const updatedRowOps = data.filter(
+   (row) => row.object_type == 'test_data2' && row.object_id === String(delete2.decodeWithoutCustomTypes(0))
+ );
+ const insertedRowOps = data.filter(
+   (row) => row.object_type == 'test_data2' && row.object_id === String(delete3.decodeWithoutCustomTypes(0))
+ );
 
  if (deletedRowOps.length != 0) {
    // The deleted row was part of the first replication batch,
package/test/src/slow_tests.test.ts
@@ -121,7 +121,7 @@ bucket_definitions:
  }
  const results = await pool.query(...statements);
  const ids = results.results.map((sub) => {
-   return sub.rows[0][0] as string;
+   return sub.rows[0].decodeWithoutCustomTypes(0) as string;
  });
  await new Promise((resolve) => setTimeout(resolve, Math.random() * 30));
 
@@ -372,7 +372,7 @@ bucket_definitions:
  }
  const results = await pool.query(...statements);
  const ids = results.results.map((sub) => {
-   return sub.rows[0][0] as string;
+   return sub.rows[0].decodeWithoutCustomTypes(0) as string;
  });
  await new Promise((resolve) => setTimeout(resolve, Math.random() * 30));
  const deleteStatements: pgwire.Statement[] = ids.map((id) => {
package/test/src/types/registry.test.ts
@@ -21,7 +21,7 @@ describe('custom type registry', () => {
 
  expect(applyValueContext(syncRulesValue, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY)).toStrictEqual(old);
  expect(
-   applyValueContext(syncRulesValue, new CompatibilityContext(CompatibilityEdition.SYNC_STREAMS))
+   applyValueContext(syncRulesValue, new CompatibilityContext({ edition: CompatibilityEdition.SYNC_STREAMS }))
  ).toStrictEqual(fixed);
  }
 
package/test/src/wal_stream_utils.ts
@@ -230,7 +230,7 @@ export async function withMaxWalSize(db: pgwire.PgClient, size: string) {
  await db.query(`ALTER SYSTEM SET max_slot_wal_keep_size = '100MB'`);
  await db.query(`SELECT pg_reload_conf()`);
 
- const oldSize = r1.results[0].rows[0][0];
+ const oldSize = r1.results[0].rows[0].decodeWithoutCustomTypes(0);
 
  return {
    [Symbol.asyncDispose]: async () => {
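
A recurring change across these test files: query result rows are no longer indexed as plain arrays; they carry a decode method instead. decodeWithoutCustomTypes(0) decodes column 0 using built-in Postgres types only, bypassing the custom-type registry. Before and after, as used above (the result variable is illustrative):

    // 0.16.15: rows behaved like arrays of pre-decoded values.
    const idOld = result.rows[0][0] as string;

    // 0.16.16: rows expose decodeWithoutCustomTypes(columnIndex), which
    // decodes with built-in Postgres types and skips custom-type lookup.
    const idNew = result.rows[0].decodeWithoutCustomTypes(0) as string;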