@powersync/service-module-postgres-storage 0.0.0-dev-20250117095455 → 0.0.0-dev-20250214100224
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +73 -11
- package/README.md +7 -1
- package/dist/.tsbuildinfo +1 -1
- package/dist/@types/storage/PostgresBucketStorageFactory.d.ts +3 -4
- package/dist/@types/storage/batch/PostgresBucketBatch.d.ts +4 -4
- package/dist/@types/types/codecs.d.ts +2 -2
- package/dist/@types/types/models/BucketData.d.ts +1 -1
- package/dist/@types/types/models/BucketParameters.d.ts +2 -2
- package/dist/@types/types/models/CurrentData.d.ts +3 -3
- package/dist/migrations/migration-utils.js +2 -1
- package/dist/migrations/migration-utils.js.map +1 -1
- package/dist/storage/PostgresBucketStorageFactory.js +27 -16
- package/dist/storage/PostgresBucketStorageFactory.js.map +1 -1
- package/dist/storage/PostgresCompactor.js +2 -2
- package/dist/storage/PostgresCompactor.js.map +1 -1
- package/dist/storage/PostgresSyncRulesStorage.js +205 -205
- package/dist/storage/PostgresSyncRulesStorage.js.map +1 -1
- package/dist/storage/batch/PostgresBucketBatch.js +30 -21
- package/dist/storage/batch/PostgresBucketBatch.js.map +1 -1
- package/dist/storage/batch/PostgresPersistedBatch.js +39 -81
- package/dist/storage/batch/PostgresPersistedBatch.js.map +1 -1
- package/dist/storage/sync-rules/PostgresPersistedSyncRulesContent.js +2 -2
- package/dist/storage/sync-rules/PostgresPersistedSyncRulesContent.js.map +1 -1
- package/dist/types/codecs.js +3 -2
- package/dist/types/codecs.js.map +1 -1
- package/package.json +9 -9
- package/src/migrations/migration-utils.ts +2 -1
- package/src/storage/PostgresBucketStorageFactory.ts +31 -20
- package/src/storage/PostgresCompactor.ts +4 -2
- package/src/storage/PostgresSyncRulesStorage.ts +666 -666
- package/src/storage/batch/PostgresBucketBatch.ts +46 -24
- package/src/storage/batch/PostgresPersistedBatch.ts +39 -81
- package/src/storage/sync-rules/PostgresPersistedSyncRulesContent.ts +5 -2
- package/src/types/codecs.ts +3 -2
- package/tsconfig.json +0 -2

package/src/storage/batch/PostgresBucketBatch.ts
CHANGED

@@ -1,5 +1,14 @@
 import * as lib_postgres from '@powersync/lib-service-postgres';
-import {
+import {
+  container,
+  DisposableObserver,
+  ErrorCode,
+  errors,
+  logger,
+  ReplicationAssertionError,
+  ServiceAssertionError,
+  ServiceError
+} from '@powersync/lib-services-framework';
 import { storage, utils } from '@powersync/service-core';
 import * as sync_rules from '@powersync/service-sync-rules';
 import * as timers from 'timers/promises';

@@ -80,7 +89,7 @@ export class PostgresBucketBatch

   async save(record: storage.SaveOptions): Promise<storage.FlushedResult | null> {
     // TODO maybe share with abstract class
-    const { after,
+    const { after, before, sourceTable, tag } = record;
     for (const event of this.getTableEvents(sourceTable)) {
       this.iterateListeners((cb) =>
         cb.replicationEvent?.({

@@ -236,7 +245,10 @@ export class PostgresBucketBatch

   private async flushInner(): Promise<storage.FlushedResult | null> {
     const batch = this.batch;
-
+    // Don't flush empty batches
+    // This helps prevent feedback loops when using the same database for
+    // the source data and sync bucket storage
     if (batch == null || batch.length == 0) {
       return null;
     }

@@ -258,7 +270,7 @@ export class PostgresBucketBatch
     this.batch = resumeBatch;

     if (lastOp == null) {
-      throw new
+      throw new ServiceAssertionError('Unexpected last_op == null');
     }

     this.persisted_op = lastOp;

@@ -266,7 +278,9 @@ export class PostgresBucketBatch
     return { flushed_op: String(lastOp) };
   }

-  async commit(lsn: string): Promise<boolean> {
+  async commit(lsn: string, options?: storage.BucketBatchCommitOptions): Promise<boolean> {
+    const { createEmptyCheckpoints } = { ...storage.DEFAULT_BUCKET_BATCH_COMMIT_OPTIONS, ...options };
+
     await this.flush();

     if (this.last_checkpoint_lsn != null && lsn < this.last_checkpoint_lsn) {

@@ -300,6 +314,12 @@ export class PostgresBucketBatch

       return false;
     }
+
+    // Don't create a checkpoint if there were no changes
+    if (!createEmptyCheckpoints && this.persisted_op == null) {
+      return false;
+    }
+
     const now = new Date().toISOString();
     const update: Partial<models.SyncRules> = {
       last_checkpoint_lsn: lsn,

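The new optional commit argument lets replication skip writing empty checkpoints. A minimal caller-side sketch, assuming only the signature shown above; the `storage.BucketStorageBatch` interface name and the variable names are assumptions for illustration, not part of this diff:

    // Hedged sketch: commit only records a checkpoint when something was persisted.
    import { storage } from '@powersync/service-core';

    async function commitIfChanged(batch: storage.BucketStorageBatch, currentLsn: string): Promise<boolean> {
      // With createEmptyCheckpoints: false, commit() returns false instead of
      // writing a checkpoint when no operations were flushed for this batch.
      return await batch.commit(currentLsn, { createEmptyCheckpoints: false });
    }
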
@@ -479,7 +499,7 @@ export class PostgresBucketBatch
           jsonb_array_elements(${{ type: 'jsonb', value: sizeLookups }}::jsonb) AS FILTER
         )
       SELECT
-
+        octet_length(c.data) AS data_size,
         c.source_table,
         c.source_key
       FROM

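Postgres' octet_length() returns the size in bytes of a bytea or text value, which is how the data_size column above is computed. A standalone sketch of the same measurement; only the column and table names come from the diff, the rest of the query text is illustrative:

    // Illustrative only: per-row payload size for a sync rules group, using the
    // same octet_length(c.data) expression as the query above.
    const dataSizeQuery = /* sql */ `
      SELECT
        octet_length(c.data) AS data_size, -- bytes stored for this row
        c.source_table,
        c.source_key
      FROM
        current_data c
      WHERE
        c.group_id = $1
    `;
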
@@ -520,23 +540,20 @@ export class PostgresBucketBatch
     const current_data_lookup = new Map<string, CurrentDataDecoded>();
     for await (const currentDataRows of db.streamRows<CurrentData>({
       statement: /* sql */ `
-        WITH
-          filter_data AS (
-            SELECT
-              decode(FILTER ->> 'source_key', 'hex') AS source_key, -- Decoding from hex to bytea
-              (FILTER ->> 'source_table') AS source_table_id
-            FROM
-              jsonb_array_elements($1::jsonb) AS FILTER
-          )
         SELECT
-          --- With skipExistingRows, we only need to know whether or not the row exists.
           ${this.options.skip_existing_rows ? `c.source_table, c.source_key` : 'c.*'}
         FROM
           current_data c
-          JOIN
+          JOIN (
+            SELECT
+              decode(FILTER ->> 'source_key', 'hex') AS source_key,
+              FILTER ->> 'source_table' AS source_table_id
+            FROM
+              jsonb_array_elements($1::jsonb) AS FILTER
+          ) f ON c.source_table = f.source_table_id
           AND c.source_key = f.source_key
         WHERE
-          c.group_id = $2
+          c.group_id = $2;
       `,
       params: [
         {

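The lookup now joins current_data directly against a derived table expanded from the jsonb parameter instead of a separate CTE; the caller supplies (source_table, source_key) pairs with the key hex-encoded so it can be decoded back to bytea in SQL. A sketch of building that $1 parameter on the TypeScript side; the helper and interface names are illustrative, not from the package:

    // Illustrative helper: serialize lookups for the jsonb parameter used above.
    interface CurrentDataLookupEntry {
      source_table: string;
      source_key: string; // hex-encoded, decoded in SQL with decode(..., 'hex')
    }

    function buildCurrentDataLookups(keys: { tableId: string; sourceKey: Buffer }[]): CurrentDataLookupEntry[] {
      return keys.map((key) => ({
        source_table: key.tableId,
        source_key: key.sourceKey.toString('hex')
      }));
    }
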
@@ -544,7 +561,7 @@ export class PostgresBucketBatch
           value: lookups
         },
         {
-          type: '
+          type: 'int4',
           value: this.group_id
         }
       ]

@@ -601,7 +618,12 @@ export class PostgresBucketBatch
         await persistedBatch.flush(db);
       }
     }
-
+
+    // Don't return empty batches
+    if (resumeBatch?.batch.length) {
+      return resumeBatch;
+    }
+    return null;
   }

   protected async saveOperation(

@@ -618,8 +640,8 @@ export class PostgresBucketBatch

     let existingBuckets: CurrentBucket[] = [];
     let newBuckets: CurrentBucket[] = [];
-    let existingLookups: Buffer[] = [];
-    let newLookups: Buffer[] = [];
+    let existingLookups: Buffer<ArrayBuffer>[] = [];
+    let newLookups: Buffer<ArrayBuffer>[] = [];

     if (this.options.skip_existing_rows) {
       if (record.tag == storage.SaveOperationTag.INSERT) {

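The Buffer<ArrayBuffer> annotations rely on newer @types/node typings (22.x and later, building on TypeScript 5.7's generic typed arrays), where Buffer is generic over its backing buffer; the narrower type excludes buffers backed by a SharedArrayBuffer. A small sketch of what the annotation expresses, assuming such a @types/node version:

    // Assumes @types/node 22+: Buffer<T extends ArrayBufferLike> is generic, and
    // Buffer<ArrayBuffer> only admits buffers backed by a plain ArrayBuffer.
    const lookups: Buffer<ArrayBuffer>[] = [];
    lookups.push(Buffer.from('0a0b0c', 'hex'));
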
@@ -631,7 +653,7 @@ export class PostgresBucketBatch
         return null;
       }
     } else {
-      throw new
+      throw new ReplicationAssertionError(`${record.tag} not supported with skipExistingRows: true`);
     }
   }

@@ -673,14 +695,14 @@ export class PostgresBucketBatch
       }
     }

-    let afterData: Buffer | undefined;
+    let afterData: Buffer<ArrayBuffer> | undefined;
     if (afterId != null && !this.options.store_current_data) {
       afterData = storage.serializeBson({});
     } else if (afterId != null) {
       try {
         afterData = storage.serializeBson(after);
         if (afterData!.byteLength > MAX_ROW_SIZE) {
-          throw new
+          throw new ServiceError(ErrorCode.PSYNC_S1002, `Row too large: ${afterData?.byteLength}`);
         }
       } catch (e) {
         // Replace with empty values, equivalent to TOAST values

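Rows above MAX_ROW_SIZE are now rejected with ErrorCode.PSYNC_S1002 before they reach bucket storage. A minimal sketch of the same guard outside the class; the MAX_ROW_SIZE value and function name below are placeholders, not the package's:

    // Hedged sketch: serialize a replicated row and enforce a byte limit, as the
    // branch above does. The limit below is a placeholder, not the real constant.
    import { storage } from '@powersync/service-core';

    const MAX_ROW_SIZE = 15 * 1024 * 1024; // placeholder value for illustration

    function serializeWithLimit(after: Record<string, unknown>): Buffer {
      const data = storage.serializeBson(after);
      if (data.byteLength > MAX_ROW_SIZE) {
        throw new Error(`Row too large: ${data.byteLength}`);
      }
      return data;
    }
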
package/src/storage/batch/PostgresPersistedBatch.ts
CHANGED

@@ -256,33 +256,6 @@ export class PostgresPersistedBatch {
   protected async flushBucketData(db: lib_postgres.WrappedConnection) {
     if (this.bucketDataInserts.length > 0) {
       await db.sql`
-        WITH
-          parsed_data AS (
-            SELECT
-              group_id,
-              bucket_name,
-              source_table,
-              decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
-              table_name,
-              op,
-              row_id,
-              checksum,
-              data,
-              target_op
-            FROM
-              jsonb_to_recordset(${{ type: 'jsonb', value: this.bucketDataInserts }}::jsonb) AS t (
-                group_id integer,
-                bucket_name text,
-                source_table text,
-                source_key text, -- Input as hex string
-                table_name text,
-                op text,
-                row_id text,
-                checksum bigint,
-                data text,
-                target_op bigint
-              )
-          )
         INSERT INTO
           bucket_data (
             group_id,

@@ -303,14 +276,25 @@ export class PostgresPersistedBatch {
           nextval('op_id_sequence'),
           op,
           source_table,
-          source_key,
+          decode(source_key, 'hex') AS source_key,
           table_name,
           row_id,
           checksum,
           data,
           target_op
         FROM
-
+          json_to_recordset(${{ type: 'json', value: this.bucketDataInserts }}::json) AS t (
+            group_id integer,
+            bucket_name text,
+            source_table text,
+            source_key text, -- Input as hex string
+            table_name text,
+            op text,
+            row_id text,
+            checksum bigint,
+            data text,
+            target_op bigint
+          );
       `.execute();
     }
   }

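With the parsed_data CTE removed, json_to_recordset() feeds the INSERT's SELECT directly and the hex columns are decoded inline. The recordset column list implies the shape of each bucketDataInserts entry; an illustrative TypeScript mirror of that shape, where the interface name and the exact TS types are assumptions:

    // Illustrative only: one entry of the JSON payload consumed by
    // json_to_recordset(...) in flushBucketData above.
    interface BucketDataInsertRow {
      group_id: number;
      bucket_name: string;
      source_table: string;
      source_key: string; // hex string, decoded with decode(source_key, 'hex')
      table_name: string;
      op: string;
      row_id: string;
      checksum: number | bigint;
      data: string;
      target_op: number | bigint | null;
    }
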
@@ -318,23 +302,6 @@ export class PostgresPersistedBatch {
   protected async flushParameterData(db: lib_postgres.WrappedConnection) {
     if (this.parameterDataInserts.length > 0) {
       await db.sql`
-        WITH
-          parsed_data AS (
-            SELECT
-              group_id,
-              source_table,
-              decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
-              decode(lookup, 'hex') AS lookup, -- Decode hex to bytea
-              bucket_parameters
-            FROM
-              jsonb_to_recordset(${{ type: 'jsonb', value: this.parameterDataInserts }}::jsonb) AS t (
-                group_id integer,
-                source_table text,
-                source_key text, -- Input as hex string
-                lookup text, -- Input as hex string
-                bucket_parameters text -- Input as stringified JSON
-              )
-          )
         INSERT INTO
           bucket_parameters (
             group_id,

@@ -346,11 +313,17 @@ export class PostgresPersistedBatch {
         SELECT
           group_id,
           source_table,
-          source_key, --
-          lookup, --
+          decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
+          decode(lookup, 'hex') AS lookup, -- Decode hex to bytea
           bucket_parameters
         FROM
-
+          json_to_recordset(${{ type: 'json', value: this.parameterDataInserts }}::json) AS t (
+            group_id integer,
+            source_table text,
+            source_key text, -- Input as hex string
+            lookup text, -- Input as hex string
+            bucket_parameters text -- Input as stringified JSON
+          )
       `.execute();
     }
   }

@@ -358,33 +331,6 @@ export class PostgresPersistedBatch {
   protected async flushCurrentData(db: lib_postgres.WrappedConnection) {
     if (this.currentDataInserts.size > 0) {
       await db.sql`
-        WITH
-          parsed_data AS (
-            SELECT
-              group_id,
-              source_table,
-              decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
-              buckets::jsonb AS buckets,
-              decode(data, 'hex') AS data, -- Decode hex to bytea
-              ARRAY(
-                SELECT
-                  decode((value ->> 0)::TEXT, 'hex')
-                FROM
-                  jsonb_array_elements(lookups::jsonb) AS value
-              ) AS lookups -- Decode array of hex strings to bytea[]
-            FROM
-              jsonb_to_recordset(${{
-                type: 'jsonb',
-                value: Array.from(this.currentDataInserts.values())
-              }}::jsonb) AS t (
-                group_id integer,
-                source_table text,
-                source_key text, -- Input as hex string
-                buckets text,
-                data text, -- Input as hex string
-                lookups text -- Input as stringified JSONB array of hex strings
-              )
-          )
         INSERT INTO
           current_data (
             group_id,

@@ -397,12 +343,24 @@ export class PostgresPersistedBatch {
         SELECT
           group_id,
           source_table,
-          source_key, --
-          buckets,
-          data, --
-
+          decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
+          buckets::jsonb AS buckets,
+          decode(data, 'hex') AS data, -- Decode hex to bytea
+          array(
+            SELECT
+              decode(element, 'hex')
+            FROM
+              unnest(lookups) AS element
+          ) AS lookups
         FROM
-
+          json_to_recordset(${{ type: 'json', value: Array.from(this.currentDataInserts.values()) }}::json) AS t (
+            group_id integer,
+            source_table text,
+            source_key text, -- Input as hex string
+            buckets text,
+            data text, -- Input as hex string
+            lookups TEXT[] -- Input as stringified JSONB array of hex strings
+          )
         ON CONFLICT (group_id, source_table, source_key) DO UPDATE
         SET
           buckets = EXCLUDED.buckets,

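The lookups column now arrives as a Postgres text[] of hex strings and is converted to bytea[] with unnest() plus decode(), rather than being parsed out of a stringified JSON array. A sketch of the matching client-side encoding; the helper name is illustrative, not from the package:

    // Illustrative helper: hex-encode binary lookups into the text[] shape that
    // unnest(lookups) + decode(element, 'hex') converts back to bytea[] above.
    function encodeLookups(lookups: Buffer[]): string[] {
      return lookups.map((lookup) => lookup.toString('hex'));
    }

    // encodeLookups([Buffer.from([0x0a, 0x0b]), Buffer.from([0xff])]) yields ['0a0b', 'ff'].
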
package/src/storage/sync-rules/PostgresPersistedSyncRulesContent.ts
CHANGED

@@ -1,5 +1,5 @@
 import * as lib_postgres from '@powersync/lib-service-postgres';
-import { logger } from '@powersync/lib-services-framework';
+import { ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework';
 import { storage } from '@powersync/service-core';
 import { SqlSyncRules } from '@powersync/service-sync-rules';

@@ -44,7 +44,10 @@ export class PostgresPersistedSyncRulesContent implements storage.PersistedSyncR
     });
     const lockHandle = await manager.acquire();
     if (!lockHandle) {
-      throw new
+      throw new ServiceError(
+        ErrorCode.PSYNC_S1003,
+        `Sync rules: ${this.id} have been locked by another process for replication.`
+      );
     }

     const interval = setInterval(async () => {

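A failed lock acquisition now surfaces as a ServiceError with ErrorCode.PSYNC_S1003. A hedged sketch of how a caller might react, assuming only what the diff shows (ServiceError is a class constructed with a code and a message); the lock() shape below is a structural assumption for illustration:

    // Illustrative only: treat the replication-lock error as "already locked".
    import { ServiceError } from '@powersync/lib-services-framework';

    async function tryLock(content: { lock(): Promise<unknown> }): Promise<boolean> {
      try {
        await content.lock();
        return true;
      } catch (e) {
        if (e instanceof ServiceError) {
          // Another process holds the replication lock for these sync rules.
          return false;
        }
        throw e;
      }
    }
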
package/src/types/codecs.ts
CHANGED

@@ -1,3 +1,4 @@
+import { ReplicationAssertionError } from '@powersync/lib-services-framework';
 import * as t from 'ts-codec';

 export const BIGINT_MAX = BigInt('9223372036854775807');

@@ -98,7 +99,7 @@ export const hexBuffer = t.codec(
     return Buffer.from(encoded);
   }
   if (typeof encoded !== 'string') {
-    throw new
+    throw new ReplicationAssertionError(`Expected either a Buffer instance or hex encoded buffer string`);
   }
   return Buffer.from(encoded, 'hex');
 }

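The hexBuffer decoder normalizes either a Buffer or a hex string into a Buffer and rejects anything else. The same normalization written standalone, so it makes no assumptions about the ts-codec API:

    // Standalone version of the decode branch above.
    function toBuffer(encoded: unknown): Buffer {
      if (Buffer.isBuffer(encoded)) {
        return Buffer.from(encoded);
      }
      if (typeof encoded !== 'string') {
        throw new Error('Expected either a Buffer instance or hex encoded buffer string');
      }
      return Buffer.from(encoded, 'hex');
    }

    // toBuffer('48656c6c6f') and toBuffer(Buffer.from('Hello')) hold the same bytes.
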
@@ -116,7 +117,7 @@ export const pgwire_number = t.codec(
     return encoded;
   }
   if (typeof encoded !== 'bigint') {
-    throw new
+    throw new ReplicationAssertionError(`Expected either number or bigint for value`);
   }
   if (encoded > BigInt(Number.MAX_SAFE_INTEGER) || encoded < BigInt(Number.MIN_SAFE_INTEGER)) {
     throw new RangeError('BigInt value is out of safe integer range for conversion to Number.');

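pgwire_number accepts a number as-is and converts a bigint only when it fits in JavaScript's safe integer range. The same conversion, standalone:

    // Standalone version of the guarded bigint-to-number conversion above.
    function toSafeNumber(encoded: number | bigint): number {
      if (typeof encoded === 'number') {
        return encoded;
      }
      if (encoded > BigInt(Number.MAX_SAFE_INTEGER) || encoded < BigInt(Number.MIN_SAFE_INTEGER)) {
        throw new RangeError('BigInt value is out of safe integer range for conversion to Number.');
      }
      return Number(encoded);
    }

    // toSafeNumber(42n) === 42; toSafeNumber(2n ** 63n) throws a RangeError.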