@powersync/service-module-postgres-storage 0.1.2 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +21 -0
- package/README.md +7 -1
- package/dist/.tsbuildinfo +1 -1
- package/dist/@types/storage/PostgresBucketStorageFactory.d.ts +1 -0
- package/dist/@types/storage/batch/PostgresBucketBatch.d.ts +1 -1
- package/dist/storage/PostgresBucketStorageFactory.js +7 -0
- package/dist/storage/PostgresBucketStorageFactory.js.map +1 -1
- package/dist/storage/batch/PostgresBucketBatch.js +26 -17
- package/dist/storage/batch/PostgresBucketBatch.js.map +1 -1
- package/dist/storage/batch/PostgresPersistedBatch.js +39 -81
- package/dist/storage/batch/PostgresPersistedBatch.js.map +1 -1
- package/package.json +6 -6
- package/src/storage/PostgresBucketStorageFactory.ts +11 -0
- package/src/storage/batch/PostgresBucketBatch.ts +30 -17
- package/src/storage/batch/PostgresPersistedBatch.ts +39 -81
package/src/storage/batch/PostgresBucketBatch.ts

@@ -89,7 +89,7 @@ export class PostgresBucketBatch
 
   async save(record: storage.SaveOptions): Promise<storage.FlushedResult | null> {
     // TODO maybe share with abstract class
-    const { after, sourceTable, tag } = record;
+    const { after, before, sourceTable, tag } = record;
     for (const event of this.getTableEvents(sourceTable)) {
      this.iterateListeners((cb) =>
        cb.replicationEvent?.({
@@ -245,7 +245,10 @@ export class PostgresBucketBatch
 
   private async flushInner(): Promise<storage.FlushedResult | null> {
     const batch = this.batch;
-    if (batch == null) {
+    // Don't flush empty batches
+    // This helps prevent feedback loops when using the same database for
+    // the source data and sync bucket storage
+    if (batch == null || batch.length == 0) {
       return null;
     }
 
@@ -275,7 +278,9 @@ export class PostgresBucketBatch
     return { flushed_op: String(lastOp) };
   }
 
-  async commit(lsn: string): Promise<boolean> {
+  async commit(lsn: string, options?: storage.BucketBatchCommitOptions): Promise<boolean> {
+    const { createEmptyCheckpoints } = { ...storage.DEFAULT_BUCKET_BATCH_COMMIT_OPTIONS, ...options };
+
     await this.flush();
 
     if (this.last_checkpoint_lsn != null && lsn < this.last_checkpoint_lsn) {
@@ -309,6 +314,12 @@ export class PostgresBucketBatch
 
       return false;
     }
+
+    // Don't create a checkpoint if there were no changes
+    if (!createEmptyCheckpoints && this.persisted_op == null) {
+      return false;
+    }
+
     const now = new Date().toISOString();
     const update: Partial<models.SyncRules> = {
       last_checkpoint_lsn: lsn,
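
The two hunks above are the behavioral core of this release: commit() now takes an options bag, and createEmptyCheckpoints controls whether a checkpoint is written when nothing was persisted. A minimal sketch of how a caller might use the new signature (illustrative only; the structural interface below assumes nothing beyond what the diff itself shows):

  // Sketch, not the module's actual caller.
  interface BucketBatchCommitOptions {
    createEmptyCheckpoints?: boolean;
  }

  interface CommittableBatch {
    commit(lsn: string, options?: BucketBatchCommitOptions): Promise<boolean>;
  }

  async function commitReplicatedLsn(batch: CommittableBatch, lsn: string): Promise<boolean> {
    // With createEmptyCheckpoints: false, commit() returns false instead of
    // writing a checkpoint when no operations were persisted for this LSN.
    return batch.commit(lsn, { createEmptyCheckpoints: false });
  }
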
@@ -488,7 +499,7 @@ export class PostgresBucketBatch
           jsonb_array_elements(${{ type: 'jsonb', value: sizeLookups }}::jsonb) AS FILTER
         )
         SELECT
-
+          octet_length(c.data) AS data_size,
           c.source_table,
           c.source_key
         FROM
@@ -529,23 +540,20 @@ export class PostgresBucketBatch
     const current_data_lookup = new Map<string, CurrentDataDecoded>();
     for await (const currentDataRows of db.streamRows<CurrentData>({
       statement: /* sql */ `
-        WITH
-          filter_data AS (
-            SELECT
-              decode(FILTER ->> 'source_key', 'hex') AS source_key, -- Decoding from hex to bytea
-              (FILTER ->> 'source_table') AS source_table_id
-            FROM
-              jsonb_array_elements($1::jsonb) AS FILTER
-          )
         SELECT
-          --- With skipExistingRows, we only need to know whether or not the row exists.
          ${this.options.skip_existing_rows ? `c.source_table, c.source_key` : 'c.*'}
         FROM
           current_data c
-          JOIN filter_data f ON c.source_table = f.source_table_id
+          JOIN (
+            SELECT
+              decode(FILTER ->> 'source_key', 'hex') AS source_key,
+              FILTER ->> 'source_table' AS source_table_id
+            FROM
+              jsonb_array_elements($1::jsonb) AS FILTER
+          ) f ON c.source_table = f.source_table_id
           AND c.source_key = f.source_key
         WHERE
-          c.group_id = $2
+          c.group_id = $2;
       `,
       params: [
        {
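
For context, the rewritten query above inlines the former filter_data CTE as a join subquery: one jsonb parameter is expanded into rows by jsonb_array_elements, and hex-encoded keys are decoded back to bytea inside the query. A sketch of the parameter shape implied by the FILTER ->> 'source_table' / FILTER ->> 'source_key' accessors (all values hypothetical):

  // Hypothetical lookup list bound as the $1 jsonb parameter. Each element names
  // a source table id and a hex-encoded source key; decode(..., 'hex') in the
  // query restores the bytea key, so many rows are fetched in one round trip.
  const lookups = [
    { source_table: '1', source_key: Buffer.from('user-1').toString('hex') },
    { source_table: '1', source_key: Buffer.from('user-2').toString('hex') }
  ];
  const params = [
    { type: 'jsonb', value: lookups },
    { type: 'int4', value: 1 } // group id, mirroring $2 in the query
  ];
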
@@ -553,7 +561,7 @@ export class PostgresBucketBatch
           value: lookups
         },
         {
-          type: '
+          type: 'int4',
           value: this.group_id
         }
       ]
@@ -610,7 +618,12 @@ export class PostgresBucketBatch
         await persistedBatch.flush(db);
       }
     }
-    return resumeBatch ?? null;
+
+    // Don't return empty batches
+    if (resumeBatch?.batch.length) {
+      return resumeBatch;
+    }
+    return null;
   }
 
   protected async saveOperation(
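
This guard mirrors the flush-side change earlier in the file: a batch that produced no operations collapses to null rather than being handed back to the caller. A minimal sketch of the check, assuming only the resumeBatch?.batch.length access shown in the hunk:

  // Hypothetical minimal shape consistent with the check above.
  interface ResumeBatchLike {
    batch: unknown[];
  }

  function normalizeResumeBatch(resumeBatch: ResumeBatchLike | undefined): ResumeBatchLike | null {
    // Empty or missing batches become null, so callers see "nothing to resume".
    return resumeBatch?.batch.length ? resumeBatch : null;
  }
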
package/src/storage/batch/PostgresPersistedBatch.ts

@@ -256,33 +256,6 @@ export class PostgresPersistedBatch {
   protected async flushBucketData(db: lib_postgres.WrappedConnection) {
     if (this.bucketDataInserts.length > 0) {
       await db.sql`
-        WITH
-          parsed_data AS (
-            SELECT
-              group_id,
-              bucket_name,
-              source_table,
-              decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
-              table_name,
-              op,
-              row_id,
-              checksum,
-              data,
-              target_op
-            FROM
-              jsonb_to_recordset(${{ type: 'jsonb', value: this.bucketDataInserts }}::jsonb) AS t (
-                group_id integer,
-                bucket_name text,
-                source_table text,
-                source_key text, -- Input as hex string
-                table_name text,
-                op text,
-                row_id text,
-                checksum bigint,
-                data text,
-                target_op bigint
-              )
-          )
         INSERT INTO
           bucket_data (
             group_id,
@@ -303,14 +276,25 @@ export class PostgresPersistedBatch {
           nextval('op_id_sequence'),
           op,
           source_table,
-          source_key,
+          decode(source_key, 'hex') AS source_key,
           table_name,
           row_id,
           checksum,
           data,
           target_op
         FROM
-          parsed_data
+          json_to_recordset(${{ type: 'json', value: this.bucketDataInserts }}::json) AS t (
+            group_id integer,
+            bucket_name text,
+            source_table text,
+            source_key text, -- Input as hex string
+            table_name text,
+            op text,
+            row_id text,
+            checksum bigint,
+            data text,
+            target_op bigint
+          );
       `.execute();
     }
   }
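
The pattern in the two hunks above repeats for each flush method in this file: the old WITH parsed_data CTE is dropped and json_to_recordset expands the batched JSON directly in the FROM clause, with hex-to-bytea decoding moved into the SELECT list. The row shape implied by the AS t (...) column list, sketched as a TypeScript interface (field names from the diff; nullability is an assumption):

  // Implied by the column list above; binary values travel as hex strings and
  // are decoded with decode(source_key, 'hex') on the Postgres side.
  interface BucketDataInsertRow {
    group_id: number;
    bucket_name: string;
    source_table: string;
    source_key: string; // hex-encoded bytea
    table_name: string;
    op: string;
    row_id: string;
    checksum: number; // maps to the bigint checksum column
    data: string;
    target_op: number | null; // bigint; assumed nullable when no target op applies
  }
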
@@ -318,23 +302,6 @@ export class PostgresPersistedBatch {
   protected async flushParameterData(db: lib_postgres.WrappedConnection) {
     if (this.parameterDataInserts.length > 0) {
       await db.sql`
-        WITH
-          parsed_data AS (
-            SELECT
-              group_id,
-              source_table,
-              decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
-              decode(lookup, 'hex') AS lookup, -- Decode hex to bytea
-              bucket_parameters
-            FROM
-              jsonb_to_recordset(${{ type: 'jsonb', value: this.parameterDataInserts }}::jsonb) AS t (
-                group_id integer,
-                source_table text,
-                source_key text, -- Input as hex string
-                lookup text, -- Input as hex string
-                bucket_parameters text -- Input as stringified JSON
-              )
-          )
         INSERT INTO
           bucket_parameters (
             group_id,
@@ -346,11 +313,17 @@ export class PostgresPersistedBatch {
         SELECT
           group_id,
           source_table,
-          source_key, --
-          lookup, --
+          decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
+          decode(lookup, 'hex') AS lookup, -- Decode hex to bytea
           bucket_parameters
         FROM
-          parsed_data
+          json_to_recordset(${{ type: 'json', value: this.parameterDataInserts }}::json) AS t (
+            group_id integer,
+            source_table text,
+            source_key text, -- Input as hex string
+            lookup text, -- Input as hex string
+            bucket_parameters text -- Input as stringified JSON
+          )
       `.execute();
     }
   }
@@ -358,33 +331,6 @@ export class PostgresPersistedBatch {
   protected async flushCurrentData(db: lib_postgres.WrappedConnection) {
     if (this.currentDataInserts.size > 0) {
       await db.sql`
-        WITH
-          parsed_data AS (
-            SELECT
-              group_id,
-              source_table,
-              decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
-              buckets::jsonb AS buckets,
-              decode(data, 'hex') AS data, -- Decode hex to bytea
-              ARRAY(
-                SELECT
-                  decode((value ->> 0)::TEXT, 'hex')
-                FROM
-                  jsonb_array_elements(lookups::jsonb) AS value
-              ) AS lookups -- Decode array of hex strings to bytea[]
-            FROM
-              jsonb_to_recordset(${{
-                type: 'jsonb',
-                value: Array.from(this.currentDataInserts.values())
-              }}::jsonb) AS t (
-                group_id integer,
-                source_table text,
-                source_key text, -- Input as hex string
-                buckets text,
-                data text, -- Input as hex string
-                lookups text -- Input as stringified JSONB array of hex strings
-              )
-          )
         INSERT INTO
           current_data (
             group_id,
@@ -397,12 +343,24 @@ export class PostgresPersistedBatch {
         SELECT
           group_id,
           source_table,
-          source_key, --
-          buckets,
-          data, --
-          lookups
+          decode(source_key, 'hex') AS source_key, -- Decode hex to bytea
+          buckets::jsonb AS buckets,
+          decode(data, 'hex') AS data, -- Decode hex to bytea
+          array(
+            SELECT
+              decode(element, 'hex')
+            FROM
+              unnest(lookups) AS element
+          ) AS lookups
         FROM
-          parsed_data
+          json_to_recordset(${{ type: 'json', value: Array.from(this.currentDataInserts.values()) }}::json) AS t (
+            group_id integer,
+            source_table text,
+            source_key text, -- Input as hex string
+            buckets text,
+            data text, -- Input as hex string
+            lookups TEXT[] -- Input as stringified JSONB array of hex strings
+          )
         ON CONFLICT (group_id, source_table, source_key) DO UPDATE
         SET
           buckets = EXCLUDED.buckets,