@powersync/service-module-postgres-storage 0.0.0-dev-20250116115804
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +32 -0
- package/LICENSE +67 -0
- package/README.md +67 -0
- package/dist/.tsbuildinfo +1 -0
- package/dist/@types/index.d.ts +7 -0
- package/dist/@types/migrations/PostgresMigrationAgent.d.ts +12 -0
- package/dist/@types/migrations/PostgresMigrationStore.d.ts +14 -0
- package/dist/@types/migrations/migration-utils.d.ts +3 -0
- package/dist/@types/migrations/scripts/1684951997326-init.d.ts +3 -0
- package/dist/@types/module/PostgresStorageModule.d.ts +6 -0
- package/dist/@types/storage/PostgresBucketStorageFactory.d.ts +42 -0
- package/dist/@types/storage/PostgresCompactor.d.ts +40 -0
- package/dist/@types/storage/PostgresStorageProvider.d.ts +5 -0
- package/dist/@types/storage/PostgresSyncRulesStorage.d.ts +46 -0
- package/dist/@types/storage/PostgresTestStorageFactoryGenerator.d.ts +13 -0
- package/dist/@types/storage/batch/OperationBatch.d.ts +47 -0
- package/dist/@types/storage/batch/PostgresBucketBatch.d.ts +90 -0
- package/dist/@types/storage/batch/PostgresPersistedBatch.d.ts +64 -0
- package/dist/@types/storage/checkpoints/PostgresWriteCheckpointAPI.d.ts +20 -0
- package/dist/@types/storage/storage-index.d.ts +5 -0
- package/dist/@types/storage/sync-rules/PostgresPersistedSyncRulesContent.d.ts +17 -0
- package/dist/@types/types/codecs.d.ts +61 -0
- package/dist/@types/types/models/ActiveCheckpoint.d.ts +12 -0
- package/dist/@types/types/models/ActiveCheckpointNotification.d.ts +19 -0
- package/dist/@types/types/models/BucketData.d.ts +22 -0
- package/dist/@types/types/models/BucketParameters.d.ts +11 -0
- package/dist/@types/types/models/CurrentData.d.ts +22 -0
- package/dist/@types/types/models/Instance.d.ts +6 -0
- package/dist/@types/types/models/Migration.d.ts +12 -0
- package/dist/@types/types/models/SourceTable.d.ts +31 -0
- package/dist/@types/types/models/SyncRules.d.ts +47 -0
- package/dist/@types/types/models/WriteCheckpoint.d.ts +15 -0
- package/dist/@types/types/models/models-index.d.ts +10 -0
- package/dist/@types/types/types.d.ts +96 -0
- package/dist/@types/utils/bson.d.ts +6 -0
- package/dist/@types/utils/bucket-data.d.ts +18 -0
- package/dist/@types/utils/db.d.ts +8 -0
- package/dist/@types/utils/ts-codec.d.ts +5 -0
- package/dist/@types/utils/utils-index.d.ts +4 -0
- package/dist/index.js +8 -0
- package/dist/index.js.map +1 -0
- package/dist/migrations/PostgresMigrationAgent.js +36 -0
- package/dist/migrations/PostgresMigrationAgent.js.map +1 -0
- package/dist/migrations/PostgresMigrationStore.js +60 -0
- package/dist/migrations/PostgresMigrationStore.js.map +1 -0
- package/dist/migrations/migration-utils.js +13 -0
- package/dist/migrations/migration-utils.js.map +1 -0
- package/dist/migrations/scripts/1684951997326-init.js +196 -0
- package/dist/migrations/scripts/1684951997326-init.js.map +1 -0
- package/dist/module/PostgresStorageModule.js +23 -0
- package/dist/module/PostgresStorageModule.js.map +1 -0
- package/dist/storage/PostgresBucketStorageFactory.js +433 -0
- package/dist/storage/PostgresBucketStorageFactory.js.map +1 -0
- package/dist/storage/PostgresCompactor.js +298 -0
- package/dist/storage/PostgresCompactor.js.map +1 -0
- package/dist/storage/PostgresStorageProvider.js +35 -0
- package/dist/storage/PostgresStorageProvider.js.map +1 -0
- package/dist/storage/PostgresSyncRulesStorage.js +619 -0
- package/dist/storage/PostgresSyncRulesStorage.js.map +1 -0
- package/dist/storage/PostgresTestStorageFactoryGenerator.js +110 -0
- package/dist/storage/PostgresTestStorageFactoryGenerator.js.map +1 -0
- package/dist/storage/batch/OperationBatch.js +93 -0
- package/dist/storage/batch/OperationBatch.js.map +1 -0
- package/dist/storage/batch/PostgresBucketBatch.js +732 -0
- package/dist/storage/batch/PostgresBucketBatch.js.map +1 -0
- package/dist/storage/batch/PostgresPersistedBatch.js +367 -0
- package/dist/storage/batch/PostgresPersistedBatch.js.map +1 -0
- package/dist/storage/checkpoints/PostgresWriteCheckpointAPI.js +148 -0
- package/dist/storage/checkpoints/PostgresWriteCheckpointAPI.js.map +1 -0
- package/dist/storage/storage-index.js +6 -0
- package/dist/storage/storage-index.js.map +1 -0
- package/dist/storage/sync-rules/PostgresPersistedSyncRulesContent.js +58 -0
- package/dist/storage/sync-rules/PostgresPersistedSyncRulesContent.js.map +1 -0
- package/dist/types/codecs.js +97 -0
- package/dist/types/codecs.js.map +1 -0
- package/dist/types/models/ActiveCheckpoint.js +12 -0
- package/dist/types/models/ActiveCheckpoint.js.map +1 -0
- package/dist/types/models/ActiveCheckpointNotification.js +8 -0
- package/dist/types/models/ActiveCheckpointNotification.js.map +1 -0
- package/dist/types/models/BucketData.js +23 -0
- package/dist/types/models/BucketData.js.map +1 -0
- package/dist/types/models/BucketParameters.js +11 -0
- package/dist/types/models/BucketParameters.js.map +1 -0
- package/dist/types/models/CurrentData.js +16 -0
- package/dist/types/models/CurrentData.js.map +1 -0
- package/dist/types/models/Instance.js +5 -0
- package/dist/types/models/Instance.js.map +1 -0
- package/dist/types/models/Migration.js +12 -0
- package/dist/types/models/Migration.js.map +1 -0
- package/dist/types/models/SourceTable.js +24 -0
- package/dist/types/models/SourceTable.js.map +1 -0
- package/dist/types/models/SyncRules.js +47 -0
- package/dist/types/models/SyncRules.js.map +1 -0
- package/dist/types/models/WriteCheckpoint.js +13 -0
- package/dist/types/models/WriteCheckpoint.js.map +1 -0
- package/dist/types/models/models-index.js +11 -0
- package/dist/types/models/models-index.js.map +1 -0
- package/dist/types/types.js +46 -0
- package/dist/types/types.js.map +1 -0
- package/dist/utils/bson.js +16 -0
- package/dist/utils/bson.js.map +1 -0
- package/dist/utils/bucket-data.js +25 -0
- package/dist/utils/bucket-data.js.map +1 -0
- package/dist/utils/db.js +24 -0
- package/dist/utils/db.js.map +1 -0
- package/dist/utils/ts-codec.js +11 -0
- package/dist/utils/ts-codec.js.map +1 -0
- package/dist/utils/utils-index.js +5 -0
- package/dist/utils/utils-index.js.map +1 -0
- package/package.json +50 -0
- package/src/index.ts +10 -0
- package/src/migrations/PostgresMigrationAgent.ts +46 -0
- package/src/migrations/PostgresMigrationStore.ts +70 -0
- package/src/migrations/migration-utils.ts +14 -0
- package/src/migrations/scripts/1684951997326-init.ts +141 -0
- package/src/module/PostgresStorageModule.ts +30 -0
- package/src/storage/PostgresBucketStorageFactory.ts +496 -0
- package/src/storage/PostgresCompactor.ts +366 -0
- package/src/storage/PostgresStorageProvider.ts +42 -0
- package/src/storage/PostgresSyncRulesStorage.ts +666 -0
- package/src/storage/PostgresTestStorageFactoryGenerator.ts +61 -0
- package/src/storage/batch/OperationBatch.ts +101 -0
- package/src/storage/batch/PostgresBucketBatch.ts +885 -0
- package/src/storage/batch/PostgresPersistedBatch.ts +441 -0
- package/src/storage/checkpoints/PostgresWriteCheckpointAPI.ts +176 -0
- package/src/storage/storage-index.ts +5 -0
- package/src/storage/sync-rules/PostgresPersistedSyncRulesContent.ts +67 -0
- package/src/types/codecs.ts +136 -0
- package/src/types/models/ActiveCheckpoint.ts +15 -0
- package/src/types/models/ActiveCheckpointNotification.ts +14 -0
- package/src/types/models/BucketData.ts +26 -0
- package/src/types/models/BucketParameters.ts +14 -0
- package/src/types/models/CurrentData.ts +23 -0
- package/src/types/models/Instance.ts +8 -0
- package/src/types/models/Migration.ts +19 -0
- package/src/types/models/SourceTable.ts +32 -0
- package/src/types/models/SyncRules.ts +50 -0
- package/src/types/models/WriteCheckpoint.ts +20 -0
- package/src/types/models/models-index.ts +10 -0
- package/src/types/types.ts +73 -0
- package/src/utils/bson.ts +17 -0
- package/src/utils/bucket-data.ts +25 -0
- package/src/utils/db.ts +27 -0
- package/src/utils/ts-codec.ts +14 -0
- package/src/utils/utils-index.ts +4 -0
- package/test/src/__snapshots__/storage.test.ts.snap +9 -0
- package/test/src/__snapshots__/storage_sync.test.ts.snap +332 -0
- package/test/src/env.ts +6 -0
- package/test/src/migrations.test.ts +34 -0
- package/test/src/setup.ts +16 -0
- package/test/src/storage.test.ts +131 -0
- package/test/src/storage_compacting.test.ts +5 -0
- package/test/src/storage_sync.test.ts +12 -0
- package/test/src/util.ts +34 -0
- package/test/tsconfig.json +20 -0
- package/tsconfig.json +36 -0
- package/vitest.config.ts +13 -0
package/src/types/codecs.ts
ADDED
@@ -0,0 +1,136 @@
import * as t from 'ts-codec';

export const BIGINT_MAX = BigInt('9223372036854775807');

/**
 * The use of ts-codec:
 * We currently use pgwire for Postgres queries. This library provides fine-grained control
 * over parameter typings and efficient streaming of query responses. Additionally, configuring
 * pgwire with default certificates allows us to use the same connection configuration process
 * for both replication and storage libraries.
 *
 * Unfortunately, ORM driver support for pgwire is limited, so we rely on pure SQL queries in the
 * absence of writing an ORM driver from scratch.
 *
 * [Opinion]: Writing pure SQL queries throughout a codebase can be daunting from a maintenance
 * and debugging perspective. For example, row response types are often declared when performing a query:
 *
 * ```typescript
 * const rows = await db.queryRows<MyRowType>(`SELECT one, two FROM my_table`);
 * ```
 * This type declaration suggests `rows` is an array of `MyRowType` objects, even though no validation
 * is enforced. Adding a field to the `MyRowType` interface without updating the query could easily
 * introduce subtle bugs. Similarly, type mismatches between SQL results and TypeScript interfaces, such as
 * a `Date` field returned as a `string`, require manual conversion.
 *
 * `ts-codec` is not an ORM, but it simplifies working with pure SQL query responses in several ways:
 *
 * - **Validations**: The `decode` operation ensures that the returned row matches the expected object
 *   structure, throwing an error if it doesn't.
 * - **Decoding Columns**: pgwire already decodes common SQLite types, but `ts-codec` adds an extra layer
 *   for JS-native values. For instance, `jsonb` columns are returned as `JsonContainer`/`string` and can
 *   be automatically parsed into objects. Similarly, fields like `group_id` are converted from `bigint`
 *   to `number` for easier use.
 * - **Encoded Forms**: A single `ts-codec` type definition can infer both encoded and decoded forms. This
 *   is especially useful for persisted batch operations that rely on JSON query parameters for bulk inserts.
 *   Collections like `bucket_data`, `current_data`, and `bucket_parameters` use encoded/decoded types, making
 *   changes easier to manage and validate. While some manual encoding is done for intermediate values (e.g.,
 *   size estimation), these types are validated with `ts-codec` to ensure consistency.
 */

/**
 * Wraps a codec which is encoded to a JSON string.
 */
export const jsonb = <Decoded>(subCodec: t.Codec<Decoded, any>) =>
  t.codec<Decoded, string>(
    'jsonb',
    (decoded: Decoded) => {
      return JSON.stringify(subCodec.encode(decoded) as any);
    },
    (encoded: string | { data: string }) => {
      const s = typeof encoded == 'object' ? encoded.data : encoded;
      return subCodec.decode(JSON.parse(s));
    }
  );

/**
 * Just performs a pure JSON.parse for the decoding step.
 */
export const jsonb_raw = <Decoded>() =>
  t.codec<Decoded, string>(
    'jsonb_raw',
    (decoded: Decoded) => {
      return JSON.stringify(decoded);
    },
    (encoded: string | { data: string }) => {
      const s = typeof encoded == 'object' ? encoded.data : encoded;
      return JSON.parse(s);
    }
  );

export const bigint = t.codec<bigint, string | number>(
  'bigint',
  (decoded: bigint) => {
    return decoded.toString();
  },
  (encoded: string | number) => {
    return BigInt(encoded);
  }
);

export const uint8array = t.codec<Uint8Array, Uint8Array>(
  'uint8array',
  (d) => d,
  (e) => e
);

/**
 * PGWire returns BYTEA values as Uint8Array instances.
 * We also serialize to a hex string for bulk inserts.
 */
export const hexBuffer = t.codec(
  'hexBuffer',
  (decoded: Buffer) => {
    return decoded.toString('hex');
  },
  (encoded: string | Uint8Array) => {
    if (encoded instanceof Uint8Array) {
      return Buffer.from(encoded);
    }
    if (typeof encoded !== 'string') {
      throw new Error(`Expected either a Buffer instance or hex encoded buffer string`);
    }
    return Buffer.from(encoded, 'hex');
  }
);

/**
 * PGWire returns INTEGER columns as a `bigint`.
 * This does a decode operation to `number`.
 */
export const pgwire_number = t.codec(
  'pg_number',
  (decoded: number) => decoded,
  (encoded: bigint | number) => {
    if (typeof encoded == 'number') {
      return encoded;
    }
    if (typeof encoded !== 'bigint') {
      throw new Error(`Expected either number or bigint for value`);
    }
    if (encoded > BigInt(Number.MAX_SAFE_INTEGER) || encoded < BigInt(Number.MIN_SAFE_INTEGER)) {
      throw new RangeError('BigInt value is out of safe integer range for conversion to Number.');
    }
    return Number(encoded);
  }
);

/**
 * A codec which contains the same type on the input and output.
 */
export const IdentityCodec = <T>() =>
  t.codec<T, T>(
    'identity',
    (decoded) => decoded,
    (encoded) => encoded
  );
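For illustration only (not part of the published package): a minimal sketch of how these codecs compose, assuming ts-codec's standard `t.object`/`encode`/`decode` API as used throughout this module. The `Row` codec and the sample values are hypothetical.

```ts
import * as t from 'ts-codec';
import { bigint, jsonb } from './codecs.js';

// Hypothetical row codec combining the codecs defined above.
const Row = t.object({
  op_id: bigint,
  payload: jsonb(t.object({ name: t.string }))
});

type RowDecoded = t.Decoded<typeof Row>; // { op_id: bigint; payload: { name: string } }
type RowEncoded = t.Encoded<typeof Row>; // { op_id: string | number; payload: string }

// decode: raw pgwire values -> validated JS-native values (throws on a shape mismatch).
const decoded: RowDecoded = Row.decode({ op_id: '42', payload: '{"name":"a"}' });

// encode: JS-native values -> JSON-friendly parameters, e.g. for bulk inserts.
const encoded: RowEncoded = Row.encode(decoded);
```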
package/src/types/models/ActiveCheckpoint.ts
ADDED
@@ -0,0 +1,15 @@
import * as t from 'ts-codec';
import { bigint, pgwire_number } from '../codecs.js';

/**
 * Notification payload sent via Postgres' NOTIFY API.
 *
 */
export const ActiveCheckpoint = t.object({
  id: pgwire_number,
  last_checkpoint: t.Null.or(bigint),
  last_checkpoint_lsn: t.Null.or(t.string)
});

export type ActiveCheckpoint = t.Encoded<typeof ActiveCheckpoint>;
export type ActiveCheckpointDecoded = t.Decoded<typeof ActiveCheckpoint>;
package/src/types/models/ActiveCheckpointNotification.ts
ADDED
@@ -0,0 +1,14 @@
import * as t from 'ts-codec';
import { jsonb } from '../codecs.js';
import { ActiveCheckpoint } from './ActiveCheckpoint.js';

export const ActiveCheckpointPayload = t.object({
  active_checkpoint: ActiveCheckpoint
});

export type ActiveCheckpointPayload = t.Encoded<typeof ActiveCheckpointPayload>;
export type ActiveCheckpointPayloadDecoded = t.Decoded<typeof ActiveCheckpointPayload>;

export const ActiveCheckpointNotification = jsonb(ActiveCheckpointPayload);
export type ActiveCheckpointNotification = t.Encoded<typeof ActiveCheckpointNotification>;
export type ActiveCheckpointNotificationDecoded = t.Decoded<typeof ActiveCheckpointNotification>;
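For illustration only (not part of the package): a sketch of decoding a checkpoint NOTIFY payload with the codec above. The payload string here is a hypothetical example.

```ts
import { ActiveCheckpointNotification } from './ActiveCheckpointNotification.js';

// pgwire delivers the NOTIFY payload as a JSON string:
const payload = '{"active_checkpoint":{"id":1,"last_checkpoint":"100","last_checkpoint_lsn":"0/1A2B3C"}}';

const notification = ActiveCheckpointNotification.decode(payload);
// notification.active_checkpoint.id                  => 1 (number)
// notification.active_checkpoint.last_checkpoint     => 100n (bigint)
// notification.active_checkpoint.last_checkpoint_lsn => '0/1A2B3C'
```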
package/src/types/models/BucketData.ts
ADDED
@@ -0,0 +1,26 @@
import * as t from 'ts-codec';
import { bigint, hexBuffer, pgwire_number } from '../codecs.js';

export enum OpType {
  PUT = 'PUT',
  REMOVE = 'REMOVE',
  MOVE = 'MOVE',
  CLEAR = 'CLEAR'
}

export const BucketData = t.object({
  group_id: pgwire_number,
  bucket_name: t.string,
  op_id: bigint,
  op: t.Enum(OpType),
  source_table: t.Null.or(t.string),
  source_key: t.Null.or(hexBuffer),
  table_name: t.string.or(t.Null),
  row_id: t.string.or(t.Null),
  checksum: bigint,
  data: t.Null.or(t.string),
  target_op: t.Null.or(bigint)
});

export type BucketData = t.Encoded<typeof BucketData>;
export type BucketDataDecoded = t.Decoded<typeof BucketData>;
package/src/types/models/BucketParameters.ts
ADDED
@@ -0,0 +1,14 @@
import * as t from 'ts-codec';
import { bigint, hexBuffer, pgwire_number } from '../codecs.js';

export const BucketParameters = t.object({
  id: bigint,
  group_id: pgwire_number,
  source_table: t.string,
  source_key: hexBuffer,
  lookup: hexBuffer,
  bucket_parameters: t.string
});

export type BucketParameters = t.Encoded<typeof BucketParameters>;
export type BucketParametersDecoded = t.Decoded<typeof BucketParameters>;
package/src/types/models/CurrentData.ts
ADDED
@@ -0,0 +1,23 @@
import * as t from 'ts-codec';
import { hexBuffer, jsonb, pgwire_number } from '../codecs.js';

export const CurrentBucket = t.object({
  bucket: t.string,
  table: t.string,
  id: t.string
});

export type CurrentBucket = t.Encoded<typeof CurrentBucket>;
export type CurrentBucketDecoded = t.Decoded<typeof CurrentBucket>;

export const CurrentData = t.object({
  buckets: jsonb(t.array(CurrentBucket)),
  data: hexBuffer,
  group_id: pgwire_number,
  lookups: t.array(hexBuffer),
  source_key: hexBuffer,
  source_table: t.string
});

export type CurrentData = t.Encoded<typeof CurrentData>;
export type CurrentDataDecoded = t.Decoded<typeof CurrentData>;
package/src/types/models/Migration.ts
ADDED
@@ -0,0 +1,19 @@
import { framework } from '@powersync/service-core';
import * as t from 'ts-codec';
import { jsonb } from '../codecs.js';

export const Migration = t.object({
  last_run: t.string,
  log: jsonb(
    t.array(
      t.object({
        name: t.string,
        direction: t.Enum(framework.migrations.Direction),
        timestamp: framework.codecs.date
      })
    )
  )
});

export type Migration = t.Encoded<typeof Migration>;
export type MigrationDecoded = t.Decoded<typeof Migration>;
package/src/types/models/SourceTable.ts
ADDED
@@ -0,0 +1,32 @@
import * as t from 'ts-codec';
import { bigint, jsonb, jsonb_raw, pgwire_number } from '../codecs.js';

export type StoredRelationId = {
  object_id: string | number;
};

export const ColumnDescriptor = t.object({
  name: t.string,
  /**
   * The type of the column, e.g. VARCHAR, INT, etc.
   */
  type: t.string.optional(),
  /**
   * Some data sources have a type id that can be used to identify the type of the column.
   */
  typeId: t.number.optional()
});

export const SourceTable = t.object({
  id: t.string,
  group_id: pgwire_number,
  connection_id: bigint,
  relation_id: t.Null.or(jsonb_raw<StoredRelationId>()),
  schema_name: t.string,
  table_name: t.string,
  replica_id_columns: t.Null.or(jsonb(t.array(ColumnDescriptor))),
  snapshot_done: t.boolean
});

export type SourceTable = t.Encoded<typeof SourceTable>;
export type SourceTableDecoded = t.Decoded<typeof SourceTable>;
package/src/types/models/SyncRules.ts
ADDED
@@ -0,0 +1,50 @@
import { framework, storage } from '@powersync/service-core';
import * as t from 'ts-codec';
import { bigint, pgwire_number } from '../codecs.js';

export const SyncRules = t.object({
  id: pgwire_number,
  state: t.Enum(storage.SyncRuleState),
  /**
   * True if the initial snapshot has been replicated.
   *
   * Can only be false if state == PROCESSING.
   */
  snapshot_done: t.boolean,
  /**
   * The last consistent checkpoint.
   *
   * There may be higher OpIds used in the database if we're in the middle of replicating a large transaction.
   */
  last_checkpoint: t.Null.or(bigint),
  /**
   * The LSN associated with the last consistent checkpoint.
   */
  last_checkpoint_lsn: t.Null.or(t.string),
  /**
   * If set, no new checkpoints may be created with an LSN lower than this value.
   */
  no_checkpoint_before: t.Null.or(t.string),
  slot_name: t.string,
  /**
   * Last time we persisted a checkpoint.
   *
   * This may be old if no data is incoming.
   */
  last_checkpoint_ts: t.Null.or(framework.codecs.date),
  /**
   * Last time we persisted a checkpoint or keepalive.
   *
   * This should stay fairly current while replicating.
   */
  last_keepalive_ts: t.Null.or(framework.codecs.date),
  /**
   * If an error is stopping replication, it is stored here.
   */
  last_fatal_error: t.Null.or(t.string),
  keepalive_op: t.Null.or(bigint),
  content: t.string
});

export type SyncRules = t.Encoded<typeof SyncRules>;
export type SyncRulesDecoded = t.Decoded<typeof SyncRules>;
package/src/types/models/WriteCheckpoint.ts
ADDED
@@ -0,0 +1,20 @@
import * as t from 'ts-codec';
import { bigint, jsonb } from '../codecs.js';

export const WriteCheckpoint = t.object({
  user_id: t.string,
  lsns: jsonb(t.record(t.string)),
  write_checkpoint: bigint
});

export type WriteCheckpoint = t.Encoded<typeof WriteCheckpoint>;
export type WriteCheckpointDecoded = t.Decoded<typeof WriteCheckpoint>;

export const CustomWriteCheckpoint = t.object({
  user_id: t.string,
  write_checkpoint: bigint,
  sync_rules_id: bigint
});

export type CustomWriteCheckpoint = t.Encoded<typeof CustomWriteCheckpoint>;
export type CustomWriteCheckpointDecoded = t.Decoded<typeof CustomWriteCheckpoint>;
package/src/types/models/models-index.ts
ADDED
@@ -0,0 +1,10 @@
export * from './ActiveCheckpoint.js';
export * from './ActiveCheckpointNotification.js';
export * from './BucketData.js';
export * from './BucketParameters.js';
export * from './CurrentData.js';
export * from './Instance.js';
export * from './Migration.js';
export * from './SourceTable.js';
export * from './SyncRules.js';
export * from './WriteCheckpoint.js';
package/src/types/types.ts
ADDED
@@ -0,0 +1,73 @@
import * as lib_postgres from '@powersync/lib-service-postgres';
import * as pg_wire from '@powersync/service-jpgwire';
import { configFile } from '@powersync/service-types';
import * as t from 'ts-codec';
export * as models from './models/models-index.js';

export const MAX_BATCH_RECORD_COUNT = 2000;

export const MAX_BATCH_ESTIMATED_SIZE = 5_000_000;

export const MAX_BATCH_CURRENT_DATA_SIZE = 50_000_000;

export const BatchLimits = t.object({
  /**
   * Maximum estimated size of operations we write in a single transaction.
   */
  max_estimated_size: t.number.optional(),
  /**
   * Limit the number of documents to write in a single transaction.
   */
  max_record_count: t.number.optional()
});

export type BatchLimits = t.Encoded<typeof BatchLimits>;

export const OperationBatchLimits = BatchLimits.and(
  t.object({
    /**
     * Maximum size of current_data documents we look up at a time.
     */
    max_current_data_batch_size: t.number.optional()
  })
);

export type OperationBatchLimits = t.Encoded<typeof OperationBatchLimits>;

export const PostgresStorageConfig = configFile.BaseStorageConfig.and(lib_postgres.BasePostgresConnectionConfig).and(
  t.object({
    /**
     * Allows batch operation limits to be configured.
     * Postgres has fewer batch size restrictions than MongoDB.
     * Increasing the limits can drastically improve replication performance, but
     * can come at the cost of higher memory usage or other potential issues.
     */
    batch_limits: OperationBatchLimits.optional()
  })
);

export type PostgresStorageConfig = t.Encoded<typeof PostgresStorageConfig>;
export type PostgresStorageConfigDecoded = t.Decoded<typeof PostgresStorageConfig>;

export type RequiredOperationBatchLimits = Required<OperationBatchLimits>;

export type NormalizedPostgresStorageConfig = pg_wire.NormalizedConnectionConfig & {
  batch_limits: RequiredOperationBatchLimits;
};

export const normalizePostgresStorageConfig = (
  baseConfig: PostgresStorageConfigDecoded
): NormalizedPostgresStorageConfig => {
  return {
    ...lib_postgres.normalizeConnectionConfig(baseConfig),
    batch_limits: {
      max_current_data_batch_size: baseConfig.batch_limits?.max_current_data_batch_size ?? MAX_BATCH_CURRENT_DATA_SIZE,
      max_estimated_size: baseConfig.batch_limits?.max_estimated_size ?? MAX_BATCH_ESTIMATED_SIZE,
      max_record_count: baseConfig.batch_limits?.max_record_count ?? MAX_BATCH_RECORD_COUNT
    }
  };
};

export const isPostgresStorageConfig = (config: configFile.BaseStorageConfig): config is PostgresStorageConfig => {
  return config.type == lib_postgres.POSTGRES_CONNECTION_TYPE;
};
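For illustration only (not part of the package): a sketch of the defaulting behaviour of `normalizePostgresStorageConfig`. The config value is declared rather than constructed, since the connection fields come from `BasePostgresConnectionConfig` and are elided here.

```ts
import {
  MAX_BATCH_CURRENT_DATA_SIZE,
  MAX_BATCH_ESTIMATED_SIZE,
  normalizePostgresStorageConfig,
  type PostgresStorageConfigDecoded
} from './types.js';

// Hypothetical parsed storage config, e.g. from the service config file,
// that only overrides max_record_count:
declare const config: PostgresStorageConfigDecoded;

const normalized = normalizePostgresStorageConfig(config);
// With config.batch_limits = { max_record_count: 10_000 }:
//   normalized.batch_limits.max_record_count            === 10_000 (overridden)
//   normalized.batch_limits.max_estimated_size          === MAX_BATCH_ESTIMATED_SIZE (5_000_000)
//   normalized.batch_limits.max_current_data_batch_size === MAX_BATCH_CURRENT_DATA_SIZE (50_000_000)
```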
package/src/utils/bson.ts
ADDED
@@ -0,0 +1,17 @@
import { storage, utils } from '@powersync/service-core';
import * as uuid from 'uuid';

/**
 * BSON is used to serialize certain documents for storage in BYTEA columns.
 * JSONB columns do not directly support storing binary data, which could be required in future.
 */

export function replicaIdToSubkey(tableId: string, id: storage.ReplicaId): string {
  // Hashed UUID from the table and id
  if (storage.isUUID(id)) {
    // Special case for UUID for backwards-compatibility
    return `${tableId}/${id.toHexString()}`;
  }
  const repr = storage.serializeBson({ table: tableId, id });
  return uuid.v5(repr, utils.ID_NAMESPACE);
}
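For illustration only (not part of the package): the two subkey paths taken by `replicaIdToSubkey`. The replica ids here are hypothetical placeholders.

```ts
import { storage } from '@powersync/service-core';
import { replicaIdToSubkey } from './bson.js';

declare const uuidId: storage.ReplicaId;      // hypothetical: a UUID replica id
declare const compositeId: storage.ReplicaId; // hypothetical: any other replica id

// UUIDs keep their literal form for backwards-compatibility:
replicaIdToSubkey('table_1', uuidId); // => 'table_1/<uuid hex string>'

// Everything else is BSON-serialized and hashed to a UUIDv5, so the same
// (table, id) pair always yields the same deterministic subkey:
replicaIdToSubkey('table_1', compositeId); // => stable v5 UUID string
```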
package/src/utils/bucket-data.ts
ADDED
@@ -0,0 +1,25 @@
import { utils } from '@powersync/service-core';
import { models } from '../types/types.js';
import { replicaIdToSubkey } from './bson.js';

export const mapOpEntry = (entry: models.BucketDataDecoded) => {
  if (entry.op == models.OpType.PUT || entry.op == models.OpType.REMOVE) {
    return {
      op_id: utils.timestampToOpId(entry.op_id),
      op: entry.op,
      object_type: entry.table_name ?? undefined,
      object_id: entry.row_id ?? undefined,
      checksum: Number(entry.checksum),
      subkey: replicaIdToSubkey(entry.source_table!, entry.source_key!),
      data: entry.data
    };
  } else {
    // MOVE, CLEAR

    return {
      op_id: utils.timestampToOpId(entry.op_id),
      op: entry.op,
      checksum: Number(entry.checksum)
    };
  }
};
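For illustration only (not part of the package): the two output shapes produced by `mapOpEntry`, with the input row declared rather than constructed.

```ts
import { models } from '../types/types.js';
import { mapOpEntry } from './bucket-data.js';

declare const row: models.BucketDataDecoded; // hypothetical decoded bucket_data row

const entry = mapOpEntry(row);
// PUT / REMOVE rows keep their identifying fields:
//   { op_id, op, object_type, object_id, checksum, subkey, data }
// MOVE / CLEAR rows carry only the operation itself:
//   { op_id, op, checksum }
```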
package/src/utils/db.ts
ADDED
@@ -0,0 +1,27 @@
import * as lib_postgres from '@powersync/lib-service-postgres';

export const STORAGE_SCHEMA_NAME = 'powersync';

export const NOTIFICATION_CHANNEL = 'powersynccheckpoints';

/**
 * Re-export so that Prettier can detect the tag for formatting.
 */
export const sql = lib_postgres.sql;

export const dropTables = async (client: lib_postgres.DatabaseClient) => {
  // Lock a connection for automatic schema search paths
  await client.lockConnection(async (db) => {
    await db.sql`DROP TABLE IF EXISTS bucket_data`.execute();
    await db.sql`DROP TABLE IF EXISTS bucket_parameters`.execute();
    await db.sql`DROP TABLE IF EXISTS sync_rules`.execute();
    await db.sql`DROP TABLE IF EXISTS instance`.execute();
    await db.sql`DROP TABLE IF EXISTS bucket_data`.execute();
    await db.sql`DROP TABLE IF EXISTS current_data`.execute();
    await db.sql`DROP TABLE IF EXISTS source_tables`.execute();
    await db.sql`DROP TABLE IF EXISTS write_checkpoints`.execute();
    await db.sql`DROP TABLE IF EXISTS custom_write_checkpoints`.execute();
    await db.sql`DROP SEQUENCE IF EXISTS op_id_sequence`.execute();
    await db.sql`DROP SEQUENCE IF EXISTS sync_rules_id_sequence`.execute();
  });
};
package/src/utils/ts-codec.ts
ADDED
@@ -0,0 +1,14 @@
import * as t from 'ts-codec';

/**
 * Returns a new codec with a subset of keys. Equivalent to the TypeScript Pick utility.
 */
export const pick = <T extends t.AnyObjectCodecShape, Keys extends keyof T>(codec: t.ObjectCodec<T>, keys: Keys[]) => {
  // Filter the shape by the specified keys
  const newShape = Object.fromEntries(
    Object.entries(codec.props.shape).filter(([key]) => keys.includes(key as Keys))
  ) as Pick<T, Keys>;

  // Return a new codec with the narrowed shape
  return t.object(newShape) as t.ObjectCodec<Pick<T, Keys>>;
};
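For illustration only (not part of the package): using `pick` to narrow a model codec to the columns a query actually selects. The projection here is a hypothetical example.

```ts
import * as t from 'ts-codec';
import { models } from '../types/types.js';
import { pick } from './ts-codec.js';

// A narrowed codec mirroring e.g. `SELECT id, state FROM sync_rules`:
const SyncRulesIdState = pick(models.SyncRules, ['id', 'state']);

type SyncRulesIdState = t.Decoded<typeof SyncRulesIdState>;
// => { id: number; state: storage.SyncRuleState }
```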