@powersync/service-module-postgres-storage 0.0.0-dev-20250116115804
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +32 -0
- package/LICENSE +67 -0
- package/README.md +67 -0
- package/dist/.tsbuildinfo +1 -0
- package/dist/@types/index.d.ts +7 -0
- package/dist/@types/migrations/PostgresMigrationAgent.d.ts +12 -0
- package/dist/@types/migrations/PostgresMigrationStore.d.ts +14 -0
- package/dist/@types/migrations/migration-utils.d.ts +3 -0
- package/dist/@types/migrations/scripts/1684951997326-init.d.ts +3 -0
- package/dist/@types/module/PostgresStorageModule.d.ts +6 -0
- package/dist/@types/storage/PostgresBucketStorageFactory.d.ts +42 -0
- package/dist/@types/storage/PostgresCompactor.d.ts +40 -0
- package/dist/@types/storage/PostgresStorageProvider.d.ts +5 -0
- package/dist/@types/storage/PostgresSyncRulesStorage.d.ts +46 -0
- package/dist/@types/storage/PostgresTestStorageFactoryGenerator.d.ts +13 -0
- package/dist/@types/storage/batch/OperationBatch.d.ts +47 -0
- package/dist/@types/storage/batch/PostgresBucketBatch.d.ts +90 -0
- package/dist/@types/storage/batch/PostgresPersistedBatch.d.ts +64 -0
- package/dist/@types/storage/checkpoints/PostgresWriteCheckpointAPI.d.ts +20 -0
- package/dist/@types/storage/storage-index.d.ts +5 -0
- package/dist/@types/storage/sync-rules/PostgresPersistedSyncRulesContent.d.ts +17 -0
- package/dist/@types/types/codecs.d.ts +61 -0
- package/dist/@types/types/models/ActiveCheckpoint.d.ts +12 -0
- package/dist/@types/types/models/ActiveCheckpointNotification.d.ts +19 -0
- package/dist/@types/types/models/BucketData.d.ts +22 -0
- package/dist/@types/types/models/BucketParameters.d.ts +11 -0
- package/dist/@types/types/models/CurrentData.d.ts +22 -0
- package/dist/@types/types/models/Instance.d.ts +6 -0
- package/dist/@types/types/models/Migration.d.ts +12 -0
- package/dist/@types/types/models/SourceTable.d.ts +31 -0
- package/dist/@types/types/models/SyncRules.d.ts +47 -0
- package/dist/@types/types/models/WriteCheckpoint.d.ts +15 -0
- package/dist/@types/types/models/models-index.d.ts +10 -0
- package/dist/@types/types/types.d.ts +96 -0
- package/dist/@types/utils/bson.d.ts +6 -0
- package/dist/@types/utils/bucket-data.d.ts +18 -0
- package/dist/@types/utils/db.d.ts +8 -0
- package/dist/@types/utils/ts-codec.d.ts +5 -0
- package/dist/@types/utils/utils-index.d.ts +4 -0
- package/dist/index.js +8 -0
- package/dist/index.js.map +1 -0
- package/dist/migrations/PostgresMigrationAgent.js +36 -0
- package/dist/migrations/PostgresMigrationAgent.js.map +1 -0
- package/dist/migrations/PostgresMigrationStore.js +60 -0
- package/dist/migrations/PostgresMigrationStore.js.map +1 -0
- package/dist/migrations/migration-utils.js +13 -0
- package/dist/migrations/migration-utils.js.map +1 -0
- package/dist/migrations/scripts/1684951997326-init.js +196 -0
- package/dist/migrations/scripts/1684951997326-init.js.map +1 -0
- package/dist/module/PostgresStorageModule.js +23 -0
- package/dist/module/PostgresStorageModule.js.map +1 -0
- package/dist/storage/PostgresBucketStorageFactory.js +433 -0
- package/dist/storage/PostgresBucketStorageFactory.js.map +1 -0
- package/dist/storage/PostgresCompactor.js +298 -0
- package/dist/storage/PostgresCompactor.js.map +1 -0
- package/dist/storage/PostgresStorageProvider.js +35 -0
- package/dist/storage/PostgresStorageProvider.js.map +1 -0
- package/dist/storage/PostgresSyncRulesStorage.js +619 -0
- package/dist/storage/PostgresSyncRulesStorage.js.map +1 -0
- package/dist/storage/PostgresTestStorageFactoryGenerator.js +110 -0
- package/dist/storage/PostgresTestStorageFactoryGenerator.js.map +1 -0
- package/dist/storage/batch/OperationBatch.js +93 -0
- package/dist/storage/batch/OperationBatch.js.map +1 -0
- package/dist/storage/batch/PostgresBucketBatch.js +732 -0
- package/dist/storage/batch/PostgresBucketBatch.js.map +1 -0
- package/dist/storage/batch/PostgresPersistedBatch.js +367 -0
- package/dist/storage/batch/PostgresPersistedBatch.js.map +1 -0
- package/dist/storage/checkpoints/PostgresWriteCheckpointAPI.js +148 -0
- package/dist/storage/checkpoints/PostgresWriteCheckpointAPI.js.map +1 -0
- package/dist/storage/storage-index.js +6 -0
- package/dist/storage/storage-index.js.map +1 -0
- package/dist/storage/sync-rules/PostgresPersistedSyncRulesContent.js +58 -0
- package/dist/storage/sync-rules/PostgresPersistedSyncRulesContent.js.map +1 -0
- package/dist/types/codecs.js +97 -0
- package/dist/types/codecs.js.map +1 -0
- package/dist/types/models/ActiveCheckpoint.js +12 -0
- package/dist/types/models/ActiveCheckpoint.js.map +1 -0
- package/dist/types/models/ActiveCheckpointNotification.js +8 -0
- package/dist/types/models/ActiveCheckpointNotification.js.map +1 -0
- package/dist/types/models/BucketData.js +23 -0
- package/dist/types/models/BucketData.js.map +1 -0
- package/dist/types/models/BucketParameters.js +11 -0
- package/dist/types/models/BucketParameters.js.map +1 -0
- package/dist/types/models/CurrentData.js +16 -0
- package/dist/types/models/CurrentData.js.map +1 -0
- package/dist/types/models/Instance.js +5 -0
- package/dist/types/models/Instance.js.map +1 -0
- package/dist/types/models/Migration.js +12 -0
- package/dist/types/models/Migration.js.map +1 -0
- package/dist/types/models/SourceTable.js +24 -0
- package/dist/types/models/SourceTable.js.map +1 -0
- package/dist/types/models/SyncRules.js +47 -0
- package/dist/types/models/SyncRules.js.map +1 -0
- package/dist/types/models/WriteCheckpoint.js +13 -0
- package/dist/types/models/WriteCheckpoint.js.map +1 -0
- package/dist/types/models/models-index.js +11 -0
- package/dist/types/models/models-index.js.map +1 -0
- package/dist/types/types.js +46 -0
- package/dist/types/types.js.map +1 -0
- package/dist/utils/bson.js +16 -0
- package/dist/utils/bson.js.map +1 -0
- package/dist/utils/bucket-data.js +25 -0
- package/dist/utils/bucket-data.js.map +1 -0
- package/dist/utils/db.js +24 -0
- package/dist/utils/db.js.map +1 -0
- package/dist/utils/ts-codec.js +11 -0
- package/dist/utils/ts-codec.js.map +1 -0
- package/dist/utils/utils-index.js +5 -0
- package/dist/utils/utils-index.js.map +1 -0
- package/package.json +50 -0
- package/src/index.ts +10 -0
- package/src/migrations/PostgresMigrationAgent.ts +46 -0
- package/src/migrations/PostgresMigrationStore.ts +70 -0
- package/src/migrations/migration-utils.ts +14 -0
- package/src/migrations/scripts/1684951997326-init.ts +141 -0
- package/src/module/PostgresStorageModule.ts +30 -0
- package/src/storage/PostgresBucketStorageFactory.ts +496 -0
- package/src/storage/PostgresCompactor.ts +366 -0
- package/src/storage/PostgresStorageProvider.ts +42 -0
- package/src/storage/PostgresSyncRulesStorage.ts +666 -0
- package/src/storage/PostgresTestStorageFactoryGenerator.ts +61 -0
- package/src/storage/batch/OperationBatch.ts +101 -0
- package/src/storage/batch/PostgresBucketBatch.ts +885 -0
- package/src/storage/batch/PostgresPersistedBatch.ts +441 -0
- package/src/storage/checkpoints/PostgresWriteCheckpointAPI.ts +176 -0
- package/src/storage/storage-index.ts +5 -0
- package/src/storage/sync-rules/PostgresPersistedSyncRulesContent.ts +67 -0
- package/src/types/codecs.ts +136 -0
- package/src/types/models/ActiveCheckpoint.ts +15 -0
- package/src/types/models/ActiveCheckpointNotification.ts +14 -0
- package/src/types/models/BucketData.ts +26 -0
- package/src/types/models/BucketParameters.ts +14 -0
- package/src/types/models/CurrentData.ts +23 -0
- package/src/types/models/Instance.ts +8 -0
- package/src/types/models/Migration.ts +19 -0
- package/src/types/models/SourceTable.ts +32 -0
- package/src/types/models/SyncRules.ts +50 -0
- package/src/types/models/WriteCheckpoint.ts +20 -0
- package/src/types/models/models-index.ts +10 -0
- package/src/types/types.ts +73 -0
- package/src/utils/bson.ts +17 -0
- package/src/utils/bucket-data.ts +25 -0
- package/src/utils/db.ts +27 -0
- package/src/utils/ts-codec.ts +14 -0
- package/src/utils/utils-index.ts +4 -0
- package/test/src/__snapshots__/storage.test.ts.snap +9 -0
- package/test/src/__snapshots__/storage_sync.test.ts.snap +332 -0
- package/test/src/env.ts +6 -0
- package/test/src/migrations.test.ts +34 -0
- package/test/src/setup.ts +16 -0
- package/test/src/storage.test.ts +131 -0
- package/test/src/storage_compacting.test.ts +5 -0
- package/test/src/storage_sync.test.ts +12 -0
- package/test/src/util.ts +34 -0
- package/test/tsconfig.json +20 -0
- package/tsconfig.json +36 -0
- package/vitest.config.ts +13 -0
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { storage } from '@powersync/service-core';
|
|
2
|
+
import * as t from 'ts-codec';
|
|
3
|
+
export declare const SyncRules: t.ObjectCodec<{
|
|
4
|
+
id: t.Codec<number, number, "pg_number", t.CodecProps>;
|
|
5
|
+
state: t.EnumCodec<typeof storage.SyncRuleState>;
|
|
6
|
+
/**
|
|
7
|
+
* True if initial snapshot has been replicated.
|
|
8
|
+
*
|
|
9
|
+
* Can only be false if state == PROCESSING.
|
|
10
|
+
*/
|
|
11
|
+
snapshot_done: t.IdentityCodec<t.CodecType.Boolean>;
|
|
12
|
+
/**
|
|
13
|
+
* The last consistent checkpoint.
|
|
14
|
+
*
|
|
15
|
+
* There may be higher OpIds used in the database if we're in the middle of replicating a large transaction.
|
|
16
|
+
*/
|
|
17
|
+
last_checkpoint: t.Union<t.Codec<null, null, string, t.CodecProps>, t.Codec<bigint, string | number, string, t.CodecProps>>;
|
|
18
|
+
/**
|
|
19
|
+
* The LSN associated with the last consistent checkpoint.
|
|
20
|
+
*/
|
|
21
|
+
last_checkpoint_lsn: t.Union<t.Codec<null, null, string, t.CodecProps>, t.IdentityCodec<t.CodecType.String>>;
|
|
22
|
+
/**
|
|
23
|
+
* If set, no new checkpoints may be created < this value.
|
|
24
|
+
*/
|
|
25
|
+
no_checkpoint_before: t.Union<t.Codec<null, null, string, t.CodecProps>, t.IdentityCodec<t.CodecType.String>>;
|
|
26
|
+
slot_name: t.IdentityCodec<t.CodecType.String>;
|
|
27
|
+
/**
|
|
28
|
+
* Last time we persisted a checkpoint.
|
|
29
|
+
*
|
|
30
|
+
* This may be old if no data is incoming.
|
|
31
|
+
*/
|
|
32
|
+
last_checkpoint_ts: t.Union<t.Codec<null, null, string, t.CodecProps>, t.Codec<Date, string, string, t.CodecProps>>;
|
|
33
|
+
/**
|
|
34
|
+
* Last time we persisted a checkpoint or keepalive.
|
|
35
|
+
*
|
|
36
|
+
* This should stay fairly current while replicating.
|
|
37
|
+
*/
|
|
38
|
+
last_keepalive_ts: t.Union<t.Codec<null, null, string, t.CodecProps>, t.Codec<Date, string, string, t.CodecProps>>;
|
|
39
|
+
/**
|
|
40
|
+
* If an error is stopping replication, it will be stored here.
|
|
41
|
+
*/
|
|
42
|
+
last_fatal_error: t.Union<t.Codec<null, null, string, t.CodecProps>, t.IdentityCodec<t.CodecType.String>>;
|
|
43
|
+
keepalive_op: t.Union<t.Codec<null, null, string, t.CodecProps>, t.Codec<bigint, string | number, string, t.CodecProps>>;
|
|
44
|
+
content: t.IdentityCodec<t.CodecType.String>;
|
|
45
|
+
}>;
|
|
46
|
+
export type SyncRules = t.Encoded<typeof SyncRules>;
|
|
47
|
+
export type SyncRulesDecoded = t.Decoded<typeof SyncRules>;
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import * as t from 'ts-codec';
|
|
2
|
+
export declare const WriteCheckpoint: t.ObjectCodec<{
|
|
3
|
+
user_id: t.IdentityCodec<t.CodecType.String>;
|
|
4
|
+
lsns: t.Codec<Record<string, string>, string, string, t.CodecProps>;
|
|
5
|
+
write_checkpoint: t.Codec<bigint, string | number, string, t.CodecProps>;
|
|
6
|
+
}>;
|
|
7
|
+
export type WriteCheckpoint = t.Encoded<typeof WriteCheckpoint>;
|
|
8
|
+
export type WriteCheckpointDecoded = t.Decoded<typeof WriteCheckpoint>;
|
|
9
|
+
export declare const CustomWriteCheckpoint: t.ObjectCodec<{
|
|
10
|
+
user_id: t.IdentityCodec<t.CodecType.String>;
|
|
11
|
+
write_checkpoint: t.Codec<bigint, string | number, string, t.CodecProps>;
|
|
12
|
+
sync_rules_id: t.Codec<bigint, string | number, string, t.CodecProps>;
|
|
13
|
+
}>;
|
|
14
|
+
export type CustomWriteCheckpoint = t.Encoded<typeof CustomWriteCheckpoint>;
|
|
15
|
+
export type CustomWriteCheckpointDecoded = t.Decoded<typeof CustomWriteCheckpoint>;
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
export * from './ActiveCheckpoint.js';
|
|
2
|
+
export * from './ActiveCheckpointNotification.js';
|
|
3
|
+
export * from './BucketData.js';
|
|
4
|
+
export * from './BucketParameters.js';
|
|
5
|
+
export * from './CurrentData.js';
|
|
6
|
+
export * from './Instance.js';
|
|
7
|
+
export * from './Migration.js';
|
|
8
|
+
export * from './SourceTable.js';
|
|
9
|
+
export * from './SyncRules.js';
|
|
10
|
+
export * from './WriteCheckpoint.js';
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import * as pg_wire from '@powersync/service-jpgwire';
|
|
2
|
+
import { configFile } from '@powersync/service-types';
|
|
3
|
+
import * as t from 'ts-codec';
|
|
4
|
+
export * as models from './models/models-index.js';
|
|
5
|
+
export declare const MAX_BATCH_RECORD_COUNT = 2000;
|
|
6
|
+
export declare const MAX_BATCH_ESTIMATED_SIZE = 5000000;
|
|
7
|
+
export declare const MAX_BATCH_CURRENT_DATA_SIZE = 50000000;
|
|
8
|
+
export declare const BatchLimits: t.ObjectCodec<{
|
|
9
|
+
/**
|
|
10
|
+
* Maximum size of operations we write in a single transaction.
|
|
11
|
+
*/
|
|
12
|
+
max_estimated_size: t.OptionalCodec<t.Codec<number, number, string, t.CodecProps>>;
|
|
13
|
+
/**
|
|
14
|
+
* Limit number of documents to write in a single transaction.
|
|
15
|
+
*/
|
|
16
|
+
max_record_count: t.OptionalCodec<t.Codec<number, number, string, t.CodecProps>>;
|
|
17
|
+
}>;
|
|
18
|
+
export type BatchLimits = t.Encoded<typeof BatchLimits>;
|
|
19
|
+
export declare const OperationBatchLimits: t.Intersection<t.Codec<{
|
|
20
|
+
max_estimated_size?: number | undefined;
|
|
21
|
+
max_record_count?: number | undefined;
|
|
22
|
+
}, {
|
|
23
|
+
max_estimated_size?: number | undefined;
|
|
24
|
+
max_record_count?: number | undefined;
|
|
25
|
+
}, string, t.CodecProps>, t.ObjectCodec<{
|
|
26
|
+
/**
|
|
27
|
+
* Maximum size of size of current_data documents we lookup at a time.
|
|
28
|
+
*/
|
|
29
|
+
max_current_data_batch_size: t.OptionalCodec<t.Codec<number, number, string, t.CodecProps>>;
|
|
30
|
+
}>>;
|
|
31
|
+
export type OperationBatchLimits = t.Encoded<typeof OperationBatchLimits>;
|
|
32
|
+
export declare const PostgresStorageConfig: t.Intersection<t.Codec<{
|
|
33
|
+
type: string;
|
|
34
|
+
} & {
|
|
35
|
+
type: "postgresql";
|
|
36
|
+
id?: string | undefined;
|
|
37
|
+
tag?: string | undefined;
|
|
38
|
+
uri?: string | undefined;
|
|
39
|
+
hostname?: string | undefined;
|
|
40
|
+
port?: number | undefined;
|
|
41
|
+
username?: string | undefined;
|
|
42
|
+
password?: string | undefined;
|
|
43
|
+
database?: string | undefined;
|
|
44
|
+
sslmode?: "verify-full" | "verify-ca" | "disable" | undefined;
|
|
45
|
+
cacert?: string | undefined;
|
|
46
|
+
client_certificate?: string | undefined;
|
|
47
|
+
client_private_key?: string | undefined;
|
|
48
|
+
tls_servername?: string | undefined;
|
|
49
|
+
reject_ip_ranges?: string[] | undefined;
|
|
50
|
+
slot_name_prefix?: string | undefined;
|
|
51
|
+
}, {
|
|
52
|
+
type: string;
|
|
53
|
+
} & {
|
|
54
|
+
type: "postgresql";
|
|
55
|
+
id?: string | undefined;
|
|
56
|
+
tag?: string | undefined;
|
|
57
|
+
uri?: string | undefined;
|
|
58
|
+
hostname?: string | undefined;
|
|
59
|
+
port?: string | number | undefined;
|
|
60
|
+
username?: string | undefined;
|
|
61
|
+
password?: string | undefined;
|
|
62
|
+
database?: string | undefined;
|
|
63
|
+
sslmode?: "verify-full" | "verify-ca" | "disable" | undefined;
|
|
64
|
+
cacert?: string | undefined;
|
|
65
|
+
client_certificate?: string | undefined;
|
|
66
|
+
client_private_key?: string | undefined;
|
|
67
|
+
tls_servername?: string | undefined;
|
|
68
|
+
reject_ip_ranges?: string[] | undefined;
|
|
69
|
+
slot_name_prefix?: string | undefined;
|
|
70
|
+
}, string, t.CodecProps>, t.ObjectCodec<{
|
|
71
|
+
/**
|
|
72
|
+
* Allow batch operation limits to be configurable.
|
|
73
|
+
* Postgres has less batch size restrictions compared to MongoDB.
|
|
74
|
+
* Increasing limits can drastically improve replication performance, but
|
|
75
|
+
* can come at the cost of higher memory usage or potential issues.
|
|
76
|
+
*/
|
|
77
|
+
batch_limits: t.OptionalCodec<t.Codec<{
|
|
78
|
+
max_estimated_size?: number | undefined;
|
|
79
|
+
max_record_count?: number | undefined;
|
|
80
|
+
} & {
|
|
81
|
+
max_current_data_batch_size?: number | undefined;
|
|
82
|
+
}, {
|
|
83
|
+
max_estimated_size?: number | undefined;
|
|
84
|
+
max_record_count?: number | undefined;
|
|
85
|
+
} & {
|
|
86
|
+
max_current_data_batch_size?: number | undefined;
|
|
87
|
+
}, string, t.CodecProps>>;
|
|
88
|
+
}>>;
|
|
89
|
+
export type PostgresStorageConfig = t.Encoded<typeof PostgresStorageConfig>;
|
|
90
|
+
export type PostgresStorageConfigDecoded = t.Decoded<typeof PostgresStorageConfig>;
|
|
91
|
+
export type RequiredOperationBatchLimits = Required<OperationBatchLimits>;
|
|
92
|
+
export type NormalizedPostgresStorageConfig = pg_wire.NormalizedConnectionConfig & {
|
|
93
|
+
batch_limits: RequiredOperationBatchLimits;
|
|
94
|
+
};
|
|
95
|
+
export declare const normalizePostgresStorageConfig: (baseConfig: PostgresStorageConfigDecoded) => NormalizedPostgresStorageConfig;
|
|
96
|
+
export declare const isPostgresStorageConfig: (config: configFile.BaseStorageConfig) => config is PostgresStorageConfig;
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import { storage } from '@powersync/service-core';
|
|
2
|
+
/**
|
|
3
|
+
* BSON is used to serialize certain documents for storage in BYTEA columns.
|
|
4
|
+
* JSONB columns do not directly support storing binary data which could be required in future.
|
|
5
|
+
*/
|
|
6
|
+
export declare function replicaIdToSubkey(tableId: string, id: storage.ReplicaId): string;
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { models } from '../types/types.js';
|
|
2
|
+
export declare const mapOpEntry: (entry: models.BucketDataDecoded) => {
|
|
3
|
+
op_id: string;
|
|
4
|
+
op: models.OpType;
|
|
5
|
+
object_type: string | undefined;
|
|
6
|
+
object_id: string | undefined;
|
|
7
|
+
checksum: number;
|
|
8
|
+
subkey: string;
|
|
9
|
+
data: string | null;
|
|
10
|
+
} | {
|
|
11
|
+
op_id: string;
|
|
12
|
+
op: models.OpType;
|
|
13
|
+
checksum: number;
|
|
14
|
+
object_type?: undefined;
|
|
15
|
+
object_id?: undefined;
|
|
16
|
+
subkey?: undefined;
|
|
17
|
+
data?: undefined;
|
|
18
|
+
};
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import * as lib_postgres from '@powersync/lib-service-postgres';
|
|
2
|
+
export declare const STORAGE_SCHEMA_NAME = "powersync";
|
|
3
|
+
export declare const NOTIFICATION_CHANNEL = "powersynccheckpoints";
|
|
4
|
+
/**
|
|
5
|
+
* Re export for prettier to detect the tag better
|
|
6
|
+
*/
|
|
7
|
+
export declare const sql: (strings: TemplateStringsArray, ...params: import("@powersync/service-jpgwire").StatementParam[]) => import("@powersync/service-jpgwire").Statement;
|
|
8
|
+
export declare const dropTables: (client: lib_postgres.DatabaseClient) => Promise<void>;
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import * as t from 'ts-codec';
|
|
2
|
+
/**
|
|
3
|
+
* Returns a new codec with a subset of keys. Equivalent to the TypeScript Pick utility.
|
|
4
|
+
*/
|
|
5
|
+
export declare const pick: <T extends t.AnyObjectCodecShape, Keys extends keyof T>(codec: t.ObjectCodec<T>, keys: Keys[]) => t.ObjectCodec<Pick<T, Keys>>;
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
export * from './module/PostgresStorageModule.js';
|
|
2
|
+
export * from './migrations/PostgresMigrationAgent.js';
|
|
3
|
+
export * from './utils/utils-index.js';
|
|
4
|
+
export * as utils from './utils/utils-index.js';
|
|
5
|
+
export * from './storage/storage-index.js';
|
|
6
|
+
export * as storage from './storage/storage-index.js';
|
|
7
|
+
export * from './types/types.js';
|
|
8
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,mCAAmC,CAAC;AAElD,cAAc,wCAAwC,CAAC;AAEvD,cAAc,wBAAwB,CAAC;AACvC,OAAO,KAAK,KAAK,MAAM,wBAAwB,CAAC;AAEhD,cAAc,4BAA4B,CAAC;AAC3C,OAAO,KAAK,OAAO,MAAM,4BAA4B,CAAC;AACtD,cAAc,kBAAkB,CAAC"}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import * as lib_postgres from '@powersync/lib-service-postgres';
|
|
2
|
+
import { migrations } from '@powersync/service-core';
|
|
3
|
+
import * as path from 'path';
|
|
4
|
+
import { fileURLToPath } from 'url';
|
|
5
|
+
import { normalizePostgresStorageConfig } from '../types/types.js';
|
|
6
|
+
import { STORAGE_SCHEMA_NAME } from '../utils/db.js';
|
|
7
|
+
import { PostgresMigrationStore } from './PostgresMigrationStore.js';
|
|
8
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
9
|
+
const __dirname = path.dirname(__filename);
|
|
10
|
+
const MIGRATIONS_DIR = path.join(__dirname, 'scripts');
|
|
11
|
+
export class PostgresMigrationAgent extends migrations.AbstractPowerSyncMigrationAgent {
|
|
12
|
+
store;
|
|
13
|
+
locks;
|
|
14
|
+
db;
|
|
15
|
+
constructor(config) {
|
|
16
|
+
super();
|
|
17
|
+
this.db = new lib_postgres.DatabaseClient({
|
|
18
|
+
config: normalizePostgresStorageConfig(config),
|
|
19
|
+
schema: STORAGE_SCHEMA_NAME
|
|
20
|
+
});
|
|
21
|
+
this.store = new PostgresMigrationStore({
|
|
22
|
+
db: this.db
|
|
23
|
+
});
|
|
24
|
+
this.locks = new lib_postgres.PostgresLockManager({
|
|
25
|
+
name: 'migrations',
|
|
26
|
+
db: this.db
|
|
27
|
+
});
|
|
28
|
+
}
|
|
29
|
+
getInternalScriptsDir() {
|
|
30
|
+
return MIGRATIONS_DIR;
|
|
31
|
+
}
|
|
32
|
+
async [Symbol.asyncDispose]() {
|
|
33
|
+
await this.db[Symbol.asyncDispose]();
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
//# sourceMappingURL=PostgresMigrationAgent.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"PostgresMigrationAgent.js","sourceRoot":"","sources":["../../src/migrations/PostgresMigrationAgent.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,YAAY,MAAM,iCAAiC,CAAC;AAEhE,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AACrD,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AAEpC,OAAO,EAAE,8BAA8B,EAAgC,MAAM,mBAAmB,CAAC;AAEjG,OAAO,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AACrD,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AAErE,MAAM,UAAU,GAAG,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClD,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;AAE3C,MAAM,cAAc,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;AAEvD,MAAM,OAAO,sBAAuB,SAAQ,UAAU,CAAC,+BAA+B;IACpF,KAAK,CAA2B;IAChC,KAAK,CAAwB;IAEnB,EAAE,CAA8B;IAE1C,YAAY,MAAoC;QAC9C,KAAK,EAAE,CAAC;QAER,IAAI,CAAC,EAAE,GAAG,IAAI,YAAY,CAAC,cAAc,CAAC;YACxC,MAAM,EAAE,8BAA8B,CAAC,MAAM,CAAC;YAC9C,MAAM,EAAE,mBAAmB;SAC5B,CAAC,CAAC;QACH,IAAI,CAAC,KAAK,GAAG,IAAI,sBAAsB,CAAC;YACtC,EAAE,EAAE,IAAI,CAAC,EAAE;SACZ,CAAC,CAAC;QACH,IAAI,CAAC,KAAK,GAAG,IAAI,YAAY,CAAC,mBAAmB,CAAC;YAChD,IAAI,EAAE,YAAY;YAClB,EAAE,EAAE,IAAI,CAAC,EAAE;SACZ,CAAC,CAAC;IACL,CAAC;IAED,qBAAqB;QACnB,OAAO,cAAc,CAAC;IACxB,CAAC;IAED,KAAK,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC;QACzB,MAAM,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,YAAY,CAAC,EAAE,CAAC;IACvC,CAAC;CACF"}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import { models } from '../types/types.js';
|
|
2
|
+
import { sql } from '../utils/db.js';
|
|
3
|
+
export class PostgresMigrationStore {
|
|
4
|
+
options;
|
|
5
|
+
constructor(options) {
|
|
6
|
+
this.options = options;
|
|
7
|
+
}
|
|
8
|
+
get db() {
|
|
9
|
+
return this.options.db;
|
|
10
|
+
}
|
|
11
|
+
async init() {
|
|
12
|
+
await this.db.query(sql `
|
|
13
|
+
CREATE TABLE IF NOT EXISTS migrations (
|
|
14
|
+
id SERIAL PRIMARY KEY,
|
|
15
|
+
last_run TEXT,
|
|
16
|
+
LOG JSONB NOT NULL
|
|
17
|
+
);
|
|
18
|
+
`);
|
|
19
|
+
}
|
|
20
|
+
async clear() {
|
|
21
|
+
await this.db.query(sql `DELETE FROM migrations;`);
|
|
22
|
+
}
|
|
23
|
+
async load() {
|
|
24
|
+
const res = await this.db.sql `
|
|
25
|
+
SELECT
|
|
26
|
+
last_run,
|
|
27
|
+
LOG
|
|
28
|
+
FROM
|
|
29
|
+
migrations
|
|
30
|
+
LIMIT
|
|
31
|
+
1
|
|
32
|
+
`
|
|
33
|
+
.decoded(models.Migration)
|
|
34
|
+
.first();
|
|
35
|
+
if (!res) {
|
|
36
|
+
return;
|
|
37
|
+
}
|
|
38
|
+
return {
|
|
39
|
+
last_run: res.last_run,
|
|
40
|
+
log: res.log
|
|
41
|
+
};
|
|
42
|
+
}
|
|
43
|
+
async save(state) {
|
|
44
|
+
await this.db.query(sql `
|
|
45
|
+
INSERT INTO
|
|
46
|
+
migrations (id, last_run, LOG)
|
|
47
|
+
VALUES
|
|
48
|
+
(
|
|
49
|
+
1,
|
|
50
|
+
${{ type: 'varchar', value: state.last_run }},
|
|
51
|
+
${{ type: 'jsonb', value: state.log }}
|
|
52
|
+
)
|
|
53
|
+
ON CONFLICT (id) DO UPDATE
|
|
54
|
+
SET
|
|
55
|
+
last_run = EXCLUDED.last_run,
|
|
56
|
+
LOG = EXCLUDED.log;
|
|
57
|
+
`);
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
//# sourceMappingURL=PostgresMigrationStore.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"PostgresMigrationStore.js","sourceRoot":"","sources":["../../src/migrations/PostgresMigrationStore.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAC3C,OAAO,EAAE,GAAG,EAAE,MAAM,gBAAgB,CAAC;AAMrC,MAAM,OAAO,sBAAsB;IACX;IAAtB,YAAsB,OAAsC;QAAtC,YAAO,GAAP,OAAO,CAA+B;IAAG,CAAC;IAEhE,IAAc,EAAE;QACd,OAAO,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;IACzB,CAAC;IAED,KAAK,CAAC,IAAI;QACR,MAAM,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAA;;;;;;KAMtB,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,KAAK;QACT,MAAM,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAA,yBAAyB,CAAC,CAAC;IACpD,CAAC;IAED,KAAK,CAAC,IAAI;QACR,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,EAAE,CAAC,GAAG,CAAA;;;;;;;;KAQ5B;aACE,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC;aACzB,KAAK,EAAE,CAAC;QAEX,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,OAAO;QACT,CAAC;QAED,OAAO;YACL,QAAQ,EAAE,GAAG,CAAC,QAAQ;YACtB,GAAG,EAAE,GAAG,CAAC,GAAG;SACb,CAAC;IACJ,CAAC;IAED,KAAK,CAAC,IAAI,CAAC,KAAgC;QACzC,MAAM,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAA;;;;;;YAMf,EAAE,IAAI,EAAE,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,QAAQ,EAAE;YAC1C,EAAE,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,GAAG,EAAE;;;;;;KAM1C,CAAC,CAAC;IACL,CAAC;CACF"}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import * as lib_postgres from '@powersync/lib-service-postgres';
|
|
2
|
+
import { isPostgresStorageConfig, normalizePostgresStorageConfig, PostgresStorageConfig } from '../types/types.js';
|
|
3
|
+
import { STORAGE_SCHEMA_NAME } from '../utils/db.js';
|
|
4
|
+
export const openMigrationDB = (config) => {
|
|
5
|
+
if (!isPostgresStorageConfig(config)) {
|
|
6
|
+
throw new Error(`Input storage configuration is not for Postgres`);
|
|
7
|
+
}
|
|
8
|
+
return new lib_postgres.DatabaseClient({
|
|
9
|
+
config: normalizePostgresStorageConfig(PostgresStorageConfig.decode(config)),
|
|
10
|
+
schema: STORAGE_SCHEMA_NAME
|
|
11
|
+
});
|
|
12
|
+
};
|
|
13
|
+
//# sourceMappingURL=migration-utils.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migration-utils.js","sourceRoot":"","sources":["../../src/migrations/migration-utils.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,YAAY,MAAM,iCAAiC,CAAC;AAEhE,OAAO,EAAE,uBAAuB,EAAE,8BAA8B,EAAE,qBAAqB,EAAE,MAAM,mBAAmB,CAAC;AACnH,OAAO,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AAErD,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,MAAoC,EAAE,EAAE;IACtE,IAAI,CAAC,uBAAuB,CAAC,MAAM,CAAC,EAAE,CAAC;QACrC,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAC;IACrE,CAAC;IACD,OAAO,IAAI,YAAY,CAAC,cAAc,CAAC;QACrC,MAAM,EAAE,8BAA8B,CAAC,qBAAqB,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAC5E,MAAM,EAAE,mBAAmB;KAC5B,CAAC,CAAC;AACL,CAAC,CAAC"}
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
|
|
2
|
+
if (value !== null && value !== void 0) {
|
|
3
|
+
if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
|
|
4
|
+
var dispose, inner;
|
|
5
|
+
if (async) {
|
|
6
|
+
if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
|
|
7
|
+
dispose = value[Symbol.asyncDispose];
|
|
8
|
+
}
|
|
9
|
+
if (dispose === void 0) {
|
|
10
|
+
if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
|
|
11
|
+
dispose = value[Symbol.dispose];
|
|
12
|
+
if (async) inner = dispose;
|
|
13
|
+
}
|
|
14
|
+
if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
|
|
15
|
+
if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
|
|
16
|
+
env.stack.push({ value: value, dispose: dispose, async: async });
|
|
17
|
+
}
|
|
18
|
+
else if (async) {
|
|
19
|
+
env.stack.push({ async: true });
|
|
20
|
+
}
|
|
21
|
+
return value;
|
|
22
|
+
};
|
|
23
|
+
var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
|
|
24
|
+
return function (env) {
|
|
25
|
+
function fail(e) {
|
|
26
|
+
env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
|
|
27
|
+
env.hasError = true;
|
|
28
|
+
}
|
|
29
|
+
var r, s = 0;
|
|
30
|
+
function next() {
|
|
31
|
+
while (r = env.stack.pop()) {
|
|
32
|
+
try {
|
|
33
|
+
if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
|
|
34
|
+
if (r.dispose) {
|
|
35
|
+
var result = r.dispose.call(r.value);
|
|
36
|
+
if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
|
|
37
|
+
}
|
|
38
|
+
else s |= 1;
|
|
39
|
+
}
|
|
40
|
+
catch (e) {
|
|
41
|
+
fail(e);
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
|
|
45
|
+
if (env.hasError) throw env.error;
|
|
46
|
+
}
|
|
47
|
+
return next();
|
|
48
|
+
};
|
|
49
|
+
})(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
|
|
50
|
+
var e = new Error(message);
|
|
51
|
+
return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
|
|
52
|
+
});
|
|
53
|
+
import { dropTables } from '../../utils/db.js';
|
|
54
|
+
import { openMigrationDB } from '../migration-utils.js';
|
|
55
|
+
/**
 * Initial schema migration (up) for the Postgres bucket-storage backend:
 * creates the sequences, tables and indexes used by sync-rules storage.
 *
 * This is compiled output of a TypeScript `await using` declaration: the
 * env_1 / __addDisposableResource / __disposeResources scaffolding guarantees
 * the migration DB client is asynchronously disposed on exit, on both the
 * success and the error path.
 *
 * @param context migration context; reads `context.service_context.configuration.storage`
 */
export const up = async (context) => {
    const env_1 = { stack: [], error: void 0, hasError: false };
    try {
        const { service_context: { configuration } } = context;
        // Registered as an async-disposable resource; released in `finally`.
        const client = __addDisposableResource(env_1, openMigrationDB(configuration.storage), true);
        /**
         * Request an explicit connection which will automatically set the search
         * path to the powersync schema
         */
        await client.transaction(async (db) => {
            // int8 sequence; used below as the default for bucket_parameters.id.
            await db.sql `
        CREATE SEQUENCE op_id_sequence AS int8 START
        WITH
          1
      `.execute();
            await db.sql `
        CREATE SEQUENCE sync_rules_id_sequence AS int START
        WITH
          1
      `.execute();
            // Bucket operation log, keyed by (group, bucket, op id).
            await db.sql `
        CREATE TABLE bucket_data (
          group_id integer NOT NULL,
          bucket_name TEXT NOT NULL,
          op_id bigint NOT NULL,
          CONSTRAINT unique_id PRIMARY KEY (group_id, bucket_name, op_id),
          op text NOT NULL,
          source_table TEXT,
          source_key bytea,
          table_name TEXT,
          row_id TEXT,
          checksum bigint NOT NULL,
          data TEXT,
          target_op bigint
        )
      `.execute();
            await db.sql `CREATE TABLE instance (id TEXT PRIMARY KEY) `.execute();
            // One row per sync-rules deployment, tracking replication progress.
            await db.sql `
        CREATE TABLE sync_rules (
          id INTEGER PRIMARY KEY,
          state TEXT NOT NULL,
          snapshot_done BOOLEAN NOT NULL DEFAULT FALSE,
          last_checkpoint BIGINT,
          last_checkpoint_lsn TEXT,
          no_checkpoint_before TEXT,
          slot_name TEXT,
          last_checkpoint_ts TIMESTAMP WITH TIME ZONE,
          last_keepalive_ts TIMESTAMP WITH TIME ZONE,
          keepalive_op BIGINT,
          last_fatal_error TEXT,
          content TEXT NOT NULL
        );
      `.execute();
            await db.sql `
        CREATE TABLE bucket_parameters (
          id BIGINT DEFAULT nextval('op_id_sequence') PRIMARY KEY,
          group_id integer NOT NULL,
          source_table TEXT NOT NULL,
          source_key bytea NOT NULL,
          lookup bytea NOT NULL,
          --- Stored as text which is stringified with JSONBig
          --- BigInts are not standard JSON, storing as JSONB seems risky
          bucket_parameters text NOT NULL
        );
      `.execute();
            // id DESC so the newest parameter row for a lookup sorts first.
            await db.sql `
        CREATE INDEX bucket_parameters_lookup_index ON bucket_parameters (group_id ASC, lookup ASC, id DESC)
      `.execute();
            await db.sql `
        CREATE INDEX bucket_parameters_source_index ON bucket_parameters (group_id, source_table, source_key)
      `.execute();
            // Latest known state per replicated source row.
            await db.sql `
        CREATE TABLE current_data (
          group_id integer NOT NULL,
          source_table TEXT NOT NULL,
          source_key bytea NOT NULL,
          CONSTRAINT unique_current_data_id PRIMARY KEY (group_id, source_table, source_key),
          buckets jsonb NOT NULL,
          data bytea NOT NULL,
          lookups bytea[] NOT NULL
        );
      `.execute();
            await db.sql `
        CREATE TABLE source_tables (
          --- This is currently a TEXT column to make the (shared) tests easier to integrate
          --- we could improve this if necessary
          id TEXT PRIMARY KEY,
          group_id integer NOT NULL,
          connection_id integer NOT NULL,
          relation_id jsonb,
          schema_name text NOT NULL,
          table_name text NOT NULL,
          replica_id_columns jsonb,
          snapshot_done BOOLEAN NOT NULL DEFAULT FALSE
        )
      `.execute();
            await db.sql `CREATE INDEX source_table_lookup ON source_tables (group_id, table_name)`.execute();
            // Managed write checkpoints, one row per user.
            await db.sql `
        CREATE TABLE write_checkpoints (
          user_id text PRIMARY KEY,
          lsns jsonb NOT NULL,
          write_checkpoint BIGINT NOT NULL
        )
      `.execute();
            // Custom write checkpoints, scoped per (user, sync-rules deployment).
            await db.sql `
        CREATE TABLE custom_write_checkpoints (
          user_id text NOT NULL,
          write_checkpoint BIGINT NOT NULL,
          sync_rules_id integer NOT NULL,
          CONSTRAINT unique_user_sync PRIMARY KEY (user_id, sync_rules_id)
        );
      `.execute();
        });
    }
    catch (e_1) {
        // Defer the error so resource disposal still runs; rethrown (possibly
        // wrapped in SuppressedError) by __disposeResources below.
        env_1.error = e_1;
        env_1.hasError = true;
    }
    finally {
        const result_1 = __disposeResources(env_1);
        if (result_1)
            await result_1;
    }
};
|
|
179
|
+
/**
 * Reverts the initial migration by dropping the storage tables.
 *
 * Mirrors the compiled `await using` pattern used by `up`: the migration DB
 * client is registered as an async-disposable resource and released in the
 * `finally` block regardless of the outcome.
 *
 * @param context migration context; reads `context.service_context.configuration.storage`
 */
export const down = async (context) => {
    const disposeEnv = { stack: [], error: void 0, hasError: false };
    try {
        const { configuration } = context.service_context;
        const client = __addDisposableResource(disposeEnv, openMigrationDB(configuration.storage), true);
        await dropTables(client);
    }
    catch (caught) {
        // Stash the error so disposal still runs; __disposeResources rethrows.
        disposeEnv.error = caught;
        disposeEnv.hasError = true;
    }
    finally {
        const pendingDisposal = __disposeResources(disposeEnv);
        if (pendingDisposal) await pendingDisposal;
    }
};
|
|
196
|
+
//# sourceMappingURL=1684951997326-init.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"1684951997326-init.js","sourceRoot":"","sources":["../../../src/migrations/scripts/1684951997326-init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEA,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAC/C,OAAO,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAExD,MAAM,CAAC,MAAM,EAAE,GAA0C,KAAK,EAAE,OAAO,EAAE,EAAE;;;QACzE,MAAM,EACJ,eAAe,EAAE,EAAE,aAAa,EAAE,EACnC,GAAG,OAAO,CAAC;QACZ,MAAY,MAAM,kCAAG,eAAe,CAAC,aAAa,CAAC,OAAO,CAAC,OAAA,CAAC;QAE5D;;;WAGG;QACH,MAAM,MAAM,CAAC,WAAW,CAAC,KAAK,EAAE,EAAE,EAAE,EAAE;YACpC,MAAM,EAAE,CAAC,GAAG,CAAA;;;;KAIX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA;;;;KAIX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA;;;;;;;;;;;;;;;KAeX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA,8CAA8C,CAAC,OAAO,EAAE,CAAC;YAErE,MAAM,EAAE,CAAC,GAAG,CAAA;;;;;;;;;;;;;;;KAeX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA;;;;;;;;;;;KAWX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA;;KAEX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA;;KAEX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA;;;;;;;;;;KAUX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA;;;;;;;;;;;;;KAaX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA,0EAA0E,CAAC,OAAO,EAAE,CAAC;YAEjG,MAAM,EAAE,CAAC,GAAG,CAAA;;;;;;KAMX,CAAC,OAAO,EAAE,CAAC;YAEZ,MAAM,EAAE,CAAC,GAAG,CAAA;;;;;;;KAOX,CAAC,OAAO,EAAE,CAAC;QACd,CAAC,CAAC,CAAC;;;;;;;;;;;CACJ,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAA0C,KAAK,EAAE,OAAO,EAAE,EAAE;;;QAC3E,MAAM,EACJ,eAAe,EAAE,EAAE,aAAa,EAAE,EACnC,GAAG,OAAO,CAAC;QACZ,MAAY,MAAM,kCAAG,eAAe,CAAC,aAAa,CAAC,OAAO,CAAC,OAAA,CAAC;QAE5D,MAAM,UAAU,CAAC,MAAM,CAAC,CAAC;;;;;;;;;;;CAC1B,CAAC"}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { modules } from '@powersync/service-core';
|
|
2
|
+
import { PostgresMigrationAgent } from '../migrations/PostgresMigrationAgent.js';
|
|
3
|
+
import { PostgresStorageProvider } from '../storage/PostgresStorageProvider.js';
|
|
4
|
+
import { isPostgresStorageConfig, PostgresStorageConfig } from '../types/types.js';
|
|
5
|
+
/**
 * Service module that wires Postgres in as a bucket-storage backend.
 *
 * On initialize it always registers a PostgresStorageProvider with the
 * storage engine, and additionally registers a PostgresMigrationAgent when
 * the configured storage is Postgres.
 */
export class PostgresStorageModule extends modules.AbstractModule {
    constructor() {
        super({
            name: 'Postgres Bucket Storage'
        });
    }
    /**
     * Registers the Postgres storage provider and, when applicable, the
     * migration agent for the Postgres storage schema.
     *
     * @param context service context providing `storageEngine`, `configuration` and `migrations`
     */
    async initialize(context) {
        // Register the ability to use Postgres as a BucketStorage
        context.storageEngine.registerProvider(new PostgresStorageProvider());
        const storageConfig = context.configuration.storage;
        if (isPostgresStorageConfig(storageConfig)) {
            const decodedConfig = PostgresStorageConfig.decode(storageConfig);
            context.migrations.registerMigrationAgent(new PostgresMigrationAgent(decodedConfig));
        }
    }
    async teardown() {
        // Teardown for this module is implemented in the storage engine
    }
}
|
|
23
|
+
//# sourceMappingURL=PostgresStorageModule.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"PostgresStorageModule.js","sourceRoot":"","sources":["../../src/module/PostgresStorageModule.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAU,MAAM,yBAAyB,CAAC;AAE1D,OAAO,EAAE,sBAAsB,EAAE,MAAM,yCAAyC,CAAC;AACjF,OAAO,EAAE,uBAAuB,EAAE,MAAM,uCAAuC,CAAC;AAChF,OAAO,EAAE,uBAAuB,EAAE,qBAAqB,EAAE,MAAM,mBAAmB,CAAC;AAEnF,MAAM,OAAO,qBAAsB,SAAQ,OAAO,CAAC,cAAc;IAC/D;QACE,KAAK,CAAC;YACJ,IAAI,EAAE,yBAAyB;SAChC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,UAAU,CAAC,OAAuC;QACtD,MAAM,EAAE,aAAa,EAAE,GAAG,OAAO,CAAC;QAElC,0DAA0D;QAC1D,aAAa,CAAC,gBAAgB,CAAC,IAAI,uBAAuB,EAAE,CAAC,CAAC;QAE9D,IAAI,uBAAuB,CAAC,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,EAAE,CAAC;YAC3D,OAAO,CAAC,UAAU,CAAC,sBAAsB,CACvC,IAAI,sBAAsB,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC,CACxF,CAAC;QACJ,CAAC;IACH,CAAC;IAED,KAAK,CAAC,QAAQ;QACZ,gEAAgE;IAClE,CAAC;CACF"}
|