@powersync/service-module-mongodb-storage 0.0.0-dev-20250108073049
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +19 -0
- package/LICENSE +67 -0
- package/README.md +3 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +6 -0
- package/dist/index.js.map +1 -0
- package/dist/migrations/MongoMigrationAgent.d.ts +12 -0
- package/dist/migrations/MongoMigrationAgent.js +25 -0
- package/dist/migrations/MongoMigrationAgent.js.map +1 -0
- package/dist/migrations/db/migrations/1684951997326-init.d.ts +3 -0
- package/dist/migrations/db/migrations/1684951997326-init.js +30 -0
- package/dist/migrations/db/migrations/1684951997326-init.js.map +1 -0
- package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.d.ts +2 -0
- package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js +5 -0
- package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js.map +1 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.d.ts +3 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +54 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -0
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.d.ts +3 -0
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js +26 -0
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +1 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.d.ts +3 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js +28 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js.map +1 -0
- package/dist/migrations/mongo-migration-store.d.ts +7 -0
- package/dist/migrations/mongo-migration-store.js +49 -0
- package/dist/migrations/mongo-migration-store.js.map +1 -0
- package/dist/module/MongoStorageModule.d.ts +10 -0
- package/dist/module/MongoStorageModule.js +31 -0
- package/dist/module/MongoStorageModule.js.map +1 -0
- package/dist/storage/MongoBucketStorage.d.ts +48 -0
- package/dist/storage/MongoBucketStorage.js +426 -0
- package/dist/storage/MongoBucketStorage.js.map +1 -0
- package/dist/storage/implementation/MongoBucketBatch.d.ts +72 -0
- package/dist/storage/implementation/MongoBucketBatch.js +681 -0
- package/dist/storage/implementation/MongoBucketBatch.js.map +1 -0
- package/dist/storage/implementation/MongoCompactor.d.ts +40 -0
- package/dist/storage/implementation/MongoCompactor.js +300 -0
- package/dist/storage/implementation/MongoCompactor.js.map +1 -0
- package/dist/storage/implementation/MongoIdSequence.d.ts +12 -0
- package/dist/storage/implementation/MongoIdSequence.js +21 -0
- package/dist/storage/implementation/MongoIdSequence.js.map +1 -0
- package/dist/storage/implementation/MongoPersistedSyncRules.d.ts +9 -0
- package/dist/storage/implementation/MongoPersistedSyncRules.js +9 -0
- package/dist/storage/implementation/MongoPersistedSyncRules.js.map +1 -0
- package/dist/storage/implementation/MongoPersistedSyncRulesContent.d.ts +20 -0
- package/dist/storage/implementation/MongoPersistedSyncRulesContent.js +26 -0
- package/dist/storage/implementation/MongoPersistedSyncRulesContent.js.map +1 -0
- package/dist/storage/implementation/MongoStorageProvider.d.ts +5 -0
- package/dist/storage/implementation/MongoStorageProvider.js +33 -0
- package/dist/storage/implementation/MongoStorageProvider.js.map +1 -0
- package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +36 -0
- package/dist/storage/implementation/MongoSyncBucketStorage.js +529 -0
- package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -0
- package/dist/storage/implementation/MongoSyncRulesLock.d.ts +16 -0
- package/dist/storage/implementation/MongoSyncRulesLock.js +65 -0
- package/dist/storage/implementation/MongoSyncRulesLock.js.map +1 -0
- package/dist/storage/implementation/MongoTestStorageFactoryGenerator.d.ts +7 -0
- package/dist/storage/implementation/MongoTestStorageFactoryGenerator.js +16 -0
- package/dist/storage/implementation/MongoTestStorageFactoryGenerator.js.map +1 -0
- package/dist/storage/implementation/MongoWriteCheckpointAPI.d.ts +20 -0
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js +104 -0
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js.map +1 -0
- package/dist/storage/implementation/OperationBatch.d.ts +34 -0
- package/dist/storage/implementation/OperationBatch.js +119 -0
- package/dist/storage/implementation/OperationBatch.js.map +1 -0
- package/dist/storage/implementation/PersistedBatch.d.ts +46 -0
- package/dist/storage/implementation/PersistedBatch.js +223 -0
- package/dist/storage/implementation/PersistedBatch.js.map +1 -0
- package/dist/storage/implementation/db.d.ts +36 -0
- package/dist/storage/implementation/db.js +47 -0
- package/dist/storage/implementation/db.js.map +1 -0
- package/dist/storage/implementation/models.d.ts +139 -0
- package/dist/storage/implementation/models.js +2 -0
- package/dist/storage/implementation/models.js.map +1 -0
- package/dist/storage/implementation/util.d.ts +46 -0
- package/dist/storage/implementation/util.js +155 -0
- package/dist/storage/implementation/util.js.map +1 -0
- package/dist/storage/storage-index.d.ts +14 -0
- package/dist/storage/storage-index.js +15 -0
- package/dist/storage/storage-index.js.map +1 -0
- package/dist/types/types.d.ts +18 -0
- package/dist/types/types.js +9 -0
- package/dist/types/types.js.map +1 -0
- package/package.json +48 -0
- package/src/index.ts +7 -0
- package/src/migrations/MongoMigrationAgent.ts +39 -0
- package/src/migrations/db/migrations/1684951997326-init.ts +39 -0
- package/src/migrations/db/migrations/1688556755264-initial-sync-rules.ts +5 -0
- package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +105 -0
- package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +38 -0
- package/src/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.ts +40 -0
- package/src/migrations/mongo-migration-store.ts +62 -0
- package/src/module/MongoStorageModule.ts +37 -0
- package/src/storage/MongoBucketStorage.ts +531 -0
- package/src/storage/implementation/MongoBucketBatch.ts +896 -0
- package/src/storage/implementation/MongoCompactor.ts +381 -0
- package/src/storage/implementation/MongoIdSequence.ts +24 -0
- package/src/storage/implementation/MongoPersistedSyncRules.ts +16 -0
- package/src/storage/implementation/MongoPersistedSyncRulesContent.ts +49 -0
- package/src/storage/implementation/MongoStorageProvider.ts +39 -0
- package/src/storage/implementation/MongoSyncBucketStorage.ts +612 -0
- package/src/storage/implementation/MongoSyncRulesLock.ts +88 -0
- package/src/storage/implementation/MongoTestStorageFactoryGenerator.ts +25 -0
- package/src/storage/implementation/MongoWriteCheckpointAPI.ts +146 -0
- package/src/storage/implementation/OperationBatch.ts +129 -0
- package/src/storage/implementation/PersistedBatch.ts +283 -0
- package/src/storage/implementation/db.ts +87 -0
- package/src/storage/implementation/models.ts +161 -0
- package/src/storage/implementation/util.ts +169 -0
- package/src/storage/storage-index.ts +14 -0
- package/src/types/types.ts +18 -0
- package/test/src/__snapshots__/storage_sync.test.ts.snap +332 -0
- package/test/src/env.ts +6 -0
- package/test/src/setup.ts +9 -0
- package/test/src/storage.test.ts +7 -0
- package/test/src/storage_compacting.test.ts +6 -0
- package/test/src/storage_sync.test.ts +113 -0
- package/test/src/util.ts +8 -0
- package/test/tsconfig.json +31 -0
- package/tsconfig.json +31 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/vitest.config.ts +15 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"db.js","sourceRoot":"","sources":["../../../src/storage/implementation/db.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,SAAS,MAAM,gCAAgC,CAAC;AAC5D,OAAO,EAAE,OAAO,EAAE,MAAM,yBAAyB,CAAC;AAuBlD,MAAM,OAAO,cAAc;IAezB,YAAY,MAAyB,EAAE,OAA+B;QACpE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QAErB,MAAM,EAAE,GAAG,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,QAAQ,EAAE;YACtC,GAAG,OAAO,CAAC,wBAAwB;SACpC,CAAC,CAAC;QACH,IAAI,CAAC,EAAE,GAAG,EAAE,CAAC;QAEb,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC,UAAU,CAAsB,cAAc,CAAC,CAAC;QACvE,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC,UAAU,CAAC,aAAa,CAAC,CAAC;QAChD,IAAI,CAAC,iBAAiB,GAAG,EAAE,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAAC;QAC5D,IAAI,CAAC,cAAc,GAAG,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;QACtD,IAAI,CAAC,UAAU,GAAG,EAAE,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC;QAC9C,IAAI,CAAC,aAAa,GAAG,EAAE,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC;QACpD,IAAI,CAAC,wBAAwB,GAAG,EAAE,CAAC,UAAU,CAAC,0BAA0B,CAAC,CAAC;QAC1E,IAAI,CAAC,iBAAiB,GAAG,EAAE,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAAC;QAC5D,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;QAC1C,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;IAC3C,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,KAAK;QACT,MAAM,IAAI,CAAC,YAAY,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACvC,MAAM,IAAI,CAAC,WAAW,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACtC,MAAM,IAAI,CAAC,iBAAiB,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QAC5C,MAAM,IAAI,CAAC,cAAc,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACzC,MAAM,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACrC,MAAM,IAAI,CAAC,aAAa,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACxC,MAAM,IAAI,CAAC,iBAAiB,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QAC5C,MAAM,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAClC,MAAM,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;IAClC,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,IAAI;QACR,MAAM,IAAI,CAAC,EAAE,CAAC,YAAY,EAAE,CAAC;IAC/B,CAAC;CACF;AAED,MAAM,UAAU,oBAAoB,CAAC,MAA0B;IAC7D,OAAO,IAAI,cAAc,CAAC,SAAS,CAAC,iBAAiB,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,EAAE,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC;AAChG,CAAC"}
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
import { storage } from '@powersync/service-core';
|
|
2
|
+
import { SqliteJsonValue } from '@powersync/service-sync-rules';
|
|
3
|
+
import * as bson from 'bson';
|
|
4
|
+
/**
|
|
5
|
+
* Replica id uniquely identifying a row on the source database.
|
|
6
|
+
*
|
|
7
|
+
* Can be any value serializable to BSON.
|
|
8
|
+
*
|
|
9
|
+
* If the value is an entire document, the data serialized to a v5 UUID may be a good choice here.
|
|
10
|
+
*/
|
|
11
|
+
export type ReplicaId = bson.UUID | bson.Document | any;
|
|
12
|
+
export interface SourceKey {
|
|
13
|
+
/** group_id */
|
|
14
|
+
g: number;
|
|
15
|
+
/** source table id */
|
|
16
|
+
t: bson.ObjectId;
|
|
17
|
+
/** source key */
|
|
18
|
+
k: ReplicaId;
|
|
19
|
+
}
|
|
20
|
+
export interface BucketDataKey {
|
|
21
|
+
/** group_id */
|
|
22
|
+
g: number;
|
|
23
|
+
/** bucket name */
|
|
24
|
+
b: string;
|
|
25
|
+
/** op_id */
|
|
26
|
+
o: bigint;
|
|
27
|
+
}
|
|
28
|
+
export interface CurrentDataDocument {
|
|
29
|
+
_id: SourceKey;
|
|
30
|
+
data: bson.Binary;
|
|
31
|
+
buckets: CurrentBucket[];
|
|
32
|
+
lookups: bson.Binary[];
|
|
33
|
+
}
|
|
34
|
+
export interface CurrentBucket {
|
|
35
|
+
bucket: string;
|
|
36
|
+
table: string;
|
|
37
|
+
id: string;
|
|
38
|
+
}
|
|
39
|
+
export interface BucketParameterDocument {
|
|
40
|
+
_id: bigint;
|
|
41
|
+
key: SourceKey;
|
|
42
|
+
lookup: bson.Binary;
|
|
43
|
+
bucket_parameters: Record<string, SqliteJsonValue>[];
|
|
44
|
+
}
|
|
45
|
+
export interface BucketDataDocument {
|
|
46
|
+
_id: BucketDataKey;
|
|
47
|
+
op: OpType;
|
|
48
|
+
source_table?: bson.ObjectId;
|
|
49
|
+
source_key?: ReplicaId;
|
|
50
|
+
table?: string;
|
|
51
|
+
row_id?: string;
|
|
52
|
+
checksum: number;
|
|
53
|
+
data: string | null;
|
|
54
|
+
target_op?: bigint | null;
|
|
55
|
+
}
|
|
56
|
+
export type OpType = 'PUT' | 'REMOVE' | 'MOVE' | 'CLEAR';
|
|
57
|
+
export interface SourceTableDocument {
|
|
58
|
+
_id: bson.ObjectId;
|
|
59
|
+
group_id: number;
|
|
60
|
+
connection_id: number;
|
|
61
|
+
relation_id: number | string | undefined;
|
|
62
|
+
schema_name: string;
|
|
63
|
+
table_name: string;
|
|
64
|
+
replica_id_columns: string[] | null;
|
|
65
|
+
replica_id_columns2: {
|
|
66
|
+
name: string;
|
|
67
|
+
type_oid?: number;
|
|
68
|
+
type?: string;
|
|
69
|
+
}[] | undefined;
|
|
70
|
+
snapshot_done: boolean | undefined;
|
|
71
|
+
}
|
|
72
|
+
export interface IdSequenceDocument {
|
|
73
|
+
_id: string;
|
|
74
|
+
op_id: bigint;
|
|
75
|
+
}
|
|
76
|
+
export interface SyncRuleDocument {
|
|
77
|
+
_id: number;
|
|
78
|
+
state: storage.SyncRuleState;
|
|
79
|
+
/**
|
|
80
|
+
* True if initial snapshot has been replicated.
|
|
81
|
+
*
|
|
82
|
+
* Can only be false if state == PROCESSING.
|
|
83
|
+
*/
|
|
84
|
+
snapshot_done: boolean;
|
|
85
|
+
/**
|
|
86
|
+
* The last consistent checkpoint.
|
|
87
|
+
*
|
|
88
|
+
* There may be higher OpIds used in the database if we're in the middle of replicating a large transaction.
|
|
89
|
+
*/
|
|
90
|
+
last_checkpoint: bigint | null;
|
|
91
|
+
/**
|
|
92
|
+
* The LSN associated with the last consistent checkpoint.
|
|
93
|
+
*/
|
|
94
|
+
last_checkpoint_lsn: string | null;
|
|
95
|
+
/**
|
|
96
|
+
* If set, no new checkpoints may be created < this value.
|
|
97
|
+
*/
|
|
98
|
+
no_checkpoint_before: string | null;
|
|
99
|
+
/**
|
|
100
|
+
* Goes together with no_checkpoint_before.
|
|
101
|
+
*
|
|
102
|
+
* If a keepalive is triggered that creates the checkpoint > no_checkpoint_before,
|
|
103
|
+
* then the checkpoint must be equal to this keepalive_op.
|
|
104
|
+
*/
|
|
105
|
+
keepalive_op: string | null;
|
|
106
|
+
slot_name: string | null;
|
|
107
|
+
/**
|
|
108
|
+
* Last time we persisted a checkpoint.
|
|
109
|
+
*
|
|
110
|
+
* This may be old if no data is incoming.
|
|
111
|
+
*/
|
|
112
|
+
last_checkpoint_ts: Date | null;
|
|
113
|
+
/**
|
|
114
|
+
* Last time we persisted a checkpoint or keepalive.
|
|
115
|
+
*
|
|
116
|
+
* This should stay fairly current while replicating.
|
|
117
|
+
*/
|
|
118
|
+
last_keepalive_ts: Date | null;
|
|
119
|
+
/**
|
|
120
|
+
* If an error is stopping replication, it will be stored here.
|
|
121
|
+
*/
|
|
122
|
+
last_fatal_error: string | null;
|
|
123
|
+
content: string;
|
|
124
|
+
}
|
|
125
|
+
export interface CustomWriteCheckpointDocument {
|
|
126
|
+
_id: bson.ObjectId;
|
|
127
|
+
user_id: string;
|
|
128
|
+
checkpoint: bigint;
|
|
129
|
+
sync_rules_id: number;
|
|
130
|
+
}
|
|
131
|
+
export interface WriteCheckpointDocument {
|
|
132
|
+
_id: bson.ObjectId;
|
|
133
|
+
user_id: string;
|
|
134
|
+
lsns: Record<string, string>;
|
|
135
|
+
client_id: bigint;
|
|
136
|
+
}
|
|
137
|
+
export interface InstanceDocument {
|
|
138
|
+
_id: string;
|
|
139
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/storage/implementation/models.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import { storage, utils } from '@powersync/service-core';
|
|
2
|
+
import * as bson from 'bson';
|
|
3
|
+
import * as mongo from 'mongodb';
|
|
4
|
+
import { PowerSyncMongo } from './db.js';
|
|
5
|
+
import { BucketDataDocument } from './models.js';
|
|
6
|
+
export declare function idPrefixFilter<T>(prefix: Partial<T>, rest: (keyof T)[]): mongo.Condition<T>;
|
|
7
|
+
export declare function generateSlotName(prefix: string, sync_rules_id: number): string;
|
|
8
|
+
/**
|
|
9
|
+
* Read a single batch of data from a cursor, then close it.
|
|
10
|
+
*
|
|
11
|
+
* We do our best to avoid MongoDB fetching any more data than this single batch.
|
|
12
|
+
*
|
|
13
|
+
* This is similar to using `singleBatch: true` in find options.
|
|
14
|
+
* However, that makes `has_more` detection very difficult, since the cursor is always closed
|
|
15
|
+
* after the first batch. Instead, we do a workaround to only fetch a single batch below.
|
|
16
|
+
*
|
|
17
|
+
* For this to be effective, set batchSize = limit in the find command.
|
|
18
|
+
*/
|
|
19
|
+
export declare function readSingleBatch<T>(cursor: mongo.FindCursor<T>): Promise<{
|
|
20
|
+
data: T[];
|
|
21
|
+
hasMore: boolean;
|
|
22
|
+
}>;
|
|
23
|
+
export declare function mapOpEntry(row: BucketDataDocument): utils.OplogEntry;
|
|
24
|
+
export declare function replicaIdToSubkey(table: bson.ObjectId, id: storage.ReplicaId): string;
|
|
25
|
+
/**
|
|
26
|
+
* Helper function for creating a MongoDB client from consumers of this package
|
|
27
|
+
*/
|
|
28
|
+
export declare const createMongoClient: (url: string, options?: mongo.MongoClientOptions) => mongo.MongoClient;
|
|
29
|
+
/**
|
|
30
|
+
* Helper for unit tests
|
|
31
|
+
*/
|
|
32
|
+
export declare const connectMongoForTests: (url: string, isCI: boolean) => PowerSyncMongo;
|
|
33
|
+
/**
|
|
34
|
+
* MongoDB bulkWrite internally splits the operations into batches
|
|
35
|
+
* so that no batch exceeds 16MB. However, there are cases where
|
|
36
|
+
* the batch size is very close to 16MB, where additional metadata
|
|
37
|
+
* on the server pushes it over the limit, resulting in this error
|
|
38
|
+
* from the server:
|
|
39
|
+
*
|
|
40
|
+
* > MongoBulkWriteError: BSONObj size: 16814023 (0x1008FC7) is invalid. Size must be between 0 and 16793600(16MB) First element: insert: "bucket_data"
|
|
41
|
+
*
|
|
42
|
+
* We work around the issue by doing our own batching, limiting the
|
|
43
|
+
* batch size to 15MB. This does add additional overhead with
|
|
44
|
+
* BSON.calculateObjectSize.
|
|
45
|
+
*/
|
|
46
|
+
export declare function safeBulkWrite<T extends mongo.Document>(collection: mongo.Collection<T>, operations: mongo.AnyBulkWriteOperation<T>[], options: mongo.BulkWriteOptions): Promise<void>;
|
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
import { storage, utils } from '@powersync/service-core';
|
|
2
|
+
import * as bson from 'bson';
|
|
3
|
+
import * as crypto from 'crypto';
|
|
4
|
+
import * as mongo from 'mongodb';
|
|
5
|
+
import * as uuid from 'uuid';
|
|
6
|
+
import { PowerSyncMongo } from './db.js';
|
|
7
|
+
/**
 * Build a range filter matching all compound ids that start with the given
 * prefix fields.
 *
 * The remaining key fields (`rest`) are padded with BSON MinKey/MaxKey so the
 * `$gte`/`$lt` bounds cover every possible value for those fields.
 */
export function idPrefixFilter(prefix, rest) {
    const lowerBound = { ...prefix };
    const upperBound = { ...prefix };
    for (const field of rest) {
        // MinKey sorts before, and MaxKey after, every other BSON value.
        lowerBound[field] = new bson.MinKey();
        upperBound[field] = new bson.MaxKey();
    }
    return { $gte: lowerBound, $lt: upperBound };
}
|
|
22
|
+
/**
 * Generate a replication slot name of the form
 * `<prefix><sync_rules_id>_<4 random hex chars>`.
 *
 * The random suffix distinguishes slots re-created for the same sync rules id.
 */
export function generateSlotName(prefix, sync_rules_id) {
    // 2 random bytes -> 4 lowercase hex characters.
    const randomSuffix = crypto.randomBytes(2).toString('hex');
    return `${prefix}${sync_rules_id}_${randomSuffix}`;
}
|
|
26
|
+
/**
|
|
27
|
+
* Read a single batch of data from a cursor, then close it.
|
|
28
|
+
*
|
|
29
|
+
* We do our best to avoid MongoDB fetching any more data than this single batch.
|
|
30
|
+
*
|
|
31
|
+
* This is similar to using `singleBatch: true` in find options.
|
|
32
|
+
* However, that makes `has_more` detection very difficult, since the cursor is always closed
|
|
33
|
+
* after the first batch. Instead, we do a workaround to only fetch a single batch below.
|
|
34
|
+
*
|
|
35
|
+
* For this to be effective, set batchSize = limit in the find command.
|
|
36
|
+
*/
|
|
37
|
+
export async function readSingleBatch(cursor) {
|
|
38
|
+
try {
|
|
39
|
+
let data;
|
|
40
|
+
let hasMore = true;
|
|
41
|
+
// Let MongoDB load the first batch of data
|
|
42
|
+
const hasAny = await cursor.hasNext();
|
|
43
|
+
// Now it's in memory, and we can read it
|
|
44
|
+
data = cursor.readBufferedDocuments();
|
|
45
|
+
if (!hasAny || cursor.id?.isZero()) {
|
|
46
|
+
// A zero id means the cursor is exhaused.
|
|
47
|
+
// No results (hasAny == false) means even this batch doesn't have data.
|
|
48
|
+
// This should similar results as `await cursor.hasNext()`, but without
|
|
49
|
+
// actually fetching the next batch.
|
|
50
|
+
//
|
|
51
|
+
// Note that it is safe (but slightly inefficient) to return `hasMore: true`
|
|
52
|
+
// without there being more data, as long as the next batch
|
|
53
|
+
// will return `hasMore: false`.
|
|
54
|
+
hasMore = false;
|
|
55
|
+
}
|
|
56
|
+
return { data, hasMore };
|
|
57
|
+
}
|
|
58
|
+
finally {
|
|
59
|
+
// Match the from the cursor iterator logic here:
|
|
60
|
+
// https://github.com/mongodb/node-mongodb-native/blob/e02534e7d1c627bf50b85ca39f5995dbf165ad44/src/cursor/abstract_cursor.ts#L327-L331
|
|
61
|
+
if (!cursor.closed) {
|
|
62
|
+
await cursor.close();
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
/**
 * Convert a stored bucket_data document into an oplog entry for the sync
 * protocol.
 *
 * PUT/REMOVE entries carry the full row identity and payload; MOVE/CLEAR
 * entries only carry the op id and checksum.
 */
export function mapOpEntry(row) {
    const op_id = utils.timestampToOpId(row._id.o);
    const checksum = Number(row.checksum);
    if (row.op != 'PUT' && row.op != 'REMOVE') {
        // MOVE, CLEAR
        return {
            op_id,
            op: row.op,
            checksum
        };
    }
    return {
        op_id,
        op: row.op,
        object_type: row.table,
        object_id: row.row_id,
        checksum,
        subkey: replicaIdToSubkey(row.source_table, row.source_key),
        data: row.data
    };
}
|
|
87
|
+
/**
 * Derive a stable subkey identifying a source row within a table.
 */
export function replicaIdToSubkey(table, id) {
    if (storage.isUUID(id)) {
        // Special case for UUID replica ids, for backwards-compatibility:
        // use the raw hex forms directly instead of hashing.
        return `${table.toHexString()}/${id.toHexString()}`;
    }
    // Otherwise, hash the BSON representation of the (table, id) pair
    // into a deterministic v5 UUID.
    const serialized = bson.serialize({ table, id });
    return uuid.v5(serialized, utils.ID_NAMESPACE);
}
|
|
98
|
+
/**
 * Helper function for creating a MongoDB client from consumers of this package
 */
export const createMongoClient = (url, options) => new mongo.MongoClient(url, options);
|
|
104
|
+
/**
 * Helper for unit tests.
 *
 * Uses short timeouts to fail fast locally when the server is not available,
 * and slightly longer timeouts on CI to avoid arbitrary test failures.
 */
export const connectMongoForTests = (url, isCI) => {
    const connectTimeoutMS = isCI ? 15000 : 5000;
    const socketTimeoutMS = isCI ? 15000 : 5000;
    const serverSelectionTimeoutMS = isCI ? 15000 : 2500;
    const client = createMongoClient(url, {
        connectTimeoutMS,
        socketTimeoutMS,
        serverSelectionTimeoutMS
    });
    return new PowerSyncMongo(client);
};
|
|
117
|
+
/**
 * MongoDB bulkWrite internally splits the operations into batches
 * so that no batch exceeds 16MB. However, there are cases where
 * the batch size is very close to 16MB, where additional metadata
 * on the server pushes it over the limit, resulting in this error
 * from the server:
 *
 * > MongoBulkWriteError: BSONObj size: 16814023 (0x1008FC7) is invalid. Size must be between 0 and 16793600(16MB) First element: insert: "bucket_data"
 *
 * We work around the issue by doing our own batching, limiting the
 * batch size to 15MB. This does add additional overhead with
 * BSON.calculateObjectSize.
 */
export async function safeBulkWrite(collection, operations, options) {
    // Must be below 16MB; 15MB leaves safe headroom for server-side metadata.
    const MAX_BATCH_BYTES = 15 * 1024 * 1024;
    // Estimated overhead per operation, should be smaller in reality.
    const PER_OP_OVERHEAD = 8;

    let pending = [];
    let pendingBytes = 0;
    const flush = async () => {
        if (pending.length > 0) {
            await collection.bulkWrite(pending, options);
            pending = [];
            pendingBytes = 0;
        }
    };

    for (const operation of operations) {
        const operationBytes = mongo.BSON.calculateObjectSize(operation, {
            checkKeys: false,
            ignoreUndefined: true
        }) + PER_OP_OVERHEAD;
        // Flush the current batch first if adding this op would exceed the cap.
        if (pending.length > 0 && pendingBytes + operationBytes > MAX_BATCH_BYTES) {
            await flush();
        }
        pending.push(operation);
        pendingBytes += operationBytes;
    }
    await flush();
}
|
|
155
|
+
//# sourceMappingURL=util.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"util.js","sourceRoot":"","sources":["../../../src/storage/implementation/util.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,yBAAyB,CAAC;AACzD,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,KAAK,KAAK,MAAM,SAAS,CAAC;AACjC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAGzC,MAAM,UAAU,cAAc,CAAI,MAAkB,EAAE,IAAiB;IACrE,IAAI,MAAM,GAAG;QACX,IAAI,EAAE;YACJ,GAAG,MAAM;SACH;QACR,GAAG,EAAE;YACH,GAAG,MAAM;SACH;KACT,CAAC;IAEF,KAAK,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QACrB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;QACrC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;IACtC,CAAC;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,MAAc,EAAE,aAAqB;IACpE,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC1D,OAAO,GAAG,MAAM,GAAG,aAAa,IAAI,WAAW,EAAE,CAAC;AACpD,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CAAI,MAA2B;IAClE,IAAI,CAAC;QACH,IAAI,IAAS,CAAC;QACd,IAAI,OAAO,GAAG,IAAI,CAAC;QACnB,2CAA2C;QAC3C,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;QACtC,yCAAyC;QACzC,IAAI,GAAG,MAAM,CAAC,qBAAqB,EAAE,CAAC;QACtC,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,EAAE,EAAE,MAAM,EAAE,EAAE,CAAC;YACnC,0CAA0C;YAC1C,wEAAwE;YACxE,uEAAuE;YACvE,oCAAoC;YACpC,EAAE;YACF,4EAA4E;YAC5E,2DAA2D;YAC3D,gCAAgC;YAChC,OAAO,GAAG,KAAK,CAAC;QAClB,CAAC;QACD,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;IAC3B,CAAC;YAAS,CAAC;QACT,iDAAiD;QACjD,uIAAuI;QACvI,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;YACnB,MAAM,MAAM,CAAC,KAAK,EAAE,CAAC;QACvB,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,GAAuB;IAChD,IAAI,GAAG,CAAC,EAAE,IAAI,KAAK,IAAI,GAAG,CAAC,EAAE,IAAI,QAAQ,EAAE,CAAC;QAC1C,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YACvC,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,WAAW,EAAE,GAAG,CAAC,KAAK;YACtB,SAAS,EAAE,GAAG,CAAC,MAAM;YACrB,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;YAC9B,MAAM,EAAE,iBAAiB,CAAC,GAAG,CAAC,YAAa,EAAE,GAAG,CAAC,UAAW,CAAC;YAC7D,IAAI,EAAE,GAAG,CAAC,IAAI;SACf,CAAC;IACJ,CAA
C;SAAM,CAAC;QACN,cAAc;QAEd,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YACvC,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;SAC/B,CAAC;IACJ,CAAC;AACH,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,KAAoB,EAAE,EAAqB;IAC3E,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC;QACvB,mDAAmD;QACnD,OAAO,GAAG,KAAK,CAAC,WAAW,EAAE,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE,CAAC;IACtD,CAAC;SAAM,CAAC;QACN,oCAAoC;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;QAC3C,OAAO,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,CAAC,YAAY,CAAC,CAAC;IAC3C,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,GAAW,EAAE,OAAkC,EAAE,EAAE;IACnF,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;AAC7C,CAAC,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,GAAW,EAAE,IAAa,EAAE,EAAE;IACjE,0EAA0E;IAC1E,oEAAoE;IACpE,MAAM,MAAM,GAAG,iBAAiB,CAAC,GAAG,EAAE;QACpC,gBAAgB,EAAE,IAAI,CAAC,CAAC,CAAC,KAAM,CAAC,CAAC,CAAC,IAAK;QACvC,eAAe,EAAE,IAAI,CAAC,CAAC,CAAC,KAAM,CAAC,CAAC,CAAC,IAAK;QACtC,wBAAwB,EAAE,IAAI,CAAC,CAAC,CAAC,KAAM,CAAC,CAAC,CAAC,IAAK;KAChD,CAAC,CAAC;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,CAAC;AACpC,CAAC,CAAC;AAEF;;;;;;;;;;;;GAYG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,UAA+B,EAC/B,UAA4C,EAC5C,OAA+B;IAE/B,sBAAsB;IACtB,sEAAsE;IACtE,MAAM,gBAAgB,GAAG,EAAE,GAAG,IAAI,GAAG,IAAI,CAAC;IAE1C,IAAI,KAAK,GAAqC,EAAE,CAAC;IACjD,IAAI,WAAW,GAAG,CAAC,CAAC;IACpB,kEAAkE;IAClE,MAAM,OAAO,GAAG,CAAC,CAAC;IAClB,KAAK,IAAI,EAAE,IAAI,UAAU,EAAE,CAAC;QAC1B,MAAM,QAAQ,GACZ,KAAK,CAAC,IAAI,CAAC,mBAAmB,CAAC,EAAE,EAAE;YACjC,SAAS,EAAE,KAAK;YAChB,eAAe,EAAE,IAAI;SACf,CAAC,GAAG,OAAO,CAAC;QACtB,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,IAAI,WAAW,GAAG,QAAQ,GAAG,gBAAgB,EAAE,CAAC;YAClE,MAAM,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;YAC3C,WAAW,GAAG,CAAC,CAAC;YAChB,KAAK,GAAG,EAAE,CAAC;QACb,CAAC;QACD,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QACf,WAAW,IAAI,QAAQ,CAAC;IAC1B,CAAC;IACD,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACrB,MAAM,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAC7C,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
export * from './implementation/db.js';
|
|
2
|
+
export * from './implementation/models.js';
|
|
3
|
+
export * from './implementation/MongoBucketBatch.js';
|
|
4
|
+
export * from './implementation/MongoIdSequence.js';
|
|
5
|
+
export * from './implementation/MongoPersistedSyncRules.js';
|
|
6
|
+
export * from './implementation/MongoPersistedSyncRulesContent.js';
|
|
7
|
+
export * from './implementation/MongoStorageProvider.js';
|
|
8
|
+
export * from './implementation/MongoSyncBucketStorage.js';
|
|
9
|
+
export * from './implementation/MongoSyncRulesLock.js';
|
|
10
|
+
export * from './implementation/MongoTestStorageFactoryGenerator.js';
|
|
11
|
+
export * from './implementation/OperationBatch.js';
|
|
12
|
+
export * from './implementation/PersistedBatch.js';
|
|
13
|
+
export * from './implementation/util.js';
|
|
14
|
+
export * from './MongoBucketStorage.js';
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
export * from './implementation/db.js';
|
|
2
|
+
export * from './implementation/models.js';
|
|
3
|
+
export * from './implementation/MongoBucketBatch.js';
|
|
4
|
+
export * from './implementation/MongoIdSequence.js';
|
|
5
|
+
export * from './implementation/MongoPersistedSyncRules.js';
|
|
6
|
+
export * from './implementation/MongoPersistedSyncRulesContent.js';
|
|
7
|
+
export * from './implementation/MongoStorageProvider.js';
|
|
8
|
+
export * from './implementation/MongoSyncBucketStorage.js';
|
|
9
|
+
export * from './implementation/MongoSyncRulesLock.js';
|
|
10
|
+
export * from './implementation/MongoTestStorageFactoryGenerator.js';
|
|
11
|
+
export * from './implementation/OperationBatch.js';
|
|
12
|
+
export * from './implementation/PersistedBatch.js';
|
|
13
|
+
export * from './implementation/util.js';
|
|
14
|
+
export * from './MongoBucketStorage.js';
|
|
15
|
+
//# sourceMappingURL=storage-index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"storage-index.js","sourceRoot":"","sources":["../../src/storage/storage-index.ts"],"names":[],"mappings":"AAAA,cAAc,wBAAwB,CAAC;AACvC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,sCAAsC,CAAC;AACrD,cAAc,qCAAqC,CAAC;AACpD,cAAc,6CAA6C,CAAC;AAC5D,cAAc,oDAAoD,CAAC;AACnE,cAAc,0CAA0C,CAAC;AACzD,cAAc,4CAA4C,CAAC;AAC3D,cAAc,wCAAwC,CAAC;AACvD,cAAc,sDAAsD,CAAC;AACrE,cAAc,oCAAoC,CAAC;AACnD,cAAc,oCAAoC,CAAC;AACnD,cAAc,0BAA0B,CAAC;AACzC,cAAc,yBAAyB,CAAC"}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import * as service_types from '@powersync/service-types';
|
|
2
|
+
import * as t from 'ts-codec';
|
|
3
|
+
export declare const MongoStorageConfig: t.Intersection<t.Codec<{
|
|
4
|
+
uri: string;
|
|
5
|
+
type: "mongodb";
|
|
6
|
+
database?: string | undefined;
|
|
7
|
+
username?: string | undefined;
|
|
8
|
+
password?: string | undefined;
|
|
9
|
+
}, {
|
|
10
|
+
uri: string;
|
|
11
|
+
type: "mongodb";
|
|
12
|
+
database?: string | undefined;
|
|
13
|
+
username?: string | undefined;
|
|
14
|
+
password?: string | undefined;
|
|
15
|
+
}, string, t.CodecProps>, t.ObjectCodec<{}>>;
|
|
16
|
+
export type MongoStorageConfig = t.Encoded<typeof MongoStorageConfig>;
|
|
17
|
+
export type MongoStorageConfigDecoded = t.Decoded<typeof MongoStorageConfig>;
|
|
18
|
+
export declare function isMongoStorageConfig(config: service_types.configFile.GenericStorageConfig): config is MongoStorageConfig;
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import * as lib_mongo from '@powersync/lib-service-mongodb';
import * as t from 'ts-codec';

/**
 * MongoDB storage configuration codec: the shared base connection config,
 * extended with mongo-storage-specific settings.
 */
export const MongoStorageConfig = lib_mongo.BaseMongoConfig.and(t.object({
    // Add any mongo specific storage settings here in future
}));

/**
 * Type guard: true when the generic storage config refers to MongoDB storage.
 */
export function isMongoStorageConfig(config) {
    return config.type == lib_mongo.MONGO_CONNECTION_TYPE;
}
|
|
9
|
+
//# sourceMappingURL=types.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/types/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,SAAS,MAAM,gCAAgC,CAAC;AAE5D,OAAO,KAAK,CAAC,MAAM,UAAU,CAAC;AAE9B,MAAM,CAAC,MAAM,kBAAkB,GAAG,SAAS,CAAC,eAAe,CAAC,GAAG,CAC7D,CAAC,CAAC,MAAM,CAAC;AACP,yDAAyD;CAC1D,CAAC,CACH,CAAC;AAKF,MAAM,UAAU,oBAAoB,CAClC,MAAqD;IAErD,OAAO,MAAM,CAAC,IAAI,IAAI,SAAS,CAAC,qBAAqB,CAAC;AACxD,CAAC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@powersync/service-module-mongodb-storage",
|
|
3
|
+
"repository": "https://github.com/powersync-ja/powersync-service",
|
|
4
|
+
"types": "dist/index.d.ts",
|
|
5
|
+
"version": "0.0.0-dev-20250108073049",
|
|
6
|
+
"main": "dist/index.js",
|
|
7
|
+
"license": "FSL-1.1-Apache-2.0",
|
|
8
|
+
"type": "module",
|
|
9
|
+
"publishConfig": {
|
|
10
|
+
"access": "public"
|
|
11
|
+
},
|
|
12
|
+
"exports": {
|
|
13
|
+
".": {
|
|
14
|
+
"import": "./dist/index.js",
|
|
15
|
+
"require": "./dist/index.js",
|
|
16
|
+
"default": "./dist/index.js"
|
|
17
|
+
},
|
|
18
|
+
"./types": {
|
|
19
|
+
"import": "./dist/types/types.js",
|
|
20
|
+
"require": "./dist/types/types.js",
|
|
21
|
+
"default": "./dist/types/types.js"
|
|
22
|
+
}
|
|
23
|
+
},
|
|
24
|
+
"dependencies": {
|
|
25
|
+
"mongodb": "^6.11.0",
|
|
26
|
+
"bson": "^6.8.0",
|
|
27
|
+
"ts-codec": "^1.3.0",
|
|
28
|
+
"ix": "^5.0.0",
|
|
29
|
+
"lru-cache": "^10.2.2",
|
|
30
|
+
"uuid": "^9.0.1",
|
|
31
|
+
"@powersync/lib-services-framework": "0.0.0-dev-20250108073049",
|
|
32
|
+
"@powersync/service-core": "0.0.0-dev-20250108073049",
|
|
33
|
+
"@powersync/service-jsonbig": "0.17.10",
|
|
34
|
+
"@powersync/service-sync-rules": "0.0.0-dev-20250108073049",
|
|
35
|
+
"@powersync/service-types": "0.0.0-dev-20250108073049",
|
|
36
|
+
"@powersync/lib-service-mongodb": "0.0.0-dev-20250108073049"
|
|
37
|
+
},
|
|
38
|
+
"devDependencies": {
|
|
39
|
+
"@types/uuid": "^9.0.4",
|
|
40
|
+
"@powersync/service-core-tests": "0.0.0-dev-20250108073049"
|
|
41
|
+
},
|
|
42
|
+
"scripts": {
|
|
43
|
+
"build": "tsc -b",
|
|
44
|
+
"build:tests": "tsc -b test/tsconfig.json",
|
|
45
|
+
"clean": "rm -rf ./dist && tsc -b --clean",
|
|
46
|
+
"test": "vitest"
|
|
47
|
+
}
|
|
48
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import * as framework from '@powersync/lib-services-framework';
|
|
2
|
+
|
|
3
|
+
import * as lib_mongo from '@powersync/lib-service-mongodb';
|
|
4
|
+
import { migrations } from '@powersync/service-core';
|
|
5
|
+
import * as path from 'path';
|
|
6
|
+
import { fileURLToPath } from 'url';
|
|
7
|
+
import { createPowerSyncMongo, PowerSyncMongo } from '../storage/storage-index.js';
|
|
8
|
+
import { MongoStorageConfig } from '../types/types.js';
|
|
9
|
+
import { createMongoMigrationStore } from './mongo-migration-store.js';
|
|
10
|
+
|
|
11
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
12
|
+
const __dirname = path.dirname(__filename);
|
|
13
|
+
|
|
14
|
+
const MONGO_LOCK_PROCESS = 'migrations';
|
|
15
|
+
const MIGRATIONS_DIR = path.join(__dirname, '/db/migrations');
|
|
16
|
+
|
|
17
|
+
export class MongoMigrationAgent extends migrations.AbstractPowerSyncMigrationAgent {
|
|
18
|
+
store: framework.MigrationStore;
|
|
19
|
+
locks: framework.LockManager;
|
|
20
|
+
|
|
21
|
+
protected client: PowerSyncMongo;
|
|
22
|
+
|
|
23
|
+
constructor(mongoConfig: MongoStorageConfig) {
|
|
24
|
+
super();
|
|
25
|
+
|
|
26
|
+
this.client = createPowerSyncMongo(mongoConfig);
|
|
27
|
+
|
|
28
|
+
this.store = createMongoMigrationStore(this.client.db);
|
|
29
|
+
this.locks = new lib_mongo.locks.MongoLockManager({ collection: this.client.locks, name: MONGO_LOCK_PROCESS });
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
getInternalScriptsDir(): string {
|
|
33
|
+
return MIGRATIONS_DIR;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
async [Symbol.asyncDispose](): Promise<void> {
|
|
37
|
+
await this.client.client.close();
|
|
38
|
+
}
|
|
39
|
+
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import * as lib_mongo from '@powersync/lib-service-mongodb';
|
|
2
|
+
import { migrations } from '@powersync/service-core';
|
|
3
|
+
import * as storage from '../../../storage/storage-index.js';
|
|
4
|
+
import { MongoStorageConfig } from '../../../types/types.js';
|
|
5
|
+
|
|
6
|
+
export const up: migrations.PowerSyncMigrationFunction = async (context) => {
|
|
7
|
+
const {
|
|
8
|
+
service_context: { configuration }
|
|
9
|
+
} = context;
|
|
10
|
+
const database = storage.createPowerSyncMongo(configuration.storage as MongoStorageConfig);
|
|
11
|
+
await lib_mongo.waitForAuth(database.db);
|
|
12
|
+
try {
|
|
13
|
+
await database.bucket_parameters.createIndex(
|
|
14
|
+
{
|
|
15
|
+
'key.g': 1,
|
|
16
|
+
lookup: 1,
|
|
17
|
+
_id: 1
|
|
18
|
+
},
|
|
19
|
+
{ name: 'lookup1' }
|
|
20
|
+
);
|
|
21
|
+
} finally {
|
|
22
|
+
await database.client.close();
|
|
23
|
+
}
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
export const down: migrations.PowerSyncMigrationFunction = async (context) => {
|
|
27
|
+
const {
|
|
28
|
+
service_context: { configuration }
|
|
29
|
+
} = context;
|
|
30
|
+
|
|
31
|
+
const database = storage.createPowerSyncMongo(configuration.storage as MongoStorageConfig);
|
|
32
|
+
try {
|
|
33
|
+
if (await database.bucket_parameters.indexExists('lookup')) {
|
|
34
|
+
await database.bucket_parameters.dropIndex('lookup1');
|
|
35
|
+
}
|
|
36
|
+
} finally {
|
|
37
|
+
await database.client.close();
|
|
38
|
+
}
|
|
39
|
+
};
|