@powersync/service-core 0.0.0-dev-20241007120318 → 0.0.0-dev-20241015084348
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -5
- package/dist/api/diagnostics.js +167 -103
- package/dist/api/diagnostics.js.map +1 -1
- package/dist/entry/commands/compact-action.js +73 -9
- package/dist/entry/commands/compact-action.js.map +1 -1
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.d.ts +3 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js +31 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js.map +1 -0
- package/dist/replication/AbstractReplicationJob.d.ts +1 -1
- package/dist/replication/AbstractReplicationJob.js.map +1 -1
- package/dist/replication/AbstractReplicator.d.ts +2 -2
- package/dist/replication/AbstractReplicator.js +66 -3
- package/dist/replication/AbstractReplicator.js.map +1 -1
- package/dist/replication/ReplicationEngine.js.map +1 -1
- package/dist/replication/replication-index.d.ts +1 -1
- package/dist/replication/replication-index.js +1 -1
- package/dist/replication/replication-index.js.map +1 -1
- package/dist/routes/endpoints/checkpointing.js +5 -2
- package/dist/routes/endpoints/checkpointing.js.map +1 -1
- package/dist/runner/teardown.js +66 -4
- package/dist/runner/teardown.js.map +1 -1
- package/dist/storage/BucketStorage.d.ts +25 -7
- package/dist/storage/BucketStorage.js.map +1 -1
- package/dist/storage/MongoBucketStorage.d.ts +12 -5
- package/dist/storage/MongoBucketStorage.js +44 -23
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/ReplicationEventPayload.d.ts +14 -0
- package/dist/storage/ReplicationEventPayload.js +2 -0
- package/dist/storage/ReplicationEventPayload.js.map +1 -0
- package/dist/storage/SourceTable.d.ts +8 -0
- package/dist/storage/SourceTable.js +9 -1
- package/dist/storage/SourceTable.js.map +1 -1
- package/dist/storage/StorageEngine.d.ts +10 -2
- package/dist/storage/StorageEngine.js +23 -3
- package/dist/storage/StorageEngine.js.map +1 -1
- package/dist/storage/StorageProvider.d.ts +9 -2
- package/dist/storage/mongo/MongoBucketBatch.d.ts +12 -4
- package/dist/storage/mongo/MongoBucketBatch.js +59 -21
- package/dist/storage/mongo/MongoBucketBatch.js.map +1 -1
- package/dist/storage/mongo/MongoStorageProvider.d.ts +1 -1
- package/dist/storage/mongo/MongoStorageProvider.js +3 -2
- package/dist/storage/mongo/MongoStorageProvider.js.map +1 -1
- package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +3 -2
- package/dist/storage/mongo/MongoSyncBucketStorage.js +71 -10
- package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/mongo/MongoWriteCheckpointAPI.d.ts +18 -0
- package/dist/storage/mongo/MongoWriteCheckpointAPI.js +90 -0
- package/dist/storage/mongo/MongoWriteCheckpointAPI.js.map +1 -0
- package/dist/storage/mongo/db.d.ts +3 -2
- package/dist/storage/mongo/db.js +1 -0
- package/dist/storage/mongo/db.js.map +1 -1
- package/dist/storage/mongo/models.d.ts +7 -1
- package/dist/storage/storage-index.d.ts +2 -0
- package/dist/storage/storage-index.js +2 -0
- package/dist/storage/storage-index.js.map +1 -1
- package/dist/storage/write-checkpoint.d.ts +55 -0
- package/dist/storage/write-checkpoint.js +16 -0
- package/dist/storage/write-checkpoint.js.map +1 -0
- package/dist/util/config/compound-config-collector.js +2 -1
- package/dist/util/config/compound-config-collector.js.map +1 -1
- package/dist/util/config/types.d.ts +1 -0
- package/package.json +5 -5
- package/src/api/diagnostics.ts +6 -5
- package/src/entry/commands/compact-action.ts +4 -2
- package/src/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.ts +37 -0
- package/src/replication/AbstractReplicationJob.ts +2 -2
- package/src/replication/AbstractReplicator.ts +5 -4
- package/src/replication/ReplicationEngine.ts +1 -1
- package/src/replication/replication-index.ts +1 -1
- package/src/routes/endpoints/checkpointing.ts +5 -2
- package/src/runner/teardown.ts +3 -3
- package/src/storage/BucketStorage.ts +32 -9
- package/src/storage/MongoBucketStorage.ts +70 -29
- package/src/storage/ReplicationEventPayload.ts +16 -0
- package/src/storage/SourceTable.ts +10 -1
- package/src/storage/StorageEngine.ts +34 -5
- package/src/storage/StorageProvider.ts +10 -2
- package/src/storage/mongo/MongoBucketBatch.ts +82 -27
- package/src/storage/mongo/MongoStorageProvider.ts +4 -3
- package/src/storage/mongo/MongoSyncBucketStorage.ts +17 -15
- package/src/storage/mongo/MongoWriteCheckpointAPI.ts +136 -0
- package/src/storage/mongo/db.ts +4 -1
- package/src/storage/mongo/models.ts +8 -1
- package/src/storage/storage-index.ts +2 -0
- package/src/storage/write-checkpoint.ts +67 -0
- package/src/util/config/compound-config-collector.ts +2 -1
- package/src/util/config/types.ts +1 -0
- package/test/src/data_storage.test.ts +42 -10
- package/test/src/util.ts +1 -2
- package/tsconfig.tsbuildinfo +1 -1

package/src/storage/MongoBucketStorage.ts

```diff
@@ -8,11 +8,12 @@ import * as locks from '../locks/locks-index.js';
 import * as sync from '../sync/sync-index.js';
 import * as util from '../util/util-index.js';
 
-import { logger } from '@powersync/lib-services-framework';
+import { DisposableObserver, logger } from '@powersync/lib-services-framework';
 import { v4 as uuid } from 'uuid';
 import {
   ActiveCheckpoint,
   BucketStorageFactory,
+  BucketStorageFactoryListener,
   ParseSyncRulesOptions,
   PersistedSyncRules,
   PersistedSyncRulesContent,
@@ -20,20 +21,36 @@ import {
   UpdateSyncRulesOptions,
   WriteCheckpoint
 } from './BucketStorage.js';
-import { MongoPersistedSyncRulesContent } from './mongo/MongoPersistedSyncRulesContent.js';
-import { MongoSyncBucketStorage } from './mongo/MongoSyncBucketStorage.js';
 import { PowerSyncMongo, PowerSyncMongoOptions } from './mongo/db.js';
 import { SyncRuleDocument, SyncRuleState } from './mongo/models.js';
+import { MongoPersistedSyncRulesContent } from './mongo/MongoPersistedSyncRulesContent.js';
+import { MongoSyncBucketStorage } from './mongo/MongoSyncBucketStorage.js';
+import { MongoWriteCheckpointAPI } from './mongo/MongoWriteCheckpointAPI.js';
 import { generateSlotName } from './mongo/util.js';
+import {
+  CustomWriteCheckpointOptions,
+  DEFAULT_WRITE_CHECKPOINT_MODE,
+  LastWriteCheckpointFilters,
+  ManagedWriteCheckpointOptions,
+  WriteCheckpointAPI,
+  WriteCheckpointMode
+} from './write-checkpoint.js';
 
 export interface MongoBucketStorageOptions extends PowerSyncMongoOptions {}
 
-export class MongoBucketStorage implements BucketStorageFactory {
+export class MongoBucketStorage
+  extends DisposableObserver<BucketStorageFactoryListener>
+  implements BucketStorageFactory
+{
   private readonly client: mongo.MongoClient;
   private readonly session: mongo.ClientSession;
   // TODO: This is still Postgres specific and needs to be reworked
   public readonly slot_name_prefix: string;
 
+  readonly write_checkpoint_mode: WriteCheckpointMode;
+
+  protected readonly writeCheckpointAPI: WriteCheckpointAPI;
+
   private readonly storageCache = new LRUCache<number, MongoSyncBucketStorage>({
     max: 3,
     fetchMethod: async (id) => {
@@ -49,16 +66,31 @@ export class MongoBucketStorage implements BucketStorageFactory {
       }
       const rules = new MongoPersistedSyncRulesContent(this.db, doc2);
       return this.getInstance(rules);
+    },
+    dispose: (storage) => {
+      storage[Symbol.dispose]();
     }
   });
 
   public readonly db: PowerSyncMongo;
 
-  constructor(db: PowerSyncMongo, options: { slot_name_prefix: string }) {
+  constructor(
+    db: PowerSyncMongo,
+    options: {
+      slot_name_prefix: string;
+      write_checkpoint_mode?: WriteCheckpointMode;
+    }
+  ) {
+    super();
     this.client = db.client;
     this.db = db;
     this.session = this.client.startSession();
     this.slot_name_prefix = options.slot_name_prefix;
+    this.write_checkpoint_mode = options.write_checkpoint_mode ?? DEFAULT_WRITE_CHECKPOINT_MODE;
+    this.writeCheckpointAPI = new MongoWriteCheckpointAPI({
+      db,
+      mode: this.write_checkpoint_mode
+    });
   }
 
   getInstance(options: PersistedSyncRulesContent): MongoSyncBucketStorage {
@@ -66,7 +98,17 @@ export class MongoBucketStorage implements BucketStorageFactory {
     if ((typeof id as any) == 'bigint') {
       id = Number(id);
     }
-    return new MongoSyncBucketStorage(this, id, options, slot_name);
+    const storage = new MongoSyncBucketStorage(this, id, options, slot_name);
+    this.iterateListeners((cb) => cb.syncStorageCreated?.(storage));
+    storage.registerListener({
+      batchStarted: (batch) => {
+        // This nested listener will be automatically disposed when the storage is disposed
+        batch.registerManagedListener(storage, {
+          replicationEvent: (payload) => this.iterateListeners((cb) => cb.replicationEvent?.(payload))
+        });
+      }
+    });
+    return storage;
   }
 
   async configureSyncRules(sync_rules: string, options?: { lock?: boolean }) {
@@ -257,30 +299,20 @@ export class MongoBucketStorage implements BucketStorageFactory {
     });
   }
 
-  async createWriteCheckpoint(user_id: string, lsns: Record<string, string>): Promise<bigint> {
-    const doc = await this.db.write_checkpoints.findOneAndUpdate(
-      {
-        user_id: user_id
-      },
-      {
-        $set: {
-          lsns: lsns
-        },
-        $inc: {
-          client_id: 1n
-        }
-      },
-      { upsert: true, returnDocument: 'after' }
-    );
-    return doc!.client_id;
+  async batchCreateCustomWriteCheckpoints(checkpoints: CustomWriteCheckpointOptions[]): Promise<void> {
+    return this.writeCheckpointAPI.batchCreateCustomWriteCheckpoints(checkpoints);
   }
 
-  async lastWriteCheckpoint(user_id: string, lsn: string): Promise<bigint | null> {
-    const lastWriteCheckpoint = await this.db.write_checkpoints.findOne({
-      user_id: user_id,
-      'lsns.1': { $lte: lsn }
-    });
-    return lastWriteCheckpoint?.client_id ?? null;
+  async createCustomWriteCheckpoint(options: CustomWriteCheckpointOptions): Promise<bigint> {
+    return this.writeCheckpointAPI.createCustomWriteCheckpoint(options);
+  }
+
+  async createManagedWriteCheckpoint(options: ManagedWriteCheckpointOptions): Promise<bigint> {
+    return this.writeCheckpointAPI.createManagedWriteCheckpoint(options);
+  }
+
+  async lastWriteCheckpoint(filters: LastWriteCheckpointFilters): Promise<bigint | null> {
+    return this.writeCheckpointAPI.lastWriteCheckpoint(filters);
   }
 
   async getActiveCheckpoint(): Promise<ActiveCheckpoint> {
@@ -496,8 +528,17 @@ export class MongoBucketStorage implements BucketStorageFactory {
       // What is important is:
       // 1. checkpoint (op_id) changes.
      // 2. write checkpoint changes for the specific user
+      const bucketStorage = await cp.getBucketStorage();
 
-      const currentWriteCheckpoint = await this.lastWriteCheckpoint(user_id, lsn);
+      const lsnFilters: Record<string, string> = lsn ? { 1: lsn } : {};
+
+      const currentWriteCheckpoint = await this.lastWriteCheckpoint({
+        user_id,
+        sync_rules_id: bucketStorage?.group_id,
+        heads: {
+          ...lsnFilters
+        }
+      });
 
       if (currentWriteCheckpoint == lastWriteCheckpoint && checkpoint == lastCheckpoint) {
         // No change - wait for next one
```
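
For orientation, a rough sketch of how a caller might use the write-checkpoint methods that now delegate to the `WriteCheckpointAPI`. The option and filter shapes are inferred from the call sites in this diff and may differ from the actual definitions in `write-checkpoint.ts`; `storage`, `userId` and the LSN head values are illustrative only.

```ts
import { MongoBucketStorage } from '@powersync/service-core';

// Hypothetical caller; option/filter shapes are inferred from this diff and
// may not match the exact types in write-checkpoint.ts.
async function writeCheckpointExample(storage: MongoBucketStorage, userId: string) {
  // Managed mode: the checkpoint is derived from the replication heads.
  const managed = await storage.createManagedWriteCheckpoint({
    user_id: userId,
    heads: { '1': '0/1234' }
  });

  // Custom mode: the caller supplies the checkpoint value itself.
  const custom = await storage.createCustomWriteCheckpoint({
    user_id: userId,
    checkpoint: 1n,
    sync_rules_id: 1
  });

  // Both variants are resolved through the same lookup.
  const last = await storage.lastWriteCheckpoint({
    user_id: userId,
    sync_rules_id: 1,
    heads: { '1': '0/1234' }
  });

  return { managed, custom, last };
}
```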

package/src/storage/ReplicationEventPayload.ts

```diff
@@ -0,0 +1,16 @@
+import * as sync_rules from '@powersync/service-sync-rules';
+import { BucketStorageBatch, SaveOp } from './BucketStorage.js';
+import { SourceTable } from './SourceTable.js';
+
+export type EventData = {
+  op: SaveOp;
+  before?: sync_rules.SqliteRow;
+  after?: sync_rules.SqliteRow;
+};
+
+export type ReplicationEventPayload = {
+  batch: BucketStorageBatch;
+  data: EventData;
+  event: sync_rules.SqlEventDescriptor;
+  table: SourceTable;
+};
```
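
A minimal, hypothetical consumer of the new payload type, assuming the factory exposes `registerListener()` via `DisposableObserver` and that these types are re-exported from the package entry point:

```ts
import { BucketStorageFactory, ReplicationEventPayload } from '@powersync/service-core';

// Hypothetical consumer; assumes the listener interface includes the
// replicationEvent callback wired up in MongoBucketStorage.getInstance() above.
function watchReplicationEvents(storage: BucketStorageFactory) {
  storage.registerListener({
    replicationEvent: (payload: ReplicationEventPayload) => {
      // payload.data.op is the SaveOp of the replicated row; before/after rows
      // are only present when the row data is complete.
      console.log('replication event:', payload.data.op);
    }
  });
}
```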

package/src/storage/SourceTable.ts

```diff
@@ -23,6 +23,15 @@ export class SourceTable {
    */
   public syncParameters = true;
 
+  /**
+   * True if the table is used in sync rules for events.
+   *
+   * This value is resolved externally, and cached here.
+   *
+   * Defaults to true for tests.
+   */
+  public syncEvent = true;
+
   constructor(
     public readonly id: any,
     public readonly connectionTag: string,
@@ -53,6 +62,6 @@ export class SourceTable {
   }
 
   get syncAny() {
-    return this.syncData || this.syncParameters;
+    return this.syncData || this.syncParameters || this.syncEvent;
   }
 }
```
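
Restated standalone, the widened `syncAny` predicate is:

```ts
// Standalone restatement of the updated getter: a table that only feeds event
// queries (syncEvent) now also counts as synced.
function syncAny(t: { syncData: boolean; syncParameters: boolean; syncEvent: boolean }): boolean {
  return t.syncData || t.syncParameters || t.syncEvent;
}

// Previously false; true after this change.
syncAny({ syncData: false, syncParameters: false, syncEvent: true });
```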

package/src/storage/StorageEngine.ts

```diff
@@ -1,18 +1,31 @@
+import { DisposableListener, DisposableObserver, logger } from '@powersync/lib-services-framework';
 import { ResolvedPowerSyncConfig } from '../util/util-index.js';
 import { BucketStorageFactory } from './BucketStorage.js';
-import { BucketStorageProvider, ActiveStorage } from './StorageProvider.js';
-import { logger } from '@powersync/lib-services-framework';
+import { ActiveStorage, BucketStorageProvider, StorageSettings } from './StorageProvider.js';
+import { DEFAULT_WRITE_CHECKPOINT_MODE } from './write-checkpoint.js';
 
 export type StorageEngineOptions = {
   configuration: ResolvedPowerSyncConfig;
 };
 
-export class StorageEngine {
+export const DEFAULT_STORAGE_SETTINGS: StorageSettings = {
+  writeCheckpointMode: DEFAULT_WRITE_CHECKPOINT_MODE
+};
+
+export interface StorageEngineListener extends DisposableListener {
+  storageActivated: (storage: BucketStorageFactory) => void;
+}
+
+export class StorageEngine extends DisposableObserver<StorageEngineListener> {
   // TODO: This will need to revisited when we actually support multiple storage providers.
   private storageProviders: Map<string, BucketStorageProvider> = new Map();
   private currentActiveStorage: ActiveStorage | null = null;
+  private _activeSettings: StorageSettings;
 
-  constructor(private options: StorageEngineOptions) {}
+  constructor(private options: StorageEngineOptions) {
+    super();
+    this._activeSettings = DEFAULT_STORAGE_SETTINGS;
+  }
 
   get activeBucketStorage(): BucketStorageFactory {
     return this.activeStorage.storage;
@@ -26,6 +39,20 @@ export class StorageEngine {
     return this.currentActiveStorage;
   }
 
+  get activeSettings(): StorageSettings {
+    return { ...this._activeSettings };
+  }
+
+  updateSettings(settings: Partial<StorageSettings>) {
+    if (this.currentActiveStorage) {
+      throw new Error(`Storage is already active, settings cannot be modified.`);
+    }
+    this._activeSettings = {
+      ...this._activeSettings,
+      ...settings
+    };
+  }
+
   /**
    * Register a provider which generates a {@link BucketStorageFactory}
    * given the matching config specified in the loaded {@link ResolvedPowerSyncConfig}
@@ -38,8 +65,10 @@ export class StorageEngine {
     logger.info('Starting Storage Engine...');
     const { configuration } = this.options;
     this.currentActiveStorage = await this.storageProviders.get(configuration.storage.type)!.getStorage({
-      resolvedConfig: configuration
+      resolvedConfig: configuration,
+      ...this.activeSettings
     });
+    this.iterateListeners((cb) => cb.storageActivated?.(this.activeBucketStorage));
     logger.info(`Successfully activated storage: ${configuration.storage.type}.`);
     logger.info('Successfully started Storage Engine.');
   }
```
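
A sketch of how a module might use the new settings and listener hooks, assuming these symbols are re-exported from the package entry point and that the concrete `WriteCheckpointMode` value comes from `write-checkpoint.ts` (not shown in this section):

```ts
import { StorageEngine, WriteCheckpointMode } from '@powersync/service-core';

// Hypothetical module initialization. updateSettings() must run before the
// engine starts: once a storage is active it throws, per the diff above.
function configureStorage(engine: StorageEngine, mode: WriteCheckpointMode) {
  engine.updateSettings({ writeCheckpointMode: mode });

  engine.registerListener({
    storageActivated: (storage) => {
      // The activated BucketStorageFactory forwards replication events from its
      // sync-rule storages (see MongoBucketStorage.getInstance above).
      storage.registerListener({
        replicationEvent: (payload) => {
          console.log('replication event:', payload.data.op);
        }
      });
    }
  });
}
```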

package/src/storage/StorageProvider.ts

```diff
@@ -1,5 +1,6 @@
-import { BucketStorageFactory } from './BucketStorage.js';
 import * as util from '../util/util-index.js';
+import { BucketStorageFactory } from './BucketStorage.js';
+import { WriteCheckpointMode } from './write-checkpoint.js';
 
 export interface ActiveStorage {
   storage: BucketStorageFactory;
@@ -11,7 +12,14 @@ export interface ActiveStorage {
   tearDown(): Promise<boolean>;
 }
 
-export interface GetStorageOptions {
+/**
+ * Settings which can be modified by various modules in their initialization.
+ */
+export interface StorageSettings {
+  writeCheckpointMode: WriteCheckpointMode;
+}
+
+export interface GetStorageOptions extends StorageSettings {
   // TODO: This should just be the storage config. Update once the slot name prefix coupling has been removed from the storage
   resolvedConfig: util.ResolvedPowerSyncConfig;
 }
```

package/src/storage/mongo/MongoBucketBatch.ts

```diff
@@ -1,14 +1,22 @@
-import { SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules';
+import { SqlEventDescriptor, SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
 import * as mongo from 'mongodb';
 
-import { container, errors, logger } from '@powersync/lib-services-framework';
+import { container, DisposableObserver, errors, logger } from '@powersync/lib-services-framework';
 import * as util from '../../util/util-index.js';
-import { BucketStorageBatch, FlushedResult, mergeToast, SaveOptions } from '../BucketStorage.js';
+import {
+  BucketBatchStorageListener,
+  BucketStorageBatch,
+  FlushedResult,
+  mergeToast,
+  SaveOptions
+} from '../BucketStorage.js';
 import { SourceTable } from '../SourceTable.js';
+import { CustomWriteCheckpointOptions } from '../write-checkpoint.js';
 import { PowerSyncMongo } from './db.js';
 import { CurrentBucket, CurrentDataDocument, SourceKey, SyncRuleDocument } from './models.js';
 import { MongoIdSequence } from './MongoIdSequence.js';
+import { batchCreateCustomWriteCheckpoints } from './MongoWriteCheckpointAPI.js';
 import { cacheKey, OperationBatch, RecordOperation } from './OperationBatch.js';
 import { PersistedBatch } from './PersistedBatch.js';
 import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, replicaIdEquals, serializeLookup } from './util.js';
@@ -25,7 +33,7 @@ const MAX_ROW_SIZE = 15 * 1024 * 1024;
 // In the future, we can investigate allowing multiple replication streams operating independently.
 const replicationMutex = new util.Mutex();
 
-export class MongoBucketBatch implements BucketStorageBatch {
+export class MongoBucketBatch extends DisposableObserver<BucketBatchStorageListener> implements BucketStorageBatch {
   private readonly client: mongo.MongoClient;
   public readonly db: PowerSyncMongo;
   public readonly session: mongo.ClientSession;
@@ -36,6 +44,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
   private readonly slot_name: string;
 
   private batch: OperationBatch | null = null;
+  private write_checkpoint_batch: CustomWriteCheckpointOptions[] = [];
 
   /**
    * Last LSN received associated with a checkpoint.
@@ -63,14 +72,22 @@ export class MongoBucketBatch implements BucketStorageBatch {
     last_checkpoint_lsn: string | null,
     no_checkpoint_before_lsn: string
   ) {
-    this.db = db;
+    super();
     this.client = db.client;
-    this.sync_rules = sync_rules;
+    this.db = db;
     this.group_id = group_id;
-    this.slot_name = slot_name;
-    this.session = this.client.startSession();
     this.last_checkpoint_lsn = last_checkpoint_lsn;
     this.no_checkpoint_before_lsn = no_checkpoint_before_lsn;
+    this.session = this.client.startSession();
+    this.slot_name = slot_name;
+    this.sync_rules = sync_rules;
+  }
+
+  addCustomWriteCheckpoint(checkpoint: CustomWriteCheckpointOptions): void {
+    this.write_checkpoint_batch.push({
+      ...checkpoint,
+      sync_rules_id: this.group_id
+    });
   }
 
   get lastCheckpointLsn() {
@@ -87,6 +104,8 @@ export class MongoBucketBatch implements BucketStorageBatch {
         result = r;
       }
     }
+    await batchCreateCustomWriteCheckpoints(this.db, this.write_checkpoint_batch);
+    this.write_checkpoint_batch = [];
     return result;
   }
 
@@ -532,8 +551,9 @@ export class MongoBucketBatch implements BucketStorageBatch {
     });
   }
 
-  async dispose() {
+  async [Symbol.asyncDispose]() {
     await this.session.endSession();
+    super[Symbol.dispose]();
   }
 
   async commit(lsn: string): Promise<boolean> {
@@ -550,26 +570,29 @@ export class MongoBucketBatch implements BucketStorageBatch {
       return false;
     }
 
+    const now = new Date();
+    const update: Partial<SyncRuleDocument> = {
+      last_checkpoint_lsn: lsn,
+      last_checkpoint_ts: now,
+      last_keepalive_ts: now,
+      snapshot_done: true,
+      last_fatal_error: null
+    };
+
     if (this.persisted_op != null) {
-      const now = new Date();
-      await this.db.sync_rules.updateOne(
-        {
-          _id: this.group_id
-        },
-        {
-          $set: {
-            last_checkpoint: this.persisted_op,
-            last_checkpoint_lsn: lsn,
-            last_checkpoint_ts: now,
-            last_keepalive_ts: now,
-            snapshot_done: true,
-            last_fatal_error: null
-          }
-        },
-        { session: this.session }
-      );
-      this.persisted_op = null;
+      update.last_checkpoint = this.persisted_op;
     }
+
+    await this.db.sync_rules.updateOne(
+      {
+        _id: this.group_id
+      },
+      {
+        $set: update
+      },
+      { session: this.session }
+    );
+    this.persisted_op = null;
     this.last_checkpoint_lsn = lsn;
     return true;
   }
@@ -610,6 +633,29 @@ export class MongoBucketBatch implements BucketStorageBatch {
   }
 
   async save(record: SaveOptions): Promise<FlushedResult | null> {
+    const { after, before, sourceTable, tag } = record;
+    for (const event of this.getTableEvents(sourceTable)) {
+      this.iterateListeners((cb) =>
+        cb.replicationEvent?.({
+          batch: this,
+          table: sourceTable,
+          data: {
+            op: tag,
+            after: after && util.isCompleteRow(after) ? after : undefined,
+            before: before && util.isCompleteRow(before) ? before : undefined
+          },
+          event
+        })
+      );
+    }
+
+    /**
+     * Return if the table is just an event table
+     */
+    if (!sourceTable.syncData && !sourceTable.syncParameters) {
+      return null;
+    }
+
     logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`);
 
     this.batch ??= new OperationBatch();
@@ -758,6 +804,15 @@ export class MongoBucketBatch implements BucketStorageBatch {
       return copy;
     });
   }
+
+  /**
+   * Gets relevant {@link SqlEventDescriptor}s for the given {@link SourceTable}
+   */
+  protected getTableEvents(table: SourceTable): SqlEventDescriptor[] {
+    return this.sync_rules.event_descriptors.filter((evt) =>
+      [...evt.getSourceTables()].some((sourceTable) => sourceTable.matches(table))
+    );
+  }
 }
 
 export function currentBucketKey(b: CurrentBucket) {
```
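
A hypothetical handler built on the batch hooks above: it queues custom write checkpoints on the batch, which `flush()` then persists via `batchCreateCustomWriteCheckpoints()`. It assumes the `BucketStorageBatch` interface exposes `addCustomWriteCheckpoint()` as implemented here; the row field names (`user_id`, `checkpoint`) and the exact `CustomWriteCheckpointOptions` shape are assumptions for illustration:

```ts
import { ReplicationEventPayload } from '@powersync/service-core';

// Hypothetical handler. addCustomWriteCheckpoint() overwrites sync_rules_id with
// the batch's own group_id, and the queued checkpoints are persisted by
// batchCreateCustomWriteCheckpoints() on the next flush().
function onReplicationEvent(payload: ReplicationEventPayload) {
  const row = payload.data.after;
  if (row == null) {
    return;
  }
  payload.batch.addCustomWriteCheckpoint({
    user_id: String(row.user_id),
    checkpoint: BigInt(row.checkpoint as string | number),
    sync_rules_id: 0 // placeholder; replaced by the batch with its group_id
  });
}
```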

package/src/storage/mongo/MongoStorageProvider.ts

```diff
@@ -1,8 +1,8 @@
+import { logger } from '@powersync/lib-services-framework';
 import * as db from '../../db/db-index.js';
 import { MongoBucketStorage } from '../MongoBucketStorage.js';
-import { BucketStorageProvider, ActiveStorage, GetStorageOptions } from '../StorageProvider.js';
+import { ActiveStorage, BucketStorageProvider, GetStorageOptions } from '../StorageProvider.js';
 import { PowerSyncMongo } from './db.js';
-import { logger } from '@powersync/lib-services-framework';
 
 export class MongoStorageProvider implements BucketStorageProvider {
   get type() {
@@ -19,7 +19,8 @@ export class MongoStorageProvider implements BucketStorageProvider {
     return {
       storage: new MongoBucketStorage(database, {
         // TODO currently need the entire resolved config due to this
-        slot_name_prefix: resolvedConfig.slot_name_prefix
+        slot_name_prefix: resolvedConfig.slot_name_prefix,
+        write_checkpoint_mode: options.writeCheckpointMode
       }),
       shutDown: () => client.close(),
       tearDown: () => {
```

package/src/storage/mongo/MongoSyncBucketStorage.ts

```diff
@@ -2,6 +2,7 @@ import { SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
 import * as mongo from 'mongodb';
 
+import { DisposableObserver } from '@powersync/lib-services-framework';
 import * as db from '../../db/db-index.js';
 import * as util from '../../util/util-index.js';
 import {
@@ -12,13 +13,13 @@
   DEFAULT_DOCUMENT_CHUNK_LIMIT_BYTES,
   FlushedResult,
   ParseSyncRulesOptions,
-  PersistedSyncRules,
   PersistedSyncRulesContent,
   ResolveTableOptions,
   ResolveTableResult,
   StartBatchOptions,
   SyncBucketDataBatch,
   SyncRulesBucketStorage,
+  SyncRulesBucketStorageListener,
   SyncRuleStatus,
   TerminateOptions
 } from '../BucketStorage.js';
@@ -31,7 +32,10 @@ import { MongoBucketBatch } from './MongoBucketBatch.js';
 import { MongoCompactor } from './MongoCompactor.js';
 import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, mapOpEntry, readSingleBatch, serializeLookup } from './util.js';
 
-export class MongoSyncBucketStorage implements SyncRulesBucketStorage {
+export class MongoSyncBucketStorage
+  extends DisposableObserver<SyncRulesBucketStorageListener>
+  implements SyncRulesBucketStorage
+{
   private readonly db: PowerSyncMongo;
   private checksumCache = new ChecksumCache({
     fetchChecksums: (batch) => {
@@ -47,6 +51,7 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage {
     private readonly sync_rules: PersistedSyncRulesContent,
     public readonly slot_name: string
   ) {
+    super();
     this.db = factory.db;
   }
 
@@ -79,7 +84,7 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage {
     );
     const checkpoint_lsn = doc?.last_checkpoint_lsn ?? null;
 
-    const batch = new MongoBucketBatch(
+    await using batch = new MongoBucketBatch(
       this.db,
       this.sync_rules.parsed(options).sync_rules,
       this.group_id,
@@ -87,18 +92,14 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage {
       checkpoint_lsn,
       doc?.no_checkpoint_before ?? options.zeroLSN
     );
-    try {
-      await callback(batch);
-      await batch.flush();
-
-      if (batch.last_flushed_op) {
-        return { flushed_op: String(batch.last_flushed_op) };
-      } else {
-        return null;
-      }
-    } catch (e) {
-      await batch.abort();
-      throw e;
+    this.iterateListeners((cb) => cb.batchStarted?.(batch));
+
+    await callback(batch);
+    await batch.flush();
+    if (batch.last_flushed_op) {
+      return { flushed_op: String(batch.last_flushed_op) };
+    } else {
+      return null;
     }
   }
 
@@ -150,6 +151,7 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage {
         replicationColumns,
         doc.snapshot_done ?? true
       );
+      sourceTable.syncEvent = options.sync_rules.tableTriggersEvent(sourceTable);
      sourceTable.syncData = options.sync_rules.tableSyncsData(sourceTable);
      sourceTable.syncParameters = options.sync_rules.tableSyncsParameters(sourceTable);
 
```