@powersync/service-core 0.0.0-dev-20241128134723 → 0.0.0-dev-20241219091224

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (188)
  1. package/CHANGELOG.md +63 -4
  2. package/dist/auth/KeySpec.d.ts +1 -0
  3. package/dist/auth/KeySpec.js +10 -8
  4. package/dist/auth/KeySpec.js.map +1 -1
  5. package/dist/auth/RemoteJWKSCollector.js +2 -2
  6. package/dist/auth/RemoteJWKSCollector.js.map +1 -1
  7. package/dist/entry/commands/compact-action.js +15 -15
  8. package/dist/entry/commands/compact-action.js.map +1 -1
  9. package/dist/entry/commands/migrate-action.js +15 -4
  10. package/dist/entry/commands/migrate-action.js.map +1 -1
  11. package/dist/index.d.ts +1 -3
  12. package/dist/index.js +1 -3
  13. package/dist/index.js.map +1 -1
  14. package/dist/migrations/PowerSyncMigrationManager.d.ts +17 -0
  15. package/dist/migrations/PowerSyncMigrationManager.js +22 -0
  16. package/dist/migrations/PowerSyncMigrationManager.js.map +1 -0
  17. package/dist/migrations/ensure-automatic-migrations.d.ts +4 -0
  18. package/dist/migrations/ensure-automatic-migrations.js +14 -0
  19. package/dist/migrations/ensure-automatic-migrations.js.map +1 -0
  20. package/dist/migrations/migrations-index.d.ts +2 -3
  21. package/dist/migrations/migrations-index.js +2 -3
  22. package/dist/migrations/migrations-index.js.map +1 -1
  23. package/dist/routes/RouterEngine.js +2 -1
  24. package/dist/routes/RouterEngine.js.map +1 -1
  25. package/dist/routes/configure-fastify.d.ts +28 -28
  26. package/dist/routes/endpoints/admin.d.ts +24 -24
  27. package/dist/storage/BucketStorage.d.ts +41 -1
  28. package/dist/storage/BucketStorage.js +26 -0
  29. package/dist/storage/BucketStorage.js.map +1 -1
  30. package/dist/storage/storage-index.d.ts +2 -14
  31. package/dist/storage/storage-index.js +2 -14
  32. package/dist/storage/storage-index.js.map +1 -1
  33. package/dist/sync/sync.js +12 -3
  34. package/dist/sync/sync.js.map +1 -1
  35. package/dist/system/ServiceContext.d.ts +3 -0
  36. package/dist/system/ServiceContext.js +11 -3
  37. package/dist/system/ServiceContext.js.map +1 -1
  38. package/dist/util/config/types.d.ts +2 -2
  39. package/dist/util/utils.d.ts +14 -1
  40. package/dist/util/utils.js +56 -0
  41. package/dist/util/utils.js.map +1 -1
  42. package/package.json +6 -7
  43. package/src/auth/KeySpec.ts +12 -9
  44. package/src/auth/RemoteJWKSCollector.ts +2 -2
  45. package/src/entry/commands/compact-action.ts +20 -15
  46. package/src/entry/commands/migrate-action.ts +17 -4
  47. package/src/index.ts +1 -4
  48. package/src/migrations/PowerSyncMigrationManager.ts +43 -0
  49. package/src/migrations/ensure-automatic-migrations.ts +15 -0
  50. package/src/migrations/migrations-index.ts +2 -3
  51. package/src/routes/RouterEngine.ts +2 -1
  52. package/src/storage/BucketStorage.ts +44 -1
  53. package/src/storage/storage-index.ts +3 -15
  54. package/src/sync/sync.ts +12 -3
  55. package/src/system/ServiceContext.ts +17 -4
  56. package/src/util/config/types.ts +2 -2
  57. package/src/util/utils.ts +59 -1
  58. package/test/src/auth.test.ts +54 -21
  59. package/test/src/env.ts +0 -1
  60. package/tsconfig.tsbuildinfo +1 -1
  61. package/dist/db/db-index.d.ts +0 -1
  62. package/dist/db/db-index.js +0 -2
  63. package/dist/db/db-index.js.map +0 -1
  64. package/dist/db/mongo.d.ts +0 -35
  65. package/dist/db/mongo.js +0 -73
  66. package/dist/db/mongo.js.map +0 -1
  67. package/dist/locks/LockManager.d.ts +0 -10
  68. package/dist/locks/LockManager.js +0 -7
  69. package/dist/locks/LockManager.js.map +0 -1
  70. package/dist/locks/MongoLocks.d.ts +0 -36
  71. package/dist/locks/MongoLocks.js +0 -81
  72. package/dist/locks/MongoLocks.js.map +0 -1
  73. package/dist/locks/locks-index.d.ts +0 -2
  74. package/dist/locks/locks-index.js +0 -3
  75. package/dist/locks/locks-index.js.map +0 -1
  76. package/dist/migrations/db/migrations/1684951997326-init.d.ts +0 -3
  77. package/dist/migrations/db/migrations/1684951997326-init.js +0 -33
  78. package/dist/migrations/db/migrations/1684951997326-init.js.map +0 -1
  79. package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.d.ts +0 -2
  80. package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js +0 -5
  81. package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js.map +0 -1
  82. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.d.ts +0 -3
  83. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +0 -56
  84. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +0 -1
  85. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.d.ts +0 -3
  86. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js +0 -29
  87. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +0 -1
  88. package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.d.ts +0 -3
  89. package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js +0 -31
  90. package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js.map +0 -1
  91. package/dist/migrations/definitions.d.ts +0 -18
  92. package/dist/migrations/definitions.js +0 -6
  93. package/dist/migrations/definitions.js.map +0 -1
  94. package/dist/migrations/executor.d.ts +0 -16
  95. package/dist/migrations/executor.js +0 -64
  96. package/dist/migrations/executor.js.map +0 -1
  97. package/dist/migrations/migrations.d.ts +0 -18
  98. package/dist/migrations/migrations.js +0 -110
  99. package/dist/migrations/migrations.js.map +0 -1
  100. package/dist/migrations/store/migration-store.d.ts +0 -11
  101. package/dist/migrations/store/migration-store.js +0 -46
  102. package/dist/migrations/store/migration-store.js.map +0 -1
  103. package/dist/storage/MongoBucketStorage.d.ts +0 -48
  104. package/dist/storage/MongoBucketStorage.js +0 -426
  105. package/dist/storage/MongoBucketStorage.js.map +0 -1
  106. package/dist/storage/mongo/MongoBucketBatch.d.ts +0 -67
  107. package/dist/storage/mongo/MongoBucketBatch.js +0 -643
  108. package/dist/storage/mongo/MongoBucketBatch.js.map +0 -1
  109. package/dist/storage/mongo/MongoCompactor.d.ts +0 -40
  110. package/dist/storage/mongo/MongoCompactor.js +0 -309
  111. package/dist/storage/mongo/MongoCompactor.js.map +0 -1
  112. package/dist/storage/mongo/MongoIdSequence.d.ts +0 -12
  113. package/dist/storage/mongo/MongoIdSequence.js +0 -21
  114. package/dist/storage/mongo/MongoIdSequence.js.map +0 -1
  115. package/dist/storage/mongo/MongoPersistedSyncRules.d.ts +0 -9
  116. package/dist/storage/mongo/MongoPersistedSyncRules.js +0 -9
  117. package/dist/storage/mongo/MongoPersistedSyncRules.js.map +0 -1
  118. package/dist/storage/mongo/MongoPersistedSyncRulesContent.d.ts +0 -20
  119. package/dist/storage/mongo/MongoPersistedSyncRulesContent.js +0 -26
  120. package/dist/storage/mongo/MongoPersistedSyncRulesContent.js.map +0 -1
  121. package/dist/storage/mongo/MongoStorageProvider.d.ts +0 -5
  122. package/dist/storage/mongo/MongoStorageProvider.js +0 -26
  123. package/dist/storage/mongo/MongoStorageProvider.js.map +0 -1
  124. package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +0 -38
  125. package/dist/storage/mongo/MongoSyncBucketStorage.js +0 -531
  126. package/dist/storage/mongo/MongoSyncBucketStorage.js.map +0 -1
  127. package/dist/storage/mongo/MongoSyncRulesLock.d.ts +0 -16
  128. package/dist/storage/mongo/MongoSyncRulesLock.js +0 -65
  129. package/dist/storage/mongo/MongoSyncRulesLock.js.map +0 -1
  130. package/dist/storage/mongo/MongoWriteCheckpointAPI.d.ts +0 -20
  131. package/dist/storage/mongo/MongoWriteCheckpointAPI.js +0 -103
  132. package/dist/storage/mongo/MongoWriteCheckpointAPI.js.map +0 -1
  133. package/dist/storage/mongo/OperationBatch.d.ts +0 -35
  134. package/dist/storage/mongo/OperationBatch.js +0 -119
  135. package/dist/storage/mongo/OperationBatch.js.map +0 -1
  136. package/dist/storage/mongo/PersistedBatch.d.ts +0 -46
  137. package/dist/storage/mongo/PersistedBatch.js +0 -213
  138. package/dist/storage/mongo/PersistedBatch.js.map +0 -1
  139. package/dist/storage/mongo/config.d.ts +0 -19
  140. package/dist/storage/mongo/config.js +0 -26
  141. package/dist/storage/mongo/config.js.map +0 -1
  142. package/dist/storage/mongo/db.d.ts +0 -36
  143. package/dist/storage/mongo/db.js +0 -47
  144. package/dist/storage/mongo/db.js.map +0 -1
  145. package/dist/storage/mongo/models.d.ts +0 -156
  146. package/dist/storage/mongo/models.js +0 -27
  147. package/dist/storage/mongo/models.js.map +0 -1
  148. package/dist/storage/mongo/util.d.ts +0 -40
  149. package/dist/storage/mongo/util.js +0 -151
  150. package/dist/storage/mongo/util.js.map +0 -1
  151. package/src/db/db-index.ts +0 -1
  152. package/src/db/mongo.ts +0 -81
  153. package/src/locks/LockManager.ts +0 -16
  154. package/src/locks/MongoLocks.ts +0 -142
  155. package/src/locks/locks-index.ts +0 -2
  156. package/src/migrations/db/migrations/1684951997326-init.ts +0 -38
  157. package/src/migrations/db/migrations/1688556755264-initial-sync-rules.ts +0 -5
  158. package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +0 -102
  159. package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +0 -34
  160. package/src/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.ts +0 -37
  161. package/src/migrations/definitions.ts +0 -21
  162. package/src/migrations/executor.ts +0 -87
  163. package/src/migrations/migrations.ts +0 -142
  164. package/src/migrations/store/migration-store.ts +0 -63
  165. package/src/storage/MongoBucketStorage.ts +0 -540
  166. package/src/storage/mongo/MongoBucketBatch.ts +0 -841
  167. package/src/storage/mongo/MongoCompactor.ts +0 -392
  168. package/src/storage/mongo/MongoIdSequence.ts +0 -24
  169. package/src/storage/mongo/MongoPersistedSyncRules.ts +0 -16
  170. package/src/storage/mongo/MongoPersistedSyncRulesContent.ts +0 -50
  171. package/src/storage/mongo/MongoStorageProvider.ts +0 -31
  172. package/src/storage/mongo/MongoSyncBucketStorage.ts +0 -636
  173. package/src/storage/mongo/MongoSyncRulesLock.ts +0 -85
  174. package/src/storage/mongo/MongoWriteCheckpointAPI.ts +0 -151
  175. package/src/storage/mongo/OperationBatch.ts +0 -131
  176. package/src/storage/mongo/PersistedBatch.ts +0 -272
  177. package/src/storage/mongo/config.ts +0 -40
  178. package/src/storage/mongo/db.ts +0 -88
  179. package/src/storage/mongo/models.ts +0 -179
  180. package/src/storage/mongo/util.ts +0 -158
  181. package/test/src/__snapshots__/sync.test.ts.snap +0 -332
  182. package/test/src/bucket_validation.test.ts +0 -142
  183. package/test/src/bucket_validation.ts +0 -116
  184. package/test/src/compacting.test.ts +0 -295
  185. package/test/src/data_storage.test.ts +0 -1499
  186. package/test/src/stream_utils.ts +0 -42
  187. package/test/src/sync.test.ts +0 -511
  188. package/test/src/util.ts +0 -148
package/src/storage/mongo/MongoSyncBucketStorage.ts (deleted)
@@ -1,636 +0,0 @@
- import { SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
- import * as bson from 'bson';
- import * as mongo from 'mongodb';
-
- import { DisposableObserver, logger } from '@powersync/lib-services-framework';
- import * as timers from 'timers/promises';
- import * as db from '../../db/db-index.js';
- import * as util from '../../util/util-index.js';
- import {
-   BucketDataBatchOptions,
-   BucketStorageBatch,
-   CompactOptions,
-   DEFAULT_DOCUMENT_BATCH_LIMIT,
-   DEFAULT_DOCUMENT_CHUNK_LIMIT_BYTES,
-   FlushedResult,
-   ParseSyncRulesOptions,
-   PersistedSyncRulesContent,
-   ReplicationCheckpoint,
-   ResolveTableOptions,
-   ResolveTableResult,
-   StartBatchOptions,
-   SyncBucketDataBatch,
-   SyncRulesBucketStorage,
-   SyncRulesBucketStorageListener,
-   SyncRuleStatus,
-   TerminateOptions
- } from '../BucketStorage.js';
- import { ChecksumCache, FetchPartialBucketChecksum, PartialChecksum, PartialChecksumMap } from '../ChecksumCache.js';
- import { MongoBucketStorage } from '../MongoBucketStorage.js';
- import { SourceTable } from '../SourceTable.js';
- import {
-   BatchedCustomWriteCheckpointOptions,
-   ManagedWriteCheckpointOptions,
-   SyncStorageLastWriteCheckpointFilters,
-   WriteCheckpointAPI,
-   WriteCheckpointMode
- } from '../WriteCheckpointAPI.js';
- import { PowerSyncMongo } from './db.js';
- import { BucketDataDocument, BucketDataKey, SourceKey, SyncRuleState } from './models.js';
- import { MongoBucketBatch } from './MongoBucketBatch.js';
- import { MongoCompactor } from './MongoCompactor.js';
- import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
- import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, mapOpEntry, readSingleBatch, serializeLookup } from './util.js';
-
- export class MongoSyncBucketStorage
-   extends DisposableObserver<SyncRulesBucketStorageListener>
-   implements SyncRulesBucketStorage
- {
-   private readonly db: PowerSyncMongo;
-   private checksumCache = new ChecksumCache({
-     fetchChecksums: (batch) => {
-       return this.getChecksumsInternal(batch);
-     }
-   });
-
-   private parsedSyncRulesCache: { parsed: SqlSyncRules; options: ParseSyncRulesOptions } | undefined;
-   private writeCheckpointAPI: WriteCheckpointAPI;
-
-   constructor(
-     public readonly factory: MongoBucketStorage,
-     public readonly group_id: number,
-     private readonly sync_rules: PersistedSyncRulesContent,
-     public readonly slot_name: string,
-     writeCheckpointMode: WriteCheckpointMode = WriteCheckpointMode.MANAGED
-   ) {
-     super();
-     this.db = factory.db;
-     this.writeCheckpointAPI = new MongoWriteCheckpointAPI({
-       db: this.db,
-       mode: writeCheckpointMode
-     });
-   }
-
-   get writeCheckpointMode() {
-     return this.writeCheckpointAPI.writeCheckpointMode;
-   }
-
-   setWriteCheckpointMode(mode: WriteCheckpointMode): void {
-     this.writeCheckpointAPI.setWriteCheckpointMode(mode);
-   }
-
-   batchCreateCustomWriteCheckpoints(checkpoints: BatchedCustomWriteCheckpointOptions[]): Promise<void> {
-     return this.writeCheckpointAPI.batchCreateCustomWriteCheckpoints(
-       checkpoints.map((checkpoint) => ({ ...checkpoint, sync_rules_id: this.group_id }))
-     );
-   }
-
-   createCustomWriteCheckpoint(checkpoint: BatchedCustomWriteCheckpointOptions): Promise<bigint> {
-     return this.writeCheckpointAPI.createCustomWriteCheckpoint({
-       ...checkpoint,
-       sync_rules_id: this.group_id
-     });
-   }
-
-   createManagedWriteCheckpoint(checkpoint: ManagedWriteCheckpointOptions): Promise<bigint> {
-     return this.writeCheckpointAPI.createManagedWriteCheckpoint(checkpoint);
-   }
-
-   lastWriteCheckpoint(filters: SyncStorageLastWriteCheckpointFilters): Promise<bigint | null> {
-     return this.writeCheckpointAPI.lastWriteCheckpoint({
-       ...filters,
-       sync_rules_id: this.group_id
-     });
-   }
-
-   getParsedSyncRules(options: ParseSyncRulesOptions): SqlSyncRules {
-     const { parsed, options: cachedOptions } = this.parsedSyncRulesCache ?? {};
-     /**
-      * Check if the cached sync rules, if present, had the same options.
-      * Parse sync rules if the options are different or if there is no cached value.
-      */
-     if (!parsed || options.defaultSchema != cachedOptions?.defaultSchema) {
-       this.parsedSyncRulesCache = { parsed: this.sync_rules.parsed(options).sync_rules, options };
-     }
-
-     return this.parsedSyncRulesCache!.parsed;
-   }
-
-   async getCheckpoint(): Promise<ReplicationCheckpoint> {
-     const doc = await this.db.sync_rules.findOne(
-       { _id: this.group_id },
-       {
-         projection: { last_checkpoint: 1, last_checkpoint_lsn: 1 }
-       }
-     );
-     return {
-       checkpoint: util.timestampToOpId(doc?.last_checkpoint ?? 0n),
-       lsn: doc?.last_checkpoint_lsn ?? null
-     };
-   }
-
-   async startBatch(
-     options: StartBatchOptions,
-     callback: (batch: BucketStorageBatch) => Promise<void>
-   ): Promise<FlushedResult | null> {
-     const doc = await this.db.sync_rules.findOne(
-       {
-         _id: this.group_id
-       },
-       { projection: { last_checkpoint_lsn: 1, no_checkpoint_before: 1 } }
-     );
-     const checkpoint_lsn = doc?.last_checkpoint_lsn ?? null;
-
-     await using batch = new MongoBucketBatch({
-       db: this.db,
-       syncRules: this.sync_rules.parsed(options).sync_rules,
-       groupId: this.group_id,
-       slotName: this.slot_name,
-       lastCheckpointLsn: checkpoint_lsn,
-       noCheckpointBeforeLsn: doc?.no_checkpoint_before ?? options.zeroLSN,
-       storeCurrentData: options.storeCurrentData
-     });
-     this.iterateListeners((cb) => cb.batchStarted?.(batch));
-
-     await callback(batch);
-     await batch.flush();
-     if (batch.last_flushed_op) {
-       return { flushed_op: String(batch.last_flushed_op) };
-     } else {
-       return null;
-     }
-   }
-
-   async resolveTable(options: ResolveTableOptions): Promise<ResolveTableResult> {
-     const { group_id, connection_id, connection_tag, entity_descriptor } = options;
-
-     const { schema, name: table, objectId, replicationColumns } = entity_descriptor;
-
-     const columns = replicationColumns.map((column) => ({
-       name: column.name,
-       type: column.type,
-       type_oid: column.typeId
-     }));
-     let result: ResolveTableResult | null = null;
-     await this.db.client.withSession(async (session) => {
-       const col = this.db.source_tables;
-       let doc = await col.findOne(
-         {
-           group_id: group_id,
-           connection_id: connection_id,
-           relation_id: objectId,
-           schema_name: schema,
-           table_name: table,
-           replica_id_columns2: columns
-         },
-         { session }
-       );
-       if (doc == null) {
-         doc = {
-           _id: new bson.ObjectId(),
-           group_id: group_id,
-           connection_id: connection_id,
-           relation_id: objectId,
-           schema_name: schema,
-           table_name: table,
-           replica_id_columns: null,
-           replica_id_columns2: columns,
-           snapshot_done: false
-         };
-
-         await col.insertOne(doc, { session });
-       }
-       const sourceTable = new SourceTable(
-         doc._id,
-         connection_tag,
-         objectId,
-         schema,
-         table,
-         replicationColumns,
-         doc.snapshot_done ?? true
-       );
-       sourceTable.syncEvent = options.sync_rules.tableTriggersEvent(sourceTable);
-       sourceTable.syncData = options.sync_rules.tableSyncsData(sourceTable);
-       sourceTable.syncParameters = options.sync_rules.tableSyncsParameters(sourceTable);
-
-       const truncate = await col
-         .find(
-           {
-             group_id: group_id,
-             connection_id: connection_id,
-             _id: { $ne: doc._id },
-             $or: [{ relation_id: objectId }, { schema_name: schema, table_name: table }]
-           },
-           { session }
-         )
-         .toArray();
-       result = {
-         table: sourceTable,
-         dropTables: truncate.map(
-           (doc) =>
-             new SourceTable(
-               doc._id,
-               connection_tag,
-               doc.relation_id ?? 0,
-               doc.schema_name,
-               doc.table_name,
-               doc.replica_id_columns2?.map((c) => ({ name: c.name, typeOid: c.type_oid, type: c.type })) ?? [],
-               doc.snapshot_done ?? true
-             )
-         )
-       };
-     });
-     return result!;
-   }
-
-   async getParameterSets(checkpoint: util.OpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]> {
-     const lookupFilter = lookups.map((lookup) => {
-       return serializeLookup(lookup);
-     });
-     const rows = await this.db.bucket_parameters
-       .aggregate([
-         {
-           $match: {
-             'key.g': this.group_id,
-             lookup: { $in: lookupFilter },
-             _id: { $lte: BigInt(checkpoint) }
-           }
-         },
-         {
-           $sort: {
-             _id: -1
-           }
-         },
-         {
-           $group: {
-             _id: '$key',
-             bucket_parameters: {
-               $first: '$bucket_parameters'
-             }
-           }
-         }
-       ])
-       .toArray();
-     const groupedParameters = rows.map((row) => {
-       return row.bucket_parameters;
-     });
-     return groupedParameters.flat();
-   }
-
-   async *getBucketDataBatch(
-     checkpoint: util.OpId,
-     dataBuckets: Map<string, string>,
-     options?: BucketDataBatchOptions
-   ): AsyncIterable<SyncBucketDataBatch> {
-     if (dataBuckets.size == 0) {
-       return;
-     }
-     let filters: mongo.Filter<BucketDataDocument>[] = [];
-
-     const end = checkpoint ? BigInt(checkpoint) : new bson.MaxKey();
-     for (let [name, start] of dataBuckets.entries()) {
-       filters.push({
-         _id: {
-           $gt: {
-             g: this.group_id,
-             b: name,
-             o: BigInt(start)
-           },
-           $lte: {
-             g: this.group_id,
-             b: name,
-             o: end as any
-           }
-         }
-       });
-     }
-
-     const limit = options?.limit ?? DEFAULT_DOCUMENT_BATCH_LIMIT;
-     const sizeLimit = options?.chunkLimitBytes ?? DEFAULT_DOCUMENT_CHUNK_LIMIT_BYTES;
-
-     const cursor = this.db.bucket_data.find(
-       {
-         $or: filters
-       },
-       {
-         session: undefined,
-         sort: { _id: 1 },
-         limit: limit,
-         // Increase batch size above the default 101, so that we can fill an entire batch in
-         // one go.
-         batchSize: limit,
-         // Raw mode is returns an array of Buffer instead of parsed documents.
-         // We use it so that:
-         // 1. We can calculate the document size accurately without serializing again.
-         // 2. We can delay parsing the results until it's needed.
-         // We manually use bson.deserialize below
-         raw: true,
-
-         // Since we're using raw: true and parsing ourselves later, we don't need bigint
-         // support here.
-         // Disabling due to https://jira.mongodb.org/browse/NODE-6165, and the fact that this
-         // is one of our most common queries.
-         useBigInt64: false
-       }
-     ) as unknown as mongo.FindCursor<Buffer>;
-
-     // We want to limit results to a single batch to avoid high memory usage.
-     // This approach uses MongoDB's batch limits to limit the data here, which limits
-     // to the lower of the batch count and size limits.
-     // This is similar to using `singleBatch: true` in the find options, but allows
-     // detecting "hasMore".
-     let { data, hasMore } = await readSingleBatch(cursor);
-     if (data.length == limit) {
-       // Limit reached - could have more data, despite the cursor being drained.
-       hasMore = true;
-     }
-
-     let batchSize = 0;
-     let currentBatch: util.SyncBucketData | null = null;
-     let targetOp: bigint | null = null;
-
-     // Ordered by _id, meaning buckets are grouped together
-     for (let rawData of data) {
-       const row = bson.deserialize(rawData, BSON_DESERIALIZE_OPTIONS) as BucketDataDocument;
-       const bucket = row._id.b;
-
-       if (currentBatch == null || currentBatch.bucket != bucket || batchSize >= sizeLimit) {
-         let start: string | undefined = undefined;
-         if (currentBatch != null) {
-           if (currentBatch.bucket == bucket) {
-             currentBatch.has_more = true;
-           }
-
-           const yieldBatch = currentBatch;
-           start = currentBatch.after;
-           currentBatch = null;
-           batchSize = 0;
-           yield { batch: yieldBatch, targetOp: targetOp };
-           targetOp = null;
-         }
-
-         start ??= dataBuckets.get(bucket);
-         if (start == null) {
-           throw new Error(`data for unexpected bucket: ${bucket}`);
-         }
-         currentBatch = {
-           bucket,
-           after: start,
-           has_more: hasMore,
-           data: [],
-           next_after: start
-         };
-         targetOp = null;
-       }
-
-       const entry = mapOpEntry(row);
-
-       if (row.target_op != null) {
-         // MOVE, CLEAR
-         if (targetOp == null || row.target_op > targetOp) {
-           targetOp = row.target_op;
-         }
-       }
-
-       currentBatch.data.push(entry);
-       currentBatch.next_after = entry.op_id;
-
-       batchSize += rawData.byteLength;
-     }
-
-     if (currentBatch != null) {
-       const yieldBatch = currentBatch;
-       currentBatch = null;
-       yield { batch: yieldBatch, targetOp: targetOp };
-       targetOp = null;
-     }
-   }
-
-   async getChecksums(checkpoint: util.OpId, buckets: string[]): Promise<util.ChecksumMap> {
-     return this.checksumCache.getChecksumMap(checkpoint, buckets);
-   }
-
-   private async getChecksumsInternal(batch: FetchPartialBucketChecksum[]): Promise<PartialChecksumMap> {
-     if (batch.length == 0) {
-       return new Map();
-     }
-
-     const filters: any[] = [];
-     for (let request of batch) {
-       filters.push({
-         _id: {
-           $gt: {
-             g: this.group_id,
-             b: request.bucket,
-             o: request.start ? BigInt(request.start) : new bson.MinKey()
-           },
-           $lte: {
-             g: this.group_id,
-             b: request.bucket,
-             o: BigInt(request.end)
-           }
-         }
-       });
-     }
-
-     const aggregate = await this.db.bucket_data
-       .aggregate(
-         [
-           {
-             $match: {
-               $or: filters
-             }
-           },
-           {
-             $group: {
-               _id: '$_id.b',
-               checksum_total: { $sum: '$checksum' },
-               count: { $sum: 1 },
-               has_clear_op: {
-                 $max: {
-                   $cond: [{ $eq: ['$op', 'CLEAR'] }, 1, 0]
-                 }
-               }
-             }
-           }
-         ],
-         { session: undefined, readConcern: 'snapshot' }
-       )
-       .toArray();
-
-     return new Map<string, PartialChecksum>(
-       aggregate.map((doc) => {
-         return [
-           doc._id,
-           {
-             bucket: doc._id,
-             partialCount: doc.count,
-             partialChecksum: Number(BigInt(doc.checksum_total) & 0xffffffffn) & 0xffffffff,
-             isFullChecksum: doc.has_clear_op == 1
-           } satisfies PartialChecksum
-         ];
-       })
-     );
-   }
-
-   async terminate(options?: TerminateOptions) {
-     // Default is to clear the storage except when explicitly requested not to.
-     if (!options || options?.clearStorage) {
-       await this.clear();
-     }
-     await this.db.sync_rules.updateOne(
-       {
-         _id: this.group_id
-       },
-       {
-         $set: {
-           state: SyncRuleState.TERMINATED,
-           persisted_lsn: null,
-           snapshot_done: false
-         }
-       }
-     );
-   }
-
-   async getStatus(): Promise<SyncRuleStatus> {
-     const doc = await this.db.sync_rules.findOne(
-       {
-         _id: this.group_id
-       },
-       {
-         projection: {
-           snapshot_done: 1,
-           last_checkpoint_lsn: 1,
-           state: 1
-         }
-       }
-     );
-     if (doc == null) {
-       throw new Error('Cannot find sync rules status');
-     }
-
-     return {
-       snapshot_done: doc.snapshot_done,
-       active: doc.state == 'ACTIVE',
-       checkpoint_lsn: doc.last_checkpoint_lsn
-     };
-   }
-
-   async clear(): Promise<void> {
-     while (true) {
-       try {
-         await this.clearIteration();
-         return;
-       } catch (e: unknown) {
-         if (e instanceof mongo.MongoServerError && e.codeName == 'MaxTimeMSExpired') {
-           logger.info(
-             `Clearing took longer than ${db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS}ms, waiting and triggering another iteration.`
-           );
-           await timers.setTimeout(db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS / 5);
-           continue;
-         } else {
-           throw e;
-         }
-       }
-     }
-   }
-
-   private async clearIteration(): Promise<void> {
-     // Individual operations here may time out with the maxTimeMS option.
-     // It is expected to still make progress, and continue on the next try.
-
-     await this.db.sync_rules.updateOne(
-       {
-         _id: this.group_id
-       },
-       {
-         $set: {
-           snapshot_done: false,
-           persisted_lsn: null,
-           last_checkpoint_lsn: null,
-           last_checkpoint: null,
-           no_checkpoint_before: null
-         }
-       },
-       { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
-     );
-     await this.db.bucket_data.deleteMany(
-       {
-         _id: idPrefixFilter<BucketDataKey>({ g: this.group_id }, ['b', 'o'])
-       },
-       { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
-     );
-     await this.db.bucket_parameters.deleteMany(
-       {
-         key: idPrefixFilter<SourceKey>({ g: this.group_id }, ['t', 'k'])
-       },
-       { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
-     );
-
-     await this.db.current_data.deleteMany(
-       {
-         _id: idPrefixFilter<SourceKey>({ g: this.group_id }, ['t', 'k'])
-       },
-       { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
-     );
-
-     await this.db.source_tables.deleteMany(
-       {
-         group_id: this.group_id
-       },
-       { maxTimeMS: db.mongo.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
-     );
-   }
-
-   async autoActivate(): Promise<void> {
-     await this.db.client.withSession(async (session) => {
-       await session.withTransaction(async () => {
-         const doc = await this.db.sync_rules.findOne({ _id: this.group_id }, { session });
-         if (doc && doc.state == 'PROCESSING') {
-           await this.db.sync_rules.updateOne(
-             {
-               _id: this.group_id
-             },
-             {
-               $set: {
-                 state: SyncRuleState.ACTIVE
-               }
-             },
-             { session }
-           );
-
-           await this.db.sync_rules.updateMany(
-             {
-               _id: { $ne: this.group_id },
-               state: SyncRuleState.ACTIVE
-             },
-             {
-               $set: {
-                 state: SyncRuleState.STOP
-               }
-             },
-             { session }
-           );
-         }
-       });
-     });
-   }
-
-   async reportError(e: any): Promise<void> {
-     const message = String(e.message ?? 'Replication failure');
-     await this.db.sync_rules.updateOne(
-       {
-         _id: this.group_id
-       },
-       {
-         $set: {
-           last_fatal_error: message
-         }
-       }
-     );
-   }
-
-   async compact(options?: CompactOptions) {
-     return new MongoCompactor(this.db, this.group_id, options).compact();
-   }
- }
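
Note on the checksum handling in the removed getChecksumsInternal above: per-operation checksums are summed in a MongoDB $group stage, and the possibly 64-bit total is then folded down to a signed 32-bit value. The following TypeScript sketch only illustrates that folding expression; the foldChecksum helper is hypothetical and not part of the package.

// Hypothetical helper mirroring the expression
// Number(BigInt(doc.checksum_total) & 0xffffffffn) & 0xffffffff used above.
function foldChecksum(checksumTotal: bigint): number {
  // Keep only the low 32 bits of the sum, then coerce to a signed 32-bit integer.
  return Number(checksumTotal & 0xffffffffn) & 0xffffffff;
}

// Example: two per-operation checksums whose sum overflows 32 bits.
const total = 0xfffffff0n + 0x20n; // 0x1_0000_0010
console.log(foldChecksum(total)); // 16, i.e. the sum modulo 2^32
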
package/src/storage/mongo/MongoSyncRulesLock.ts (deleted)
@@ -1,85 +0,0 @@
- import crypto from 'crypto';
-
- import { PersistedSyncRulesContent, ReplicationLock } from '../BucketStorage.js';
- import { PowerSyncMongo } from './db.js';
- import { logger } from '@powersync/lib-services-framework';
-
- /**
-  * Manages a lock on a sync rules document, so that only one process
-  * replicates those sync rules at a time.
-  */
- export class MongoSyncRulesLock implements ReplicationLock {
-   private readonly refreshInterval: NodeJS.Timeout;
-
-   static async createLock(db: PowerSyncMongo, sync_rules: PersistedSyncRulesContent): Promise<MongoSyncRulesLock> {
-     const lockId = crypto.randomBytes(8).toString('hex');
-     const doc = await db.sync_rules.findOneAndUpdate(
-       { _id: sync_rules.id, $or: [{ lock: null }, { 'lock.expires_at': { $lt: new Date() } }] },
-       {
-         $set: {
-           lock: {
-             id: lockId,
-             expires_at: new Date(Date.now() + 60 * 1000)
-           }
-         }
-       },
-       {
-         projection: { lock: 1 },
-         returnDocument: 'before'
-       }
-     );
-
-     if (doc == null) {
-       throw new Error(`Sync rules: ${sync_rules.id} have been locked by another process for replication.`);
-     }
-     return new MongoSyncRulesLock(db, sync_rules.id, lockId);
-   }
-
-   constructor(
-     private db: PowerSyncMongo,
-     public sync_rules_id: number,
-     private lock_id: string
-   ) {
-     this.refreshInterval = setInterval(async () => {
-       try {
-         await this.refresh();
-       } catch (e) {
-         logger.error('Failed to refresh lock', e);
-         clearInterval(this.refreshInterval);
-       }
-     }, 30_130);
-   }
-
-   async release(): Promise<void> {
-     clearInterval(this.refreshInterval);
-     const result = await this.db.sync_rules.updateOne(
-       {
-         _id: this.sync_rules_id,
-         'lock.id': this.lock_id
-       },
-       {
-         $unset: { lock: 1 }
-       }
-     );
-     if (result.modifiedCount == 0) {
-       // Log and ignore
-       logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`);
-     }
-   }
-
-   private async refresh(): Promise<void> {
-     const result = await this.db.sync_rules.findOneAndUpdate(
-       {
-         _id: this.sync_rules_id,
-         'lock.id': this.lock_id
-       },
-       {
-         $set: { 'lock.expires_at': new Date(Date.now() + 60 * 1000) }
-       },
-       { returnDocument: 'after' }
-     );
-     if (result == null) {
-       throw new Error(`Lock not held anymore: ${this.sync_rules_id}/${this.lock_id}`);
-     }
-   }
- }
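
For context on the removed MongoSyncRulesLock: it claims an exclusive lock document with a 60-second expiry, refreshes that expiry on a timer roughly every 30 seconds, and releases the lock when replication stops. The TypeScript sketch below only shows how such a lock would typically be used; replicateWithLock is a hypothetical function name, while MongoSyncRulesLock, PowerSyncMongo, and PersistedSyncRulesContent are the types from the deleted file above.

// Usage sketch only, based on the class as it appears in the diff above.
async function replicateWithLock(db: PowerSyncMongo, syncRules: PersistedSyncRulesContent) {
  // Throws if another process currently holds a non-expired lock on these sync rules.
  const lock = await MongoSyncRulesLock.createLock(db, syncRules);
  try {
    // ... replicate while the lock refreshes its expires_at roughly every 30 seconds ...
  } finally {
    // Stops the refresh timer and unsets the lock document so another process can take over.
    await lock.release();
  }
}
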