@powersync/service-core 0.13.0 → 0.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (181)
  1. package/CHANGELOG.md +31 -0
  2. package/dist/entry/commands/compact-action.js +14 -14
  3. package/dist/entry/commands/compact-action.js.map +1 -1
  4. package/dist/entry/commands/migrate-action.js +15 -4
  5. package/dist/entry/commands/migrate-action.js.map +1 -1
  6. package/dist/index.d.ts +1 -3
  7. package/dist/index.js +1 -3
  8. package/dist/index.js.map +1 -1
  9. package/dist/migrations/PowerSyncMigrationManager.d.ts +17 -0
  10. package/dist/migrations/PowerSyncMigrationManager.js +21 -0
  11. package/dist/migrations/PowerSyncMigrationManager.js.map +1 -0
  12. package/dist/migrations/ensure-automatic-migrations.d.ts +4 -0
  13. package/dist/migrations/ensure-automatic-migrations.js +14 -0
  14. package/dist/migrations/ensure-automatic-migrations.js.map +1 -0
  15. package/dist/migrations/migrations-index.d.ts +2 -3
  16. package/dist/migrations/migrations-index.js +2 -3
  17. package/dist/migrations/migrations-index.js.map +1 -1
  18. package/dist/routes/configure-fastify.d.ts +12 -12
  19. package/dist/routes/endpoints/admin.d.ts +24 -24
  20. package/dist/storage/BucketStorage.d.ts +51 -3
  21. package/dist/storage/BucketStorage.js +26 -0
  22. package/dist/storage/BucketStorage.js.map +1 -1
  23. package/dist/storage/bson.d.ts +24 -0
  24. package/dist/storage/bson.js +73 -0
  25. package/dist/storage/bson.js.map +1 -0
  26. package/dist/storage/storage-index.d.ts +3 -14
  27. package/dist/storage/storage-index.js +3 -14
  28. package/dist/storage/storage-index.js.map +1 -1
  29. package/dist/sync/sync.js +3 -1
  30. package/dist/sync/sync.js.map +1 -1
  31. package/dist/system/ServiceContext.d.ts +3 -0
  32. package/dist/system/ServiceContext.js +11 -3
  33. package/dist/system/ServiceContext.js.map +1 -1
  34. package/dist/util/config/types.d.ts +2 -2
  35. package/dist/util/utils.d.ts +17 -1
  36. package/dist/util/utils.js +49 -1
  37. package/dist/util/utils.js.map +1 -1
  38. package/package.json +7 -8
  39. package/src/entry/commands/compact-action.ts +19 -14
  40. package/src/entry/commands/migrate-action.ts +17 -4
  41. package/src/index.ts +1 -4
  42. package/src/migrations/PowerSyncMigrationManager.ts +42 -0
  43. package/src/migrations/ensure-automatic-migrations.ts +15 -0
  44. package/src/migrations/migrations-index.ts +2 -3
  45. package/src/storage/BucketStorage.ts +59 -3
  46. package/src/storage/bson.ts +78 -0
  47. package/src/storage/storage-index.ts +3 -15
  48. package/src/sync/sync.ts +3 -1
  49. package/src/system/ServiceContext.ts +17 -4
  50. package/src/util/config/types.ts +2 -2
  51. package/src/util/utils.ts +47 -1
  52. package/test/src/env.ts +0 -1
  53. package/tsconfig.tsbuildinfo +1 -1
  54. package/dist/db/db-index.d.ts +0 -1
  55. package/dist/db/db-index.js +0 -2
  56. package/dist/db/db-index.js.map +0 -1
  57. package/dist/db/mongo.d.ts +0 -35
  58. package/dist/db/mongo.js +0 -73
  59. package/dist/db/mongo.js.map +0 -1
  60. package/dist/locks/LockManager.d.ts +0 -10
  61. package/dist/locks/LockManager.js +0 -7
  62. package/dist/locks/LockManager.js.map +0 -1
  63. package/dist/locks/MongoLocks.d.ts +0 -36
  64. package/dist/locks/MongoLocks.js +0 -81
  65. package/dist/locks/MongoLocks.js.map +0 -1
  66. package/dist/locks/locks-index.d.ts +0 -2
  67. package/dist/locks/locks-index.js +0 -3
  68. package/dist/locks/locks-index.js.map +0 -1
  69. package/dist/migrations/db/migrations/1684951997326-init.d.ts +0 -3
  70. package/dist/migrations/db/migrations/1684951997326-init.js +0 -33
  71. package/dist/migrations/db/migrations/1684951997326-init.js.map +0 -1
  72. package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.d.ts +0 -2
  73. package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js +0 -5
  74. package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js.map +0 -1
  75. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.d.ts +0 -3
  76. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +0 -56
  77. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +0 -1
  78. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.d.ts +0 -3
  79. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js +0 -29
  80. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +0 -1
  81. package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.d.ts +0 -3
  82. package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js +0 -31
  83. package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js.map +0 -1
  84. package/dist/migrations/definitions.d.ts +0 -18
  85. package/dist/migrations/definitions.js +0 -6
  86. package/dist/migrations/definitions.js.map +0 -1
  87. package/dist/migrations/executor.d.ts +0 -16
  88. package/dist/migrations/executor.js +0 -64
  89. package/dist/migrations/executor.js.map +0 -1
  90. package/dist/migrations/migrations.d.ts +0 -18
  91. package/dist/migrations/migrations.js +0 -110
  92. package/dist/migrations/migrations.js.map +0 -1
  93. package/dist/migrations/store/migration-store.d.ts +0 -11
  94. package/dist/migrations/store/migration-store.js +0 -46
  95. package/dist/migrations/store/migration-store.js.map +0 -1
  96. package/dist/storage/MongoBucketStorage.d.ts +0 -48
  97. package/dist/storage/MongoBucketStorage.js +0 -427
  98. package/dist/storage/MongoBucketStorage.js.map +0 -1
  99. package/dist/storage/mongo/MongoBucketBatch.d.ts +0 -74
  100. package/dist/storage/mongo/MongoBucketBatch.js +0 -683
  101. package/dist/storage/mongo/MongoBucketBatch.js.map +0 -1
  102. package/dist/storage/mongo/MongoCompactor.d.ts +0 -40
  103. package/dist/storage/mongo/MongoCompactor.js +0 -310
  104. package/dist/storage/mongo/MongoCompactor.js.map +0 -1
  105. package/dist/storage/mongo/MongoIdSequence.d.ts +0 -12
  106. package/dist/storage/mongo/MongoIdSequence.js +0 -21
  107. package/dist/storage/mongo/MongoIdSequence.js.map +0 -1
  108. package/dist/storage/mongo/MongoPersistedSyncRules.d.ts +0 -9
  109. package/dist/storage/mongo/MongoPersistedSyncRules.js +0 -9
  110. package/dist/storage/mongo/MongoPersistedSyncRules.js.map +0 -1
  111. package/dist/storage/mongo/MongoPersistedSyncRulesContent.d.ts +0 -20
  112. package/dist/storage/mongo/MongoPersistedSyncRulesContent.js +0 -26
  113. package/dist/storage/mongo/MongoPersistedSyncRulesContent.js.map +0 -1
  114. package/dist/storage/mongo/MongoStorageProvider.d.ts +0 -5
  115. package/dist/storage/mongo/MongoStorageProvider.js +0 -26
  116. package/dist/storage/mongo/MongoStorageProvider.js.map +0 -1
  117. package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +0 -38
  118. package/dist/storage/mongo/MongoSyncBucketStorage.js +0 -534
  119. package/dist/storage/mongo/MongoSyncBucketStorage.js.map +0 -1
  120. package/dist/storage/mongo/MongoSyncRulesLock.d.ts +0 -16
  121. package/dist/storage/mongo/MongoSyncRulesLock.js +0 -65
  122. package/dist/storage/mongo/MongoSyncRulesLock.js.map +0 -1
  123. package/dist/storage/mongo/MongoWriteCheckpointAPI.d.ts +0 -20
  124. package/dist/storage/mongo/MongoWriteCheckpointAPI.js +0 -104
  125. package/dist/storage/mongo/MongoWriteCheckpointAPI.js.map +0 -1
  126. package/dist/storage/mongo/OperationBatch.d.ts +0 -35
  127. package/dist/storage/mongo/OperationBatch.js +0 -119
  128. package/dist/storage/mongo/OperationBatch.js.map +0 -1
  129. package/dist/storage/mongo/PersistedBatch.d.ts +0 -46
  130. package/dist/storage/mongo/PersistedBatch.js +0 -223
  131. package/dist/storage/mongo/PersistedBatch.js.map +0 -1
  132. package/dist/storage/mongo/config.d.ts +0 -19
  133. package/dist/storage/mongo/config.js +0 -26
  134. package/dist/storage/mongo/config.js.map +0 -1
  135. package/dist/storage/mongo/db.d.ts +0 -36
  136. package/dist/storage/mongo/db.js +0 -47
  137. package/dist/storage/mongo/db.js.map +0 -1
  138. package/dist/storage/mongo/models.d.ts +0 -163
  139. package/dist/storage/mongo/models.js +0 -27
  140. package/dist/storage/mongo/models.js.map +0 -1
  141. package/dist/storage/mongo/util.d.ts +0 -54
  142. package/dist/storage/mongo/util.js +0 -190
  143. package/dist/storage/mongo/util.js.map +0 -1
  144. package/src/db/db-index.ts +0 -1
  145. package/src/db/mongo.ts +0 -81
  146. package/src/locks/LockManager.ts +0 -16
  147. package/src/locks/MongoLocks.ts +0 -142
  148. package/src/locks/locks-index.ts +0 -2
  149. package/src/migrations/db/migrations/1684951997326-init.ts +0 -38
  150. package/src/migrations/db/migrations/1688556755264-initial-sync-rules.ts +0 -5
  151. package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +0 -102
  152. package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +0 -34
  153. package/src/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.ts +0 -37
  154. package/src/migrations/definitions.ts +0 -21
  155. package/src/migrations/executor.ts +0 -87
  156. package/src/migrations/migrations.ts +0 -142
  157. package/src/migrations/store/migration-store.ts +0 -63
  158. package/src/storage/MongoBucketStorage.ts +0 -541
  159. package/src/storage/mongo/MongoBucketBatch.ts +0 -900
  160. package/src/storage/mongo/MongoCompactor.ts +0 -393
  161. package/src/storage/mongo/MongoIdSequence.ts +0 -24
  162. package/src/storage/mongo/MongoPersistedSyncRules.ts +0 -16
  163. package/src/storage/mongo/MongoPersistedSyncRulesContent.ts +0 -50
  164. package/src/storage/mongo/MongoStorageProvider.ts +0 -31
  165. package/src/storage/mongo/MongoSyncBucketStorage.ts +0 -640
  166. package/src/storage/mongo/MongoSyncRulesLock.ts +0 -85
  167. package/src/storage/mongo/MongoWriteCheckpointAPI.ts +0 -154
  168. package/src/storage/mongo/OperationBatch.ts +0 -131
  169. package/src/storage/mongo/PersistedBatch.ts +0 -285
  170. package/src/storage/mongo/config.ts +0 -40
  171. package/src/storage/mongo/db.ts +0 -88
  172. package/src/storage/mongo/models.ts +0 -187
  173. package/src/storage/mongo/util.ts +0 -203
  174. package/test/src/__snapshots__/sync.test.ts.snap +0 -332
  175. package/test/src/bucket_validation.test.ts +0 -143
  176. package/test/src/bucket_validation.ts +0 -60
  177. package/test/src/compacting.test.ts +0 -295
  178. package/test/src/data_storage.test.ts +0 -1569
  179. package/test/src/stream_utils.ts +0 -42
  180. package/test/src/sync.test.ts +0 -511
  181. package/test/src/util.ts +0 -150
@@ -1,85 +0,0 @@
1
- import crypto from 'crypto';
2
-
3
- import { PersistedSyncRulesContent, ReplicationLock } from '../BucketStorage.js';
4
- import { PowerSyncMongo } from './db.js';
5
- import { logger } from '@powersync/lib-services-framework';
6
-
7
- /**
8
- * Manages a lock on a sync rules document, so that only one process
9
- * replicates those sync rules at a time.
10
- */
11
- export class MongoSyncRulesLock implements ReplicationLock {
12
- private readonly refreshInterval: NodeJS.Timeout;
13
-
14
- static async createLock(db: PowerSyncMongo, sync_rules: PersistedSyncRulesContent): Promise<MongoSyncRulesLock> {
15
- const lockId = crypto.randomBytes(8).toString('hex');
16
- const doc = await db.sync_rules.findOneAndUpdate(
17
- { _id: sync_rules.id, $or: [{ lock: null }, { 'lock.expires_at': { $lt: new Date() } }] },
18
- {
19
- $set: {
20
- lock: {
21
- id: lockId,
22
- expires_at: new Date(Date.now() + 60 * 1000)
23
- }
24
- }
25
- },
26
- {
27
- projection: { lock: 1 },
28
- returnDocument: 'before'
29
- }
30
- );
31
-
32
- if (doc == null) {
33
- throw new Error(`Sync rules: ${sync_rules.id} have been locked by another process for replication.`);
34
- }
35
- return new MongoSyncRulesLock(db, sync_rules.id, lockId);
36
- }
37
-
38
- constructor(
39
- private db: PowerSyncMongo,
40
- public sync_rules_id: number,
41
- private lock_id: string
42
- ) {
43
- this.refreshInterval = setInterval(async () => {
44
- try {
45
- await this.refresh();
46
- } catch (e) {
47
- logger.error('Failed to refresh lock', e);
48
- clearInterval(this.refreshInterval);
49
- }
50
- }, 30_130);
51
- }
52
-
53
- async release(): Promise<void> {
54
- clearInterval(this.refreshInterval);
55
- const result = await this.db.sync_rules.updateOne(
56
- {
57
- _id: this.sync_rules_id,
58
- 'lock.id': this.lock_id
59
- },
60
- {
61
- $unset: { lock: 1 }
62
- }
63
- );
64
- if (result.modifiedCount == 0) {
65
- // Log and ignore
66
- logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`);
67
- }
68
- }
69
-
70
- private async refresh(): Promise<void> {
71
- const result = await this.db.sync_rules.findOneAndUpdate(
72
- {
73
- _id: this.sync_rules_id,
74
- 'lock.id': this.lock_id
75
- },
76
- {
77
- $set: { 'lock.expires_at': new Date(Date.now() + 60 * 1000) }
78
- },
79
- { returnDocument: 'after' }
80
- );
81
- if (result == null) {
82
- throw new Error(`Lock not held anymore: ${this.sync_rules_id}/${this.lock_id}`);
83
- }
84
- }
85
- }
@@ -1,154 +0,0 @@
1
- import * as framework from '@powersync/lib-services-framework';
2
- import {
3
- CustomWriteCheckpointFilters,
4
- CustomWriteCheckpointOptions,
5
- LastWriteCheckpointFilters,
6
- ManagedWriteCheckpointFilters,
7
- ManagedWriteCheckpointOptions,
8
- WriteCheckpointAPI,
9
- WriteCheckpointMode
10
- } from '../WriteCheckpointAPI.js';
11
- import { PowerSyncMongo } from './db.js';
12
- import { safeBulkWrite } from './util.js';
13
-
14
- export type MongoCheckpointAPIOptions = {
15
- db: PowerSyncMongo;
16
- mode: WriteCheckpointMode;
17
- };
18
-
19
- export class MongoWriteCheckpointAPI implements WriteCheckpointAPI {
20
- readonly db: PowerSyncMongo;
21
- private _mode: WriteCheckpointMode;
22
-
23
- constructor(options: MongoCheckpointAPIOptions) {
24
- this.db = options.db;
25
- this._mode = options.mode;
26
- }
27
-
28
- get writeCheckpointMode() {
29
- return this._mode;
30
- }
31
-
32
- setWriteCheckpointMode(mode: WriteCheckpointMode): void {
33
- this._mode = mode;
34
- }
35
-
36
- async batchCreateCustomWriteCheckpoints(checkpoints: CustomWriteCheckpointOptions[]): Promise<void> {
37
- return batchCreateCustomWriteCheckpoints(this.db, checkpoints);
38
- }
39
-
40
- async createCustomWriteCheckpoint(options: CustomWriteCheckpointOptions): Promise<bigint> {
41
- if (this.writeCheckpointMode !== WriteCheckpointMode.CUSTOM) {
42
- throw new framework.errors.ValidationError(
43
- `Creating a custom Write Checkpoint when the current Write Checkpoint mode is set to "${this.writeCheckpointMode}"`
44
- );
45
- }
46
-
47
- const { checkpoint, user_id, sync_rules_id } = options;
48
- const doc = await this.db.custom_write_checkpoints.findOneAndUpdate(
49
- {
50
- user_id: user_id,
51
- sync_rules_id
52
- },
53
- {
54
- $set: {
55
- checkpoint
56
- }
57
- },
58
- { upsert: true, returnDocument: 'after' }
59
- );
60
- return doc!.checkpoint;
61
- }
62
-
63
- async createManagedWriteCheckpoint(checkpoint: ManagedWriteCheckpointOptions): Promise<bigint> {
64
- if (this.writeCheckpointMode !== WriteCheckpointMode.MANAGED) {
65
- throw new framework.errors.ValidationError(
66
- `Attempting to create a managed Write Checkpoint when the current Write Checkpoint mode is set to "${this.writeCheckpointMode}"`
67
- );
68
- }
69
-
70
- const { user_id, heads: lsns } = checkpoint;
71
- const doc = await this.db.write_checkpoints.findOneAndUpdate(
72
- {
73
- user_id: user_id
74
- },
75
- {
76
- $set: {
77
- lsns
78
- },
79
- $inc: {
80
- client_id: 1n
81
- }
82
- },
83
- { upsert: true, returnDocument: 'after' }
84
- );
85
- return doc!.client_id;
86
- }
87
-
88
- async lastWriteCheckpoint(filters: LastWriteCheckpointFilters): Promise<bigint | null> {
89
- switch (this.writeCheckpointMode) {
90
- case WriteCheckpointMode.CUSTOM:
91
- if (false == 'sync_rules_id' in filters) {
92
- throw new framework.errors.ValidationError(`Sync rules ID is required for custom Write Checkpoint filtering`);
93
- }
94
- return this.lastCustomWriteCheckpoint(filters);
95
- case WriteCheckpointMode.MANAGED:
96
- if (false == 'heads' in filters) {
97
- throw new framework.errors.ValidationError(
98
- `Replication HEAD is required for managed Write Checkpoint filtering`
99
- );
100
- }
101
- return this.lastManagedWriteCheckpoint(filters);
102
- }
103
- }
104
-
105
- protected async lastCustomWriteCheckpoint(filters: CustomWriteCheckpointFilters) {
106
- const { user_id, sync_rules_id } = filters;
107
- const lastWriteCheckpoint = await this.db.custom_write_checkpoints.findOne({
108
- user_id,
109
- sync_rules_id
110
- });
111
- return lastWriteCheckpoint?.checkpoint ?? null;
112
- }
113
-
114
- protected async lastManagedWriteCheckpoint(filters: ManagedWriteCheckpointFilters) {
115
- const { user_id, heads } = filters;
116
- // TODO: support multiple heads when we need to support multiple connections
117
- const lsn = heads['1'];
118
- if (lsn == null) {
119
- // Can happen if we haven't replicated anything yet.
120
- return null;
121
- }
122
- const lastWriteCheckpoint = await this.db.write_checkpoints.findOne({
123
- user_id: user_id,
124
- 'lsns.1': { $lte: lsn }
125
- });
126
- return lastWriteCheckpoint?.client_id ?? null;
127
- }
128
- }
129
-
130
- export async function batchCreateCustomWriteCheckpoints(
131
- db: PowerSyncMongo,
132
- checkpoints: CustomWriteCheckpointOptions[]
133
- ): Promise<void> {
134
- if (!checkpoints.length) {
135
- return;
136
- }
137
-
138
- await safeBulkWrite(
139
- db.custom_write_checkpoints,
140
- checkpoints.map((checkpointOptions) => ({
141
- updateOne: {
142
- filter: { user_id: checkpointOptions.user_id, sync_rules_id: checkpointOptions.sync_rules_id },
143
- update: {
144
- $set: {
145
- checkpoint: checkpointOptions.checkpoint,
146
- sync_rules_id: checkpointOptions.sync_rules_id
147
- }
148
- },
149
- upsert: true
150
- }
151
- })),
152
- {}
153
- );
154
- }
@@ -1,131 +0,0 @@
1
- import { ToastableSqliteRow } from '@powersync/service-sync-rules';
2
- import * as bson from 'bson';
3
-
4
- import { SaveOptions } from '../BucketStorage.js';
5
- import { isUUID } from './util.js';
6
- import { ReplicaId } from './models.js';
7
-
8
- /**
9
- * Maximum number of operations in a batch.
10
- */
11
- const MAX_BATCH_COUNT = 2000;
12
-
13
- /**
14
- * Maximum size of operations in the batch (estimated).
15
- */
16
- const MAX_RECORD_BATCH_SIZE = 5_000_000;
17
-
18
- /**
19
- * Maximum size of size of current_data documents we lookup at a time.
20
- */
21
- const MAX_CURRENT_DATA_BATCH_SIZE = 16_000_000;
22
-
23
- /**
24
- * Batch of input operations.
25
- *
26
- * We accumulate operations up to MAX_RECORD_BATCH_SIZE,
27
- * then further split into sub-batches if MAX_CURRENT_DATA_BATCH_SIZE is exceeded.
28
- */
29
- export class OperationBatch {
30
- batch: RecordOperation[] = [];
31
- currentSize: number = 0;
32
-
33
- get length() {
34
- return this.batch.length;
35
- }
36
-
37
- push(op: RecordOperation) {
38
- this.batch.push(op);
39
- this.currentSize += op.estimatedSize;
40
- }
41
-
42
- shouldFlush() {
43
- return this.batch.length >= MAX_BATCH_COUNT || this.currentSize > MAX_RECORD_BATCH_SIZE;
44
- }
45
-
46
- /**
47
- *
48
- * @param sizes Map of source key to estimated size of the current_data document, or undefined if current_data is not persisted.
49
- *
50
- */
51
- *batched(sizes: Map<string, number> | undefined): Generator<RecordOperation[]> {
52
- if (sizes == null) {
53
- yield this.batch;
54
- return;
55
- }
56
- let currentBatch: RecordOperation[] = [];
57
- let currentBatchSize = 0;
58
- for (let op of this.batch) {
59
- const key = op.internalBeforeKey;
60
- const size = sizes.get(key) ?? 0;
61
- if (currentBatchSize + size > MAX_CURRENT_DATA_BATCH_SIZE && currentBatch.length > 0) {
62
- yield currentBatch;
63
- currentBatch = [];
64
- currentBatchSize = 0;
65
- }
66
- currentBatchSize += size;
67
- currentBatch.push(op);
68
- }
69
- if (currentBatch.length > 0) {
70
- yield currentBatch;
71
- }
72
- }
73
- }
74
-
75
- export class RecordOperation {
76
- public readonly afterId: ReplicaId | null;
77
- public readonly beforeId: ReplicaId;
78
- public readonly internalBeforeKey: string;
79
- public readonly internalAfterKey: string | null;
80
- public readonly estimatedSize: number;
81
-
82
- constructor(public readonly record: SaveOptions) {
83
- const afterId = record.afterReplicaId ?? null;
84
- const beforeId = record.beforeReplicaId ?? record.afterReplicaId;
85
- this.afterId = afterId;
86
- this.beforeId = beforeId;
87
- this.internalBeforeKey = cacheKey(record.sourceTable.id, beforeId);
88
- this.internalAfterKey = afterId ? cacheKey(record.sourceTable.id, afterId) : null;
89
-
90
- this.estimatedSize = estimateRowSize(record.before) + estimateRowSize(record.after);
91
- }
92
- }
93
-
94
- /**
95
- * In-memory cache key - must not be persisted.
96
- */
97
- export function cacheKey(table: bson.ObjectId, id: ReplicaId) {
98
- if (isUUID(id)) {
99
- return `${table.toHexString()}.${id.toHexString()}`;
100
- } else if (typeof id == 'string') {
101
- return `${table.toHexString()}.${id}`;
102
- } else {
103
- return `${table.toHexString()}.${(bson.serialize({ id: id }) as Buffer).toString('base64')}`;
104
- }
105
- }
106
-
107
- /**
108
- * Estimate in-memory size of row.
109
- */
110
- function estimateRowSize(record: ToastableSqliteRow | undefined) {
111
- if (record == null) {
112
- return 12;
113
- }
114
- let size = 0;
115
- for (let [key, value] of Object.entries(record)) {
116
- size += 12 + key.length;
117
- // number | string | null | bigint | Uint8Array
118
- if (value == null) {
119
- size += 4;
120
- } else if (typeof value == 'number') {
121
- size += 8;
122
- } else if (typeof value == 'bigint') {
123
- size += 8;
124
- } else if (typeof value == 'string') {
125
- size += value.length;
126
- } else if (value instanceof Uint8Array) {
127
- size += value.byteLength;
128
- }
129
- }
130
- return size;
131
- }
@@ -1,285 +0,0 @@
1
- import { JSONBig } from '@powersync/service-jsonbig';
2
- import { EvaluatedParameters, EvaluatedRow } from '@powersync/service-sync-rules';
3
- import * as bson from 'bson';
4
- import * as mongo from 'mongodb';
5
-
6
- import * as util from '../../util/util-index.js';
7
- import { SourceTable } from '../SourceTable.js';
8
- import { currentBucketKey } from './MongoBucketBatch.js';
9
- import { MongoIdSequence } from './MongoIdSequence.js';
10
- import { PowerSyncMongo } from './db.js';
11
- import {
12
- BucketDataDocument,
13
- BucketParameterDocument,
14
- CurrentBucket,
15
- CurrentDataDocument,
16
- SourceKey,
17
- ReplicaId
18
- } from './models.js';
19
- import { replicaIdToSubkey, safeBulkWrite, serializeLookup } from './util.js';
20
- import { logger } from '@powersync/lib-services-framework';
21
-
22
- /**
23
- * Maximum size of operations we write in a single transaction.
24
- *
25
- * It's tricky to find the exact limit, but from experience, over 100MB
26
- * can cause an error:
27
- * > transaction is too large and will not fit in the storage engine cache
28
- *
29
- * Additionally, unbounded size here can balloon our memory usage in some edge
30
- * cases.
31
- *
32
- * When we reach this threshold, we commit the transaction and start a new one.
33
- */
34
- const MAX_TRANSACTION_BATCH_SIZE = 30_000_000;
35
-
36
- /**
37
- * Limit number of documents to write in a single transaction.
38
- *
39
- * This has an effect on error message size in some cases.
40
- */
41
- const MAX_TRANSACTION_DOC_COUNT = 2_000;
42
-
43
- /**
44
- * Keeps track of bulkwrite operations within a transaction.
45
- *
46
- * There may be multiple of these batches per transaction, but it may not span
47
- * multiple transactions.
48
- */
49
- export class PersistedBatch {
50
- bucketData: mongo.AnyBulkWriteOperation<BucketDataDocument>[] = [];
51
- bucketParameters: mongo.AnyBulkWriteOperation<BucketParameterDocument>[] = [];
52
- currentData: mongo.AnyBulkWriteOperation<CurrentDataDocument>[] = [];
53
-
54
- /**
55
- * For debug logging only.
56
- */
57
- debugLastOpId: bigint | null = null;
58
-
59
- /**
60
- * Very rough estimate of transaction size.
61
- */
62
- currentSize = 0;
63
-
64
- constructor(
65
- private group_id: number,
66
- writtenSize: number
67
- ) {
68
- this.currentSize = writtenSize;
69
- }
70
-
71
- saveBucketData(options: {
72
- op_seq: MongoIdSequence;
73
- sourceKey: ReplicaId;
74
- table: SourceTable;
75
- evaluated: EvaluatedRow[];
76
- before_buckets: CurrentBucket[];
77
- }) {
78
- const remaining_buckets = new Map<string, CurrentBucket>();
79
- for (let b of options.before_buckets) {
80
- const key = currentBucketKey(b);
81
- remaining_buckets.set(key, b);
82
- }
83
-
84
- const dchecksum = util.hashDelete(replicaIdToSubkey(options.table.id, options.sourceKey));
85
-
86
- for (let k of options.evaluated) {
87
- const key = currentBucketKey(k);
88
- remaining_buckets.delete(key);
89
-
90
- // INSERT
91
- const recordData = JSONBig.stringify(k.data);
92
- const checksum = util.hashData(k.table, k.id, recordData);
93
- this.currentSize += recordData.length + 200;
94
-
95
- const op_id = options.op_seq.next();
96
- this.debugLastOpId = op_id;
97
-
98
- this.bucketData.push({
99
- insertOne: {
100
- document: {
101
- _id: {
102
- g: this.group_id,
103
- b: k.bucket,
104
- o: op_id
105
- },
106
- op: 'PUT',
107
- source_table: options.table.id,
108
- source_key: options.sourceKey,
109
- table: k.table,
110
- row_id: k.id,
111
- checksum: checksum,
112
- data: recordData
113
- }
114
- }
115
- });
116
- }
117
-
118
- for (let bd of remaining_buckets.values()) {
119
- // REMOVE
120
-
121
- const op_id = options.op_seq.next();
122
- this.debugLastOpId = op_id;
123
-
124
- this.bucketData.push({
125
- insertOne: {
126
- document: {
127
- _id: {
128
- g: this.group_id,
129
- b: bd.bucket,
130
- o: op_id
131
- },
132
- op: 'REMOVE',
133
- source_table: options.table.id,
134
- source_key: options.sourceKey,
135
- table: bd.table,
136
- row_id: bd.id,
137
- checksum: dchecksum,
138
- data: null
139
- }
140
- }
141
- });
142
- this.currentSize += 200;
143
- }
144
- }
145
-
146
- saveParameterData(data: {
147
- op_seq: MongoIdSequence;
148
- sourceKey: ReplicaId;
149
- sourceTable: SourceTable;
150
- evaluated: EvaluatedParameters[];
151
- existing_lookups: bson.Binary[];
152
- }) {
153
- // This is similar to saving bucket data.
154
- // A key difference is that we don't need to keep the history intact.
155
- // We do need to keep track of recent history though - enough that we can get consistent data for any specific checkpoint.
156
- // Instead of storing per bucket id, we store per "lookup".
157
- // A key difference is that we don't need to store or keep track of anything per-bucket - the entire record is
158
- // either persisted or removed.
159
- // We also don't need to keep history intact.
160
- const { sourceTable, sourceKey, evaluated } = data;
161
-
162
- const remaining_lookups = new Map<string, bson.Binary>();
163
- for (let l of data.existing_lookups) {
164
- remaining_lookups.set(l.toString('base64'), l);
165
- }
166
-
167
- // 1. Insert new entries
168
- for (let result of evaluated) {
169
- const binLookup = serializeLookup(result.lookup);
170
- const hex = binLookup.toString('base64');
171
- remaining_lookups.delete(hex);
172
-
173
- const op_id = data.op_seq.next();
174
- this.debugLastOpId = op_id;
175
- this.bucketParameters.push({
176
- insertOne: {
177
- document: {
178
- _id: op_id,
179
- key: {
180
- g: this.group_id,
181
- t: sourceTable.id,
182
- k: sourceKey
183
- },
184
- lookup: binLookup,
185
- bucket_parameters: result.bucket_parameters
186
- }
187
- }
188
- });
189
-
190
- this.currentSize += 200;
191
- }
192
-
193
- // 2. "REMOVE" entries for any lookup not touched.
194
- for (let lookup of remaining_lookups.values()) {
195
- const op_id = data.op_seq.next();
196
- this.debugLastOpId = op_id;
197
- this.bucketParameters.push({
198
- insertOne: {
199
- document: {
200
- _id: op_id,
201
- key: {
202
- g: this.group_id,
203
- t: sourceTable.id,
204
- k: sourceKey
205
- },
206
- lookup: lookup,
207
- bucket_parameters: []
208
- }
209
- }
210
- });
211
-
212
- this.currentSize += 200;
213
- }
214
- }
215
-
216
- deleteCurrentData(id: SourceKey) {
217
- const op: mongo.AnyBulkWriteOperation<CurrentDataDocument> = {
218
- deleteOne: {
219
- filter: { _id: id }
220
- }
221
- };
222
- this.currentData.push(op);
223
- this.currentSize += 50;
224
- }
225
-
226
- upsertCurrentData(id: SourceKey, values: Partial<CurrentDataDocument>) {
227
- const op: mongo.AnyBulkWriteOperation<CurrentDataDocument> = {
228
- updateOne: {
229
- filter: { _id: id },
230
- update: {
231
- $set: values
232
- },
233
- upsert: true
234
- }
235
- };
236
- this.currentData.push(op);
237
- this.currentSize += (values.data?.length() ?? 0) + 100;
238
- }
239
-
240
- shouldFlushTransaction() {
241
- return (
242
- this.currentSize >= MAX_TRANSACTION_BATCH_SIZE ||
243
- this.bucketData.length >= MAX_TRANSACTION_DOC_COUNT ||
244
- this.currentData.length >= MAX_TRANSACTION_DOC_COUNT ||
245
- this.bucketParameters.length >= MAX_TRANSACTION_DOC_COUNT
246
- );
247
- }
248
-
249
- async flush(db: PowerSyncMongo, session: mongo.ClientSession) {
250
- if (this.bucketData.length > 0) {
251
- // calculate total size
252
- await safeBulkWrite(db.bucket_data, this.bucketData, {
253
- session,
254
- // inserts only - order doesn't matter
255
- ordered: false
256
- });
257
- }
258
- if (this.bucketParameters.length > 0) {
259
- await safeBulkWrite(db.bucket_parameters, this.bucketParameters, {
260
- session,
261
- // inserts only - order doesn't matter
262
- ordered: false
263
- });
264
- }
265
- if (this.currentData.length > 0) {
266
- await safeBulkWrite(db.current_data, this.currentData, {
267
- session,
268
- // may update and delete data within the same batch - order matters
269
- ordered: true
270
- });
271
- }
272
-
273
- logger.info(
274
- `powersync_${this.group_id} Flushed ${this.bucketData.length} + ${this.bucketParameters.length} + ${
275
- this.currentData.length
276
- } updates, ${Math.round(this.currentSize / 1024)}kb. Last op_id: ${this.debugLastOpId}`
277
- );
278
-
279
- this.bucketData = [];
280
- this.bucketParameters = [];
281
- this.currentData = [];
282
- this.currentSize = 0;
283
- this.debugLastOpId = null;
284
- }
285
- }
@@ -1,40 +0,0 @@
1
- import * as urijs from 'uri-js';
2
-
3
- export interface MongoConnectionConfig {
4
- uri: string;
5
- username?: string;
6
- password?: string;
7
- database?: string;
8
- }
9
-
10
- /**
11
- * Validate and normalize connection options.
12
- *
13
- * Returns destructured options.
14
- *
15
- * For use by both storage and mongo module.
16
- */
17
- export function normalizeMongoConfig(options: MongoConnectionConfig) {
18
- let uri = urijs.parse(options.uri);
19
-
20
- const database = options.database ?? uri.path?.substring(1) ?? '';
21
-
22
- const userInfo = uri.userinfo?.split(':');
23
-
24
- const username = options.username ?? userInfo?.[0];
25
- const password = options.password ?? userInfo?.[1];
26
-
27
- if (database == '') {
28
- throw new Error(`database required`);
29
- }
30
-
31
- delete uri.userinfo;
32
-
33
- return {
34
- uri: urijs.serialize(uri),
35
- database,
36
-
37
- username,
38
- password
39
- };
40
- }