@powersync/service-module-mongodb-storage 0.14.0 → 0.15.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/CHANGELOG.md +45 -0
  2. package/dist/storage/MongoBucketStorage.js +16 -3
  3. package/dist/storage/MongoBucketStorage.js.map +1 -1
  4. package/dist/storage/implementation/MongoBucketBatch.d.ts +13 -11
  5. package/dist/storage/implementation/MongoBucketBatch.js +208 -127
  6. package/dist/storage/implementation/MongoBucketBatch.js.map +1 -1
  7. package/dist/storage/implementation/MongoChecksums.d.ts +4 -4
  8. package/dist/storage/implementation/MongoChecksums.js +1 -0
  9. package/dist/storage/implementation/MongoChecksums.js.map +1 -1
  10. package/dist/storage/implementation/MongoCompactor.d.ts +8 -2
  11. package/dist/storage/implementation/MongoCompactor.js +50 -21
  12. package/dist/storage/implementation/MongoCompactor.js.map +1 -1
  13. package/dist/storage/implementation/MongoParameterCompactor.d.ts +2 -2
  14. package/dist/storage/implementation/MongoParameterCompactor.js +13 -1
  15. package/dist/storage/implementation/MongoParameterCompactor.js.map +1 -1
  16. package/dist/storage/implementation/MongoPersistedSyncRulesContent.js +2 -7
  17. package/dist/storage/implementation/MongoPersistedSyncRulesContent.js.map +1 -1
  18. package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +9 -4
  19. package/dist/storage/implementation/MongoSyncBucketStorage.js +35 -33
  20. package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
  21. package/dist/storage/implementation/MongoSyncRulesLock.d.ts +3 -3
  22. package/dist/storage/implementation/MongoSyncRulesLock.js.map +1 -1
  23. package/dist/storage/implementation/MongoWriteCheckpointAPI.d.ts +4 -4
  24. package/dist/storage/implementation/MongoWriteCheckpointAPI.js.map +1 -1
  25. package/dist/storage/implementation/OperationBatch.js +3 -2
  26. package/dist/storage/implementation/OperationBatch.js.map +1 -1
  27. package/dist/storage/implementation/PersistedBatch.d.ts +11 -4
  28. package/dist/storage/implementation/PersistedBatch.js +42 -11
  29. package/dist/storage/implementation/PersistedBatch.js.map +1 -1
  30. package/dist/storage/implementation/db.d.ts +35 -1
  31. package/dist/storage/implementation/db.js +99 -0
  32. package/dist/storage/implementation/db.js.map +1 -1
  33. package/dist/storage/implementation/models.d.ts +15 -3
  34. package/dist/storage/implementation/models.js +2 -1
  35. package/dist/storage/implementation/models.js.map +1 -1
  36. package/dist/utils/test-utils.d.ts +4 -1
  37. package/dist/utils/test-utils.js +15 -12
  38. package/dist/utils/test-utils.js.map +1 -1
  39. package/dist/utils/util.d.ts +2 -1
  40. package/dist/utils/util.js +15 -1
  41. package/dist/utils/util.js.map +1 -1
  42. package/package.json +6 -6
  43. package/src/storage/MongoBucketStorage.ts +29 -8
  44. package/src/storage/implementation/MongoBucketBatch.ts +263 -177
  45. package/src/storage/implementation/MongoChecksums.ts +5 -3
  46. package/src/storage/implementation/MongoCompactor.ts +53 -24
  47. package/src/storage/implementation/MongoParameterCompactor.ts +17 -4
  48. package/src/storage/implementation/MongoPersistedSyncRulesContent.ts +3 -11
  49. package/src/storage/implementation/MongoSyncBucketStorage.ts +33 -26
  50. package/src/storage/implementation/MongoSyncRulesLock.ts +3 -3
  51. package/src/storage/implementation/MongoWriteCheckpointAPI.ts +4 -4
  52. package/src/storage/implementation/OperationBatch.ts +3 -2
  53. package/src/storage/implementation/PersistedBatch.ts +42 -11
  54. package/src/storage/implementation/db.ts +129 -1
  55. package/src/storage/implementation/models.ts +18 -4
  56. package/src/utils/test-utils.ts +15 -12
  57. package/src/utils/util.ts +17 -2
  58. package/test/src/__snapshots__/storage.test.ts.snap +201 -0
  59. package/test/src/__snapshots__/storage_compacting.test.ts.snap +17 -0
  60. package/test/src/__snapshots__/storage_sync.test.ts.snap +1111 -16
  61. package/test/src/storage.test.ts +9 -7
  62. package/test/src/storage_compacting.test.ts +117 -45
  63. package/test/src/storage_sync.test.ts +53 -51
  64. package/test/src/util.ts +3 -3
  65. package/tsconfig.tsbuildinfo +1 -1
@@ -1,17 +1,19 @@
1
1
  import { register } from '@powersync/service-core-tests';
2
2
  import { describe } from 'vitest';
3
- import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';
3
+ import { INITIALIZED_MONGO_STORAGE_FACTORY, TEST_STORAGE_VERSIONS } from './util.js';
4
4
  import { env } from './env.js';
5
5
  import { mongoTestStorageFactoryGenerator } from '@module/utils/test-utils.js';
6
6
 
7
- describe('Mongo Sync Bucket Storage - Parameters', () =>
8
- register.registerDataStorageParameterTests(INITIALIZED_MONGO_STORAGE_FACTORY));
7
+ for (let storageVersion of TEST_STORAGE_VERSIONS) {
8
+ describe(`Mongo Sync Bucket Storage - Parameters - v${storageVersion}`, () =>
9
+ register.registerDataStorageParameterTests({ ...INITIALIZED_MONGO_STORAGE_FACTORY, storageVersion }));
9
10
 
10
- describe('Mongo Sync Bucket Storage - Data', () =>
11
- register.registerDataStorageDataTests(INITIALIZED_MONGO_STORAGE_FACTORY));
11
+ describe(`Mongo Sync Bucket Storage - Data - v${storageVersion}`, () =>
12
+ register.registerDataStorageDataTests({ ...INITIALIZED_MONGO_STORAGE_FACTORY, storageVersion }));
12
13
 
13
- describe('Mongo Sync Bucket Storage - Checkpoints', () =>
14
- register.registerDataStorageCheckpointTests(INITIALIZED_MONGO_STORAGE_FACTORY));
14
+ describe(`Mongo Sync Bucket Storage - Checkpoints - v${storageVersion}`, () =>
15
+ register.registerDataStorageCheckpointTests({ ...INITIALIZED_MONGO_STORAGE_FACTORY, storageVersion }));
16
+ }
15
17
 
16
18
  describe('Sync Bucket Validation', register.registerBucketValidationTests);
17
19
 
@@ -1,43 +1,53 @@
1
- import { bucketRequest, bucketRequests, register, TEST_TABLE, test_utils } from '@powersync/service-core-tests';
1
+ import { storage, SyncRulesBucketStorage, updateSyncRulesFromYaml } from '@powersync/service-core';
2
+ import { bucketRequest, register, test_utils } from '@powersync/service-core-tests';
2
3
  import { describe, expect, test } from 'vitest';
4
+ import { MongoCompactor } from '../../src/storage/implementation/MongoCompactor.js';
3
5
  import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';
4
- import { storage, SyncRulesBucketStorage, updateSyncRulesFromYaml } from '@powersync/service-core';
5
6
 
6
7
  describe('Mongo Sync Bucket Storage Compact', () => {
7
8
  register.registerCompactTests(INITIALIZED_MONGO_STORAGE_FACTORY);
8
9
 
9
10
  describe('with blank bucket_state', () => {
10
11
  // This can happen when migrating from older service versions, that did not populate bucket_state yet.
11
- const populate = async (bucketStorage: SyncRulesBucketStorage) => {
12
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
13
- await batch.save({
14
- sourceTable: TEST_TABLE,
15
- tag: storage.SaveOperationTag.INSERT,
16
- after: {
17
- id: 't1',
18
- owner_id: 'u1'
19
- },
20
- afterReplicaId: test_utils.rid('t1')
21
- });
22
-
23
- await batch.save({
24
- sourceTable: TEST_TABLE,
25
- tag: storage.SaveOperationTag.INSERT,
26
- after: {
27
- id: 't2',
28
- owner_id: 'u2'
29
- },
30
- afterReplicaId: test_utils.rid('t2')
31
- });
32
-
33
- await batch.commit('1/1');
12
+ const populate = async (bucketStorage: SyncRulesBucketStorage, sourceTableIndex: number) => {
13
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
14
+
15
+ const sourceTable = await test_utils.resolveTestTable(
16
+ writer,
17
+ 'test',
18
+ ['id'],
19
+ INITIALIZED_MONGO_STORAGE_FACTORY,
20
+ sourceTableIndex
21
+ );
22
+ await writer.markAllSnapshotDone('1/1');
23
+
24
+ await writer.save({
25
+ sourceTable,
26
+ tag: storage.SaveOperationTag.INSERT,
27
+ after: {
28
+ id: 't1',
29
+ owner_id: 'u1'
30
+ },
31
+ afterReplicaId: test_utils.rid('t1')
34
32
  });
35
33
 
34
+ await writer.save({
35
+ sourceTable,
36
+ tag: storage.SaveOperationTag.INSERT,
37
+ after: {
38
+ id: 't2',
39
+ owner_id: 'u2'
40
+ },
41
+ afterReplicaId: test_utils.rid('t2')
42
+ });
43
+
44
+ await writer.commit('1/1');
45
+
36
46
  return bucketStorage.getCheckpoint();
37
47
  };
38
48
 
39
49
  const setup = async () => {
40
- await using factory = await INITIALIZED_MONGO_STORAGE_FACTORY();
50
+ await using factory = await INITIALIZED_MONGO_STORAGE_FACTORY.factory();
41
51
  const syncRules = await factory.updateSyncRules(
42
52
  updateSyncRulesFromYaml(`
43
53
  bucket_definitions:
@@ -47,7 +57,7 @@ bucket_definitions:
47
57
  `)
48
58
  );
49
59
  const bucketStorage = factory.getInstance(syncRules);
50
- const { checkpoint } = await populate(bucketStorage);
60
+ const { checkpoint } = await populate(bucketStorage, 1);
51
61
 
52
62
  return { bucketStorage, checkpoint, factory, syncRules };
53
63
  };
@@ -68,17 +78,17 @@ bucket_definitions:
68
78
  signal: null as any
69
79
  });
70
80
 
71
- const checksumAfter = await bucketStorage.getChecksums(
72
- checkpoint,
73
- bucketRequests(syncRules, ['by_user["u1"]', 'by_user["u2"]'])
74
- );
75
- expect(checksumAfter.get(bucketRequest(syncRules, 'by_user["u1"]'))).toEqual({
76
- bucket: bucketRequest(syncRules, 'by_user["u1"]'),
81
+ const users = ['u1', 'u2'];
82
+ const userRequests = users.map((user) => bucketRequest(syncRules, `by_user["${user}"]`));
83
+ const [u1Request, u2Request] = userRequests;
84
+ const checksumAfter = await bucketStorage.getChecksums(checkpoint, userRequests);
85
+ expect(checksumAfter.get(u1Request.bucket)).toEqual({
86
+ bucket: u1Request.bucket,
77
87
  checksum: -659469718,
78
88
  count: 1
79
89
  });
80
- expect(checksumAfter.get(bucketRequest(syncRules, 'by_user["u2"]'))).toEqual({
81
- bucket: bucketRequest(syncRules, 'by_user["u2"]'),
90
+ expect(checksumAfter.get(u2Request.bucket)).toEqual({
91
+ bucket: u2Request.bucket,
82
92
  checksum: 430217650,
83
93
  count: 1
84
94
  });
@@ -88,7 +98,7 @@ bucket_definitions:
88
98
  // Populate old sync rules version
89
99
  const { factory } = await setup();
90
100
 
91
- // Not populate another version (bucket definition name changed)
101
+ // Now populate another version (bucket definition name changed)
92
102
  const syncRules = await factory.updateSyncRules(
93
103
  updateSyncRulesFromYaml(`
94
104
  bucket_definitions:
@@ -99,7 +109,7 @@ bucket_definitions:
99
109
  );
100
110
  const bucketStorage = factory.getInstance(syncRules);
101
111
 
102
- await populate(bucketStorage);
112
+ await populate(bucketStorage, 2);
103
113
  const { checkpoint } = await bucketStorage.getCheckpoint();
104
114
 
105
115
  // Default is too-small numbers - should be a no-op
@@ -122,21 +132,83 @@ bucket_definitions:
122
132
  });
123
133
  expect(result2.buckets).toEqual(0);
124
134
 
125
- const checksumAfter = await bucketStorage.getChecksums(
126
- checkpoint,
127
- bucketRequests(syncRules, ['by_user2["u1"]', 'by_user2["u2"]'])
128
- );
129
- expect(checksumAfter.get(bucketRequest(syncRules, 'by_user2["u1"]'))).toEqual({
130
- bucket: bucketRequest(syncRules, 'by_user2["u1"]'),
135
+ const users = ['u1', 'u2'];
136
+ const userRequests = users.map((user) => bucketRequest(syncRules, `by_user2["${user}"]`));
137
+ const [u1Request, u2Request] = userRequests;
138
+ const checksumAfter = await bucketStorage.getChecksums(checkpoint, userRequests);
139
+ expect(checksumAfter.get(u1Request.bucket)).toEqual({
140
+ bucket: u1Request.bucket,
131
141
  checksum: -659469718,
132
142
  count: 1
133
143
  });
134
- expect(checksumAfter.get(bucketRequest(syncRules, 'by_user2["u2"]'))).toEqual({
135
- bucket: bucketRequest(syncRules, 'by_user2["u2"]'),
144
+ expect(checksumAfter.get(u2Request.bucket)).toEqual({
145
+ bucket: u2Request.bucket,
136
146
  checksum: 430217650,
137
147
  count: 1
138
148
  });
139
149
  });
150
+
151
+ test('dirty bucket discovery handles bigint bucket_state bytes', async () => {
152
+ await using factory = await INITIALIZED_MONGO_STORAGE_FACTORY.factory();
153
+ const syncRules = await factory.updateSyncRules(
154
+ updateSyncRulesFromYaml(`
155
+ bucket_definitions:
156
+ global:
157
+ data: [select * from test]
158
+ `)
159
+ );
160
+ const bucketStorage = factory.getInstance(syncRules);
161
+
162
+ // This simulates bucket_state created using bigint bytes.
163
+ // This typically happens when buckets get very large (> 2GiB). We don't want to create that much
164
+ // data in the tests, so we directly insert the bucket_state here.
165
+ await factory.db.bucket_state.insertOne({
166
+ _id: {
167
+ g: bucketStorage.group_id,
168
+ b: 'global[]'
169
+ },
170
+ last_op: 5n,
171
+ compacted_state: {
172
+ op_id: 3n,
173
+ count: 3,
174
+ checksum: 0n,
175
+ bytes: 7n
176
+ },
177
+ estimate_since_compact: {
178
+ count: 2,
179
+ bytes: 5n
180
+ }
181
+ });
182
+
183
+ // This test uses a couple of internal APIs of the compactor - there is no simple way
184
+ // to test this using the current public APIs.
185
+ const compactor = new MongoCompactor(bucketStorage, (bucketStorage as any).db, {
186
+ maxOpId: 5n
187
+ });
188
+
189
+ const dirtyBuckets = (compactor as any).dirtyBucketBatches({
190
+ minBucketChanges: 1,
191
+ minChangeRatio: 0.39
192
+ });
193
+ const firstBatch = await dirtyBuckets.next();
194
+
195
+ expect(firstBatch.done).toBe(false);
196
+ expect(firstBatch.value).toHaveLength(1);
197
+ expect(firstBatch.value[0].bucket).toBe('global[]');
198
+ expect(firstBatch.value[0].estimatedCount).toBe(5);
199
+ expect(typeof firstBatch.value[0].estimatedCount).toBe('number');
200
+ expect(firstBatch.value[0].dirtyRatio).toBeCloseTo(5 / 12);
201
+
202
+ const checksumBuckets = await (compactor as any).dirtyBucketBatchForChecksums({
203
+ minBucketChanges: 1
204
+ });
205
+ expect(checksumBuckets).toEqual([
206
+ {
207
+ bucket: 'global[]',
208
+ estimatedCount: 5
209
+ }
210
+ ]);
211
+ });
140
212
  });
141
213
  });
142
214
 
@@ -1,17 +1,19 @@
1
1
  import { storage, updateSyncRulesFromYaml } from '@powersync/service-core';
2
- import { bucketRequest, register, TEST_TABLE, test_utils } from '@powersync/service-core-tests';
2
+ import { bucketRequest, register, test_utils } from '@powersync/service-core-tests';
3
3
  import { describe, expect, test } from 'vitest';
4
4
  import { INITIALIZED_MONGO_STORAGE_FACTORY, TEST_STORAGE_VERSIONS } from './util.js';
5
5
 
6
- function registerSyncStorageTests(storageFactory: storage.TestStorageFactory, storageVersion: number) {
7
- register.registerSyncTests(storageFactory, { storageVersion });
8
-
6
+ function registerSyncStorageTests(storageConfig: storage.TestStorageConfig, storageVersion: number) {
7
+ register.registerSyncTests(storageConfig.factory, {
8
+ storageVersion,
9
+ tableIdStrings: storageConfig.tableIdStrings
10
+ });
9
11
  // The split of returned results can vary depending on storage drivers
10
12
  test('large batch (2)', async () => {
11
13
  // Test syncing a batch of data that is small in count,
12
14
  // but large enough in size to be split over multiple returned chunks.
13
15
  // Similar to the above test, but splits over 1MB chunks.
14
- await using factory = await storageFactory();
16
+ await using factory = await storageConfig.factory();
15
17
  const syncRules = await factory.updateSyncRules(
16
18
  updateSyncRulesFromYaml(
17
19
  `
@@ -24,61 +26,61 @@ function registerSyncStorageTests(storageFactory: storage.TestStorageFactory, st
24
26
  )
25
27
  );
26
28
  const bucketStorage = factory.getInstance(syncRules);
27
- const globalBucket = bucketRequest(syncRules, 'global[]');
28
29
 
29
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
30
- const sourceTable = TEST_TABLE;
30
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
31
31
 
32
- const largeDescription = '0123456789'.repeat(2_000_00);
32
+ const sourceTable = await test_utils.resolveTestTable(writer, 'test', ['id'], INITIALIZED_MONGO_STORAGE_FACTORY);
33
33
 
34
- await batch.save({
35
- sourceTable,
36
- tag: storage.SaveOperationTag.INSERT,
37
- after: {
38
- id: 'test1',
39
- description: 'test1'
40
- },
41
- afterReplicaId: test_utils.rid('test1')
42
- });
34
+ const largeDescription = '0123456789'.repeat(2_000_00);
35
+
36
+ await writer.save({
37
+ sourceTable,
38
+ tag: storage.SaveOperationTag.INSERT,
39
+ after: {
40
+ id: 'test1',
41
+ description: 'test1'
42
+ },
43
+ afterReplicaId: test_utils.rid('test1')
44
+ });
43
45
 
44
- await batch.save({
45
- sourceTable,
46
- tag: storage.SaveOperationTag.INSERT,
47
- after: {
48
- id: 'large1',
49
- description: largeDescription
50
- },
51
- afterReplicaId: test_utils.rid('large1')
52
- });
46
+ await writer.save({
47
+ sourceTable,
48
+ tag: storage.SaveOperationTag.INSERT,
49
+ after: {
50
+ id: 'large1',
51
+ description: largeDescription
52
+ },
53
+ afterReplicaId: test_utils.rid('large1')
54
+ });
53
55
 
54
- // Large enough to split the returned batch
55
- await batch.save({
56
- sourceTable,
57
- tag: storage.SaveOperationTag.INSERT,
58
- after: {
59
- id: 'large2',
60
- description: largeDescription
61
- },
62
- afterReplicaId: test_utils.rid('large2')
63
- });
56
+ // Large enough to split the returned batch
57
+ await writer.save({
58
+ sourceTable,
59
+ tag: storage.SaveOperationTag.INSERT,
60
+ after: {
61
+ id: 'large2',
62
+ description: largeDescription
63
+ },
64
+ afterReplicaId: test_utils.rid('large2')
65
+ });
64
66
 
65
- await batch.save({
66
- sourceTable,
67
- tag: storage.SaveOperationTag.INSERT,
68
- after: {
69
- id: 'test3',
70
- description: 'test3'
71
- },
72
- afterReplicaId: test_utils.rid('test3')
73
- });
67
+ await writer.save({
68
+ sourceTable,
69
+ tag: storage.SaveOperationTag.INSERT,
70
+ after: {
71
+ id: 'test3',
72
+ description: 'test3'
73
+ },
74
+ afterReplicaId: test_utils.rid('test3')
74
75
  });
75
76
 
76
- const checkpoint = result!.flushed_op;
77
+ const flushResult = await writer.flush();
77
78
 
78
- const options: storage.BucketDataBatchOptions = {};
79
+ const checkpoint = flushResult!.flushed_op;
79
80
 
81
+ const options: storage.BucketDataBatchOptions = {};
80
82
  const batch1 = await test_utils.fromAsync(
81
- bucketStorage.getBucketDataBatch(checkpoint, new Map([[globalBucket, 0n]]), options)
83
+ bucketStorage.getBucketDataBatch(checkpoint, [bucketRequest(syncRules, 'global[]', 0n)], options)
82
84
  );
83
85
  expect(test_utils.getBatchData(batch1)).toEqual([
84
86
  { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 },
@@ -93,7 +95,7 @@ function registerSyncStorageTests(storageFactory: storage.TestStorageFactory, st
93
95
  const batch2 = await test_utils.fromAsync(
94
96
  bucketStorage.getBucketDataBatch(
95
97
  checkpoint,
96
- new Map([[globalBucket, BigInt(batch1[0].chunkData.next_after)]]),
98
+ [bucketRequest(syncRules, 'global[]', batch1[0].chunkData.next_after)],
97
99
  options
98
100
  )
99
101
  );
@@ -109,7 +111,7 @@ function registerSyncStorageTests(storageFactory: storage.TestStorageFactory, st
109
111
  const batch3 = await test_utils.fromAsync(
110
112
  bucketStorage.getBucketDataBatch(
111
113
  checkpoint,
112
- new Map([[globalBucket, BigInt(batch2[0].chunkData.next_after)]]),
114
+ [bucketRequest(syncRules, 'global[]', batch2[0].chunkData.next_after)],
113
115
  options
114
116
  )
115
117
  );
package/test/src/util.ts CHANGED
@@ -1,6 +1,6 @@
1
- import { env } from './env.js';
2
1
  import { mongoTestReportStorageFactoryGenerator, mongoTestStorageFactoryGenerator } from '@module/utils/test-utils.js';
3
- import { CURRENT_STORAGE_VERSION, LEGACY_STORAGE_VERSION } from '@powersync/service-core';
2
+ import { SUPPORTED_STORAGE_VERSIONS } from '@powersync/service-core';
3
+ import { env } from './env.js';
4
4
 
5
5
  export const INITIALIZED_MONGO_STORAGE_FACTORY = mongoTestStorageFactoryGenerator({
6
6
  url: env.MONGO_TEST_URL,
@@ -12,4 +12,4 @@ export const INITIALIZED_MONGO_REPORT_STORAGE_FACTORY = mongoTestReportStorageFa
12
12
  isCI: env.CI
13
13
  });
14
14
 
15
- export const TEST_STORAGE_VERSIONS = [LEGACY_STORAGE_VERSION, CURRENT_STORAGE_VERSION];
15
+ export const TEST_STORAGE_VERSIONS = SUPPORTED_STORAGE_VERSIONS;