@powersync/service-core 0.0.0-dev-20250214100224 → 0.0.0-dev-20250227082606

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. package/CHANGELOG.md +42 -2
  2. package/dist/api/RouteAPI.d.ts +1 -1
  3. package/dist/api/diagnostics.js +107 -169
  4. package/dist/api/diagnostics.js.map +1 -1
  5. package/dist/entry/commands/compact-action.js +10 -73
  6. package/dist/entry/commands/compact-action.js.map +1 -1
  7. package/dist/modules/AbstractModule.d.ts +1 -1
  8. package/dist/replication/AbstractReplicator.js +8 -76
  9. package/dist/replication/AbstractReplicator.js.map +1 -1
  10. package/dist/routes/RouterEngine.js.map +1 -1
  11. package/dist/routes/endpoints/checkpointing.js +3 -2
  12. package/dist/routes/endpoints/checkpointing.js.map +1 -1
  13. package/dist/routes/endpoints/socket-route.js +7 -6
  14. package/dist/routes/endpoints/socket-route.js.map +1 -1
  15. package/dist/routes/endpoints/sync-stream.js +7 -6
  16. package/dist/routes/endpoints/sync-stream.js.map +1 -1
  17. package/dist/runner/teardown.js +3 -65
  18. package/dist/runner/teardown.js.map +1 -1
  19. package/dist/storage/BucketStorage.d.ts +8 -441
  20. package/dist/storage/BucketStorage.js +9 -10
  21. package/dist/storage/BucketStorage.js.map +1 -1
  22. package/dist/storage/BucketStorageBatch.d.ts +130 -0
  23. package/dist/storage/BucketStorageBatch.js +10 -0
  24. package/dist/storage/BucketStorageBatch.js.map +1 -0
  25. package/dist/storage/BucketStorageFactory.d.ts +145 -0
  26. package/dist/storage/BucketStorageFactory.js +2 -0
  27. package/dist/storage/BucketStorageFactory.js.map +1 -0
  28. package/dist/storage/ChecksumCache.js.map +1 -1
  29. package/dist/storage/PersistedSyncRulesContent.d.ts +20 -0
  30. package/dist/storage/PersistedSyncRulesContent.js +2 -0
  31. package/dist/storage/PersistedSyncRulesContent.js.map +1 -0
  32. package/dist/storage/ReplicationEventPayload.d.ts +1 -1
  33. package/dist/storage/ReplicationLock.d.ts +4 -0
  34. package/dist/storage/ReplicationLock.js +2 -0
  35. package/dist/storage/ReplicationLock.js.map +1 -0
  36. package/dist/storage/SourceEntity.d.ts +6 -2
  37. package/dist/storage/SourceTable.d.ts +2 -2
  38. package/dist/storage/SourceTable.js.map +1 -1
  39. package/dist/storage/StorageEngine.d.ts +4 -4
  40. package/dist/storage/StorageEngine.js +2 -2
  41. package/dist/storage/StorageEngine.js.map +1 -1
  42. package/dist/storage/StorageProvider.d.ts +4 -1
  43. package/dist/storage/SyncRulesBucketStorage.d.ts +207 -0
  44. package/dist/storage/SyncRulesBucketStorage.js +7 -0
  45. package/dist/storage/SyncRulesBucketStorage.js.map +1 -0
  46. package/dist/storage/bson.d.ts +14 -3
  47. package/dist/storage/bson.js +18 -2
  48. package/dist/storage/bson.js.map +1 -1
  49. package/dist/storage/storage-index.d.ts +5 -0
  50. package/dist/storage/storage-index.js +5 -0
  51. package/dist/storage/storage-index.js.map +1 -1
  52. package/dist/sync/BucketChecksumState.d.ts +95 -0
  53. package/dist/sync/BucketChecksumState.js +321 -0
  54. package/dist/sync/BucketChecksumState.js.map +1 -0
  55. package/dist/sync/SyncContext.d.ts +17 -0
  56. package/dist/sync/SyncContext.js +23 -0
  57. package/dist/sync/SyncContext.js.map +1 -0
  58. package/dist/sync/sync-index.d.ts +2 -0
  59. package/dist/sync/sync-index.js +2 -0
  60. package/dist/sync/sync-index.js.map +1 -1
  61. package/dist/sync/sync.d.ts +10 -4
  62. package/dist/sync/sync.js +142 -148
  63. package/dist/sync/sync.js.map +1 -1
  64. package/dist/sync/util.d.ts +9 -0
  65. package/dist/sync/util.js +44 -0
  66. package/dist/sync/util.js.map +1 -1
  67. package/dist/system/ServiceContext.d.ts +3 -0
  68. package/dist/system/ServiceContext.js +7 -0
  69. package/dist/system/ServiceContext.js.map +1 -1
  70. package/dist/util/checkpointing.d.ts +1 -1
  71. package/dist/util/checkpointing.js +15 -78
  72. package/dist/util/checkpointing.js.map +1 -1
  73. package/dist/util/config/compound-config-collector.js +13 -1
  74. package/dist/util/config/compound-config-collector.js.map +1 -1
  75. package/dist/util/config/defaults.d.ts +5 -0
  76. package/dist/util/config/defaults.js +6 -0
  77. package/dist/util/config/defaults.js.map +1 -0
  78. package/dist/util/config/types.d.ts +7 -2
  79. package/dist/util/config/types.js.map +1 -1
  80. package/dist/util/protocol-types.d.ts +13 -4
  81. package/package.json +6 -6
  82. package/src/api/RouteAPI.ts +1 -1
  83. package/src/api/diagnostics.ts +1 -1
  84. package/src/entry/commands/compact-action.ts +2 -3
  85. package/src/modules/AbstractModule.ts +1 -1
  86. package/src/replication/AbstractReplicator.ts +7 -12
  87. package/src/routes/RouterEngine.ts +1 -0
  88. package/src/routes/endpoints/checkpointing.ts +3 -3
  89. package/src/routes/endpoints/socket-route.ts +9 -6
  90. package/src/routes/endpoints/sync-stream.ts +10 -6
  91. package/src/runner/teardown.ts +1 -1
  92. package/src/storage/BucketStorage.ts +8 -550
  93. package/src/storage/BucketStorageBatch.ts +158 -0
  94. package/src/storage/BucketStorageFactory.ts +166 -0
  95. package/src/storage/ChecksumCache.ts +1 -0
  96. package/src/storage/PersistedSyncRulesContent.ts +26 -0
  97. package/src/storage/ReplicationEventPayload.ts +1 -1
  98. package/src/storage/ReplicationLock.ts +5 -0
  99. package/src/storage/SourceEntity.ts +6 -2
  100. package/src/storage/SourceTable.ts +1 -1
  101. package/src/storage/StorageEngine.ts +4 -4
  102. package/src/storage/StorageProvider.ts +4 -1
  103. package/src/storage/SyncRulesBucketStorage.ts +265 -0
  104. package/src/storage/bson.ts +22 -4
  105. package/src/storage/storage-index.ts +5 -0
  106. package/src/sync/BucketChecksumState.ts +418 -0
  107. package/src/sync/SyncContext.ts +36 -0
  108. package/src/sync/sync-index.ts +2 -0
  109. package/src/sync/sync.ts +199 -177
  110. package/src/sync/util.ts +54 -0
  111. package/src/system/ServiceContext.ts +9 -0
  112. package/src/util/checkpointing.ts +4 -6
  113. package/src/util/config/compound-config-collector.ts +24 -1
  114. package/src/util/config/defaults.ts +5 -0
  115. package/src/util/config/types.ts +8 -2
  116. package/src/util/protocol-types.ts +16 -4
  117. package/test/src/auth.test.ts +5 -5
  118. package/test/src/sync/BucketChecksumState.test.ts +580 -0
  119. package/test/src/sync/util.test.ts +34 -0
  120. package/tsconfig.tsbuildinfo +1 -1
package/src/util/config/types.ts
@@ -1,5 +1,4 @@
 import { configFile } from '@powersync/service-types';
-import { PowerSyncConfig } from '@powersync/service-types/src/config/PowerSyncConfig.js';
 import { CompoundKeyCollector } from '../../auth/CompoundKeyCollector.js';
 import { KeySpec } from '../../auth/KeySpec.js';
 import { KeyStore } from '../../auth/KeyStore.js';
@@ -30,7 +29,7 @@ export type SyncRulesConfig = {
 };
 
 export type ResolvedPowerSyncConfig = {
-  base_config: PowerSyncConfig;
+  base_config: configFile.PowerSyncConfig;
   connections?: configFile.GenericDataSourceConfig[];
   storage: configFile.GenericStorageConfig;
   dev: {
@@ -60,6 +59,13 @@ export type ResolvedPowerSyncConfig = {
     internal_service_endpoint: string;
   };
 
+  api_parameters: {
+    max_concurrent_connections: number;
+    max_data_fetch_concurrency: number;
+    max_buckets_per_connection: number;
+    max_parameter_query_results: number;
+  };
+
   /** Prefix for postgres replication slot names. May eventually be connection-specific. */
   slot_name_prefix: string;
   parameters: Record<string, number | string | boolean | null>;
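The new api_parameters block groups the sync API limits under a single resolved-config field; defaults for these limits are added in util/config/defaults.ts, which is not shown in this diff. Below is a minimal sketch of how a config collector could resolve the block. The default values and the shape of the incoming config section are assumptions for illustration, not the package's actual defaults.

// Sketch only: field names mirror ResolvedPowerSyncConfig['api_parameters'] above.
// The default values and the input shape are assumed, not taken from the package.
interface ApiParameters {
  max_concurrent_connections: number;
  max_data_fetch_concurrency: number;
  max_buckets_per_connection: number;
  max_parameter_query_results: number;
}

// Hypothetical defaults; the real values live in util/config/defaults.ts.
const DEFAULT_API_PARAMETERS: ApiParameters = {
  max_concurrent_connections: 200,
  max_data_fetch_concurrency: 10,
  max_buckets_per_connection: 1000,
  max_parameter_query_results: 1000
};

function resolveApiParameters(input?: Partial<ApiParameters>): ApiParameters {
  // Any limit not set in the config file falls back to the default.
  return { ...DEFAULT_API_PARAMETERS, ...input };
}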
package/src/util/protocol-types.ts
@@ -1,5 +1,5 @@
 import * as t from 'ts-codec';
-import { SqliteJsonValue } from '@powersync/service-sync-rules';
+import { BucketDescription, BucketPriority, SqliteJsonValue } from '@powersync/service-sync-rules';
 
 export const BucketRequest = t.object({
   name: t.string,
@@ -59,7 +59,7 @@ export interface StreamingSyncCheckpointDiff {
   checkpoint_diff: {
     last_op_id: OpId;
     write_checkpoint?: OpId;
-    updated_buckets: BucketChecksum[];
+    updated_buckets: BucketChecksumWithDescription[];
     removed_buckets: string[];
   };
 }
@@ -74,13 +74,23 @@ export interface StreamingSyncCheckpointComplete {
   };
 }
 
-export interface StreamingSyncKeepalive {}
+export interface StreamingSyncCheckpointPartiallyComplete {
+  partial_checkpoint_complete: {
+    last_op_id: OpId;
+    priority: BucketPriority;
+  };
+}
+
+export interface StreamingSyncKeepalive {
+  token_expires_in: number;
+}
 
 export type StreamingSyncLine =
   | StreamingSyncData
   | StreamingSyncCheckpoint
   | StreamingSyncCheckpointDiff
   | StreamingSyncCheckpointComplete
+  | StreamingSyncCheckpointPartiallyComplete
   | StreamingSyncKeepalive;
 
 /**
@@ -91,7 +101,7 @@ export type OpId = string;
 export interface Checkpoint {
   last_op_id: OpId;
   write_checkpoint?: OpId;
-  buckets: BucketChecksum[];
+  buckets: BucketChecksumWithDescription[];
 }
 
 export interface BucketState {
@@ -142,3 +152,5 @@ export interface BucketChecksum {
    */
   count: number;
 }
+
+export interface BucketChecksumWithDescription extends BucketChecksum, BucketDescription {}
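Two protocol additions stand out here: keepalive lines now carry token_expires_in, and a new partial_checkpoint_complete line tells the client that the buckets in a given priority band have been sent up to last_op_id. The handler below is an illustrative consumer-side sketch; the line shapes are re-declared locally to mirror the definitions above, and the body of StreamingSyncCheckpointComplete is an assumption since it is not shown in this diff.

type OpId = string;

// Local mirrors of the protocol lines shown above; CheckpointComplete's body is assumed.
interface StreamingSyncCheckpointPartiallyComplete {
  partial_checkpoint_complete: { last_op_id: OpId; priority: number };
}
interface StreamingSyncCheckpointComplete {
  checkpoint_complete: { last_op_id: OpId };
}
interface StreamingSyncKeepalive {
  token_expires_in: number;
}

type Line = StreamingSyncCheckpointPartiallyComplete | StreamingSyncCheckpointComplete | StreamingSyncKeepalive;

function handleLine(line: Line): void {
  if ('partial_checkpoint_complete' in line) {
    // Data for buckets in this priority band has been sent up to last_op_id;
    // the rest of the checkpoint may still follow.
    console.log('partial checkpoint complete, priority', line.partial_checkpoint_complete.priority);
  } else if ('checkpoint_complete' in line) {
    console.log('checkpoint complete at', line.checkpoint_complete.last_op_id);
  } else {
    // Keepalives now indicate how much longer the current auth token remains valid.
    console.log('token expires in', line.token_expires_in);
  }
}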
package/test/src/auth.test.ts
@@ -274,7 +274,7 @@ describe('JWT Auth', () => {
     ).rejects.toThrow('Token must expire in a maximum of');
   });
 
-  test('http', async () => {
+  test('http', { timeout: 20_000 }, async () => {
     // Not ideal to rely on an external endpoint for tests, but it is good to test that this
     // one actually works.
     const remote = new RemoteJWKSCollector(
@@ -290,9 +290,9 @@ describe('JWT Auth', () => {
        reject_ip_ranges: ['local']
      }
    });
-    expect(invalid.getKeys()).rejects.toThrow('IPs in this range are not supported');
+    await expect(invalid.getKeys()).rejects.toThrow('IPs in this range are not supported');
 
-    // IPS throw an error immediately
+    // IPs throw an error immediately
    expect(
      () =>
        new RemoteJWKSCollector('https://127.0.0.1/.well-known/jwks.json', {
@@ -315,11 +315,11 @@ describe('JWT Auth', () => {
 
    const invalid = new RemoteJWKSCollector('https://127.0.0.1/.well-known/jwks.json');
    // Should try and fetch
-    expect(invalid.getKeys()).rejects.toThrow();
+    await expect(invalid.getKeys()).rejects.toThrow();
 
    const invalid2 = new RemoteJWKSCollector('https://localhost/.well-known/jwks.json');
    // Should try and fetch
-    expect(invalid2.getKeys()).rejects.toThrow();
+    await expect(invalid2.getKeys()).rejects.toThrow();
  });
 
  test('caching', async () => {
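The substance of these test changes is awaiting the rejects assertions: expect(promise).rejects.toThrow() itself returns a promise, so without await a failed assertion only surfaces as an unhandled rejection and cannot fail the test. A small, standalone vitest illustration of the difference (not part of the package):

import { expect, test } from 'vitest';

test('rejects assertions must be awaited', async () => {
  const failing = Promise.reject(new Error('boom'));

  // Anti-pattern: without await, the assertion result is never observed,
  // so a mismatch here would not fail this test.
  // expect(failing).rejects.toThrow('something else');

  // Correct: awaiting ties the assertion outcome to the test result.
  await expect(failing).rejects.toThrow('boom');
});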
package/test/src/sync/BucketChecksumState.test.ts
@@ -0,0 +1,580 @@
+import {
+  BucketChecksum,
+  BucketChecksumState,
+  BucketChecksumStateStorage,
+  CHECKPOINT_INVALIDATE_ALL,
+  ChecksumMap,
+  OpId,
+  SyncContext,
+  WatchFilterEvent
+} from '@/index.js';
+import { RequestParameters, SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
+import { describe, expect, test } from 'vitest';
+
+describe('BucketChecksumState', () => {
+  // Single global[] bucket.
+  // We don't care about data in these tests
+  const SYNC_RULES_GLOBAL = SqlSyncRules.fromYaml(
+    `
+bucket_definitions:
+  global:
+    data: []
+`,
+    { defaultSchema: 'public' }
+  );
+
+  // global[1] and global[2]
+  const SYNC_RULES_GLOBAL_TWO = SqlSyncRules.fromYaml(
+    `
+bucket_definitions:
+  global:
+    parameters:
+      - select 1 as id
+      - select 2 as id
+    data: []
+`,
+    { defaultSchema: 'public' }
+  );
+
+  // by_project[n]
+  const SYNC_RULES_DYNAMIC = SqlSyncRules.fromYaml(
+    `
+bucket_definitions:
+  by_project:
+    parameters: select id from projects where user_id = request.user_id()
+    data: []
+`,
+    { defaultSchema: 'public' }
+  );
+
+  const syncContext = new SyncContext({
+    maxBuckets: 100,
+    maxParameterQueryResults: 100,
+    maxDataFetchConcurrency: 10
+  });
+
+  test('global bucket with update', async () => {
+    const storage = new MockBucketChecksumStateStorage();
+    // Set intial state
+    storage.updateTestChecksum({ bucket: 'global[]', checksum: 1, count: 1 });
+
+    const state = new BucketChecksumState({
+      syncContext,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      syncRules: SYNC_RULES_GLOBAL,
+      bucketStorage: storage
+    });
+
+    const line = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '1', lsn: '1' },
+      writeCheckpoint: null,
+      update: CHECKPOINT_INVALIDATE_ALL
+    }))!;
+    expect(line.checkpointLine).toEqual({
+      checkpoint: {
+        buckets: [{ bucket: 'global[]', checksum: 1, count: 1, priority: 3 }],
+        last_op_id: '1',
+        write_checkpoint: undefined
+      }
+    });
+    expect(line.bucketsToFetch).toEqual([
+      {
+        bucket: 'global[]',
+        priority: 3
+      }
+    ]);
+    // This is the bucket data to be fetched
+    expect(state.getFilteredBucketPositions(line.bucketsToFetch)).toEqual(new Map([['global[]', '0']]));
+
+    // This similuates the bucket data being sent
+    state.updateBucketPosition({ bucket: 'global[]', nextAfter: '1', hasMore: false });
+
+    // Update bucket storage state
+    storage.updateTestChecksum({ bucket: 'global[]', checksum: 2, count: 2 });
+
+    // Now we get a new line
+    const line2 = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '2', lsn: '2' },
+      writeCheckpoint: null,
+      update: {
+        updatedDataBuckets: ['global[]'],
+        invalidateDataBuckets: false,
+        updatedParameterBucketDefinitions: [],
+        invalidateParameterBuckets: false
+      }
+    }))!;
+    expect(line2.checkpointLine).toEqual({
+      checkpoint_diff: {
+        removed_buckets: [],
+        updated_buckets: [{ bucket: 'global[]', checksum: 2, count: 2, priority: 3 }],
+        last_op_id: '2',
+        write_checkpoint: undefined
+      }
+    });
+    expect(state.getFilteredBucketPositions(line2.bucketsToFetch)).toEqual(new Map([['global[]', '1']]));
+  });
+
+  test('global bucket with initial state', async () => {
+    // This tests the client sending an initial state
+    // This does not affect the checkpoint, but does affect the data to be fetched
+    /// (getFilteredBucketStates)
+    const storage = new MockBucketChecksumStateStorage();
+    // Set intial state
+    storage.updateTestChecksum({ bucket: 'global[]', checksum: 1, count: 1 });
+
+    const state = new BucketChecksumState({
+      syncContext,
+      // Client sets the initial state here
+      initialBucketPositions: [{ name: 'global[]', after: '1' }],
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      syncRules: SYNC_RULES_GLOBAL,
+      bucketStorage: storage
+    });
+
+    const line = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '1', lsn: '1' },
+      writeCheckpoint: null,
+      update: CHECKPOINT_INVALIDATE_ALL
+    }))!;
+    expect(line.checkpointLine).toEqual({
+      checkpoint: {
+        buckets: [{ bucket: 'global[]', checksum: 1, count: 1, priority: 3 }],
+        last_op_id: '1',
+        write_checkpoint: undefined
+      }
+    });
+    expect(line.bucketsToFetch).toEqual([
+      {
+        bucket: 'global[]',
+        priority: 3
+      }
+    ]);
+    // This is the main difference between this and the previous test
+    expect(state.getFilteredBucketPositions(line.bucketsToFetch)).toEqual(new Map([['global[]', '1']]));
+  });
+
+  test('multiple static buckets', async () => {
+    const storage = new MockBucketChecksumStateStorage();
+    // Set intial state
+    storage.updateTestChecksum({ bucket: 'global[1]', checksum: 1, count: 1 });
+    storage.updateTestChecksum({ bucket: 'global[2]', checksum: 1, count: 1 });
+
+    const state = new BucketChecksumState({
+      syncContext,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      syncRules: SYNC_RULES_GLOBAL_TWO,
+      bucketStorage: storage
+    });
+
+    const line = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '1', lsn: '1' },
+      writeCheckpoint: null,
+      update: CHECKPOINT_INVALIDATE_ALL
+    }))!;
+    expect(line.checkpointLine).toEqual({
+      checkpoint: {
+        buckets: [
+          { bucket: 'global[1]', checksum: 1, count: 1, priority: 3 },
+          { bucket: 'global[2]', checksum: 1, count: 1, priority: 3 }
+        ],
+        last_op_id: '1',
+        write_checkpoint: undefined
+      }
+    });
+    expect(line.bucketsToFetch).toEqual([
+      {
+        bucket: 'global[1]',
+        priority: 3
+      },
+      {
+        bucket: 'global[2]',
+        priority: 3
+      }
+    ]);
+
+    storage.updateTestChecksum({ bucket: 'global[1]', checksum: 2, count: 2 });
+    storage.updateTestChecksum({ bucket: 'global[2]', checksum: 2, count: 2 });
+
+    const line2 = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '2', lsn: '2' },
+      writeCheckpoint: null,
+      update: {
+        ...CHECKPOINT_INVALIDATE_ALL,
+        updatedDataBuckets: ['global[1]', 'global[2]'],
+        invalidateDataBuckets: false
+      }
+    }))!;
+    expect(line2.checkpointLine).toEqual({
+      checkpoint_diff: {
+        removed_buckets: [],
+        updated_buckets: [
+          { bucket: 'global[1]', checksum: 2, count: 2, priority: 3 },
+          { bucket: 'global[2]', checksum: 2, count: 2, priority: 3 }
+        ],
+        last_op_id: '2',
+        write_checkpoint: undefined
+      }
+    });
+  });
+
+  test('removing a static bucket', async () => {
+    // This tests the client sending an initial state, with a bucket that we don't have.
+    // This makes effectively no difference to the output. By not including the bucket
+    // in the output, the client will remove the bucket.
+    const storage = new MockBucketChecksumStateStorage();
+
+    const state = new BucketChecksumState({
+      syncContext,
+      // Client sets the initial state here
+      initialBucketPositions: [{ name: 'something_here[]', after: '1' }],
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      syncRules: SYNC_RULES_GLOBAL,
+      bucketStorage: storage
+    });
+
+    storage.updateTestChecksum({ bucket: 'global[]', checksum: 1, count: 1 });
+
+    const line = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '1', lsn: '1' },
+      writeCheckpoint: null,
+      update: CHECKPOINT_INVALIDATE_ALL
+    }))!;
+    expect(line.checkpointLine).toEqual({
+      checkpoint: {
+        buckets: [{ bucket: 'global[]', checksum: 1, count: 1, priority: 3 }],
+        last_op_id: '1',
+        write_checkpoint: undefined
+      }
+    });
+    expect(line.bucketsToFetch).toEqual([
+      {
+        bucket: 'global[]',
+        priority: 3
+      }
+    ]);
+    expect(state.getFilteredBucketPositions(line.bucketsToFetch)).toEqual(new Map([['global[]', '0']]));
+  });
+
+  test('invalidating individual bucket', async () => {
+    // We manually control the filter events here.
+
+    const storage = new MockBucketChecksumStateStorage();
+    // Set initial state
+    storage.updateTestChecksum({ bucket: 'global[1]', checksum: 1, count: 1 });
+    storage.updateTestChecksum({ bucket: 'global[2]', checksum: 1, count: 1 });
+
+    const state = new BucketChecksumState({
+      syncContext,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      syncRules: SYNC_RULES_GLOBAL_TWO,
+      bucketStorage: storage
+    });
+
+    // We specifically do not set this here, so that we have manual control over the events.
+    // storage.filter = state.checkpointFilter;
+
+    await state.buildNextCheckpointLine({
+      base: { checkpoint: '1', lsn: '1' },
+      writeCheckpoint: null,
+      update: CHECKPOINT_INVALIDATE_ALL
+    });
+
+    state.updateBucketPosition({ bucket: 'global[1]', nextAfter: '1', hasMore: false });
+    state.updateBucketPosition({ bucket: 'global[2]', nextAfter: '1', hasMore: false });
+
+    storage.updateTestChecksum({ bucket: 'global[1]', checksum: 2, count: 2 });
+    storage.updateTestChecksum({ bucket: 'global[2]', checksum: 2, count: 2 });
+
+    const line2 = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '2', lsn: '2' },
+      writeCheckpoint: null,
+      update: {
+        ...CHECKPOINT_INVALIDATE_ALL,
+        // Invalidate the state for global[1] - will only re-check the single bucket.
+        // This is essentially inconsistent state, but is the simplest way to test that
+        // the filter is working.
+        updatedDataBuckets: ['global[1]'],
+        invalidateDataBuckets: false
+      }
+    }))!;
+    expect(line2.checkpointLine).toEqual({
+      checkpoint_diff: {
+        removed_buckets: [],
+        updated_buckets: [
+          // This does not include global[2], since it was not invalidated.
+          { bucket: 'global[1]', checksum: 2, count: 2, priority: 3 }
+        ],
+        last_op_id: '2',
+        write_checkpoint: undefined
+      }
+    });
+    expect(line2.bucketsToFetch).toEqual([{ bucket: 'global[1]', priority: 3 }]);
+  });
+
+  test('invalidating all buckets', async () => {
+    // We manually control the filter events here.
+    const storage = new MockBucketChecksumStateStorage();
+
+    const state = new BucketChecksumState({
+      syncContext,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      syncRules: SYNC_RULES_GLOBAL_TWO,
+      bucketStorage: storage
+    });
+
+    // We specifically do not set this here, so that we have manual control over the events.
+    // storage.filter = state.checkpointFilter;
+
+    // Set initial state
+    storage.updateTestChecksum({ bucket: 'global[1]', checksum: 1, count: 1 });
+    storage.updateTestChecksum({ bucket: 'global[2]', checksum: 1, count: 1 });
+
+    await state.buildNextCheckpointLine({
+      base: { checkpoint: '1', lsn: '1' },
+      writeCheckpoint: null,
+      update: CHECKPOINT_INVALIDATE_ALL
+    });
+
+    storage.updateTestChecksum({ bucket: 'global[1]', checksum: 2, count: 2 });
+    storage.updateTestChecksum({ bucket: 'global[2]', checksum: 2, count: 2 });
+
+    const line2 = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '2', lsn: '2' },
+      writeCheckpoint: null,
+      // Invalidate the state - will re-check all buckets
+      update: CHECKPOINT_INVALIDATE_ALL
+    }))!;
+    expect(line2.checkpointLine).toEqual({
+      checkpoint_diff: {
+        removed_buckets: [],
+        updated_buckets: [
+          { bucket: 'global[1]', checksum: 2, count: 2, priority: 3 },
+          { bucket: 'global[2]', checksum: 2, count: 2, priority: 3 }
+        ],
+        last_op_id: '2',
+        write_checkpoint: undefined
+      }
+    });
+    expect(line2.bucketsToFetch).toEqual([
+      { bucket: 'global[1]', priority: 3 },
+      { bucket: 'global[2]', priority: 3 }
+    ]);
+  });
+
+  test('interrupt and resume static buckets checkpoint', async () => {
+    const storage = new MockBucketChecksumStateStorage();
+    // Set intial state
+    storage.updateTestChecksum({ bucket: 'global[1]', checksum: 3, count: 3 });
+    storage.updateTestChecksum({ bucket: 'global[2]', checksum: 3, count: 3 });
+
+    const state = new BucketChecksumState({
+      syncContext,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      syncRules: SYNC_RULES_GLOBAL_TWO,
+      bucketStorage: storage
+    });
+
+    const line = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '3', lsn: '3' },
+      writeCheckpoint: null,
+      update: CHECKPOINT_INVALIDATE_ALL
+    }))!;
+    expect(line.checkpointLine).toEqual({
+      checkpoint: {
+        buckets: [
+          { bucket: 'global[1]', checksum: 3, count: 3, priority: 3 },
+          { bucket: 'global[2]', checksum: 3, count: 3, priority: 3 }
+        ],
+        last_op_id: '3',
+        write_checkpoint: undefined
+      }
+    });
+    expect(line.bucketsToFetch).toEqual([
+      {
+        bucket: 'global[1]',
+        priority: 3
+      },
+      {
+        bucket: 'global[2]',
+        priority: 3
+      }
+    ]);
+
+    // This is the bucket data to be fetched
+    expect(state.getFilteredBucketPositions(line.bucketsToFetch)).toEqual(
+      new Map([
+        ['global[1]', '0'],
+        ['global[2]', '0']
+      ])
+    );
+
+    // No data changes here.
+    // We simulate partial data sent, before a checkpoint is interrupted.
+    state.updateBucketPosition({ bucket: 'global[1]', nextAfter: '3', hasMore: false });
+    state.updateBucketPosition({ bucket: 'global[2]', nextAfter: '1', hasMore: true });
+    storage.updateTestChecksum({ bucket: 'global[1]', checksum: 4, count: 4 });
+
+    const line2 = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '4', lsn: '4' },
+      writeCheckpoint: null,
+      update: {
+        ...CHECKPOINT_INVALIDATE_ALL,
+        invalidateDataBuckets: false,
+        updatedDataBuckets: ['global[1]']
+      }
+    }))!;
+    expect(line2.checkpointLine).toEqual({
+      checkpoint_diff: {
+        removed_buckets: [],
+        updated_buckets: [
+          {
+            bucket: 'global[1]',
+            checksum: 4,
+            count: 4,
+            priority: 3
+          }
+        ],
+        last_op_id: '4',
+        write_checkpoint: undefined
+      }
+    });
+    // This should contain both buckets, even though only one changed.
+    expect(line2.bucketsToFetch).toEqual([
+      {
+        bucket: 'global[1]',
+        priority: 3
+      },
+      {
+        bucket: 'global[2]',
+        priority: 3
+      }
+    ]);
+
+    expect(state.getFilteredBucketPositions(line2.bucketsToFetch)).toEqual(
+      new Map([
+        ['global[1]', '3'],
+        ['global[2]', '1']
+      ])
+    );
+  });
+
+  test('dynamic buckets with updates', async () => {
+    const storage = new MockBucketChecksumStateStorage();
+    // Set intial state
+    storage.updateTestChecksum({ bucket: 'by_project[1]', checksum: 1, count: 1 });
+    storage.updateTestChecksum({ bucket: 'by_project[2]', checksum: 1, count: 1 });
+    storage.updateTestChecksum({ bucket: 'by_project[3]', checksum: 1, count: 1 });
+
+    const state = new BucketChecksumState({
+      syncContext,
+      syncParams: new RequestParameters({ sub: 'u1' }, {}),
+      syncRules: SYNC_RULES_DYNAMIC,
+      bucketStorage: storage
+    });
+
+    storage.getParameterSets = async (checkpoint: OpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]> => {
+      expect(checkpoint).toEqual('1');
+      expect(lookups).toEqual([['by_project', '1', 'u1']]);
+      return [{ id: 1 }, { id: 2 }];
+    };
+
+    const line = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '1', lsn: '1' },
+      writeCheckpoint: null,
+      update: CHECKPOINT_INVALIDATE_ALL
+    }))!;
+    expect(line.checkpointLine).toEqual({
+      checkpoint: {
+        buckets: [
+          { bucket: 'by_project[1]', checksum: 1, count: 1, priority: 3 },
+          { bucket: 'by_project[2]', checksum: 1, count: 1, priority: 3 }
+        ],
+        last_op_id: '1',
+        write_checkpoint: undefined
+      }
+    });
+    expect(line.bucketsToFetch).toEqual([
+      {
+        bucket: 'by_project[1]',
+        priority: 3
+      },
+      {
+        bucket: 'by_project[2]',
+        priority: 3
+      }
+    ]);
+    // This is the bucket data to be fetched
+    expect(state.getFilteredBucketPositions(line.bucketsToFetch)).toEqual(
+      new Map([
+        ['by_project[1]', '0'],
+        ['by_project[2]', '0']
+      ])
+    );
+
+    state.updateBucketPosition({ bucket: 'by_project[1]', nextAfter: '1', hasMore: false });
+    state.updateBucketPosition({ bucket: 'by_project[2]', nextAfter: '1', hasMore: false });
+
+    storage.getParameterSets = async (checkpoint: OpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]> => {
+      expect(checkpoint).toEqual('2');
+      expect(lookups).toEqual([['by_project', '1', 'u1']]);
+      return [{ id: 1 }, { id: 2 }, { id: 3 }];
+    };
+
+    // Now we get a new line
+    const line2 = (await state.buildNextCheckpointLine({
+      base: { checkpoint: '2', lsn: '2' },
+      writeCheckpoint: null,
+      update: {
+        invalidateDataBuckets: false,
+        updatedDataBuckets: [],
+        updatedParameterBucketDefinitions: ['by_project'],
+        invalidateParameterBuckets: false
+      }
+    }))!;
+    expect(line2.checkpointLine).toEqual({
+      checkpoint_diff: {
+        removed_buckets: [],
+        updated_buckets: [{ bucket: 'by_project[3]', checksum: 1, count: 1, priority: 3 }],
+        last_op_id: '2',
+        write_checkpoint: undefined
+      }
+    });
+    expect(state.getFilteredBucketPositions(line2.bucketsToFetch)).toEqual(new Map([['by_project[3]', '0']]));
+  });
+});
+
+class MockBucketChecksumStateStorage implements BucketChecksumStateStorage {
+  private state: ChecksumMap = new Map();
+  public filter?: (event: WatchFilterEvent) => boolean;
+
+  constructor() {}
+
+  updateTestChecksum(checksum: BucketChecksum): void {
+    this.state.set(checksum.bucket, checksum);
+    this.filter?.({ changedDataBucket: checksum.bucket });
+  }
+
+  invalidate() {
+    this.filter?.({ invalidate: true });
+  }
+
+  async getChecksums(checkpoint: OpId, buckets: string[]): Promise<ChecksumMap> {
+    return new Map<string, BucketChecksum>(
+      buckets.map((bucket) => {
+        const checksum = this.state.get(bucket);
+        return [
+          bucket,
+          {
+            bucket: bucket,
+            checksum: checksum?.checksum ?? 0,
+            count: checksum?.count ?? 0
+          }
+        ];
+      })
+    );
+  }
+
+  async getParameterSets(checkpoint: OpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]> {
+    throw new Error('Method not implemented.');
+  }
+}
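The new test file also documents the intended lifecycle of BucketChecksumState: build a checkpoint line, read the filtered bucket positions, stream the corresponding bucket data, and report progress back via updateBucketPosition so an interrupted checkpoint can resume from the right place. Below is a rough consumer-side sketch of that loop. fetchBucketData is a hypothetical helper, not an API of this package, and the imported names are simply the ones used by the test file above.

// Sketch of the lifecycle exercised by the tests above.
// fetchBucketData is hypothetical; a real sync stream would read from the package's bucket storage.
import { BucketChecksumState, CHECKPOINT_INVALIDATE_ALL, OpId } from '@/index.js';

declare function fetchBucketData(
  bucket: string,
  after: OpId
): Promise<{ data: unknown[]; nextAfter: OpId; hasMore: boolean }>;

async function streamOneCheckpoint(state: BucketChecksumState, checkpoint: OpId): Promise<void> {
  const line = await state.buildNextCheckpointLine({
    base: { checkpoint, lsn: checkpoint },
    writeCheckpoint: null,
    update: CHECKPOINT_INVALIDATE_ALL
  });
  if (line == null) {
    return; // Nothing to send for this client.
  }

  // 1. Send line.checkpointLine (a checkpoint or checkpoint_diff) to the client.
  // 2. Fetch data for each bucket, starting from the client's known position.
  const positions = state.getFilteredBucketPositions(line.bucketsToFetch);
  for (const { bucket } of line.bucketsToFetch) {
    const after = positions.get(bucket) ?? '0';
    const batch = await fetchBucketData(bucket, after);
    // ... send batch.data to the client ...
    // 3. Record progress so an interrupted checkpoint can resume (a real loop repeats while hasMore).
    state.updateBucketPosition({ bucket, nextAfter: batch.nextAfter, hasMore: batch.hasMore });
  }
}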