@powersync/service-core 0.8.4 → 0.8.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ import {
   SyncRulesBucketStorage,
   SyncRuleStatus
 } from '../BucketStorage.js';
-import { ChecksumCache, FetchPartialBucketChecksum } from '../ChecksumCache.js';
+import { ChecksumCache, FetchPartialBucketChecksum, PartialChecksum, PartialChecksumMap } from '../ChecksumCache.js';
 import { MongoBucketStorage } from '../MongoBucketStorage.js';
 import { SourceTable } from '../SourceTable.js';
 import { PowerSyncMongo } from './db.js';
@@ -333,7 +333,7 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage {
     return this.checksumCache.getChecksumMap(checkpoint, buckets);
   }
 
-  private async getChecksumsInternal(batch: FetchPartialBucketChecksum[]): Promise<util.ChecksumMap> {
+  private async getChecksumsInternal(batch: FetchPartialBucketChecksum[]): Promise<PartialChecksumMap> {
     if (batch.length == 0) {
       return new Map();
     }
@@ -365,22 +365,32 @@ export class MongoSyncBucketStorage implements SyncRulesBucketStorage {
           }
         },
         {
-          $group: { _id: '$_id.b', checksum_total: { $sum: '$checksum' }, count: { $sum: 1 } }
+          $group: {
+            _id: '$_id.b',
+            checksum_total: { $sum: '$checksum' },
+            count: { $sum: 1 },
+            has_clear_op: {
+              $max: {
+                $cond: [{ $eq: ['$op', 'CLEAR'] }, 1, 0]
+              }
+            }
+          }
         }
       ],
-      { session: undefined }
+      { session: undefined, readConcern: 'snapshot' }
     )
     .toArray();
 
-    return new Map<string, util.BucketChecksum>(
+    return new Map<string, PartialChecksum>(
       aggregate.map((doc) => {
         return [
           doc._id,
           {
             bucket: doc._id,
-            count: doc.count,
-            checksum: Number(BigInt(doc.checksum_total) & 0xffffffffn) & 0xffffffff
-          } satisfies util.BucketChecksum
+            partialCount: doc.count,
+            partialChecksum: Number(BigInt(doc.checksum_total) & 0xffffffffn) & 0xffffffff,
+            isFullChecksum: doc.has_clear_op == 1
+          } satisfies PartialChecksum
         ];
       })
    );
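
Note: `PartialChecksum` and `PartialChecksumMap` come from `ChecksumCache.ts`, which is not part of this diff. A minimal sketch of their likely shape, inferred purely from the object literals and the `$group` stage above (the real definitions may differ):

```ts
// Sketch inferred from usage in this diff; the actual definitions
// live in src/storage/ChecksumCache.ts.
export interface PartialChecksum {
  bucket: string;
  /** Op count over the fetched checkpoint range only. */
  partialCount: number;
  /** 32-bit additive checksum over the fetched range only. */
  partialChecksum: number;
  /**
   * True when the fetched range covers the whole bucket state, e.g. when a
   * CLEAR op was seen (has_clear_op above). In that case the value replaces
   * any previously cached checksum instead of being added to it.
   */
  isFullChecksum: boolean;
}

export type PartialChecksumMap = Map<string, PartialChecksum>;
```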
@@ -1,10 +1,10 @@
 import { SqliteJsonValue } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
-import * as mongo from 'mongodb';
 import * as crypto from 'crypto';
-import { BucketDataDocument } from './models.js';
-import { timestampToOpId } from '../../util/utils.js';
+import * as mongo from 'mongodb';
 import { OplogEntry } from '../../util/protocol-types.js';
+import { timestampToOpId } from '../../util/utils.js';
+import { BucketDataDocument } from './models.js';
 
 /**
  * Lookup serialization must be number-agnostic. I.e. normalize numbers, instead of preserving numbers.
@@ -1,59 +1,6 @@
 import * as t from 'ts-codec';
 import { SqliteJsonValue } from '@powersync/service-sync-rules';
 
-/**
- * For sync2.json
- */
-export interface ContinueCheckpointRequest {
-  /**
-   * Existing bucket states. Only these buckets are synchronized.
-   */
-  buckets: BucketRequest[];
-
-  checkpoint_token: string;
-
-  limit?: number;
-}
-
-export interface SyncNewCheckpointRequest {
-  /**
-   * Existing bucket states. Used if include_data is specified.
-   */
-  buckets?: BucketRequest[];
-
-  request_checkpoint: {
-    /**
-     * Whether or not to include an initial data request.
-     */
-    include_data: boolean;
-
-    /**
-     * Whether or not to compute a checksum.
-     */
-    include_checksum: boolean;
-  };
-
-  limit?: number;
-}
-
-export type SyncRequest = ContinueCheckpointRequest | SyncNewCheckpointRequest;
-
-export interface SyncResponse {
-  /**
-   * Data for the buckets returned. May not have an an entry for each bucket in the request.
-   */
-  data?: SyncBucketData[];
-
-  /**
-   * True if the response limit has been reached, and another request must be made.
-   */
-  has_more: boolean;
-
-  checkpoint_token?: string;
-
-  checkpoint?: Checkpoint;
-}
-
 export const BucketRequest = t.object({
   name: t.string,
 
@@ -195,39 +142,3 @@ export interface BucketChecksum {
    */
   count: number;
 }
-
-export function isContinueCheckpointRequest(request: SyncRequest): request is ContinueCheckpointRequest {
-  return (
-    Array.isArray((request as ContinueCheckpointRequest).buckets) &&
-    typeof (request as ContinueCheckpointRequest).checkpoint_token == 'string'
-  );
-}
-
-export function isSyncNewCheckpointRequest(request: SyncRequest): request is SyncNewCheckpointRequest {
-  return typeof (request as SyncNewCheckpointRequest).request_checkpoint == 'object';
-}
-
-/**
- * For crud.json
- */
-export interface CrudRequest {
-  data: CrudEntry[];
-}
-
-export interface CrudEntry {
-  op: 'PUT' | 'PATCH' | 'DELETE';
-  type: string;
-  id: string;
-  data: string;
-}
-
-export interface CrudResponse {
-  /**
-   * A sync response with a checkpoint >= this checkpoint would contain all the changes in this request.
-   *
-   * Any earlier checkpoint may or may not contain these changes.
-   *
-   * May be empty when the request contains no ops.
-   */
-  checkpoint?: OpId;
-}
package/src/util/utils.ts CHANGED
@@ -1,11 +1,12 @@
 import crypto from 'crypto';
+
+import { logger } from '@powersync/lib-services-framework';
 import * as pgwire from '@powersync/service-jpgwire';
 import { pgwireRows } from '@powersync/service-jpgwire';
-
+import { PartialChecksum } from '../storage/ChecksumCache.js';
 import * as storage from '../storage/storage-index.js';
-import { BucketChecksum, OpId } from './protocol-types.js';
 import { retriedQuery } from './pgwire_utils.js';
-import { logger } from '@powersync/lib-services-framework';
+import { BucketChecksum, OpId } from './protocol-types.js';
 
 export type ChecksumMap = Map<string, BucketChecksum>;
 
@@ -64,14 +65,20 @@ export function addChecksums(a: number, b: number) {
   return (a + b) & 0xffffffff;
 }
 
-export function addBucketChecksums(a: BucketChecksum, b: BucketChecksum | null): BucketChecksum {
+export function addBucketChecksums(a: BucketChecksum, b: PartialChecksum | null): BucketChecksum {
   if (b == null) {
     return a;
+  } else if (b.isFullChecksum) {
+    return {
+      bucket: b.bucket,
+      count: b.partialCount,
+      checksum: b.partialChecksum
+    };
   } else {
     return {
       bucket: a.bucket,
-      count: a.count + b.count,
-      checksum: addChecksums(a.checksum, b.checksum)
+      count: a.count + b.partialCount,
+      checksum: addChecksums(a.checksum, b.partialChecksum)
     };
   }
 }
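
For illustration, here is how the two non-null branches of the new `addBucketChecksums` behave, with made-up numbers (`addChecksums` wraps to 32 bits):

```ts
const cached: BucketChecksum = { bucket: 'global[]', count: 10, checksum: 123 };

// Incremental result: count and checksum are accumulated.
addBucketChecksums(cached, {
  bucket: 'global[]',
  partialCount: 2,
  partialChecksum: 7,
  isFullChecksum: false
});
// => { bucket: 'global[]', count: 12, checksum: 130 }

// Full result (e.g. the fetched range contained a CLEAR op):
// the cached value is discarded outright, not added to.
addBucketChecksums(cached, {
  bucket: 'global[]',
  partialCount: 1,
  partialChecksum: 857217610,
  isFullChecksum: true
});
// => { bucket: 'global[]', count: 1, checksum: 857217610 }
```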
@@ -1,8 +1,8 @@
-import { describe, expect, it } from 'vitest';
-import { BucketChecksum, OpId } from '@/util/protocol-types.js';
+import { ChecksumCache, FetchChecksums, FetchPartialBucketChecksum, PartialChecksum } from '@/storage/ChecksumCache.js';
+import { OpId } from '@/util/protocol-types.js';
+import { addChecksums } from '@/util/util-index.js';
 import * as crypto from 'node:crypto';
-import { addBucketChecksums } from '@/util/util-index.js';
-import { ChecksumCache, FetchChecksums, FetchPartialBucketChecksum } from '@/storage/ChecksumCache.js';
+import { describe, expect, it } from 'vitest';
 
 /**
  * Create a deterministic BucketChecksum based on the bucket name and checkpoint for testing purposes.
@@ -13,28 +13,22 @@ function testHash(bucket: string, checkpoint: OpId) {
   return hash;
 }
 
-function testPartialHash(request: FetchPartialBucketChecksum): BucketChecksum {
+function testPartialHash(request: FetchPartialBucketChecksum): PartialChecksum {
   if (request.start) {
     const a = testHash(request.bucket, request.start);
     const b = testHash(request.bucket, request.end);
-    return addBucketChecksums(
-      {
-        bucket: request.bucket,
-        checksum: b,
-        count: Number(request.end)
-      },
-      {
-        // Subtract a
-        bucket: request.bucket,
-        checksum: -a,
-        count: -Number(request.start)
-      }
-    );
+    return {
+      bucket: request.bucket,
+      partialCount: Number(request.end) - Number(request.start),
+      partialChecksum: addChecksums(b, -a),
+      isFullChecksum: false
+    };
  } else {
    return {
      bucket: request.bucket,
-      checksum: testHash(request.bucket, request.end),
-      count: Number(request.end)
+      partialChecksum: testHash(request.bucket, request.end),
+      partialCount: Number(request.end),
+      isFullChecksum: true
    };
  }
 }
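
The simplification above relies on checksums being additive modulo 2^32: the checksum for the range `(start, end]` is the full checksum at `end` minus the full checksum at `start`, which is exactly what `addChecksums(b, -a)` computes, so the old round-trip through `addBucketChecksums` was unnecessary. The `isFullChecksum` flag distinguishes a whole-bucket result (no `start`) from a range delta.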
@@ -433,4 +427,17 @@ describe('checksum cache', function () {
       [{ bucket: 'test2', end: '123' }]
     ]);
   });
+
+  it('should handle CLEAR/isFullChecksum checksums', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      // This forces a `isFullChecksum: true` result
+      delete batch[0].start;
+      return fetchTestChecksums(batch);
+    });
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([TEST_123]);
+    expect(await cache.getChecksums('1234', ['test'])).toEqual([TEST_1234]);
+  });
 });
@@ -56,6 +56,7 @@ bucket_definitions:
 
     const batchBefore = await oneFromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
     const dataBefore = batchBefore.batch.data;
+    const checksumBefore = await storage.getChecksums(checkpoint, ['global[]']);
 
     expect(dataBefore).toMatchObject([
       {
@@ -82,6 +83,7 @@ bucket_definitions:
 
     const batchAfter = await oneFromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
    const dataAfter = batchAfter.batch.data;
+    const checksumAfter = await storage.getChecksums(checkpoint, ['global[]']);
 
    expect(batchAfter.targetOp).toEqual(3n);
    expect(dataAfter).toMatchObject([
@@ -104,6 +106,8 @@ bucket_definitions:
      }
    ]);
 
+    expect(checksumBefore.get('global[]')).toEqual(checksumAfter.get('global[]'));
+
    validateCompactedBucket(dataBefore, dataAfter);
  });
 
@@ -154,6 +158,7 @@ bucket_definitions:
 
    const batchBefore = await oneFromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
    const dataBefore = batchBefore.batch.data;
+    const checksumBefore = await storage.getChecksums(checkpoint, ['global[]']);
 
    expect(dataBefore).toMatchObject([
      {
@@ -186,6 +191,7 @@ bucket_definitions:
 
    const batchAfter = await oneFromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
    const dataAfter = batchAfter.batch.data;
+    const checksumAfter = await storage.getChecksums(checkpoint, ['global[]']);
 
    expect(batchAfter.targetOp).toEqual(4n);
    expect(dataAfter).toMatchObject([
@@ -201,7 +207,79 @@ bucket_definitions:
        op_id: '4'
      }
    ]);
+    expect(checksumBefore.get('global[]')).toEqual(checksumAfter.get('global[]'));
 
    validateCompactedBucket(dataBefore, dataAfter);
  });
+
+  test('compacting (3)', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data: [select * from test]
+`);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 't1'
+        }
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 't2'
+        }
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'delete',
+        before: {
+          id: 't1'
+        }
+      });
+    });
+
+    const checkpoint1 = result!.flushed_op;
+    const checksumBefore = await storage.getChecksums(checkpoint1, ['global[]']);
+    console.log('before', checksumBefore);
+
+    const result2 = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'delete',
+        before: {
+          id: 't2'
+        }
+      });
+    });
+    const checkpoint2 = result2!.flushed_op;
+
+    await storage.compact(compactOptions);
+
+    const batchAfter = await oneFromAsync(storage.getBucketDataBatch(checkpoint2, new Map([['global[]', '0']])));
+    const dataAfter = batchAfter.batch.data;
+    const checksumAfter = await storage.getChecksums(checkpoint2, ['global[]']);
+
+    expect(batchAfter.targetOp).toEqual(4n);
+    expect(dataAfter).toMatchObject([
+      {
+        checksum: 857217610,
+        op: 'CLEAR',
+        op_id: '4'
+      }
+    ]);
+    expect(checksumAfter.get('global[]')).toEqual({
+      bucket: 'global[]',
+      count: 1,
+      checksum: 857217610
+    });
+  });
 }
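
The new test pins down the invariant that motivates `isFullChecksum`: compacting a bucket's history into a single CLEAR op must leave the bucket's total checksum unchanged, with only the op count collapsing. A sketch of that arithmetic, assuming (as the equality assertions in the earlier tests suggest) that a CLEAR op carries the combined checksum of the ops it replaces:

```ts
// Made-up per-op checksums standing in for the four ops in this test.
const ops = [111, 222, 333, 444];

// Total bucket checksum before compaction (additive mod 2^32).
const before = ops.reduce((a, b) => addChecksums(a, b), 0);

// Compaction replaces all four ops with one CLEAR op carrying the same
// combined checksum: the total is preserved while count drops from 4 to 1.
const clearOp = { op: 'CLEAR', checksum: before };
const after = addChecksums(0, clearOp.checksum);
// after === before
```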