@powersync/service-core 0.8.4 → 0.8.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/CHANGELOG.md +19 -0
  2. package/dist/db/mongo.d.ts +6 -0
  3. package/dist/db/mongo.js +6 -0
  4. package/dist/db/mongo.js.map +1 -1
  5. package/dist/replication/WalStream.js +8 -4
  6. package/dist/replication/WalStream.js.map +1 -1
  7. package/dist/storage/BucketStorage.d.ts +0 -1
  8. package/dist/storage/BucketStorage.js.map +1 -1
  9. package/dist/storage/ChecksumCache.d.ts +19 -1
  10. package/dist/storage/ChecksumCache.js +8 -1
  11. package/dist/storage/ChecksumCache.js.map +1 -1
  12. package/dist/storage/MongoBucketStorage.js +19 -9
  13. package/dist/storage/MongoBucketStorage.js.map +1 -1
  14. package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +1 -1
  15. package/dist/storage/mongo/MongoSyncBucketStorage.js +40 -21
  16. package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
  17. package/dist/storage/mongo/db.d.ts +9 -0
  18. package/dist/storage/mongo/db.js +11 -0
  19. package/dist/storage/mongo/db.js.map +1 -1
  20. package/dist/storage/mongo/util.d.ts +1 -1
  21. package/dist/storage/mongo/util.js.map +1 -1
  22. package/dist/util/protocol-types.d.ts +0 -65
  23. package/dist/util/protocol-types.js +0 -7
  24. package/dist/util/protocol-types.js.map +1 -1
  25. package/dist/util/utils.d.ts +2 -1
  26. package/dist/util/utils.js +10 -3
  27. package/dist/util/utils.js.map +1 -1
  28. package/package.json +4 -4
  29. package/src/db/mongo.ts +7 -0
  30. package/src/replication/WalStream.ts +11 -4
  31. package/src/storage/BucketStorage.ts +0 -2
  32. package/src/storage/ChecksumCache.ts +32 -2
  33. package/src/storage/MongoBucketStorage.ts +18 -9
  34. package/src/storage/mongo/MongoSyncBucketStorage.ts +44 -29
  35. package/src/storage/mongo/db.ts +12 -0
  36. package/src/storage/mongo/util.ts +3 -3
  37. package/src/util/protocol-types.ts +0 -89
  38. package/src/util/utils.ts +13 -6
  39. package/test/src/checksum_cache.test.ts +27 -20
  40. package/test/src/compacting.test.ts +78 -0
  41. package/test/src/data_storage.test.ts +22 -0
  42. package/test/src/sync.test.ts +0 -7
  43. package/test/src/util.ts +14 -3
  44. package/tsconfig.tsbuildinfo +1 -1
package/src/util/protocol-types.ts CHANGED
@@ -1,59 +1,6 @@
 import * as t from 'ts-codec';
 import { SqliteJsonValue } from '@powersync/service-sync-rules';
 
-/**
- * For sync2.json
- */
-export interface ContinueCheckpointRequest {
-  /**
-   * Existing bucket states. Only these buckets are synchronized.
-   */
-  buckets: BucketRequest[];
-
-  checkpoint_token: string;
-
-  limit?: number;
-}
-
-export interface SyncNewCheckpointRequest {
-  /**
-   * Existing bucket states. Used if include_data is specified.
-   */
-  buckets?: BucketRequest[];
-
-  request_checkpoint: {
-    /**
-     * Whether or not to include an initial data request.
-     */
-    include_data: boolean;
-
-    /**
-     * Whether or not to compute a checksum.
-     */
-    include_checksum: boolean;
-  };
-
-  limit?: number;
-}
-
-export type SyncRequest = ContinueCheckpointRequest | SyncNewCheckpointRequest;
-
-export interface SyncResponse {
-  /**
-   * Data for the buckets returned. May not have an an entry for each bucket in the request.
-   */
-  data?: SyncBucketData[];
-
-  /**
-   * True if the response limit has been reached, and another request must be made.
-   */
-  has_more: boolean;
-
-  checkpoint_token?: string;
-
-  checkpoint?: Checkpoint;
-}
-
 export const BucketRequest = t.object({
   name: t.string,
 
@@ -195,39 +142,3 @@ export interface BucketChecksum {
    */
   count: number;
 }
-
-export function isContinueCheckpointRequest(request: SyncRequest): request is ContinueCheckpointRequest {
-  return (
-    Array.isArray((request as ContinueCheckpointRequest).buckets) &&
-    typeof (request as ContinueCheckpointRequest).checkpoint_token == 'string'
-  );
-}
-
-export function isSyncNewCheckpointRequest(request: SyncRequest): request is SyncNewCheckpointRequest {
-  return typeof (request as SyncNewCheckpointRequest).request_checkpoint == 'object';
-}
-
-/**
- * For crud.json
- */
-export interface CrudRequest {
-  data: CrudEntry[];
-}
-
-export interface CrudEntry {
-  op: 'PUT' | 'PATCH' | 'DELETE';
-  type: string;
-  id: string;
-  data: string;
-}
-
-export interface CrudResponse {
-  /**
-   * A sync response with a checkpoint >= this checkpoint would contain all the changes in this request.
-   *
-   * Any earlier checkpoint may or may not contain these changes.
-   *
-   * May be empty when the request contains no ops.
-   */
-  checkpoint?: OpId;
-}
package/src/util/utils.ts CHANGED
@@ -1,11 +1,12 @@
 import crypto from 'crypto';
+
+import { logger } from '@powersync/lib-services-framework';
 import * as pgwire from '@powersync/service-jpgwire';
 import { pgwireRows } from '@powersync/service-jpgwire';
-
+import { PartialChecksum } from '../storage/ChecksumCache.js';
 import * as storage from '../storage/storage-index.js';
-import { BucketChecksum, OpId } from './protocol-types.js';
 import { retriedQuery } from './pgwire_utils.js';
-import { logger } from '@powersync/lib-services-framework';
+import { BucketChecksum, OpId } from './protocol-types.js';
 
 export type ChecksumMap = Map<string, BucketChecksum>;
 
@@ -64,14 +65,20 @@ export function addChecksums(a: number, b: number) {
   return (a + b) & 0xffffffff;
 }
 
-export function addBucketChecksums(a: BucketChecksum, b: BucketChecksum | null): BucketChecksum {
+export function addBucketChecksums(a: BucketChecksum, b: PartialChecksum | null): BucketChecksum {
   if (b == null) {
     return a;
+  } else if (b.isFullChecksum) {
+    return {
+      bucket: b.bucket,
+      count: b.partialCount,
+      checksum: b.partialChecksum
+    };
   } else {
     return {
       bucket: a.bucket,
-      count: a.count + b.count,
-      checksum: addChecksums(a.checksum, b.checksum)
+      count: a.count + b.partialCount,
+      checksum: addChecksums(a.checksum, b.partialChecksum)
     };
   }
 }
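
Note (not part of the published diff): the change above means a cached bucket checksum can now either be extended by a delta covering only the new operations, or replaced outright when the fetched value is flagged as a full checksum. A minimal self-contained sketch of that arithmetic, with the two type shapes paraphrased from this diff and the concrete numbers invented purely for illustration:

// Shapes paraphrased from the diff; not imported from the package.
interface BucketChecksum { bucket: string; checksum: number; count: number; }
interface PartialChecksum { bucket: string; partialChecksum: number; partialCount: number; isFullChecksum: boolean; }

// 32-bit wrap-around addition, as in utils.ts above.
function addChecksums(a: number, b: number): number {
  return (a + b) & 0xffffffff;
}

// Same branching as the new addBucketChecksums: null keeps the cached value,
// isFullChecksum replaces it, otherwise the delta is added on.
function addBucketChecksums(a: BucketChecksum, b: PartialChecksum | null): BucketChecksum {
  if (b == null) return a;
  if (b.isFullChecksum) return { bucket: b.bucket, count: b.partialCount, checksum: b.partialChecksum };
  return { bucket: a.bucket, count: a.count + b.partialCount, checksum: addChecksums(a.checksum, b.partialChecksum) };
}

// Hypothetical usage: a checksum cached at op 100, extended by a delta covering ops 101..123.
const cached: BucketChecksum = { bucket: 'global[]', checksum: 123456, count: 100 };
const delta: PartialChecksum = { bucket: 'global[]', partialChecksum: 7890, partialCount: 23, isFullChecksum: false };
console.log(addBucketChecksums(cached, delta)); // { bucket: 'global[]', count: 123, checksum: 131346 }
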
package/test/src/checksum_cache.test.ts CHANGED
@@ -1,8 +1,8 @@
-import { describe, expect, it } from 'vitest';
-import { BucketChecksum, OpId } from '@/util/protocol-types.js';
+import { ChecksumCache, FetchChecksums, FetchPartialBucketChecksum, PartialChecksum } from '@/storage/ChecksumCache.js';
+import { OpId } from '@/util/protocol-types.js';
+import { addChecksums } from '@/util/util-index.js';
 import * as crypto from 'node:crypto';
-import { addBucketChecksums } from '@/util/util-index.js';
-import { ChecksumCache, FetchChecksums, FetchPartialBucketChecksum } from '@/storage/ChecksumCache.js';
+import { describe, expect, it } from 'vitest';
 
 /**
  * Create a deterministic BucketChecksum based on the bucket name and checkpoint for testing purposes.
@@ -13,28 +13,22 @@ function testHash(bucket: string, checkpoint: OpId) {
   return hash;
 }
 
-function testPartialHash(request: FetchPartialBucketChecksum): BucketChecksum {
+function testPartialHash(request: FetchPartialBucketChecksum): PartialChecksum {
   if (request.start) {
     const a = testHash(request.bucket, request.start);
     const b = testHash(request.bucket, request.end);
-    return addBucketChecksums(
-      {
-        bucket: request.bucket,
-        checksum: b,
-        count: Number(request.end)
-      },
-      {
-        // Subtract a
-        bucket: request.bucket,
-        checksum: -a,
-        count: -Number(request.start)
-      }
-    );
+    return {
+      bucket: request.bucket,
+      partialCount: Number(request.end) - Number(request.start),
+      partialChecksum: addChecksums(b, -a),
+      isFullChecksum: false
+    };
   } else {
     return {
       bucket: request.bucket,
-      checksum: testHash(request.bucket, request.end),
-      count: Number(request.end)
+      partialChecksum: testHash(request.bucket, request.end),
+      partialCount: Number(request.end),
+      isFullChecksum: true
     };
   }
 }
@@ -433,4 +427,17 @@ describe('checksum cache', function () {
       [{ bucket: 'test2', end: '123' }]
     ]);
   });
+
+  it('should handle CLEAR/isFullChecksum checksums', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      // This forces a `isFullChecksum: true` result
+      delete batch[0].start;
+      return fetchTestChecksums(batch);
+    });
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([TEST_123]);
+    expect(await cache.getChecksums('1234', ['test'])).toEqual([TEST_1234]);
+  });
 });
package/test/src/compacting.test.ts CHANGED
@@ -56,6 +56,7 @@ bucket_definitions:
 
     const batchBefore = await oneFromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
     const dataBefore = batchBefore.batch.data;
+    const checksumBefore = await storage.getChecksums(checkpoint, ['global[]']);
 
     expect(dataBefore).toMatchObject([
       {
@@ -82,6 +83,7 @@ bucket_definitions:
 
     const batchAfter = await oneFromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
     const dataAfter = batchAfter.batch.data;
+    const checksumAfter = await storage.getChecksums(checkpoint, ['global[]']);
 
     expect(batchAfter.targetOp).toEqual(3n);
     expect(dataAfter).toMatchObject([
@@ -104,6 +106,8 @@ bucket_definitions:
       }
     ]);
 
+    expect(checksumBefore.get('global[]')).toEqual(checksumAfter.get('global[]'));
+
     validateCompactedBucket(dataBefore, dataAfter);
   });
 
@@ -154,6 +158,7 @@ bucket_definitions:
 
     const batchBefore = await oneFromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
     const dataBefore = batchBefore.batch.data;
+    const checksumBefore = await storage.getChecksums(checkpoint, ['global[]']);
 
     expect(dataBefore).toMatchObject([
       {
@@ -186,6 +191,7 @@ bucket_definitions:
 
     const batchAfter = await oneFromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
     const dataAfter = batchAfter.batch.data;
+    const checksumAfter = await storage.getChecksums(checkpoint, ['global[]']);
 
     expect(batchAfter.targetOp).toEqual(4n);
     expect(dataAfter).toMatchObject([
@@ -201,7 +207,79 @@ bucket_definitions:
         op_id: '4'
       }
     ]);
+    expect(checksumBefore.get('global[]')).toEqual(checksumAfter.get('global[]'));
 
     validateCompactedBucket(dataBefore, dataAfter);
   });
+
+  test('compacting (3)', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data: [select * from test]
+    `);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 't1'
+        }
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 't2'
+        }
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'delete',
+        before: {
+          id: 't1'
+        }
+      });
+    });
+
+    const checkpoint1 = result!.flushed_op;
+    const checksumBefore = await storage.getChecksums(checkpoint1, ['global[]']);
+    console.log('before', checksumBefore);
+
+    const result2 = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'delete',
+        before: {
+          id: 't2'
+        }
+      });
+    });
+    const checkpoint2 = result2!.flushed_op;
+
+    await storage.compact(compactOptions);
+
+    const batchAfter = await oneFromAsync(storage.getBucketDataBatch(checkpoint2, new Map([['global[]', '0']])));
+    const dataAfter = batchAfter.batch.data;
+    const checksumAfter = await storage.getChecksums(checkpoint2, ['global[]']);
+
+    expect(batchAfter.targetOp).toEqual(4n);
+    expect(dataAfter).toMatchObject([
+      {
+        checksum: 857217610,
+        op: 'CLEAR',
+        op_id: '4'
+      }
+    ]);
+    expect(checksumAfter.get('global[]')).toEqual({
+      bucket: 'global[]',
+      count: 1,
+      checksum: 857217610
+    });
+  });
 }
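
Note (not part of the published diff): the checksum assertions added above all rely on the same property - per-operation checksums combine additively in 32-bit wrap-around arithmetic (see addChecksums in utils.ts), so compaction can rewrite operations (here into a single CLEAR op) while the bucket checksum reported by getChecksums stays consistent. A standalone illustration of that idea, with invented per-op checksum values:

// Invented values - only the additive property is being illustrated.
function addChecksums(a: number, b: number): number {
  return (a + b) & 0xffffffff;
}

const ops = [
  { op: 'PUT', checksum: 123456789 },
  { op: 'PUT', checksum: -987654321 },
  { op: 'REMOVE', checksum: 55555 }
];
const bucketTotal = ops.reduce((sum, o) => addChecksums(sum, o.checksum), 0);

// After compaction the same range can be represented by a single CLEAR op carrying the
// combined checksum, so a fresh sync still arrives at the same bucket checksum.
const compacted = [{ op: 'CLEAR', checksum: bucketTotal }];
const totalAfter = compacted.reduce((sum, o) => addChecksums(sum, o.checksum), 0);
console.log(bucketTotal === totalAfter); // true
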
package/test/src/data_storage.test.ts CHANGED
@@ -1294,4 +1294,26 @@ bucket_definitions:
 
     expect(getBatchMeta(batch3)).toEqual(null);
   });
+
+  test('empty storage metrics', async () => {
+    const f = await factory({ dropAll: true });
+
+    const metrics = await f.getStorageMetrics();
+    expect(metrics).toEqual({
+      operations_size_bytes: 0,
+      parameters_size_bytes: 0,
+      replication_size_bytes: 0
+    });
+
+    const r = await f.configureSyncRules('bucket_definitions: {}');
+    const storage = f.getInstance(r.persisted_sync_rules!.parsed());
+    await storage.autoActivate();
+
+    const metrics2 = await f.getStorageMetrics();
+    expect(metrics2).toEqual({
+      operations_size_bytes: 0,
+      parameters_size_bytes: 0,
+      replication_size_bytes: 0
+    });
+  });
 }
package/test/src/sync.test.ts CHANGED
@@ -5,7 +5,6 @@ import { JSONBig } from '@powersync/service-jsonbig';
 import { RequestParameters } from '@powersync/service-sync-rules';
 import * as timers from 'timers/promises';
 import { describe, expect, test } from 'vitest';
-import { ZERO_LSN } from '../../src/replication/WalStream.js';
 import { streamResponse } from '../../src/sync/sync.js';
 import { makeTestTable, MONGO_STORAGE_FACTORY, StorageFactory } from './util.js';
 
@@ -33,7 +32,6 @@ function defineTests(factory: StorageFactory) {
     });
 
     const storage = await f.getInstance(syncRules.parsed());
-    await storage.setSnapshotDone(ZERO_LSN);
     await storage.autoActivate();
 
     const result = await storage.startBatch({}, async (batch) => {
@@ -82,7 +80,6 @@ function defineTests(factory: StorageFactory) {
     });
 
     const storage = await f.getInstance(syncRules.parsed());
-    await storage.setSnapshotDone(ZERO_LSN);
     await storage.autoActivate();
 
     const result = await storage.startBatch({}, async (batch) => {
@@ -125,7 +122,6 @@ function defineTests(factory: StorageFactory) {
     });
 
     const storage = await f.getInstance(syncRules.parsed());
-    await storage.setSnapshotDone(ZERO_LSN);
     await storage.autoActivate();
 
     const stream = streamResponse({
@@ -152,7 +148,6 @@ function defineTests(factory: StorageFactory) {
     });
 
     const storage = await f.getInstance(syncRules.parsed());
-    await storage.setSnapshotDone(ZERO_LSN);
     await storage.autoActivate();
 
     const stream = streamResponse({
@@ -211,7 +206,6 @@ function defineTests(factory: StorageFactory) {
     });
 
     const storage = await f.getInstance(syncRules.parsed());
-    await storage.setSnapshotDone(ZERO_LSN);
     await storage.autoActivate();
 
     const exp = Date.now() / 1000 + 0.1;
@@ -249,7 +243,6 @@ function defineTests(factory: StorageFactory) {
     });
 
     const storage = await f.getInstance(syncRules.parsed());
-    await storage.setSnapshotDone(ZERO_LSN);
     await storage.autoActivate();
 
     await storage.startBatch({}, async (batch) => {
package/test/src/util.ts CHANGED
@@ -22,11 +22,22 @@ Metrics.getInstance().resetCounters();
 
 export const TEST_URI = env.PG_TEST_URL;
 
-export type StorageFactory = () => Promise<BucketStorageFactory>;
+export interface StorageOptions {
+  /**
+   * By default, collections are only cleared/
+   * Setting this to true will drop the collections completely.
+   */
+  dropAll?: boolean;
+}
+export type StorageFactory = (options?: StorageOptions) => Promise<BucketStorageFactory>;
 
-export const MONGO_STORAGE_FACTORY: StorageFactory = async () => {
+export const MONGO_STORAGE_FACTORY: StorageFactory = async (options?: StorageOptions) => {
   const db = await connectMongo();
-  await db.clear();
+  if (options?.dropAll) {
+    await db.drop();
+  } else {
+    await db.clear();
+  }
   return new MongoBucketStorage(db, { slot_name_prefix: 'test_' });
 };
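
Note (not part of the published diff): with the widened StorageFactory signature above, a test can now choose between the default cleared-collections setup and a fully dropped database. Usage, mirroring the new 'empty storage metrics' test in data_storage.test.ts:

// Default behaviour: collections are cleared between tests.
const storage = await MONGO_STORAGE_FACTORY();

// New option: drop the collections entirely, so storage metrics start from zero.
const emptyStorage = await MONGO_STORAGE_FACTORY({ dropAll: true });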