@powersync/service-core 0.0.0-dev-20240620173428 → 0.0.0-dev-20240708103353

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
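The central API change in this diff is that @powersync/service-sync-rules now exposes RequestParameters where SyncParameters was used before, and call sites construct it from the JWT payload and client parameters instead of assembling a plain object. A minimal before/after sketch of that call-site change, inferred from the test updates further down (the constructor signature is taken from those tests, not from library documentation):

    import { RequestParameters } from '@powersync/service-sync-rules';

    // Before: a plain object with explicit token/user parameter maps.
    // const syncParams = { token_parameters: { user_id: 'u1' }, user_parameters: {} };

    // After: constructed from the JWT payload and the client-supplied parameters;
    // the `sub` claim takes over the role of the old user_id token parameter.
    const syncParams = new RequestParameters({ sub: 'u1' }, {});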
package/src/sync/sync.ts CHANGED
@@ -1,5 +1,5 @@
  import { JSONBig, JsonContainer } from '@powersync/service-jsonbig';
- import { SyncParameters } from '@powersync/service-sync-rules';
+ import { RequestParameters } from '@powersync/service-sync-rules';
  import { Semaphore } from 'async-mutex';
  import { AbortError } from 'ix/aborterror.js';

@@ -7,10 +7,10 @@ import * as auth from '../auth/auth-index.js';
  import * as storage from '../storage/storage-index.js';
  import * as util from '../util/util-index.js';

+ import { logger } from '@powersync/lib-services-framework';
+ import { Metrics } from '../metrics/Metrics.js';
  import { mergeAsyncIterables } from './merge.js';
  import { TokenStreamOptions, tokenStream } from './util.js';
- import { Metrics } from '../metrics/Metrics.js';
- import { logger } from '@powersync/lib-services-framework';

  /**
   * Maximum number of connections actively fetching data.
@@ -21,7 +21,7 @@ const syncSemaphore = new Semaphore(MAX_ACTIVE_CONNECTIONS);
  export interface SyncStreamParameters {
    storage: storage.BucketStorageFactory;
    params: util.StreamingSyncRequest;
-   syncParams: SyncParameters;
+   syncParams: RequestParameters;
    token: auth.JwtPayload;
    /**
     * If this signal is aborted, the stream response ends as soon as possible, without error.
@@ -71,7 +71,7 @@ export async function* streamResponse(
  async function* streamResponseInner(
    storage: storage.BucketStorageFactory,
    params: util.StreamingSyncRequest,
-   syncParams: SyncParameters,
+   syncParams: RequestParameters,
    signal: AbortSignal
  ): AsyncGenerator<util.StreamingSyncLine | string | null> {
    // Bucket state of bucket id -> op_id.
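For callers of streamResponse, the option names stay the same; only the type of syncParams changes. A hedged sketch of the updated caller shape, assuming the fields shown in the SyncStreamParameters hunk above (the storage, request, tokenPayload and signal values are illustrative placeholders, not part of this diff):

    import { RequestParameters } from '@powersync/service-sync-rules';

    // Illustrative only: field names follow the SyncStreamParameters interface above.
    const stream = streamResponse({
      storage: bucketStorageFactory,
      params: request,
      syncParams: new RequestParameters(tokenPayload, {}),
      token: tokenPayload,
      signal
    });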
@@ -1,4 +1,4 @@
- import { SqlSyncRules } from '@powersync/service-sync-rules';
+ import { RequestParameters, SqlSyncRules } from '@powersync/service-sync-rules';
  import * as bson from 'bson';
  import { describe, expect, test } from 'vitest';
  import { SourceTable } from '../../src/storage/SourceTable.js';
@@ -289,12 +289,7 @@ bucket_definitions:

    const checkpoint = result!.flushed_op;

-   const parameters = {
-     token_parameters: {
-       user_id: 'u1'
-     },
-     user_parameters: {}
-   };
+   const parameters = new RequestParameters({ sub: 'u1' }, {});

    const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];

@@ -358,12 +353,7 @@ bucket_definitions:

    const checkpoint = result!.flushed_op;

-   const parameters = {
-     token_parameters: {
-       user_id: 'unknown'
-     },
-     user_parameters: {}
-   };
+   const parameters = new RequestParameters({ sub: 'unknown' }, {});

    const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];

@@ -442,12 +432,7 @@ bucket_definitions:

    const checkpoint = result!.flushed_op;

-   const parameters = {
-     token_parameters: {
-       user_id: 'u1'
-     },
-     user_parameters: {}
-   };
+   const parameters = new RequestParameters({ sub: 'u1' }, {});

    // Test intermediate values - could be moved to sync_rules.test.ts
    const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
@@ -897,6 +882,183 @@ bucket_definitions:
    ]);
  });

+ test('changed data with replica identity full', async () => {
+   const sync_rules = SqlSyncRules.fromYaml(`
+ bucket_definitions:
+   global:
+     data:
+       - SELECT id, description FROM "test"
+ `);
+   const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+   const sourceTable = makeTestTable('test', ['id', 'description']);
+
+   // Pre-setup
+   const result1 = await storage.startBatch({}, async (batch) => {
+     await batch.save({
+       sourceTable,
+       tag: 'insert',
+       after: {
+         id: 'test1',
+         description: 'test1a'
+       }
+     });
+   });
+
+   const checkpoint1 = result1?.flushed_op ?? '0';
+
+   const result2 = await storage.startBatch({}, async (batch) => {
+     // Unchanged, but has a before id
+     await batch.save({
+       sourceTable,
+       tag: 'update',
+       before: {
+         id: 'test1',
+         description: 'test1a'
+       },
+       after: {
+         id: 'test1',
+         description: 'test1b'
+       }
+     });
+   });
+
+   const result3 = await storage.startBatch({}, async (batch) => {
+     // Delete
+     await batch.save({
+       sourceTable,
+       tag: 'delete',
+       before: {
+         id: 'test1',
+         description: 'test1b'
+       },
+       after: undefined
+     });
+   });
+
+   const checkpoint3 = result3!.flushed_op;
+
+   const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
+   const data = batch[0].data.map((d) => {
+     return {
+       op: d.op,
+       object_id: d.object_id,
+       data: d.data,
+       subkey: d.subkey
+     };
+   });
+
+   // Operations must be in this order
+   expect(data).toEqual([
+     // 2
+     // The REMOVE is expected because the subkey changes
+     {
+       op: 'REMOVE',
+       object_id: 'test1',
+       data: null,
+       subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+     },
+     {
+       op: 'PUT',
+       object_id: 'test1',
+       data: JSON.stringify({ id: 'test1', description: 'test1b' }),
+       subkey: '6544e3899293153fa7b38331/500e9b68-a2fd-51ff-9c00-313e2fb9f562'
+     },
+     // 3
+     {
+       op: 'REMOVE',
+       object_id: 'test1',
+       data: null,
+       subkey: '6544e3899293153fa7b38331/500e9b68-a2fd-51ff-9c00-313e2fb9f562'
+     }
+   ]);
+ });
+
+ test('unchanged data with replica identity full', async () => {
+   const sync_rules = SqlSyncRules.fromYaml(`
+ bucket_definitions:
+   global:
+     data:
+       - SELECT id, description FROM "test"
+ `);
+   const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+   const sourceTable = makeTestTable('test', ['id', 'description']);
+
+   // Pre-setup
+   const result1 = await storage.startBatch({}, async (batch) => {
+     await batch.save({
+       sourceTable,
+       tag: 'insert',
+       after: {
+         id: 'test1',
+         description: 'test1a'
+       }
+     });
+   });
+
+   const checkpoint1 = result1?.flushed_op ?? '0';
+
+   const result2 = await storage.startBatch({}, async (batch) => {
+     // Unchanged, but has a before id
+     await batch.save({
+       sourceTable,
+       tag: 'update',
+       before: {
+         id: 'test1',
+         description: 'test1a'
+       },
+       after: {
+         id: 'test1',
+         description: 'test1a'
+       }
+     });
+   });
+
+   const result3 = await storage.startBatch({}, async (batch) => {
+     // Delete
+     await batch.save({
+       sourceTable,
+       tag: 'delete',
+       before: {
+         id: 'test1',
+         description: 'test1a'
+       },
+       after: undefined
+     });
+   });
+
+   const checkpoint3 = result3!.flushed_op;
+
+   const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
+   const data = batch[0].data.map((d) => {
+     return {
+       op: d.op,
+       object_id: d.object_id,
+       data: d.data,
+       subkey: d.subkey
+     };
+   });
+
+   // Operations must be in this order
+   expect(data).toEqual([
+     // 2
+     {
+       op: 'PUT',
+       object_id: 'test1',
+       data: JSON.stringify({ id: 'test1', description: 'test1a' }),
+       subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+     },
+     // 3
+     {
+       op: 'REMOVE',
+       object_id: 'test1',
+       data: null,
+       subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+     }
+   ]);
+ });
+
  test('large batch', async () => {
    // Test syncing a batch of data that is small in count,
    // but large enough in size to be split over multiple returned batches.
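The two new tests above pin down how updates behave under REPLICA IDENTITY FULL: when the replicated row actually changes, its subkey changes, so the old entry is REMOVEd before the new PUT; when an update leaves the row unchanged, the subkey is stable and only a PUT is emitted. A rough illustration of why the subkey moves, assuming purely for illustration (this is not the service's actual implementation) that the subkey is a v5 UUID derived from the full replica identity:

    import { v5 as uuidv5 } from 'uuid';

    // Hypothetical namespace and helper, for illustration only.
    const SUBKEY_NAMESPACE = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';

    function replicaIdentitySubkey(table: string, row: Record<string, unknown>): string {
      // With REPLICA IDENTITY FULL every column is part of the identity,
      // so changing `description` yields a different subkey for the same id.
      return uuidv5(JSON.stringify({ table, row }), SUBKEY_NAMESPACE);
    }

    // test1a -> test1b changes the derived subkey, hence REMOVE(old) + PUT(new);
    // an update that writes identical values keeps the subkey, hence a single PUT.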
@@ -62,7 +62,7 @@ function defineSlowTests(factory: StorageFactory) {
  bucket_definitions:
    global:
      data:
-       - SELECT id, description FROM "test_data"
+       - SELECT * FROM "test_data"
  `;
  const syncRules = await f.updateSyncRules({ content: syncRuleContent });
  const storage = f.getInstance(syncRules.parsed());
@@ -76,7 +76,10 @@ bucket_definitions:
  walStream = new WalStream(options);

  await pool.query(`DROP TABLE IF EXISTS test_data`);
- await pool.query(`CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`);
+ await pool.query(
+   `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text, num decimal)`
+ );
+ await pool.query(`ALTER TABLE test_data REPLICA IDENTITY FULL`);

  await walStream.initReplication(replicationConnection);
  await storage.autoActivate();
@@ -88,14 +91,17 @@ bucket_definitions:

  while (!abort && Date.now() - start < TEST_DURATION_MS) {
    const bg = async () => {
-     for (let j = 0; j < 5 && !abort; j++) {
-       const n = Math.floor(Math.random() * 50);
+     for (let j = 0; j < 1 && !abort; j++) {
+       const n = 1;
        let statements: pgwire.Statement[] = [];
        for (let i = 0; i < n; i++) {
          const description = `test${i}`;
          statements.push({
-           statement: `INSERT INTO test_data(description) VALUES($1) returning id as test_id`,
-           params: [{ type: 'varchar', value: description }]
+           statement: `INSERT INTO test_data(description, num) VALUES($1, $2) returning id as test_id`,
+           params: [
+             { type: 'varchar', value: description },
+             { type: 'float8', value: Math.random() }
+           ]
          });
        }
        const results = await pool.query(...statements);
@@ -104,6 +110,24 @@ bucket_definitions:
        });
        await new Promise((resolve) => setTimeout(resolve, Math.random() * 30));

+       if (Math.random() > 0.5) {
+         const updateStatements: pgwire.Statement[] = ids.map((id) => {
+           return {
+             statement: `UPDATE test_data SET num = $2 WHERE id = $1`,
+             params: [
+               { type: 'uuid', value: id },
+               { type: 'float8', value: Math.random() }
+             ]
+           };
+         });
+
+         await pool.query(...updateStatements);
+         if (Math.random() > 0.5) {
+           // Special case - an update that doesn't change data
+           await pool.query(...updateStatements);
+         }
+       }
+
        const deleteStatements: pgwire.Statement[] = ids.map((id) => {
          return {
            statement: `DELETE FROM test_data WHERE id = $1`,
@@ -129,6 +153,21 @@ bucket_definitions:
      return bson.deserialize((doc.data as mongo.Binary).buffer) as SqliteRow;
    });
    expect(transformed).toEqual([]);
+
+   // Check that each PUT has a REMOVE
+   const ops = await f.db.bucket_data.find().sort({ _id: 1 }).toArray();
+   let active = new Set<string>();
+   for (let op of ops) {
+     const key = op.source_key.toHexString();
+     if (op.op == 'PUT') {
+       active.add(key);
+     } else if (op.op == 'REMOVE') {
+       active.delete(key);
+     }
+   }
+   if (active.size > 0) {
+     throw new Error(`${active.size} rows not removed`);
+   }
  }

  abortController.abort();
@@ -8,6 +8,7 @@ import { JSONBig } from '@powersync/service-jsonbig';
  import { streamResponse } from '../../src/sync/sync.js';
  import * as timers from 'timers/promises';
  import { lsnMakeComparable } from '@powersync/service-jpgwire';
+ import { RequestParameters } from '@powersync/service-sync-rules';

  describe('sync - mongodb', function () {
    defineTests(MONGO_STORAGE_FACTORY);
@@ -77,7 +78,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: true
      },
-     syncParams: { token_parameters: {}, user_parameters: {} },
+     syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: Date.now() / 1000 + 10 } as any
    });

@@ -117,7 +118,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: false
      },
-     syncParams: { token_parameters: {}, user_parameters: {} },
+     syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: Date.now() / 1000 + 10 } as any
    });

@@ -145,7 +146,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: true
      },
-     syncParams: { token_parameters: {}, user_parameters: {} },
+     syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: 0 } as any
    });

@@ -171,7 +172,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: true
      },
-     syncParams: { token_parameters: {}, user_parameters: {} },
+     syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: Date.now() / 1000 + 10 } as any
    });
    const iter = stream[Symbol.asyncIterator]();
@@ -231,7 +232,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: true
      },
-     syncParams: { token_parameters: {}, user_parameters: {} },
+     syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: exp } as any
    });
    const iter = stream[Symbol.asyncIterator]();