@powersync/service-core 0.0.0-dev-20240620165410 → 0.0.0-dev-20240708103353
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -5
- package/dist/auth/JwtPayload.d.ts +6 -2
- package/dist/auth/KeyStore.js +1 -7
- package/dist/auth/KeyStore.js.map +1 -1
- package/dist/migrations/migrations.js +1 -1
- package/dist/migrations/migrations.js.map +1 -1
- package/dist/migrations/store/migration-store.d.ts +2 -2
- package/dist/migrations/store/migration-store.js +2 -2
- package/dist/migrations/store/migration-store.js.map +1 -1
- package/dist/routes/endpoints/socket-route.js +5 -5
- package/dist/routes/endpoints/socket-route.js.map +1 -1
- package/dist/routes/endpoints/sync-stream.d.ts +1 -1
- package/dist/routes/endpoints/sync-stream.js +4 -4
- package/dist/routes/endpoints/sync-stream.js.map +1 -1
- package/dist/routes/hooks.js +0 -1
- package/dist/routes/hooks.js.map +1 -1
- package/dist/storage/BucketStorage.d.ts +3 -0
- package/dist/storage/BucketStorage.js.map +1 -1
- package/dist/storage/mongo/MongoBucketBatch.js +7 -2
- package/dist/storage/mongo/MongoBucketBatch.js.map +1 -1
- package/dist/sync/sync.d.ts +2 -2
- package/dist/sync/sync.js +2 -2
- package/dist/sync/sync.js.map +1 -1
- package/package.json +5 -5
- package/src/auth/JwtPayload.ts +6 -2
- package/src/auth/KeyStore.ts +1 -7
- package/src/migrations/migrations.ts +1 -1
- package/src/migrations/store/migration-store.ts +3 -3
- package/src/routes/endpoints/socket-route.ts +6 -9
- package/src/routes/endpoints/sync-stream.ts +4 -7
- package/src/routes/hooks.ts +0 -1
- package/src/storage/BucketStorage.ts +5 -0
- package/src/storage/mongo/MongoBucketBatch.ts +11 -2
- package/src/sync/sync.ts +5 -5
- package/test/src/data_storage.test.ts +181 -19
- package/test/src/slow_tests.test.ts +45 -6
- package/test/src/sync.test.ts +6 -5
- package/tsconfig.tsbuildinfo +1 -1
- package/test/src/sql_functions.test.ts +0 -254
- package/test/src/sql_operators.test.ts +0 -132
- package/test/src/sync_rules.test.ts +0 -1053
package/src/routes/endpoints/sync-stream.ts CHANGED

```diff
@@ -1,13 +1,13 @@
-import { Readable } from 'stream';
-import { SyncParameters, normalizeTokenParameters } from '@powersync/service-sync-rules';
 import { errors, logger, router, schema } from '@powersync/lib-services-framework';
+import { RequestParameters } from '@powersync/service-sync-rules';
+import { Readable } from 'stream';
 
 import * as sync from '../../sync/sync-index.js';
 import * as util from '../../util/util-index.js';
 
+import { Metrics } from '../../metrics/Metrics.js';
 import { authUser } from '../auth.js';
 import { routeDefinition } from '../router.js';
-import { Metrics } from '../../metrics/Metrics.js';
 
 export enum SyncRoutes {
   STREAM = '/sync/stream'
@@ -30,10 +30,7 @@ export const syncStreamed = routeDefinition({
     }
 
     const params: util.StreamingSyncRequest = payload.params;
-    const syncParams = normalizeTokenParameters(
-      payload.context.token_payload!.parameters ?? {},
-      payload.params.parameters ?? {}
-    );
+    const syncParams = new RequestParameters(payload.context.token_payload!, payload.params.parameters ?? {});
 
     const storage = system.storage;
     // Sanity check before we start the stream
```
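The change above replaces the removed `normalizeTokenParameters(...)` call with a `RequestParameters` constructor that receives the whole token payload plus the client-supplied parameters. A minimal sketch of the new call shape, with placeholder values (the `{ sub: ... }` payload shape is taken from the test updates later in this diff):

```ts
import { RequestParameters } from '@powersync/service-sync-rules';

// Placeholder values for illustration; in the route handler these come from
// payload.context.token_payload! and payload.params.parameters ?? {}.
const tokenPayload = { sub: 'user-1', parameters: { org: 'acme' } };
const clientParams = { device_id: 'mobile-1' };

// Before (removed): only the parameter claims were merged.
//   const syncParams = normalizeTokenParameters(tokenPayload.parameters ?? {}, clientParams);
// After: the full token payload is passed through, so top-level claims such as
// `sub` remain available to the sync rules.
const syncParams = new RequestParameters(tokenPayload, clientParams);
```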
package/src/routes/hooks.ts CHANGED

package/src/storage/BucketStorage.ts CHANGED

```diff
@@ -367,7 +367,12 @@ export interface SaveInsert {
 export interface SaveUpdate {
   tag: 'update';
   sourceTable: SourceTable;
+
+  /**
+   * This is only present when the id has changed, and will only contain replica identity columns.
+   */
   before?: SqliteRow;
+
   /**
    * A null value means null column.
    *
```
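The new doc comment pins down when `before` is populated on `SaveUpdate`. A hedged sketch of what a consumer of this interface can now rely on, using local stand-in types rather than the real imports:

```ts
// Local stand-ins for illustration; the real types live in BucketStorage.ts.
type SqliteRow = Record<string, unknown>;

interface SaveUpdateLike {
  tag: 'update';
  // Per the new contract: only present when the replication id changed,
  // and then it contains only the replica identity columns.
  before?: SqliteRow;
  after: SqliteRow;
}

function applyUpdate(op: SaveUpdateLike): void {
  if (op.before != null) {
    // The row's id changed: state stored under the old key must be cleaned up first.
    console.log('remove state keyed by', op.before);
  }
  console.log('store new state', op.after);
}
```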
package/src/storage/mongo/MongoBucketBatch.ts CHANGED

```diff
@@ -187,6 +187,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
       }
       const currentData = current_data_lookup.get(op.internalBeforeKey) ?? null;
       if (currentData != null) {
+        // If it will be used again later, it will be set again using nextData below
         current_data_lookup.delete(op.internalBeforeKey);
       }
       const nextData = this.saveOperation(persistedBatch!, op, currentData, op_seq);
@@ -242,6 +243,10 @@ export class MongoBucketBatch implements BucketStorageBatch {
         // Not an error if we re-apply a transaction
         existing_buckets = [];
         existing_lookups = [];
+        // Log to help with debugging if there was a consistency issue
+        logger.warn(
+          `Cannot find previous record for update on ${record.sourceTable.qualifiedName}: ${beforeId} / ${record.before?.id}`
+        );
       } else {
         const data = bson.deserialize((result.data as mongo.Binary).buffer, BSON_DESERIALIZE_OPTIONS) as SqliteRow;
         existing_buckets = result.buckets;
@@ -254,6 +259,10 @@ export class MongoBucketBatch implements BucketStorageBatch {
         // Not an error if we re-apply a transaction
         existing_buckets = [];
         existing_lookups = [];
+        // Log to help with debugging if there was a consistency issue
+        logger.warn(
+          `Cannot find previous record for delete on ${record.sourceTable.qualifiedName}: ${beforeId} / ${record.before?.id}`
+        );
       } else {
         existing_buckets = result.buckets;
         existing_lookups = result.lookups;
@@ -292,7 +301,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
     }
 
     // 2. Save bucket data
-    if (beforeId != null &&
+    if (beforeId != null && (afterId == null || !beforeId.equals(afterId))) {
       // Source ID updated
       if (sourceTable.syncData) {
         // Delete old record
@@ -422,7 +431,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
     };
   }
 
-    if (
+    if (afterId == null || !beforeId.equals(afterId)) {
      // Either a delete (afterId == null), or replaced the old replication id
      batch.deleteCurrentData(before_key);
    }
```
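Both corrected conditions encode the same rule: old state is deleted only when there is a `beforeId` and the replication id actually changed, or the row was deleted. A standalone sketch of that rule, with `Uint8Array` standing in for the real id type and a hypothetical `idsEqual` in place of the bson `equals`:

```ts
function idsEqual(a: Uint8Array, b: Uint8Array): boolean {
  return a.length === b.length && a.every((v, i) => v === b[i]);
}

// Old record must be removed when the row was deleted (afterId == null)
// or its replication id was replaced by a different one.
function mustDeleteOldRecord(beforeId: Uint8Array | null, afterId: Uint8Array | null): boolean {
  if (beforeId == null) return false; // insert, or no previous key known
  return afterId == null || !idsEqual(beforeId, afterId);
}

// mustDeleteOldRecord(key, null)     -> true  (delete)
// mustDeleteOldRecord(key, key)      -> false (id unchanged: plain update)
// mustDeleteOldRecord(key, otherKey) -> true  (id replaced: REMOVE old, PUT new)
```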
package/src/sync/sync.ts CHANGED

```diff
@@ -1,5 +1,5 @@
 import { JSONBig, JsonContainer } from '@powersync/service-jsonbig';
-import { SyncParameters } from '@powersync/service-sync-rules';
+import { RequestParameters } from '@powersync/service-sync-rules';
 import { Semaphore } from 'async-mutex';
 import { AbortError } from 'ix/aborterror.js';
 
@@ -7,10 +7,10 @@ import * as auth from '../auth/auth-index.js';
 import * as storage from '../storage/storage-index.js';
 import * as util from '../util/util-index.js';
 
+import { logger } from '@powersync/lib-services-framework';
+import { Metrics } from '../metrics/Metrics.js';
 import { mergeAsyncIterables } from './merge.js';
 import { TokenStreamOptions, tokenStream } from './util.js';
-import { Metrics } from '../metrics/Metrics.js';
-import { logger } from '@powersync/lib-services-framework';
 
 /**
  * Maximum number of connections actively fetching data.
@@ -21,7 +21,7 @@ const syncSemaphore = new Semaphore(MAX_ACTIVE_CONNECTIONS);
 export interface SyncStreamParameters {
   storage: storage.BucketStorageFactory;
   params: util.StreamingSyncRequest;
-  syncParams: SyncParameters;
+  syncParams: RequestParameters;
   token: auth.JwtPayload;
   /**
    * If this signal is aborted, the stream response ends as soon as possible, without error.
@@ -71,7 +71,7 @@ export async function* streamResponse(
 async function* streamResponseInner(
   storage: storage.BucketStorageFactory,
   params: util.StreamingSyncRequest,
-  syncParams: SyncParameters,
+  syncParams: RequestParameters,
   signal: AbortSignal
 ): AsyncGenerator<util.StreamingSyncLine | string | null> {
   // Bucket state of bucket id -> op_id.
```
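With `SyncStreamParameters.syncParams` now typed as `RequestParameters`, callers construct it up front. A sketch of the call shape, mirroring the test updates later in this diff (`storage` and `params` are placeholders; the `as any` token cast is taken from the tests themselves):

```ts
import { RequestParameters } from '@powersync/service-sync-rules';
import { streamResponse } from './sync.js';

declare const storage: any; // storage.BucketStorageFactory in the real code
declare const params: any; // util.StreamingSyncRequest

const stream = streamResponse({
  storage,
  params,
  // Token payload plus client parameters, replacing the old plain-object shape.
  syncParams: new RequestParameters({ sub: 'user-1' }, {}),
  token: { exp: Date.now() / 1000 + 10 } as any
});
```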
package/test/src/data_storage.test.ts CHANGED

```diff
@@ -1,4 +1,4 @@
-import { SqlSyncRules } from '@powersync/service-sync-rules';
+import { RequestParameters, SqlSyncRules } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
 import { describe, expect, test } from 'vitest';
 import { SourceTable } from '../../src/storage/SourceTable.js';
@@ -289,12 +289,7 @@ bucket_definitions:
 
     const checkpoint = result!.flushed_op;
 
-    const parameters = {
-      token_parameters: {
-        user_id: 'u1'
-      },
-      user_parameters: {}
-    };
+    const parameters = new RequestParameters({ sub: 'u1' }, {});
 
     const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
 
@@ -358,12 +353,7 @@ bucket_definitions:
 
     const checkpoint = result!.flushed_op;
 
-    const parameters = {
-      token_parameters: {
-        user_id: 'unknown'
-      },
-      user_parameters: {}
-    };
+    const parameters = new RequestParameters({ sub: 'unknown' }, {});
 
     const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
 
@@ -442,12 +432,7 @@ bucket_definitions:
 
     const checkpoint = result!.flushed_op;
 
-    const parameters = {
-      token_parameters: {
-        user_id: 'u1'
-      },
-      user_parameters: {}
-    };
+    const parameters = new RequestParameters({ sub: 'u1' }, {});
 
     // Test intermediate values - could be moved to sync_rules.test.ts
     const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
@@ -897,6 +882,183 @@ bucket_definitions:
     ]);
   });
 
+  test('changed data with replica identity full', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "test"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const sourceTable = makeTestTable('test', ['id', 'description']);
+
+    // Pre-setup
+    const result1 = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          description: 'test1a'
+        }
+      });
+    });
+
+    const checkpoint1 = result1?.flushed_op ?? '0';
+
+    const result2 = await storage.startBatch({}, async (batch) => {
+      // Unchanged, but has a before id
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        before: {
+          id: 'test1',
+          description: 'test1a'
+        },
+        after: {
+          id: 'test1',
+          description: 'test1b'
+        }
+      });
+    });
+
+    const result3 = await storage.startBatch({}, async (batch) => {
+      // Delete
+      await batch.save({
+        sourceTable,
+        tag: 'delete',
+        before: {
+          id: 'test1',
+          description: 'test1b'
+        },
+        after: undefined
+      });
+    });
+
+    const checkpoint3 = result3!.flushed_op;
+
+    const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
+    const data = batch[0].data.map((d) => {
+      return {
+        op: d.op,
+        object_id: d.object_id,
+        data: d.data,
+        subkey: d.subkey
+      };
+    });
+
+    // Operations must be in this order
+    expect(data).toEqual([
+      // 2
+      // The REMOVE is expected because the subkey changes
+      {
+        op: 'REMOVE',
+        object_id: 'test1',
+        data: null,
+        subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+      },
+      {
+        op: 'PUT',
+        object_id: 'test1',
+        data: JSON.stringify({ id: 'test1', description: 'test1b' }),
+        subkey: '6544e3899293153fa7b38331/500e9b68-a2fd-51ff-9c00-313e2fb9f562'
+      },
+      // 3
+      {
+        op: 'REMOVE',
+        object_id: 'test1',
+        data: null,
+        subkey: '6544e3899293153fa7b38331/500e9b68-a2fd-51ff-9c00-313e2fb9f562'
+      }
+    ]);
+  });
+
+  test('unchanged data with replica identity full', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "test"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const sourceTable = makeTestTable('test', ['id', 'description']);
+
+    // Pre-setup
+    const result1 = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          description: 'test1a'
+        }
+      });
+    });
+
+    const checkpoint1 = result1?.flushed_op ?? '0';
+
+    const result2 = await storage.startBatch({}, async (batch) => {
+      // Unchanged, but has a before id
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        before: {
+          id: 'test1',
+          description: 'test1a'
+        },
+        after: {
+          id: 'test1',
+          description: 'test1a'
+        }
+      });
+    });
+
+    const result3 = await storage.startBatch({}, async (batch) => {
+      // Delete
+      await batch.save({
+        sourceTable,
+        tag: 'delete',
+        before: {
+          id: 'test1',
+          description: 'test1a'
+        },
+        after: undefined
+      });
+    });
+
+    const checkpoint3 = result3!.flushed_op;
+
+    const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
+    const data = batch[0].data.map((d) => {
+      return {
+        op: d.op,
+        object_id: d.object_id,
+        data: d.data,
+        subkey: d.subkey
+      };
+    });
+
+    // Operations must be in this order
+    expect(data).toEqual([
+      // 2
+      {
+        op: 'PUT',
+        object_id: 'test1',
+        data: JSON.stringify({ id: 'test1', description: 'test1a' }),
+        subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+      },
+      // 3
+      {
+        op: 'REMOVE',
+        object_id: 'test1',
+        data: null,
+        subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+      }
+    ]);
+  });
+
   test('large batch', async () => {
     // Test syncing a batch of data that is small in count,
     // but large enough in size to be split over multiple returned batches.
```
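These two tests document the key behaviour: under replica identity full the replication id covers every column, so changing `description` changes the id and the batch must emit a REMOVE for the old subkey before the PUT for the new one, while an id-preserving update emits a plain PUT. A hypothetical sketch of why the subkey changes; the service's real derivation is not shown in this diff, so the hash here is only a stand-in:

```ts
import { createHash } from 'node:crypto';

type Row = Record<string, string>;

// Hypothetical stand-in for deriving a replication id from the replica
// identity columns (all columns under REPLICA IDENTITY FULL).
function replicaId(row: Row, identityColumns: string[]): string {
  const hash = createHash('sha256');
  for (const col of identityColumns) {
    hash.update(col).update('\0').update(row[col] ?? '').update('\0');
  }
  return hash.digest('hex').slice(0, 32);
}

const cols = ['id', 'description']; // REPLICA IDENTITY FULL: every column counts
const before = { id: 'test1', description: 'test1a' };
const after = { id: 'test1', description: 'test1b' };

// Different ids -> REMOVE under the old subkey, then PUT under the new one.
console.log(replicaId(before, cols) !== replicaId(after, cols)); // true
// Identical row -> same id -> plain PUT, no REMOVE.
console.log(replicaId(before, cols) === replicaId(before, cols)); // true
```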
package/test/src/slow_tests.test.ts CHANGED

```diff
@@ -62,7 +62,7 @@ function defineSlowTests(factory: StorageFactory) {
 bucket_definitions:
   global:
     data:
-      - SELECT
+      - SELECT * FROM "test_data"
 `;
   const syncRules = await f.updateSyncRules({ content: syncRuleContent });
   const storage = f.getInstance(syncRules.parsed());
@@ -76,7 +76,10 @@ bucket_definitions:
   walStream = new WalStream(options);
 
   await pool.query(`DROP TABLE IF EXISTS test_data`);
-  await pool.query(
+  await pool.query(
+    `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text, num decimal)`
+  );
+  await pool.query(`ALTER TABLE test_data REPLICA IDENTITY FULL`);
 
   await walStream.initReplication(replicationConnection);
   await storage.autoActivate();
@@ -88,14 +91,17 @@ bucket_definitions:
 
   while (!abort && Date.now() - start < TEST_DURATION_MS) {
     const bg = async () => {
-      for (let j = 0; j <
-        const n =
+      for (let j = 0; j < 1 && !abort; j++) {
+        const n = 1;
         let statements: pgwire.Statement[] = [];
         for (let i = 0; i < n; i++) {
           const description = `test${i}`;
           statements.push({
-            statement: `INSERT INTO test_data(description) VALUES($1) returning id as test_id`,
-            params: [
+            statement: `INSERT INTO test_data(description, num) VALUES($1, $2) returning id as test_id`,
+            params: [
+              { type: 'varchar', value: description },
+              { type: 'float8', value: Math.random() }
+            ]
           });
         }
         const results = await pool.query(...statements);
@@ -104,6 +110,24 @@ bucket_definitions:
         });
         await new Promise((resolve) => setTimeout(resolve, Math.random() * 30));
 
+        if (Math.random() > 0.5) {
+          const updateStatements: pgwire.Statement[] = ids.map((id) => {
+            return {
+              statement: `UPDATE test_data SET num = $2 WHERE id = $1`,
+              params: [
+                { type: 'uuid', value: id },
+                { type: 'float8', value: Math.random() }
+              ]
+            };
+          });
+
+          await pool.query(...updateStatements);
+          if (Math.random() > 0.5) {
+            // Special case - an update that doesn't change data
+            await pool.query(...updateStatements);
+          }
+        }
+
         const deleteStatements: pgwire.Statement[] = ids.map((id) => {
           return {
             statement: `DELETE FROM test_data WHERE id = $1`,
@@ -129,6 +153,21 @@ bucket_definitions:
       return bson.deserialize((doc.data as mongo.Binary).buffer) as SqliteRow;
     });
     expect(transformed).toEqual([]);
+
+    // Check that each PUT has a REMOVE
+    const ops = await f.db.bucket_data.find().sort({ _id: 1 }).toArray();
+    let active = new Set<string>();
+    for (let op of ops) {
+      const key = op.source_key.toHexString();
+      if (op.op == 'PUT') {
+        active.add(key);
+      } else if (op.op == 'REMOVE') {
+        active.delete(key);
+      }
+    }
+    if (active.size > 0) {
+      throw new Error(`${active.size} rows not removed`);
+    }
   }
 
   abortController.abort();
```
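The new end-of-run check encodes an invariant: once every row has been deleted, each PUT in the bucket data must be followed by a REMOVE for the same source key. As a standalone sketch over a simplified op type (the real ops come from `f.db.bucket_data` with BSON keys):

```ts
// Simplified op shape for illustration.
interface BucketOp {
  op: 'PUT' | 'REMOVE';
  sourceKey: string;
}

// Returns the source keys still "active": a PUT without a later REMOVE.
function unremovedKeys(ops: BucketOp[]): Set<string> {
  const active = new Set<string>();
  for (const op of ops) {
    if (op.op === 'PUT') active.add(op.sourceKey);
    else active.delete(op.sourceKey);
  }
  return active;
}

// After deleting every row, this should be empty:
const remaining = unremovedKeys([
  { op: 'PUT', sourceKey: 'a' },
  { op: 'PUT', sourceKey: 'b' },
  { op: 'REMOVE', sourceKey: 'a' },
  { op: 'REMOVE', sourceKey: 'b' }
]);
console.log(remaining.size); // 0
```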
package/test/src/sync.test.ts CHANGED

```diff
@@ -8,6 +8,7 @@ import { JSONBig } from '@powersync/service-jsonbig';
 import { streamResponse } from '../../src/sync/sync.js';
 import * as timers from 'timers/promises';
 import { lsnMakeComparable } from '@powersync/service-jpgwire';
+import { RequestParameters } from '@powersync/service-sync-rules';
 
 describe('sync - mongodb', function () {
   defineTests(MONGO_STORAGE_FACTORY);
@@ -77,7 +78,7 @@ function defineTests(factory: StorageFactory) {
       include_checksum: true,
       raw_data: true
     },
-    syncParams: {
+    syncParams: new RequestParameters({ sub: '' }, {}),
     token: { exp: Date.now() / 1000 + 10 } as any
   });
 
@@ -117,7 +118,7 @@ function defineTests(factory: StorageFactory) {
       include_checksum: true,
       raw_data: false
     },
-    syncParams: {
+    syncParams: new RequestParameters({ sub: '' }, {}),
     token: { exp: Date.now() / 1000 + 10 } as any
   });
 
@@ -145,7 +146,7 @@ function defineTests(factory: StorageFactory) {
       include_checksum: true,
       raw_data: true
     },
-    syncParams: {
+    syncParams: new RequestParameters({ sub: '' }, {}),
     token: { exp: 0 } as any
   });
 
@@ -171,7 +172,7 @@ function defineTests(factory: StorageFactory) {
       include_checksum: true,
      raw_data: true
    },
-    syncParams: {
+    syncParams: new RequestParameters({ sub: '' }, {}),
     token: { exp: Date.now() / 1000 + 10 } as any
   });
   const iter = stream[Symbol.asyncIterator]();
@@ -231,7 +232,7 @@ function defineTests(factory: StorageFactory) {
       include_checksum: true,
       raw_data: true
     },
-    syncParams: {
+    syncParams: new RequestParameters({ sub: '' }, {}),
     token: { exp: exp } as any
   });
   const iter = stream[Symbol.asyncIterator]();
```