@powersync/service-core 0.0.0-dev-20240718134716 → 0.0.0-dev-20240725112650
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -6
- package/dist/entry/cli-entry.js +2 -1
- package/dist/entry/cli-entry.js.map +1 -1
- package/dist/entry/commands/compact-action.d.ts +2 -0
- package/dist/entry/commands/compact-action.js +48 -0
- package/dist/entry/commands/compact-action.js.map +1 -0
- package/dist/entry/entry-index.d.ts +1 -0
- package/dist/entry/entry-index.js +1 -0
- package/dist/entry/entry-index.js.map +1 -1
- package/dist/metrics/Metrics.d.ts +4 -3
- package/dist/metrics/Metrics.js +51 -0
- package/dist/metrics/Metrics.js.map +1 -1
- package/dist/replication/WalStream.js +6 -8
- package/dist/replication/WalStream.js.map +1 -1
- package/dist/routes/configure-fastify.d.ts +883 -0
- package/dist/routes/configure-fastify.js +58 -0
- package/dist/routes/configure-fastify.js.map +1 -0
- package/dist/routes/configure-rsocket.d.ts +13 -0
- package/dist/routes/configure-rsocket.js +46 -0
- package/dist/routes/configure-rsocket.js.map +1 -0
- package/dist/routes/endpoints/socket-route.js +6 -14
- package/dist/routes/endpoints/socket-route.js.map +1 -1
- package/dist/routes/endpoints/sync-stream.js +4 -5
- package/dist/routes/endpoints/sync-stream.js.map +1 -1
- package/dist/routes/route-register.d.ts +1 -1
- package/dist/routes/route-register.js +1 -1
- package/dist/routes/route-register.js.map +1 -1
- package/dist/routes/router-socket.d.ts +4 -4
- package/dist/routes/router-socket.js.map +1 -1
- package/dist/routes/router.d.ts +1 -0
- package/dist/routes/router.js.map +1 -1
- package/dist/routes/routes-index.d.ts +2 -0
- package/dist/routes/routes-index.js +2 -0
- package/dist/routes/routes-index.js.map +1 -1
- package/dist/storage/BucketStorage.d.ts +31 -1
- package/dist/storage/BucketStorage.js.map +1 -1
- package/dist/storage/mongo/MongoCompactor.d.ts +40 -0
- package/dist/storage/mongo/MongoCompactor.js +292 -0
- package/dist/storage/mongo/MongoCompactor.js.map +1 -0
- package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +3 -2
- package/dist/storage/mongo/MongoSyncBucketStorage.js +19 -13
- package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/mongo/models.d.ts +5 -4
- package/dist/storage/mongo/models.js.map +1 -1
- package/dist/storage/mongo/util.d.ts +3 -0
- package/dist/storage/mongo/util.js +22 -0
- package/dist/storage/mongo/util.js.map +1 -1
- package/dist/sync/RequestTracker.js +2 -3
- package/dist/sync/RequestTracker.js.map +1 -1
- package/dist/sync/sync-index.d.ts +1 -0
- package/dist/sync/sync-index.js +1 -0
- package/dist/sync/sync-index.js.map +1 -1
- package/dist/sync/sync.js +20 -7
- package/dist/sync/sync.js.map +1 -1
- package/dist/sync/util.js.map +1 -1
- package/dist/util/config/collectors/config-collector.d.ts +12 -0
- package/dist/util/config/collectors/config-collector.js +43 -0
- package/dist/util/config/collectors/config-collector.js.map +1 -1
- package/dist/util/config/compound-config-collector.d.ts +3 -29
- package/dist/util/config/compound-config-collector.js +22 -69
- package/dist/util/config/compound-config-collector.js.map +1 -1
- package/package.json +6 -4
- package/src/entry/cli-entry.ts +2 -1
- package/src/entry/commands/compact-action.ts +54 -0
- package/src/entry/entry-index.ts +1 -0
- package/src/metrics/Metrics.ts +67 -2
- package/src/replication/WalStream.ts +6 -10
- package/src/routes/configure-fastify.ts +102 -0
- package/src/routes/configure-rsocket.ts +59 -0
- package/src/routes/endpoints/socket-route.ts +6 -15
- package/src/routes/endpoints/sync-stream.ts +4 -5
- package/src/routes/route-register.ts +2 -2
- package/src/routes/router-socket.ts +5 -5
- package/src/routes/router.ts +2 -0
- package/src/routes/routes-index.ts +2 -0
- package/src/storage/BucketStorage.ts +36 -1
- package/src/storage/mongo/MongoCompactor.ts +371 -0
- package/src/storage/mongo/MongoSyncBucketStorage.ts +25 -14
- package/src/storage/mongo/models.ts +5 -4
- package/src/storage/mongo/util.ts +25 -0
- package/src/sync/RequestTracker.ts +3 -3
- package/src/sync/sync-index.ts +1 -0
- package/src/sync/sync.ts +21 -7
- package/src/sync/util.ts +1 -0
- package/src/util/config/collectors/config-collector.ts +48 -0
- package/src/util/config/compound-config-collector.ts +23 -87
- package/test/src/__snapshots__/sync.test.ts.snap +85 -0
- package/test/src/bucket_validation.test.ts +142 -0
- package/test/src/bucket_validation.ts +116 -0
- package/test/src/compacting.test.ts +207 -0
- package/test/src/data_storage.test.ts +19 -60
- package/test/src/slow_tests.test.ts +144 -102
- package/test/src/sync.test.ts +169 -29
- package/test/src/util.ts +71 -13
- package/test/src/wal_stream.test.ts +21 -16
- package/test/src/wal_stream_utils.ts +13 -4
- package/tsconfig.tsbuildinfo +1 -1
package/src/util/config/compound-config-collector.ts

@@ -1,4 +1,3 @@
-import * as t from 'ts-codec';
 import { configFile, normalizeConnection } from '@powersync/service-types';
 import { ConfigCollector } from './collectors/config-collector.js';
 import { ResolvedConnection, ResolvedPowerSyncConfig, RunnerConfig, SyncRulesConfig } from './types.js';
@@ -10,7 +9,7 @@ import { Base64SyncRulesCollector } from './sync-rules/impl/base64-sync-rules-co
 import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js';
 import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-rules-collector.js';
 import { FallbackConfigCollector } from './collectors/impl/fallback-config-collector.js';
-import { logger
+import { logger } from '@powersync/lib-services-framework';
 
 const POWERSYNC_DEV_KID = 'powersync-dev';
 
@@ -29,12 +28,6 @@ export type CompoundConfigCollectorOptions = {
   syncRulesCollectors: SyncRulesCollector[];
 };
 
-export type ConfigCollectorGenerics = {
-  SERIALIZED: configFile.SerializedPowerSyncConfig;
-  DESERIALIZED: configFile.PowerSyncConfig;
-  RESOLVED: ResolvedPowerSyncConfig;
-};
-
 const DEFAULT_COLLECTOR_OPTIONS: CompoundConfigCollectorOptions = {
   configCollectors: [new Base64ConfigCollector(), new FileSystemConfigCollector(), new FallbackConfigCollector()],
   syncRulesCollectors: [
@@ -44,56 +37,15 @@ const DEFAULT_COLLECTOR_OPTIONS: CompoundConfigCollectorOptions = {
   ]
 };
 
-export class CompoundConfigCollector
+export class CompoundConfigCollector {
   constructor(protected options: CompoundConfigCollectorOptions = DEFAULT_COLLECTOR_OPTIONS) {}
 
-  /**
-   * The default ts-codec for validations and decoding
-   */
-  get codec(): t.AnyCodec {
-    return configFile.powerSyncConfig;
-  }
-
   /**
    * Collects and resolves base config
    */
-  async collectConfig(
-    const baseConfig = await this.collectBaseConfig(
-    const baseResolvedConfig = await this.resolveBaseConfig(baseConfig, runnerConfig);
-    return this.resolveConfig(baseConfig, baseResolvedConfig, runnerConfig);
-  }
-
-  /**
-   * Collects the base PowerSyncConfig from various registered collectors.
-   * @throws if no collector could return a configuration.
-   */
-  protected async collectBaseConfig(runner_config: RunnerConfig): Promise<Generics['DESERIALIZED']> {
-    for (const collector of this.options.configCollectors) {
-      try {
-        const baseConfig = await collector.collectSerialized(runner_config);
-        if (baseConfig) {
-          const decoded = this.decode(baseConfig);
-          this.validate(decoded);
-          return decoded;
-        }
-        logger.debug(
-          `Could not collect PowerSync config with ${collector.name} method. Moving on to next method if available.`
-        );
-      } catch (ex) {
-        // An error in a collector is a hard stop
-        throw new Error(`Could not collect config using ${collector.name} method. Caught exception: ${ex}`);
-      }
-    }
-    throw new Error('PowerSyncConfig could not be collected using any of the registered config collectors.');
-  }
+  async collectConfig(runner_config: RunnerConfig = {}): Promise<ResolvedPowerSyncConfig> {
+    const baseConfig = await this.collectBaseConfig(runner_config);
 
-  /**
-   * Performs the resolving of the common (shared) base configuration
-   */
-  protected async resolveBaseConfig(
-    baseConfig: Generics['DESERIALIZED'],
-    runnerConfig: RunnerConfig = {}
-  ): Promise<ResolvedPowerSyncConfig> {
     const connections = baseConfig.replication?.connections ?? [];
     if (connections.length > 1) {
       throw new Error('Only a single replication connection is supported currently');
@@ -141,7 +93,7 @@ export class CompoundConfigCollector<Generics extends ConfigCollectorGenerics =
      devKey = await auth.KeySpec.importKey(baseDevKey);
    }
 
-    const sync_rules = await this.collectSyncRules(baseConfig,
+    const sync_rules = await this.collectSyncRules(baseConfig, runner_config);
 
    let jwt_audiences: string[] = baseConfig.client_auth?.audience ?? [];
 
@@ -178,17 +130,25 @@ export class CompoundConfigCollector<Generics extends ConfigCollectorGenerics =
   }
 
   /**
-   *
-   *
-   *
+   * Collects the base PowerSyncConfig from various registered collectors.
+   * @throws if no collector could return a configuration.
    */
-  protected async
-
-
-
-
-
-
+  protected async collectBaseConfig(runner_config: RunnerConfig): Promise<configFile.PowerSyncConfig> {
+    for (const collector of this.options.configCollectors) {
+      try {
+        const baseConfig = await collector.collect(runner_config);
+        if (baseConfig) {
+          return baseConfig;
+        }
+        logger.debug(
+          `Could not collect PowerSync config with ${collector.name} method. Moving on to next method if available.`
+        );
+      } catch (ex) {
+        // An error in a collector is a hard stop
+        throw new Error(`Could not collect config using ${collector.name} method. Caught exception: ${ex}`);
+      }
+    }
+    throw new Error('PowerSyncConfig could not be collected using any of the registered config collectors.');
   }
 
   protected async collectSyncRules(
@@ -213,28 +173,4 @@ export class CompoundConfigCollector<Generics extends ConfigCollectorGenerics =
      present: false
    };
   }
-
-  /**
-   * Validates input config
-   * ts-codec itself doesn't give great validation errors, so we use json schema for that
-   */
-  protected validate(config: Generics['DESERIALIZED']) {
-    // ts-codec itself doesn't give great validation errors, so we use json schema for that
-    const validator = schema
-      .parseJSONSchema(t.generateJSONSchema(this.codec, { allowAdditional: true, parsers: [configFile.portParser] }))
-      .validator();
-
-    const valid = validator.validate(config);
-    if (!valid.valid) {
-      throw new Error(`Failed to validate PowerSync config: ${valid.errors.join(', ')}`);
-    }
-  }
-
-  protected decode(encoded: Generics['SERIALIZED']): Generics['DESERIALIZED'] {
-    try {
-      return this.codec.decode(encoded);
-    } catch (ex) {
-      throw new Error(`Failed to decode PowerSync config: ${ex}`);
-    }
-  }
 }
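
The refactor above removes the ConfigCollectorGenerics type parameter together with the class-level codec, decode, and validate helpers, and the collectSerialized call becomes a plain collect call. Judging by the matching additions to collectors/config-collector.ts (+48 lines in the manifest), decoding and schema validation appear to have moved behind each collector's collect() method, though that file's contents are not shown here. The chain logic itself is unchanged: the first collector to return a config wins, a null result falls through to the next collector, and a thrown error is a hard stop. A minimal sketch of the pattern, with hypothetical Collector and Config types standing in for the real ConfigCollector and PowerSyncConfig:

type Config = Record<string, unknown>;

interface Collector {
  name: string;
  // Resolves to null when this source has no config, so the chain moves on.
  collect(runnerConfig: Record<string, unknown>): Promise<Config | null>;
}

async function collectFirst(
  collectors: Collector[],
  runnerConfig: Record<string, unknown> = {}
): Promise<Config> {
  for (const collector of collectors) {
    try {
      const config = await collector.collect(runnerConfig);
      if (config) {
        return config; // first collector that produces a config wins
      }
      // null result: move on to the next registered collector
    } catch (ex) {
      // an error inside a collector is a hard stop, not a fallthrough
      throw new Error(`Could not collect config using ${collector.name} method: ${ex}`);
    }
  }
  throw new Error('No registered config collector produced a configuration.');
}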

package/test/src/__snapshots__/sync.test.ts.snap

@@ -1,5 +1,90 @@
 // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
 
+exports[`sync - mongodb > compacting data - invalidate checkpoint 1`] = `
+[
+  {
+    "checkpoint": {
+      "buckets": [
+        {
+          "bucket": "mybucket[]",
+          "checksum": -93886621,
+          "count": 2,
+        },
+      ],
+      "last_op_id": "2",
+      "write_checkpoint": undefined,
+    },
+  },
+]
+`;
+
+exports[`sync - mongodb > compacting data - invalidate checkpoint 2`] = `
+[
+  {
+    "data": {
+      "after": "0",
+      "bucket": "mybucket[]",
+      "data": [
+        {
+          "checksum": -93886621n,
+          "op": "CLEAR",
+          "op_id": "2",
+        },
+      ],
+      "has_more": false,
+      "next_after": "2",
+    },
+  },
+  {
+    "checkpoint_diff": {
+      "last_op_id": "4",
+      "removed_buckets": [],
+      "updated_buckets": [
+        {
+          "bucket": "mybucket[]",
+          "checksum": 499012468,
+          "count": 4,
+        },
+      ],
+      "write_checkpoint": undefined,
+    },
+  },
+  {
+    "data": {
+      "after": "2",
+      "bucket": "mybucket[]",
+      "data": [
+        {
+          "checksum": 1859363232n,
+          "data": "{\\"id\\":\\"t1\\",\\"description\\":\\"Test 1b\\"}",
+          "object_id": "t1",
+          "object_type": "test",
+          "op": "PUT",
+          "op_id": "3",
+          "subkey": "6544e3899293153fa7b38331/117ab485-4b42-58a2-ab32-0053a22c3423",
+        },
+        {
+          "checksum": 3028503153n,
+          "data": "{\\"id\\":\\"t2\\",\\"description\\":\\"Test 2b\\"}",
+          "object_id": "t2",
+          "object_type": "test",
+          "op": "PUT",
+          "op_id": "4",
+          "subkey": "6544e3899293153fa7b38331/ec27c691-b47a-5d92-927a-9944feb89eee",
+        },
+      ],
+      "has_more": false,
+      "next_after": "4",
+    },
+  },
+  {
+    "checkpoint_complete": {
+      "last_op_id": "4",
+    },
+  },
+]
+`;
+
 exports[`sync - mongodb > expired token 1`] = `
 [
   {
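
These snapshots exercise the checkpoint-invalidation path: the bucket is compacted while a client is mid-sync, so instead of the two original rows the client receives a single CLEAR op (op_id 2), followed by a checkpoint_diff and the recompacted rows at ops 3 and 4. Per the semantics encoded in reduceBucket in bucket_validation.ts below, a CLEAR tells the client to discard every local row in the bucket and restart its running checksum from the op itself. A minimal sketch of that client-side handling, assuming a hypothetical in-memory row store (real clients persist bucket state):

interface LocalBucket {
  rows: Map<string, string>; // row key -> serialized row data
  checksum: number; // running checksum for the bucket
}

// CLEAR semantics, mirroring reduceBucket: drop all previously synced rows
// and restart checksum accumulation from the CLEAR op's own checksum.
function applyClear(bucket: LocalBucket, clearChecksum: number): void {
  bucket.rows.clear();
  bucket.checksum = clearChecksum;
}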

package/test/src/bucket_validation.test.ts

@@ -0,0 +1,142 @@
+import { OplogEntry } from '@/util/protocol-types.js';
+import { describe, expect, test } from 'vitest';
+import { reduceBucket, validateBucket } from './bucket_validation.js';
+
+// This tests the reduceBucket function.
+// While this function is not used directly in the service implementation,
+// it is an important part of validating consistency in other tests.
+describe('bucket validation', () => {
+  const ops1: OplogEntry[] = [
+    {
+      op_id: '1',
+      op: 'PUT',
+      object_type: 'test',
+      object_id: 't1',
+      checksum: 2634521662,
+      subkey: '6544e3899293153fa7b38331/117ab485-4b42-58a2-ab32-0053a22c3423',
+      data: '{"id":"t1"}'
+    },
+    {
+      op_id: '2',
+      op: 'PUT',
+      object_type: 'test',
+      object_id: 't2',
+      checksum: 4243212114,
+      subkey: '6544e3899293153fa7b38331/ec27c691-b47a-5d92-927a-9944feb89eee',
+      data: '{"id":"t2"}'
+    },
+    {
+      op_id: '3',
+      op: 'REMOVE',
+      object_type: 'test',
+      object_id: 't1',
+      checksum: 4228978084,
+      subkey: '6544e3899293153fa7b38331/117ab485-4b42-58a2-ab32-0053a22c3423',
+      data: null
+    },
+    {
+      op_id: '4',
+      op: 'PUT',
+      object_type: 'test',
+      object_id: 't2',
+      checksum: 4243212114,
+      subkey: '6544e3899293153fa7b38331/ec27c691-b47a-5d92-927a-9944feb89eee',
+      data: '{"id":"t2"}'
+    }
+  ];
+
+  test('reduce 1', () => {
+    expect(reduceBucket(ops1)).toEqual([
+      {
+        checksum: -1778190028,
+        op: 'CLEAR',
+        op_id: '0'
+      },
+      {
+        checksum: 4243212114,
+        data: '{"id":"t2"}',
+        object_id: 't2',
+        object_type: 'test',
+        op: 'PUT',
+        op_id: '4',
+        subkey: '6544e3899293153fa7b38331/ec27c691-b47a-5d92-927a-9944feb89eee'
+      }
+    ]);
+
+    expect(reduceBucket(reduceBucket(ops1))).toEqual([
+      {
+        checksum: -1778190028,
+        op: 'CLEAR',
+        op_id: '0'
+      },
+      {
+        checksum: 4243212114,
+        data: '{"id":"t2"}',
+        object_id: 't2',
+        object_type: 'test',
+        op: 'PUT',
+        op_id: '4',
+        subkey: '6544e3899293153fa7b38331/ec27c691-b47a-5d92-927a-9944feb89eee'
+      }
+    ]);
+
+    validateBucket(ops1);
+  });
+
+  test('reduce 2', () => {
+    const bucket: OplogEntry[] = [
+      ...ops1,
+
+      {
+        checksum: 93784613,
+        op: 'CLEAR',
+        op_id: '5'
+      },
+      {
+        checksum: 5133378,
+        data: '{"id":"t3"}',
+        object_id: 't3',
+        object_type: 'test',
+        op: 'PUT',
+        op_id: '11',
+        subkey: '6544e3899293153fa7b38333/ec27c691-b47a-5d92-927a-9944feb89eee'
+      }
+    ];
+
+    expect(reduceBucket(bucket)).toEqual([
+      {
+        checksum: 93784613,
+        op: 'CLEAR',
+        op_id: '0'
+      },
+      {
+        checksum: 5133378,
+        data: '{"id":"t3"}',
+        object_id: 't3',
+        object_type: 'test',
+        op: 'PUT',
+        op_id: '11',
+        subkey: '6544e3899293153fa7b38333/ec27c691-b47a-5d92-927a-9944feb89eee'
+      }
+    ]);
+
+    expect(reduceBucket(reduceBucket(bucket))).toEqual([
+      {
+        checksum: 93784613,
+        op: 'CLEAR',
+        op_id: '0'
+      },
+      {
+        checksum: 5133378,
+        data: '{"id":"t3"}',
+        object_id: 't3',
+        object_type: 'test',
+        op: 'PUT',
+        op_id: '11',
+        subkey: '6544e3899293153fa7b38333/ec27c691-b47a-5d92-927a-9944feb89eee'
+      }
+    ]);
+
+    validateBucket(bucket);
+  });
+});
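
The double-reduce assertions above check that $r$ is idempotent: a reduced bucket contains exactly one CLEAR at op_id 0 plus at most one PUT per row, so reducing it again changes nothing. In the notation of /docs/bucket-properties.md:

$r(r(B)) = r(B)$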

package/test/src/bucket_validation.ts

@@ -0,0 +1,116 @@
+import { OplogEntry } from '@/util/protocol-types.js';
+import { addChecksums } from '@/util/utils.js';
+import { expect } from 'vitest';
+
+/**
+ * Reduce a bucket to the final state as stored on the client.
+ *
+ * This keeps the final state for each row as a PUT operation.
+ *
+ * All other operations are replaced with a single CLEAR operation,
+ * summing their checksums, and using a 0 as an op_id.
+ *
+ * This is the function $r(B)$, as described in /docs/bucket-properties.md.
+ */
+export function reduceBucket(operations: OplogEntry[]) {
+  let rowState = new Map<string, OplogEntry>();
+  let otherChecksum = 0;
+
+  for (let op of operations) {
+    const key = rowKey(op);
+    if (op.op == 'PUT') {
+      const existing = rowState.get(key);
+      if (existing) {
+        otherChecksum = addChecksums(otherChecksum, existing.checksum as number);
+      }
+      rowState.set(key, op);
+    } else if (op.op == 'REMOVE') {
+      const existing = rowState.get(key);
+      if (existing) {
+        otherChecksum = addChecksums(otherChecksum, existing.checksum as number);
+      }
+      rowState.delete(key);
+      otherChecksum = addChecksums(otherChecksum, op.checksum as number);
+    } else if (op.op == 'CLEAR') {
+      rowState.clear();
+      otherChecksum = op.checksum as number;
+    } else if (op.op == 'MOVE') {
+      otherChecksum = addChecksums(otherChecksum, op.checksum as number);
+    } else {
+      throw new Error(`Unknown operation ${op.op}`);
+    }
+  }
+
+  const puts = [...rowState.values()].sort((a, b) => {
+    return Number(BigInt(a.op_id) - BigInt(b.op_id));
+  });
+
+  let finalState: OplogEntry[] = [
+    // Special operation to indicate the checksum remainder
+    { op_id: '0', op: 'CLEAR', checksum: otherChecksum },
+    ...puts
+  ];
+
+  return finalState;
+}
+
+function rowKey(entry: OplogEntry) {
+  return `${entry.object_type}/${entry.object_id}/${entry.subkey}`;
+}
+
+/**
+ * Validate this property, as described in /docs/bucket-properties.md:
+ *
+ * $r(B_{[..id_n]}) = r(r(B_{[..id_i]}) \cup B_{[id_{i+1}..id_n]}) \;\forall\; i \in [1..n]$
+ *
+ * We test that a client syncing the entire bucket in one go (left side of the equation)
+ * ends up with the same result as another client syncing up to operation id_i, then syncing
+ * the rest.
+ */
+export function validateBucket(bucket: OplogEntry[]) {
+  const r1 = reduceBucket(bucket);
+  for (let i = 0; i <= bucket.length; i++) {
+    const r2 = reduceBucket(bucket.slice(0, i + 1));
+    const b3 = bucket.slice(i + 1);
+    const r3 = r2.concat(b3);
+    const r4 = reduceBucket(r3);
+    expect(r4).toEqual(r1);
+  }
+
+  // This is the same check, just implemented differently
+  validateCompactedBucket(bucket, bucket);
+}
+
+/**
+ * Validate these properties for a bucket $B$ and its compacted version $B'$,
+ * as described in /docs/bucket-properties.md:
+ *
+ * 1. $r(B) = r(B')$
+ * 2. $r(B_{[..c]}) = r(r(B_{[..c_i]}) \cup B'_{[c_i+1..c]}) \;\forall\; c_i \in B$
+ *
+ * The first one is that the result of syncing the original bucket is the same as
+ * syncing the compacted bucket.
+ *
+ * The second property is that the result of syncing the entire original bucket is the same
+ * as syncing any partial version of that (up to op $c_i$), and then continuing to sync
+ * using the compacted bucket.
+ */
+export function validateCompactedBucket(bucket: OplogEntry[], compacted: OplogEntry[]) {
+  // r(B_{[..c]})
+  const r1 = reduceBucket(bucket);
+  // r(B) = r(B')
+  expect(reduceBucket(compacted)).toEqual(r1);
+
+  for (let i = 0; i < bucket.length; i++) {
+    // r(B_{[..c_i]})
+    const r2 = reduceBucket(bucket.slice(0, i + 1));
+    const c_i = BigInt(bucket[i].op_id);
+    // B'_{[c_i+1..c]}
+    const b3 = compacted.filter((op) => BigInt(op.op_id) > c_i);
+    // r(B_{[..c_i]}) \cup B'_{[c_i+1..c]}
+    const r3 = r2.concat(b3);
+    // r(r(B_{[..c_i]}) \cup B'_{[c_i+1..c]})
+    const r4 = reduceBucket(r3);
+    expect(r4).toEqual(r1);
+  }
+}
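
reduceBucket leans on addChecksums, which is imported from @/util/utils.js and not shown in this diff. The test data above pins down its semantics: the three superseded checksums in ops1 (2634521662 for the overwritten PUT of t1, 4228978084 for the REMOVE, and 4243212114 for the overwritten PUT of t2) must combine to the expected CLEAR checksum -1778190028, which is their sum reduced modulo 2^32 and read as a signed 32-bit integer. A sketch consistent with that behaviour; the actual implementation is not part of this diff:

// Sketch of addChecksums, consistent with the test expectations above;
// the real implementation lives in @/util/utils.js.
function addChecksums(a: number, b: number): number {
  return (a + b) | 0; // wrap to a signed 32-bit integer
}

// Worked example from ops1 in bucket_validation.test.ts:
//   2634521662 + 4228978084 + 4243212114 = 11106711860
//   11106711860 mod 2^32 = 2516777268
//   2516777268 as signed 32-bit = -1778190028 (the CLEAR checksum in 'reduce 1')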