@powersync/service-core 0.0.0-dev-20240709124106 → 0.0.0-dev-20240718134716
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +34 -2
- package/dist/metrics/Metrics.d.ts +3 -4
- package/dist/metrics/Metrics.js +0 -51
- package/dist/metrics/Metrics.js.map +1 -1
- package/dist/replication/WalStream.js +8 -6
- package/dist/replication/WalStream.js.map +1 -1
- package/dist/routes/endpoints/socket-route.js +13 -4
- package/dist/routes/endpoints/socket-route.js.map +1 -1
- package/dist/routes/endpoints/sync-stream.js +14 -5
- package/dist/routes/endpoints/sync-stream.js.map +1 -1
- package/dist/routes/route-register.js +1 -0
- package/dist/routes/route-register.js.map +1 -1
- package/dist/sync/RequestTracker.d.ts +9 -0
- package/dist/sync/RequestTracker.js +20 -0
- package/dist/sync/RequestTracker.js.map +1 -0
- package/dist/sync/sync.d.ts +2 -0
- package/dist/sync/sync.js +31 -11
- package/dist/sync/sync.js.map +1 -1
- package/dist/sync/util.d.ts +2 -1
- package/dist/sync/util.js +2 -3
- package/dist/sync/util.js.map +1 -1
- package/dist/util/config/collectors/config-collector.d.ts +0 -12
- package/dist/util/config/collectors/config-collector.js +0 -43
- package/dist/util/config/collectors/config-collector.js.map +1 -1
- package/dist/util/config/compound-config-collector.d.ts +29 -3
- package/dist/util/config/compound-config-collector.js +69 -22
- package/dist/util/config/compound-config-collector.js.map +1 -1
- package/package.json +6 -8
- package/src/metrics/Metrics.ts +2 -67
- package/src/replication/WalStream.ts +10 -6
- package/src/routes/endpoints/socket-route.ts +14 -4
- package/src/routes/endpoints/sync-stream.ts +15 -5
- package/src/routes/route-register.ts +1 -0
- package/src/sync/RequestTracker.ts +21 -0
- package/src/sync/sync.ts +41 -11
- package/src/sync/util.ts +6 -3
- package/src/util/config/collectors/config-collector.ts +0 -48
- package/src/util/config/compound-config-collector.ts +87 -23
- package/test/src/sync.test.ts +8 -0
- package/test/src/util.ts +12 -6
- package/test/src/wal_stream.test.ts +16 -21
- package/tsconfig.tsbuildinfo +1 -1
package/src/replication/WalStream.ts CHANGED

@@ -406,7 +406,7 @@ WHERE oid = $1::regclass`,
        await batch.save({ tag: 'insert', sourceTable: table, before: undefined, after: record });
      }
      at += rows.length;
-
+      container.getImplementation(Metrics).rows_replicated_total.add(rows.length);

      await touch();
    }
@@ -492,19 +492,21 @@ WHERE oid = $1::regclass`,
      return null;
    }

+    const metrics = container.getImplementation(Metrics);
+
    if (msg.tag == 'insert') {
-
+      metrics.rows_replicated_total.add(1);
      const baseRecord = util.constructAfterRecord(msg);
      return await batch.save({ tag: 'insert', sourceTable: table, before: undefined, after: baseRecord });
    } else if (msg.tag == 'update') {
-
+      metrics.rows_replicated_total.add(1);
      // "before" may be null if the replica id columns are unchanged
      // It's fine to treat that the same as an insert.
      const before = util.constructBeforeRecord(msg);
      const after = util.constructAfterRecord(msg);
      return await batch.save({ tag: 'update', sourceTable: table, before: before, after: after });
    } else if (msg.tag == 'delete') {
-
+      metrics.rows_replicated_total.add(1);
      const before = util.constructBeforeRecord(msg)!;

      return await batch.save({ tag: 'delete', sourceTable: table, before: before, after: undefined });
@@ -555,6 +557,8 @@ WHERE oid = $1::regclass`,
    // Auto-activate as soon as initial replication is done
    await this.storage.autoActivate();

+    const metrics = container.getImplementation(Metrics);
+
    await this.storage.startBatch({}, async (batch) => {
      // Replication never starts in the middle of a transaction
      let inTx = false;
@@ -577,7 +581,7 @@ WHERE oid = $1::regclass`,
        } else if (msg.tag == 'begin') {
          inTx = true;
        } else if (msg.tag == 'commit') {
-
+          metrics.transactions_replicated_total.add(1);
          inTx = false;
          await batch.commit(msg.lsn!);
          await this.ack(msg.lsn!, replicationStream);
@@ -602,7 +606,7 @@ WHERE oid = $1::regclass`,
          }
        }

-
+        metrics.chunks_replicated_total.add(1);
      }
    });
  }
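All of the replication changes above follow the same pattern: the Metrics instance is resolved from the shared service container instead of being referenced as a singleton. A minimal sketch of that pattern, assuming a Metrics implementation has been registered on the container at startup (as the test utilities at the end of this diff do); the helper names and the rowCount parameter are illustrative, not part of the package:

import { container } from '@powersync/lib-services-framework';
import { Metrics } from '../metrics/Metrics.js';

// Resolve the registered Metrics implementation and bump the replication counters.
// The counter names are the ones used in the WalStream diff above.
export function recordReplicatedRows(rowCount: number) {
  const metrics = container.getImplementation(Metrics);
  metrics.rows_replicated_total.add(rowCount);
}

export function recordCommittedTransaction() {
  const metrics = container.getImplementation(Metrics);
  metrics.transactions_replicated_total.add(1);
}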
package/src/routes/endpoints/socket-route.ts CHANGED

@@ -1,4 +1,4 @@
-import { errors, logger, schema } from '@powersync/lib-services-framework';
+import { container, errors, logger, schema } from '@powersync/lib-services-framework';
 import { RequestParameters } from '@powersync/service-sync-rules';
 import { serialize } from 'bson';

@@ -7,6 +7,7 @@ import { streamResponse } from '../../sync/sync.js';
 import * as util from '../../util/util-index.js';
 import { SocketRouteGenerator } from '../router-socket.js';
 import { SyncRoutes } from './sync-stream.js';
+import { RequestTracker } from '../../sync/RequestTracker.js';

 export const syncStreamReactive: SocketRouteGenerator = (router) =>
   router.reactiveStream<util.StreamingSyncRequest, any>(SyncRoutes.STREAM, {
@@ -65,7 +66,10 @@ export const syncStreamReactive: SocketRouteGenerator = (router) =>
        observer.triggerCancel();
      });

-
+      const metrics = container.getImplementation(Metrics);
+
+      metrics.concurrent_connections.add(1);
+      const tracker = new RequestTracker();
      try {
        for await (const data of streamResponse({
          storage,
@@ -79,6 +83,7 @@ export const syncStreamReactive: SocketRouteGenerator = (router) =>
            // RSocket handles keepalive events by default
            keep_alive: false
          },
+          tracker,
          signal: controller.signal
        })) {
          if (data == null) {
@@ -94,7 +99,7 @@ export const syncStreamReactive: SocketRouteGenerator = (router) =>
          const serialized = serialize(data) as Buffer;
          responder.onNext({ data: serialized }, false);
          requestedN--;
-
+          tracker.addDataSynced(serialized.length);
        }

        if (requestedN <= 0) {
@@ -126,7 +131,12 @@ export const syncStreamReactive: SocketRouteGenerator = (router) =>
        responder.onComplete();
        removeStopHandler();
        disposer();
-
+        logger.info(`Sync stream complete`, {
+          user_id: syncParams.user_id,
+          operations_synced: tracker.operationsSynced,
+          data_synced_bytes: tracker.dataSyncedBytes
+        });
+        metrics.concurrent_connections.add(-1);
      }
    }
  });
package/src/routes/endpoints/sync-stream.ts CHANGED

@@ -1,4 +1,4 @@
-import { errors, logger, router, schema } from '@powersync/lib-services-framework';
+import { container, errors, logger, router, schema } from '@powersync/lib-services-framework';
 import { RequestParameters } from '@powersync/service-sync-rules';
 import { Readable } from 'stream';

@@ -8,6 +8,7 @@ import * as util from '../../util/util-index.js';
 import { Metrics } from '../../metrics/Metrics.js';
 import { authUser } from '../auth.js';
 import { routeDefinition } from '../router.js';
+import { RequestTracker } from '../../sync/RequestTracker.js';

 export enum SyncRoutes {
   STREAM = '/sync/stream'
@@ -42,9 +43,11 @@ export const syncStreamed = routeDefinition({
        description: 'No sync rules available'
      });
    }
+    const metrics = container.getImplementation(Metrics);
    const controller = new AbortController();
+    const tracker = new RequestTracker();
    try {
-
+      metrics.concurrent_connections.add(1);
      const stream = Readable.from(
        sync.transformToBytesTracked(
          sync.ndjson(
@@ -53,9 +56,11 @@ export const syncStreamed = routeDefinition({
            params,
            syncParams,
            token: payload.context.token_payload!,
+            tracker,
            signal: controller.signal
          })
-        )
+        ),
+        tracker
      ),
      { objectMode: false, highWaterMark: 16 * 1024 }
    );
@@ -85,12 +90,17 @@ export const syncStreamed = routeDefinition({
        data: stream,
        afterSend: async () => {
          controller.abort();
-
+          metrics.concurrent_connections.add(-1);
+          logger.info(`Sync stream complete`, {
+            user_id: syncParams.user_id,
+            operations_synced: tracker.operationsSynced,
+            data_synced_bytes: tracker.dataSyncedBytes
+          });
        }
      });
    } catch (ex) {
      controller.abort();
-
+      metrics.concurrent_connections.add(-1);
    }
  }
});
package/src/routes/route-register.ts CHANGED

@@ -63,6 +63,7 @@ export function registerFastifyRoutes(
      }
    } catch (ex) {
      const journeyError = errors.JourneyError.isJourneyError(ex) ? ex : new errors.InternalServerError(ex);
+      logger.error(`Request failed`, journeyError);

      response = new router.RouterResponse({
        status: journeyError.errorData.status || 500,
package/src/sync/RequestTracker.ts ADDED

@@ -0,0 +1,21 @@
+import { container } from '@powersync/lib-services-framework';
+import { Metrics } from '../metrics/Metrics.js';
+
+/**
+ * Record sync stats per request stream.
+ */
+export class RequestTracker {
+  operationsSynced = 0;
+  dataSyncedBytes = 0;
+
+  addOperationsSynced(operations: number) {
+    this.operationsSynced += operations;
+    container.getImplementation(Metrics).operations_synced_total.add(operations);
+  }
+
+  addDataSynced(bytes: number) {
+    this.dataSyncedBytes += bytes;
+
+    container.getImplementation(Metrics).data_synced_bytes.add(bytes);
+  }
+}
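As a rough usage sketch of the new class, mirroring how the routes above use it and assuming a Metrics implementation is already registered on the container (both add methods resolve it); the counts are illustrative:

import { RequestTracker } from './RequestTracker.js';

// One tracker per sync request/stream. It keeps per-request totals and also
// forwards each increment to the global operations_synced_total / data_synced_bytes metrics.
const tracker = new RequestTracker();

tracker.addOperationsSynced(25);   // e.g. 25 operations in one bucket data batch
tracker.addDataSynced(4096);       // e.g. 4 KiB written to the response stream

console.log({
  operations_synced: tracker.operationsSynced,  // 25
  data_synced_bytes: tracker.dataSyncedBytes    // 4096
});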
package/src/sync/sync.ts CHANGED

@@ -8,9 +8,9 @@ import * as storage from '../storage/storage-index.js';
 import * as util from '../util/util-index.js';

 import { logger } from '@powersync/lib-services-framework';
-import { Metrics } from '../metrics/Metrics.js';
 import { mergeAsyncIterables } from './merge.js';
 import { TokenStreamOptions, tokenStream } from './util.js';
+import { RequestTracker } from './RequestTracker.js';

 /**
  * Maximum number of connections actively fetching data.
@@ -28,12 +28,14 @@ export interface SyncStreamParameters {
   */
  signal?: AbortSignal;
  tokenStreamOptions?: Partial<TokenStreamOptions>;
+
+  tracker: RequestTracker;
 }

 export async function* streamResponse(
   options: SyncStreamParameters
 ): AsyncIterable<util.StreamingSyncLine | string | null> {
-  const { storage, params, syncParams, token, tokenStreamOptions, signal } = options;
+  const { storage, params, syncParams, token, tokenStreamOptions, tracker, signal } = options;
   // We also need to be able to abort, so we create our own controller.
   const controller = new AbortController();
   if (signal) {
@@ -49,7 +51,7 @@ export async function* streamResponse(
    }
  }
  const ki = tokenStream(token, controller.signal, tokenStreamOptions);
-  const stream = streamResponseInner(storage, params, syncParams, controller.signal);
+  const stream = streamResponseInner(storage, params, syncParams, tracker, controller.signal);
  // Merge the two streams, and abort as soon as one of the streams end.
  const merged = mergeAsyncIterables([stream, ki], controller.signal);

@@ -72,6 +74,7 @@ async function* streamResponseInner(
  storage: storage.BucketStorageFactory,
  params: util.StreamingSyncRequest,
  syncParams: RequestParameters,
+  tracker: RequestTracker,
  signal: AbortSignal
): AsyncGenerator<util.StreamingSyncLine | string | null> {
  // Bucket state of bucket id -> op_id.
@@ -109,6 +112,11 @@ async function* streamResponseInner(
      });

      if (allBuckets.length > 1000) {
+        logger.error(`Too many buckets`, {
+          checkpoint,
+          user_id: syncParams.user_id,
+          buckets: allBuckets.length
+        });
        // TODO: Limit number of buckets even before we get to this point
        throw new Error(`Too many buckets: ${allBuckets.length}`);
      }
@@ -137,11 +145,18 @@ async function* streamResponseInner(
      }
      bucketsToFetch = diff.updatedBuckets.map((c) => c.bucket);

-      let message = `Updated checkpoint: ${checkpoint} |
+      let message = `Updated checkpoint: ${checkpoint} | `;
+      message += `write: ${writeCheckpoint} | `;
      message += `buckets: ${allBuckets.length} | `;
      message += `updated: ${limitedBuckets(diff.updatedBuckets, 20)} | `;
-      message += `removed: ${limitedBuckets(diff.removedBuckets, 20)}
-      logger.info(message
+      message += `removed: ${limitedBuckets(diff.removedBuckets, 20)}`;
+      logger.info(message, {
+        checkpoint,
+        user_id: syncParams.user_id,
+        buckets: allBuckets.length,
+        updated: diff.updatedBuckets.length,
+        removed: diff.removedBuckets.length
+      });

      const checksum_line: util.StreamingSyncCheckpointDiff = {
        checkpoint_diff: {
@@ -156,7 +171,7 @@ async function* streamResponseInner(
    } else {
      let message = `New checkpoint: ${checkpoint} | write: ${writeCheckpoint} | `;
      message += `buckets: ${allBuckets.length} ${limitedBuckets(allBuckets, 20)}`;
-      logger.info(message);
+      logger.info(message, { checkpoint, user_id: syncParams.user_id, buckets: allBuckets.length });
      bucketsToFetch = allBuckets;
      const checksum_line: util.StreamingSyncCheckpoint = {
        checkpoint: {
@@ -172,7 +187,16 @@ async function* streamResponseInner(

    // This incrementally updates dataBuckets with each individual bucket position.
    // At the end of this, we can be sure that all buckets have data up to the checkpoint.
-    yield* bucketDataInBatches({
+    yield* bucketDataInBatches({
+      storage,
+      checkpoint,
+      bucketsToFetch,
+      dataBuckets,
+      raw_data,
+      binary_data,
+      signal,
+      tracker
+    });

    await new Promise((resolve) => setTimeout(resolve, 10));
  }
@@ -186,6 +210,7 @@ interface BucketDataRequest {
  dataBuckets: Map<string, string>;
  raw_data: boolean | undefined;
  binary_data: boolean | undefined;
+  tracker: RequestTracker;
  signal: AbortSignal;
}

@@ -221,11 +246,16 @@ async function* bucketDataInBatches(request: BucketDataRequest) {
    }
  }

+interface BucketDataBatchResult {
+  done: boolean;
+  data: any;
+}
+
 /**
  * Extracted as a separate internal function just to avoid memory leaks.
  */
-async function* bucketDataBatch(request: BucketDataRequest) {
-  const { storage, checkpoint, bucketsToFetch, dataBuckets, raw_data, binary_data, signal } = request;
+async function* bucketDataBatch(request: BucketDataRequest): AsyncGenerator<BucketDataBatchResult, void> {
+  const { storage, checkpoint, bucketsToFetch, dataBuckets, raw_data, binary_data, tracker, signal } = request;

  const [_, release] = await syncSemaphore.acquire();
  try {
@@ -272,7 +302,7 @@ async function* bucketDataBatch(request: BucketDataRequest) {
      // iterator memory in case if large data sent.
      yield { data: null, done: false };
    }
-
+    tracker.addOperationsSynced(r.data.length);

    dataBuckets.set(r.bucket, r.next_after);
  }
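streamResponse now requires the tracker in its SyncStreamParameters. A condensed caller sketch, modelled on the test setup at the end of this diff; the storage value, token, and request parameters are illustrative, and typing is loosened with `as any` where the full types are not shown here:

import { RequestParameters } from '@powersync/service-sync-rules';
import { streamResponse } from './sync.js';
import { RequestTracker } from './RequestTracker.js';

async function runStream(storage: any /* a BucketStorageFactory in the real code */) {
  const controller = new AbortController();
  const tracker = new RequestTracker();

  for await (const line of streamResponse({
    storage,
    params: { include_checksum: true, raw_data: true } as any,
    syncParams: new RequestParameters({ sub: 'user-1' }, {}),
    token: { exp: Date.now() / 1000 + 10 } as any,
    tracker,
    signal: controller.signal
  })) {
    // line is a StreamingSyncLine, a keepalive string, or null
  }

  console.log(tracker.operationsSynced, tracker.dataSyncedBytes);
}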
package/src/sync/util.ts CHANGED

@@ -1,7 +1,7 @@
 import * as timers from 'timers/promises';

 import * as util from '../util/util-index.js';
-import {
+import { RequestTracker } from './RequestTracker.js';

 export type TokenStreamOptions = {
   /**
@@ -89,10 +89,13 @@ export async function* ndjson(iterator: AsyncIterable<string | null | Record<str
  }
}

-export async function* transformToBytesTracked(
+export async function* transformToBytesTracked(
+  iterator: AsyncIterable<string>,
+  tracker: RequestTracker
+): AsyncGenerator<Buffer> {
  for await (let data of iterator) {
    const encoded = Buffer.from(data, 'utf8');
-
+    tracker.addDataSynced(encoded.length);
    yield encoded;
  }
}
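transformToBytesTracked is a thin wrapper over an async iterable of strings that encodes each item to a Buffer and reports its size to the tracker. A self-contained sketch of how it composes; the NDJSON lines are placeholders, and a Metrics implementation is assumed to be registered on the container because the tracker forwards increments to it:

import { RequestTracker } from './RequestTracker.js';
import { transformToBytesTracked } from './util.js';

async function* lines(): AsyncGenerator<string> {
  // Placeholder NDJSON lines; in the service these come from sync.ndjson(streamResponse(...)).
  yield '{"checkpoint":"demo"}\n';
  yield '{"data":[]}\n';
}

async function demo() {
  const tracker = new RequestTracker();
  for await (const chunk of transformToBytesTracked(lines(), tracker)) {
    // chunk is a Buffer whose length has already been added to tracker.dataSyncedBytes.
    process.stdout.write(chunk);
  }
  console.log('bytes sent:', tracker.dataSyncedBytes);
}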
package/src/util/config/collectors/config-collector.ts CHANGED

@@ -1,8 +1,6 @@
-import * as t from 'ts-codec';
 import * as yaml from 'yaml';

 import { configFile } from '@powersync/service-types';
-import { schema } from '@powersync/lib-services-framework';

 import { RunnerConfig } from '../types.js';

@@ -23,13 +21,6 @@ export enum ConfigFileFormat {
  */
 const YAML_ENV_PREFIX = 'PS_';

-// ts-codec itself doesn't give great validation errors, so we use json schema for that
-const configSchemaValidator = schema
-  .parseJSONSchema(
-    t.generateJSONSchema(configFile.powerSyncConfig, { allowAdditional: true, parsers: [configFile.portParser] })
-  )
-  .validator();
-
 export abstract class ConfigCollector {
   abstract get name(): string;

@@ -39,45 +30,6 @@ export abstract class ConfigCollector {
   */
  abstract collectSerialized(runnerConfig: RunnerConfig): Promise<configFile.SerializedPowerSyncConfig | null>;

-  /**
-   * Collects the PowerSyncConfig settings.
-   * Validates and decodes the config.
-   * @returns null if this collector cannot provide a config
-   */
-  async collect(runner_config: RunnerConfig): Promise<configFile.PowerSyncConfig | null> {
-    const serialized = await this.collectSerialized(runner_config);
-    if (!serialized) {
-      return null;
-    }
-
-    /**
-     * After this point a serialized config has been found. Any failures to decode or validate
-     * will result in a hard stop.
-     */
-    const decoded = this.decode(serialized);
-    this.validate(decoded);
-    return decoded;
-  }
-
-  /**
-   * Validates input config
-   * ts-codec itself doesn't give great validation errors, so we use json schema for that
-   */
-  validate(config: configFile.PowerSyncConfig) {
-    const valid = configSchemaValidator.validate(config);
-    if (!valid.valid) {
-      throw new Error(`Failed to validate PowerSync config: ${valid.errors.join(', ')}`);
-    }
-  }
-
-  decode(encoded: configFile.SerializedPowerSyncConfig): configFile.PowerSyncConfig {
-    try {
-      return configFile.powerSyncConfig.decode(encoded);
-    } catch (ex) {
-      throw new Error(`Failed to decode PowerSync config: ${ex}`);
-    }
-  }
-
  protected parseContent(content: string, contentType?: ConfigFileFormat) {
    switch (contentType) {
      case ConfigFileFormat.YAML:
package/src/util/config/compound-config-collector.ts CHANGED

@@ -1,3 +1,4 @@
+import * as t from 'ts-codec';
 import { configFile, normalizeConnection } from '@powersync/service-types';
 import { ConfigCollector } from './collectors/config-collector.js';
 import { ResolvedConnection, ResolvedPowerSyncConfig, RunnerConfig, SyncRulesConfig } from './types.js';
@@ -9,7 +10,7 @@ import { Base64SyncRulesCollector } from './sync-rules/impl/base64-sync-rules-co
 import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js';
 import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-rules-collector.js';
 import { FallbackConfigCollector } from './collectors/impl/fallback-config-collector.js';
-import { logger } from '@powersync/lib-services-framework';
+import { logger, schema } from '@powersync/lib-services-framework';

 const POWERSYNC_DEV_KID = 'powersync-dev';

@@ -28,6 +29,12 @@ export type CompoundConfigCollectorOptions = {
  syncRulesCollectors: SyncRulesCollector[];
};

+export type ConfigCollectorGenerics = {
+  SERIALIZED: configFile.SerializedPowerSyncConfig;
+  DESERIALIZED: configFile.PowerSyncConfig;
+  RESOLVED: ResolvedPowerSyncConfig;
+};
+
 const DEFAULT_COLLECTOR_OPTIONS: CompoundConfigCollectorOptions = {
   configCollectors: [new Base64ConfigCollector(), new FileSystemConfigCollector(), new FallbackConfigCollector()],
   syncRulesCollectors: [
@@ -37,15 +44,56 @@ const DEFAULT_COLLECTOR_OPTIONS: CompoundConfigCollectorOptions = {
  ]
};

-export class CompoundConfigCollector {
+export class CompoundConfigCollector<Generics extends ConfigCollectorGenerics = ConfigCollectorGenerics> {
  constructor(protected options: CompoundConfigCollectorOptions = DEFAULT_COLLECTOR_OPTIONS) {}

+  /**
+   * The default ts-codec for validations and decoding
+   */
+  get codec(): t.AnyCodec {
+    return configFile.powerSyncConfig;
+  }
+
  /**
   * Collects and resolves base config
   */
-  async collectConfig(
-    const baseConfig = await this.collectBaseConfig(
+  async collectConfig(runnerConfig: RunnerConfig = {}): Promise<Generics['RESOLVED']> {
+    const baseConfig = await this.collectBaseConfig(runnerConfig);
+    const baseResolvedConfig = await this.resolveBaseConfig(baseConfig, runnerConfig);
+    return this.resolveConfig(baseConfig, baseResolvedConfig, runnerConfig);
+  }
+
+  /**
+   * Collects the base PowerSyncConfig from various registered collectors.
+   * @throws if no collector could return a configuration.
+   */
+  protected async collectBaseConfig(runner_config: RunnerConfig): Promise<Generics['DESERIALIZED']> {
+    for (const collector of this.options.configCollectors) {
+      try {
+        const baseConfig = await collector.collectSerialized(runner_config);
+        if (baseConfig) {
+          const decoded = this.decode(baseConfig);
+          this.validate(decoded);
+          return decoded;
+        }
+        logger.debug(
+          `Could not collect PowerSync config with ${collector.name} method. Moving on to next method if available.`
+        );
+      } catch (ex) {
+        // An error in a collector is a hard stop
+        throw new Error(`Could not collect config using ${collector.name} method. Caught exception: ${ex}`);
+      }
+    }
+    throw new Error('PowerSyncConfig could not be collected using any of the registered config collectors.');
+  }

+  /**
+   * Performs the resolving of the common (shared) base configuration
+   */
+  protected async resolveBaseConfig(
+    baseConfig: Generics['DESERIALIZED'],
+    runnerConfig: RunnerConfig = {}
+  ): Promise<ResolvedPowerSyncConfig> {
    const connections = baseConfig.replication?.connections ?? [];
    if (connections.length > 1) {
      throw new Error('Only a single replication connection is supported currently');
@@ -93,7 +141,7 @@ export class CompoundConfigCollector {
      devKey = await auth.KeySpec.importKey(baseDevKey);
    }

-    const sync_rules = await this.collectSyncRules(baseConfig,
+    const sync_rules = await this.collectSyncRules(baseConfig, runnerConfig);

    let jwt_audiences: string[] = baseConfig.client_auth?.audience ?? [];

@@ -130,25 +178,17 @@ export class CompoundConfigCollector {
  }

  /**
-   *
-   *
+   * Perform any additional resolving from {@link ResolvedPowerSyncConfig}
+   * to the extended {@link Generics['RESOLVED']}
+   *
   */
-  protected async
-
-
-
-
-
-
-    logger.debug(
-      `Could not collect PowerSync config with ${collector.name} method. Moving on to next method if available.`
-    );
-  } catch (ex) {
-    // An error in a collector is a hard stop
-    throw new Error(`Could not collect config using ${collector.name} method. Caught exception: ${ex}`);
-  }
-  }
-  throw new Error('PowerSyncConfig could not be collected using any of the registered config collectors.');
+  protected async resolveConfig(
+    baseConfig: Generics['DESERIALIZED'],
+    resolvedBaseConfig: ResolvedPowerSyncConfig,
+    runnerConfig: RunnerConfig = {}
+  ): Promise<Generics['RESOLVED']> {
+    // The base version has ResolvedPowerSyncConfig == Generics['RESOLVED']
+    return resolvedBaseConfig;
  }

  protected async collectSyncRules(
@@ -173,4 +213,28 @@ export class CompoundConfigCollector {
      present: false
    };
  }
+
+  /**
+   * Validates input config
+   * ts-codec itself doesn't give great validation errors, so we use json schema for that
+   */
+  protected validate(config: Generics['DESERIALIZED']) {
+    // ts-codec itself doesn't give great validation errors, so we use json schema for that
+    const validator = schema
+      .parseJSONSchema(t.generateJSONSchema(this.codec, { allowAdditional: true, parsers: [configFile.portParser] }))
+      .validator();
+
+    const valid = validator.validate(config);
+    if (!valid.valid) {
+      throw new Error(`Failed to validate PowerSync config: ${valid.errors.join(', ')}`);
+    }
+  }
+
+  protected decode(encoded: Generics['SERIALIZED']): Generics['DESERIALIZED'] {
+    try {
+      return this.codec.decode(encoded);
+    } catch (ex) {
+      throw new Error(`Failed to decode PowerSync config: ${ex}`);
+    }
+  }
 }
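The new Generics parameter exists so a specialised deployment can extend the collector with its own serialized, deserialized, and resolved config shapes, overriding `codec`, `resolveConfig`, or both. A hypothetical extension sketch; the my_setting field and MyResolvedConfig type are invented purely for illustration and are not part of the package:

import {
  CompoundConfigCollector,
  ConfigCollectorGenerics
} from './compound-config-collector.js';
import { ResolvedPowerSyncConfig, RunnerConfig } from './types.js';

// Hypothetical extended resolved shape; `my_setting` is invented for this sketch.
type MyResolvedConfig = ResolvedPowerSyncConfig & { my_setting: string };

type MyGenerics = ConfigCollectorGenerics & { RESOLVED: MyResolvedConfig };

class MyConfigCollector extends CompoundConfigCollector<MyGenerics> {
  // `codec` could also be overridden to validate/decode an extended config file schema.
  protected async resolveConfig(
    baseConfig: MyGenerics['DESERIALIZED'],
    resolvedBaseConfig: ResolvedPowerSyncConfig,
    runnerConfig: RunnerConfig = {}
  ): Promise<MyResolvedConfig> {
    // Layer the extra resolved field on top of the shared base resolution.
    return { ...resolvedBaseConfig, my_setting: 'example' };
  }
}

// Usage: const config = await new MyConfigCollector().collectConfig(runnerConfig);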
package/test/src/sync.test.ts CHANGED

@@ -9,6 +9,7 @@ import { streamResponse } from '../../src/sync/sync.js';
 import * as timers from 'timers/promises';
 import { lsnMakeComparable } from '@powersync/service-jpgwire';
 import { RequestParameters } from '@powersync/service-sync-rules';
+import { RequestTracker } from '@/sync/RequestTracker.js';

 describe('sync - mongodb', function () {
   defineTests(MONGO_STORAGE_FACTORY);
@@ -38,6 +39,8 @@ bucket_definitions:
 `;

 function defineTests(factory: StorageFactory) {
+  const tracker = new RequestTracker();
+
   test('sync global data', async () => {
     const f = await factory();

@@ -78,6 +81,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: true
      },
+      tracker,
      syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: Date.now() / 1000 + 10 } as any
    });
@@ -118,6 +122,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: false
      },
+      tracker,
      syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: Date.now() / 1000 + 10 } as any
    });
@@ -146,6 +151,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: true
      },
+      tracker,
      syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: 0 } as any
    });
@@ -172,6 +178,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: true
      },
+      tracker,
      syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: Date.now() / 1000 + 10 } as any
    });
@@ -232,6 +239,7 @@ function defineTests(factory: StorageFactory) {
        include_checksum: true,
        raw_data: true
      },
+      tracker,
      syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: exp } as any
    });
package/test/src/util.ts CHANGED

@@ -7,14 +7,20 @@ import { PowerSyncMongo } from '../../src/storage/mongo/db.js';
 import { escapeIdentifier } from '../../src/util/pgwire_utils.js';
 import { env } from './env.js';
 import { Metrics } from '@/metrics/Metrics.js';
+import { container } from '@powersync/lib-services-framework';
+import { MeterProvider } from '@opentelemetry/sdk-metrics';
+import { PrometheusExporter } from '@opentelemetry/exporter-prometheus';

 // The metrics need to be initialised before they can be used
-
-
-
-
-})
-
+const prometheus = new PrometheusExporter();
+const metrics = new Metrics(
+  new MeterProvider({
+    readers: [prometheus]
+  }),
+  prometheus
+);
+container.register(Metrics, metrics);
+metrics.resetCounters();

 export const TEST_URI = env.PG_TEST_URL;
