@powersync/service-core 0.4.1 → 0.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/dist/routes/endpoints/socket-route.js +9 -1
- package/dist/routes/endpoints/socket-route.js.map +1 -1
- package/dist/routes/endpoints/sync-stream.js +9 -1
- package/dist/routes/endpoints/sync-stream.js.map +1 -1
- package/dist/sync/RequestTracker.d.ts +9 -0
- package/dist/sync/RequestTracker.js +19 -0
- package/dist/sync/RequestTracker.js.map +1 -0
- package/dist/sync/sync.d.ts +2 -0
- package/dist/sync/sync.js +31 -11
- package/dist/sync/sync.js.map +1 -1
- package/dist/sync/util.d.ts +2 -1
- package/dist/sync/util.js +2 -3
- package/dist/sync/util.js.map +1 -1
- package/package.json +4 -4
- package/src/routes/endpoints/socket-route.ts +9 -1
- package/src/routes/endpoints/sync-stream.ts +10 -1
- package/src/sync/RequestTracker.ts +21 -0
- package/src/sync/sync.ts +41 -10
- package/src/sync/util.ts +6 -2
- package/test/src/sync.test.ts +8 -0
- package/tsconfig.tsbuildinfo +1 -1
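Note on the new file: package/src/sync/RequestTracker.ts (+21 lines) is added in this release, but its body is not reproduced in the per-file diffs below. Its shape can be inferred from the call sites that are shown — `new RequestTracker()`, `tracker.addOperationsSynced(r.data.length)`, and `tracker.addDataSynced(encoded.length)`. A minimal sketch consistent with those calls follows; the field names and any metrics integration are assumptions, not the published source:

// Hypothetical reconstruction of src/sync/RequestTracker.ts — only the
// zero-arg constructor and the two add* methods are confirmed by this diff.
export class RequestTracker {
  operationsSynced = 0;
  dataSyncedBytes = 0;

  addOperationsSynced(operations: number): void {
    this.operationsSynced += operations;
  }

  addDataSynced(bytes: number): void {
    this.dataSyncedBytes += bytes;
  }
}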
package/src/sync/sync.ts
CHANGED
@@ -11,6 +11,7 @@ import { logger } from '@powersync/lib-services-framework';
 import { Metrics } from '../metrics/Metrics.js';
 import { mergeAsyncIterables } from './merge.js';
 import { TokenStreamOptions, tokenStream } from './util.js';
+import { RequestTracker } from './RequestTracker.js';
 
 /**
  * Maximum number of connections actively fetching data.
@@ -28,12 +29,14 @@ export interface SyncStreamParameters {
    */
   signal?: AbortSignal;
   tokenStreamOptions?: Partial<TokenStreamOptions>;
+
+  tracker: RequestTracker;
 }
 
 export async function* streamResponse(
   options: SyncStreamParameters
 ): AsyncIterable<util.StreamingSyncLine | string | null> {
-  const { storage, params, syncParams, token, tokenStreamOptions, signal } = options;
+  const { storage, params, syncParams, token, tokenStreamOptions, tracker, signal } = options;
   // We also need to be able to abort, so we create our own controller.
   const controller = new AbortController();
   if (signal) {
@@ -49,7 +52,7 @@ export async function* streamResponse(
     }
   }
   const ki = tokenStream(token, controller.signal, tokenStreamOptions);
-  const stream = streamResponseInner(storage, params, syncParams, controller.signal);
+  const stream = streamResponseInner(storage, params, syncParams, tracker, controller.signal);
   // Merge the two streams, and abort as soon as one of the streams end.
   const merged = mergeAsyncIterables([stream, ki], controller.signal);
 
@@ -72,6 +75,7 @@ async function* streamResponseInner(
   storage: storage.BucketStorageFactory,
   params: util.StreamingSyncRequest,
   syncParams: RequestParameters,
+  tracker: RequestTracker,
   signal: AbortSignal
 ): AsyncGenerator<util.StreamingSyncLine | string | null> {
   // Bucket state of bucket id -> op_id.
@@ -109,6 +113,11 @@ async function* streamResponseInner(
     });
 
     if (allBuckets.length > 1000) {
+      logger.error(`Too many buckets`, {
+        checkpoint,
+        user_id: syncParams.user_id,
+        buckets: allBuckets.length
+      });
       // TODO: Limit number of buckets even before we get to this point
       throw new Error(`Too many buckets: ${allBuckets.length}`);
     }
@@ -137,11 +146,18 @@ async function* streamResponseInner(
       }
      bucketsToFetch = diff.updatedBuckets.map((c) => c.bucket);
 
-      let message = `Updated checkpoint: ${checkpoint} | write: ${writeCheckpoint} | `;
+      let message = `Updated checkpoint: ${checkpoint} | `;
+      message += `write: ${writeCheckpoint} | `;
       message += `buckets: ${allBuckets.length} | `;
       message += `updated: ${limitedBuckets(diff.updatedBuckets, 20)} | `;
-      message += `removed: ${limitedBuckets(diff.removedBuckets, 20)}`;
-      logger.info(message);
+      message += `removed: ${limitedBuckets(diff.removedBuckets, 20)}`;
+      logger.info(message, {
+        checkpoint,
+        user_id: syncParams.user_id,
+        buckets: allBuckets.length,
+        updated: diff.updatedBuckets.length,
+        removed: diff.removedBuckets.length
+      });
 
       const checksum_line: util.StreamingSyncCheckpointDiff = {
         checkpoint_diff: {
@@ -156,7 +172,7 @@ async function* streamResponseInner(
     } else {
       let message = `New checkpoint: ${checkpoint} | write: ${writeCheckpoint} | `;
       message += `buckets: ${allBuckets.length} ${limitedBuckets(allBuckets, 20)}`;
-      logger.info(message);
+      logger.info(message, { checkpoint, user_id: syncParams.user_id, buckets: allBuckets.length });
       bucketsToFetch = allBuckets;
       const checksum_line: util.StreamingSyncCheckpoint = {
         checkpoint: {
@@ -172,7 +188,16 @@ async function* streamResponseInner(
 
     // This incrementally updates dataBuckets with each individual bucket position.
     // At the end of this, we can be sure that all buckets have data up to the checkpoint.
-    yield* bucketDataInBatches({ storage, checkpoint, bucketsToFetch, dataBuckets, raw_data, binary_data, signal });
+    yield* bucketDataInBatches({
+      storage,
+      checkpoint,
+      bucketsToFetch,
+      dataBuckets,
+      raw_data,
+      binary_data,
+      signal,
+      tracker
+    });
 
     await new Promise((resolve) => setTimeout(resolve, 10));
   }
@@ -186,6 +211,7 @@ interface BucketDataRequest {
   dataBuckets: Map<string, string>;
   raw_data: boolean | undefined;
   binary_data: boolean | undefined;
+  tracker: RequestTracker;
   signal: AbortSignal;
 }
 
@@ -221,11 +247,16 @@ async function* bucketDataInBatches(request: BucketDataRequest) {
   }
 }
 
+interface BucketDataBatchResult {
+  done: boolean;
+  data: any;
+}
+
 /**
  * Extracted as a separate internal function just to avoid memory leaks.
  */
-async function* bucketDataBatch(request: BucketDataRequest) {
-  const { storage, checkpoint, bucketsToFetch, dataBuckets, raw_data, binary_data, signal } = request;
+async function* bucketDataBatch(request: BucketDataRequest): AsyncGenerator<BucketDataBatchResult, void> {
+  const { storage, checkpoint, bucketsToFetch, dataBuckets, raw_data, binary_data, tracker, signal } = request;
 
   const [_, release] = await syncSemaphore.acquire();
   try {
@@ -272,7 +303,7 @@ async function* bucketDataBatch(request: BucketDataRequest) {
       // iterator memory in case if large data sent.
       yield { data: null, done: false };
     }
-
+    tracker.addOperationsSynced(r.data.length);
 
     dataBuckets.set(r.bucket, r.next_after);
   }
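Two things stand out in the hunks above. First, the `logger.error`/`logger.info` calls gain a structured metadata argument (`checkpoint`, `user_id`, bucket counts), so log aggregators can filter per user instead of parsing the `|`-delimited message string. Second, `bucketDataBatch` now has an explicit `AsyncGenerator<BucketDataBatchResult, void>` return type. The body of `bucketDataInBatches` is unchanged and not shown in this diff, but the `done` flag implies a driver loop roughly like the following sketch — hypothetical; only the `BucketDataBatchResult` shape and the `yield { data: null, done: false }` convention come from the diff:

// Hypothetical consumption pattern for bucketDataBatch. Re-invoking the
// generator until a batch reports done lets each invocation re-acquire
// syncSemaphore, so one long checkpoint can't starve other connections.
async function* bucketDataInBatchesSketch(request: BucketDataRequest) {
  let done = false;
  while (!done && !request.signal.aborted) {
    for await (const batch of bucketDataBatch(request)) {
      if (batch.data != null) {
        yield batch.data;
      }
      done = batch.done;
    }
  }
}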
package/src/sync/util.ts
CHANGED
@@ -2,6 +2,7 @@ import * as timers from 'timers/promises';
 
 import * as util from '../util/util-index.js';
 import { Metrics } from '../metrics/Metrics.js';
+import { RequestTracker } from './RequestTracker.js';
 
 export type TokenStreamOptions = {
   /**
@@ -89,10 +90,13 @@ export async function* ndjson(iterator: AsyncIterable<string | null | Record<string, any>>) {
   }
 }
 
-export async function* transformToBytesTracked(iterator: AsyncIterable<string>): AsyncGenerator<Buffer> {
+export async function* transformToBytesTracked(
+  iterator: AsyncIterable<string>,
+  tracker: RequestTracker
+): AsyncGenerator<Buffer> {
   for await (let data of iterator) {
     const encoded = Buffer.from(data, 'utf8');
-
+    tracker.addDataSynced(encoded.length);
     yield encoded;
   }
 }
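The routing half of this change lives in socket-route.ts and sync-stream.ts (+9/-1 each per the file list above), which are not reproduced in this section. Given the signatures above, the likely wiring is one RequestTracker per connection, shared by `streamResponse` (operation counts) and `transformToBytesTracked` (byte counts). A sketch under that assumption — the handler shape and response plumbing are invented for illustration:

// Hypothetical per-request wiring: streamResponse, ndjson,
// transformToBytesTracked and RequestTracker are from this diff;
// everything else is illustrative.
import { Readable } from 'stream';
import { SyncStreamParameters, streamResponse } from '../sync/sync.js';
import { ndjson, transformToBytesTracked } from '../sync/util.js';
import { RequestTracker } from '../sync/RequestTracker.js';

function syncStreamBody(options: Omit<SyncStreamParameters, 'tracker'>): Readable {
  // One tracker per connection, counting operations and bytes for this request only.
  const tracker = new RequestTracker();
  const syncLines = streamResponse({ ...options, tracker });
  // Serialize sync lines to NDJSON, counting encoded bytes on the way out.
  return Readable.from(transformToBytesTracked(ndjson(syncLines), tracker));
}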
package/test/src/sync.test.ts
CHANGED
@@ -9,6 +9,7 @@ import { streamResponse } from '../../src/sync/sync.js';
 import * as timers from 'timers/promises';
 import { lsnMakeComparable } from '@powersync/service-jpgwire';
 import { RequestParameters } from '@powersync/service-sync-rules';
+import { RequestTracker } from '@/sync/RequestTracker.js';
 
 describe('sync - mongodb', function () {
   defineTests(MONGO_STORAGE_FACTORY);
@@ -38,6 +39,8 @@ bucket_definitions:
 `;
 
 function defineTests(factory: StorageFactory) {
+  const tracker = new RequestTracker();
+
   test('sync global data', async () => {
     const f = await factory();
 
@@ -78,6 +81,7 @@ function defineTests(factory: StorageFactory) {
         include_checksum: true,
         raw_data: true
       },
+      tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
     });
@@ -118,6 +122,7 @@ function defineTests(factory: StorageFactory) {
         include_checksum: true,
         raw_data: false
       },
+      tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
     });
@@ -146,6 +151,7 @@ function defineTests(factory: StorageFactory) {
         include_checksum: true,
         raw_data: true
       },
+      tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: 0 } as any
     });
@@ -172,6 +178,7 @@ function defineTests(factory: StorageFactory) {
         include_checksum: true,
         raw_data: true
       },
+      tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
     });
@@ -232,6 +239,7 @@ function defineTests(factory: StorageFactory) {
         include_checksum: true,
         raw_data: true
       },
+      tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: exp } as any
     });