@powersync/service-core 1.9.0 → 1.10.0
This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +2 -0
- package/dist/index.js.map +1 -1
- package/dist/routes/configure-fastify.d.ts +1 -1
- package/dist/routes/endpoints/probes.d.ts +2 -2
- package/dist/routes/endpoints/probes.js +16 -2
- package/dist/routes/endpoints/probes.js.map +1 -1
- package/dist/storage/SyncRulesBucketStorage.d.ts +16 -5
- package/dist/storage/SyncRulesBucketStorage.js +2 -2
- package/dist/storage/SyncRulesBucketStorage.js.map +1 -1
- package/dist/storage/WriteCheckpointAPI.d.ts +24 -2
- package/dist/storage/WriteCheckpointAPI.js.map +1 -1
- package/dist/storage/bson.d.ts +4 -3
- package/dist/storage/bson.js +6 -10
- package/dist/storage/bson.js.map +1 -1
- package/dist/{sync → streams}/BroadcastIterable.js +2 -2
- package/dist/streams/BroadcastIterable.js.map +1 -0
- package/dist/streams/Demultiplexer.d.ts +52 -0
- package/dist/streams/Demultiplexer.js +128 -0
- package/dist/streams/Demultiplexer.js.map +1 -0
- package/dist/{sync → streams}/LastValueSink.d.ts +2 -2
- package/dist/{sync → streams}/LastValueSink.js +2 -2
- package/dist/streams/LastValueSink.js.map +1 -0
- package/dist/{sync → streams}/merge.js +1 -1
- package/dist/streams/merge.js.map +1 -0
- package/dist/streams/streams-index.d.ts +4 -0
- package/dist/streams/streams-index.js +5 -0
- package/dist/streams/streams-index.js.map +1 -0
- package/dist/sync/BucketChecksumState.d.ts +7 -2
- package/dist/sync/BucketChecksumState.js +61 -40
- package/dist/sync/BucketChecksumState.js.map +1 -1
- package/dist/sync/sync-index.d.ts +0 -3
- package/dist/sync/sync-index.js +0 -3
- package/dist/sync/sync-index.js.map +1 -1
- package/dist/sync/sync.js +2 -2
- package/dist/sync/sync.js.map +1 -1
- package/dist/sync/util.d.ts +10 -1
- package/dist/sync/util.js +30 -0
- package/dist/sync/util.js.map +1 -1
- package/package.json +2 -2
- package/src/index.ts +3 -0
- package/src/routes/endpoints/probes.ts +18 -2
- package/src/storage/SyncRulesBucketStorage.ts +18 -7
- package/src/storage/WriteCheckpointAPI.ts +28 -2
- package/src/storage/bson.ts +10 -12
- package/src/{sync → streams}/BroadcastIterable.ts +2 -2
- package/src/streams/Demultiplexer.ts +165 -0
- package/src/{sync → streams}/LastValueSink.ts +2 -2
- package/src/{sync → streams}/merge.ts +1 -1
- package/src/streams/streams-index.ts +4 -0
- package/src/sync/BucketChecksumState.ts +71 -55
- package/src/sync/sync-index.ts +0 -3
- package/src/sync/sync.ts +2 -2
- package/src/sync/util.ts +34 -1
- package/test/src/broadcast_iterable.test.ts +8 -8
- package/test/src/demultiplexer.test.ts +205 -0
- package/test/src/merge_iterable.test.ts +1 -1
- package/test/src/routes/probes.integration.test.ts +5 -5
- package/test/src/routes/probes.test.ts +5 -4
- package/test/src/sync/BucketChecksumState.test.ts +14 -13
- package/test/src/util.test.ts +48 -0
- package/tsconfig.tsbuildinfo +1 -1
- package/dist/sync/BroadcastIterable.js.map +0 -1
- package/dist/sync/LastValueSink.js.map +0 -1
- package/dist/sync/merge.js.map +0 -1
- package/dist/{sync → streams}/BroadcastIterable.d.ts +0 -0
- package/dist/{sync → streams}/merge.d.ts +0 -0
package/src/streams/Demultiplexer.ts ADDED

@@ -0,0 +1,165 @@
+import { AbortError } from 'ix/aborterror.js';
+import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
+import { LastValueSink } from './LastValueSink.js';
+
+export interface DemultiplexerValue<T> {
+  /**
+   * The key used for demultiplexing, for example the user id.
+   */
+  key: string;
+  /**
+   * The stream value.
+   */
+  value: T;
+}
+
+export interface DemultiplexerSource<T> {
+  /**
+   * The async iterator providing a stream of values.
+   */
+  iterator: AsyncIterable<DemultiplexerValue<T>>;
+
+  /**
+   * Fetches the first value for a given key.
+   *
+   * This is used to get an initial value for each subscription.
+   */
+  getFirstValue(key: string): Promise<T>;
+}
+
+export type DemultiplexerSourceFactory<T> = (signal: AbortSignal) => DemultiplexerSource<T>;
+
+/**
+ * Takes a multiplexed stream (e.g. a changestream covering many individual users),
+ * and allows subscribing to individual streams.
+ *
+ * The source subscription is lazy:
+ * 1. We only start subscribing when there is a downstream subscriber.
+ * 2. When all downstream subscriptions have ended, we end the source subscription.
+ *
+ * For each subscriber, if backpressure builds up, we only keep the _last_ value.
+ */
+export class Demultiplexer<T> {
+  private subscribers: Map<string, Set<LastValueSink<T>>> | undefined = undefined;
+  private abortController: AbortController | undefined = undefined;
+  private currentSource: DemultiplexerSource<T> | undefined = undefined;
+
+  constructor(private source: DemultiplexerSourceFactory<T>) {}
+
+  private start(filter: string, sink: LastValueSink<T>) {
+    const abortController = new AbortController();
+    const listeners = new Map();
+    listeners.set(filter, new Set([sink]));
+
+    this.abortController = abortController;
+    this.subscribers = listeners;
+
+    const source = this.source(abortController.signal);
+    this.currentSource = source;
+    this.loop(source, abortController, listeners);
+    return source;
+  }
+
+  private async loop(
+    source: DemultiplexerSource<T>,
+    abortController: AbortController,
+    sinks: Map<string, Set<LastValueSink<T>>>
+  ) {
+    try {
+      for await (let doc of source.iterator) {
+        if (abortController.signal.aborted || sinks.size == 0) {
+          throw new AbortError();
+        }
+        const key = doc.key;
+        const keySinks = sinks.get(key);
+        if (keySinks == null) {
+          continue;
+        }
+
+        for (let sink of keySinks) {
+          sink.write(doc.value);
+        }
+      }
+
+      // End of stream
+      for (let keySinks of sinks.values()) {
+        for (let sink of keySinks) {
+          sink.end();
+        }
+      }
+    } catch (e) {
+      // Just in case the error is not from the source
+      abortController.abort();
+
+      for (let keySinks of sinks.values()) {
+        for (let sink of keySinks) {
+          sink.error(e);
+        }
+      }
+    } finally {
+      // Clear state, so that a new subscription may be started
+      if (this.subscribers === sinks) {
+        this.subscribers = undefined;
+        this.abortController = undefined;
+        this.currentSource = undefined;
+      }
+    }
+  }
+
+  private removeSink(key: string, sink: LastValueSink<T>) {
+    const existing = this.subscribers?.get(key);
+    if (existing == null) {
+      return;
+    }
+    existing.delete(sink);
+    if (existing.size == 0) {
+      this.subscribers!.delete(key);
+    }
+
+    if (this.subscribers?.size == 0) {
+      // This is not immediate - there may be a delay until it is fully stopped,
+      // depending on the underlying source.
+      this.abortController?.abort();
+      this.subscribers = undefined;
+      this.abortController = undefined;
+      this.currentSource = undefined;
+    }
+  }
+
+  private addSink(key: string, sink: LastValueSink<T>) {
+    if (this.currentSource == null) {
+      return this.start(key, sink);
+    } else {
+      const existing = this.subscribers!.get(key);
+      if (existing != null) {
+        existing.add(sink);
+      } else {
+        this.subscribers!.set(key, new Set([sink]));
+      }
+      return this.currentSource;
+    }
+  }
+
+  /**
+   * Subscribe to a specific stream.
+   *
+   * @param key The key used for demultiplexing, e.g. user id.
+   * @param signal
+   */
+  async *subscribe(key: string, signal: AbortSignal): AsyncIterable<T> {
+    const sink = new LastValueSink<T>(undefined);
+    // Important that we register the sink before calling getFirstValue().
+    const source = this.addSink(key, sink);
+    try {
+      const firstValue = await source.getFirstValue(key);
+      yield firstValue;
+      yield* sink.withSignal(signal);
+    } finally {
+      this.removeSink(key, sink);
+    }
+  }
+
+  get active() {
+    return this.subscribers != null;
+  }
+}
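The new Demultiplexer fans a single keyed source stream out to per-key subscribers, creating the source lazily and tearing it down once the last subscriber leaves. A minimal usage sketch follows; the counter source is illustrative only, and the import assumes the new streams-index re-exports the class:

import * as timers from 'timers/promises';
import { Demultiplexer, DemultiplexerSource } from './streams/streams-index.js';

// Illustrative multiplexed source: an incrementing counter for one user,
// running until the demultiplexer aborts it (when the last subscriber leaves).
const demux = new Demultiplexer<number>((signal): DemultiplexerSource<number> => ({
  iterator: (async function* () {
    let i = 0;
    while (!signal.aborted) {
      yield { key: 'user-1', value: i++ };
      await timers.setTimeout(10);
    }
  })(),
  // Initial value delivered to each new subscription before live values.
  getFirstValue: async (key) => -1
}));

const controller = new AbortController();
for await (const value of demux.subscribe('user-1', controller.signal)) {
  console.log(value); // -1, then 0, 1, 2, ...; a slow consumer only sees the latest value
  if (value >= 2) break; // ending the last subscription aborts the source
}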
package/src/{sync → streams}/LastValueSink.ts RENAMED

@@ -17,7 +17,7 @@ export class LastValueSink<T> implements AsyncIterable<T> {
     }
   }
 
-
+  write(value: T) {
     this.push({
      value,
      done: false,
@@ -25,7 +25,7 @@ export class LastValueSink<T> implements AsyncIterable<T> {
    });
   }
 
-
+  end() {
     this.push({
      value: undefined,
      done: true,
package/src/{sync → streams}/merge.ts RENAMED

@@ -1,7 +1,7 @@
 import { throwIfAborted } from 'ix/aborterror.js';
 import { AsyncIterableX } from 'ix/asynciterable/index.js';
 import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
-import { safeRace } from '
+import { safeRace } from '../sync/safeRace.js';
 
 /**
  * Merge multiple source AsyncIterables into one output AsyncIterable.
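Only the safeRace import path changes here, following the move from sync/ to streams/. For orientation, merging looks roughly like the sketch below; the exact mergeAsyncIterables signature is not shown in this diff, so the array-argument call shape is an assumption:

import { mergeAsyncIterables } from './streams/streams-index.js';

async function* numbers(from: number): AsyncIterable<number> {
  for (let i = from; i < from + 3; i++) {
    yield i;
  }
}

// Assumed call shape: merge an array of sources into one output iterable.
for await (const value of mergeAsyncIterables([numbers(0), numbers(100)])) {
  console.log(value); // interleaving depends on source timing
}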
package/src/sync/BucketChecksumState.ts CHANGED

@@ -4,9 +4,11 @@ import * as storage from '../storage/storage-index.js';
 import * as util from '../util/util-index.js';
 
 import { ErrorCode, logger, ServiceAssertionError, ServiceError } from '@powersync/lib-services-framework';
+import { JSONBig } from '@powersync/service-jsonbig';
 import { BucketParameterQuerier } from '@powersync/service-sync-rules/src/BucketParameterQuerier.js';
 import { BucketSyncState } from './sync.js';
 import { SyncContext } from './SyncContext.js';
+import { getIntersection, hasIntersection } from './util.js';
 
 export interface BucketChecksumStateOptions {
   syncContext: SyncContext;
@@ -68,10 +70,9 @@ export class BucketChecksumState {
     const storage = this.bucketStorage;
 
     const update = await this.parameterState.getCheckpointUpdate(next);
-    if (update == null) {
+    if (update == null && this.lastWriteCheckpoint == writeCheckpoint) {
       return null;
     }
-
     const { buckets: allBuckets, updatedBuckets } = update;
 
     let dataBucketsNew = new Map<string, BucketSyncState>();
@@ -90,7 +91,7 @@ export class BucketChecksumState {
     }
 
     let checksumMap: util.ChecksumMap;
-    if (updatedBuckets !=
+    if (updatedBuckets != INVALIDATE_ALL_BUCKETS) {
       if (this.lastChecksums == null) {
         throw new ServiceAssertionError(`Bucket diff received without existing checksums`);
       }
@@ -113,9 +114,11 @@ export class BucketChecksumState {
        }
      }
 
-
-
-
+      if (checksumLookups.length > 0) {
+        let updatedChecksums = await storage.getChecksums(base.checkpoint, checksumLookups);
+        for (let [bucket, value] of updatedChecksums.entries()) {
+          newChecksums.set(bucket, value);
+        }
       }
       checksumMap = newChecksums;
     } else {
@@ -123,6 +126,7 @@ export class BucketChecksumState {
       const bucketList = [...dataBucketsNew.keys()];
       checksumMap = await storage.getChecksums(base.checkpoint, bucketList);
     }
+
     // Subset of buckets for which there may be new data in this batch.
     let bucketsToFetch: BucketDescription[];
 
@@ -247,6 +251,8 @@ export class BucketChecksumState {
   }
 }
 
+const INVALIDATE_ALL_BUCKETS = Symbol('INVALIDATE_ALL_BUCKETS');
+
 export interface CheckpointUpdate {
   /**
    * All buckets forming part of the checkpoint.
@@ -258,7 +264,7 @@ export interface CheckpointUpdate {
   *
   * If null, assume that any bucket in `buckets` may have been updated.
   */
-  updatedBuckets: Set<string> |
+  updatedBuckets: Set<string> | typeof INVALIDATE_ALL_BUCKETS;
 }
 
 export class BucketParameterState {
@@ -268,6 +274,10 @@ export class BucketParameterState {
   public readonly syncParams: RequestParameters;
   private readonly querier: BucketParameterQuerier;
   private readonly staticBuckets: Map<string, BucketDescription>;
+  private cachedDynamicBuckets: BucketDescription[] | null = null;
+  private cachedDynamicBucketSet: Set<string> | null = null;
+
+  private readonly lookups: Set<string>;
 
   constructor(
     context: SyncContext,
@@ -282,21 +292,18 @@ export class BucketParameterState {
 
     this.querier = syncRules.getBucketParameterQuerier(this.syncParams);
     this.staticBuckets = new Map<string, BucketDescription>(this.querier.staticBuckets.map((b) => [b.bucket, b]));
+    this.lookups = new Set<string>(this.querier.parameterQueryLookups.map((l) => JSONBig.stringify(l.values)));
   }
 
-  async getCheckpointUpdate(checkpoint: storage.StorageCheckpointUpdate): Promise<CheckpointUpdate
+  async getCheckpointUpdate(checkpoint: storage.StorageCheckpointUpdate): Promise<CheckpointUpdate> {
     const querier = this.querier;
-    let update: CheckpointUpdate
+    let update: CheckpointUpdate;
     if (querier.hasDynamicBuckets) {
       update = await this.getCheckpointUpdateDynamic(checkpoint);
     } else {
       update = await this.getCheckpointUpdateStatic(checkpoint);
     }
 
-    if (update == null) {
-      return null;
-    }
-
     if (update.buckets.length > this.context.maxParameterQueryResults) {
       // TODO: Limit number of results even before we get to this point
       // This limit applies _before_ we get the unique set
@@ -318,32 +325,18 @@ export class BucketParameterState {
   /**
    * For static buckets, we can keep track of which buckets have been updated.
   */
-  private async getCheckpointUpdateStatic(
-    checkpoint: storage.StorageCheckpointUpdate
-  ): Promise<CheckpointUpdate | null> {
+  private async getCheckpointUpdateStatic(checkpoint: storage.StorageCheckpointUpdate): Promise<CheckpointUpdate> {
     const querier = this.querier;
     const update = checkpoint.update;
 
     if (update.invalidateDataBuckets) {
       return {
         buckets: querier.staticBuckets,
-        updatedBuckets:
+        updatedBuckets: INVALIDATE_ALL_BUCKETS
       };
     }
 
-
-
-    for (let bucket of update.updatedDataBuckets ?? []) {
-      if (this.staticBuckets.has(bucket)) {
-        updatedBuckets.add(bucket);
-      }
-    }
-
-    if (updatedBuckets.size == 0) {
-      // No change - skip this checkpoint
-      return null;
-    }
-
+    const updatedBuckets = new Set<string>(getIntersection(this.staticBuckets, update.updatedDataBuckets));
     return {
       buckets: querier.staticBuckets,
       updatedBuckets
@@ -353,44 +346,67 @@ export class BucketParameterState {
   /**
    * For dynamic buckets, we need to re-query the list of buckets every time.
   */
-  private async getCheckpointUpdateDynamic(
-    checkpoint: storage.StorageCheckpointUpdate
-  ): Promise<CheckpointUpdate | null> {
+  private async getCheckpointUpdateDynamic(checkpoint: storage.StorageCheckpointUpdate): Promise<CheckpointUpdate> {
     const querier = this.querier;
     const storage = this.bucketStorage;
     const staticBuckets = querier.staticBuckets;
     const update = checkpoint.update;
 
-    let
-
-
-
-
+    let hasParameterChange = false;
+    let invalidateDataBuckets = false;
+    // If hasParameterChange == true, then invalidateDataBuckets = true
+    // If invalidateDataBuckets == true, we ignore updatedBuckets
+    let updatedBuckets = new Set<string>();
+
+    if (update.invalidateDataBuckets) {
+      invalidateDataBuckets = true;
+    }
+
+    if (update.invalidateParameterBuckets) {
+      hasParameterChange = true;
     } else {
-
-
-
-      break;
-    }
+      if (hasIntersection(this.lookups, update.updatedParameterLookups)) {
+        // This is a very coarse re-check of all queries
+        hasParameterChange = true;
+      }
     }
 
-
-
-
+    let dynamicBuckets: BucketDescription[];
+    if (hasParameterChange || this.cachedDynamicBuckets == null || this.cachedDynamicBucketSet == null) {
+      dynamicBuckets = await querier.queryDynamicBucketDescriptions({
+        getParameterSets(lookups) {
+          return storage.getParameterSets(checkpoint.base.checkpoint, lookups);
+        }
+      });
+      this.cachedDynamicBuckets = dynamicBuckets;
+      this.cachedDynamicBucketSet = new Set<string>(dynamicBuckets.map((b) => b.bucket));
+      invalidateDataBuckets = true;
+    } else {
+      dynamicBuckets = this.cachedDynamicBuckets;
 
-
-
-
+      if (!invalidateDataBuckets) {
+        for (let bucket of getIntersection(this.staticBuckets, update.updatedDataBuckets)) {
+          updatedBuckets.add(bucket);
+        }
+        for (let bucket of getIntersection(this.cachedDynamicBucketSet, update.updatedDataBuckets)) {
+          updatedBuckets.add(bucket);
+        }
      }
-    }
+    }
     const allBuckets = [...staticBuckets, ...dynamicBuckets];
 
-
-
-
-
-
+    if (invalidateDataBuckets) {
+      return {
+        buckets: allBuckets,
+        // We cannot track individual bucket updates for dynamic lookups yet
+        updatedBuckets: INVALIDATE_ALL_BUCKETS
+      };
+    } else {
+      return {
+        buckets: allBuckets,
+        updatedBuckets: updatedBuckets
+      };
+    }
   }
 }
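The nullable updatedBuckets set is replaced above by a unique-symbol sentinel, so "no usable diff" can no longer be confused with an empty or absent set. A condensed standalone sketch of the pattern (not the package's own code):

const INVALIDATE_ALL = Symbol('INVALIDATE_ALL');

interface Update {
  // Either a precise diff of updated buckets, or "assume everything changed".
  updatedBuckets: Set<string> | typeof INVALIDATE_ALL;
}

function bucketsToRefresh(update: Update, allBuckets: string[]): string[] {
  const updated = update.updatedBuckets;
  if (updated === INVALIDATE_ALL) {
    return allBuckets; // no diff available: re-check every bucket
  }
  // `updated` is narrowed to Set<string> from here on.
  return allBuckets.filter((bucket) => updated.has(bucket));
}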
package/src/sync/sync-index.ts CHANGED

package/src/sync/sync.ts CHANGED
@@ -9,7 +9,7 @@ import * as util from '../util/util-index.js';
 
 import { logger } from '@powersync/lib-services-framework';
 import { BucketChecksumState } from './BucketChecksumState.js';
-import { mergeAsyncIterables } from '
+import { mergeAsyncIterables } from '../streams/streams-index.js';
 import { acquireSemaphoreAbortable, settledPromise, tokenStream, TokenStreamOptions } from './util.js';
 import { SyncContext } from './SyncContext.js';
 import { RequestTracker } from './RequestTracker.js';
@@ -105,7 +105,7 @@ async function* streamResponseInner(
         after: BigInt(bucket.after)
       }))
     });
-  const stream = bucketStorage.
+  const stream = bucketStorage.watchCheckpointChanges({
     user_id: checkpointUserId,
     signal
   });
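Together with the SyncRulesBucketStorage changes listed above, the sync loop now obtains checkpoint updates via watchCheckpointChanges. A hedged consumption sketch; only the options object is shown in this diff, so the iterable return and element shape are assumptions:

const controller = new AbortController();
const stream = bucketStorage.watchCheckpointChanges({
  user_id: checkpointUserId,
  signal: controller.signal
});

for await (const event of stream) {
  // Each event presumably carries a storage checkpoint update for this user,
  // which BucketChecksumState turns into checkpoint lines for the client.
}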
package/src/sync/util.ts CHANGED
@@ -1,8 +1,8 @@
 import * as timers from 'timers/promises';
 
+import { SemaphoreInterface } from 'async-mutex';
 import * as util from '../util/util-index.js';
 import { RequestTracker } from './RequestTracker.js';
-import { SemaphoreInterface } from 'async-mutex';
 
 export type TokenStreamOptions = {
   /**
@@ -153,3 +153,36 @@ export function settledPromise<T>(promise: Promise<T>): Promise<PromiseSettledRe
     }
   );
 }
+
+export type MapOrSet<T> = Map<T, any> | Set<T>;
+
+/**
+ * Check if two sets have any element(s) in common.
+ */
+export function hasIntersection<T>(a: MapOrSet<T>, b: MapOrSet<T>) {
+  for (let _ of getIntersection(a, b)) {
+    return true;
+  }
+  return false;
+}
+
+/**
+ * Return the intersection of two sets or maps.
+ */
+export function* getIntersection<T>(a: MapOrSet<T>, b: MapOrSet<T>): IterableIterator<T> {
+  // Iterate over the smaller set to reduce the number of lookups
+  if (a.size < b.size) {
+    for (let key of a.keys()) {
+      if (b.has(key)) {
+        yield key;
+      }
+    }
+    return false;
+  } else {
+    for (let key of b.keys()) {
+      if (a.has(key)) {
+        yield key;
+      }
+    }
+  }
+}
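The new helpers accept either Maps or Sets (anything with size, has() and keys()) and iterate the smaller side, which is how BucketChecksumState intersects its static-bucket Map with updated-bucket Sets. A usage example, importing via the same package-internal path the diff uses:

import { getIntersection, hasIntersection } from './util.js';

const staticBuckets = new Map<string, number>([
  ['by_user["u1"]', 1],
  ['global[]', 2]
]);
const updated = new Set(['global[]', 'by_user["u2"]']);

console.log([...getIntersection(staticBuckets, updated)]); // ['global[]']
console.log(hasIntersection(staticBuckets, updated)); // true
console.log(hasIntersection(staticBuckets, new Set())); // false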
package/test/src/broadcast_iterable.test.ts CHANGED

@@ -1,14 +1,14 @@
-import { BroadcastIterable, IterableSource } from '@/
+import { BroadcastIterable, IterableSource } from '@/streams/BroadcastIterable.js';
 import { AsyncIterableX, interval } from 'ix/asynciterable/index.js';
 import { delayEach } from 'ix/asynciterable/operators/delayeach.js';
 import { take } from 'ix/asynciterable/operators/take.js';
 import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
 import { toArray } from 'ix/asynciterable/toarray.js';
 import * as timers from 'timers/promises';
-import { describe, expect,
+import { describe, expect, it } from 'vitest';
 
 describe('BroadcastIterable', () => {
-
+  it('should iterate', async () => {
     const range = AsyncIterableX.from([1, 2, 3]);
     const broadcast = new BroadcastIterable(() => range);
 
@@ -17,7 +17,7 @@ describe('BroadcastIterable', () => {
     expect(broadcast.active).toBe(false);
   });
 
-
+  it('should skip values if sink is slow', async () => {
     const range = AsyncIterableX.from([1, 2, 3]);
     const broadcast = new BroadcastIterable(() => range);
 
@@ -30,7 +30,7 @@ describe('BroadcastIterable', () => {
     expect(broadcast.active).toBe(false);
   });
 
-
+  it('should abort', async () => {
     const range = AsyncIterableX.from([1, 2, 3]);
     let recordedSignal: AbortSignal | undefined;
     const broadcast = new BroadcastIterable((signal) => {
@@ -46,7 +46,7 @@ describe('BroadcastIterable', () => {
     expect(recordedSignal!.aborted).toEqual(true);
   });
 
-
+  it('should handle indefinite sources', async () => {
     const source: IterableSource<number> = (signal) => {
       return wrapWithAbort(interval(1), signal);
     };
@@ -65,7 +65,7 @@ describe('BroadcastIterable', () => {
     expect(broadcast.active).toBe(false);
   });
 
-
+  it('should handle multiple subscribers', async () => {
     let sourceIndex = 0;
     const source = async function* (signal: AbortSignal) {
       // Test value out by 1000 means it may have used the wrong iteration of the source
@@ -111,7 +111,7 @@ describe('BroadcastIterable', () => {
     expect(results3[4]).toBeLessThan(2145);
   });
 
-
+  it('should handle errors on multiple subscribers', async () => {
     let sourceIndex = 0;
     const source = async function* (signal: AbortSignal) {
       // Test value out by 1000 means it may have used the wrong iteration of the source