@powersync/service-core 0.0.0-dev-20250227082606 → 0.0.0-dev-20250303114151
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1 -10
- package/dist/routes/RouterEngine.js.map +1 -1
- package/dist/routes/endpoints/socket-route.js +1 -2
- package/dist/routes/endpoints/socket-route.js.map +1 -1
- package/dist/routes/endpoints/sync-stream.js +1 -2
- package/dist/routes/endpoints/sync-stream.js.map +1 -1
- package/dist/sync/BucketChecksumState.d.ts +1 -5
- package/dist/sync/BucketChecksumState.js +5 -13
- package/dist/sync/BucketChecksumState.js.map +1 -1
- package/dist/sync/sync-index.d.ts +0 -1
- package/dist/sync/sync-index.js +0 -1
- package/dist/sync/sync-index.js.map +1 -1
- package/dist/sync/sync.d.ts +1 -3
- package/dist/sync/sync.js +18 -8
- package/dist/sync/sync.js.map +1 -1
- package/dist/system/ServiceContext.d.ts +0 -3
- package/dist/system/ServiceContext.js +0 -7
- package/dist/system/ServiceContext.js.map +1 -1
- package/dist/util/config/compound-config-collector.js +1 -13
- package/dist/util/config/compound-config-collector.js.map +1 -1
- package/dist/util/config/types.d.ts +2 -7
- package/dist/util/config/types.js.map +1 -1
- package/package.json +2 -2
- package/src/routes/RouterEngine.ts +0 -1
- package/src/routes/endpoints/socket-route.ts +1 -2
- package/src/routes/endpoints/sync-stream.ts +1 -2
- package/src/sync/BucketChecksumState.ts +5 -31
- package/src/sync/sync-index.ts +0 -1
- package/src/sync/sync.ts +24 -21
- package/src/system/ServiceContext.ts +0 -9
- package/src/util/config/compound-config-collector.ts +1 -24
- package/src/util/config/types.ts +2 -8
- package/test/src/sync/BucketChecksumState.test.ts +0 -15
- package/tsconfig.tsbuildinfo +1 -1
- package/dist/sync/SyncContext.d.ts +0 -17
- package/dist/sync/SyncContext.js +0 -23
- package/dist/sync/SyncContext.js.map +0 -1
- package/dist/util/config/defaults.d.ts +0 -5
- package/dist/util/config/defaults.js +0 -6
- package/dist/util/config/defaults.js.map +0 -1
- package/src/sync/SyncContext.ts +0 -36
- package/src/util/config/defaults.ts +0 -5
@@ -20,7 +20,7 @@ export const syncStreamed = routeDefinition({
   validator: schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }),
   handler: async (payload) => {
     const { service_context } = payload.context;
-    const { routerEngine, storageEngine, syncContext } = service_context;
+    const { routerEngine, storageEngine } = service_context;
     const headers = payload.request.headers;
     const userAgent = headers['x-user-agent'] ?? headers['user-agent'];
     const clientId = payload.params.client_id;
@@ -56,7 +56,6 @@ export const syncStreamed = routeDefinition({
         sync.transformToBytesTracked(
           sync.ndjson(
             sync.streamResponse({
-              syncContext: syncContext,
               bucketStorage,
               syncRules: syncRules,
               params,
@@ -6,10 +6,8 @@ import * as util from '../util/util-index.js';
 import { ErrorCode, logger, ServiceAssertionError, ServiceError } from '@powersync/lib-services-framework';
 import { BucketParameterQuerier } from '@powersync/service-sync-rules/src/BucketParameterQuerier.js';
 import { BucketSyncState } from './sync.js';
-import { SyncContext } from './SyncContext.js';
 
 export interface BucketChecksumStateOptions {
-  syncContext: SyncContext;
   bucketStorage: BucketChecksumStateStorage;
   syncRules: SqlSyncRules;
   syncParams: RequestParameters;
@@ -22,7 +20,6 @@ export interface BucketChecksumStateOptions {
  * Handles incrementally re-computing checkpoints.
  */
 export class BucketChecksumState {
-  private readonly context: SyncContext;
   private readonly bucketStorage: BucketChecksumStateStorage;
 
   /**
@@ -46,14 +43,8 @@ export class BucketChecksumState {
   private pendingBucketDownloads = new Set<string>();
 
   constructor(options: BucketChecksumStateOptions) {
-    this.context = options.syncContext;
     this.bucketStorage = options.bucketStorage;
-    this.parameterState = new BucketParameterState(
-      options.syncContext,
-      options.bucketStorage,
-      options.syncRules,
-      options.syncParams
-    );
+    this.parameterState = new BucketParameterState(options.bucketStorage, options.syncRules, options.syncParams);
     this.bucketDataPositions = new Map();
 
     for (let { name, after: start } of options.initialBucketPositions ?? []) {
@@ -82,12 +73,6 @@ export class BucketChecksumState {
       });
     }
     this.bucketDataPositions = dataBucketsNew;
-    if (dataBucketsNew.size > this.context.maxBuckets) {
-      throw new ServiceError(
-        ErrorCode.PSYNC_S2305,
-        `Too many buckets: ${dataBucketsNew.size} (limit of ${this.context.maxBuckets})`
-      );
-    }
 
     let checksumMap: util.ChecksumMap;
     if (updatedBuckets != null) {
@@ -262,20 +247,13 @@ export interface CheckpointUpdate {
 }
 
 export class BucketParameterState {
-  private readonly context: SyncContext;
   public readonly bucketStorage: BucketChecksumStateStorage;
   public readonly syncRules: SqlSyncRules;
   public readonly syncParams: RequestParameters;
   private readonly querier: BucketParameterQuerier;
   private readonly staticBuckets: Map<string, BucketDescription>;
 
-  constructor(
-    context: SyncContext,
-    bucketStorage: BucketChecksumStateStorage,
-    syncRules: SqlSyncRules,
-    syncParams: RequestParameters
-  ) {
-    this.context = context;
+  constructor(bucketStorage: BucketChecksumStateStorage, syncRules: SqlSyncRules, syncParams: RequestParameters) {
     this.bucketStorage = bucketStorage;
     this.syncRules = syncRules;
     this.syncParams = syncParams;
@@ -297,13 +275,9 @@ export class BucketParameterState {
       return null;
     }
 
-    if (update.buckets.length > this.context.maxParameterQueryResults) {
-      // TODO: Limit number of buckets even before we get to this point
-
-      const error = new ServiceError(
-        ErrorCode.PSYNC_S2305,
-        `Too many parameter query results: ${update.buckets.length} (limit of ${this.context.maxParameterQueryResults})`
-      );
+    if (update.buckets.length > 1000) {
+      // TODO: Limit number of buckets even before we get to this point
+      const error = new ServiceError(ErrorCode.PSYNC_S2305, `Too many buckets: ${update.buckets.length}`);
       logger.error(error.message, {
         checkpoint: checkpoint,
         user_id: this.syncParams.user_id,
package/src/sync/sync-index.ts CHANGED
package/src/sync/sync.ts CHANGED
@@ -1,5 +1,6 @@
 import { JSONBig, JsonContainer } from '@powersync/service-jsonbig';
 import { BucketDescription, BucketPriority, RequestParameters, SqlSyncRules } from '@powersync/service-sync-rules';
+import { Semaphore, withTimeout } from 'async-mutex';
 
 import { AbortError } from 'ix/aborterror.js';
 
@@ -10,12 +11,28 @@ import * as util from '../util/util-index.js';
 import { logger } from '@powersync/lib-services-framework';
 import { BucketChecksumState } from './BucketChecksumState.js';
 import { mergeAsyncIterables } from './merge.js';
-import { acquireSemaphoreAbortable, settledPromise, tokenStream, TokenStreamOptions } from './util.js';
-import { SyncContext } from './SyncContext.js';
 import { RequestTracker } from './RequestTracker.js';
+import { acquireSemaphoreAbortable, settledPromise, tokenStream, TokenStreamOptions } from './util.js';
+
+/**
+ * Maximum number of connections actively fetching data.
+ */
+const MAX_ACTIVE_CONNECTIONS = 10;
+
+/**
+ * Maximum duration to wait for the mutex to become available.
+ *
+ * This gives an explicit error if there are mutex issues, rather than just hanging.
+ */
+const MUTEX_ACQUIRE_TIMEOUT = 30_000;
+
+const syncSemaphore = withTimeout(
+  new Semaphore(MAX_ACTIVE_CONNECTIONS),
+  MUTEX_ACQUIRE_TIMEOUT,
+  new Error(`Timeout while waiting for data`)
+);
 
 export interface SyncStreamParameters {
-  syncContext: SyncContext;
   bucketStorage: storage.SyncRulesBucketStorage;
   syncRules: SqlSyncRules;
   params: util.StreamingSyncRequest;
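Note on the pattern introduced above (this note is not part of the diff): `withTimeout` from async-mutex wraps the semaphore so that `acquire()` rejects with the supplied error once the timeout elapses instead of waiting indefinitely, while `new Semaphore(MAX_ACTIVE_CONNECTIONS)` caps the number of concurrent holders. A minimal usage sketch, assuming standard async-mutex semantics; `fetchWithLimit` is a hypothetical helper, not code from this package:

```ts
import { Semaphore, withTimeout } from 'async-mutex';

// Cap concurrent holders at 10; acquire() rejects with the given error after 30s of waiting.
const semaphore = withTimeout(new Semaphore(10), 30_000, new Error('Timeout while waiting for data'));

// Hypothetical helper illustrating acquire/release around a unit of work.
async function fetchWithLimit<T>(work: () => Promise<T>): Promise<T> {
  const [, release] = await semaphore.acquire(); // may reject on timeout
  try {
    return await work();
  } finally {
    release(); // always return the slot, even if work() throws
  }
}
```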
@@ -33,8 +50,7 @@ export interface SyncStreamParameters {
 export async function* streamResponse(
   options: SyncStreamParameters
 ): AsyncIterable<util.StreamingSyncLine | string | null> {
-  const { syncContext, bucketStorage, syncRules, params, syncParams, token, tokenStreamOptions, tracker, signal } =
-    options;
+  const { bucketStorage, syncRules, params, syncParams, token, tokenStreamOptions, tracker, signal } = options;
   // We also need to be able to abort, so we create our own controller.
   const controller = new AbortController();
   if (signal) {
@@ -50,15 +66,7 @@ export async function* streamResponse(
     }
   }
   const ki = tokenStream(token, controller.signal, tokenStreamOptions);
-  const stream = streamResponseInner(
-    syncContext,
-    bucketStorage,
-    syncRules,
-    params,
-    syncParams,
-    tracker,
-    controller.signal
-  );
+  const stream = streamResponseInner(bucketStorage, syncRules, params, syncParams, tracker, controller.signal);
   // Merge the two streams, and abort as soon as one of the streams end.
   const merged = mergeAsyncIterables([stream, ki], controller.signal);
 
@@ -83,7 +91,6 @@ export type BucketSyncState = {
 };
 
 async function* streamResponseInner(
-  syncContext: SyncContext,
   bucketStorage: storage.SyncRulesBucketStorage,
   syncRules: SqlSyncRules,
   params: util.StreamingSyncRequest,
@@ -96,7 +103,6 @@ async function* streamResponseInner(
   const checkpointUserId = util.checkpointUserId(syncParams.token_parameters.user_id as string, params.client_id);
 
   const checksumState = new BucketChecksumState({
-    syncContext,
     bucketStorage,
     syncRules,
     syncParams,
@@ -189,7 +195,6 @@ async function* streamResponseInner(
     }
 
     yield* bucketDataInBatches({
-      syncContext: syncContext,
       bucketStorage: bucketStorage,
       checkpoint: next.value.value.base.checkpoint,
       bucketsToFetch: buckets,
@@ -216,7 +221,6 @@ async function* streamResponseInner(
 }
 
 interface BucketDataRequest {
-  syncContext: SyncContext;
   bucketStorage: storage.SyncRulesBucketStorage;
   checkpoint: string;
   bucketsToFetch: BucketDescription[];
@@ -278,7 +282,6 @@ interface BucketDataBatchResult {
  */
 async function* bucketDataBatch(request: BucketDataRequest): AsyncGenerator<BucketDataBatchResult, void> {
   const {
-    syncContext,
     bucketStorage: storage,
     checkpoint,
     bucketsToFetch,
@@ -293,10 +296,10 @@ async function* bucketDataBatch(request: BucketDataRequest): AsyncGenerator<Buck
   const checkpointOp = BigInt(checkpoint);
   let checkpointInvalidated = false;
 
-  if (
+  if (syncSemaphore.isLocked()) {
     logger.info('Sync concurrency limit reached, waiting for lock', { user_id: request.user_id });
   }
-  const acquired = await acquireSemaphoreAbortable(
+  const acquired = await acquireSemaphoreAbortable(syncSemaphore, AbortSignal.any([abort_batch]));
   if (acquired === 'aborted') {
     return;
   }
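For context on `acquireSemaphoreAbortable` used above: its implementation is not shown in this diff, but the pattern it names — racing a semaphore acquire against an AbortSignal and reporting `'aborted'` if the signal wins — can be sketched as below. This is an illustrative sketch only, assuming async-mutex semantics; it is not the package's actual helper.

```ts
import type { SemaphoreInterface } from 'async-mutex';

type Releaser = () => void;

// Hypothetical sketch: wait for a semaphore permit, but give up if the signal aborts first.
async function acquireSemaphoreAbortableSketch(
  semaphore: SemaphoreInterface,
  signal: AbortSignal
): Promise<Releaser | 'aborted'> {
  if (signal.aborted) {
    return 'aborted';
  }
  const pending = semaphore.acquire();
  const aborted = new Promise<'aborted'>((resolve) =>
    signal.addEventListener('abort', () => resolve('aborted'), { once: true })
  );
  const result = await Promise.race([pending, aborted]);
  if (result === 'aborted') {
    // If the permit is still granted later, hand it straight back.
    pending.then(([, release]) => release()).catch(() => {});
    return 'aborted';
  }
  const [, release] = result;
  return release;
}
```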
@@ -7,7 +7,6 @@ import * as replication from '../replication/replication-index.js';
 import * as routes from '../routes/routes-index.js';
 import * as storage from '../storage/storage-index.js';
 import * as utils from '../util/util-index.js';
-import { SyncContext } from '../sync/SyncContext.js';
 
 export interface ServiceContext {
   configuration: utils.ResolvedPowerSyncConfig;
@@ -17,7 +16,6 @@ export interface ServiceContext {
   routerEngine: routes.RouterEngine | null;
   storageEngine: storage.StorageEngine;
   migrations: PowerSyncMigrationManager;
-  syncContext: SyncContext;
 }
 
 /**
@@ -28,7 +26,6 @@ export interface ServiceContext {
 export class ServiceContextContainer implements ServiceContext {
   lifeCycleEngine: LifeCycledSystem;
   storageEngine: storage.StorageEngine;
-  syncContext: SyncContext;
 
   constructor(public configuration: utils.ResolvedPowerSyncConfig) {
     this.lifeCycleEngine = new LifeCycledSystem();
@@ -37,12 +34,6 @@ export class ServiceContextContainer implements ServiceContext {
       configuration
     });
 
-    this.syncContext = new SyncContext({
-      maxDataFetchConcurrency: configuration.api_parameters.max_data_fetch_concurrency,
-      maxBuckets: configuration.api_parameters.max_buckets_per_connection,
-      maxParameterQueryResults: configuration.api_parameters.max_parameter_query_results
-    });
-
     const migrationManager = new MigrationManager();
     container.register(framework.ContainerImplementation.MIGRATION_MANAGER, migrationManager);
 
@@ -10,13 +10,6 @@ import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-
 import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js';
 import { SyncRulesCollector } from './sync-rules/sync-collector.js';
 import { ResolvedPowerSyncConfig, RunnerConfig, SyncRulesConfig } from './types.js';
-import {
-  DEFAULT_MAX_BUCKETS_PER_CONNECTION,
-  DEFAULT_MAX_CONCURRENT_CONNECTIONS,
-  DEFAULT_MAX_DATA_FETCH_CONCURRENCY,
-  DEFAULT_MAX_PARAMETER_QUERY_RESULTS,
-  DEFAULT_MAX_POOL_SIZE
-} from './defaults.js';
 
 export type CompoundConfigCollectorOptions = {
   /**
@@ -131,12 +124,7 @@ export class CompoundConfigCollector {
     let config: ResolvedPowerSyncConfig = {
       base_config: baseConfig,
       connections: baseConfig.replication?.connections || [],
-      storage: {
-        ...baseConfig.storage,
-        parameters: {
-          max_pool_size: baseConfig.storage?.parameters?.max_pool_size ?? DEFAULT_MAX_POOL_SIZE
-        }
-      },
+      storage: baseConfig.storage,
       client_keystore: keyStore,
       // Dev tokens only use the static keys, no external key sources
      // We may restrict this even further to only the powersync-dev key.
@@ -158,17 +146,6 @@ export class CompoundConfigCollector {
         internal_service_endpoint:
           baseConfig.telemetry?.internal_service_endpoint ?? 'https://pulse.journeyapps.com/v1/metrics'
       },
-      api_parameters: {
-        max_buckets_per_connection:
-          baseConfig.api?.parameters?.max_buckets_per_connection ?? DEFAULT_MAX_BUCKETS_PER_CONNECTION,
-
-        max_parameter_query_results:
-          baseConfig.api?.parameters?.max_parameter_query_results ?? DEFAULT_MAX_PARAMETER_QUERY_RESULTS,
-        max_concurrent_connections:
-          baseConfig.api?.parameters?.max_concurrent_connections ?? DEFAULT_MAX_CONCURRENT_CONNECTIONS,
-        max_data_fetch_concurrency:
-          baseConfig.api?.parameters?.max_data_fetch_concurrency ?? DEFAULT_MAX_DATA_FETCH_CONCURRENCY
-      },
       // TODO maybe move this out of the connection or something
       // slot_name_prefix: connections[0]?.slot_name_prefix ?? 'powersync_'
       slot_name_prefix: 'powersync_',
package/src/util/config/types.ts CHANGED
@@ -1,4 +1,5 @@
 import { configFile } from '@powersync/service-types';
+import { PowerSyncConfig } from '@powersync/service-types/src/config/PowerSyncConfig.js';
 import { CompoundKeyCollector } from '../../auth/CompoundKeyCollector.js';
 import { KeySpec } from '../../auth/KeySpec.js';
 import { KeyStore } from '../../auth/KeyStore.js';
@@ -29,7 +30,7 @@ export type SyncRulesConfig = {
 };
 
 export type ResolvedPowerSyncConfig = {
-  base_config:
+  base_config: PowerSyncConfig;
   connections?: configFile.GenericDataSourceConfig[];
   storage: configFile.GenericStorageConfig;
   dev: {
@@ -59,13 +60,6 @@ export type ResolvedPowerSyncConfig = {
     internal_service_endpoint: string;
   };
 
-  api_parameters: {
-    max_concurrent_connections: number;
-    max_data_fetch_concurrency: number;
-    max_buckets_per_connection: number;
-    max_parameter_query_results: number;
-  };
-
   /** Prefix for postgres replication slot names. May eventually be connection-specific. */
   slot_name_prefix: string;
   parameters: Record<string, number | string | boolean | null>;
@@ -5,7 +5,6 @@ import {
   CHECKPOINT_INVALIDATE_ALL,
   ChecksumMap,
   OpId,
-  SyncContext,
   WatchFilterEvent
 } from '@/index.js';
 import { RequestParameters, SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
@@ -47,19 +46,12 @@ bucket_definitions:
   { defaultSchema: 'public' }
 );
 
-const syncContext = new SyncContext({
-  maxBuckets: 100,
-  maxParameterQueryResults: 100,
-  maxDataFetchConcurrency: 10
-});
-
 test('global bucket with update', async () => {
   const storage = new MockBucketChecksumStateStorage();
   // Set intial state
   storage.updateTestChecksum({ bucket: 'global[]', checksum: 1, count: 1 });
 
   const state = new BucketChecksumState({
-    syncContext,
     syncParams: new RequestParameters({ sub: '' }, {}),
     syncRules: SYNC_RULES_GLOBAL,
     bucketStorage: storage
@@ -123,7 +115,6 @@ bucket_definitions:
   storage.updateTestChecksum({ bucket: 'global[]', checksum: 1, count: 1 });
 
   const state = new BucketChecksumState({
-    syncContext,
     // Client sets the initial state here
     initialBucketPositions: [{ name: 'global[]', after: '1' }],
     syncParams: new RequestParameters({ sub: '' }, {}),
@@ -160,7 +151,6 @@ bucket_definitions:
   storage.updateTestChecksum({ bucket: 'global[2]', checksum: 1, count: 1 });
 
   const state = new BucketChecksumState({
-    syncContext,
     syncParams: new RequestParameters({ sub: '' }, {}),
     syncRules: SYNC_RULES_GLOBAL_TWO,
     bucketStorage: storage
@@ -224,7 +214,6 @@ bucket_definitions:
   const storage = new MockBucketChecksumStateStorage();
 
   const state = new BucketChecksumState({
-    syncContext,
     // Client sets the initial state here
     initialBucketPositions: [{ name: 'something_here[]', after: '1' }],
     syncParams: new RequestParameters({ sub: '' }, {}),
@@ -264,7 +253,6 @@ bucket_definitions:
   storage.updateTestChecksum({ bucket: 'global[2]', checksum: 1, count: 1 });
 
   const state = new BucketChecksumState({
-    syncContext,
     syncParams: new RequestParameters({ sub: '' }, {}),
     syncRules: SYNC_RULES_GLOBAL_TWO,
     bucketStorage: storage
@@ -316,7 +304,6 @@ bucket_definitions:
   const storage = new MockBucketChecksumStateStorage();
 
   const state = new BucketChecksumState({
-    syncContext,
     syncParams: new RequestParameters({ sub: '' }, {}),
     syncRules: SYNC_RULES_GLOBAL_TWO,
     bucketStorage: storage
@@ -368,7 +355,6 @@ bucket_definitions:
   storage.updateTestChecksum({ bucket: 'global[2]', checksum: 3, count: 3 });
 
   const state = new BucketChecksumState({
-    syncContext,
     syncParams: new RequestParameters({ sub: '' }, {}),
     syncRules: SYNC_RULES_GLOBAL_TWO,
     bucketStorage: storage
@@ -466,7 +452,6 @@ bucket_definitions:
   storage.updateTestChecksum({ bucket: 'by_project[3]', checksum: 1, count: 1 });
 
   const state = new BucketChecksumState({
-    syncContext,
     syncParams: new RequestParameters({ sub: 'u1' }, {}),
     syncRules: SYNC_RULES_DYNAMIC,
     bucketStorage: storage