@powersync/service-core 1.20.0 → 1.20.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/CHANGELOG.md +16 -0
  2. package/dist/routes/endpoints/admin.js +1 -0
  3. package/dist/routes/endpoints/admin.js.map +1 -1
  4. package/dist/routes/endpoints/sync-stream.js +6 -1
  5. package/dist/routes/endpoints/sync-stream.js.map +1 -1
  6. package/dist/storage/BucketStorageBatch.d.ts +21 -8
  7. package/dist/storage/BucketStorageBatch.js.map +1 -1
  8. package/dist/storage/BucketStorageFactory.d.ts +5 -0
  9. package/dist/storage/ChecksumCache.d.ts +5 -2
  10. package/dist/storage/ChecksumCache.js +8 -4
  11. package/dist/storage/ChecksumCache.js.map +1 -1
  12. package/dist/storage/PersistedSyncRulesContent.d.ts +6 -2
  13. package/dist/storage/PersistedSyncRulesContent.js +2 -1
  14. package/dist/storage/PersistedSyncRulesContent.js.map +1 -1
  15. package/dist/storage/SourceTable.d.ts +7 -2
  16. package/dist/storage/SourceTable.js.map +1 -1
  17. package/dist/storage/StorageVersionConfig.d.ts +33 -0
  18. package/dist/storage/StorageVersionConfig.js +39 -6
  19. package/dist/storage/StorageVersionConfig.js.map +1 -1
  20. package/dist/storage/SyncRulesBucketStorage.d.ts +12 -3
  21. package/dist/storage/SyncRulesBucketStorage.js.map +1 -1
  22. package/dist/sync/BucketChecksumState.d.ts +3 -3
  23. package/dist/sync/BucketChecksumState.js +12 -42
  24. package/dist/sync/BucketChecksumState.js.map +1 -1
  25. package/dist/sync/sync.js.map +1 -1
  26. package/dist/sync/util.d.ts +1 -0
  27. package/dist/sync/util.js +10 -0
  28. package/dist/sync/util.js.map +1 -1
  29. package/package.json +4 -4
  30. package/src/routes/endpoints/admin.ts +1 -0
  31. package/src/routes/endpoints/sync-stream.ts +6 -1
  32. package/src/storage/BucketStorageBatch.ts +23 -9
  33. package/src/storage/BucketStorageFactory.ts +6 -0
  34. package/src/storage/ChecksumCache.ts +14 -6
  35. package/src/storage/PersistedSyncRulesContent.ts +7 -2
  36. package/src/storage/SourceTable.ts +7 -1
  37. package/src/storage/StorageVersionConfig.ts +54 -6
  38. package/src/storage/SyncRulesBucketStorage.ts +18 -3
  39. package/src/sync/BucketChecksumState.ts +18 -49
  40. package/src/sync/sync.ts +9 -3
  41. package/src/sync/util.ts +10 -0
  42. package/test/src/checksum_cache.test.ts +102 -57
  43. package/test/src/sync/BucketChecksumState.test.ts +53 -21
  44. package/test/src/utils.ts +9 -0
  45. package/tsconfig.tsbuildinfo +1 -1
@@ -1,5 +1,10 @@
1
1
  import { Logger, ObserverClient } from '@powersync/lib-services-framework';
2
- import { HydratedSyncRules, ScopedParameterLookup, SqliteJsonRow } from '@powersync/service-sync-rules';
2
+ import {
3
+ BucketDataSource,
4
+ HydratedSyncRules,
5
+ ScopedParameterLookup,
6
+ SqliteJsonRow
7
+ } from '@powersync/service-sync-rules';
3
8
  import * as util from '../util/util-index.js';
4
9
  import { BucketStorageBatch, FlushedResult, SaveUpdate } from './BucketStorageBatch.js';
5
10
  import { BucketStorageFactory } from './BucketStorageFactory.js';
@@ -103,7 +108,7 @@ export interface SyncRulesBucketStorage
103
108
  */
104
109
  getBucketDataBatch(
105
110
  checkpoint: util.InternalOpId,
106
- dataBuckets: Map<string, util.InternalOpId>,
111
+ dataBuckets: BucketDataRequest[],
107
112
  options?: BucketDataBatchOptions
108
113
  ): AsyncIterable<SyncBucketDataChunk>;
109
114
 
@@ -115,7 +120,7 @@ export interface SyncRulesBucketStorage
115
120
  * This may be slow, depending on the size of the buckets.
116
121
  * The checksums are cached internally to compensate for this, but does not cover all cases.
117
122
  */
118
- getChecksums(checkpoint: util.InternalOpId, buckets: string[]): Promise<util.ChecksumMap>;
123
+ getChecksums(checkpoint: util.InternalOpId, buckets: BucketChecksumRequest[]): Promise<util.ChecksumMap>;
119
124
 
120
125
  /**
121
126
  * Clear checksum cache. Primarily intended for tests.
@@ -127,6 +132,16 @@ export interface SyncRulesBucketStorageListener {
127
132
  batchStarted: (batch: BucketStorageBatch) => void;
128
133
  }
129
134
 
135
+ export interface BucketDataRequest {
136
+ bucket: string;
137
+ start: util.InternalOpId;
138
+ source: BucketDataSource;
139
+ }
140
+ export interface BucketChecksumRequest {
141
+ bucket: string;
142
+ source: BucketDataSource;
143
+ }
144
+
130
145
  export interface SyncRuleStatus {
131
146
  checkpoint_lsn: string | null;
132
147
  active: boolean;
@@ -7,7 +7,8 @@ import {
7
7
  QuerierError,
8
8
  RequestedStream,
9
9
  RequestParameters,
10
- ResolvedBucket
10
+ ResolvedBucket,
11
+ mergeBuckets
11
12
  } from '@powersync/service-sync-rules';
12
13
 
13
14
  import * as storage from '../storage/storage-index.js';
@@ -137,20 +138,20 @@ export class BucketChecksumState {
137
138
  }
138
139
 
139
140
  // Re-check updated buckets only
140
- let checksumLookups: string[] = [];
141
+ let checksumLookups: storage.BucketChecksumRequest[] = [];
141
142
 
142
143
  let newChecksums = new Map<string, util.BucketChecksum>();
143
- for (let bucket of bucketDescriptionMap.keys()) {
144
- if (!updatedBuckets.has(bucket)) {
145
- const existing = this.lastChecksums.get(bucket);
144
+ for (let desc of bucketDescriptionMap.values()) {
145
+ if (!updatedBuckets.has(desc.bucket)) {
146
+ const existing = this.lastChecksums.get(desc.bucket);
146
147
  if (existing == null) {
147
148
  // If this happens, it means updatedBuckets did not correctly include all new buckets
148
- throw new ServiceAssertionError(`Existing checksum not found for bucket ${bucket}`);
149
+ throw new ServiceAssertionError(`Existing checksum not found for bucket ${desc.bucket}`);
149
150
  }
150
151
  // Bucket is not specifically updated, and we have a previous checksum
151
- newChecksums.set(bucket, existing);
152
+ newChecksums.set(desc.bucket, existing);
152
153
  } else {
153
- checksumLookups.push(bucket);
154
+ checksumLookups.push({ bucket: desc.bucket, source: desc.source });
154
155
  }
155
156
  }
156
157
 
@@ -163,12 +164,12 @@ export class BucketChecksumState {
163
164
  checksumMap = newChecksums;
164
165
  } else {
165
166
  // Re-check all buckets
166
- const bucketList = [...bucketDescriptionMap.keys()];
167
+ const bucketList = [...bucketDescriptionMap.values()].map((b) => ({ bucket: b.bucket, source: b.source }));
167
168
  checksumMap = await storage.getChecksums(base.checkpoint, bucketList);
168
169
  }
169
170
 
170
171
  // Subset of buckets for which there may be new data in this batch.
171
- let bucketsToFetch: BucketDescription[];
172
+ let bucketsToFetch: ResolvedBucket[];
172
173
 
173
174
  let checkpointLine: util.StreamingSyncCheckpointDiff | util.StreamingSyncCheckpoint;
174
175
 
@@ -207,10 +208,7 @@ export class BucketChecksumState {
207
208
  ...this.parameterState.translateResolvedBucket(bucketDescriptionMap.get(e.bucket)!, streamNameToIndex)
208
209
  }));
209
210
  bucketsToFetch = [...generateBucketsToFetch].map((b) => {
210
- return {
211
- priority: bucketDescriptionMap.get(b)!.priority,
212
- bucket: b
213
- };
211
+ return bucketDescriptionMap.get(b)!;
214
212
  });
215
213
 
216
214
  deferredLog = () => {
@@ -265,7 +263,7 @@ export class BucketChecksumState {
265
263
  totalParamResults
266
264
  );
267
265
  };
268
- bucketsToFetch = allBuckets.map((b) => ({ bucket: b.bucket, priority: b.priority }));
266
+ bucketsToFetch = allBuckets;
269
267
 
270
268
  const subscriptions: util.StreamDescription[] = [];
271
269
  const streamNameToIndex = new Map<string, number>();
@@ -342,17 +340,17 @@ export class BucketChecksumState {
342
340
  deferredLog();
343
341
  },
344
342
 
345
- getFilteredBucketPositions: (buckets?: BucketDescription[]): Map<string, util.InternalOpId> => {
343
+ getFilteredBucketPositions: (buckets?: ResolvedBucket[]): storage.BucketDataRequest[] => {
346
344
  if (!hasAdvanced) {
347
345
  throw new ServiceAssertionError('Call line.advance() before getFilteredBucketPositions()');
348
346
  }
349
347
  buckets ??= bucketsToFetch;
350
- const filtered = new Map<string, util.InternalOpId>();
348
+ const filtered: storage.BucketDataRequest[] = [];
351
349
 
352
350
  for (let bucket of buckets) {
353
351
  const state = this.bucketDataPositions.get(bucket.bucket);
354
352
  if (state) {
355
- filtered.set(bucket.bucket, state.start_op_id);
353
+ filtered.push({ bucket: bucket.bucket, start: state.start_op_id, source: bucket.source });
356
354
  }
357
355
  }
358
356
  return filtered;
@@ -660,7 +658,7 @@ export class BucketParameterState {
660
658
 
661
659
  export interface CheckpointLine {
662
660
  checkpointLine: util.StreamingSyncCheckpointDiff | util.StreamingSyncCheckpoint;
663
- bucketsToFetch: BucketDescription[];
661
+ bucketsToFetch: ResolvedBucket[];
664
662
 
665
663
  /**
666
664
  * Call when a checkpoint line is being sent to a client, to update the internal state.
@@ -672,7 +670,7 @@ export interface CheckpointLine {
672
670
  *
673
671
  * @param bucketsToFetch List of buckets to fetch - either this.bucketsToFetch, or a subset of it. Defaults to this.bucketsToFetch.
674
672
  */
675
- getFilteredBucketPositions(bucketsToFetch?: BucketDescription[]): Map<string, util.InternalOpId>;
673
+ getFilteredBucketPositions(bucketsToFetch?: ResolvedBucket[]): storage.BucketDataRequest[];
676
674
 
677
675
  /**
678
676
  * Update the position of bucket data the client has, after it was sent to the client.
@@ -762,32 +760,3 @@ function limitedBuckets(buckets: string[] | { bucket: string }[], limit: number)
762
760
  const limited = buckets.slice(0, limit);
763
761
  return `${JSON.stringify(limited)}...`;
764
762
  }
765
-
766
- /**
767
- * Resolves duplicate buckets in the given array, merging the inclusion reasons for duplicate.
768
- *
769
- * It's possible for duplicates to occur when a stream has multiple subscriptions, consider e.g.
770
- *
771
- * ```
772
- * sync_streams:
773
- * assets_by_category:
774
- * query: select * from assets where category in (request.parameters() -> 'categories')
775
- * ```
776
- *
777
- * Here, a client might subscribe once with `{"categories": [1]}` and once with `{"categories": [1, 2]}`. Since each
778
- * subscription is evaluated independently, this would lead to three buckets, with a duplicate `assets_by_category[1]`
779
- * bucket.
780
- */
781
- function mergeBuckets(buckets: ResolvedBucket[]): ResolvedBucket[] {
782
- const byBucketId: Record<string, ResolvedBucket> = {};
783
-
784
- for (const bucket of buckets) {
785
- if (Object.hasOwn(byBucketId, bucket.bucket)) {
786
- byBucketId[bucket.bucket].inclusion_reasons.push(...bucket.inclusion_reasons);
787
- } else {
788
- byBucketId[bucket.bucket] = structuredClone(bucket);
789
- }
790
- }
791
-
792
- return Object.values(byBucketId);
793
- }
package/src/sync/sync.ts CHANGED
@@ -1,5 +1,11 @@
1
1
  import { JSONBig, JsonContainer } from '@powersync/service-jsonbig';
2
- import { BucketDescription, BucketPriority, HydratedSyncRules, SqliteJsonValue } from '@powersync/service-sync-rules';
2
+ import {
3
+ BucketDescription,
4
+ BucketPriority,
5
+ HydratedSyncRules,
6
+ ResolvedBucket,
7
+ SqliteJsonValue
8
+ } from '@powersync/service-sync-rules';
3
9
 
4
10
  import { AbortError } from 'ix/aborterror.js';
5
11
 
@@ -179,7 +185,7 @@ async function* streamResponseInner(
179
185
  // receive a sync complete message after the synchronization is done (which happens in the last
180
186
  // bucketDataInBatches iteration). Without any batch, the line is missing and clients might not complete their
181
187
  // sync properly.
182
- const priorityBatches: [BucketPriority | null, BucketDescription[]][] = bucketsByPriority;
188
+ const priorityBatches: [BucketPriority | null, ResolvedBucket[]][] = bucketsByPriority;
183
189
  if (priorityBatches.length == 0) {
184
190
  priorityBatches.push([null, []]);
185
191
  }
@@ -257,7 +263,7 @@ interface BucketDataRequest {
257
263
  /** Contains current bucket state. Modified by the request as data is sent. */
258
264
  checkpointLine: CheckpointLine;
259
265
  /** Subset of checkpointLine.bucketsToFetch, filtered by priority. */
260
- bucketsToFetch: BucketDescription[];
266
+ bucketsToFetch: ResolvedBucket[];
261
267
  /** Whether data lines should be encoded in a legacy format where {@link util.OplogEntry.data} is a nested object. */
262
268
  legacyDataLines: boolean;
263
269
  /** Signals that the connection was aborted and that streaming should stop ASAP. */
package/src/sync/util.ts CHANGED
@@ -183,6 +183,16 @@ export function settledPromise<T>(promise: Promise<T>): Promise<PromiseSettledRe
183
183
  );
184
184
  }
185
185
 
186
+ export function unsettledPromise<T>(settled: Promise<PromiseSettledResult<T>>): Promise<T> {
187
+ return settled.then((result) => {
188
+ if (result.status === 'fulfilled') {
189
+ return Promise.resolve(result.value);
190
+ } else {
191
+ return Promise.reject(result.reason);
192
+ }
193
+ });
194
+ }
195
+
186
196
  export type MapOrSet<T> = Map<T, any> | Set<T>;
187
197
 
188
198
  /**
@@ -1,7 +1,9 @@
1
1
  import { ChecksumCache, FetchChecksums, FetchPartialBucketChecksum } from '@/storage/ChecksumCache.js';
2
2
  import { addChecksums, BucketChecksum, InternalOpId, PartialChecksum } from '@/util/util-index.js';
3
+ import { BucketDataSource } from '@powersync/service-sync-rules';
3
4
  import * as crypto from 'node:crypto';
4
5
  import { describe, expect, it } from 'vitest';
6
+ import { removeSource } from './utils.js';
5
7
 
6
8
  /**
7
9
  * Create a deterministic BucketChecksum based on the bucket name and checkpoint for testing purposes.
@@ -67,6 +69,12 @@ describe('checksum cache', function () {
67
69
  return new ChecksumCache({ fetchChecksums: fetch });
68
70
  };
69
71
 
72
+ const DUMMY_SOURCE: BucketDataSource = null as any;
73
+
74
+ function removeLookupSources(lookups: FetchPartialBucketChecksum[]) {
75
+ return lookups.map((b) => removeSource(b));
76
+ }
77
+
70
78
  it('should handle a sequential lookups (a)', async function () {
71
79
  let lookups: FetchPartialBucketChecksum[][] = [];
72
80
  const cache = factory(async (batch) => {
@@ -74,13 +82,13 @@ describe('checksum cache', function () {
74
82
  return fetchTestChecksums(batch);
75
83
  });
76
84
 
77
- expect(await cache.getChecksums(123n, ['test'])).toEqual([TEST_123]);
85
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_123]);
78
86
 
79
- expect(await cache.getChecksums(1234n, ['test'])).toEqual([TEST_1234]);
87
+ expect(await cache.getChecksums(1234n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_1234]);
80
88
 
81
- expect(await cache.getChecksums(123n, ['test2'])).toEqual([TEST2_123]);
89
+ expect(await cache.getChecksums(123n, [{ bucket: 'test2', source: DUMMY_SOURCE }])).toEqual([TEST2_123]);
82
90
 
83
- expect(lookups).toEqual([
91
+ expect(lookups.map(removeLookupSources)).toMatchObject([
84
92
  [{ bucket: 'test', end: 123n }],
85
93
  // This should use the previous lookup
86
94
  [{ bucket: 'test', start: 123n, end: 1234n }],
@@ -96,13 +104,13 @@ describe('checksum cache', function () {
96
104
  return fetchTestChecksums(batch);
97
105
  });
98
106
 
99
- expect(await cache.getChecksums(123n, ['test2'])).toEqual([TEST2_123]);
107
+ expect(await cache.getChecksums(123n, [{ bucket: 'test2', source: DUMMY_SOURCE }])).toEqual([TEST2_123]);
100
108
 
101
- expect(await cache.getChecksums(1234n, ['test'])).toEqual([TEST_1234]);
109
+ expect(await cache.getChecksums(1234n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_1234]);
102
110
 
103
- expect(await cache.getChecksums(123n, ['test'])).toEqual([TEST_123]);
111
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_123]);
104
112
 
105
- expect(lookups).toEqual([
113
+ expect(lookups.map(removeLookupSources)).toEqual([
106
114
  // With this order, there is no option for a partial lookup
107
115
  [{ bucket: 'test2', end: 123n }],
108
116
  [{ bucket: 'test', end: 1234n }],
@@ -117,16 +125,16 @@ describe('checksum cache', function () {
117
125
  return fetchTestChecksums(batch);
118
126
  });
119
127
 
120
- const p1 = cache.getChecksums(123n, ['test']);
121
- const p2 = cache.getChecksums(1234n, ['test']);
122
- const p3 = cache.getChecksums(123n, ['test2']);
128
+ const p1 = cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }]);
129
+ const p2 = cache.getChecksums(1234n, [{ bucket: 'test', source: DUMMY_SOURCE }]);
130
+ const p3 = cache.getChecksums(123n, [{ bucket: 'test2', source: DUMMY_SOURCE }]);
123
131
 
124
132
  expect(await p1).toEqual([TEST_123]);
125
133
  expect(await p2).toEqual([TEST_1234]);
126
134
  expect(await p3).toEqual([TEST2_123]);
127
135
 
128
136
  // Concurrent requests, so we can't do a partial lookup for 123 -> 1234
129
- expect(lookups).toEqual([
137
+ expect(lookups.map(removeLookupSources)).toEqual([
130
138
  [{ bucket: 'test', end: 123n }],
131
139
  [{ bucket: 'test', end: 1234n }],
132
140
  [{ bucket: 'test2', end: 123n }]
@@ -140,15 +148,15 @@ describe('checksum cache', function () {
140
148
  return fetchTestChecksums(batch);
141
149
  });
142
150
 
143
- const p1 = cache.getChecksums(123n, ['test']);
144
- const p2 = cache.getChecksums(123n, ['test']);
151
+ const p1 = cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }]);
152
+ const p2 = cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }]);
145
153
 
146
154
  expect(await p1).toEqual([TEST_123]);
147
155
 
148
156
  expect(await p2).toEqual([TEST_123]);
149
157
 
150
158
  // The lookup should be deduplicated, even though it's in progress
151
- expect(lookups).toEqual([[{ bucket: 'test', end: 123n }]]);
159
+ expect(lookups.map(removeLookupSources)).toEqual([[{ bucket: 'test', end: 123n }]]);
152
160
  });
153
161
 
154
162
  it('should handle serial + concurrent lookups', async function () {
@@ -158,15 +166,15 @@ describe('checksum cache', function () {
158
166
  return fetchTestChecksums(batch);
159
167
  });
160
168
 
161
- expect(await cache.getChecksums(123n, ['test'])).toEqual([TEST_123]);
169
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_123]);
162
170
 
163
- const p2 = cache.getChecksums(1234n, ['test']);
164
- const p3 = cache.getChecksums(1234n, ['test']);
171
+ const p2 = cache.getChecksums(1234n, [{ bucket: 'test', source: DUMMY_SOURCE }]);
172
+ const p3 = cache.getChecksums(1234n, [{ bucket: 'test', source: DUMMY_SOURCE }]);
165
173
 
166
174
  expect(await p2).toEqual([TEST_1234]);
167
175
  expect(await p3).toEqual([TEST_1234]);
168
176
 
169
- expect(lookups).toEqual([
177
+ expect(lookups.map(removeLookupSources)).toEqual([
170
178
  [{ bucket: 'test', end: 123n }],
171
179
  // This lookup is deduplicated
172
180
  [{ bucket: 'test', start: 123n, end: 1234n }]
@@ -180,9 +188,14 @@ describe('checksum cache', function () {
180
188
  return fetchTestChecksums(batch);
181
189
  });
182
190
 
183
- expect(await cache.getChecksums(123n, ['test', 'test2'])).toEqual([TEST_123, TEST2_123]);
191
+ expect(
192
+ await cache.getChecksums(123n, [
193
+ { bucket: 'test', source: DUMMY_SOURCE },
194
+ { bucket: 'test2', source: DUMMY_SOURCE }
195
+ ])
196
+ ).toEqual([TEST_123, TEST2_123]);
184
197
 
185
- expect(lookups).toEqual([
198
+ expect(lookups.map(removeLookupSources)).toEqual([
186
199
  [
187
200
  // Both lookups in the same request
188
201
  { bucket: 'test', end: 123n },
@@ -198,10 +211,15 @@ describe('checksum cache', function () {
198
211
  return fetchTestChecksums(batch);
199
212
  });
200
213
 
201
- expect(await cache.getChecksums(123n, ['test'])).toEqual([TEST_123]);
202
- expect(await cache.getChecksums(123n, ['test', 'test2'])).toEqual([TEST_123, TEST2_123]);
214
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_123]);
215
+ expect(
216
+ await cache.getChecksums(123n, [
217
+ { bucket: 'test', source: DUMMY_SOURCE },
218
+ { bucket: 'test2', source: DUMMY_SOURCE }
219
+ ])
220
+ ).toEqual([TEST_123, TEST2_123]);
203
221
 
204
- expect(lookups).toEqual([
222
+ expect(lookups.map(removeLookupSources)).toEqual([
205
223
  // Request 1
206
224
  [{ bucket: 'test', end: 123n }],
207
225
  // Request 2
@@ -216,13 +234,19 @@ describe('checksum cache', function () {
216
234
  return fetchTestChecksums(batch);
217
235
  });
218
236
 
219
- const a = cache.getChecksums(123n, ['test', 'test2']);
220
- const b = cache.getChecksums(123n, ['test2', 'test3']);
237
+ const a = cache.getChecksums(123n, [
238
+ { bucket: 'test', source: DUMMY_SOURCE },
239
+ { bucket: 'test2', source: DUMMY_SOURCE }
240
+ ]);
241
+ const b = cache.getChecksums(123n, [
242
+ { bucket: 'test2', source: DUMMY_SOURCE },
243
+ { bucket: 'test3', source: DUMMY_SOURCE }
244
+ ]);
221
245
 
222
246
  expect(await a).toEqual([TEST_123, TEST2_123]);
223
247
  expect(await b).toEqual([TEST2_123, TEST3_123]);
224
248
 
225
- expect(lookups).toEqual([
249
+ expect(lookups.map(removeLookupSources)).toEqual([
226
250
  // Request A
227
251
  [
228
252
  { bucket: 'test', end: 123n },
@@ -240,9 +264,9 @@ describe('checksum cache', function () {
240
264
  return fetchTestChecksums(batch);
241
265
  });
242
266
 
243
- expect(await cache.getChecksums(123n, ['test'])).toEqual([TEST_123]);
267
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_123]);
244
268
 
245
- expect(await cache.getChecksums(125n, ['test'])).toEqual([
269
+ expect(await cache.getChecksums(125n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([
246
270
  {
247
271
  bucket: 'test',
248
272
  checksum: -1865121912,
@@ -250,14 +274,14 @@ describe('checksum cache', function () {
250
274
  }
251
275
  ]);
252
276
 
253
- expect(await cache.getChecksums(124n, ['test'])).toEqual([
277
+ expect(await cache.getChecksums(124n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([
254
278
  {
255
279
  bucket: 'test',
256
280
  checksum: 1887460431,
257
281
  count: 124
258
282
  }
259
283
  ]);
260
- expect(lookups).toEqual([
284
+ expect(lookups.map(removeLookupSources)).toEqual([
261
285
  [{ bucket: 'test', end: 123n }],
262
286
  [{ bucket: 'test', start: 123n, end: 125n }],
263
287
  [{ bucket: 'test', start: 123n, end: 124n }]
@@ -275,19 +299,31 @@ describe('checksum cache', function () {
275
299
  return fetchTestChecksums(batch);
276
300
  });
277
301
 
278
- const a = cache.getChecksums(123n, ['test', 'test2']);
279
- const b = cache.getChecksums(123n, ['test2', 'test3']);
302
+ const a = cache.getChecksums(123n, [
303
+ { bucket: 'test', source: DUMMY_SOURCE },
304
+ { bucket: 'test2', source: DUMMY_SOURCE }
305
+ ]);
306
+ const b = cache.getChecksums(123n, [
307
+ { bucket: 'test2', source: DUMMY_SOURCE },
308
+ { bucket: 'test3', source: DUMMY_SOURCE }
309
+ ]);
280
310
 
281
311
  await expect(a).rejects.toEqual(TEST_ERROR);
282
312
  await expect(b).rejects.toEqual(TEST_ERROR);
283
313
 
284
- const a2 = cache.getChecksums(123n, ['test', 'test2']);
285
- const b2 = cache.getChecksums(123n, ['test2', 'test3']);
314
+ const a2 = cache.getChecksums(123n, [
315
+ { bucket: 'test', source: DUMMY_SOURCE },
316
+ { bucket: 'test2', source: DUMMY_SOURCE }
317
+ ]);
318
+ const b2 = cache.getChecksums(123n, [
319
+ { bucket: 'test2', source: DUMMY_SOURCE },
320
+ { bucket: 'test3', source: DUMMY_SOURCE }
321
+ ]);
286
322
 
287
323
  expect(await a2).toEqual([TEST_123, TEST2_123]);
288
324
  expect(await b2).toEqual([TEST2_123, TEST3_123]);
289
325
 
290
- expect(lookups).toEqual([
326
+ expect(lookups.map(removeLookupSources)).toEqual([
291
327
  // Request A (fails)
292
328
  [
293
329
  { bucket: 'test', end: 123n },
@@ -311,11 +347,15 @@ describe('checksum cache', function () {
311
347
  return fetchTestChecksums(batch.filter((b) => b.bucket != 'test'));
312
348
  });
313
349
 
314
- expect(await cache.getChecksums(123n, ['test'])).toEqual([{ bucket: 'test', checksum: 0, count: 0 }]);
315
- expect(await cache.getChecksums(123n, ['test', 'test2'])).toEqual([
316
- { bucket: 'test', checksum: 0, count: 0 },
317
- TEST2_123
350
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([
351
+ { bucket: 'test', checksum: 0, count: 0 }
318
352
  ]);
353
+ expect(
354
+ await cache.getChecksums(123n, [
355
+ { bucket: 'test', source: DUMMY_SOURCE },
356
+ { bucket: 'test2', source: DUMMY_SOURCE }
357
+ ])
358
+ ).toEqual([{ bucket: 'test', checksum: 0, count: 0 }, TEST2_123]);
319
359
  });
320
360
 
321
361
  it('should handle missing checksums (b)', async function () {
@@ -325,8 +365,10 @@ describe('checksum cache', function () {
325
365
  return fetchTestChecksums(batch.filter((b) => b.bucket != 'test' || b.end != 123n));
326
366
  });
327
367
 
328
- expect(await cache.getChecksums(123n, ['test'])).toEqual([{ bucket: 'test', checksum: 0, count: 0 }]);
329
- expect(await cache.getChecksums(1234n, ['test'])).toEqual([
368
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([
369
+ { bucket: 'test', checksum: 0, count: 0 }
370
+ ]);
371
+ expect(await cache.getChecksums(1234n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([
330
372
  {
331
373
  bucket: 'test',
332
374
  checksum: 1597020602,
@@ -334,7 +376,10 @@ describe('checksum cache', function () {
334
376
  }
335
377
  ]);
336
378
 
337
- expect(lookups).toEqual([[{ bucket: 'test', end: 123n }], [{ bucket: 'test', start: 123n, end: 1234n }]]);
379
+ expect(lookups.map(removeLookupSources)).toEqual([
380
+ [{ bucket: 'test', end: 123n }],
381
+ [{ bucket: 'test', start: 123n, end: 1234n }]
382
+ ]);
338
383
  });
339
384
 
340
385
  it('should use maxSize', async function () {
@@ -347,8 +392,8 @@ describe('checksum cache', function () {
347
392
  maxSize: 2
348
393
  });
349
394
 
350
- expect(await cache.getChecksums(123n, ['test'])).toEqual([TEST_123]);
351
- expect(await cache.getChecksums(124n, ['test'])).toEqual([
395
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_123]);
396
+ expect(await cache.getChecksums(124n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([
352
397
  {
353
398
  bucket: 'test',
354
399
  checksum: 1887460431,
@@ -356,30 +401,30 @@ describe('checksum cache', function () {
356
401
  }
357
402
  ]);
358
403
 
359
- expect(await cache.getChecksums(125n, ['test'])).toEqual([
404
+ expect(await cache.getChecksums(125n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([
360
405
  {
361
406
  bucket: 'test',
362
407
  checksum: -1865121912,
363
408
  count: 125
364
409
  }
365
410
  ]);
366
- expect(await cache.getChecksums(126n, ['test'])).toEqual([
411
+ expect(await cache.getChecksums(126n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([
367
412
  {
368
413
  bucket: 'test',
369
414
  checksum: -1720007310,
370
415
  count: 126
371
416
  }
372
417
  ]);
373
- expect(await cache.getChecksums(124n, ['test'])).toEqual([
418
+ expect(await cache.getChecksums(124n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([
374
419
  {
375
420
  bucket: 'test',
376
421
  checksum: 1887460431,
377
422
  count: 124
378
423
  }
379
424
  ]);
380
- expect(await cache.getChecksums(123n, ['test'])).toEqual([TEST_123]);
425
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_123]);
381
426
 
382
- expect(lookups).toEqual([
427
+ expect(lookups.map(removeLookupSources)).toEqual([
383
428
  [{ bucket: 'test', end: 123n }],
384
429
  [{ bucket: 'test', start: 123n, end: 124n }],
385
430
  [{ bucket: 'test', start: 124n, end: 125n }],
@@ -400,10 +445,10 @@ describe('checksum cache', function () {
400
445
  maxSize: 2
401
446
  });
402
447
 
403
- const p3 = cache.getChecksums(123n, ['test3']);
404
- const p4 = cache.getChecksums(123n, ['test4']);
405
- const p1 = cache.getChecksums(123n, ['test']);
406
- const p2 = cache.getChecksums(123n, ['test2']);
448
+ const p3 = cache.getChecksums(123n, [{ bucket: 'test3', source: DUMMY_SOURCE }]);
449
+ const p4 = cache.getChecksums(123n, [{ bucket: 'test4', source: DUMMY_SOURCE }]);
450
+ const p1 = cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }]);
451
+ const p2 = cache.getChecksums(123n, [{ bucket: 'test2', source: DUMMY_SOURCE }]);
407
452
 
408
453
  expect(await p1).toEqual([TEST_123]);
409
454
  expect(await p2).toEqual([TEST2_123]);
@@ -417,7 +462,7 @@ describe('checksum cache', function () {
417
462
  ]);
418
463
 
419
464
  // The lookup should be deduplicated, even though it's in progress
420
- expect(lookups).toEqual([
465
+ expect(lookups.map(removeLookupSources)).toEqual([
421
466
  [{ bucket: 'test3', end: 123n }],
422
467
  [{ bucket: 'test4', end: 123n }],
423
468
  [{ bucket: 'test', end: 123n }],
@@ -434,7 +479,7 @@ describe('checksum cache', function () {
434
479
  return fetchTestChecksums(batch);
435
480
  });
436
481
 
437
- expect(await cache.getChecksums(123n, ['test'])).toEqual([TEST_123]);
438
- expect(await cache.getChecksums(1234n, ['test'])).toEqual([TEST_1234]);
482
+ expect(await cache.getChecksums(123n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_123]);
483
+ expect(await cache.getChecksums(1234n, [{ bucket: 'test', source: DUMMY_SOURCE }])).toEqual([TEST_1234]);
439
484
  });
440
485
  });