@powersync/service-core 0.0.0-dev-20250827091123 → 0.0.0-dev-20250828090417

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/CHANGELOG.md +18 -12
  2. package/dist/api/api-metrics.js +5 -0
  3. package/dist/api/api-metrics.js.map +1 -1
  4. package/dist/metrics/open-telemetry/util.d.ts +0 -3
  5. package/dist/metrics/open-telemetry/util.js +18 -13
  6. package/dist/metrics/open-telemetry/util.js.map +1 -1
  7. package/dist/routes/compression.d.ts +19 -0
  8. package/dist/routes/compression.js +70 -0
  9. package/dist/routes/compression.js.map +1 -0
  10. package/dist/routes/configure-fastify.js.map +1 -1
  11. package/dist/routes/endpoints/socket-route.js +23 -18
  12. package/dist/routes/endpoints/socket-route.js.map +1 -1
  13. package/dist/routes/endpoints/sync-stream.js +14 -24
  14. package/dist/routes/endpoints/sync-stream.js.map +1 -1
  15. package/dist/routes/router.d.ts +3 -3
  16. package/dist/storage/BucketStorage.d.ts +1 -1
  17. package/dist/storage/BucketStorage.js.map +1 -1
  18. package/dist/storage/BucketStorageFactory.d.ts +0 -2
  19. package/dist/storage/ChecksumCache.d.ts +4 -19
  20. package/dist/storage/ChecksumCache.js +4 -0
  21. package/dist/storage/ChecksumCache.js.map +1 -1
  22. package/dist/storage/StorageEngine.d.ts +2 -2
  23. package/dist/storage/StorageEngine.js.map +1 -1
  24. package/dist/storage/StorageProvider.d.ts +1 -3
  25. package/dist/storage/SyncRulesBucketStorage.d.ts +9 -0
  26. package/dist/storage/SyncRulesBucketStorage.js.map +1 -1
  27. package/dist/storage/storage-index.d.ts +0 -1
  28. package/dist/storage/storage-index.js +0 -1
  29. package/dist/storage/storage-index.js.map +1 -1
  30. package/dist/sync/BucketChecksumState.d.ts +7 -3
  31. package/dist/sync/BucketChecksumState.js +5 -4
  32. package/dist/sync/BucketChecksumState.js.map +1 -1
  33. package/dist/sync/RequestTracker.d.ts +7 -1
  34. package/dist/sync/RequestTracker.js +22 -2
  35. package/dist/sync/RequestTracker.js.map +1 -1
  36. package/dist/sync/sync.d.ts +2 -2
  37. package/dist/sync/sync.js.map +1 -1
  38. package/dist/sync/util.js +1 -1
  39. package/dist/sync/util.js.map +1 -1
  40. package/dist/system/ServiceContext.d.ts +0 -3
  41. package/dist/system/ServiceContext.js +1 -10
  42. package/dist/system/ServiceContext.js.map +1 -1
  43. package/dist/util/utils.d.ts +17 -2
  44. package/dist/util/utils.js +33 -9
  45. package/dist/util/utils.js.map +1 -1
  46. package/package.json +13 -13
  47. package/src/api/api-metrics.ts +6 -0
  48. package/src/metrics/open-telemetry/util.ts +22 -21
  49. package/src/routes/compression.ts +75 -0
  50. package/src/routes/configure-fastify.ts +1 -0
  51. package/src/routes/endpoints/socket-route.ts +24 -19
  52. package/src/routes/endpoints/sync-stream.ts +15 -24
  53. package/src/routes/router.ts +3 -3
  54. package/src/storage/BucketStorage.ts +2 -2
  55. package/src/storage/BucketStorageFactory.ts +0 -2
  56. package/src/storage/ChecksumCache.ts +8 -22
  57. package/src/storage/StorageEngine.ts +3 -3
  58. package/src/storage/StorageProvider.ts +1 -3
  59. package/src/storage/SyncRulesBucketStorage.ts +12 -0
  60. package/src/storage/storage-index.ts +0 -1
  61. package/src/sync/BucketChecksumState.ts +12 -6
  62. package/src/sync/RequestTracker.ts +27 -2
  63. package/src/sync/sync.ts +3 -3
  64. package/src/sync/util.ts +1 -1
  65. package/src/system/ServiceContext.ts +1 -13
  66. package/src/util/utils.ts +55 -11
  67. package/test/src/checksum_cache.test.ts +6 -8
  68. package/test/src/routes/mocks.ts +59 -0
  69. package/test/src/routes/stream.test.ts +84 -0
  70. package/test/src/sync/BucketChecksumState.test.ts +48 -26
  71. package/tsconfig.tsbuildinfo +1 -1
  72. package/dist/events/EventsEngine.d.ts +0 -14
  73. package/dist/events/EventsEngine.js +0 -33
  74. package/dist/events/EventsEngine.js.map +0 -1
  75. package/dist/storage/ReportStorage.d.ts +0 -36
  76. package/dist/storage/ReportStorage.js +0 -2
  77. package/dist/storage/ReportStorage.js.map +0 -1
  78. package/src/events/EventsEngine.ts +0 -38
  79. package/src/storage/ReportStorage.ts +0 -39
package/src/storage/StorageProvider.ts CHANGED
@@ -1,11 +1,9 @@
 import { ServiceError } from '@powersync/lib-services-framework';
 import * as util from '../util/util-index.js';
 import { BucketStorageFactory } from './BucketStorageFactory.js';
-import { ReportStorage } from './ReportStorage.js';
 
 export interface ActiveStorage {
   storage: BucketStorageFactory;
-  reportStorage: ReportStorage;
   shutDown(): Promise<void>;
 
   /**
@@ -24,7 +22,7 @@ export interface GetStorageOptions {
 /**
  * Represents a provider that can create a storage instance for a specific storage type from configuration.
  */
-export interface StorageProvider {
+export interface BucketStorageProvider {
   /**
    * The storage type that this provider provides.
   * The type should match the `type` field in the config.
package/src/storage/SyncRulesBucketStorage.ts CHANGED
@@ -62,6 +62,11 @@ export interface SyncRulesBucketStorage
 
   compact(options?: CompactOptions): Promise<void>;
 
+  /**
+   * Lightweight "compact" process to populate the checksum cache, if any.
+   */
+  populatePersistentChecksumCache(options?: Pick<CompactOptions, 'signal' | 'maxOpId'>): Promise<void>;
+
   // ## Read operations
 
   getCheckpoint(): Promise<ReplicationCheckpoint>;
@@ -108,6 +113,11 @@ export interface SyncRulesBucketStorage
    * Returns zero checksums for any buckets not found.
    */
  getChecksums(checkpoint: util.InternalOpId, buckets: string[]): Promise<util.ChecksumMap>;
+
+  /**
+   * Clear checksum cache. Primarily intended for tests.
+   */
+  clearChecksumCache(): void;
 }
 
 export interface SyncRulesBucketStorageListener {
@@ -208,6 +218,8 @@ export interface CompactOptions {
    * Internal/testing use: Cache size for compacting parameters.
    */
  compactParameterCacheLimit?: number;
+
+  signal?: AbortSignal;
 }
 
 export interface ClearStorageOptions {
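
For context on the new interface members above: a minimal, illustrative sketch of driving the lightweight checksum-cache pass with the new `signal` option. The helper function is hypothetical and not part of this package; only `populatePersistentChecksumCache` and the `signal`/`maxOpId` options come from the diff.

```ts
import { SyncRulesBucketStorage } from '@powersync/service-core';

// Hypothetical helper: refresh the persistent checksum cache and stop early
// if shutdown is requested. `maxOpId` (the other picked option) is left at its default.
export async function refreshChecksumCache(
  storage: SyncRulesBucketStorage,
  shutdownSignal: AbortSignal
): Promise<void> {
  await storage.populatePersistentChecksumCache({ signal: shutdownSignal });
}
```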
package/src/storage/storage-index.ts CHANGED
@@ -13,4 +13,3 @@ export * from './BucketStorageBatch.js';
 export * from './SyncRulesBucketStorage.js';
 export * from './PersistedSyncRulesContent.js';
 export * from './ReplicationLock.js';
-export * from './ReportStorage.js';
package/src/sync/BucketChecksumState.ts CHANGED
@@ -24,10 +24,15 @@ import { BucketParameterQuerier, QuerierError } from '@powersync/service-sync-ru
 import { SyncContext } from './SyncContext.js';
 import { getIntersection, hasIntersection } from './util.js';
 
+export interface VersionedSyncRules {
+  syncRules: SqlSyncRules;
+  version: number;
+}
+
 export interface BucketChecksumStateOptions {
   syncContext: SyncContext;
   bucketStorage: BucketChecksumStateStorage;
-  syncRules: SqlSyncRules;
+  syncRules: VersionedSyncRules;
   tokenPayload: RequestJwtPayload;
   syncRequest: util.StreamingSyncRequest;
   logger?: Logger;
@@ -248,7 +253,7 @@ export class BucketChecksumState {
     const streamNameToIndex = new Map<string, number>();
     this.streamNameToIndex = streamNameToIndex;
 
-    for (const source of this.parameterState.syncRules.bucketSources) {
+    for (const source of this.parameterState.syncRules.syncRules.bucketSources) {
       if (this.parameterState.isSubscribedToStream(source)) {
         streamNameToIndex.set(source.name, subscriptions.length);
 
@@ -376,7 +381,7 @@ export interface CheckpointUpdate {
 export class BucketParameterState {
   private readonly context: SyncContext;
   public readonly bucketStorage: BucketChecksumStateStorage;
-  public readonly syncRules: SqlSyncRules;
+  public readonly syncRules: VersionedSyncRules;
   public readonly syncParams: RequestParameters;
   private readonly querier: BucketParameterQuerier;
   /**
@@ -399,7 +404,7 @@ export class BucketParameterState {
   constructor(
     context: SyncContext,
     bucketStorage: BucketChecksumStateStorage,
-    syncRules: SqlSyncRules,
+    syncRules: VersionedSyncRules,
     tokenPayload: RequestJwtPayload,
     request: util.StreamingSyncRequest,
     logger: Logger
@@ -431,10 +436,11 @@ export class BucketParameterState {
     this.includeDefaultStreams = subscriptions?.include_defaults ?? true;
     this.explicitStreamSubscriptions = explicitStreamSubscriptions;
 
-    const { querier, errors } = syncRules.getBucketParameterQuerier({
+    const { querier, errors } = syncRules.syncRules.getBucketParameterQuerier({
       globalParameters: this.syncParams,
       hasDefaultStreams: this.includeDefaultStreams,
-      streams: streamsByName
+      streams: streamsByName,
+      bucketIdTransformer: SqlSyncRules.versionedBucketIdTransformer(`${syncRules.version}`)
     });
     this.querier = querier;
     this.streamErrors = Object.groupBy(errors, (e) => e.descriptor) as Record<string, QuerierError[]>;
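
Illustrative only, not part of the diff: callers that previously passed `SqlSyncRules` directly now wrap the parsed rules together with their version, which the querier applies via `SqlSyncRules.versionedBucketIdTransformer`. The import path assumes `VersionedSyncRules` is re-exported from the package root; the rules content and version number are placeholders.

```ts
import { SqlSyncRules } from '@powersync/service-sync-rules';
import { VersionedSyncRules } from '@powersync/service-core';

// Placeholder rules and version, for illustration only.
const versioned: VersionedSyncRules = {
  syncRules: new SqlSyncRules('bucket_definitions: {}'),
  version: 1
};
```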
package/src/sync/RequestTracker.ts CHANGED
@@ -2,6 +2,7 @@ import { MetricsEngine } from '../metrics/MetricsEngine.js';
 
 import { APIMetric } from '@powersync/service-types';
 import { SyncBucketData } from '../util/protocol-types.js';
+import { ServiceAssertionError } from '@powersync/lib-services-framework';
 
 /**
  * Record sync stats per request stream.
@@ -9,9 +10,12 @@ import { SyncBucketData } from '../util/protocol-types.js';
 export class RequestTracker {
   operationsSynced = 0;
   dataSyncedBytes = 0;
+  dataSentBytes = 0;
   operationCounts: OperationCounts = { put: 0, remove: 0, move: 0, clear: 0 };
   largeBuckets: Record<string, number> = {};
 
+  private encoding: string | undefined = undefined;
+
   constructor(private metrics: MetricsEngine) {
     this.metrics = metrics;
   }
@@ -29,18 +33,39 @@ export class RequestTracker {
     this.metrics.getCounter(APIMetric.OPERATIONS_SYNCED).add(operations.total);
   }
 
-  addDataSynced(bytes: number) {
+  setCompressed(encoding: string) {
+    this.encoding = encoding;
+  }
+
+  addPlaintextDataSynced(bytes: number) {
     this.dataSyncedBytes += bytes;
 
     this.metrics.getCounter(APIMetric.DATA_SYNCED_BYTES).add(bytes);
+
+    if (this.encoding == null) {
+      // This avoids having to create a separate stream just to track this
+      this.dataSentBytes += bytes;
+
+      this.metrics.getCounter(APIMetric.DATA_SENT_BYTES).add(bytes);
+    }
+  }
+
+  addCompressedDataSent(bytes: number) {
+    if (this.encoding == null) {
+      throw new ServiceAssertionError('No compression encoding set');
+    }
+    this.dataSentBytes += bytes;
+    this.metrics.getCounter(APIMetric.DATA_SENT_BYTES).add(bytes);
   }
 
   getLogMeta() {
     return {
       operations_synced: this.operationsSynced,
       data_synced_bytes: this.dataSyncedBytes,
+      data_sent_bytes: this.dataSentBytes,
       operation_counts: this.operationCounts,
-      large_buckets: this.largeBuckets
+      large_buckets: this.largeBuckets,
+      encoding: this.encoding
     };
   }
 }
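
A sketch of how the two byte counters diverge once a response is compressed, based only on the methods shown above; the tracker construction with a real MetricsEngine and the surrounding stream handling are omitted, and the import assumes `RequestTracker` is re-exported from the package root.

```ts
import { RequestTracker } from '@powersync/service-core';

// Assumed to be constructed elsewhere with a real MetricsEngine.
declare const tracker: RequestTracker;

// Uncompressed: synced and sent bytes advance together.
tracker.addPlaintextDataSynced(1024);

// Compressed: plaintext bytes count towards DATA_SYNCED_BYTES,
// while the compressed output is reported separately as DATA_SENT_BYTES.
tracker.setCompressed('gzip');
tracker.addPlaintextDataSynced(2048); // data_synced_bytes += 2048
tracker.addCompressedDataSent(700); // data_sent_bytes += 700

// getLogMeta() would now report data_synced_bytes: 3072,
// data_sent_bytes: 1724 and encoding: 'gzip'.
```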
package/src/sync/sync.ts CHANGED
@@ -14,7 +14,7 @@ import * as storage from '../storage/storage-index.js';
 import * as util from '../util/util-index.js';
 
 import { Logger, logger as defaultLogger } from '@powersync/lib-services-framework';
-import { BucketChecksumState, CheckpointLine } from './BucketChecksumState.js';
+import { BucketChecksumState, CheckpointLine, VersionedSyncRules } from './BucketChecksumState.js';
 import { mergeAsyncIterables } from '../streams/streams-index.js';
 import { acquireSemaphoreAbortable, settledPromise, tokenStream, TokenStreamOptions } from './util.js';
 import { SyncContext } from './SyncContext.js';
@@ -23,7 +23,7 @@ import { OperationsSentStats, RequestTracker, statsForBatch } from './RequestTra
 export interface SyncStreamParameters {
   syncContext: SyncContext;
   bucketStorage: storage.SyncRulesBucketStorage;
-  syncRules: SqlSyncRules;
+  syncRules: VersionedSyncRules;
   params: util.StreamingSyncRequest;
   token: auth.JwtPayload;
   logger?: Logger;
@@ -100,7 +100,7 @@ export async function* streamResponse(
 async function* streamResponseInner(
   syncContext: SyncContext,
   bucketStorage: storage.SyncRulesBucketStorage,
-  syncRules: SqlSyncRules,
+  syncRules: VersionedSyncRules,
   params: util.StreamingSyncRequest,
   tokenPayload: RequestJwtPayload,
   tracker: RequestTracker,
package/src/sync/util.ts CHANGED
@@ -125,7 +125,7 @@ export async function* transformToBytesTracked(
       encoded = data;
     }
 
-    tracker.addDataSynced(encoded.length);
+    tracker.addPlaintextDataSynced(encoded.length);
     yield encoded;
   }
 }
package/src/system/ServiceContext.ts CHANGED
@@ -1,4 +1,4 @@
-import { container, LifeCycledSystem, MigrationManager, ServiceIdentifier } from '@powersync/lib-services-framework';
+import { LifeCycledSystem, MigrationManager, ServiceIdentifier, container } from '@powersync/lib-services-framework';
 
 import { framework } from '../index.js';
 import * as metrics from '../metrics/MetricsEngine.js';
@@ -8,7 +8,6 @@ import * as routes from '../routes/routes-index.js';
 import * as storage from '../storage/storage-index.js';
 import { SyncContext } from '../sync/SyncContext.js';
 import * as utils from '../util/util-index.js';
-import { EventsEngine } from '../events/EventsEngine.js';
 
 export interface ServiceContext {
   configuration: utils.ResolvedPowerSyncConfig;
@@ -20,7 +19,6 @@ export interface ServiceContext {
   migrations: PowerSyncMigrationManager;
   syncContext: SyncContext;
   serviceMode: ServiceContextMode;
-  eventsEngine: EventsEngine;
 }
 
 export enum ServiceContextMode {
@@ -47,7 +45,6 @@ export class ServiceContextContainer implements ServiceContext {
   configuration: utils.ResolvedPowerSyncConfig;
   lifeCycleEngine: LifeCycledSystem;
   storageEngine: storage.StorageEngine;
-  eventsEngine: EventsEngine;
   syncContext: SyncContext;
   routerEngine: routes.RouterEngine;
   serviceMode: ServiceContextMode;
@@ -69,11 +66,6 @@ export class ServiceContextContainer implements ServiceContext {
       }
     });
 
-    this.eventsEngine = new EventsEngine();
-    this.lifeCycleEngine.withLifecycle(this.eventsEngine, {
-      stop: (emitterEngine) => emitterEngine.shutDown()
-    });
-
     this.lifeCycleEngine.withLifecycle(this.storageEngine, {
       start: (storageEngine) => storageEngine.start(),
       stop: (storageEngine) => storageEngine.shutDown()
@@ -97,10 +89,6 @@ export class ServiceContextContainer implements ServiceContext {
       // Migrations should be executed before the system starts
       start: () => migrationManager[Symbol.asyncDispose]()
     });
-
-    this.lifeCycleEngine.withLifecycle(this.eventsEngine, {
-      stop: (emitterEngine) => emitterEngine.shutDown()
-    });
   }
 
   get replicationEngine(): replication.ReplicationEngine | null {
package/src/util/utils.ts CHANGED
@@ -6,11 +6,26 @@ import { BucketChecksum, ProtocolOpId, OplogEntry } from './protocol-types.js';
 
 import * as storage from '../storage/storage-index.js';
 
-import { PartialChecksum } from '../storage/ChecksumCache.js';
 import { ServiceAssertionError } from '@powersync/lib-services-framework';
 
 export type ChecksumMap = Map<string, BucketChecksum>;
 
+/**
+ * A partial checksum can never be used on its own - must always be combined with a full BucketChecksum.
+ */
+export interface PartialChecksum {
+  bucket: string;
+  /**
+   * 32-bit unsigned hash.
+   */
+  partialChecksum: number;
+
+  /**
+   * Count of operations - informational only.
+   */
+  partialCount: number;
+}
+
 /**
  * op_id as used internally, for individual operations and checkpoints.
  *
@@ -83,20 +98,49 @@ export function addChecksums(a: number, b: number) {
   return (a + b) & 0xffffffff;
 }
 
-export function addBucketChecksums(a: BucketChecksum, b: PartialChecksum | null): BucketChecksum {
-  if (b == null) {
-    return a;
-  } else if (b.isFullChecksum) {
+export function isPartialChecksum(c: PartialChecksum | BucketChecksum): c is PartialChecksum {
+  return 'partialChecksum' in c;
+}
+
+export function addBucketChecksums(a: BucketChecksum, b: PartialChecksum | BucketChecksum | null): BucketChecksum {
+  const checksum = addPartialChecksums(a.bucket, a, b);
+  if (isPartialChecksum(checksum)) {
+    // Should not happen since a != null
+    throw new ServiceAssertionError('Expected full checksum');
+  }
+  return checksum;
+}
+
+export function addPartialChecksums(
+  bucket: string,
+  a: BucketChecksum | null,
+  b: PartialChecksum | BucketChecksum | null
+): PartialChecksum | BucketChecksum {
+  if (a != null && b != null) {
+    if (!isPartialChecksum(b)) {
+      // Replaces preState
+      return b;
+    }
+    // merge
+    return {
+      bucket,
+      checksum: addChecksums(a.checksum, b.partialChecksum),
+      count: a.count + b.partialCount
+    };
+  } else if (a != null) {
     return {
-      bucket: b.bucket,
-      count: b.partialCount,
-      checksum: b.partialChecksum
+      bucket,
+      checksum: a.checksum,
+      count: a.count
     };
+  } else if (b != null) {
+    return b;
   } else {
+    // No data found (may still have a previously-cached checksum).
    return {
-      bucket: a.bucket,
-      count: a.count + b.partialCount,
-      checksum: addChecksums(a.checksum, b.partialChecksum)
+      bucket,
+      partialChecksum: 0,
+      partialCount: 0
     };
   }
 }
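
A short, illustrative example of the new merge helper; values are arbitrary, and the import assumes these utilities are re-exported from the package root like the rest of `util/utils.ts`.

```ts
import { addPartialChecksums, isPartialChecksum } from '@powersync/service-core';

// A cached full checksum combined with a partial checksum covering newer
// operations yields an updated full checksum; the checksum field uses the
// same 32-bit addition as addChecksums().
const merged = addPartialChecksums(
  'by_user["u1"]',
  { bucket: 'by_user["u1"]', checksum: 1000, count: 10 }, // previously cached full checksum
  { bucket: 'by_user["u1"]', partialChecksum: 24, partialCount: 2 } // ops after the cached checkpoint
);
// => { bucket: 'by_user["u1"]', checksum: 1024, count: 12 }

// With neither a cached checksum nor new data, a zero-valued partial checksum is returned.
const empty = addPartialChecksums('by_user["u1"]', null, null);
console.log(isPartialChecksum(empty)); // true
```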
package/test/src/checksum_cache.test.ts CHANGED
@@ -1,5 +1,5 @@
-import { ChecksumCache, FetchChecksums, FetchPartialBucketChecksum, PartialChecksum } from '@/storage/ChecksumCache.js';
-import { addChecksums, InternalOpId } from '@/util/util-index.js';
+import { ChecksumCache, FetchChecksums, FetchPartialBucketChecksum } from '@/storage/ChecksumCache.js';
+import { addChecksums, BucketChecksum, InternalOpId, PartialChecksum } from '@/util/util-index.js';
 import * as crypto from 'node:crypto';
 import { describe, expect, it } from 'vitest';
 
@@ -12,22 +12,20 @@ function testHash(bucket: string, checkpoint: InternalOpId) {
   return hash;
 }
 
-function testPartialHash(request: FetchPartialBucketChecksum): PartialChecksum {
+function testPartialHash(request: FetchPartialBucketChecksum): PartialChecksum | BucketChecksum {
   if (request.start) {
     const a = testHash(request.bucket, request.start);
     const b = testHash(request.bucket, request.end);
     return {
       bucket: request.bucket,
       partialCount: Number(request.end) - Number(request.start),
-      partialChecksum: addChecksums(b, -a),
-      isFullChecksum: false
+      partialChecksum: addChecksums(b, -a)
     };
   } else {
     return {
       bucket: request.bucket,
-      partialChecksum: testHash(request.bucket, request.end),
-      partialCount: Number(request.end),
-      isFullChecksum: true
+      checksum: testHash(request.bucket, request.end),
+      count: Number(request.end)
     };
   }
 }
package/test/src/routes/mocks.ts ADDED
@@ -0,0 +1,59 @@
+import {
+  BucketStorageFactory,
+  createCoreAPIMetrics,
+  MetricsEngine,
+  OpenTelemetryMetricsFactory,
+  RouteAPI,
+  RouterEngine,
+  ServiceContext,
+  StorageEngine,
+  SyncContext,
+  SyncRulesBucketStorage
+} from '@/index.js';
+import { MeterProvider } from '@opentelemetry/sdk-metrics';
+
+export function mockServiceContext(storage: Partial<SyncRulesBucketStorage> | null) {
+  // This is very incomplete - just enough to get the current tests passing.
+
+  const storageEngine: StorageEngine = {
+    activeBucketStorage: {
+      async getActiveStorage() {
+        return storage;
+      }
+    } as Partial<BucketStorageFactory>
+  } as any;
+
+  const meterProvider = new MeterProvider({
+    readers: []
+  });
+  const meter = meterProvider.getMeter('powersync-tests');
+  const metricsEngine = new MetricsEngine({
+    disable_telemetry_sharing: true,
+    factory: new OpenTelemetryMetricsFactory(meter)
+  });
+  createCoreAPIMetrics(metricsEngine);
+  const service_context: Partial<ServiceContext> = {
+    syncContext: new SyncContext({ maxBuckets: 1, maxDataFetchConcurrency: 1, maxParameterQueryResults: 1 }),
+    routerEngine: {
+      getAPI() {
+        return {
+          getParseSyncRulesOptions() {
+            return { defaultSchema: 'public' };
+          }
+        } as Partial<RouteAPI>;
+      },
+      addStopHandler() {
+        return () => {};
+      }
+    } as Partial<RouterEngine> as any,
+    storageEngine,
+    metricsEngine: metricsEngine,
+    // Not used
+    configuration: null as any,
+    lifeCycleEngine: null as any,
+    migrations: null as any,
+    replicationEngine: null as any,
+    serviceMode: null as any
+  };
+  return service_context as ServiceContext;
+}
package/test/src/routes/stream.test.ts ADDED
@@ -0,0 +1,84 @@
+import { BasicRouterRequest, Context, SyncRulesBucketStorage } from '@/index.js';
+import { logger, RouterResponse, ServiceError } from '@powersync/lib-services-framework';
+import { SqlSyncRules } from '@powersync/service-sync-rules';
+import { Readable, Writable } from 'stream';
+import { pipeline } from 'stream/promises';
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+import { syncStreamed } from '../../../src/routes/endpoints/sync-stream.js';
+import { mockServiceContext } from './mocks.js';
+
+describe('Stream Route', () => {
+  describe('compressed stream', () => {
+    it('handles missing sync rules', async () => {
+      const context: Context = {
+        logger: logger,
+        service_context: mockServiceContext(null)
+      };
+
+      const request: BasicRouterRequest = {
+        headers: {},
+        hostname: '',
+        protocol: 'http'
+      };
+
+      const error = (await (syncStreamed.handler({ context, params: {}, request }) as Promise<RouterResponse>).catch(
+        (e) => e
+      )) as ServiceError;
+
+      expect(error.errorData.status).toEqual(500);
+      expect(error.errorData.code).toEqual('PSYNC_S2302');
+    });
+
+    it('handles a stream error with compression', async () => {
+      // This primarily tests that an underlying storage error doesn't result in an uncaught error
+      // when compressing the stream.
+
+      const storage = {
+        getParsedSyncRules() {
+          return new SqlSyncRules('bucket_definitions: {}');
+        },
+        watchCheckpointChanges: async function* (options) {
+          throw new Error('Simulated storage error');
+        }
+      } as Partial<SyncRulesBucketStorage>;
+      const serviceContext = mockServiceContext(storage);
+
+      const context: Context = {
+        logger: logger,
+        service_context: serviceContext,
+        token_payload: {
+          exp: new Date().getTime() / 1000 + 10000,
+          iat: new Date().getTime() / 1000 - 10000,
+          sub: 'test-user'
+        }
+      };
+
+      // It may be worth eventually doing this via Fastify to test the full stack
+
+      const request: BasicRouterRequest = {
+        headers: {
+          'accept-encoding': 'gzip'
+        },
+        hostname: '',
+        protocol: 'http'
+      };
+
+      const response = await (syncStreamed.handler({ context, params: {}, request }) as Promise<RouterResponse>);
+      expect(response.status).toEqual(200);
+      const stream = response.data as Readable;
+      const r = await drainWithTimeout(stream).catch((error) => error);
+      expect(r.message).toContain('Simulated storage error');
+    });
+  });
+});
+
+export async function drainWithTimeout(readable: Readable, ms = 2_000) {
+  const devNull = new Writable({
+    write(_chunk, _enc, cb) {
+      cb();
+    } // discard everything
+  });
+
+  // Throws AbortError if it takes longer than ms, and destroys the stream
+  await pipeline(readable, devNull, { signal: AbortSignal.timeout(ms) });
+}