@powersync/service-core 0.0.0-dev-20250317122913 → 0.0.0-dev-20250326092547

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. package/CHANGELOG.md +18 -4
  2. package/dist/api/api-index.d.ts +1 -0
  3. package/dist/api/api-index.js +1 -0
  4. package/dist/api/api-index.js.map +1 -1
  5. package/dist/api/api-metrics.d.ts +11 -0
  6. package/dist/api/api-metrics.js +30 -0
  7. package/dist/api/api-metrics.js.map +1 -0
  8. package/dist/index.d.ts +2 -2
  9. package/dist/index.js +2 -2
  10. package/dist/index.js.map +1 -1
  11. package/dist/metrics/MetricsEngine.d.ts +21 -0
  12. package/dist/metrics/MetricsEngine.js +79 -0
  13. package/dist/metrics/MetricsEngine.js.map +1 -0
  14. package/dist/metrics/metrics-index.d.ts +5 -0
  15. package/dist/metrics/metrics-index.js +6 -0
  16. package/dist/metrics/metrics-index.js.map +1 -0
  17. package/dist/metrics/metrics-interfaces.d.ts +36 -0
  18. package/dist/metrics/metrics-interfaces.js +6 -0
  19. package/dist/metrics/metrics-interfaces.js.map +1 -0
  20. package/dist/metrics/open-telemetry/OpenTelemetryMetricsFactory.d.ts +10 -0
  21. package/dist/metrics/open-telemetry/OpenTelemetryMetricsFactory.js +51 -0
  22. package/dist/metrics/open-telemetry/OpenTelemetryMetricsFactory.js.map +1 -0
  23. package/dist/metrics/open-telemetry/util.d.ts +6 -0
  24. package/dist/metrics/open-telemetry/util.js +56 -0
  25. package/dist/metrics/open-telemetry/util.js.map +1 -0
  26. package/dist/metrics/register-metrics.d.ts +11 -0
  27. package/dist/metrics/register-metrics.js +44 -0
  28. package/dist/metrics/register-metrics.js.map +1 -0
  29. package/dist/replication/AbstractReplicationJob.d.ts +2 -0
  30. package/dist/replication/AbstractReplicationJob.js.map +1 -1
  31. package/dist/replication/AbstractReplicator.d.ts +3 -0
  32. package/dist/replication/AbstractReplicator.js +3 -0
  33. package/dist/replication/AbstractReplicator.js.map +1 -1
  34. package/dist/replication/ReplicationModule.d.ts +7 -0
  35. package/dist/replication/ReplicationModule.js +1 -0
  36. package/dist/replication/ReplicationModule.js.map +1 -1
  37. package/dist/replication/replication-index.d.ts +1 -0
  38. package/dist/replication/replication-index.js +1 -0
  39. package/dist/replication/replication-index.js.map +1 -1
  40. package/dist/replication/replication-metrics.d.ts +11 -0
  41. package/dist/replication/replication-metrics.js +39 -0
  42. package/dist/replication/replication-metrics.js.map +1 -0
  43. package/dist/routes/configure-fastify.d.ts +1 -1
  44. package/dist/routes/endpoints/probes.d.ts +2 -2
  45. package/dist/routes/endpoints/probes.js +16 -2
  46. package/dist/routes/endpoints/probes.js.map +1 -1
  47. package/dist/routes/endpoints/socket-route.js +5 -5
  48. package/dist/routes/endpoints/socket-route.js.map +1 -1
  49. package/dist/routes/endpoints/sync-stream.js +6 -6
  50. package/dist/routes/endpoints/sync-stream.js.map +1 -1
  51. package/dist/storage/SyncRulesBucketStorage.d.ts +11 -1
  52. package/dist/storage/SyncRulesBucketStorage.js +1 -1
  53. package/dist/storage/SyncRulesBucketStorage.js.map +1 -1
  54. package/dist/storage/WriteCheckpointAPI.d.ts +0 -2
  55. package/dist/storage/WriteCheckpointAPI.js.map +1 -1
  56. package/dist/storage/storage-index.d.ts +1 -0
  57. package/dist/storage/storage-index.js +1 -0
  58. package/dist/storage/storage-index.js.map +1 -1
  59. package/dist/storage/storage-metrics.d.ts +4 -0
  60. package/dist/storage/storage-metrics.js +56 -0
  61. package/dist/storage/storage-metrics.js.map +1 -0
  62. package/dist/sync/BucketChecksumState.d.ts +4 -2
  63. package/dist/sync/BucketChecksumState.js +17 -26
  64. package/dist/sync/BucketChecksumState.js.map +1 -1
  65. package/dist/sync/RequestTracker.d.ts +3 -0
  66. package/dist/sync/RequestTracker.js +8 -3
  67. package/dist/sync/RequestTracker.js.map +1 -1
  68. package/dist/sync/util.d.ts +10 -2
  69. package/dist/sync/util.js +25 -6
  70. package/dist/sync/util.js.map +1 -1
  71. package/dist/system/ServiceContext.d.ts +3 -3
  72. package/dist/system/ServiceContext.js +7 -3
  73. package/dist/system/ServiceContext.js.map +1 -1
  74. package/package.json +8 -8
  75. package/src/api/api-index.ts +1 -0
  76. package/src/api/api-metrics.ts +35 -0
  77. package/src/index.ts +2 -2
  78. package/src/metrics/MetricsEngine.ts +98 -0
  79. package/src/metrics/metrics-index.ts +5 -0
  80. package/src/metrics/metrics-interfaces.ts +41 -0
  81. package/src/metrics/open-telemetry/OpenTelemetryMetricsFactory.ts +66 -0
  82. package/src/metrics/open-telemetry/util.ts +74 -0
  83. package/src/metrics/register-metrics.ts +56 -0
  84. package/src/replication/AbstractReplicationJob.ts +2 -0
  85. package/src/replication/AbstractReplicator.ts +7 -0
  86. package/src/replication/ReplicationModule.ts +10 -0
  87. package/src/replication/replication-index.ts +1 -0
  88. package/src/replication/replication-metrics.ts +45 -0
  89. package/src/routes/endpoints/probes.ts +18 -2
  90. package/src/routes/endpoints/socket-route.ts +6 -5
  91. package/src/routes/endpoints/sync-stream.ts +7 -6
  92. package/src/storage/SyncRulesBucketStorage.ts +12 -2
  93. package/src/storage/WriteCheckpointAPI.ts +0 -2
  94. package/src/storage/storage-index.ts +1 -0
  95. package/src/storage/storage-metrics.ts +67 -0
  96. package/src/sync/BucketChecksumState.ts +25 -41
  97. package/src/sync/RequestTracker.ts +9 -3
  98. package/src/sync/util.ts +29 -8
  99. package/src/system/ServiceContext.ts +9 -4
  100. package/test/src/routes/probes.integration.test.ts +5 -5
  101. package/test/src/routes/probes.test.ts +5 -4
  102. package/test/src/sync/BucketChecksumState.test.ts +5 -5
  103. package/test/src/util.test.ts +48 -0
  104. package/tsconfig.tsbuildinfo +1 -1
  105. package/dist/metrics/Metrics.d.ts +0 -30
  106. package/dist/metrics/Metrics.js +0 -202
  107. package/dist/metrics/Metrics.js.map +0 -1
  108. package/src/metrics/Metrics.ts +0 -255
package/src/metrics/register-metrics.ts
@@ -0,0 +1,56 @@
+ import { ServiceContextContainer } from '../system/ServiceContext.js';
+ import { createOpenTelemetryMetricsFactory } from './open-telemetry/util.js';
+ import { MetricsEngine } from './MetricsEngine.js';
+ import { createCoreAPIMetrics, initializeCoreAPIMetrics } from '../api/api-metrics.js';
+ import { createCoreReplicationMetrics, initializeCoreReplicationMetrics } from '../replication/replication-metrics.js';
+ import { createCoreStorageMetrics, initializeCoreStorageMetrics } from '../storage/storage-metrics.js';
+
+ export enum MetricModes {
+   API = 'api',
+   REPLICATION = 'replication',
+   STORAGE = 'storage'
+ }
+
+ export type MetricsRegistrationOptions = {
+   service_context: ServiceContextContainer;
+   modes: MetricModes[];
+ };
+
+ export const registerMetrics = async (options: MetricsRegistrationOptions) => {
+   const { service_context, modes } = options;
+
+   const metricsFactory = createOpenTelemetryMetricsFactory(service_context);
+   const metricsEngine = new MetricsEngine({
+     factory: metricsFactory,
+     disable_telemetry_sharing: service_context.configuration.telemetry.disable_telemetry_sharing
+   });
+   service_context.register(MetricsEngine, metricsEngine);
+
+   if (modes.includes(MetricModes.API)) {
+     createCoreAPIMetrics(metricsEngine);
+     initializeCoreAPIMetrics(metricsEngine);
+   }
+
+   if (modes.includes(MetricModes.REPLICATION)) {
+     createCoreReplicationMetrics(metricsEngine);
+     initializeCoreReplicationMetrics(metricsEngine);
+   }
+
+   if (modes.includes(MetricModes.STORAGE)) {
+     createCoreStorageMetrics(metricsEngine);
+
+     // This requires an instantiated bucket storage, which is only created when the lifecycle starts
+     service_context.storageEngine.registerListener({
+       storageActivated: (bucketStorage) => {
+         initializeCoreStorageMetrics(metricsEngine, bucketStorage);
+       }
+     });
+   }
+
+   service_context.lifeCycleEngine.withLifecycle(metricsEngine, {
+     start: async () => {
+       await metricsEngine.start();
+     },
+     stop: () => metricsEngine.shutdown()
+   });
+ };
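
Note: this new `registerMetrics` entry point replaces the deleted `Metrics` singleton (files 105–108 above). A minimal wiring sketch, assuming the symbols are re-exported from the package root (the import path is an assumption, not confirmed by this diff) and that a configured `ServiceContextContainer` already exists:

    import { registerMetrics, MetricModes } from '@powersync/service-core'; // assumed export path
    import type { ServiceContextContainer } from '@powersync/service-core'; // assumed export path

    // Hypothetical runner setup: register all core metric groups on an
    // already-configured service context.
    async function setupMetrics(serviceContext: ServiceContextContainer) {
      await registerMetrics({
        service_context: serviceContext,
        modes: [MetricModes.API, MetricModes.REPLICATION, MetricModes.STORAGE]
      });
      // The MetricsEngine is now registered on the context and is started
      // and stopped by the lifecycle engine.
    }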
package/src/replication/AbstractReplicationJob.ts
@@ -2,10 +2,12 @@ import { container, logger } from '@powersync/lib-services-framework';
  import winston from 'winston';
  import * as storage from '../storage/storage-index.js';
  import { ErrorRateLimiter } from './ErrorRateLimiter.js';
+ import { MetricsEngine } from '../metrics/MetricsEngine.js';

  export interface AbstractReplicationJobOptions {
    id: string;
    storage: storage.SyncRulesBucketStorage;
+   metrics: MetricsEngine;
    lock: storage.ReplicationLock;
    rateLimiter: ErrorRateLimiter;
  }
package/src/replication/AbstractReplicator.ts
@@ -7,6 +7,7 @@ import { SyncRulesProvider } from '../util/config/sync-rules/sync-rules-provider
  import { AbstractReplicationJob } from './AbstractReplicationJob.js';
  import { ErrorRateLimiter } from './ErrorRateLimiter.js';
  import { ConnectionTestResult } from './ReplicationModule.js';
+ import { MetricsEngine } from '../metrics/MetricsEngine.js';

  // 5 minutes
  const PING_INTERVAL = 1_000_000_000n * 300n;
@@ -19,6 +20,7 @@ export interface CreateJobOptions {
  export interface AbstractReplicatorOptions {
    id: string;
    storageEngine: StorageEngine;
+   metricsEngine: MetricsEngine;
    syncRuleProvider: SyncRulesProvider;
    /**
     * This limits the effect of retries when there is a persistent issue.
@@ -33,6 +35,7 @@
   */
  export abstract class AbstractReplicator<T extends AbstractReplicationJob = AbstractReplicationJob> {
    protected logger: winston.Logger;
+
    /**
     * Map of replication jobs by sync rule id. Usually there is only one running job, but there could be two when
     * transitioning to a new set of sync rules.
@@ -72,6 +75,10 @@ export abstract class AbstractReplicator<T extends AbstractReplicationJob = Abst
      return this.options.rateLimiter;
    }

+   protected get metrics() {
+     return this.options.metricsEngine;
+   }
+
    public async start(): Promise<void> {
      this.runLoop().catch((e) => {
        this.logger.error('Data source fatal replication error', e);
package/src/replication/ReplicationModule.ts
@@ -64,6 +64,14 @@ export abstract class ReplicationModule<TConfig extends DataSourceConfig>
   */
  protected abstract createReplicator(context: system.ServiceContext): AbstractReplicator;

+ /**
+  * Any additional initialization specific to the module should be added here.
+  * Called, if necessary, after the main initialization has completed.
+  * @param context
+  * @protected
+  */
+ protected abstract onInitialized(context: system.ServiceContext): Promise<void>;
+
  public abstract testConnection(config: TConfig): Promise<ConnectionTestResult>;

  /**
@@ -93,6 +101,8 @@

    context.replicationEngine?.register(this.createReplicator(context));
    context.routerEngine?.registerAPI(this.createRouteAPIAdapter());
+
+   await this.onInitialized(context);
  }

  protected decodeConfig(config: TConfig): void {
package/src/replication/replication-index.ts
@@ -3,3 +3,4 @@ export * from './AbstractReplicator.js';
  export * from './ErrorRateLimiter.js';
  export * from './ReplicationEngine.js';
  export * from './ReplicationModule.js';
+ export * from './replication-metrics.js';
package/src/replication/replication-metrics.ts
@@ -0,0 +1,45 @@
+ import { MetricsEngine } from '../metrics/metrics-index.js';
+ import { ReplicationMetric } from '@powersync/service-types';
+
+ /**
+  * Create and register the core replication metrics.
+  * @param engine
+  */
+ export function createCoreReplicationMetrics(engine: MetricsEngine): void {
+   engine.createCounter({
+     name: ReplicationMetric.DATA_REPLICATED_BYTES,
+     description: 'Uncompressed size of replicated data',
+     unit: 'bytes'
+   });
+
+   engine.createCounter({
+     name: ReplicationMetric.ROWS_REPLICATED,
+     description: 'Total number of replicated rows'
+   });
+
+   engine.createCounter({
+     name: ReplicationMetric.TRANSACTIONS_REPLICATED,
+     description: 'Total number of replicated transactions'
+   });
+
+   engine.createCounter({
+     name: ReplicationMetric.CHUNKS_REPLICATED,
+     description: 'Total number of replication chunks'
+   });
+ }
+
+ /**
+  * Initialise the core replication metrics. This should be called after the metrics have been created.
+  * @param engine
+  */
+ export function initializeCoreReplicationMetrics(engine: MetricsEngine): void {
+   const data_replicated_bytes = engine.getCounter(ReplicationMetric.DATA_REPLICATED_BYTES);
+   const rows_replicated_total = engine.getCounter(ReplicationMetric.ROWS_REPLICATED);
+   const transactions_replicated_total = engine.getCounter(ReplicationMetric.TRANSACTIONS_REPLICATED);
+   const chunks_replicated_total = engine.getCounter(ReplicationMetric.CHUNKS_REPLICATED);
+
+   data_replicated_bytes.add(0);
+   rows_replicated_total.add(0);
+   transactions_replicated_total.add(0);
+   chunks_replicated_total.add(0);
+ }
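
Note: the `.add(0)` calls presumably make each counter report a zero value immediately, rather than only after the first replication event. Call sites then increment counters by name through the engine; a sketch using only the `getCounter`/`add` surface shown in this diff (the helper itself and the import paths are illustrative):

    import { ReplicationMetric } from '@powersync/service-types';
    import type { MetricsEngine } from '@powersync/service-core'; // assumed export path

    // Illustrative helper: record one replicated batch through the engine.
    function recordBatch(metrics: MetricsEngine, rows: number, bytes: number) {
      metrics.getCounter(ReplicationMetric.ROWS_REPLICATED).add(rows);
      metrics.getCounter(ReplicationMetric.DATA_REPLICATED_BYTES).add(bytes);
    }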
package/src/routes/endpoints/probes.ts
@@ -25,11 +25,27 @@ export const startupCheck = routeDefinition({
  export const livenessCheck = routeDefinition({
    path: ProbeRoutes.LIVENESS,
    method: router.HTTPMethod.GET,
-   handler: async () => {
+   handler: async (params) => {
      const state = container.probes.state();

+     /**
+      * The HTTP probes currently only function in the API and UNIFIED
+      * modes.
+      *
+      * For the API mode, we don't really touch the state, but any response from
+      * the request indicates the service is alive.
+      *
+      * For the UNIFIED mode we update the touched_at time while the Replicator engine is running.
+      * If the replication engine is present and the timeDifference from the last
+      * touched_at is large, we report that the service is not live.
+      *
+      * This is only an incremental improvement. In future these values should be configurable.
+      */
+
+     const isAPIOnly = !params.context.service_context.replicationEngine;
      const timeDifference = Date.now() - state.touched_at.getTime();
-     const status = timeDifference < 10000 ? 200 : 400;
+
+     const status = isAPIOnly ? 200 : timeDifference < 10000 ? 200 : 400;

      return new router.RouterResponse({
        status,
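
Note: condensed, the new liveness decision is that an API-only deployment (no replication engine on the service context) always reports live, while a deployment running replication must have touched the probe state within the last 10 seconds. An equivalent restatement in isolation (illustrative only):

    // Illustrative restatement of the probe decision above.
    function livenessStatus(isAPIOnly: boolean, touchedAt: Date): number {
      const fresh = Date.now() - touchedAt.getTime() < 10_000;
      return isAPIOnly || fresh ? 200 : 400;
    }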
package/src/routes/endpoints/socket-route.ts
@@ -2,18 +2,19 @@ import { ErrorCode, errors, logger, schema } from '@powersync/lib-services-frame
  import { RequestParameters } from '@powersync/service-sync-rules';
  import { serialize } from 'bson';

- import { Metrics } from '../../metrics/Metrics.js';
  import * as sync from '../../sync/sync-index.js';
  import * as util from '../../util/util-index.js';
  import { SocketRouteGenerator } from '../router-socket.js';
  import { SyncRoutes } from './sync-stream.js';

+ import { APIMetric } from '@powersync/service-types';
+
  export const syncStreamReactive: SocketRouteGenerator = (router) =>
    router.reactiveStream<util.StreamingSyncRequest, any>(SyncRoutes.STREAM, {
      validator: schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }),
      handler: async ({ context, params, responder, observer, initialN, signal: upstreamSignal }) => {
        const { service_context } = context;
-       const { routerEngine, syncContext } = service_context;
+       const { routerEngine, metricsEngine, syncContext } = service_context;

        // Create our own controller that we can abort directly
        const controller = new AbortController();
@@ -69,8 +70,8 @@ export const syncStreamReactive: SocketRouteGenerator = (router) =>
          controller.abort();
        });

-       Metrics.getInstance().concurrent_connections.add(1);
-       const tracker = new sync.RequestTracker();
+       metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(1);
+       const tracker = new sync.RequestTracker(metricsEngine);
        try {
          for await (const data of sync.streamResponse({
            syncContext: syncContext,
@@ -147,7 +148,7 @@
            operations_synced: tracker.operationsSynced,
            data_synced_bytes: tracker.dataSyncedBytes
          });
-         Metrics.getInstance().concurrent_connections.add(-1);
+         metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(-1);
        }
      }
    });
package/src/routes/endpoints/sync-stream.ts
@@ -5,10 +5,11 @@ import { Readable } from 'stream';
  import * as sync from '../../sync/sync-index.js';
  import * as util from '../../util/util-index.js';

- import { Metrics } from '../../metrics/Metrics.js';
  import { authUser } from '../auth.js';
  import { routeDefinition } from '../router.js';

+ import { APIMetric } from '@powersync/service-types';
+
  export enum SyncRoutes {
    STREAM = '/sync/stream'
  }
@@ -20,7 +21,7 @@ export const syncStreamed = routeDefinition({
    validator: schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }),
    handler: async (payload) => {
      const { service_context } = payload.context;
-     const { routerEngine, storageEngine, syncContext } = service_context;
+     const { routerEngine, storageEngine, metricsEngine, syncContext } = service_context;
      const headers = payload.request.headers;
      const userAgent = headers['x-user-agent'] ?? headers['user-agent'];
      const clientId = payload.params.client_id;
@@ -49,9 +50,9 @@
      const syncRules = bucketStorage.getParsedSyncRules(routerEngine!.getAPI().getParseSyncRulesOptions());

      const controller = new AbortController();
-     const tracker = new sync.RequestTracker();
+     const tracker = new sync.RequestTracker(metricsEngine);
      try {
-       Metrics.getInstance().concurrent_connections.add(1);
+       metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(1);
        const stream = Readable.from(
          sync.transformToBytesTracked(
            sync.ndjson(
@@ -96,7 +97,7 @@
          data: stream,
          afterSend: async () => {
            controller.abort();
-           Metrics.getInstance().concurrent_connections.add(-1);
+           metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(-1);
            logger.info(`Sync stream complete`, {
              user_id: syncParams.user_id,
              client_id: clientId,
@@ -108,7 +109,7 @@
        });
      } catch (ex) {
        controller.abort();
-       Metrics.getInstance().concurrent_connections.add(-1);
+       metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(-1);
      }
    }
  });
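
Note: both routes now pair every increment of APIMetric.CONCURRENT_CONNECTIONS with a decrement on completion or error, replacing the old Metrics.getInstance() singleton calls. The general shape of this accounting (illustrative; the real routes decrement in afterSend and in the catch branch rather than in a finally):

    // `metricsEngine` and `APIMetric` as destructured/imported in the routes above.
    const connections = metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS);
    connections.add(1);
    try {
      // ...serve the sync stream...
    } finally {
      connections.add(-1);
    }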
package/src/storage/SyncRulesBucketStorage.ts
@@ -73,6 +73,16 @@ export interface SyncRulesBucketStorage
   */
  getParameterSets(checkpoint: util.InternalOpId, lookups: ParameterLookup[]): Promise<SqliteJsonRow[]>;

+ /**
+  * Given two checkpoints, return the changes in bucket data and parameters that may have occurred
+  * in that period.
+  *
+  * This is a best-effort optimization:
+  * 1. This may include more changes than what actually occurred.
+  * 2. This may return invalidateDataBuckets or invalidateParameterBuckets instead of returning
+  *    specific changes.
+  * @param options
+  */
  getCheckpointChanges(options: GetCheckpointChangesOptions): Promise<CheckpointChanges>;

  /**
@@ -251,7 +261,7 @@
  }

  export interface CheckpointChanges {
-   updatedDataBuckets: string[];
+   updatedDataBuckets: Set<string>;
    invalidateDataBuckets: boolean;
    /** Serialized using JSONBig */
    updatedParameterLookups: Set<string>;
@@ -259,7 +269,7 @@
  }

  export const CHECKPOINT_INVALIDATE_ALL: CheckpointChanges = {
-   updatedDataBuckets: [],
+   updatedDataBuckets: new Set<string>(),
    invalidateDataBuckets: true,
    updatedParameterLookups: new Set<string>(),
    invalidateParameterBuckets: true
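
Note: changing `updatedDataBuckets` from `string[]` to `Set<string>` gives O(1) membership tests, which the intersection logic added to BucketChecksumState (below) relies on. A consumer-side check might look like this (hypothetical helper, not part of the diff; the parameter type mirrors CheckpointChanges structurally):

    // Hypothetical: did any bucket we track change under this checkpoint delta?
    function anyTrackedBucketUpdated(
      changes: { invalidateDataBuckets: boolean; updatedDataBuckets: Set<string> },
      tracked: Iterable<string>
    ): boolean {
      if (changes.invalidateDataBuckets) return true;
      for (const bucket of tracked) {
        if (changes.updatedDataBuckets.has(bucket)) return true; // O(1) with a Set
      }
      return false;
    }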
package/src/storage/WriteCheckpointAPI.ts
@@ -94,7 +94,6 @@
   */
  export interface SyncStorageWriteCheckpointAPI extends BaseWriteCheckpointAPI {
    batchCreateCustomWriteCheckpoints(checkpoints: BatchedCustomWriteCheckpointOptions[]): Promise<void>;
-   createCustomWriteCheckpoint(checkpoint: BatchedCustomWriteCheckpointOptions): Promise<bigint>;
    lastWriteCheckpoint(filters: SyncStorageLastWriteCheckpointFilters): Promise<bigint | null>;
  }

@@ -104,7 +103,6 @@ export interface SyncStorageWriteCheckpointAPI extends BaseWriteCheckpointAPI {
   */
  export interface WriteCheckpointAPI extends BaseWriteCheckpointAPI {
    batchCreateCustomWriteCheckpoints(checkpoints: CustomWriteCheckpointOptions[]): Promise<void>;
-   createCustomWriteCheckpoint(checkpoint: CustomWriteCheckpointOptions): Promise<bigint>;
    lastWriteCheckpoint(filters: LastWriteCheckpointFilters): Promise<bigint | null>;

    watchUserWriteCheckpoint(options: WatchUserWriteCheckpointOptions): AsyncIterable<WriteCheckpointResult>;
package/src/storage/storage-index.ts
@@ -6,6 +6,7 @@ export * from './SourceEntity.js';
  export * from './SourceTable.js';
  export * from './StorageEngine.js';
  export * from './StorageProvider.js';
+ export * from './storage-metrics.js';
  export * from './WriteCheckpointAPI.js';
  export * from './BucketStorageFactory.js';
  export * from './BucketStorageBatch.js';
package/src/storage/storage-metrics.ts
@@ -0,0 +1,67 @@
+ import { MetricsEngine } from '../metrics/MetricsEngine.js';
+ import { logger } from '@powersync/lib-services-framework';
+ import { BucketStorageFactory, StorageMetrics } from './BucketStorageFactory.js';
+ import { StorageMetric } from '@powersync/service-types';
+
+ export function createCoreStorageMetrics(engine: MetricsEngine): void {
+   engine.createObservableGauge({
+     name: StorageMetric.REPLICATION_SIZE_BYTES,
+     description: 'Size of current data stored in PowerSync',
+     unit: 'bytes'
+   });
+
+   engine.createObservableGauge({
+     name: StorageMetric.OPERATION_SIZE_BYTES,
+     description: 'Size of operations stored in PowerSync',
+     unit: 'bytes'
+   });
+
+   engine.createObservableGauge({
+     name: StorageMetric.PARAMETER_SIZE_BYTES,
+     description: 'Size of parameter data stored in PowerSync',
+     unit: 'bytes'
+   });
+ }
+
+ export function initializeCoreStorageMetrics(engine: MetricsEngine, storage: BucketStorageFactory): void {
+   const replication_storage_size_bytes = engine.getObservableGauge(StorageMetric.REPLICATION_SIZE_BYTES);
+   const operation_storage_size_bytes = engine.getObservableGauge(StorageMetric.OPERATION_SIZE_BYTES);
+   const parameter_storage_size_bytes = engine.getObservableGauge(StorageMetric.PARAMETER_SIZE_BYTES);
+
+   const MINIMUM_INTERVAL = 60_000;
+
+   let cachedRequest: Promise<StorageMetrics | null> | undefined = undefined;
+   let cacheTimestamp = 0;
+
+   const getMetrics = () => {
+     if (cachedRequest == null || Date.now() - cacheTimestamp > MINIMUM_INTERVAL) {
+       cachedRequest = storage.getStorageMetrics().catch((e) => {
+         logger.error(`Failed to get storage metrics`, e);
+         return null;
+       });
+       cacheTimestamp = Date.now();
+     }
+     return cachedRequest;
+   };
+
+   replication_storage_size_bytes.setValueProvider(async () => {
+     const metrics = await getMetrics();
+     if (metrics) {
+       return metrics.replication_size_bytes;
+     }
+   });
+
+   operation_storage_size_bytes.setValueProvider(async () => {
+     const metrics = await getMetrics();
+     if (metrics) {
+       return metrics.operations_size_bytes;
+     }
+   });
+
+   parameter_storage_size_bytes.setValueProvider(async () => {
+     const metrics = await getMetrics();
+     if (metrics) {
+       return metrics.parameters_size_bytes;
+     }
+   });
+ }
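
Note: the promise memoization in `getMetrics` means the three gauge providers share a single `getStorageMetrics()` query, refreshed at most once per minute, and concurrent collections await the same in-flight request. The same pattern in isolation (generic sketch, not from the diff):

    // Generic memoized fetcher: at most one fetch per interval; concurrent
    // callers share the in-flight promise. (The code above additionally
    // maps fetch errors to null instead of rethrowing.)
    function cachedFetcher<T>(fetch: () => Promise<T>, intervalMs: number): () => Promise<T> {
      let cached: Promise<T> | undefined;
      let timestamp = 0;
      return () => {
        if (cached == null || Date.now() - timestamp > intervalMs) {
          cached = fetch();
          timestamp = Date.now();
        }
        return cached;
      };
    }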
package/src/sync/BucketChecksumState.ts
@@ -4,11 +4,11 @@ import * as storage from '../storage/storage-index.js';
  import * as util from '../util/util-index.js';

  import { ErrorCode, logger, ServiceAssertionError, ServiceError } from '@powersync/lib-services-framework';
+ import { JSONBig } from '@powersync/service-jsonbig';
  import { BucketParameterQuerier } from '@powersync/service-sync-rules/src/BucketParameterQuerier.js';
  import { BucketSyncState } from './sync.js';
  import { SyncContext } from './SyncContext.js';
- import { JSONBig } from '@powersync/service-jsonbig';
- import { hasIntersection } from './util.js';
+ import { getIntersection, hasIntersection } from './util.js';

  export interface BucketChecksumStateOptions {
    syncContext: SyncContext;
@@ -70,10 +70,9 @@
    const storage = this.bucketStorage;

    const update = await this.parameterState.getCheckpointUpdate(next);
-   if (update == null) {
+   if (update == null && this.lastWriteCheckpoint == writeCheckpoint) {
      return null;
    }
-
    const { buckets: allBuckets, updatedBuckets } = update;

    let dataBucketsNew = new Map<string, BucketSyncState>();
@@ -92,7 +91,7 @@
    }

    let checksumMap: util.ChecksumMap;
-   if (updatedBuckets != null) {
+   if (updatedBuckets != INVALIDATE_ALL_BUCKETS) {
      if (this.lastChecksums == null) {
        throw new ServiceAssertionError(`Bucket diff received without existing checksums`);
      }
@@ -115,9 +114,11 @@
        }
      }

-     let updatedChecksums = await storage.getChecksums(base.checkpoint, checksumLookups);
-     for (let [bucket, value] of updatedChecksums.entries()) {
-       newChecksums.set(bucket, value);
+     if (checksumLookups.length > 0) {
+       let updatedChecksums = await storage.getChecksums(base.checkpoint, checksumLookups);
+       for (let [bucket, value] of updatedChecksums.entries()) {
+         newChecksums.set(bucket, value);
+       }
      }
      checksumMap = newChecksums;
    } else {
@@ -125,6 +126,7 @@
      const bucketList = [...dataBucketsNew.keys()];
      checksumMap = await storage.getChecksums(base.checkpoint, bucketList);
    }
+
    // Subset of buckets for which there may be new data in this batch.
    let bucketsToFetch: BucketDescription[];
@@ -249,6 +251,8 @@
    }
  }

+ const INVALIDATE_ALL_BUCKETS = Symbol('INVALIDATE_ALL_BUCKETS');
+
  export interface CheckpointUpdate {
    /**
     * All buckets forming part of the checkpoint.
@@ -260,7 +264,7 @@
     *
     * If null, assume that any bucket in `buckets` may have been updated.
     */
-   updatedBuckets: Set<string> | null;
+   updatedBuckets: Set<string> | typeof INVALIDATE_ALL_BUCKETS;
  }

  export class BucketParameterState {
@@ -291,19 +295,15 @@
      this.lookups = new Set<string>(this.querier.parameterQueryLookups.map((l) => JSONBig.stringify(l.values)));
    }

-   async getCheckpointUpdate(checkpoint: storage.StorageCheckpointUpdate): Promise<CheckpointUpdate | null> {
+   async getCheckpointUpdate(checkpoint: storage.StorageCheckpointUpdate): Promise<CheckpointUpdate> {
      const querier = this.querier;
-     let update: CheckpointUpdate | null;
+     let update: CheckpointUpdate;
      if (querier.hasDynamicBuckets) {
        update = await this.getCheckpointUpdateDynamic(checkpoint);
      } else {
        update = await this.getCheckpointUpdateStatic(checkpoint);
      }

-     if (update == null) {
-       return null;
-     }
-
      if (update.buckets.length > this.context.maxParameterQueryResults) {
        // TODO: Limit number of results even before we get to this point
        // This limit applies _before_ we get the unique set
@@ -325,32 +325,18 @@
    /**
     * For static buckets, we can keep track of which buckets have been updated.
     */
-   private async getCheckpointUpdateStatic(
-     checkpoint: storage.StorageCheckpointUpdate
-   ): Promise<CheckpointUpdate | null> {
+   private async getCheckpointUpdateStatic(checkpoint: storage.StorageCheckpointUpdate): Promise<CheckpointUpdate> {
      const querier = this.querier;
      const update = checkpoint.update;

      if (update.invalidateDataBuckets) {
        return {
          buckets: querier.staticBuckets,
-         updatedBuckets: null
+         updatedBuckets: INVALIDATE_ALL_BUCKETS
        };
      }

-     let updatedBuckets = new Set<string>();
-
-     for (let bucket of update.updatedDataBuckets ?? []) {
-       if (this.staticBuckets.has(bucket)) {
-         updatedBuckets.add(bucket);
-       }
-     }
-
-     if (updatedBuckets.size == 0) {
-       // No change - skip this checkpoint
-       return null;
-     }
-
+     const updatedBuckets = new Set<string>(getIntersection(this.staticBuckets, update.updatedDataBuckets));
      return {
        buckets: querier.staticBuckets,
        updatedBuckets
@@ -360,9 +346,7 @@
    /**
     * For dynamic buckets, we need to re-query the list of buckets every time.
     */
-   private async getCheckpointUpdateDynamic(
-     checkpoint: storage.StorageCheckpointUpdate
-   ): Promise<CheckpointUpdate | null> {
+   private async getCheckpointUpdateDynamic(checkpoint: storage.StorageCheckpointUpdate): Promise<CheckpointUpdate> {
      const querier = this.querier;
      const storage = this.bucketStorage;
      const staticBuckets = querier.staticBuckets;
@@ -401,11 +385,11 @@
      dynamicBuckets = this.cachedDynamicBuckets;

      if (!invalidateDataBuckets) {
-       // TODO: Do set intersection instead
-       for (let bucket of update.updatedDataBuckets ?? []) {
-         if (this.staticBuckets.has(bucket) || this.cachedDynamicBucketSet.has(bucket)) {
-           updatedBuckets.add(bucket);
-         }
+       for (let bucket of getIntersection(this.staticBuckets, update.updatedDataBuckets)) {
+         updatedBuckets.add(bucket);
+       }
+       for (let bucket of getIntersection(this.cachedDynamicBucketSet, update.updatedDataBuckets)) {
+         updatedBuckets.add(bucket);
        }
      }
    }
@@ -415,7 +399,7 @@
      return {
        buckets: allBuckets,
        // We cannot track individual bucket updates for dynamic lookups yet
-       updatedBuckets: null
+       updatedBuckets: INVALIDATE_ALL_BUCKETS
      };
    } else {
      return {
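
Note: the `getIntersection` helper these hunks call lives in `sync/util.ts`, which this diff also changes (+29 -8, with new tests in `test/src/util.test.ts`) but whose body is not shown here. A plausible implementation consistent with the call sites — yielding the members of an iterable that are also present in a set — might look like this (hypothetical sketch, not the actual source):

    // Hypothetical sketch of getIntersection as used above: yields elements
    // of `values` that are also present in `set`. The real implementation in
    // sync/util.ts is not shown in this diff.
    export function* getIntersection<T>(set: ReadonlySet<T>, values: Iterable<T>): Iterable<T> {
      for (const value of values) {
        if (set.has(value)) {
          yield value;
        }
      }
    }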
package/src/sync/RequestTracker.ts
@@ -1,4 +1,6 @@
- import { Metrics } from '../metrics/Metrics.js';
+ import { MetricsEngine } from '../metrics/MetricsEngine.js';
+
+ import { APIMetric } from '@powersync/service-types';

  /**
   * Record sync stats per request stream.
@@ -7,15 +9,19 @@ export class RequestTracker {
    operationsSynced = 0;
    dataSyncedBytes = 0;

+   constructor(private metrics: MetricsEngine) {
+     this.metrics = metrics;
+   }
+
    addOperationsSynced(operations: number) {
      this.operationsSynced += operations;

-     Metrics.getInstance().operations_synced_total.add(operations);
+     this.metrics.getCounter(APIMetric.OPERATIONS_SYNCED).add(operations);
    }

    addDataSynced(bytes: number) {
      this.dataSyncedBytes += bytes;

-     Metrics.getInstance().data_synced_bytes.add(bytes);
+     this.metrics.getCounter(APIMetric.DATA_SYNCED_BYTES).add(bytes);
    }
  }
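
Note: RequestTracker now requires the MetricsEngine to be injected instead of reaching for the removed singleton. A usage sketch, with `sync` and `metricsEngine` taken from the route handlers shown above (the surrounding values are illustrative):

    // Illustrative: per-stream accounting flows through the injected engine.
    const tracker = new sync.RequestTracker(metricsEngine);
    tracker.addOperationsSynced(10); // also increments APIMetric.OPERATIONS_SYNCED
    tracker.addDataSynced(2048);     // also increments APIMetric.DATA_SYNCED_BYTES
    console.log(tracker.operationsSynced, tracker.dataSyncedBytes); // 10 2048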