@powersync/service-core 0.0.0-dev-20250304151813 → 0.0.0-dev-20250306152715
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -4
- package/dist/api/api-index.d.ts +1 -0
- package/dist/api/api-index.js +1 -0
- package/dist/api/api-index.js.map +1 -1
- package/dist/api/api-metrics.d.ts +11 -0
- package/dist/api/api-metrics.js +30 -0
- package/dist/api/api-metrics.js.map +1 -0
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/metrics/MetricsEngine.d.ts +21 -0
- package/dist/metrics/MetricsEngine.js +79 -0
- package/dist/metrics/MetricsEngine.js.map +1 -0
- package/dist/metrics/metrics-index.d.ts +4 -0
- package/dist/metrics/metrics-index.js +5 -0
- package/dist/metrics/metrics-index.js.map +1 -0
- package/dist/metrics/metrics-interfaces.d.ts +36 -0
- package/dist/metrics/metrics-interfaces.js +6 -0
- package/dist/metrics/metrics-interfaces.js.map +1 -0
- package/dist/metrics/open-telemetry/OpenTelemetryMetricsFactory.d.ts +10 -0
- package/dist/metrics/open-telemetry/OpenTelemetryMetricsFactory.js +51 -0
- package/dist/metrics/open-telemetry/OpenTelemetryMetricsFactory.js.map +1 -0
- package/dist/metrics/open-telemetry/util.d.ts +6 -0
- package/dist/metrics/open-telemetry/util.js +56 -0
- package/dist/metrics/open-telemetry/util.js.map +1 -0
- package/dist/replication/AbstractReplicationJob.d.ts +2 -0
- package/dist/replication/AbstractReplicationJob.js.map +1 -1
- package/dist/replication/AbstractReplicator.d.ts +3 -0
- package/dist/replication/AbstractReplicator.js +3 -0
- package/dist/replication/AbstractReplicator.js.map +1 -1
- package/dist/replication/ReplicationModule.d.ts +7 -0
- package/dist/replication/ReplicationModule.js +1 -0
- package/dist/replication/ReplicationModule.js.map +1 -1
- package/dist/replication/replication-index.d.ts +1 -0
- package/dist/replication/replication-index.js +1 -0
- package/dist/replication/replication-index.js.map +1 -1
- package/dist/replication/replication-metrics.d.ts +11 -0
- package/dist/replication/replication-metrics.js +39 -0
- package/dist/replication/replication-metrics.js.map +1 -0
- package/dist/routes/configure-fastify.d.ts +3 -3
- package/dist/routes/endpoints/checkpointing.d.ts +6 -6
- package/dist/routes/endpoints/socket-route.js +5 -5
- package/dist/routes/endpoints/socket-route.js.map +1 -1
- package/dist/routes/endpoints/sync-stream.js +6 -6
- package/dist/routes/endpoints/sync-stream.js.map +1 -1
- package/dist/storage/BucketStorageBatch.d.ts +2 -1
- package/dist/storage/BucketStorageBatch.js.map +1 -1
- package/dist/storage/ChecksumCache.d.ts +6 -6
- package/dist/storage/ChecksumCache.js +5 -6
- package/dist/storage/ChecksumCache.js.map +1 -1
- package/dist/storage/SyncRulesBucketStorage.d.ts +9 -9
- package/dist/storage/storage-index.d.ts +1 -0
- package/dist/storage/storage-index.js +1 -0
- package/dist/storage/storage-index.js.map +1 -1
- package/dist/storage/storage-metrics.d.ts +4 -0
- package/dist/storage/storage-metrics.js +56 -0
- package/dist/storage/storage-metrics.js.map +1 -0
- package/dist/sync/BucketChecksumState.d.ts +3 -3
- package/dist/sync/BucketChecksumState.js +3 -3
- package/dist/sync/BucketChecksumState.js.map +1 -1
- package/dist/sync/RequestTracker.d.ts +3 -0
- package/dist/sync/RequestTracker.js +8 -3
- package/dist/sync/RequestTracker.js.map +1 -1
- package/dist/sync/sync.d.ts +1 -1
- package/dist/sync/sync.js +8 -6
- package/dist/sync/sync.js.map +1 -1
- package/dist/system/ServiceContext.d.ts +3 -3
- package/dist/system/ServiceContext.js +7 -3
- package/dist/system/ServiceContext.js.map +1 -1
- package/dist/util/protocol-types.d.ts +10 -10
- package/dist/util/utils.d.ts +12 -2
- package/dist/util/utils.js +5 -1
- package/dist/util/utils.js.map +1 -1
- package/package.json +8 -8
- package/src/api/api-index.ts +1 -0
- package/src/api/api-metrics.ts +35 -0
- package/src/index.ts +2 -2
- package/src/metrics/MetricsEngine.ts +98 -0
- package/src/metrics/metrics-index.ts +4 -0
- package/src/metrics/metrics-interfaces.ts +41 -0
- package/src/metrics/open-telemetry/OpenTelemetryMetricsFactory.ts +66 -0
- package/src/metrics/open-telemetry/util.ts +74 -0
- package/src/replication/AbstractReplicationJob.ts +2 -0
- package/src/replication/AbstractReplicator.ts +7 -0
- package/src/replication/ReplicationModule.ts +10 -0
- package/src/replication/replication-index.ts +1 -0
- package/src/replication/replication-metrics.ts +45 -0
- package/src/routes/endpoints/socket-route.ts +6 -5
- package/src/routes/endpoints/sync-stream.ts +7 -6
- package/src/storage/BucketStorageBatch.ts +2 -1
- package/src/storage/ChecksumCache.ts +13 -14
- package/src/storage/SyncRulesBucketStorage.ts +10 -10
- package/src/storage/storage-index.ts +1 -0
- package/src/storage/storage-metrics.ts +67 -0
- package/src/sync/BucketChecksumState.ts +7 -7
- package/src/sync/RequestTracker.ts +9 -3
- package/src/sync/sync.ts +10 -8
- package/src/system/ServiceContext.ts +9 -4
- package/src/util/protocol-types.ts +10 -10
- package/src/util/utils.ts +13 -2
- package/test/src/checksum_cache.test.ts +83 -84
- package/test/src/sync/BucketChecksumState.test.ts +47 -41
- package/tsconfig.tsbuildinfo +1 -1
- package/dist/metrics/Metrics.d.ts +0 -30
- package/dist/metrics/Metrics.js +0 -202
- package/dist/metrics/Metrics.js.map +0 -1
- package/src/metrics/Metrics.ts +0 -255
|
/**
 * A monotonically increasing counter metric.
 */
export interface Counter {
  /**
   * Increment the counter by the given value. Only positive numbers are valid.
   * @param value
   */
  add(value: number): void;
}

/**
 * A counter metric that may both increase and decrease, e.g. for tracking
 * concurrent connections.
 */
export interface UpDownCounter {
  /**
   * Increment or decrement (if negative) the counter by the given value.
   * @param value
   */
  add(value: number): void;
}

/**
 * A gauge metric whose value is pulled from a provider at observation time,
 * rather than pushed by the application.
 */
export interface ObservableGauge {
  /**
   * Set a value provider that provides the value for the gauge at the time of
   * observation. A provider resolving to `undefined` indicates that no value
   * is available for that observation.
   * @param valueProvider
   */
  setValueProvider(valueProvider: () => Promise<number | undefined>): void;
}

/**
 * Numeric precision for a metric's recorded values.
 */
export enum Precision {
  INT = 'int',
  DOUBLE = 'double'
}

/**
 * Descriptive metadata used when creating a metric instrument.
 */
export interface MetricMetadata {
  // Unique instrument name.
  name: string;
  // Human-readable description.
  description?: string;
  // Unit label, e.g. 'bytes'.
  unit?: string;
  // Value precision; implementations choose a default when omitted.
  precision?: Precision;
}

/**
 * Abstract factory for creating metric instruments, decoupling metric
 * producers from the concrete backend (e.g. OpenTelemetry).
 */
export interface MetricsFactory {
  createCounter(metadata: MetricMetadata): Counter;
  createUpDownCounter(metadata: MetricMetadata): UpDownCounter;
  createObservableGauge(metadata: MetricMetadata): ObservableGauge;
}
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { Meter, ValueType } from '@opentelemetry/api';
|
|
2
|
+
import {
|
|
3
|
+
Counter,
|
|
4
|
+
ObservableGauge,
|
|
5
|
+
UpDownCounter,
|
|
6
|
+
MetricMetadata,
|
|
7
|
+
MetricsFactory,
|
|
8
|
+
Precision
|
|
9
|
+
} from '../metrics-interfaces.js';
|
|
10
|
+
|
|
11
|
+
export class OpenTelemetryMetricsFactory implements MetricsFactory {
|
|
12
|
+
private meter: Meter;
|
|
13
|
+
|
|
14
|
+
constructor(meter: Meter) {
|
|
15
|
+
this.meter = meter;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
createCounter(metadata: MetricMetadata): Counter {
|
|
19
|
+
return this.meter.createCounter(metadata.name, {
|
|
20
|
+
description: metadata.description,
|
|
21
|
+
unit: metadata.unit,
|
|
22
|
+
valueType: this.toValueType(metadata.precision)
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
createObservableGauge(metadata: MetricMetadata): ObservableGauge {
|
|
27
|
+
const gauge = this.meter.createObservableGauge(metadata.name, {
|
|
28
|
+
description: metadata.description,
|
|
29
|
+
unit: metadata.unit,
|
|
30
|
+
valueType: this.toValueType(metadata.precision)
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
return {
|
|
34
|
+
setValueProvider(valueProvider: () => Promise<number | undefined>) {
|
|
35
|
+
gauge.addCallback(async (result) => {
|
|
36
|
+
const value = await valueProvider();
|
|
37
|
+
|
|
38
|
+
if (value) {
|
|
39
|
+
result.observe(value);
|
|
40
|
+
}
|
|
41
|
+
});
|
|
42
|
+
}
|
|
43
|
+
};
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
createUpDownCounter(metadata: MetricMetadata): UpDownCounter {
|
|
47
|
+
return this.meter.createUpDownCounter(metadata.name, {
|
|
48
|
+
description: metadata.description,
|
|
49
|
+
unit: metadata.unit,
|
|
50
|
+
valueType: this.toValueType(metadata.precision)
|
|
51
|
+
});
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
private toValueType(precision?: Precision): ValueType {
|
|
55
|
+
if (!precision) {
|
|
56
|
+
return ValueType.INT;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
switch (precision) {
|
|
60
|
+
case Precision.INT:
|
|
61
|
+
return ValueType.INT;
|
|
62
|
+
case Precision.DOUBLE:
|
|
63
|
+
return ValueType.DOUBLE;
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
}
|
|
import { MeterProvider, MetricReader, PeriodicExportingMetricReader } from '@opentelemetry/sdk-metrics';
import { env } from '../../util/env.js';
import { PrometheusExporter } from '@opentelemetry/exporter-prometheus';
import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-http';
import { Resource } from '@opentelemetry/resources';
import { ServiceContext } from '../../system/ServiceContext.js';
import { OpenTelemetryMetricsFactory } from './OpenTelemetryMetricsFactory.js';
import { MetricsFactory } from '../metrics-interfaces.js';

/**
 * Free-form key/value attributes resolved at runtime and attached to the
 * OpenTelemetry Resource.
 */
export interface RuntimeMetadata {
  [key: string]: string | number | undefined;
}

/**
 * Build an OpenTelemetry-backed MetricsFactory for the service.
 *
 * Configures up to two metric readers:
 *  - a Prometheus scrape endpoint when METRICS_PORT is set (server is started
 *    via the lifecycle engine, not immediately);
 *  - a periodic OTLP exporter when telemetry sharing is not disabled.
 *
 * The Resource's `instance_id` attribute is only known once storage is
 * available, so it is supplied as a Promise that is resolved during startup.
 * @param context service context providing configuration, lifecycle and storage
 */
export function createOpenTelemetryMetricsFactory(context: ServiceContext): MetricsFactory {
  const { configuration, lifeCycleEngine, storageEngine } = context;
  const configuredExporters: MetricReader[] = [];

  if (env.METRICS_PORT) {
    // preventServerStart: the scrape server is started by the lifecycle
    // engine below, so metrics are not exposed before startup completes.
    const prometheusExporter = new PrometheusExporter({ port: env.METRICS_PORT, preventServerStart: true });
    configuredExporters.push(prometheusExporter);

    lifeCycleEngine.withLifecycle(prometheusExporter, {
      start: () => prometheusExporter.startServer()
    });
  }

  if (!configuration.telemetry.disable_telemetry_sharing) {
    const periodicExporter = new PeriodicExportingMetricReader({
      exporter: new OTLPMetricExporter({
        url: configuration.telemetry.internal_service_endpoint
      }),
      exportIntervalMillis: 1000 * 60 * 5 // 5 minutes
    });

    configuredExporters.push(periodicExporter);
  }

  // Deferred runtime metadata: the resolver is captured synchronously by the
  // Promise executor and invoked once during lifecycle start below.
  let resolvedMetadata: (metadata: RuntimeMetadata) => void;
  const runtimeMetadata: Promise<RuntimeMetadata> = new Promise((resolve) => {
    resolvedMetadata = resolve;
  });

  lifeCycleEngine.withLifecycle(null, {
    start: async () => {
      const bucketStorage = storageEngine.activeBucketStorage;
      try {
        const instanceId = await bucketStorage.getPowerSyncInstanceId();
        resolvedMetadata({ ['instance_id']: instanceId });
      } catch (err) {
        // Metrics must still export even if the instance id lookup fails.
        resolvedMetadata({ ['instance_id']: 'Unknown' });
      }
    }
  });

  const meterProvider = new MeterProvider({
    // Second argument supplies async attributes that are merged into the
    // resource once the promise resolves (the instance_id above).
    resource: new Resource(
      {
        ['service']: 'PowerSync'
      },
      runtimeMetadata
    ),
    readers: configuredExporters
  });

  // Flush and stop all readers on shutdown.
  lifeCycleEngine.withLifecycle(meterProvider, {
    stop: async () => {
      await meterProvider.shutdown();
    }
  });

  const meter = meterProvider.getMeter('powersync');

  return new OpenTelemetryMetricsFactory(meter);
}
|
@@ -2,10 +2,12 @@ import { container, logger } from '@powersync/lib-services-framework';
|
|
|
2
2
|
import winston from 'winston';
|
|
3
3
|
import * as storage from '../storage/storage-index.js';
|
|
4
4
|
import { ErrorRateLimiter } from './ErrorRateLimiter.js';
|
|
5
|
+
import { MetricsEngine } from '../metrics/MetricsEngine.js';
|
|
5
6
|
|
|
6
7
|
export interface AbstractReplicationJobOptions {
|
|
7
8
|
id: string;
|
|
8
9
|
storage: storage.SyncRulesBucketStorage;
|
|
10
|
+
metrics: MetricsEngine;
|
|
9
11
|
lock: storage.ReplicationLock;
|
|
10
12
|
rateLimiter: ErrorRateLimiter;
|
|
11
13
|
}
|
|
@@ -7,6 +7,7 @@ import { SyncRulesProvider } from '../util/config/sync-rules/sync-rules-provider
|
|
|
7
7
|
import { AbstractReplicationJob } from './AbstractReplicationJob.js';
|
|
8
8
|
import { ErrorRateLimiter } from './ErrorRateLimiter.js';
|
|
9
9
|
import { ConnectionTestResult } from './ReplicationModule.js';
|
|
10
|
+
import { MetricsEngine } from '../metrics/MetricsEngine.js';
|
|
10
11
|
|
|
11
12
|
// 5 minutes
|
|
12
13
|
const PING_INTERVAL = 1_000_000_000n * 300n;
|
|
@@ -19,6 +20,7 @@ export interface CreateJobOptions {
|
|
|
19
20
|
export interface AbstractReplicatorOptions {
|
|
20
21
|
id: string;
|
|
21
22
|
storageEngine: StorageEngine;
|
|
23
|
+
metricsEngine: MetricsEngine;
|
|
22
24
|
syncRuleProvider: SyncRulesProvider;
|
|
23
25
|
/**
|
|
24
26
|
* This limits the effect of retries when there is a persistent issue.
|
|
@@ -33,6 +35,7 @@ export interface AbstractReplicatorOptions {
|
|
|
33
35
|
*/
|
|
34
36
|
export abstract class AbstractReplicator<T extends AbstractReplicationJob = AbstractReplicationJob> {
|
|
35
37
|
protected logger: winston.Logger;
|
|
38
|
+
|
|
36
39
|
/**
|
|
37
40
|
* Map of replication jobs by sync rule id. Usually there is only one running job, but there could be two when
|
|
38
41
|
* transitioning to a new set of sync rules.
|
|
@@ -72,6 +75,10 @@ export abstract class AbstractReplicator<T extends AbstractReplicationJob = Abst
|
|
|
72
75
|
return this.options.rateLimiter;
|
|
73
76
|
}
|
|
74
77
|
|
|
78
|
+
protected get metrics() {
|
|
79
|
+
return this.options.metricsEngine;
|
|
80
|
+
}
|
|
81
|
+
|
|
75
82
|
public async start(): Promise<void> {
|
|
76
83
|
this.runLoop().catch((e) => {
|
|
77
84
|
this.logger.error('Data source fatal replication error', e);
|
|
@@ -64,6 +64,14 @@ export abstract class ReplicationModule<TConfig extends DataSourceConfig>
|
|
|
64
64
|
*/
|
|
65
65
|
protected abstract createReplicator(context: system.ServiceContext): AbstractReplicator;
|
|
66
66
|
|
|
67
|
+
/**
|
|
68
|
+
* Any additional initialization specific to the module should be added here. Will be called if necessary after the
|
|
69
|
+
* main initialization has been completed
|
|
70
|
+
* @param context
|
|
71
|
+
* @protected
|
|
72
|
+
*/
|
|
73
|
+
protected abstract onInitialized(context: system.ServiceContext): Promise<void>;
|
|
74
|
+
|
|
67
75
|
public abstract testConnection(config: TConfig): Promise<ConnectionTestResult>;
|
|
68
76
|
|
|
69
77
|
/**
|
|
@@ -93,6 +101,8 @@ export abstract class ReplicationModule<TConfig extends DataSourceConfig>
|
|
|
93
101
|
|
|
94
102
|
context.replicationEngine?.register(this.createReplicator(context));
|
|
95
103
|
context.routerEngine?.registerAPI(this.createRouteAPIAdapter());
|
|
104
|
+
|
|
105
|
+
await this.onInitialized(context);
|
|
96
106
|
}
|
|
97
107
|
|
|
98
108
|
protected decodeConfig(config: TConfig): void {
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { MetricsEngine } from '../metrics/metrics-index.js';
|
|
2
|
+
import { ReplicationMetric } from '@powersync/service-types';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Create and register the core replication metrics.
|
|
6
|
+
* @param engine
|
|
7
|
+
*/
|
|
8
|
+
export function createCoreReplicationMetrics(engine: MetricsEngine): void {
|
|
9
|
+
engine.createCounter({
|
|
10
|
+
name: ReplicationMetric.DATA_REPLICATED_BYTES,
|
|
11
|
+
description: 'Uncompressed size of replicated data',
|
|
12
|
+
unit: 'bytes'
|
|
13
|
+
});
|
|
14
|
+
|
|
15
|
+
engine.createCounter({
|
|
16
|
+
name: ReplicationMetric.ROWS_REPLICATED_TOTAL,
|
|
17
|
+
description: 'Total number of replicated rows'
|
|
18
|
+
});
|
|
19
|
+
|
|
20
|
+
engine.createCounter({
|
|
21
|
+
name: ReplicationMetric.TRANSACTIONS_REPLICATED_TOTAL,
|
|
22
|
+
description: 'Total number of replicated transactions'
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
engine.createCounter({
|
|
26
|
+
name: ReplicationMetric.CHUNKS_REPLICATED_TOTAL,
|
|
27
|
+
description: 'Total number of replication chunks'
|
|
28
|
+
});
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Initialise the core replication metrics. This should be called after the metrics have been created.
|
|
33
|
+
* @param engine
|
|
34
|
+
*/
|
|
35
|
+
export function initializeCoreReplicationMetrics(engine: MetricsEngine): void {
|
|
36
|
+
const data_replicated_bytes = engine.getCounter(ReplicationMetric.DATA_REPLICATED_BYTES);
|
|
37
|
+
const rows_replicated_total = engine.getCounter(ReplicationMetric.ROWS_REPLICATED_TOTAL);
|
|
38
|
+
const transactions_replicated_total = engine.getCounter(ReplicationMetric.TRANSACTIONS_REPLICATED_TOTAL);
|
|
39
|
+
const chunks_replicated_total = engine.getCounter(ReplicationMetric.CHUNKS_REPLICATED_TOTAL);
|
|
40
|
+
|
|
41
|
+
data_replicated_bytes.add(0);
|
|
42
|
+
rows_replicated_total.add(0);
|
|
43
|
+
transactions_replicated_total.add(0);
|
|
44
|
+
chunks_replicated_total.add(0);
|
|
45
|
+
}
|
|
@@ -2,18 +2,19 @@ import { ErrorCode, errors, logger, schema } from '@powersync/lib-services-frame
|
|
|
2
2
|
import { RequestParameters } from '@powersync/service-sync-rules';
|
|
3
3
|
import { serialize } from 'bson';
|
|
4
4
|
|
|
5
|
-
import { Metrics } from '../../metrics/Metrics.js';
|
|
6
5
|
import * as sync from '../../sync/sync-index.js';
|
|
7
6
|
import * as util from '../../util/util-index.js';
|
|
8
7
|
import { SocketRouteGenerator } from '../router-socket.js';
|
|
9
8
|
import { SyncRoutes } from './sync-stream.js';
|
|
10
9
|
|
|
10
|
+
import { APIMetric } from '@powersync/service-types';
|
|
11
|
+
|
|
11
12
|
export const syncStreamReactive: SocketRouteGenerator = (router) =>
|
|
12
13
|
router.reactiveStream<util.StreamingSyncRequest, any>(SyncRoutes.STREAM, {
|
|
13
14
|
validator: schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }),
|
|
14
15
|
handler: async ({ context, params, responder, observer, initialN, signal: upstreamSignal }) => {
|
|
15
16
|
const { service_context } = context;
|
|
16
|
-
const { routerEngine, syncContext } = service_context;
|
|
17
|
+
const { routerEngine, metricsEngine, syncContext } = service_context;
|
|
17
18
|
|
|
18
19
|
// Create our own controller that we can abort directly
|
|
19
20
|
const controller = new AbortController();
|
|
@@ -69,8 +70,8 @@ export const syncStreamReactive: SocketRouteGenerator = (router) =>
|
|
|
69
70
|
controller.abort();
|
|
70
71
|
});
|
|
71
72
|
|
|
72
|
-
|
|
73
|
-
const tracker = new sync.RequestTracker();
|
|
73
|
+
metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(1);
|
|
74
|
+
const tracker = new sync.RequestTracker(metricsEngine);
|
|
74
75
|
try {
|
|
75
76
|
for await (const data of sync.streamResponse({
|
|
76
77
|
syncContext: syncContext,
|
|
@@ -147,7 +148,7 @@ export const syncStreamReactive: SocketRouteGenerator = (router) =>
|
|
|
147
148
|
operations_synced: tracker.operationsSynced,
|
|
148
149
|
data_synced_bytes: tracker.dataSyncedBytes
|
|
149
150
|
});
|
|
150
|
-
|
|
151
|
+
metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(-1);
|
|
151
152
|
}
|
|
152
153
|
}
|
|
153
154
|
});
|
|
@@ -5,10 +5,11 @@ import { Readable } from 'stream';
|
|
|
5
5
|
import * as sync from '../../sync/sync-index.js';
|
|
6
6
|
import * as util from '../../util/util-index.js';
|
|
7
7
|
|
|
8
|
-
import { Metrics } from '../../metrics/Metrics.js';
|
|
9
8
|
import { authUser } from '../auth.js';
|
|
10
9
|
import { routeDefinition } from '../router.js';
|
|
11
10
|
|
|
11
|
+
import { APIMetric } from '@powersync/service-types';
|
|
12
|
+
|
|
12
13
|
export enum SyncRoutes {
|
|
13
14
|
STREAM = '/sync/stream'
|
|
14
15
|
}
|
|
@@ -20,7 +21,7 @@ export const syncStreamed = routeDefinition({
|
|
|
20
21
|
validator: schema.createTsCodecValidator(util.StreamingSyncRequest, { allowAdditional: true }),
|
|
21
22
|
handler: async (payload) => {
|
|
22
23
|
const { service_context } = payload.context;
|
|
23
|
-
const { routerEngine, storageEngine, syncContext } = service_context;
|
|
24
|
+
const { routerEngine, storageEngine, metricsEngine, syncContext } = service_context;
|
|
24
25
|
const headers = payload.request.headers;
|
|
25
26
|
const userAgent = headers['x-user-agent'] ?? headers['user-agent'];
|
|
26
27
|
const clientId = payload.params.client_id;
|
|
@@ -49,9 +50,9 @@ export const syncStreamed = routeDefinition({
|
|
|
49
50
|
const syncRules = bucketStorage.getParsedSyncRules(routerEngine!.getAPI().getParseSyncRulesOptions());
|
|
50
51
|
|
|
51
52
|
const controller = new AbortController();
|
|
52
|
-
const tracker = new sync.RequestTracker();
|
|
53
|
+
const tracker = new sync.RequestTracker(metricsEngine);
|
|
53
54
|
try {
|
|
54
|
-
|
|
55
|
+
metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(1);
|
|
55
56
|
const stream = Readable.from(
|
|
56
57
|
sync.transformToBytesTracked(
|
|
57
58
|
sync.ndjson(
|
|
@@ -96,7 +97,7 @@ export const syncStreamed = routeDefinition({
|
|
|
96
97
|
data: stream,
|
|
97
98
|
afterSend: async () => {
|
|
98
99
|
controller.abort();
|
|
99
|
-
|
|
100
|
+
metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(-1);
|
|
100
101
|
logger.info(`Sync stream complete`, {
|
|
101
102
|
user_id: syncParams.user_id,
|
|
102
103
|
client_id: clientId,
|
|
@@ -108,7 +109,7 @@ export const syncStreamed = routeDefinition({
|
|
|
108
109
|
});
|
|
109
110
|
} catch (ex) {
|
|
110
111
|
controller.abort();
|
|
111
|
-
|
|
112
|
+
metricsEngine.getUpDownCounter(APIMetric.CONCURRENT_CONNECTIONS).add(-1);
|
|
112
113
|
}
|
|
113
114
|
}
|
|
114
115
|
});
|
|
@@ -4,6 +4,7 @@ import { BSON } from 'bson';
|
|
|
4
4
|
import { ReplicationEventPayload } from './ReplicationEventPayload.js';
|
|
5
5
|
import { SourceTable } from './SourceTable.js';
|
|
6
6
|
import { BatchedCustomWriteCheckpointOptions } from './storage-index.js';
|
|
7
|
+
import { InternalOpId } from '../util/utils.js';
|
|
7
8
|
|
|
8
9
|
export const DEFAULT_BUCKET_BATCH_COMMIT_OPTIONS: ResolvedBucketBatchCommitOptions = {
|
|
9
10
|
createEmptyCheckpoints: true
|
|
@@ -144,7 +145,7 @@ export interface BucketBatchStorageListener {
|
|
|
144
145
|
}
|
|
145
146
|
|
|
146
147
|
export interface FlushedResult {
|
|
147
|
-
flushed_op:
|
|
148
|
+
flushed_op: InternalOpId;
|
|
148
149
|
}
|
|
149
150
|
|
|
150
151
|
export interface BucketBatchCommitOptions {
|
|
@@ -1,12 +1,11 @@
|
|
|
1
|
-
import { BucketChecksum, OpId } from '../util/protocol-types.js';
|
|
2
|
-
import { ChecksumMap, addBucketChecksums } from '../util/utils.js';
|
|
3
|
-
import { LRUCache } from 'lru-cache/min';
|
|
4
1
|
import { OrderedSet } from '@js-sdsl/ordered-set';
|
|
5
|
-
import {
|
|
2
|
+
import { LRUCache } from 'lru-cache/min';
|
|
3
|
+
import { BucketChecksum } from '../util/protocol-types.js';
|
|
4
|
+
import { addBucketChecksums, ChecksumMap, InternalOpId } from '../util/utils.js';
|
|
6
5
|
|
|
7
6
|
interface ChecksumFetchContext {
|
|
8
7
|
fetch(bucket: string): Promise<BucketChecksum>;
|
|
9
|
-
checkpoint:
|
|
8
|
+
checkpoint: InternalOpId;
|
|
10
9
|
}
|
|
11
10
|
|
|
12
11
|
export interface PartialChecksum {
|
|
@@ -28,10 +27,11 @@ export interface PartialChecksum {
|
|
|
28
27
|
*/
|
|
29
28
|
isFullChecksum: boolean;
|
|
30
29
|
}
|
|
30
|
+
|
|
31
31
|
export interface FetchPartialBucketChecksum {
|
|
32
32
|
bucket: string;
|
|
33
|
-
start?:
|
|
34
|
-
end:
|
|
33
|
+
start?: InternalOpId;
|
|
34
|
+
end: InternalOpId;
|
|
35
35
|
}
|
|
36
36
|
|
|
37
37
|
export type PartialChecksumMap = Map<string, PartialChecksum>;
|
|
@@ -101,8 +101,7 @@ export class ChecksumCache {
|
|
|
101
101
|
|
|
102
102
|
dispose: (value, key) => {
|
|
103
103
|
// Remove from the set of cached checkpoints for the bucket
|
|
104
|
-
const {
|
|
105
|
-
const checkpoint = BigInt(checkpointString);
|
|
104
|
+
const { checkpoint } = parseCacheKey(key);
|
|
106
105
|
const checkpointSet = this.bucketCheckpoints.get(value.bucket);
|
|
107
106
|
if (checkpointSet == null) {
|
|
108
107
|
return;
|
|
@@ -128,7 +127,7 @@ export class ChecksumCache {
|
|
|
128
127
|
});
|
|
129
128
|
}
|
|
130
129
|
|
|
131
|
-
async getChecksums(checkpoint:
|
|
130
|
+
async getChecksums(checkpoint: InternalOpId, buckets: string[]): Promise<BucketChecksum[]> {
|
|
132
131
|
const checksums = await this.getChecksumMap(checkpoint, buckets);
|
|
133
132
|
// Return results in the same order as the request
|
|
134
133
|
return buckets.map((bucket) => checksums.get(bucket)!);
|
|
@@ -141,7 +140,7 @@ export class ChecksumCache {
|
|
|
141
140
|
*
|
|
142
141
|
* @returns a Map with exactly one entry for each bucket requested
|
|
143
142
|
*/
|
|
144
|
-
async getChecksumMap(checkpoint:
|
|
143
|
+
async getChecksumMap(checkpoint: InternalOpId, buckets: string[]): Promise<ChecksumMap> {
|
|
145
144
|
// Buckets that don't have a cached checksum for this checkpoint yet
|
|
146
145
|
let toFetch = new Set<string>();
|
|
147
146
|
|
|
@@ -235,7 +234,7 @@ export class ChecksumCache {
|
|
|
235
234
|
// Partial checksum found - make a partial checksum request
|
|
236
235
|
bucketRequest = {
|
|
237
236
|
bucket,
|
|
238
|
-
start: cp
|
|
237
|
+
start: cp,
|
|
239
238
|
end: checkpoint
|
|
240
239
|
};
|
|
241
240
|
add.set(bucket, cached);
|
|
@@ -315,11 +314,11 @@ export class ChecksumCache {
|
|
|
315
314
|
}
|
|
316
315
|
}
|
|
317
316
|
|
|
318
|
-
function makeCacheKey(checkpoint:
|
|
317
|
+
function makeCacheKey(checkpoint: InternalOpId | string, bucket: string) {
|
|
319
318
|
return `${checkpoint}/${bucket}`;
|
|
320
319
|
}
|
|
321
320
|
|
|
322
321
|
function parseCacheKey(key: string) {
|
|
323
322
|
const index = key.indexOf('/');
|
|
324
|
-
return {
|
|
323
|
+
return { checkpoint: BigInt(key.substring(0, index)), bucket: key.substring(index + 1) };
|
|
325
324
|
}
|
|
@@ -71,7 +71,7 @@ export interface SyncRulesBucketStorage
|
|
|
71
71
|
/**
|
|
72
72
|
* Used to resolve "dynamic" parameter queries.
|
|
73
73
|
*/
|
|
74
|
-
getParameterSets(checkpoint: util.
|
|
74
|
+
getParameterSets(checkpoint: util.InternalOpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]>;
|
|
75
75
|
|
|
76
76
|
getCheckpointChanges(options: GetCheckpointChangesOptions): Promise<CheckpointChanges>;
|
|
77
77
|
|
|
@@ -94,8 +94,8 @@ export interface SyncRulesBucketStorage
|
|
|
94
94
|
* @param options batch size options
|
|
95
95
|
*/
|
|
96
96
|
getBucketDataBatch(
|
|
97
|
-
checkpoint: util.
|
|
98
|
-
dataBuckets: Map<string,
|
|
97
|
+
checkpoint: util.InternalOpId,
|
|
98
|
+
dataBuckets: Map<string, util.InternalOpId>,
|
|
99
99
|
options?: BucketDataBatchOptions
|
|
100
100
|
): AsyncIterable<SyncBucketDataBatch>;
|
|
101
101
|
|
|
@@ -104,7 +104,7 @@ export interface SyncRulesBucketStorage
|
|
|
104
104
|
*
|
|
105
105
|
* Returns zero checksums for any buckets not found.
|
|
106
106
|
*/
|
|
107
|
-
getChecksums(checkpoint: util.
|
|
107
|
+
getChecksums(checkpoint: util.InternalOpId, buckets: string[]): Promise<util.ChecksumMap>;
|
|
108
108
|
}
|
|
109
109
|
|
|
110
110
|
export interface SyncRulesBucketStorageListener {
|
|
@@ -169,7 +169,7 @@ export interface CompactOptions {
|
|
|
169
169
|
* This can also be used to create a "safe buffer" of recent operations that should
|
|
170
170
|
* not be compacted, to avoid invalidating checkpoints in use.
|
|
171
171
|
*/
|
|
172
|
-
maxOpId?:
|
|
172
|
+
maxOpId?: util.InternalOpId;
|
|
173
173
|
|
|
174
174
|
/**
|
|
175
175
|
* If specified, compact only the specific buckets.
|
|
@@ -215,11 +215,11 @@ export interface BucketDataBatchOptions {
|
|
|
215
215
|
|
|
216
216
|
export interface SyncBucketDataBatch {
|
|
217
217
|
batch: util.SyncBucketData;
|
|
218
|
-
targetOp:
|
|
218
|
+
targetOp: util.InternalOpId | null;
|
|
219
219
|
}
|
|
220
220
|
|
|
221
221
|
export interface ReplicationCheckpoint {
|
|
222
|
-
readonly checkpoint: util.
|
|
222
|
+
readonly checkpoint: util.InternalOpId;
|
|
223
223
|
readonly lsn: string | null;
|
|
224
224
|
}
|
|
225
225
|
|
|
@@ -238,7 +238,7 @@ export interface WatchFilterEvent {
|
|
|
238
238
|
|
|
239
239
|
export interface WriteCheckpoint {
|
|
240
240
|
base: ReplicationCheckpoint;
|
|
241
|
-
writeCheckpoint:
|
|
241
|
+
writeCheckpoint: util.InternalOpId | null;
|
|
242
242
|
}
|
|
243
243
|
|
|
244
244
|
export interface StorageCheckpointUpdate extends WriteCheckpoint {
|
|
@@ -246,8 +246,8 @@ export interface StorageCheckpointUpdate extends WriteCheckpoint {
|
|
|
246
246
|
}
|
|
247
247
|
|
|
248
248
|
export interface GetCheckpointChangesOptions {
|
|
249
|
-
lastCheckpoint: util.
|
|
250
|
-
nextCheckpoint: util.
|
|
249
|
+
lastCheckpoint: util.InternalOpId;
|
|
250
|
+
nextCheckpoint: util.InternalOpId;
|
|
251
251
|
}
|
|
252
252
|
|
|
253
253
|
export interface CheckpointChanges {
|
|
@@ -6,6 +6,7 @@ export * from './SourceEntity.js';
|
|
|
6
6
|
export * from './SourceTable.js';
|
|
7
7
|
export * from './StorageEngine.js';
|
|
8
8
|
export * from './StorageProvider.js';
|
|
9
|
+
export * from './storage-metrics.js';
|
|
9
10
|
export * from './WriteCheckpointAPI.js';
|
|
10
11
|
export * from './BucketStorageFactory.js';
|
|
11
12
|
export * from './BucketStorageBatch.js';
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import { MetricsEngine } from '../metrics/MetricsEngine.js';
|
|
2
|
+
import { logger } from '@powersync/lib-services-framework';
|
|
3
|
+
import { BucketStorageFactory, StorageMetrics } from './BucketStorageFactory.js';
|
|
4
|
+
import { StorageMetric } from '@powersync/service-types';
|
|
5
|
+
|
|
6
|
+
export function createCoreStorageMetrics(engine: MetricsEngine): void {
|
|
7
|
+
engine.createObservableGauge({
|
|
8
|
+
name: StorageMetric.REPLICATION_SIZE_BYTES,
|
|
9
|
+
description: 'Size of current data stored in PowerSync',
|
|
10
|
+
unit: 'bytes'
|
|
11
|
+
});
|
|
12
|
+
|
|
13
|
+
engine.createObservableGauge({
|
|
14
|
+
name: StorageMetric.OPERATION_SIZE_BYTES,
|
|
15
|
+
description: 'Size of operations stored in PowerSync',
|
|
16
|
+
unit: 'bytes'
|
|
17
|
+
});
|
|
18
|
+
|
|
19
|
+
engine.createObservableGauge({
|
|
20
|
+
name: StorageMetric.PARAMETER_SIZE_BYTES,
|
|
21
|
+
description: 'Size of parameter data stored in PowerSync',
|
|
22
|
+
unit: 'bytes'
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
export function initializeCoreStorageMetrics(engine: MetricsEngine, storage: BucketStorageFactory): void {
|
|
27
|
+
const replication_storage_size_bytes = engine.getObservableGauge(StorageMetric.REPLICATION_SIZE_BYTES);
|
|
28
|
+
const operation_storage_size_bytes = engine.getObservableGauge(StorageMetric.OPERATION_SIZE_BYTES);
|
|
29
|
+
const parameter_storage_size_bytes = engine.getObservableGauge(StorageMetric.PARAMETER_SIZE_BYTES);
|
|
30
|
+
|
|
31
|
+
const MINIMUM_INTERVAL = 60_000;
|
|
32
|
+
|
|
33
|
+
let cachedRequest: Promise<StorageMetrics | null> | undefined = undefined;
|
|
34
|
+
let cacheTimestamp = 0;
|
|
35
|
+
|
|
36
|
+
const getMetrics = () => {
|
|
37
|
+
if (cachedRequest == null || Date.now() - cacheTimestamp > MINIMUM_INTERVAL) {
|
|
38
|
+
cachedRequest = storage.getStorageMetrics().catch((e) => {
|
|
39
|
+
logger.error(`Failed to get storage metrics`, e);
|
|
40
|
+
return null;
|
|
41
|
+
});
|
|
42
|
+
cacheTimestamp = Date.now();
|
|
43
|
+
}
|
|
44
|
+
return cachedRequest;
|
|
45
|
+
};
|
|
46
|
+
|
|
47
|
+
replication_storage_size_bytes.setValueProvider(async () => {
|
|
48
|
+
const metrics = await getMetrics();
|
|
49
|
+
if (metrics) {
|
|
50
|
+
return metrics.replication_size_bytes;
|
|
51
|
+
}
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
operation_storage_size_bytes.setValueProvider(async () => {
|
|
55
|
+
const metrics = await getMetrics();
|
|
56
|
+
if (metrics) {
|
|
57
|
+
return metrics.operations_size_bytes;
|
|
58
|
+
}
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
parameter_storage_size_bytes.setValueProvider(async () => {
|
|
62
|
+
const metrics = await getMetrics();
|
|
63
|
+
if (metrics) {
|
|
64
|
+
return metrics.parameters_size_bytes;
|
|
65
|
+
}
|
|
66
|
+
});
|
|
67
|
+
}
|