@powersync/service-core 0.0.2 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/CHANGELOG.md +18 -0
  2. package/dist/index.d.ts +2 -2
  3. package/dist/index.js +2 -2
  4. package/dist/metrics/Metrics.d.ts +30 -0
  5. package/dist/metrics/Metrics.js +176 -0
  6. package/dist/metrics/Metrics.js.map +1 -0
  7. package/dist/migrations/migrations.js +2 -2
  8. package/dist/migrations/migrations.js.map +1 -1
  9. package/dist/replication/WalStream.js +7 -7
  10. package/dist/replication/WalStream.js.map +1 -1
  11. package/dist/routes/socket-route.js +4 -4
  12. package/dist/routes/socket-route.js.map +1 -1
  13. package/dist/routes/sync-stream.js +3 -3
  14. package/dist/routes/sync-stream.js.map +1 -1
  15. package/dist/storage/BucketStorage.d.ts +4 -0
  16. package/dist/storage/BucketStorage.js.map +1 -1
  17. package/dist/storage/MongoBucketStorage.d.ts +1 -0
  18. package/dist/storage/MongoBucketStorage.js +21 -0
  19. package/dist/storage/MongoBucketStorage.js.map +1 -1
  20. package/dist/storage/mongo/db.d.ts +4 -1
  21. package/dist/storage/mongo/db.js +4 -0
  22. package/dist/storage/mongo/db.js.map +1 -1
  23. package/dist/storage/mongo/models.d.ts +3 -0
  24. package/dist/sync/sync.js +2 -2
  25. package/dist/sync/sync.js.map +1 -1
  26. package/dist/sync/util.js +2 -2
  27. package/dist/sync/util.js.map +1 -1
  28. package/dist/util/config/compound-config-collector.js +4 -0
  29. package/dist/util/config/compound-config-collector.js.map +1 -1
  30. package/dist/util/config/types.d.ts +4 -0
  31. package/package.json +13 -13
  32. package/src/index.ts +2 -2
  33. package/src/metrics/Metrics.ts +263 -0
  34. package/src/migrations/migrations.ts +3 -3
  35. package/src/replication/WalStream.ts +7 -7
  36. package/src/routes/socket-route.ts +4 -4
  37. package/src/routes/sync-stream.ts +3 -3
  38. package/src/storage/BucketStorage.ts +5 -0
  39. package/src/storage/MongoBucketStorage.ts +26 -0
  40. package/src/storage/mongo/db.ts +8 -0
  41. package/src/storage/mongo/models.ts +5 -0
  42. package/src/sync/sync.ts +2 -2
  43. package/src/sync/util.ts +2 -2
  44. package/src/util/config/compound-config-collector.ts +4 -0
  45. package/src/util/config/types.ts +6 -0
  46. package/test/src/util.ts +9 -0
  47. package/test/src/wal_stream.test.ts +23 -14
  48. package/tsconfig.json +12 -0
  49. package/tsconfig.tsbuildinfo +1 -1
  50. package/dist/metrics/metrics.d.ts +0 -16
  51. package/dist/metrics/metrics.js +0 -139
  52. package/dist/metrics/metrics.js.map +0 -1
  53. package/src/metrics/metrics.ts +0 -169
package/src/replication/WalStream.ts CHANGED
@@ -7,10 +7,10 @@ import { SqliteRow, SqlSyncRules, TablePattern, toSyncRulesRow } from '@powersyn
  import * as storage from '@/storage/storage-index.js';
  import * as util from '@/util/util-index.js';

- import { chunks_replicated_total, rows_replicated_total, transactions_replicated_total } from '../metrics/metrics.js';
  import { getPgOutputRelation, getRelId, PgRelation } from './PgRelation.js';
  import { getReplicationIdentityColumns } from './util.js';
  import { WalConnection } from './WalConnection.js';
+ import { Metrics } from '@/metrics/Metrics.js';

  export const ZERO_LSN = '00000000/00000000';

@@ -403,7 +403,7 @@ WHERE oid = $1::regclass`,
  await batch.save({ tag: 'insert', sourceTable: table, before: undefined, after: record });
  }
  at += rows.length;
- rows_replicated_total.add(rows.length);
+ Metrics.getInstance().rows_replicated_total.add(rows.length);

  // pgwire streaming uses reasonable chunk sizes, so we flush at the end
  // of each chunk.
@@ -493,18 +493,18 @@ WHERE oid = $1::regclass`,
  }

  if (msg.tag == 'insert') {
- rows_replicated_total.add(1);
+ Metrics.getInstance().rows_replicated_total.add(1);
  const baseRecord = util.constructAfterRecord(msg);
  return await batch.save({ tag: 'insert', sourceTable: table, before: undefined, after: baseRecord });
  } else if (msg.tag == 'update') {
- rows_replicated_total.add(1);
+ Metrics.getInstance().rows_replicated_total.add(1);
  // "before" may be null if the replica id columns are unchanged
  // It's fine to treat that the same as an insert.
  const before = util.constructBeforeRecord(msg);
  const after = util.constructAfterRecord(msg);
  return await batch.save({ tag: 'update', sourceTable: table, before: before, after: after });
  } else if (msg.tag == 'delete') {
- rows_replicated_total.add(1);
+ Metrics.getInstance().rows_replicated_total.add(1);
  const before = util.constructBeforeRecord(msg)!;

  return await batch.save({ tag: 'delete', sourceTable: table, before: before, after: undefined });
@@ -577,7 +577,7 @@ WHERE oid = $1::regclass`,
  } else if (msg.tag == 'begin') {
  inTx = true;
  } else if (msg.tag == 'commit') {
- transactions_replicated_total.add(1);
+ Metrics.getInstance().transactions_replicated_total.add(1);
  inTx = false;
  await batch.commit(msg.lsn!);
  await this.ack(msg.lsn!, replicationStream);
@@ -602,7 +602,7 @@ WHERE oid = $1::regclass`,
  }
  }

- chunks_replicated_total.add(1);
+ Metrics.getInstance().chunks_replicated_total.add(1);
  }
  });
  }
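The hunks above all make the same change: the module-level counters previously imported from ../metrics/metrics.js are now owned by a process-wide Metrics singleton reached through Metrics.getInstance(). The new src/metrics/Metrics.ts (+263 lines) is not included in this diff, so the following is only a minimal sketch of the access pattern those call sites imply; everything beyond the getInstance()/initialise()/add() calls visible in the diff is an assumption, not the real implementation.

  // Minimal sketch of the singleton access pattern used above. Only getInstance(),
  // initialise() and counter.add() mirror calls visible in this diff; the in-memory
  // Counter and the error message are assumed for illustration.
  class Counter {
    private value = 0;
    add(delta: number): void {
      this.value += delta;
    }
    get(): number {
      return this.value;
    }
  }

  class MetricsSketch {
    private static instance: MetricsSketch | undefined;

    // Counters referenced from WalStream.ts in this diff.
    readonly rows_replicated_total = new Counter();
    readonly transactions_replicated_total = new Counter();
    readonly chunks_replicated_total = new Counter();

    static initialise(): void {
      MetricsSketch.instance = new MetricsSketch();
    }

    static getInstance(): MetricsSketch {
      if (!MetricsSketch.instance) {
        throw new Error('Metrics have not been initialised');
      }
      return MetricsSketch.instance;
    }
  }

  // Usage mirroring the replication hunks: initialise once, then record counters.
  MetricsSketch.initialise();
  MetricsSketch.getInstance().rows_replicated_total.add(1);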
package/src/routes/socket-route.ts CHANGED
@@ -3,10 +3,10 @@ import { SyncParameters, normalizeTokenParameters } from '@powersync/service-syn
  import * as micro from '@journeyapps-platform/micro';

  import * as util from '@/util/util-index.js';
- import { concurrent_connections, data_synced_bytes } from '../metrics/metrics.js';
  import { streamResponse } from '../sync/sync.js';
  import { SyncRoutes } from './sync-stream.js';
  import { SocketRouteGenerator } from './router-socket.js';
+ import { Metrics } from '@/metrics/Metrics.js';

  export const sync_stream_reactive: SocketRouteGenerator = (router) =>
  router.reactiveStream<util.StreamingSyncRequest, any>(SyncRoutes.STREAM, {
@@ -64,7 +64,7 @@ export const sync_stream_reactive: SocketRouteGenerator = (router) =>
  observer.triggerCancel();
  });

- concurrent_connections.add(1);
+ Metrics.getInstance().concurrent_connections.add(1);
  try {
  for await (const data of streamResponse({
  storage,
@@ -93,7 +93,7 @@ export const sync_stream_reactive: SocketRouteGenerator = (router) =>
  const serialized = serialize(data) as Buffer;
  responder.onNext({ data: serialized }, false);
  requestedN--;
- data_synced_bytes.add(serialized.length);
+ Metrics.getInstance().data_synced_bytes.add(serialized.length);
  }

  if (requestedN <= 0) {
@@ -125,7 +125,7 @@ export const sync_stream_reactive: SocketRouteGenerator = (router) =>
  responder.onComplete();
  removeStopHandler();
  disposer();
- concurrent_connections.add(-1);
+ Metrics.getInstance().concurrent_connections.add(-1);
  }
  }
  });
package/src/routes/sync-stream.ts CHANGED
@@ -4,10 +4,10 @@ import { SyncParameters, normalizeTokenParameters } from '@powersync/service-syn

  import * as sync from '@/sync/sync-index.js';
  import * as util from '@/util/util-index.js';
- import { concurrent_connections } from '../metrics/metrics.js';

  import { authUser } from './auth.js';
  import { RouteGenerator } from './router.js';
+ import { Metrics } from '@/metrics/Metrics.js';

  export enum SyncRoutes {
  STREAM = '/sync/stream'
@@ -49,7 +49,7 @@ export const syncStreamed: RouteGenerator = (router) =>

  const controller = new AbortController();
  try {
- concurrent_connections.add(1);
+ Metrics.getInstance().concurrent_connections.add(1);
  const stream = Readable.from(
  sync.transformToBytesTracked(
  sync.ndjson(
@@ -84,7 +84,7 @@ export const syncStreamed: RouteGenerator = (router) =>
  await res.send(stream);
  } finally {
  controller.abort();
- concurrent_connections.add(-1);
+ Metrics.getInstance().concurrent_connections.add(-1);
  // Prevent double-send
  res.hijack();
  }
package/src/storage/BucketStorage.ts CHANGED
@@ -91,6 +91,11 @@ export interface BucketStorageFactory {
  * Get storage size of active sync rules.
  */
  getStorageMetrics(): Promise<StorageMetrics>;
+
+ /**
+  * Get the unique identifier for this instance of Powersync
+  */
+ getPowerSyncInstanceId(): Promise<string>;
  }

  export interface WriteCheckpoint {
package/src/storage/MongoBucketStorage.ts CHANGED
@@ -23,6 +23,8 @@ import { MongoSyncBucketStorage } from './mongo/MongoSyncBucketStorage.js';
  import { PowerSyncMongo, PowerSyncMongoOptions } from './mongo/db.js';
  import { SyncRuleDocument, SyncRuleState } from './mongo/models.js';
  import { generateSlotName } from './mongo/util.js';
+ import { locks } from '@journeyapps-platform/micro';
+ import { v4 as uuid } from 'uuid';

  export interface MongoBucketStorageOptions extends PowerSyncMongoOptions {}

@@ -341,6 +343,30 @@ export class MongoBucketStorage implements BucketStorageFactory {
  };
  }

+ async getPowerSyncInstanceId(): Promise<string> {
+   let instance = await this.db.instance.findOne({
+     _id: { $exists: true }
+   });
+
+   if (!instance) {
+     const manager = locks.createMongoLockManager(this.db.locks, {
+       name: `instance-id-insertion-lock`
+     });
+
+     await manager.lock(async () => {
+       await this.db.instance.insertOne({
+         _id: uuid(),
+       });
+     });
+
+     instance = await this.db.instance.findOne({
+       _id: { $exists: true }
+     });
+   }
+
+   return instance!._id;
+ }
+
  private makeActiveCheckpoint(doc: SyncRuleDocument | null) {
  return {
  checkpoint: util.timestampToOpId(doc?.last_checkpoint ?? 0n),
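getPowerSyncInstanceId() creates the instance document lazily: if none exists, a Mongo-backed lock (locks.createMongoLockManager) serialises concurrent writers so only one UUID is ever inserted, and the document is re-read afterwards. How the id is consumed is not shown in this diff; the sketch below is only one plausible wiring into Metrics.initialise, reusing option names that appear in test/src/util.ts further down, with an assumed import path for the config types.

  // Hedged sketch: one possible way the new instance id could feed telemetry
  // initialisation. The real wiring in the service entrypoint is not part of
  // this diff; the initialise() option names mirror test/src/util.ts below,
  // and the '@/util/config/types.js' import path is an assumption.
  import { Metrics } from '@/metrics/Metrics.js';
  import { BucketStorageFactory } from '@/storage/BucketStorage.js';
  import { ResolvedPowerSyncConfig } from '@/util/config/types.js';

  async function startTelemetry(storage: BucketStorageFactory, config: ResolvedPowerSyncConfig) {
    // One stable UUID per deployment, created lazily under the Mongo lock above.
    const instanceId = await storage.getPowerSyncInstanceId();

    await Metrics.initialise({
      disable_telemetry_sharing: config.telemetry.disable_telemetry_sharing,
      powersync_instance_id: instanceId,
      internal_metrics_endpoint: config.telemetry.internal_service_endpoint
    });
  }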
package/src/storage/mongo/db.ts CHANGED
@@ -1,4 +1,5 @@
  import * as mongo from 'mongodb';
+ import * as micro from '@journeyapps-platform/micro';

  import * as db from '@/db/db-index.js';

@@ -7,6 +8,7 @@ import {
  BucketParameterDocument,
  CurrentDataDocument,
  IdSequenceDocument,
+ InstanceDocument,
  SourceTableDocument,
  SyncRuleDocument,
  WriteCheckpointDocument
@@ -33,6 +35,8 @@ export class PowerSyncMongo {
  readonly sync_rules: mongo.Collection<SyncRuleDocument>;
  readonly source_tables: mongo.Collection<SourceTableDocument>;
  readonly write_checkpoints: mongo.Collection<WriteCheckpointDocument>;
+ readonly instance: mongo.Collection<InstanceDocument>;
+ readonly locks: mongo.Collection<micro.locks.Lock>;

  readonly client: mongo.MongoClient;
  readonly db: mongo.Db;
@@ -55,6 +59,8 @@ export class PowerSyncMongo {
  this.sync_rules = db.collection('sync_rules');
  this.source_tables = db.collection('source_tables');
  this.write_checkpoints = db.collection('write_checkpoints');
+ this.instance = db.collection('instance');
+ this.locks = this.db.collection('locks');
  }

  async clear() {
@@ -65,5 +71,7 @@ export class PowerSyncMongo {
  await this.sync_rules.deleteMany({});
  await this.source_tables.deleteMany({});
  await this.write_checkpoints.deleteMany({});
+ await this.instance.deleteOne({});
+ await this.locks.deleteMany({});
  }
  }
package/src/storage/mongo/models.ts CHANGED
@@ -155,3 +155,8 @@ export interface WriteCheckpointDocument {
  lsns: Record<string, string>;
  client_id: bigint;
  }
+
+ export interface InstanceDocument {
+   // The instance UUID
+   _id: string;
+ }
package/src/sync/sync.ts CHANGED
@@ -8,9 +8,9 @@ import * as auth from '@/auth/auth-index.js';
  import * as storage from '@/storage/storage-index.js';
  import * as util from '@/util/util-index.js';

- import { operations_synced_total } from '../metrics/metrics.js';
  import { mergeAsyncIterables } from './merge.js';
  import { TokenStreamOptions, tokenStream } from './util.js';
+ import { Metrics } from '@/metrics/Metrics.js';

  /**
  * Maximum number of connections actively fetching data.
@@ -265,7 +265,7 @@ async function* bucketDataBatch(
  // iterator memory in case if large data sent.
  yield { data: null, done: false };
  }
- operations_synced_total.add(r.data.length);
+ Metrics.getInstance().operations_synced_total.add(r.data.length);

  dataBuckets.set(r.bucket, r.next_after);
  }
package/src/sync/util.ts CHANGED
@@ -1,7 +1,7 @@
  import * as timers from 'timers/promises';

  import * as util from '@/util/util-index.js';
- import { data_synced_bytes } from '../metrics/metrics.js';
+ import { Metrics } from '@/metrics/Metrics.js';

  export type TokenStreamOptions = {
  /**
@@ -92,7 +92,7 @@ export async function* ndjson(iterator: AsyncIterable<string | null | Record<str
  export async function* transformToBytesTracked(iterator: AsyncIterable<string>): AsyncGenerator<Buffer> {
  for await (let data of iterator) {
  const encoded = Buffer.from(data, 'utf8');
- data_synced_bytes.add(encoded.length);
+ Metrics.getInstance().data_synced_bytes.add(encoded.length);
  yield encoded;
  }
  }
package/src/util/config/compound-config-collector.ts CHANGED
@@ -119,6 +119,10 @@ export class CompoundConfigCollector {
  token_max_expiration: '1d', // 1 day
  metadata: baseConfig.metadata ?? {},
  migrations: baseConfig.migrations,
+ telemetry: {
+   disable_telemetry_sharing: baseConfig.telemetry?.disable_telemetry_sharing ?? false,
+   internal_service_endpoint: baseConfig.telemetry?.internal_service_endpoint ?? 'https://pulse.journeyapps.com/v1/metrics'
+ },
  slot_name_prefix: connections[0]?.slot_name_prefix ?? 'powersync_'
  };
  return config;
package/src/util/config/types.ts CHANGED
@@ -55,6 +55,12 @@ export type ResolvedPowerSyncConfig = {
  migrations?: {
  disable_auto_migration?: boolean;
  };
+
+ telemetry: {
+   disable_telemetry_sharing: boolean;
+   internal_service_endpoint: string;
+ };
+
  /** Prefix for postgres replication slot names. May eventually be connection-specific. */
  slot_name_prefix: string;
  };
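Together with the collector change above, every resolved config now carries a mandatory telemetry block: sharing stays enabled unless disable_telemetry_sharing is set, and the endpoint falls back to https://pulse.journeyapps.com/v1/metrics. A standalone sketch of that defaulting, with the source-config shape inferred from this diff only:

  // Hedged sketch of the defaulting added in compound-config-collector.ts.
  // TelemetryInput stands in for the (unshown) source-config telemetry section.
  type TelemetryInput = {
    disable_telemetry_sharing?: boolean;
    internal_service_endpoint?: string;
  };

  function resolveTelemetry(input?: TelemetryInput) {
    return {
      // Sharing remains enabled unless the source config explicitly disables it.
      disable_telemetry_sharing: input?.disable_telemetry_sharing ?? false,
      // Falls back to the hosted metrics endpoint baked into the collector.
      internal_service_endpoint:
        input?.internal_service_endpoint ?? 'https://pulse.journeyapps.com/v1/metrics'
    };
  }

  // resolveTelemetry(undefined) ->
  //   { disable_telemetry_sharing: false,
  //     internal_service_endpoint: 'https://pulse.journeyapps.com/v1/metrics' }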
package/test/src/util.ts CHANGED
@@ -6,6 +6,15 @@ import { MongoBucketStorage } from '../../src/storage/MongoBucketStorage.js';
  import { PowerSyncMongo } from '../../src/storage/mongo/db.js';
  import { escapeIdentifier } from '../../src/util/pgwire_utils.js';
  import { env } from './env.js';
+ import { Metrics } from '@/metrics/Metrics.js';
+
+ // The metrics need to be initialised before they can be used
+ await Metrics.initialise({
+   disable_telemetry_sharing: true,
+   powersync_instance_id: 'test',
+   internal_metrics_endpoint: 'unused.for.tests.com'
+ });
+ Metrics.getInstance().resetCounters();

  export const TEST_URI = env.PG_TEST_URL;

package/test/src/wal_stream.test.ts CHANGED
@@ -1,10 +1,10 @@
  import * as crypto from 'crypto';
  import { describe, expect, test } from 'vitest';
- import { BucketStorageFactory } from '../../src/storage/BucketStorage.js';
+ import { BucketStorageFactory } from '@/storage/BucketStorage.js';
  import { MONGO_STORAGE_FACTORY } from './util.js';
  import { putOp, removeOp, walStreamTest } from './wal_stream_utils.js';
  import { pgwireRows } from '@powersync/service-jpgwire';
- import { getMetricValueForTests } from '../../src/metrics/metrics.js';
+ import { Metrics } from '@/metrics/Metrics.js';

  type StorageFactory = () => Promise<BucketStorageFactory>;

@@ -41,8 +41,10 @@ bucket_definitions:

  await context.replicateSnapshot();

- const startRowCount = (await getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const startTxCount = (await getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ const startRowCount =
+   (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const startTxCount =
+   (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

  context.startStreaming();

@@ -57,8 +59,9 @@ bucket_definitions:
  expect(data).toMatchObject([
  putOp('test_data', { id: test_id, description: 'test1', num: 1152921504606846976n })
  ]);
- const endRowCount = (await getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const endTxCount = (await getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const endTxCount =
+   (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
  expect(endRowCount - startRowCount).toEqual(1);
  expect(endTxCount - startTxCount).toEqual(1);
  })
@@ -80,8 +83,10 @@ bucket_definitions:

  await context.replicateSnapshot();

- const startRowCount = (await getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const startTxCount = (await getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ const startRowCount =
+   (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const startTxCount =
+   (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

  context.startStreaming();

@@ -92,8 +97,9 @@ bucket_definitions:
  const data = await context.getBucketData('global[]');

  expect(data).toMatchObject([putOp('test_DATA', { id: test_id, description: 'test1' })]);
- const endRowCount = (await getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const endTxCount = (await getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const endTxCount =
+   (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
  expect(endRowCount - startRowCount).toEqual(1);
  expect(endTxCount - startTxCount).toEqual(1);
  })
@@ -287,8 +293,10 @@ bucket_definitions:

  await context.replicateSnapshot();

- const startRowCount = (await getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const startTxCount = (await getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ const startRowCount =
+   (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const startTxCount =
+   (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

  context.startStreaming();

@@ -299,8 +307,9 @@ bucket_definitions:
  const data = await context.getBucketData('global[]');

  expect(data).toMatchObject([]);
- const endRowCount = (await getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
- const endTxCount = (await getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;
+ const endRowCount = (await Metrics.getInstance().getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+ const endTxCount =
+   (await Metrics.getInstance().getMetricValueForTests('powersync_transactions_replicated_total')) ?? 0;

  // There was a transaction, but we should not replicate any actual data
  expect(endRowCount - startRowCount).toEqual(0);
package/tsconfig.json CHANGED
@@ -15,6 +15,18 @@
  "references": [
  {
  "path": "../types"
+ },
+ {
+ "path": "../rsocket-router"
+ },
+ {
+ "path": "../jsonbig"
+ },
+ {
+ "path": "../jpgwire"
+ },
+ {
+ "path": "../sync-rules"
  }
  ]
  }