@powersync/service-module-postgres-storage 0.0.0-dev-20260225160713 → 0.0.0-dev-20260313100403

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/CHANGELOG.md +50 -7
  2. package/dist/.tsbuildinfo +1 -1
  3. package/dist/@types/migrations/scripts/1771424826685-current-data-pending-deletes.d.ts +3 -0
  4. package/dist/@types/storage/PostgresBucketStorageFactory.d.ts +4 -0
  5. package/dist/@types/storage/PostgresCompactor.d.ts +8 -2
  6. package/dist/@types/storage/PostgresReportStorage.d.ts +3 -3
  7. package/dist/@types/storage/PostgresSyncRulesStorage.d.ts +10 -4
  8. package/dist/@types/storage/batch/OperationBatch.d.ts +2 -2
  9. package/dist/@types/storage/batch/PostgresBucketBatch.d.ts +13 -9
  10. package/dist/@types/storage/batch/PostgresPersistedBatch.d.ts +17 -5
  11. package/dist/@types/storage/current-data-store.d.ts +85 -0
  12. package/dist/@types/storage/current-data-table.d.ts +9 -0
  13. package/dist/@types/storage/table-id.d.ts +2 -0
  14. package/dist/@types/types/models/CurrentData.d.ts +18 -3
  15. package/dist/@types/utils/bson.d.ts +1 -1
  16. package/dist/@types/utils/test-utils.d.ts +1 -1
  17. package/dist/migrations/scripts/1771424826685-current-data-pending-deletes.js +8 -0
  18. package/dist/migrations/scripts/1771424826685-current-data-pending-deletes.js.map +1 -0
  19. package/dist/storage/PostgresBucketStorageFactory.js +41 -4
  20. package/dist/storage/PostgresBucketStorageFactory.js.map +1 -1
  21. package/dist/storage/PostgresCompactor.js +14 -6
  22. package/dist/storage/PostgresCompactor.js.map +1 -1
  23. package/dist/storage/PostgresReportStorage.js +7 -7
  24. package/dist/storage/PostgresReportStorage.js.map +1 -1
  25. package/dist/storage/PostgresSyncRulesStorage.js +98 -24
  26. package/dist/storage/PostgresSyncRulesStorage.js.map +1 -1
  27. package/dist/storage/batch/OperationBatch.js +2 -1
  28. package/dist/storage/batch/OperationBatch.js.map +1 -1
  29. package/dist/storage/batch/PostgresBucketBatch.js +295 -213
  30. package/dist/storage/batch/PostgresBucketBatch.js.map +1 -1
  31. package/dist/storage/batch/PostgresPersistedBatch.js +86 -81
  32. package/dist/storage/batch/PostgresPersistedBatch.js.map +1 -1
  33. package/dist/storage/current-data-store.js +270 -0
  34. package/dist/storage/current-data-store.js.map +1 -0
  35. package/dist/storage/current-data-table.js +22 -0
  36. package/dist/storage/current-data-table.js.map +1 -0
  37. package/dist/storage/table-id.js +8 -0
  38. package/dist/storage/table-id.js.map +1 -0
  39. package/dist/types/models/CurrentData.js +11 -2
  40. package/dist/types/models/CurrentData.js.map +1 -1
  41. package/dist/utils/bson.js.map +1 -1
  42. package/dist/utils/db.js +9 -0
  43. package/dist/utils/db.js.map +1 -1
  44. package/dist/utils/test-utils.js +13 -6
  45. package/dist/utils/test-utils.js.map +1 -1
  46. package/package.json +9 -9
  47. package/src/migrations/scripts/1771424826685-current-data-pending-deletes.ts +10 -0
  48. package/src/storage/PostgresBucketStorageFactory.ts +53 -5
  49. package/src/storage/PostgresCompactor.ts +17 -8
  50. package/src/storage/PostgresReportStorage.ts +7 -7
  51. package/src/storage/PostgresSyncRulesStorage.ts +47 -31
  52. package/src/storage/batch/OperationBatch.ts +4 -3
  53. package/src/storage/batch/PostgresBucketBatch.ts +316 -238
  54. package/src/storage/batch/PostgresPersistedBatch.ts +92 -84
  55. package/src/storage/current-data-store.ts +326 -0
  56. package/src/storage/current-data-table.ts +26 -0
  57. package/src/storage/table-id.ts +9 -0
  58. package/src/types/models/CurrentData.ts +17 -4
  59. package/src/utils/bson.ts +1 -1
  60. package/src/utils/db.ts +10 -0
  61. package/src/utils/test-utils.ts +14 -7
  62. package/test/src/__snapshots__/{connection-report-storage.test.ts.snap → client-connections-storage.test.ts.snap} +22 -22
  63. package/test/src/__snapshots__/storage.test.ts.snap +151 -0
  64. package/test/src/__snapshots__/storage_compacting.test.ts.snap +17 -0
  65. package/test/src/__snapshots__/storage_sync.test.ts.snap +1111 -16
  66. package/test/src/{connection-report-storage.test.ts → client-connections-storage.test.ts} +1 -1
  67. package/test/src/env.ts +1 -1
  68. package/test/src/migrations.test.ts +1 -1
  69. package/test/src/storage.test.ts +138 -131
  70. package/test/src/storage_compacting.test.ts +80 -11
  71. package/test/src/storage_sync.test.ts +57 -54
  72. package/test/src/util.ts +4 -4
@@ -48,13 +48,13 @@ export class PostgresReportStorage implements storage.ReportStorage {
48
48
  ): event_types.ClientConnectionReportResponse {
49
49
  if (!result) {
50
50
  return {
51
- users: 0,
52
- sdks: []
51
+ total_users: 0,
52
+ sdk_breakdown: []
53
53
  };
54
54
  }
55
55
  return {
56
- users: Number(result.users),
57
- sdks: result.sdks?.data || []
56
+ total_users: Number(result.users),
57
+ sdk_breakdown: result.sdks?.data || []
58
58
  };
59
59
  }
60
60
  private async listConnectionsQuery() {
@@ -234,12 +234,12 @@ export class PostgresReportStorage implements storage.ReportStorage {
234
234
  AND connected_at = ${{ type: 1184, value: connectIsoString }}
235
235
  `.execute();
236
236
  }
237
- async getConnectedClients(): Promise<event_types.ClientConnectionReportResponse> {
237
+ async getCurrentConnections(): Promise<event_types.ClientConnectionReportResponse> {
238
238
  const result = await this.listConnectionsQuery();
239
239
  return this.mapListCurrentConnectionsResponse(result);
240
240
  }
241
241
 
242
- async getClientConnectionReports(
242
+ async getClientConnectionsSummary(
243
243
  data: event_types.ClientConnectionReportRequest
244
244
  ): Promise<event_types.ClientConnectionReportResponse> {
245
245
  const { start, end } = data;
@@ -289,7 +289,7 @@ export class PostgresReportStorage implements storage.ReportStorage {
289
289
  return this.mapListCurrentConnectionsResponse(result);
290
290
  }
291
291
 
292
- async getGeneralClientConnectionAnalytics(
292
+ async getClientSessions(
293
293
  data: event_types.ClientConnectionAnalyticsRequest
294
294
  ): Promise<event_types.PaginatedResponse<event_types.ClientConnection>> {
295
295
  const limit = data.limit || 100;
@@ -14,6 +14,7 @@ import {
14
14
  PopulateChecksumCacheResults,
15
15
  ReplicationCheckpoint,
16
16
  storage,
17
+ StorageVersionConfig,
17
18
  utils,
18
19
  WatchWriteCheckpointOptions
19
20
  } from '@powersync/service-core';
@@ -35,6 +36,7 @@ import { PostgresBucketBatch } from './batch/PostgresBucketBatch.js';
35
36
  import { PostgresWriteCheckpointAPI } from './checkpoints/PostgresWriteCheckpointAPI.js';
36
37
  import { PostgresBucketStorageFactory } from './PostgresBucketStorageFactory.js';
37
38
  import { PostgresCompactor } from './PostgresCompactor.js';
39
+ import { PostgresCurrentDataStore } from './current-data-store.js';
38
40
 
39
41
  export type PostgresSyncRulesStorageOptions = {
40
42
  factory: PostgresBucketStorageFactory;
@@ -52,11 +54,13 @@ export class PostgresSyncRulesStorage
52
54
  public readonly sync_rules: storage.PersistedSyncRulesContent;
53
55
  public readonly slot_name: string;
54
56
  public readonly factory: PostgresBucketStorageFactory;
57
+ public readonly storageConfig: StorageVersionConfig;
55
58
 
56
59
  private sharedIterator = new BroadcastIterable((signal) => this.watchActiveCheckpoint(signal));
57
60
 
58
61
  protected db: lib_postgres.DatabaseClient;
59
62
  protected writeCheckpointAPI: PostgresWriteCheckpointAPI;
63
+ private readonly currentDataStore: PostgresCurrentDataStore;
60
64
 
61
65
  // TODO we might be able to share this in an abstract class
62
66
  private parsedSyncRulesCache:
@@ -71,6 +75,8 @@ export class PostgresSyncRulesStorage
71
75
  this.sync_rules = options.sync_rules;
72
76
  this.slot_name = options.sync_rules.slot_name;
73
77
  this.factory = options.factory;
78
+ this.storageConfig = options.sync_rules.getStorageConfig();
79
+ this.currentDataStore = new PostgresCurrentDataStore(this.storageConfig);
74
80
 
75
81
  this.writeCheckpointAPI = new PostgresWriteCheckpointAPI({
76
82
  db: this.db,
@@ -121,8 +127,18 @@ export class PostgresSyncRulesStorage
121
127
  `.execute();
122
128
  }
123
129
 
124
- compact(options?: storage.CompactOptions): Promise<void> {
125
- return new PostgresCompactor(this.db, this.group_id, options).compact();
130
+ async compact(options?: storage.CompactOptions): Promise<void> {
131
+ let maxOpId = options?.maxOpId;
132
+ if (maxOpId == null) {
133
+ const checkpoint = await this.getCheckpoint();
134
+ // Note: If there is no active checkpoint, this will be 0, in which case no compacting is performed
135
+ maxOpId = checkpoint.checkpoint;
136
+ }
137
+
138
+ return new PostgresCompactor(this.db, this.group_id, {
139
+ ...options,
140
+ maxOpId
141
+ }).compact();
126
142
  }
127
143
 
128
144
  async populatePersistentChecksumCache(options: PopulateChecksumCacheOptions): Promise<PopulateChecksumCacheResults> {
@@ -327,10 +343,7 @@ export class PostgresSyncRulesStorage
327
343
  });
328
344
  }
329
345
 
330
- async startBatch(
331
- options: storage.StartBatchOptions,
332
- callback: (batch: storage.BucketStorageBatch) => Promise<void>
333
- ): Promise<storage.FlushedResult | null> {
346
+ async createWriter(options: storage.CreateWriterOptions): Promise<storage.BucketStorageBatch> {
334
347
  const syncRules = await this.db.sql`
335
348
  SELECT
336
349
  last_checkpoint_lsn,
@@ -347,7 +360,7 @@ export class PostgresSyncRulesStorage
347
360
 
348
361
  const checkpoint_lsn = syncRules?.last_checkpoint_lsn ?? null;
349
362
 
350
- const batch = new PostgresBucketBatch({
363
+ const writer = new PostgresBucketBatch({
351
364
  logger: options.logger ?? framework.logger,
352
365
  db: this.db,
353
366
  sync_rules: this.sync_rules.parsed(options).hydratedSyncRules(),
@@ -355,22 +368,28 @@ export class PostgresSyncRulesStorage
355
368
  slot_name: this.slot_name,
356
369
  last_checkpoint_lsn: checkpoint_lsn,
357
370
  keep_alive_op: syncRules?.keepalive_op,
358
- no_checkpoint_before_lsn: syncRules?.no_checkpoint_before ?? options.zeroLSN,
359
371
  resumeFromLsn: maxLsn(syncRules?.snapshot_lsn, checkpoint_lsn),
360
372
  store_current_data: options.storeCurrentData,
361
373
  skip_existing_rows: options.skipExistingRows ?? false,
362
374
  batch_limits: this.options.batchLimits,
363
- markRecordUnavailable: options.markRecordUnavailable
375
+ markRecordUnavailable: options.markRecordUnavailable,
376
+ storageConfig: this.storageConfig
364
377
  });
365
- this.iterateListeners((cb) => cb.batchStarted?.(batch));
366
-
367
- await callback(batch);
368
- await batch.flush();
369
- if (batch.last_flushed_op != null) {
370
- return { flushed_op: batch.last_flushed_op };
371
- } else {
372
- return null;
373
- }
378
+ this.iterateListeners((cb) => cb.batchStarted?.(writer));
379
+ return writer;
380
+ }
381
+
382
+ /**
383
+ * @deprecated Use `createWriter()` with `await using` instead.
384
+ */
385
+ async startBatch(
386
+ options: storage.CreateWriterOptions,
387
+ callback: (batch: storage.BucketStorageBatch) => Promise<void>
388
+ ): Promise<storage.FlushedResult | null> {
389
+ await using writer = await this.createWriter(options);
390
+ await callback(writer);
391
+ await writer.flush();
392
+ return writer.last_flushed_op != null ? { flushed_op: writer.last_flushed_op } : null;
374
393
  }
375
394
 
376
395
  async getParameterSets(
@@ -415,10 +434,10 @@ export class PostgresSyncRulesStorage
415
434
 
416
435
  async *getBucketDataBatch(
417
436
  checkpoint: InternalOpId,
418
- dataBuckets: Map<string, InternalOpId>,
437
+ dataBuckets: storage.BucketDataRequest[],
419
438
  options?: storage.BucketDataBatchOptions
420
439
  ): AsyncIterable<storage.SyncBucketDataChunk> {
421
- if (dataBuckets.size == 0) {
440
+ if (dataBuckets.length == 0) {
422
441
  return;
423
442
  }
424
443
 
@@ -430,10 +449,8 @@ export class PostgresSyncRulesStorage
430
449
  // not match up with chunks.
431
450
 
432
451
  const end = checkpoint ?? BIGINT_MAX;
433
- const filters = Array.from(dataBuckets.entries()).map(([name, start]) => ({
434
- bucket_name: name,
435
- start: start
436
- }));
452
+ const filters = dataBuckets.map((request) => ({ bucket_name: request.bucket, start: request.start }));
453
+ const startOpByBucket = new Map(dataBuckets.map((request) => [request.bucket, request.start]));
437
454
 
438
455
  const batchRowLimit = options?.limit ?? storage.DEFAULT_DOCUMENT_BATCH_LIMIT;
439
456
  const chunkSizeLimitBytes = options?.chunkLimitBytes ?? storage.DEFAULT_DOCUMENT_CHUNK_LIMIT_BYTES;
@@ -533,7 +550,7 @@ export class PostgresSyncRulesStorage
533
550
  }
534
551
 
535
552
  if (start == null) {
536
- const startOpId = dataBuckets.get(bucket_name);
553
+ const startOpId = startOpByBucket.get(bucket_name);
537
554
  if (startOpId == null) {
538
555
  throw new framework.ServiceAssertionError(`data for unexpected bucket: ${bucket_name}`);
539
556
  }
@@ -588,7 +605,10 @@ export class PostgresSyncRulesStorage
588
605
  }
589
606
  }
590
607
 
591
- async getChecksums(checkpoint: utils.InternalOpId, buckets: string[]): Promise<utils.ChecksumMap> {
608
+ async getChecksums(
609
+ checkpoint: utils.InternalOpId,
610
+ buckets: storage.BucketChecksumRequest[]
611
+ ): Promise<utils.ChecksumMap> {
592
612
  return this.checksumCache.getChecksumMap(checkpoint, buckets);
593
613
  }
594
614
 
@@ -662,11 +682,7 @@ export class PostgresSyncRulesStorage
662
682
  group_id = ${{ type: 'int4', value: this.group_id }}
663
683
  `.execute();
664
684
 
665
- await this.db.sql`
666
- DELETE FROM current_data
667
- WHERE
668
- group_id = ${{ type: 'int4', value: this.group_id }}
669
- `.execute();
685
+ await this.currentDataStore.deleteGroupRows(this.db, { groupId: this.group_id });
670
686
 
671
687
  await this.db.sql`
672
688
  DELETE FROM source_tables
@@ -5,6 +5,7 @@
5
5
 
6
6
  import { storage, utils } from '@powersync/service-core';
7
7
  import { RequiredOperationBatchLimits } from '../../types/types.js';
8
+ import { postgresTableId } from '../table-id.js';
8
9
 
9
10
  /**
10
11
  * Batch of input operations.
@@ -89,13 +90,13 @@ export class RecordOperation {
89
90
  /**
90
91
  * In-memory cache key - must not be persisted.
91
92
  */
92
- export function cacheKey(sourceTableId: string, id: storage.ReplicaId) {
93
+ export function cacheKey(sourceTableId: storage.SourceTableId, id: storage.ReplicaId) {
93
94
  return encodedCacheKey(sourceTableId, storage.serializeReplicaId(id));
94
95
  }
95
96
 
96
97
  /**
97
98
  * Calculates a cache key for a stored ReplicaId. This is usually stored as a bytea/Buffer.
98
99
  */
99
- export function encodedCacheKey(sourceTableId: string, storedKey: Buffer) {
100
- return `${sourceTableId}.${storedKey.toString('base64')}`;
100
+ export function encodedCacheKey(sourceTableId: storage.SourceTableId, storedKey: Buffer) {
101
+ return `${postgresTableId(sourceTableId)}.${storedKey.toString('base64')}`;
101
102
  }