@powersync/service-module-postgres-storage 0.11.2 → 0.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. package/CHANGELOG.md +60 -0
  2. package/dist/.tsbuildinfo +1 -1
  3. package/dist/@types/migrations/scripts/1771232439485-storage-version.d.ts +3 -0
  4. package/dist/@types/migrations/scripts/1771424826685-current-data-pending-deletes.d.ts +3 -0
  5. package/dist/@types/migrations/scripts/1771491856000-sync-plan.d.ts +3 -0
  6. package/dist/@types/storage/PostgresBucketStorageFactory.d.ts +6 -10
  7. package/dist/@types/storage/PostgresCompactor.d.ts +10 -3
  8. package/dist/@types/storage/PostgresSyncRulesStorage.d.ts +5 -3
  9. package/dist/@types/storage/batch/OperationBatch.d.ts +2 -2
  10. package/dist/@types/storage/batch/PostgresBucketBatch.d.ts +12 -9
  11. package/dist/@types/storage/batch/PostgresPersistedBatch.d.ts +17 -5
  12. package/dist/@types/storage/current-data-store.d.ts +85 -0
  13. package/dist/@types/storage/current-data-table.d.ts +9 -0
  14. package/dist/@types/storage/sync-rules/PostgresPersistedSyncRulesContent.d.ts +1 -10
  15. package/dist/@types/storage/table-id.d.ts +2 -0
  16. package/dist/@types/types/models/CurrentData.d.ts +18 -3
  17. package/dist/@types/types/models/SyncRules.d.ts +12 -2
  18. package/dist/@types/types/models/json.d.ts +11 -0
  19. package/dist/@types/types/types.d.ts +2 -0
  20. package/dist/@types/utils/bson.d.ts +1 -1
  21. package/dist/@types/utils/db.d.ts +9 -0
  22. package/dist/@types/utils/test-utils.d.ts +1 -1
  23. package/dist/migrations/scripts/1771232439485-storage-version.js +111 -0
  24. package/dist/migrations/scripts/1771232439485-storage-version.js.map +1 -0
  25. package/dist/migrations/scripts/1771424826685-current-data-pending-deletes.js +8 -0
  26. package/dist/migrations/scripts/1771424826685-current-data-pending-deletes.js.map +1 -0
  27. package/dist/migrations/scripts/1771491856000-sync-plan.js +91 -0
  28. package/dist/migrations/scripts/1771491856000-sync-plan.js.map +1 -0
  29. package/dist/storage/PostgresBucketStorageFactory.js +56 -58
  30. package/dist/storage/PostgresBucketStorageFactory.js.map +1 -1
  31. package/dist/storage/PostgresCompactor.js +55 -66
  32. package/dist/storage/PostgresCompactor.js.map +1 -1
  33. package/dist/storage/PostgresSyncRulesStorage.js +23 -15
  34. package/dist/storage/PostgresSyncRulesStorage.js.map +1 -1
  35. package/dist/storage/batch/OperationBatch.js +2 -1
  36. package/dist/storage/batch/OperationBatch.js.map +1 -1
  37. package/dist/storage/batch/PostgresBucketBatch.js +286 -213
  38. package/dist/storage/batch/PostgresBucketBatch.js.map +1 -1
  39. package/dist/storage/batch/PostgresPersistedBatch.js +86 -81
  40. package/dist/storage/batch/PostgresPersistedBatch.js.map +1 -1
  41. package/dist/storage/current-data-store.js +270 -0
  42. package/dist/storage/current-data-store.js.map +1 -0
  43. package/dist/storage/current-data-table.js +22 -0
  44. package/dist/storage/current-data-table.js.map +1 -0
  45. package/dist/storage/sync-rules/PostgresPersistedSyncRulesContent.js +14 -30
  46. package/dist/storage/sync-rules/PostgresPersistedSyncRulesContent.js.map +1 -1
  47. package/dist/storage/table-id.js +8 -0
  48. package/dist/storage/table-id.js.map +1 -0
  49. package/dist/types/models/CurrentData.js +11 -2
  50. package/dist/types/models/CurrentData.js.map +1 -1
  51. package/dist/types/models/SyncRules.js +12 -1
  52. package/dist/types/models/SyncRules.js.map +1 -1
  53. package/dist/types/models/json.js +21 -0
  54. package/dist/types/models/json.js.map +1 -0
  55. package/dist/utils/bson.js.map +1 -1
  56. package/dist/utils/db.js +41 -0
  57. package/dist/utils/db.js.map +1 -1
  58. package/dist/utils/test-utils.js +50 -14
  59. package/dist/utils/test-utils.js.map +1 -1
  60. package/package.json +9 -9
  61. package/src/migrations/scripts/1771232439485-storage-version.ts +44 -0
  62. package/src/migrations/scripts/1771424826685-current-data-pending-deletes.ts +10 -0
  63. package/src/migrations/scripts/1771491856000-sync-plan.ts +21 -0
  64. package/src/storage/PostgresBucketStorageFactory.ts +69 -68
  65. package/src/storage/PostgresCompactor.ts +63 -72
  66. package/src/storage/PostgresSyncRulesStorage.ts +30 -17
  67. package/src/storage/batch/OperationBatch.ts +4 -3
  68. package/src/storage/batch/PostgresBucketBatch.ts +306 -238
  69. package/src/storage/batch/PostgresPersistedBatch.ts +92 -84
  70. package/src/storage/current-data-store.ts +326 -0
  71. package/src/storage/current-data-table.ts +26 -0
  72. package/src/storage/sync-rules/PostgresPersistedSyncRulesContent.ts +13 -33
  73. package/src/storage/table-id.ts +9 -0
  74. package/src/types/models/CurrentData.ts +17 -4
  75. package/src/types/models/SyncRules.ts +16 -1
  76. package/src/types/models/json.ts +26 -0
  77. package/src/utils/bson.ts +1 -1
  78. package/src/utils/db.ts +47 -0
  79. package/src/utils/test-utils.ts +42 -15
  80. package/test/src/__snapshots__/storage.test.ts.snap +148 -6
  81. package/test/src/__snapshots__/storage_compacting.test.ts.snap +17 -0
  82. package/test/src/__snapshots__/storage_sync.test.ts.snap +2211 -21
  83. package/test/src/migrations.test.ts +9 -2
  84. package/test/src/storage.test.ts +137 -131
  85. package/test/src/storage_compacting.test.ts +113 -2
  86. package/test/src/storage_sync.test.ts +148 -4
  87. package/test/src/util.ts +5 -2
@@ -55,6 +55,8 @@ import { PostgresMigrationAgent } from '../migrations/PostgresMigrationAgent.js'
55
55
  import { normalizePostgresStorageConfig } from '../types/types.js';
56
56
  import { PostgresReportStorage } from '../storage/PostgresReportStorage.js';
57
57
  import { PostgresBucketStorageFactory } from '../storage/PostgresBucketStorageFactory.js';
58
+ import { logger as defaultLogger, createLogger, transports } from '@powersync/lib-services-framework';
59
+ import { truncateTables } from './db.js';
58
60
  export function postgresTestSetup(factoryOptions) {
59
61
  const BASE_CONFIG = {
60
62
  type: 'postgresql',
@@ -62,7 +64,7 @@ export function postgresTestSetup(factoryOptions) {
62
64
  sslmode: 'disable'
63
65
  };
64
66
  const TEST_CONNECTION_OPTIONS = normalizePostgresStorageConfig(BASE_CONFIG);
65
- const migrate = async (direction) => {
67
+ const runMigrations = async (options) => {
66
68
  const env_1 = { stack: [], error: void 0, hasError: false };
67
69
  try {
68
70
  const migrationManager = __addDisposableResource(env_1, new framework.MigrationManager(), true);
@@ -71,18 +73,28 @@ export function postgresTestSetup(factoryOptions) {
71
73
  : new PostgresMigrationAgent(BASE_CONFIG), true);
72
74
  migrationManager.registerMigrationAgent(migrationAgent);
73
75
  const mockServiceContext = { configuration: { storage: BASE_CONFIG } };
74
- await migrationManager.migrate({
75
- direction: framework.migrations.Direction.Down,
76
- migrationContext: {
77
- service_context: mockServiceContext
78
- }
76
+ // Migration logs can get really verbose in tests, so only log warnings and up.
77
+ const logger = createLogger({
78
+ level: 'warn',
79
+ format: defaultLogger.format,
80
+ transports: [new transports.Console()]
79
81
  });
80
- if (direction == framework.migrations.Direction.Up) {
82
+ if (options.down) {
83
+ await migrationManager.migrate({
84
+ direction: framework.migrations.Direction.Down,
85
+ migrationContext: {
86
+ service_context: mockServiceContext
87
+ },
88
+ logger
89
+ });
90
+ }
91
+ if (options.up) {
81
92
  await migrationManager.migrate({
82
93
  direction: framework.migrations.Direction.Up,
83
94
  migrationContext: {
84
95
  service_context: mockServiceContext
85
- }
96
+ },
97
+ logger
86
98
  });
87
99
  }
88
100
  }
@@ -96,11 +108,37 @@ export function postgresTestSetup(factoryOptions) {
96
108
  await result_1;
97
109
  }
98
110
  };
111
+ const migrate = async (direction) => {
112
+ await runMigrations({
113
+ down: true,
114
+ up: direction == framework.migrations.Direction.Up
115
+ });
116
+ };
117
+ const clearStorage = async () => {
118
+ const env_2 = { stack: [], error: void 0, hasError: false };
119
+ try {
120
+ await runMigrations({ down: false, up: true });
121
+ const storageFactory = __addDisposableResource(env_2, new PostgresBucketStorageFactory({
122
+ config: TEST_CONNECTION_OPTIONS,
123
+ slot_name_prefix: 'test_'
124
+ }), true);
125
+ await truncateTables(storageFactory.db);
126
+ }
127
+ catch (e_2) {
128
+ env_2.error = e_2;
129
+ env_2.hasError = true;
130
+ }
131
+ finally {
132
+ const result_2 = __disposeResources(env_2);
133
+ if (result_2)
134
+ await result_2;
135
+ }
136
+ };
99
137
  return {
100
138
  reportFactory: async (options) => {
101
139
  try {
102
140
  if (!options?.doNotClear) {
103
- await migrate(framework.migrations.Direction.Up);
141
+ await clearStorage();
104
142
  }
105
143
  return new PostgresReportStorage({
106
144
  config: TEST_CONNECTION_OPTIONS
@@ -115,7 +153,7 @@ export function postgresTestSetup(factoryOptions) {
115
153
  factory: async (options) => {
116
154
  try {
117
155
  if (!options?.doNotClear) {
118
- await migrate(framework.migrations.Direction.Up);
156
+ await clearStorage();
119
157
  }
120
158
  return new PostgresBucketStorageFactory({
121
159
  config: TEST_CONNECTION_OPTIONS,
@@ -128,10 +166,8 @@ export function postgresTestSetup(factoryOptions) {
128
166
  throw ex;
129
167
  }
130
168
  },
131
- migrate
169
+ migrate,
170
+ tableIdStrings: true
132
171
  };
133
172
  }
134
- export function postgresTestStorageFactoryGenerator(factoryOptions) {
135
- return postgresTestSetup(factoryOptions).factory;
136
- }
137
173
  //# sourceMappingURL=test-utils.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../../src/utils/test-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,OAAO,EAAE,SAAS,EAAiE,MAAM,yBAAyB,CAAC;AACnH,OAAO,EAAE,sBAAsB,EAAE,MAAM,yCAAyC,CAAC;AACjF,OAAO,EAAE,8BAA8B,EAAgC,MAAM,mBAAmB,CAAC;AACjG,OAAO,EAAE,qBAAqB,EAAE,MAAM,qCAAqC,CAAC;AAC5E,OAAO,EAAE,4BAA4B,EAAE,MAAM,4CAA4C,CAAC;AAW1F,MAAM,UAAU,iBAAiB,CAAC,cAA0C;IAC1E,MAAM,WAAW,GAAG;QAClB,IAAI,EAAE,YAAqB;QAC3B,GAAG,EAAE,cAAc,CAAC,GAAG;QACvB,OAAO,EAAE,SAAkB;KAC5B,CAAC;IAEF,MAAM,uBAAuB,GAAG,8BAA8B,CAAC,WAAW,CAAC,CAAC;IAE5E,MAAM,OAAO,GAAG,KAAK,EAAE,SAAyC,EAAE,EAAE;;;YAClE,MAAY,gBAAgB,kCAA8B,IAAI,SAAS,CAAC,gBAAgB,EAAE,OAAA,CAAC;YAC3F,MAAY,cAAc,kCAAG,cAAc,CAAC,cAAc;gBACxD,CAAC,CAAC,cAAc,CAAC,cAAc,CAAC,WAAW,CAAC;gBAC5C,CAAC,CAAC,IAAI,sBAAsB,CAAC,WAAW,CAAC,OAAA,CAAC;YAC5C,gBAAgB,CAAC,sBAAsB,CAAC,cAAc,CAAC,CAAC;YAExD,MAAM,kBAAkB,GAAG,EAAE,aAAa,EAAE,EAAE,OAAO,EAAE,WAAW,EAAE,EAA+B,CAAC;YAEpG,MAAM,gBAAgB,CAAC,OAAO,CAAC;gBAC7B,SAAS,EAAE,SAAS,CAAC,UAAU,CAAC,SAAS,CAAC,IAAI;gBAC9C,gBAAgB,EAAE;oBAChB,eAAe,EAAE,kBAAkB;iBACpC;aACF,CAAC,CAAC;YAEH,IAAI,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE,EAAE,CAAC;gBACnD,MAAM,gBAAgB,CAAC,OAAO,CAAC;oBAC7B,SAAS,EAAE,SAAS,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;oBAC5C,gBAAgB,EAAE;wBAChB,eAAe,EAAE,kBAAkB;qBACpC;iBACF,CAAC,CAAC;YACL,CAAC;;;;;;;;;;;KACF,CAAC;IAEF,OAAO;QACL,aAAa,EAAE,KAAK,EAAE,OAA4B,EAAE,EAAE;YACpD,IAAI,CAAC;gBACH,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,CAAC;oBACzB,MAAM,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;gBACnD,CAAC;gBAED,OAAO,IAAI,qBAAqB,CAAC;oBAC/B,MAAM,EAAE,uBAAuB;iBAChC,CAAC,CAAC;YACL,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,iFAAiF;gBACjF,OAAO,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC;gBAC5B,MAAM,EAAE,CAAC;YACX,CAAC;QACH,CAAC;QACD,OAAO,EAAE,KAAK,EAAE,OAA4B,EAAE,EAAE;YAC9C,IAAI,CAAC;gBACH,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,CAAC;oBACzB,MAAM,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;gBACnD,CAAC;gBAED,OAAO,IAAI,4BAA4B,CAAC;oBACtC,MAAM,EAAE,uBAAuB;oBAC/B,gBAA
gB,EAAE,OAAO;iBAC1B,CAAC,CAAC;YACL,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,iFAAiF;gBACjF,OAAO,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC;gBAC5B,MAAM,EAAE,CAAC;YACX,CAAC;QACH,CAAC;QACD,OAAO;KACR,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,mCAAmC,CAAC,cAA0C;IAC5F,OAAO,iBAAiB,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC;AACnD,CAAC"}
1
+ {"version":3,"file":"test-utils.js","sourceRoot":"","sources":["../../src/utils/test-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,OAAO,EAAE,SAAS,EAAiE,MAAM,yBAAyB,CAAC;AACnH,OAAO,EAAE,sBAAsB,EAAE,MAAM,yCAAyC,CAAC;AACjF,OAAO,EAAE,8BAA8B,EAAgC,MAAM,mBAAmB,CAAC;AACjG,OAAO,EAAE,qBAAqB,EAAE,MAAM,qCAAqC,CAAC;AAC5E,OAAO,EAAE,4BAA4B,EAAE,MAAM,4CAA4C,CAAC;AAC1F,OAAO,EAAE,MAAM,IAAI,aAAa,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,mCAAmC,CAAC;AACtG,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAWzC,MAAM,UAAU,iBAAiB,CAAC,cAA0C;IAC1E,MAAM,WAAW,GAAG;QAClB,IAAI,EAAE,YAAqB;QAC3B,GAAG,EAAE,cAAc,CAAC,GAAG;QACvB,OAAO,EAAE,SAAkB;KAC5B,CAAC;IAEF,MAAM,uBAAuB,GAAG,8BAA8B,CAAC,WAAW,CAAC,CAAC;IAE5E,MAAM,aAAa,GAAG,KAAK,EAAE,OAAuC,EAAE,EAAE;;;YACtE,MAAY,gBAAgB,kCAA8B,IAAI,SAAS,CAAC,gBAAgB,EAAE,OAAA,CAAC;YAC3F,MAAY,cAAc,kCAAG,cAAc,CAAC,cAAc;gBACxD,CAAC,CAAC,cAAc,CAAC,cAAc,CAAC,WAAW,CAAC;gBAC5C,CAAC,CAAC,IAAI,sBAAsB,CAAC,WAAW,CAAC,OAAA,CAAC;YAC5C,gBAAgB,CAAC,sBAAsB,CAAC,cAAc,CAAC,CAAC;YAExD,MAAM,kBAAkB,GAAG,EAAE,aAAa,EAAE,EAAE,OAAO,EAAE,WAAW,EAAE,EAA+B,CAAC;YAEpG,+EAA+E;YAC/E,MAAM,MAAM,GAAG,YAAY,CAAC;gBAC1B,KAAK,EAAE,MAAM;gBACb,MAAM,EAAE,aAAa,CAAC,MAAM;gBAC5B,UAAU,EAAE,CAAC,IAAI,UAAU,CAAC,OAAO,EAAE,CAAC;aACvC,CAAC,CAAC;YAEH,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;gBACjB,MAAM,gBAAgB,CAAC,OAAO,CAAC;oBAC7B,SAAS,EAAE,SAAS,CAAC,UAAU,CAAC,SAAS,CAAC,IAAI;oBAC9C,gBAAgB,EAAE;wBAChB,eAAe,EAAE,kBAAkB;qBACpC;oBACD,MAAM;iBACP,CAAC,CAAC;YACL,CAAC;YAED,IAAI,OAAO,CAAC,EAAE,EAAE,CAAC;gBACf,MAAM,gBAAgB,CAAC,OAAO,CAAC;oBAC7B,SAAS,EAAE,SAAS,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;oBAC5C,gBAAgB,EAAE;wBAChB,eAAe,EAAE,kBAAkB;qBACpC;oBACD,MAAM;iBACP,CAAC,CAAC;YACL,CAAC;;;;;;;;;;;KACF,CAAC;IAEF,MAAM,OAAO,GAAG,KAAK,EAAE,SAAyC,EAAE,EAAE;QAClE,MAAM,aAAa,CAAC;YAClB,IAAI,EAAE,IAAI;YACV,EAAE,EAAE,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;SACnD,CAAC,CAAC;IACL,CAAC,CAAC;IAEF,MAAM,YAAY,GAAG,KAAK,IAAI,EAAE;;;YAC9B,MAAM,aAAa,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC;YAE/C,MAAY,cAAc,kCAAG,IAAI,4BAA4B,CAAC;
gBAC5D,MAAM,EAAE,uBAAuB;gBAC/B,gBAAgB,EAAE,OAAO;aAC1B,CAAC,OAAA,CAAC;YACH,MAAM,cAAc,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;;;;;;;;;;;KACzC,CAAC;IAEF,OAAO;QACL,aAAa,EAAE,KAAK,EAAE,OAA4B,EAAE,EAAE;YACpD,IAAI,CAAC;gBACH,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,CAAC;oBACzB,MAAM,YAAY,EAAE,CAAC;gBACvB,CAAC;gBAED,OAAO,IAAI,qBAAqB,CAAC;oBAC/B,MAAM,EAAE,uBAAuB;iBAChC,CAAC,CAAC;YACL,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,iFAAiF;gBACjF,OAAO,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC;gBAC5B,MAAM,EAAE,CAAC;YACX,CAAC;QACH,CAAC;QACD,OAAO,EAAE,KAAK,EAAE,OAA4B,EAAE,EAAE;YAC9C,IAAI,CAAC;gBACH,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,CAAC;oBACzB,MAAM,YAAY,EAAE,CAAC;gBACvB,CAAC;gBAED,OAAO,IAAI,4BAA4B,CAAC;oBACtC,MAAM,EAAE,uBAAuB;oBAC/B,gBAAgB,EAAE,OAAO;iBAC1B,CAAC,CAAC;YACL,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,iFAAiF;gBACjF,OAAO,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC;gBAC5B,MAAM,EAAE,CAAC;YACX,CAAC;QACH,CAAC;QACD,OAAO;QACP,cAAc,EAAE,IAAI;KACrB,CAAC;AACJ,CAAC"}
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "@powersync/service-module-postgres-storage",
3
3
  "repository": "https://github.com/powersync-ja/powersync-service",
4
4
  "types": "dist/@types/index.d.ts",
5
- "version": "0.11.2",
5
+ "version": "0.13.0",
6
6
  "main": "dist/index.js",
7
7
  "license": "FSL-1.1-ALv2",
8
8
  "type": "module",
@@ -29,22 +29,22 @@
29
29
  "p-defer": "^4.0.1",
30
30
  "ts-codec": "^1.3.0",
31
31
  "uuid": "^11.1.0",
32
- "@powersync/lib-service-postgres": "0.4.21",
33
- "@powersync/lib-services-framework": "0.8.2",
34
- "@powersync/service-core": "1.19.2",
35
- "@powersync/service-types": "0.14.0",
36
- "@powersync/service-jpgwire": "0.21.12",
32
+ "@powersync/lib-service-postgres": "0.4.23",
33
+ "@powersync/lib-services-framework": "0.9.0",
34
+ "@powersync/service-core": "1.20.1",
35
+ "@powersync/service-types": "0.15.0",
36
+ "@powersync/service-jpgwire": "0.21.14",
37
37
  "@powersync/service-jsonbig": "0.17.12",
38
- "@powersync/service-sync-rules": "0.31.1"
38
+ "@powersync/service-sync-rules": "0.33.0"
39
39
  },
40
40
  "devDependencies": {
41
41
  "typescript": "^5.7.3",
42
- "@powersync/service-core-tests": "0.13.2"
42
+ "@powersync/service-core-tests": "0.15.0"
43
43
  },
44
44
  "scripts": {
45
45
  "build": "tsc -b",
46
46
  "build:tests": "tsc -b test/tsconfig.json",
47
- "clean": "rm -rf ./lib && tsc -b --clean",
47
+ "clean": "rm -rf ./dist && tsc -b --clean",
48
48
  "test": "vitest"
49
49
  }
50
50
  }
@@ -0,0 +1,44 @@
1
+ import { migrations, storage } from '@powersync/service-core';
2
+ import { openMigrationDB } from '../migration-utils.js';
3
+
4
+ export const up: migrations.PowerSyncMigrationFunction = async (context) => {
5
+ const {
6
+ service_context: { configuration }
7
+ } = context;
8
+ await using client = openMigrationDB(configuration.storage);
9
+ await client.transaction(async (db) => {
10
+ await db.sql`
11
+ ALTER TABLE sync_rules
12
+ ADD COLUMN storage_version integer NOT NULL DEFAULT 1
13
+ `.execute();
14
+ });
15
+ };
16
+
17
+ export const down: migrations.PowerSyncMigrationFunction = async (context) => {
18
+ const {
19
+ service_context: { configuration }
20
+ } = context;
21
+ await using client = openMigrationDB(configuration.storage);
22
+ await client.transaction(async (db) => {
23
+ const newRules = await db.sql`
24
+ SELECT
25
+ id,
26
+ storage_version
27
+ FROM
28
+ sync_rules
29
+ WHERE
30
+ storage_version > ${{ type: 'int4', value: storage.LEGACY_STORAGE_VERSION }}
31
+ `.rows<{ id: number | bigint; storage_version: number | bigint }>();
32
+
33
+ if (newRules.length > 0) {
34
+ throw new Error(
35
+ `Cannot revert migration due to newer storage versions in use: ${newRules.map((r) => `${r.id}: v${r.storage_version}`).join(', ')}`
36
+ );
37
+ }
38
+
39
+ await db.sql`
40
+ ALTER TABLE sync_rules
41
+ DROP COLUMN storage_version
42
+ `.execute();
43
+ });
44
+ };
@@ -0,0 +1,10 @@
1
+ import { migrations } from '@powersync/service-core';
2
+
3
+ export const up: migrations.PowerSyncMigrationFunction = async (_context) => {
4
+ // No-op.
5
+ // Pending-delete support is now storage-version specific and initialized when v3 sync rules are deployed.
6
+ };
7
+
8
+ export const down: migrations.PowerSyncMigrationFunction = async (_context) => {
9
+ // No-op.
10
+ };
@@ -0,0 +1,21 @@
1
+ import { migrations } from '@powersync/service-core';
2
+ import { openMigrationDB } from '../migration-utils.js';
3
+
4
+ export const up: migrations.PowerSyncMigrationFunction = async (context) => {
5
+ const {
6
+ service_context: { configuration }
7
+ } = context;
8
+ await using client = openMigrationDB(configuration.storage);
9
+ await client.sql`
10
+ ALTER TABLE sync_rules
11
+ ADD COLUMN sync_plan JSON;
12
+ `.execute();
13
+ };
14
+
15
+ export const down: migrations.PowerSyncMigrationFunction = async (context) => {
16
+ const {
17
+ service_context: { configuration }
18
+ } = context;
19
+ await using client = openMigrationDB(configuration.storage);
20
+ await client.sql`ALTER TABLE sync_rules DROP COLUMN sync_plan`.execute();
21
+ };
@@ -1,28 +1,23 @@
1
- import * as framework from '@powersync/lib-services-framework';
2
- import { GetIntanceOptions, storage, SyncRulesBucketStorage, UpdateSyncRulesOptions } from '@powersync/service-core';
1
+ import { framework, GetIntanceOptions, storage, SyncRulesBucketStorage } from '@powersync/service-core';
3
2
  import * as pg_wire from '@powersync/service-jpgwire';
4
- import * as sync_rules from '@powersync/service-sync-rules';
5
3
  import crypto from 'crypto';
6
4
  import * as uuid from 'uuid';
7
5
 
8
6
  import * as lib_postgres from '@powersync/lib-service-postgres';
9
7
  import { models, NormalizedPostgresStorageConfig } from '../types/types.js';
10
8
 
9
+ import { getStorageApplicationName } from '../utils/application-name.js';
11
10
  import { NOTIFICATION_CHANNEL, STORAGE_SCHEMA_NAME } from '../utils/db.js';
12
11
  import { notifySyncRulesUpdate } from './batch/PostgresBucketBatch.js';
13
12
  import { PostgresSyncRulesStorage } from './PostgresSyncRulesStorage.js';
14
13
  import { PostgresPersistedSyncRulesContent } from './sync-rules/PostgresPersistedSyncRulesContent.js';
15
- import { getStorageApplicationName } from '../utils/application-name.js';
16
14
 
17
15
  export type PostgresBucketStorageOptions = {
18
16
  config: NormalizedPostgresStorageConfig;
19
17
  slot_name_prefix: string;
20
18
  };
21
19
 
22
- export class PostgresBucketStorageFactory
23
- extends framework.BaseObserver<storage.BucketStorageFactoryListener>
24
- implements storage.BucketStorageFactory
25
- {
20
+ export class PostgresBucketStorageFactory extends storage.BucketStorageFactory {
26
21
  readonly db: lib_postgres.DatabaseClient;
27
22
  public readonly slot_name_prefix: string;
28
23
 
@@ -87,15 +82,27 @@ export class PostgresBucketStorageFactory
87
82
 
88
83
  const sizes = await this.db.sql`
89
84
  SELECT
90
- pg_total_relation_size('current_data') AS current_size_bytes,
85
+ COALESCE(
86
+ pg_total_relation_size(to_regclass('current_data')),
87
+ 0
88
+ ) AS v1_current_size_bytes,
89
+ COALESCE(
90
+ pg_total_relation_size(to_regclass('v3_current_data')),
91
+ 0
92
+ ) AS v3_current_size_bytes,
91
93
  pg_total_relation_size('bucket_parameters') AS parameter_size_bytes,
92
94
  pg_total_relation_size('bucket_data') AS operations_size_bytes;
93
- `.first<{ current_size_bytes: bigint; parameter_size_bytes: bigint; operations_size_bytes: bigint }>();
95
+ `.first<{
96
+ v1_current_size_bytes: bigint;
97
+ v3_current_size_bytes: bigint;
98
+ parameter_size_bytes: bigint;
99
+ operations_size_bytes: bigint;
100
+ }>();
94
101
 
95
102
  return {
96
103
  operations_size_bytes: Number(sizes!.operations_size_bytes),
97
104
  parameters_size_bytes: Number(sizes!.parameter_size_bytes),
98
- replication_size_bytes: Number(sizes!.current_size_bytes)
105
+ replication_size_bytes: Number(sizes!.v1_current_size_bytes) + Number(sizes!.v3_current_size_bytes)
99
106
  };
100
107
  }
101
108
 
@@ -145,42 +152,16 @@ export class PostgresBucketStorageFactory
145
152
  };
146
153
  }
147
154
 
148
- // TODO possibly share implementation in abstract class
149
- async configureSyncRules(options: UpdateSyncRulesOptions): Promise<{
150
- updated: boolean;
151
- persisted_sync_rules?: storage.PersistedSyncRulesContent;
152
- lock?: storage.ReplicationLock;
153
- }> {
154
- const next = await this.getNextSyncRulesContent();
155
- const active = await this.getActiveSyncRulesContent();
156
-
157
- if (next?.sync_rules_content == options.content) {
158
- framework.logger.info('Sync rules from configuration unchanged');
159
- return { updated: false };
160
- } else if (next == null && active?.sync_rules_content == options.content) {
161
- framework.logger.info('Sync rules from configuration unchanged');
162
- return { updated: false };
163
- } else {
164
- framework.logger.info('Sync rules updated from configuration');
165
- const persisted_sync_rules = await this.updateSyncRules(options);
166
- return { updated: true, persisted_sync_rules, lock: persisted_sync_rules.current_lock ?? undefined };
167
- }
168
- }
169
-
170
155
  async updateSyncRules(options: storage.UpdateSyncRulesOptions): Promise<PostgresPersistedSyncRulesContent> {
171
- // TODO some shared implementation for this might be nice
172
- if (options.validate) {
173
- // Parse and validate before applying any changes
174
- sync_rules.SqlSyncRules.fromYaml(options.content, {
175
- // No schema-based validation at this point
176
- schema: undefined,
177
- defaultSchema: 'not_applicable', // Not needed for validation
178
- throwOnError: true
179
- });
180
- } else {
181
- // Apply unconditionally. Any errors will be reported via the diagnostics API.
156
+ const storageVersion = options.storageVersion ?? storage.CURRENT_STORAGE_VERSION;
157
+ const storageConfig = storage.STORAGE_VERSION_CONFIG[storageVersion];
158
+ if (storageConfig == null) {
159
+ throw new framework.ServiceError(
160
+ framework.ErrorCode.PSYNC_S1005,
161
+ `Unsupported storage version ${storageVersion}`
162
+ );
182
163
  }
183
-
164
+ await this.initializeStorageVersion(storageConfig);
184
165
  return this.db.transaction(async (db) => {
185
166
  await db.sql`
186
167
  UPDATE sync_rules
@@ -197,7 +178,14 @@ export class PostgresBucketStorageFactory
197
178
  nextval('sync_rules_id_sequence') AS id
198
179
  )
199
180
  INSERT INTO
200
- sync_rules (id, content, state, slot_name)
181
+ sync_rules (
182
+ id,
183
+ content,
184
+ sync_plan,
185
+ state,
186
+ slot_name,
187
+ storage_version
188
+ )
201
189
  VALUES
202
190
  (
203
191
  (
@@ -206,7 +194,8 @@ export class PostgresBucketStorageFactory
206
194
  FROM
207
195
  next_id
208
196
  ),
209
- ${{ type: 'varchar', value: options.content }},
197
+ ${{ type: 'varchar', value: options.config.yaml }},
198
+ ${{ type: 'json', value: options.config.plan }},
210
199
  ${{ type: 'varchar', value: storage.SyncRuleState.PROCESSING }},
211
200
  CONCAT(
212
201
  ${{ type: 'varchar', value: this.slot_name_prefix }},
@@ -218,7 +207,8 @@ export class PostgresBucketStorageFactory
218
207
  ),
219
208
  '_',
220
209
  ${{ type: 'varchar', value: crypto.randomBytes(2).toString('hex') }}
221
- )
210
+ ),
211
+ ${{ type: 'int4', value: storageVersion }}
222
212
  )
223
213
  RETURNING
224
214
  *
@@ -232,6 +222,34 @@ export class PostgresBucketStorageFactory
232
222
  });
233
223
  }
234
224
 
225
+ /**
226
+ * Lazy-initializes storage-version-specific structures, if needed.
227
+ */
228
+ private async initializeStorageVersion(storageConfig: storage.StorageVersionConfig) {
229
+ if (!storageConfig.softDeleteCurrentData) {
230
+ return;
231
+ }
232
+
233
+ await this.db.sql`
234
+ CREATE TABLE IF NOT EXISTS v3_current_data (
235
+ group_id integer NOT NULL,
236
+ source_table TEXT NOT NULL,
237
+ source_key bytea NOT NULL,
238
+ CONSTRAINT unique_v3_current_data_id PRIMARY KEY (group_id, source_table, source_key),
239
+ buckets jsonb NOT NULL,
240
+ data bytea NOT NULL,
241
+ lookups bytea[] NOT NULL,
242
+ pending_delete BIGINT NULL
243
+ )
244
+ `.execute();
245
+
246
+ await this.db.sql`
247
+ CREATE INDEX IF NOT EXISTS v3_current_data_pending_deletes ON v3_current_data (group_id, pending_delete)
248
+ WHERE
249
+ pending_delete IS NOT NULL
250
+ `.execute();
251
+ }
252
+
235
253
  async restartReplication(sync_rules_group_id: number): Promise<void> {
236
254
  const next = await this.getNextSyncRulesContent();
237
255
  const active = await this.getActiveSyncRulesContent();
@@ -240,10 +258,8 @@ export class PostgresBucketStorageFactory
240
258
  // The current one will continue serving sync requests until the next one has finished processing.
241
259
  if (next != null && next.id == sync_rules_group_id) {
242
260
  // We need to redo the "next" sync rules
243
- await this.updateSyncRules({
244
- content: next.sync_rules_content,
245
- validate: false
246
- });
261
+
262
+ await this.updateSyncRules(next.asUpdateOptions());
247
263
  // Pro-actively stop replicating
248
264
  await this.db.sql`
249
265
  UPDATE sync_rules
@@ -255,10 +271,7 @@ export class PostgresBucketStorageFactory
255
271
  `.execute();
256
272
  } else if (next == null && active?.id == sync_rules_group_id) {
257
273
  // Slot removed for "active" sync rules, while there is no "next" one.
258
- await this.updateSyncRules({
259
- content: active.sync_rules_content,
260
- validate: false
261
- });
274
+ await this.updateSyncRules(active.asUpdateOptions());
262
275
 
263
276
  // Pro-actively stop replicating, but still serve clients with existing data
264
277
  await this.db.sql`
@@ -284,12 +297,6 @@ export class PostgresBucketStorageFactory
284
297
  }
285
298
  }
286
299
 
287
- // TODO possibly share via abstract class
288
- async getActiveSyncRules(options: storage.ParseSyncRulesOptions): Promise<storage.PersistedSyncRules | null> {
289
- const content = await this.getActiveSyncRulesContent();
290
- return content?.parsed(options) ?? null;
291
- }
292
-
293
300
  async getActiveSyncRulesContent(): Promise<storage.PersistedSyncRulesContent | null> {
294
301
  const activeRow = await this.db.sql`
295
302
  SELECT
@@ -313,12 +320,6 @@ export class PostgresBucketStorageFactory
313
320
  return new PostgresPersistedSyncRulesContent(this.db, activeRow);
314
321
  }
315
322
 
316
- // TODO possibly share via abstract class
317
- async getNextSyncRules(options: storage.ParseSyncRulesOptions): Promise<storage.PersistedSyncRules | null> {
318
- const content = await this.getNextSyncRulesContent();
319
- return content?.parsed(options) ?? null;
320
- }
321
-
322
323
  async getNextSyncRulesContent(): Promise<storage.PersistedSyncRulesContent | null> {
323
324
  const nextRow = await this.db.sql`
324
325
  SELECT
@@ -51,20 +51,20 @@ export class PostgresCompactor {
51
51
  private moveBatchLimit: number;
52
52
  private moveBatchQueryLimit: number;
53
53
  private clearBatchLimit: number;
54
- private maxOpId: InternalOpId | undefined;
54
+ private maxOpId: InternalOpId;
55
55
  private buckets: string[] | undefined;
56
56
 
57
57
  constructor(
58
58
  private db: lib_postgres.DatabaseClient,
59
59
  private group_id: number,
60
- options?: PostgresCompactOptions
60
+ options: PostgresCompactOptions
61
61
  ) {
62
- this.idLimitBytes = (options?.memoryLimitMB ?? DEFAULT_MEMORY_LIMIT_MB) * 1024 * 1024;
63
- this.moveBatchLimit = options?.moveBatchLimit ?? DEFAULT_MOVE_BATCH_LIMIT;
64
- this.moveBatchQueryLimit = options?.moveBatchQueryLimit ?? DEFAULT_MOVE_BATCH_QUERY_LIMIT;
65
- this.clearBatchLimit = options?.clearBatchLimit ?? DEFAULT_CLEAR_BATCH_LIMIT;
66
- this.maxOpId = options?.maxOpId;
67
- this.buckets = options?.compactBuckets;
62
+ this.idLimitBytes = (options.memoryLimitMB ?? DEFAULT_MEMORY_LIMIT_MB) * 1024 * 1024;
63
+ this.moveBatchLimit = options.moveBatchLimit ?? DEFAULT_MOVE_BATCH_LIMIT;
64
+ this.moveBatchQueryLimit = options.moveBatchQueryLimit ?? DEFAULT_MOVE_BATCH_QUERY_LIMIT;
65
+ this.clearBatchLimit = options.clearBatchLimit ?? DEFAULT_CLEAR_BATCH_LIMIT;
66
+ this.maxOpId = options.maxOpId ?? 0n;
67
+ this.buckets = options.compactBuckets;
68
68
  }
69
69
 
70
70
  /**
@@ -75,37 +75,54 @@ export class PostgresCompactor {
75
75
  async compact() {
76
76
  if (this.buckets) {
77
77
  for (let bucket of this.buckets) {
78
- // We can make this more efficient later on by iterating
79
- // through the buckets in a single query.
80
- // That makes batching more tricky, so we leave for later.
81
- await this.compactInternal(bucket);
78
+ await this.compactSingleBucket(bucket);
82
79
  }
83
80
  } else {
84
- await this.compactInternal(undefined);
81
+ await this.compactAllBuckets();
85
82
  }
86
83
  }
87
84
 
88
- async compactInternal(bucket: string | undefined) {
89
- const idLimitBytes = this.idLimitBytes;
85
+ private async compactAllBuckets() {
86
+ const DISCOVERY_BATCH_SIZE = 200;
87
+ let lastBucket = '';
88
+
89
+ while (true) {
90
+ const bucketRows = (await this.db.sql`
91
+ SELECT DISTINCT
92
+ bucket_name
93
+ FROM
94
+ bucket_data
95
+ WHERE
96
+ group_id = ${{ type: 'int4', value: this.group_id }}
97
+ AND bucket_name > ${{ type: 'varchar', value: lastBucket }}
98
+ ORDER BY
99
+ bucket_name ASC
100
+ LIMIT
101
+ ${{ type: 'int4', value: DISCOVERY_BATCH_SIZE }}
102
+ `.rows()) as { bucket_name: string }[];
90
103
 
91
- let currentState: CurrentBucketState | null = null;
92
-
93
- let bucketLower: string | null = null;
94
- let bucketUpper: string | null = null;
95
- const MAX_CHAR = String.fromCodePoint(0xffff);
96
-
97
- if (bucket == null) {
98
- bucketLower = '';
99
- bucketUpper = MAX_CHAR;
100
- } else if (bucket?.includes('[')) {
101
- // Exact bucket name
102
- bucketLower = bucket;
103
- bucketUpper = bucket;
104
- } else if (bucket) {
105
- // Bucket definition name
106
- bucketLower = `${bucket}[`;
107
- bucketUpper = `${bucket}[${MAX_CHAR}`;
104
+ if (bucketRows.length === 0) {
105
+ break;
106
+ }
107
+
108
+ for (const row of bucketRows) {
109
+ await this.compactSingleBucket(row.bucket_name);
110
+ }
111
+
112
+ lastBucket = bucketRows[bucketRows.length - 1].bucket_name;
108
113
  }
114
+ }
115
+
116
+ private async compactSingleBucket(bucket: string) {
117
+ const idLimitBytes = this.idLimitBytes;
118
+
119
+ let currentState: CurrentBucketState = {
120
+ bucket: bucket,
121
+ seen: new Map(),
122
+ trackingSize: 0,
123
+ lastNotPut: null,
124
+ opsSincePut: 0
125
+ };
109
126
 
110
127
  let upperOpIdLimit = BIGINT_MAX;
111
128
 
@@ -123,16 +140,9 @@ export class PostgresCompactor {
123
140
  bucket_data
124
141
  WHERE
125
142
  group_id = ${{ type: 'int4', value: this.group_id }}
126
- AND bucket_name >= ${{ type: 'varchar', value: bucketLower }}
127
- AND (
128
- (
129
- bucket_name = ${{ type: 'varchar', value: bucketUpper }}
130
- AND op_id < ${{ type: 'int8', value: upperOpIdLimit }}
131
- )
132
- OR bucket_name < ${{ type: 'varchar', value: bucketUpper }} COLLATE "C" -- Use binary comparison
133
- )
143
+ AND bucket_name = ${{ type: 'varchar', value: bucket }}
144
+ AND op_id < ${{ type: 'int8', value: upperOpIdLimit }}
134
145
  ORDER BY
135
- bucket_name DESC,
136
146
  op_id DESC
137
147
  LIMIT
138
148
  ${{ type: 'int4', value: this.moveBatchQueryLimit }}
@@ -150,32 +160,8 @@ export class PostgresCompactor {
150
160
  // Set upperBound for the next batch
151
161
  const lastBatchItem = batch[batch.length - 1];
152
162
  upperOpIdLimit = lastBatchItem.op_id;
153
- bucketUpper = lastBatchItem.bucket_name;
154
163
 
155
164
  for (const doc of batch) {
156
- if (currentState == null || doc.bucket_name != currentState.bucket) {
157
- if (currentState != null && currentState.lastNotPut != null && currentState.opsSincePut >= 1) {
158
- // Important to flush before clearBucket()
159
- await this.flush();
160
- logger.info(
161
- `Inserting CLEAR at ${this.group_id}:${currentState.bucket}:${currentState.lastNotPut} to remove ${currentState.opsSincePut} operations`
162
- );
163
-
164
- const bucket = currentState.bucket;
165
- const clearOp = currentState.lastNotPut;
166
- // Free memory before clearing bucket
167
- currentState = null;
168
- await this.clearBucket(bucket, clearOp);
169
- }
170
- currentState = {
171
- bucket: doc.bucket_name,
172
- seen: new Map(),
173
- trackingSize: 0,
174
- lastNotPut: null,
175
- opsSincePut: 0
176
- };
177
- }
178
-
179
165
  if (this.maxOpId != null && doc.op_id > this.maxOpId) {
180
166
  continue;
181
167
  }
@@ -237,16 +223,12 @@ export class PostgresCompactor {
237
223
  }
238
224
 
239
225
  await this.flush();
240
- currentState?.seen.clear();
241
- if (currentState?.lastNotPut != null && currentState?.opsSincePut > 1) {
226
+ currentState.seen.clear();
227
+ if (currentState.lastNotPut != null && currentState.opsSincePut > 1) {
242
228
  logger.info(
243
229
  `Inserting CLEAR at ${this.group_id}:${currentState.bucket}:${currentState.lastNotPut} to remove ${currentState.opsSincePut} operations`
244
230
  );
245
- const bucket = currentState.bucket;
246
- const clearOp = currentState.lastNotPut;
247
- // Free memory before clearing bucket
248
- currentState = null;
249
- await this.clearBucket(bucket, clearOp);
231
+ await this.clearBucket(currentState.bucket, currentState.lastNotPut);
250
232
  }
251
233
  }
252
234
 
@@ -258,6 +240,15 @@ export class PostgresCompactor {
258
240
  }
259
241
  }
260
242
 
243
+ /**
244
+ * Expose the internal clearBucket() method to tests.
245
+ *
246
+ * @deprecated Only for tests
247
+ */
248
+ clearBucketForTests(bucket: string, op: InternalOpId) {
249
+ return this.clearBucket(bucket, op);
250
+ }
251
+
261
252
  /**
262
253
  * Perform a CLEAR compact for a bucket.
263
254
  *