@powersync/service-core 0.2.2 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (262)
  1. package/CHANGELOG.md +31 -0
  2. package/dist/api/diagnostics.js +2 -2
  3. package/dist/api/diagnostics.js.map +1 -1
  4. package/dist/api/schema.js.map +1 -1
  5. package/dist/auth/CachedKeyCollector.js.map +1 -1
  6. package/dist/auth/JwtPayload.d.ts +6 -2
  7. package/dist/auth/KeySpec.js.map +1 -1
  8. package/dist/auth/KeyStore.js +3 -9
  9. package/dist/auth/KeyStore.js.map +1 -1
  10. package/dist/auth/LeakyBucket.js.map +1 -1
  11. package/dist/auth/RemoteJWKSCollector.js.map +1 -1
  12. package/dist/auth/SupabaseKeyCollector.js.map +1 -1
  13. package/dist/db/mongo.js.map +1 -1
  14. package/dist/entry/cli-entry.js +2 -2
  15. package/dist/entry/cli-entry.js.map +1 -1
  16. package/dist/entry/commands/config-command.js.map +1 -1
  17. package/dist/entry/commands/migrate-action.js +12 -4
  18. package/dist/entry/commands/migrate-action.js.map +1 -1
  19. package/dist/entry/commands/start-action.js.map +1 -1
  20. package/dist/entry/commands/teardown-action.js.map +1 -1
  21. package/dist/index.d.ts +3 -2
  22. package/dist/index.js +4 -2
  23. package/dist/index.js.map +1 -1
  24. package/dist/locks/LockManager.d.ts +10 -0
  25. package/dist/locks/LockManager.js +7 -0
  26. package/dist/locks/LockManager.js.map +1 -0
  27. package/dist/locks/MongoLocks.d.ts +36 -0
  28. package/dist/locks/MongoLocks.js +81 -0
  29. package/dist/locks/MongoLocks.js.map +1 -0
  30. package/dist/locks/locks-index.d.ts +2 -0
  31. package/dist/locks/locks-index.js +3 -0
  32. package/dist/locks/locks-index.js.map +1 -0
  33. package/dist/metrics/Metrics.js +6 -6
  34. package/dist/metrics/Metrics.js.map +1 -1
  35. package/dist/migrations/db/migrations/1684951997326-init.js.map +1 -1
  36. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -1
  37. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +1 -1
  38. package/dist/migrations/definitions.d.ts +18 -0
  39. package/dist/migrations/definitions.js +6 -0
  40. package/dist/migrations/definitions.js.map +1 -0
  41. package/dist/migrations/executor.d.ts +16 -0
  42. package/dist/migrations/executor.js +64 -0
  43. package/dist/migrations/executor.js.map +1 -0
  44. package/dist/migrations/migrations-index.d.ts +3 -0
  45. package/dist/migrations/migrations-index.js +4 -0
  46. package/dist/migrations/migrations-index.js.map +1 -0
  47. package/dist/migrations/migrations.d.ts +1 -1
  48. package/dist/migrations/migrations.js +12 -8
  49. package/dist/migrations/migrations.js.map +1 -1
  50. package/dist/migrations/store/migration-store.d.ts +11 -0
  51. package/dist/migrations/store/migration-store.js +46 -0
  52. package/dist/migrations/store/migration-store.js.map +1 -0
  53. package/dist/replication/ErrorRateLimiter.js.map +1 -1
  54. package/dist/replication/PgRelation.js.map +1 -1
  55. package/dist/replication/WalConnection.js.map +1 -1
  56. package/dist/replication/WalStream.d.ts +0 -1
  57. package/dist/replication/WalStream.js +21 -25
  58. package/dist/replication/WalStream.js.map +1 -1
  59. package/dist/replication/WalStreamManager.js +12 -13
  60. package/dist/replication/WalStreamManager.js.map +1 -1
  61. package/dist/replication/WalStreamRunner.js +8 -8
  62. package/dist/replication/WalStreamRunner.js.map +1 -1
  63. package/dist/replication/util.js.map +1 -1
  64. package/dist/routes/auth.d.ts +8 -10
  65. package/dist/routes/auth.js.map +1 -1
  66. package/dist/routes/endpoints/admin.d.ts +1011 -0
  67. package/dist/routes/{admin.js → endpoints/admin.js} +33 -18
  68. package/dist/routes/endpoints/admin.js.map +1 -0
  69. package/dist/routes/endpoints/checkpointing.d.ts +76 -0
  70. package/dist/routes/endpoints/checkpointing.js +36 -0
  71. package/dist/routes/endpoints/checkpointing.js.map +1 -0
  72. package/dist/routes/endpoints/dev.d.ts +312 -0
  73. package/dist/routes/{dev.js → endpoints/dev.js} +25 -16
  74. package/dist/routes/endpoints/dev.js.map +1 -0
  75. package/dist/routes/endpoints/route-endpoints-index.d.ts +6 -0
  76. package/dist/routes/endpoints/route-endpoints-index.js +7 -0
  77. package/dist/routes/endpoints/route-endpoints-index.js.map +1 -0
  78. package/dist/routes/endpoints/socket-route.d.ts +2 -0
  79. package/dist/routes/{socket-route.js → endpoints/socket-route.js} +12 -12
  80. package/dist/routes/endpoints/socket-route.js.map +1 -0
  81. package/dist/routes/endpoints/sync-rules.d.ts +174 -0
  82. package/dist/routes/{sync-rules.js → endpoints/sync-rules.js} +44 -24
  83. package/dist/routes/endpoints/sync-rules.js.map +1 -0
  84. package/dist/routes/endpoints/sync-stream.d.ts +132 -0
  85. package/dist/routes/{sync-stream.js → endpoints/sync-stream.js} +28 -19
  86. package/dist/routes/endpoints/sync-stream.js.map +1 -0
  87. package/dist/routes/hooks.d.ts +10 -0
  88. package/dist/routes/hooks.js +31 -0
  89. package/dist/routes/hooks.js.map +1 -0
  90. package/dist/routes/route-register.d.ts +10 -0
  91. package/dist/routes/route-register.js +87 -0
  92. package/dist/routes/route-register.js.map +1 -0
  93. package/dist/routes/router.d.ts +16 -4
  94. package/dist/routes/router.js +6 -1
  95. package/dist/routes/router.js.map +1 -1
  96. package/dist/routes/routes-index.d.ts +5 -3
  97. package/dist/routes/routes-index.js +5 -3
  98. package/dist/routes/routes-index.js.map +1 -1
  99. package/dist/runner/teardown.js +9 -9
  100. package/dist/runner/teardown.js.map +1 -1
  101. package/dist/storage/BucketStorage.d.ts +3 -0
  102. package/dist/storage/BucketStorage.js.map +1 -1
  103. package/dist/storage/ChecksumCache.js.map +1 -1
  104. package/dist/storage/MongoBucketStorage.js +5 -5
  105. package/dist/storage/MongoBucketStorage.js.map +1 -1
  106. package/dist/storage/SourceTable.js.map +1 -1
  107. package/dist/storage/mongo/MongoBucketBatch.js +23 -18
  108. package/dist/storage/mongo/MongoBucketBatch.js.map +1 -1
  109. package/dist/storage/mongo/MongoIdSequence.js.map +1 -1
  110. package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
  111. package/dist/storage/mongo/MongoSyncRulesLock.js +3 -3
  112. package/dist/storage/mongo/MongoSyncRulesLock.js.map +1 -1
  113. package/dist/storage/mongo/OperationBatch.js.map +1 -1
  114. package/dist/storage/mongo/PersistedBatch.js +2 -2
  115. package/dist/storage/mongo/PersistedBatch.js.map +1 -1
  116. package/dist/storage/mongo/db.d.ts +2 -2
  117. package/dist/storage/mongo/db.js.map +1 -1
  118. package/dist/storage/mongo/util.js.map +1 -1
  119. package/dist/sync/BroadcastIterable.js.map +1 -1
  120. package/dist/sync/LastValueSink.js.map +1 -1
  121. package/dist/sync/merge.js.map +1 -1
  122. package/dist/sync/safeRace.js.map +1 -1
  123. package/dist/sync/sync.d.ts +2 -2
  124. package/dist/sync/sync.js +5 -5
  125. package/dist/sync/sync.js.map +1 -1
  126. package/dist/sync/util.js.map +1 -1
  127. package/dist/system/CorePowerSyncSystem.d.ts +12 -7
  128. package/dist/system/CorePowerSyncSystem.js +26 -2
  129. package/dist/system/CorePowerSyncSystem.js.map +1 -1
  130. package/dist/system/system-index.d.ts +1 -0
  131. package/dist/system/system-index.js +2 -0
  132. package/dist/system/system-index.js.map +1 -0
  133. package/dist/util/Mutex.js.map +1 -1
  134. package/dist/util/PgManager.js.map +1 -1
  135. package/dist/util/alerting.d.ts +0 -2
  136. package/dist/util/alerting.js +0 -6
  137. package/dist/util/alerting.js.map +1 -1
  138. package/dist/util/config/collectors/config-collector.js +3 -3
  139. package/dist/util/config/collectors/config-collector.js.map +1 -1
  140. package/dist/util/config/collectors/impl/base64-config-collector.js.map +1 -1
  141. package/dist/util/config/collectors/impl/filesystem-config-collector.js +7 -5
  142. package/dist/util/config/collectors/impl/filesystem-config-collector.js.map +1 -1
  143. package/dist/util/config/compound-config-collector.js +4 -4
  144. package/dist/util/config/compound-config-collector.js.map +1 -1
  145. package/dist/util/config/sync-rules/impl/base64-sync-rules-collector.js.map +1 -1
  146. package/dist/util/config/sync-rules/impl/filesystem-sync-rules-collector.js.map +1 -1
  147. package/dist/util/config/sync-rules/impl/inline-sync-rules-collector.js.map +1 -1
  148. package/dist/util/config.js.map +1 -1
  149. package/dist/util/env.d.ts +1 -2
  150. package/dist/util/env.js +3 -2
  151. package/dist/util/env.js.map +1 -1
  152. package/dist/util/memory-tracking.js +2 -2
  153. package/dist/util/memory-tracking.js.map +1 -1
  154. package/dist/util/migration_lib.js.map +1 -1
  155. package/dist/util/pgwire_utils.js +2 -2
  156. package/dist/util/pgwire_utils.js.map +1 -1
  157. package/dist/util/populate_test_data.js.map +1 -1
  158. package/dist/util/secs.js.map +1 -1
  159. package/dist/util/utils.js +4 -4
  160. package/dist/util/utils.js.map +1 -1
  161. package/package.json +13 -10
  162. package/src/api/diagnostics.ts +5 -5
  163. package/src/api/schema.ts +1 -1
  164. package/src/auth/JwtPayload.ts +6 -2
  165. package/src/auth/KeyStore.ts +3 -9
  166. package/src/entry/cli-entry.ts +3 -4
  167. package/src/entry/commands/config-command.ts +1 -1
  168. package/src/entry/commands/migrate-action.ts +14 -6
  169. package/src/entry/commands/start-action.ts +1 -1
  170. package/src/entry/commands/teardown-action.ts +1 -1
  171. package/src/index.ts +5 -2
  172. package/src/locks/LockManager.ts +16 -0
  173. package/src/locks/MongoLocks.ts +142 -0
  174. package/src/locks/locks-index.ts +2 -0
  175. package/src/metrics/Metrics.ts +8 -8
  176. package/src/migrations/db/migrations/1684951997326-init.ts +3 -3
  177. package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +3 -3
  178. package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +2 -2
  179. package/src/migrations/definitions.ts +21 -0
  180. package/src/migrations/executor.ts +87 -0
  181. package/src/migrations/migrations-index.ts +3 -0
  182. package/src/migrations/migrations.ts +15 -11
  183. package/src/migrations/store/migration-store.ts +63 -0
  184. package/src/replication/WalConnection.ts +2 -2
  185. package/src/replication/WalStream.ts +24 -29
  186. package/src/replication/WalStreamManager.ts +14 -15
  187. package/src/replication/WalStreamRunner.ts +10 -10
  188. package/src/replication/util.ts +1 -1
  189. package/src/routes/auth.ts +22 -16
  190. package/src/routes/endpoints/admin.ts +237 -0
  191. package/src/routes/endpoints/checkpointing.ts +41 -0
  192. package/src/routes/endpoints/dev.ts +199 -0
  193. package/src/routes/endpoints/route-endpoints-index.ts +6 -0
  194. package/src/routes/{socket-route.ts → endpoints/socket-route.ts} +13 -16
  195. package/src/routes/endpoints/sync-rules.ts +227 -0
  196. package/src/routes/endpoints/sync-stream.ts +98 -0
  197. package/src/routes/hooks.ts +45 -0
  198. package/src/routes/route-register.ts +104 -0
  199. package/src/routes/router.ts +34 -6
  200. package/src/routes/routes-index.ts +5 -4
  201. package/src/runner/teardown.ts +9 -9
  202. package/src/storage/BucketStorage.ts +7 -2
  203. package/src/storage/ChecksumCache.ts +2 -2
  204. package/src/storage/MongoBucketStorage.ts +8 -8
  205. package/src/storage/SourceTable.ts +2 -2
  206. package/src/storage/mongo/MongoBucketBatch.ts +29 -22
  207. package/src/storage/mongo/MongoSyncBucketStorage.ts +3 -3
  208. package/src/storage/mongo/MongoSyncRulesLock.ts +3 -3
  209. package/src/storage/mongo/OperationBatch.ts +1 -1
  210. package/src/storage/mongo/PersistedBatch.ts +3 -3
  211. package/src/storage/mongo/db.ts +3 -4
  212. package/src/sync/sync.ts +11 -11
  213. package/src/sync/util.ts +2 -2
  214. package/src/system/CorePowerSyncSystem.ts +31 -10
  215. package/src/system/system-index.ts +1 -0
  216. package/src/util/alerting.ts +0 -8
  217. package/src/util/config/collectors/config-collector.ts +5 -3
  218. package/src/util/config/collectors/impl/filesystem-config-collector.ts +8 -6
  219. package/src/util/config/compound-config-collector.ts +4 -4
  220. package/src/util/env.ts +4 -2
  221. package/src/util/memory-tracking.ts +2 -2
  222. package/src/util/pgwire_utils.ts +3 -3
  223. package/src/util/utils.ts +5 -5
  224. package/test/src/auth.test.ts +4 -2
  225. package/test/src/data_storage.test.ts +181 -19
  226. package/test/src/env.ts +6 -6
  227. package/test/src/setup.ts +7 -0
  228. package/test/src/slow_tests.test.ts +45 -6
  229. package/test/src/sync.test.ts +6 -5
  230. package/test/tsconfig.json +1 -1
  231. package/tsconfig.json +5 -6
  232. package/tsconfig.tsbuildinfo +1 -1
  233. package/vitest.config.ts +1 -3
  234. package/dist/migrations/db/store.d.ts +0 -3
  235. package/dist/migrations/db/store.js +0 -10
  236. package/dist/migrations/db/store.js.map +0 -1
  237. package/dist/routes/admin.d.ts +0 -7
  238. package/dist/routes/admin.js.map +0 -1
  239. package/dist/routes/checkpointing.d.ts +0 -3
  240. package/dist/routes/checkpointing.js +0 -30
  241. package/dist/routes/checkpointing.js.map +0 -1
  242. package/dist/routes/dev.d.ts +0 -6
  243. package/dist/routes/dev.js.map +0 -1
  244. package/dist/routes/route-generators.d.ts +0 -15
  245. package/dist/routes/route-generators.js +0 -32
  246. package/dist/routes/route-generators.js.map +0 -1
  247. package/dist/routes/socket-route.d.ts +0 -2
  248. package/dist/routes/socket-route.js.map +0 -1
  249. package/dist/routes/sync-rules.d.ts +0 -6
  250. package/dist/routes/sync-rules.js.map +0 -1
  251. package/dist/routes/sync-stream.d.ts +0 -5
  252. package/dist/routes/sync-stream.js.map +0 -1
  253. package/src/migrations/db/store.ts +0 -11
  254. package/src/routes/admin.ts +0 -229
  255. package/src/routes/checkpointing.ts +0 -38
  256. package/src/routes/dev.ts +0 -194
  257. package/src/routes/route-generators.ts +0 -39
  258. package/src/routes/sync-rules.ts +0 -210
  259. package/src/routes/sync-stream.ts +0 -95
  260. package/test/src/sql_functions.test.ts +0 -254
  261. package/test/src/sql_operators.test.ts +0 -132
  262. package/test/src/sync_rules.test.ts +0 -1053
package/src/sync/util.ts CHANGED
@@ -1,7 +1,7 @@
  import * as timers from 'timers/promises';
 
- import * as util from '@/util/util-index.js';
- import { Metrics } from '@/metrics/Metrics.js';
+ import * as util from '../util/util-index.js';
+ import { Metrics } from '../metrics/Metrics.js';
 
  export type TokenStreamOptions = {
  /**
package/src/system/CorePowerSyncSystem.ts CHANGED
@@ -1,26 +1,47 @@
  import * as pgwire from '@powersync/service-jpgwire';
- import * as micro from '@journeyapps-platform/micro';
+ import { LifeCycledSystem, container, logger } from '@powersync/lib-services-framework';
 
- import * as auth from '@/auth/auth-index.js';
- import * as storage from '@/storage/storage-index.js';
- import * as utils from '@/util/util-index.js';
+ import * as storage from '../storage/storage-index.js';
+ import * as utils from '../util/util-index.js';
 
- export abstract class CorePowerSyncSystem extends micro.system.MicroSystem {
+ export abstract class CorePowerSyncSystem extends LifeCycledSystem {
  abstract storage: storage.BucketStorageFactory;
- abstract client_keystore: auth.KeyStore;
- abstract dev_client_keystore: auth.KeyStore;
  abstract pgwire_pool?: pgwire.PgClient;
+ closed: boolean;
 
  protected stopHandlers: Set<() => void> = new Set();
 
- closed: boolean;
-
  constructor(public config: utils.ResolvedPowerSyncConfig) {
  super();
  this.closed = false;
  }
 
- abstract addTerminationHandler(): void;
+ get client_keystore() {
+ return this.config.client_keystore;
+ }
+
+ get dev_client_keystore() {
+ return this.config.dev_client_keystore;
+ }
+
+ /**
+ * Adds a termination handler which will call handlers registered via
+ * [addStopHandler].
+ * This should be called after the server is started and it's termination handler is added.
+ * This is so that the handler is run before the server's handler, allowing streams to be interrupted on exit
+ */
+ addTerminationHandler() {
+ container.terminationHandler.handleTerminationSignal(async () => {
+ // Close open streams, so that they don't block the server from closing.
+ // Note: This does not work well when streaming requests are queued. In that case, the server still doesn't
+ // close in the 30-second timeout.
+ this.closed = true;
+ logger.info(`Closing ${this.stopHandlers.size} streams`);
+ for (let handler of this.stopHandlers) {
+ handler();
+ }
+ });
+ }
 
  addStopHandler(handler: () => void): () => void {
  if (this.closed) {
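For context on the call-order requirement in the new docstring: a minimal usage sketch, not taken from the package — createSystem() and startServer() are hypothetical stand-ins for the concrete system subclass and HTTP server wiring, and the comments restate what the docstring says about ordering.

  // Hypothetical wiring sketch (TypeScript); the names below are placeholders, not package APIs.
  const system = createSystem(resolvedConfig);   // some concrete CorePowerSyncSystem subclass
  const server = await startServer(system);      // the server registers its own termination handler here
  system.addTerminationHandler();                // registered afterwards, so (per the docstring) it runs
                                                 // before the server's handler and closes open sync streams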
package/src/system/system-index.ts ADDED
@@ -0,0 +1 @@
+ export * from './CorePowerSyncSystem.js';
package/src/util/alerting.ts CHANGED
@@ -1,5 +1,3 @@
- import * as micro from '@journeyapps-platform/micro';
-
  let globalTags: Record<string, string> = {};
 
  export function setTags(tags: Record<string, string>) {
@@ -9,9 +7,3 @@ export function setTags(tags: Record<string, string>) {
  export function getGlobalTags() {
  return globalTags;
  }
-
- export function captureException(error: any, options?: micro.alerts.CaptureOptions) {
- micro.alerts.captureException(error, {
- ...options
- });
- }
package/src/util/config/collectors/config-collector.ts CHANGED
@@ -1,8 +1,10 @@
  import * as t from 'ts-codec';
+ import * as yaml from 'yaml';
+
  import { configFile } from '@powersync/service-types';
- import * as micro from '@journeyapps-platform/micro';
+ import { schema } from '@powersync/lib-services-framework';
+
  import { RunnerConfig } from '../types.js';
- import * as yaml from 'yaml';
 
  export enum ConfigFileFormat {
  YAML = 'yaml',
@@ -22,7 +24,7 @@ export enum ConfigFileFormat {
  const YAML_ENV_PREFIX = 'PS_';
 
  // ts-codec itself doesn't give great validation errors, so we use json schema for that
- const configSchemaValidator = micro.schema
+ const configSchemaValidator = schema
  .parseJSONSchema(
  t.generateJSONSchema(configFile.powerSyncConfig, { allowAdditional: true, parsers: [configFile.portParser] })
  )
package/src/util/config/collectors/impl/filesystem-config-collector.ts CHANGED
@@ -1,8 +1,9 @@
  import * as fs from 'fs/promises';
- import * as micro from '@journeyapps-platform/micro';
+ import * as path from 'path';
 
  import { ConfigCollector, ConfigFileFormat } from '../config-collector.js';
  import { RunnerConfig } from '../../types.js';
+ import { logger } from '@powersync/lib-services-framework';
 
  export class FileSystemConfigCollector extends ConfigCollector {
  get name(): string {
@@ -15,16 +16,17 @@ export class FileSystemConfigCollector extends ConfigCollector {
  return null;
  }
 
+ const resolvedPath = path.resolve(process.cwd(), config_path);
+
  // Check if file exists
  try {
- await fs.access(config_path, fs.constants.F_OK);
+ await fs.access(resolvedPath, fs.constants.F_OK);
  } catch (ex) {
- throw new Error(`Config file path ${config_path} was specified, but the file does not exist.`);
+ throw new Error(`Config file path ${resolvedPath} was specified, but the file does not exist.`);
  }
 
- micro.logger.info(`Collecting PowerSync configuration from File: ${config_path}`);
-
- const content = await fs.readFile(config_path, 'utf-8');
+ logger.info(`Collecting PowerSync configuration from File: ${resolvedPath}`);
+ const content = await fs.readFile(resolvedPath, 'utf-8');
 
  let contentType: ConfigFileFormat | undefined;
  switch (true) {
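One behavioral note on the hunks above: a relative config path is now resolved against the working directory before the existence check and the read. A small illustration using Node's path API — the example path and directory are made up, not taken from the diff.

  import * as path from 'path';

  // If the service is started from /app with a relative config path of 'config/powersync.yaml',
  // the collector now checks and reads this absolute path:
  const resolved = path.resolve(process.cwd(), 'config/powersync.yaml'); // e.g. '/app/config/powersync.yaml'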
package/src/util/config/compound-config-collector.ts CHANGED
@@ -1,8 +1,7 @@
- import * as micro from '@journeyapps-platform/micro';
  import { configFile, normalizeConnection } from '@powersync/service-types';
  import { ConfigCollector } from './collectors/config-collector.js';
  import { ResolvedConnection, ResolvedPowerSyncConfig, RunnerConfig, SyncRulesConfig } from './types.js';
- import * as auth from '@/auth/auth-index.js';
+ import * as auth from '../../auth/auth-index.js';
  import { SyncRulesCollector } from './sync-rules/sync-collector.js';
  import { Base64ConfigCollector } from './collectors/impl/base64-config-collector.js';
  import { FileSystemConfigCollector } from './collectors/impl/filesystem-config-collector.js';
@@ -10,6 +9,7 @@ import { Base64SyncRulesCollector } from './sync-rules/impl/base64-sync-rules-co
  import { InlineSyncRulesCollector } from './sync-rules/impl/inline-sync-rules-collector.js';
  import { FileSystemSyncRulesCollector } from './sync-rules/impl/filesystem-sync-rules-collector.js';
  import { FallbackConfigCollector } from './collectors/impl/fallback-config-collector.js';
+ import { logger } from '@powersync/lib-services-framework';
 
  const POWERSYNC_DEV_KID = 'powersync-dev';
 
@@ -140,7 +140,7 @@ export class CompoundConfigCollector {
  if (baseConfig) {
  return baseConfig;
  }
- micro.logger.debug(
+ logger.debug(
  `Could not collect PowerSync config with ${collector.name} method. Moving on to next method if available.`
  );
  } catch (ex) {
@@ -161,7 +161,7 @@ export class CompoundConfigCollector {
  if (config) {
  return config;
  }
- micro.logger.debug(
+ logger.debug(
  `Could not collect sync rules with ${collector.name} method. Moving on to next method if available.`
  );
  } catch (ex) {
package/src/util/env.ts CHANGED
@@ -1,4 +1,4 @@
- import { utils } from '@journeyapps-platform/micro';
+ import { utils } from '@powersync/lib-services-framework';
 
  import { ServiceRunner } from './config/types.js';
 
@@ -22,7 +22,9 @@ export const env = utils.collectEnvironmentVariables({
  /**
  * Port for metrics
  */
- METRICS_PORT: utils.type.number.optional()
+ METRICS_PORT: utils.type.number.optional(),
+
+ NODE_ENV: utils.type.string.optional()
  });
 
  export type Env = typeof env;
package/src/util/memory-tracking.ts CHANGED
@@ -1,4 +1,4 @@
- import * as micro from '@journeyapps-platform/micro';
+ import { logger } from '@powersync/lib-services-framework';
 
  /**
  * Track and log memory usage.
@@ -57,7 +57,7 @@ export function trackMemoryUsage() {
  )
  )`.replaceAll(/\s+/g, ' ');
 
- micro.logger.info(output);
+ logger.info(output);
  }
  }, 50);
  }
package/src/util/pgwire_utils.ts CHANGED
@@ -4,9 +4,9 @@ import * as bson from 'bson';
  import * as uuid from 'uuid';
  import * as pgwire from '@powersync/service-jpgwire';
  import { SqliteJsonValue, SqliteRow, ToastableSqliteRow, toSyncRulesRow } from '@powersync/service-sync-rules';
- import * as micro from '@journeyapps-platform/micro';
 
- import * as replication from '@/replication/replication-index.js';
+ import * as replication from '../replication/replication-index.js';
+ import { logger } from '@powersync/lib-services-framework';
 
  /**
  * pgwire message -> SQLite row.
@@ -133,7 +133,7 @@ export async function retriedQuery(db: pgwire.PgClient, ...args: any[]) {
  if (tries == 1) {
  throw e;
  }
- micro.logger.warn('Query error, retrying', e);
+ logger.warn('Query error, retrying', e);
  }
  }
  }
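The retriedQuery signature shown in the hunk header takes a pgwire.PgClient followed by query arguments; a usage sketch, assuming some existing client instance named pool (not defined in this diff):

  // Assumes `pool` is an existing pgwire.PgClient. Failed attempts are logged as
  // 'Query error, retrying' via the framework logger; the final failure is rethrown.
  const result = await retriedQuery(pool, `SELECT 1 AS ok`);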
package/src/util/utils.ts CHANGED
@@ -1,11 +1,11 @@
  import crypto from 'crypto';
  import * as pgwire from '@powersync/service-jpgwire';
  import { pgwireRows } from '@powersync/service-jpgwire';
- import * as micro from '@journeyapps-platform/micro';
 
- import * as storage from '@/storage/storage-index.js';
+ import * as storage from '../storage/storage-index.js';
  import { BucketChecksum, OpId } from './protocol-types.js';
  import { retriedQuery } from './pgwire_utils.js';
+ import { logger } from '@powersync/lib-services-framework';
 
  export type ChecksumMap = Map<string, BucketChecksum>;
 
@@ -90,14 +90,14 @@ export async function getClientCheckpoint(
 
  const timeout = options?.timeout ?? 50_000;
 
- micro.logger.info(`Waiting for LSN checkpoint: ${lsn}`);
+ logger.info(`Waiting for LSN checkpoint: ${lsn}`);
  while (Date.now() - start < timeout) {
  const cp = await bucketStorage.getActiveCheckpoint();
  if (!cp.hasSyncRules()) {
  throw new Error('No sync rules available');
  }
  if (cp.lsn >= lsn) {
- micro.logger.info(`Got write checkpoint: ${lsn} : ${cp.checkpoint}`);
+ logger.info(`Got write checkpoint: ${lsn} : ${cp.checkpoint}`);
  return cp.checkpoint;
  }
 
@@ -117,6 +117,6 @@ export async function createWriteCheckpoint(
  );
 
  const id = await bucketStorage.createWriteCheckpoint(user_id, { '1': lsn });
- micro.logger.info(`Write checkpoint 2: ${JSON.stringify({ lsn, id: String(id) })}`);
+ logger.info(`Write checkpoint 2: ${JSON.stringify({ lsn, id: String(id) })}`);
  return id;
  }
package/test/src/auth.test.ts CHANGED
@@ -262,7 +262,8 @@ describe('JWT Auth', () => {
  expect(errors).toEqual([]);
  expect(keys.length).toBeGreaterThanOrEqual(1);
 
- const invalid = new RemoteJWKSCollector('https://localhost/.well-known/jwks.json', {
+ // The localhost hostname fails to resolve correctly on MacOS https://github.com/nodejs/help/issues/2163
+ const invalid = new RemoteJWKSCollector('https://127.0.0.1/.well-known/jwks.json', {
  block_local_ip: true
  });
  expect(invalid.getKeys()).rejects.toThrow('IPs in this range are not supported');
@@ -278,7 +279,8 @@ describe('JWT Auth', () => {
  expect(errors).toEqual([]);
  expect(keys.length).toBeGreaterThanOrEqual(1);
 
- const invalid = new RemoteJWKSCollector('https://localhost/.well-known/jwks.json');
+ // The localhost hostname fails to resolve correctly on MacOS https://github.com/nodejs/help/issues/2163
+ const invalid = new RemoteJWKSCollector('https://127.0.0.1/.well-known/jwks.json');
  // Should try and fetch
  expect(invalid.getKeys()).rejects.toThrow('ECONNREFUSED');
  });
package/test/src/data_storage.test.ts CHANGED
@@ -1,4 +1,4 @@
- import { SqlSyncRules } from '@powersync/service-sync-rules';
+ import { RequestParameters, SqlSyncRules } from '@powersync/service-sync-rules';
  import * as bson from 'bson';
  import { describe, expect, test } from 'vitest';
  import { SourceTable } from '../../src/storage/SourceTable.js';
@@ -289,12 +289,7 @@ bucket_definitions:
 
  const checkpoint = result!.flushed_op;
 
- const parameters = {
- token_parameters: {
- user_id: 'u1'
- },
- user_parameters: {}
- };
+ const parameters = new RequestParameters({ sub: 'u1' }, {});
 
  const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
 
@@ -358,12 +353,7 @@ bucket_definitions:
 
  const checkpoint = result!.flushed_op;
 
- const parameters = {
- token_parameters: {
- user_id: 'unknown'
- },
- user_parameters: {}
- };
+ const parameters = new RequestParameters({ sub: 'unknown' }, {});
 
  const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
 
@@ -442,12 +432,7 @@ bucket_definitions:
 
  const checkpoint = result!.flushed_op;
 
- const parameters = {
- token_parameters: {
- user_id: 'u1'
- },
- user_parameters: {}
- };
+ const parameters = new RequestParameters({ sub: 'u1' }, {});
 
  // Test intermediate values - could be moved to sync_rules.test.ts
  const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
@@ -897,6 +882,183 @@ bucket_definitions:
  ]);
  });
 
+ test('changed data with replica identity full', async () => {
+ const sync_rules = SqlSyncRules.fromYaml(`
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM "test"
+ `);
+ const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+ const sourceTable = makeTestTable('test', ['id', 'description']);
+
+ // Pre-setup
+ const result1 = await storage.startBatch({}, async (batch) => {
+ await batch.save({
+ sourceTable,
+ tag: 'insert',
+ after: {
+ id: 'test1',
+ description: 'test1a'
+ }
+ });
+ });
+
+ const checkpoint1 = result1?.flushed_op ?? '0';
+
+ const result2 = await storage.startBatch({}, async (batch) => {
+ // Unchanged, but has a before id
+ await batch.save({
+ sourceTable,
+ tag: 'update',
+ before: {
+ id: 'test1',
+ description: 'test1a'
+ },
+ after: {
+ id: 'test1',
+ description: 'test1b'
+ }
+ });
+ });
+
+ const result3 = await storage.startBatch({}, async (batch) => {
+ // Delete
+ await batch.save({
+ sourceTable,
+ tag: 'delete',
+ before: {
+ id: 'test1',
+ description: 'test1b'
+ },
+ after: undefined
+ });
+ });
+
+ const checkpoint3 = result3!.flushed_op;
+
+ const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
+ const data = batch[0].data.map((d) => {
+ return {
+ op: d.op,
+ object_id: d.object_id,
+ data: d.data,
+ subkey: d.subkey
+ };
+ });
+
+ // Operations must be in this order
+ expect(data).toEqual([
+ // 2
+ // The REMOVE is expected because the subkey changes
+ {
+ op: 'REMOVE',
+ object_id: 'test1',
+ data: null,
+ subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+ },
+ {
+ op: 'PUT',
+ object_id: 'test1',
+ data: JSON.stringify({ id: 'test1', description: 'test1b' }),
+ subkey: '6544e3899293153fa7b38331/500e9b68-a2fd-51ff-9c00-313e2fb9f562'
+ },
+ // 3
+ {
+ op: 'REMOVE',
+ object_id: 'test1',
+ data: null,
+ subkey: '6544e3899293153fa7b38331/500e9b68-a2fd-51ff-9c00-313e2fb9f562'
+ }
+ ]);
+ });
+
+ test('unchanged data with replica identity full', async () => {
+ const sync_rules = SqlSyncRules.fromYaml(`
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM "test"
+ `);
+ const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+ const sourceTable = makeTestTable('test', ['id', 'description']);
+
+ // Pre-setup
+ const result1 = await storage.startBatch({}, async (batch) => {
+ await batch.save({
+ sourceTable,
+ tag: 'insert',
+ after: {
+ id: 'test1',
+ description: 'test1a'
+ }
+ });
+ });
+
+ const checkpoint1 = result1?.flushed_op ?? '0';
+
+ const result2 = await storage.startBatch({}, async (batch) => {
+ // Unchanged, but has a before id
+ await batch.save({
+ sourceTable,
+ tag: 'update',
+ before: {
+ id: 'test1',
+ description: 'test1a'
+ },
+ after: {
+ id: 'test1',
+ description: 'test1a'
+ }
+ });
+ });
+
+ const result3 = await storage.startBatch({}, async (batch) => {
+ // Delete
+ await batch.save({
+ sourceTable,
+ tag: 'delete',
+ before: {
+ id: 'test1',
+ description: 'test1a'
+ },
+ after: undefined
+ });
+ });
+
+ const checkpoint3 = result3!.flushed_op;
+
+ const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
+ const data = batch[0].data.map((d) => {
+ return {
+ op: d.op,
+ object_id: d.object_id,
+ data: d.data,
+ subkey: d.subkey
+ };
+ });
+
+ // Operations must be in this order
+ expect(data).toEqual([
+ // 2
+ {
+ op: 'PUT',
+ object_id: 'test1',
+ data: JSON.stringify({ id: 'test1', description: 'test1a' }),
+ subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+ },
+ // 3
+ {
+ op: 'REMOVE',
+ object_id: 'test1',
+ data: null,
+ subkey: '6544e3899293153fa7b38331/740ba9f2-8b0f-53e3-bb17-5f38a9616f0e'
+ }
+ ]);
+ });
+
  test('large batch', async () => {
  // Test syncing a batch of data that is small in count,
  // but large enough in size to be split over multiple returned batches.
package/test/src/env.ts CHANGED
@@ -1,8 +1,8 @@
- import * as micro from '@journeyapps-platform/micro';
+ import { utils } from '@powersync/lib-services-framework';
 
- export const env = micro.utils.collectEnvironmentVariables({
- MONGO_TEST_URL: micro.utils.type.string.default('mongodb://localhost:27017/powersync_test'),
- PG_TEST_URL: micro.utils.type.string.default('postgres://postgres:postgres@localhost:5432/powersync_test'),
- CI: micro.utils.type.boolean.default('false'),
- SLOW_TESTS: micro.utils.type.boolean.default('false')
+ export const env = utils.collectEnvironmentVariables({
+ MONGO_TEST_URL: utils.type.string.default('mongodb://localhost:27017/powersync_test'),
+ PG_TEST_URL: utils.type.string.default('postgres://postgres:postgres@localhost:5432/powersync_test'),
+ CI: utils.type.boolean.default('false'),
+ SLOW_TESTS: utils.type.boolean.default('false')
  });
package/test/src/setup.ts ADDED
@@ -0,0 +1,7 @@
+ import { container } from '@powersync/lib-services-framework';
+ import { beforeAll } from 'vitest';
+
+ beforeAll(() => {
+ // Your setup code here
+ container.registerDefaults();
+ });
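The new test setup file only registers framework defaults; for it to run before the suites, vitest has to load it as a setup file. The vitest.config.ts change in the file list above (+1 -3) is not shown in this diff, but a typical wiring would look like the sketch below — the setupFiles path is an assumption, not taken from the package.

  // vitest.config.ts — hypothetical sketch, not the actual file contents from this package.
  import { defineConfig } from 'vitest/config';

  export default defineConfig({
    test: {
      setupFiles: ['test/src/setup.ts']
    }
  });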
package/test/src/slow_tests.test.ts CHANGED
@@ -62,7 +62,7 @@ function defineSlowTests(factory: StorageFactory) {
  bucket_definitions:
  global:
  data:
- - SELECT id, description FROM "test_data"
+ - SELECT * FROM "test_data"
  `;
  const syncRules = await f.updateSyncRules({ content: syncRuleContent });
  const storage = f.getInstance(syncRules.parsed());
@@ -76,7 +76,10 @@ bucket_definitions:
  walStream = new WalStream(options);
 
  await pool.query(`DROP TABLE IF EXISTS test_data`);
- await pool.query(`CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text)`);
+ await pool.query(
+ `CREATE TABLE test_data(id uuid primary key default uuid_generate_v4(), description text, num decimal)`
+ );
+ await pool.query(`ALTER TABLE test_data REPLICA IDENTITY FULL`);
 
  await walStream.initReplication(replicationConnection);
  await storage.autoActivate();
@@ -88,14 +91,17 @@ bucket_definitions:
 
  while (!abort && Date.now() - start < TEST_DURATION_MS) {
  const bg = async () => {
- for (let j = 0; j < 5 && !abort; j++) {
- const n = Math.floor(Math.random() * 50);
+ for (let j = 0; j < 1 && !abort; j++) {
+ const n = 1;
  let statements: pgwire.Statement[] = [];
  for (let i = 0; i < n; i++) {
  const description = `test${i}`;
  statements.push({
- statement: `INSERT INTO test_data(description) VALUES($1) returning id as test_id`,
- params: [{ type: 'varchar', value: description }]
+ statement: `INSERT INTO test_data(description, num) VALUES($1, $2) returning id as test_id`,
+ params: [
+ { type: 'varchar', value: description },
+ { type: 'float8', value: Math.random() }
+ ]
  });
  }
  const results = await pool.query(...statements);
@@ -104,6 +110,24 @@ bucket_definitions:
  });
  await new Promise((resolve) => setTimeout(resolve, Math.random() * 30));
 
+ if (Math.random() > 0.5) {
+ const updateStatements: pgwire.Statement[] = ids.map((id) => {
+ return {
+ statement: `UPDATE test_data SET num = $2 WHERE id = $1`,
+ params: [
+ { type: 'uuid', value: id },
+ { type: 'float8', value: Math.random() }
+ ]
+ };
+ });
+
+ await pool.query(...updateStatements);
+ if (Math.random() > 0.5) {
+ // Special case - an update that doesn't change data
+ await pool.query(...updateStatements);
+ }
+ }
+
  const deleteStatements: pgwire.Statement[] = ids.map((id) => {
  return {
  statement: `DELETE FROM test_data WHERE id = $1`,
@@ -129,6 +153,21 @@ bucket_definitions:
  return bson.deserialize((doc.data as mongo.Binary).buffer) as SqliteRow;
  });
  expect(transformed).toEqual([]);
+
+ // Check that each PUT has a REMOVE
+ const ops = await f.db.bucket_data.find().sort({ _id: 1 }).toArray();
+ let active = new Set<string>();
+ for (let op of ops) {
+ const key = op.source_key.toHexString();
+ if (op.op == 'PUT') {
+ active.add(key);
+ } else if (op.op == 'REMOVE') {
+ active.delete(key);
+ }
+ }
+ if (active.size > 0) {
+ throw new Error(`${active.size} rows not removed`);
+ }
  }
 
  abortController.abort();