@powersync/service-core 0.2.2 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (262)
  1. package/CHANGELOG.md +31 -0
  2. package/dist/api/diagnostics.js +2 -2
  3. package/dist/api/diagnostics.js.map +1 -1
  4. package/dist/api/schema.js.map +1 -1
  5. package/dist/auth/CachedKeyCollector.js.map +1 -1
  6. package/dist/auth/JwtPayload.d.ts +6 -2
  7. package/dist/auth/KeySpec.js.map +1 -1
  8. package/dist/auth/KeyStore.js +3 -9
  9. package/dist/auth/KeyStore.js.map +1 -1
  10. package/dist/auth/LeakyBucket.js.map +1 -1
  11. package/dist/auth/RemoteJWKSCollector.js.map +1 -1
  12. package/dist/auth/SupabaseKeyCollector.js.map +1 -1
  13. package/dist/db/mongo.js.map +1 -1
  14. package/dist/entry/cli-entry.js +2 -2
  15. package/dist/entry/cli-entry.js.map +1 -1
  16. package/dist/entry/commands/config-command.js.map +1 -1
  17. package/dist/entry/commands/migrate-action.js +12 -4
  18. package/dist/entry/commands/migrate-action.js.map +1 -1
  19. package/dist/entry/commands/start-action.js.map +1 -1
  20. package/dist/entry/commands/teardown-action.js.map +1 -1
  21. package/dist/index.d.ts +3 -2
  22. package/dist/index.js +4 -2
  23. package/dist/index.js.map +1 -1
  24. package/dist/locks/LockManager.d.ts +10 -0
  25. package/dist/locks/LockManager.js +7 -0
  26. package/dist/locks/LockManager.js.map +1 -0
  27. package/dist/locks/MongoLocks.d.ts +36 -0
  28. package/dist/locks/MongoLocks.js +81 -0
  29. package/dist/locks/MongoLocks.js.map +1 -0
  30. package/dist/locks/locks-index.d.ts +2 -0
  31. package/dist/locks/locks-index.js +3 -0
  32. package/dist/locks/locks-index.js.map +1 -0
  33. package/dist/metrics/Metrics.js +6 -6
  34. package/dist/metrics/Metrics.js.map +1 -1
  35. package/dist/migrations/db/migrations/1684951997326-init.js.map +1 -1
  36. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -1
  37. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +1 -1
  38. package/dist/migrations/definitions.d.ts +18 -0
  39. package/dist/migrations/definitions.js +6 -0
  40. package/dist/migrations/definitions.js.map +1 -0
  41. package/dist/migrations/executor.d.ts +16 -0
  42. package/dist/migrations/executor.js +64 -0
  43. package/dist/migrations/executor.js.map +1 -0
  44. package/dist/migrations/migrations-index.d.ts +3 -0
  45. package/dist/migrations/migrations-index.js +4 -0
  46. package/dist/migrations/migrations-index.js.map +1 -0
  47. package/dist/migrations/migrations.d.ts +1 -1
  48. package/dist/migrations/migrations.js +12 -8
  49. package/dist/migrations/migrations.js.map +1 -1
  50. package/dist/migrations/store/migration-store.d.ts +11 -0
  51. package/dist/migrations/store/migration-store.js +46 -0
  52. package/dist/migrations/store/migration-store.js.map +1 -0
  53. package/dist/replication/ErrorRateLimiter.js.map +1 -1
  54. package/dist/replication/PgRelation.js.map +1 -1
  55. package/dist/replication/WalConnection.js.map +1 -1
  56. package/dist/replication/WalStream.d.ts +0 -1
  57. package/dist/replication/WalStream.js +21 -25
  58. package/dist/replication/WalStream.js.map +1 -1
  59. package/dist/replication/WalStreamManager.js +12 -13
  60. package/dist/replication/WalStreamManager.js.map +1 -1
  61. package/dist/replication/WalStreamRunner.js +8 -8
  62. package/dist/replication/WalStreamRunner.js.map +1 -1
  63. package/dist/replication/util.js.map +1 -1
  64. package/dist/routes/auth.d.ts +8 -10
  65. package/dist/routes/auth.js.map +1 -1
  66. package/dist/routes/endpoints/admin.d.ts +1011 -0
  67. package/dist/routes/{admin.js → endpoints/admin.js} +33 -18
  68. package/dist/routes/endpoints/admin.js.map +1 -0
  69. package/dist/routes/endpoints/checkpointing.d.ts +76 -0
  70. package/dist/routes/endpoints/checkpointing.js +36 -0
  71. package/dist/routes/endpoints/checkpointing.js.map +1 -0
  72. package/dist/routes/endpoints/dev.d.ts +312 -0
  73. package/dist/routes/{dev.js → endpoints/dev.js} +25 -16
  74. package/dist/routes/endpoints/dev.js.map +1 -0
  75. package/dist/routes/endpoints/route-endpoints-index.d.ts +6 -0
  76. package/dist/routes/endpoints/route-endpoints-index.js +7 -0
  77. package/dist/routes/endpoints/route-endpoints-index.js.map +1 -0
  78. package/dist/routes/endpoints/socket-route.d.ts +2 -0
  79. package/dist/routes/{socket-route.js → endpoints/socket-route.js} +12 -12
  80. package/dist/routes/endpoints/socket-route.js.map +1 -0
  81. package/dist/routes/endpoints/sync-rules.d.ts +174 -0
  82. package/dist/routes/{sync-rules.js → endpoints/sync-rules.js} +44 -24
  83. package/dist/routes/endpoints/sync-rules.js.map +1 -0
  84. package/dist/routes/endpoints/sync-stream.d.ts +132 -0
  85. package/dist/routes/{sync-stream.js → endpoints/sync-stream.js} +28 -19
  86. package/dist/routes/endpoints/sync-stream.js.map +1 -0
  87. package/dist/routes/hooks.d.ts +10 -0
  88. package/dist/routes/hooks.js +31 -0
  89. package/dist/routes/hooks.js.map +1 -0
  90. package/dist/routes/route-register.d.ts +10 -0
  91. package/dist/routes/route-register.js +87 -0
  92. package/dist/routes/route-register.js.map +1 -0
  93. package/dist/routes/router.d.ts +16 -4
  94. package/dist/routes/router.js +6 -1
  95. package/dist/routes/router.js.map +1 -1
  96. package/dist/routes/routes-index.d.ts +5 -3
  97. package/dist/routes/routes-index.js +5 -3
  98. package/dist/routes/routes-index.js.map +1 -1
  99. package/dist/runner/teardown.js +9 -9
  100. package/dist/runner/teardown.js.map +1 -1
  101. package/dist/storage/BucketStorage.d.ts +3 -0
  102. package/dist/storage/BucketStorage.js.map +1 -1
  103. package/dist/storage/ChecksumCache.js.map +1 -1
  104. package/dist/storage/MongoBucketStorage.js +5 -5
  105. package/dist/storage/MongoBucketStorage.js.map +1 -1
  106. package/dist/storage/SourceTable.js.map +1 -1
  107. package/dist/storage/mongo/MongoBucketBatch.js +23 -18
  108. package/dist/storage/mongo/MongoBucketBatch.js.map +1 -1
  109. package/dist/storage/mongo/MongoIdSequence.js.map +1 -1
  110. package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
  111. package/dist/storage/mongo/MongoSyncRulesLock.js +3 -3
  112. package/dist/storage/mongo/MongoSyncRulesLock.js.map +1 -1
  113. package/dist/storage/mongo/OperationBatch.js.map +1 -1
  114. package/dist/storage/mongo/PersistedBatch.js +2 -2
  115. package/dist/storage/mongo/PersistedBatch.js.map +1 -1
  116. package/dist/storage/mongo/db.d.ts +2 -2
  117. package/dist/storage/mongo/db.js.map +1 -1
  118. package/dist/storage/mongo/util.js.map +1 -1
  119. package/dist/sync/BroadcastIterable.js.map +1 -1
  120. package/dist/sync/LastValueSink.js.map +1 -1
  121. package/dist/sync/merge.js.map +1 -1
  122. package/dist/sync/safeRace.js.map +1 -1
  123. package/dist/sync/sync.d.ts +2 -2
  124. package/dist/sync/sync.js +5 -5
  125. package/dist/sync/sync.js.map +1 -1
  126. package/dist/sync/util.js.map +1 -1
  127. package/dist/system/CorePowerSyncSystem.d.ts +12 -7
  128. package/dist/system/CorePowerSyncSystem.js +26 -2
  129. package/dist/system/CorePowerSyncSystem.js.map +1 -1
  130. package/dist/system/system-index.d.ts +1 -0
  131. package/dist/system/system-index.js +2 -0
  132. package/dist/system/system-index.js.map +1 -0
  133. package/dist/util/Mutex.js.map +1 -1
  134. package/dist/util/PgManager.js.map +1 -1
  135. package/dist/util/alerting.d.ts +0 -2
  136. package/dist/util/alerting.js +0 -6
  137. package/dist/util/alerting.js.map +1 -1
  138. package/dist/util/config/collectors/config-collector.js +3 -3
  139. package/dist/util/config/collectors/config-collector.js.map +1 -1
  140. package/dist/util/config/collectors/impl/base64-config-collector.js.map +1 -1
  141. package/dist/util/config/collectors/impl/filesystem-config-collector.js +7 -5
  142. package/dist/util/config/collectors/impl/filesystem-config-collector.js.map +1 -1
  143. package/dist/util/config/compound-config-collector.js +4 -4
  144. package/dist/util/config/compound-config-collector.js.map +1 -1
  145. package/dist/util/config/sync-rules/impl/base64-sync-rules-collector.js.map +1 -1
  146. package/dist/util/config/sync-rules/impl/filesystem-sync-rules-collector.js.map +1 -1
  147. package/dist/util/config/sync-rules/impl/inline-sync-rules-collector.js.map +1 -1
  148. package/dist/util/config.js.map +1 -1
  149. package/dist/util/env.d.ts +1 -2
  150. package/dist/util/env.js +3 -2
  151. package/dist/util/env.js.map +1 -1
  152. package/dist/util/memory-tracking.js +2 -2
  153. package/dist/util/memory-tracking.js.map +1 -1
  154. package/dist/util/migration_lib.js.map +1 -1
  155. package/dist/util/pgwire_utils.js +2 -2
  156. package/dist/util/pgwire_utils.js.map +1 -1
  157. package/dist/util/populate_test_data.js.map +1 -1
  158. package/dist/util/secs.js.map +1 -1
  159. package/dist/util/utils.js +4 -4
  160. package/dist/util/utils.js.map +1 -1
  161. package/package.json +13 -10
  162. package/src/api/diagnostics.ts +5 -5
  163. package/src/api/schema.ts +1 -1
  164. package/src/auth/JwtPayload.ts +6 -2
  165. package/src/auth/KeyStore.ts +3 -9
  166. package/src/entry/cli-entry.ts +3 -4
  167. package/src/entry/commands/config-command.ts +1 -1
  168. package/src/entry/commands/migrate-action.ts +14 -6
  169. package/src/entry/commands/start-action.ts +1 -1
  170. package/src/entry/commands/teardown-action.ts +1 -1
  171. package/src/index.ts +5 -2
  172. package/src/locks/LockManager.ts +16 -0
  173. package/src/locks/MongoLocks.ts +142 -0
  174. package/src/locks/locks-index.ts +2 -0
  175. package/src/metrics/Metrics.ts +8 -8
  176. package/src/migrations/db/migrations/1684951997326-init.ts +3 -3
  177. package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +3 -3
  178. package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +2 -2
  179. package/src/migrations/definitions.ts +21 -0
  180. package/src/migrations/executor.ts +87 -0
  181. package/src/migrations/migrations-index.ts +3 -0
  182. package/src/migrations/migrations.ts +15 -11
  183. package/src/migrations/store/migration-store.ts +63 -0
  184. package/src/replication/WalConnection.ts +2 -2
  185. package/src/replication/WalStream.ts +24 -29
  186. package/src/replication/WalStreamManager.ts +14 -15
  187. package/src/replication/WalStreamRunner.ts +10 -10
  188. package/src/replication/util.ts +1 -1
  189. package/src/routes/auth.ts +22 -16
  190. package/src/routes/endpoints/admin.ts +237 -0
  191. package/src/routes/endpoints/checkpointing.ts +41 -0
  192. package/src/routes/endpoints/dev.ts +199 -0
  193. package/src/routes/endpoints/route-endpoints-index.ts +6 -0
  194. package/src/routes/{socket-route.ts → endpoints/socket-route.ts} +13 -16
  195. package/src/routes/endpoints/sync-rules.ts +227 -0
  196. package/src/routes/endpoints/sync-stream.ts +98 -0
  197. package/src/routes/hooks.ts +45 -0
  198. package/src/routes/route-register.ts +104 -0
  199. package/src/routes/router.ts +34 -6
  200. package/src/routes/routes-index.ts +5 -4
  201. package/src/runner/teardown.ts +9 -9
  202. package/src/storage/BucketStorage.ts +7 -2
  203. package/src/storage/ChecksumCache.ts +2 -2
  204. package/src/storage/MongoBucketStorage.ts +8 -8
  205. package/src/storage/SourceTable.ts +2 -2
  206. package/src/storage/mongo/MongoBucketBatch.ts +29 -22
  207. package/src/storage/mongo/MongoSyncBucketStorage.ts +3 -3
  208. package/src/storage/mongo/MongoSyncRulesLock.ts +3 -3
  209. package/src/storage/mongo/OperationBatch.ts +1 -1
  210. package/src/storage/mongo/PersistedBatch.ts +3 -3
  211. package/src/storage/mongo/db.ts +3 -4
  212. package/src/sync/sync.ts +11 -11
  213. package/src/sync/util.ts +2 -2
  214. package/src/system/CorePowerSyncSystem.ts +31 -10
  215. package/src/system/system-index.ts +1 -0
  216. package/src/util/alerting.ts +0 -8
  217. package/src/util/config/collectors/config-collector.ts +5 -3
  218. package/src/util/config/collectors/impl/filesystem-config-collector.ts +8 -6
  219. package/src/util/config/compound-config-collector.ts +4 -4
  220. package/src/util/env.ts +4 -2
  221. package/src/util/memory-tracking.ts +2 -2
  222. package/src/util/pgwire_utils.ts +3 -3
  223. package/src/util/utils.ts +5 -5
  224. package/test/src/auth.test.ts +4 -2
  225. package/test/src/data_storage.test.ts +181 -19
  226. package/test/src/env.ts +6 -6
  227. package/test/src/setup.ts +7 -0
  228. package/test/src/slow_tests.test.ts +45 -6
  229. package/test/src/sync.test.ts +6 -5
  230. package/test/tsconfig.json +1 -1
  231. package/tsconfig.json +5 -6
  232. package/tsconfig.tsbuildinfo +1 -1
  233. package/vitest.config.ts +1 -3
  234. package/dist/migrations/db/store.d.ts +0 -3
  235. package/dist/migrations/db/store.js +0 -10
  236. package/dist/migrations/db/store.js.map +0 -1
  237. package/dist/routes/admin.d.ts +0 -7
  238. package/dist/routes/admin.js.map +0 -1
  239. package/dist/routes/checkpointing.d.ts +0 -3
  240. package/dist/routes/checkpointing.js +0 -30
  241. package/dist/routes/checkpointing.js.map +0 -1
  242. package/dist/routes/dev.d.ts +0 -6
  243. package/dist/routes/dev.js.map +0 -1
  244. package/dist/routes/route-generators.d.ts +0 -15
  245. package/dist/routes/route-generators.js +0 -32
  246. package/dist/routes/route-generators.js.map +0 -1
  247. package/dist/routes/socket-route.d.ts +0 -2
  248. package/dist/routes/socket-route.js.map +0 -1
  249. package/dist/routes/sync-rules.d.ts +0 -6
  250. package/dist/routes/sync-rules.js.map +0 -1
  251. package/dist/routes/sync-stream.d.ts +0 -5
  252. package/dist/routes/sync-stream.js.map +0 -1
  253. package/src/migrations/db/store.ts +0 -11
  254. package/src/routes/admin.ts +0 -229
  255. package/src/routes/checkpointing.ts +0 -38
  256. package/src/routes/dev.ts +0 -194
  257. package/src/routes/route-generators.ts +0 -39
  258. package/src/routes/sync-rules.ts +0 -210
  259. package/src/routes/sync-stream.ts +0 -95
  260. package/test/src/sql_functions.test.ts +0 -254
  261. package/test/src/sql_operators.test.ts +0 -132
  262. package/test/src/sync_rules.test.ts +0 -1053
@@ -0,0 +1,104 @@
+ import fastify from 'fastify';
+
+ import { errors, router, HTTPMethod, logger } from '@powersync/lib-services-framework';
+ import { Context, ContextProvider, RequestEndpoint, RequestEndpointHandlerPayload } from './router.js';
+
+ export type FastifyEndpoint<I, O, C> = RequestEndpoint<I, O, C> & {
+ parse?: boolean;
+ plugins?: fastify.FastifyPluginAsync[];
+ };
+
+ /**
+ * Registers endpoint definitions as routes on a Fastify app instance.
+ */
+ export function registerFastifyRoutes(
+ app: fastify.FastifyInstance,
+ contextProvider: ContextProvider,
+ endpoints: FastifyEndpoint<any, any, Context>[]
+ ) {
+ for (const e of endpoints) {
+ // Create a new context for each route
+ app.register(async function (fastify) {
+ fastify.route({
+ url: e.path,
+ method: e.method as HTTPMethod,
+ handler: async (request, reply) => {
+ const startTime = new Date();
+ let response: router.RouterResponse;
+ try {
+ const context = await contextProvider(request);
+
+ let combined = {
+ ...(request.params as any),
+ ...(request.query as any)
+ };
+
+ if (typeof request.body === 'object' && !Buffer.isBuffer(request.body) && !Array.isArray(request.body)) {
+ combined = {
+ ...combined,
+ ...request.body
+ };
+ }
+
+ const payload: RequestEndpointHandlerPayload = {
+ context: context,
+ params: combined,
+ request
+ };
+
+ const endpointResponse = await router.executeEndpoint(e, payload);
+
+ if (router.RouterResponse.isRouterResponse(endpointResponse)) {
+ response = endpointResponse;
+ } else if (router.isAsyncIterable(endpointResponse) || Buffer.isBuffer(endpointResponse)) {
+ response = new router.RouterResponse({
+ status: 200,
+ data: endpointResponse
+ });
+ } else {
+ response = new router.RouterResponse({
+ status: 200,
+ data: { data: endpointResponse }
+ });
+ }
+ } catch (ex) {
+ const journeyError = errors.JourneyError.isJourneyError(ex) ? ex : new errors.InternalServerError(ex);
+
+ response = new router.RouterResponse({
+ status: journeyError.errorData.status || 500,
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ data: {
+ error: journeyError.errorData
+ }
+ });
+ }
+
+ Object.keys(response.headers).forEach((key) => {
+ reply.header(key, response.headers[key]);
+ });
+ reply.status(response.status);
+ try {
+ await reply.send(response.data);
+ } finally {
+ await response.afterSend?.();
+ logger.info(`${e.method} ${request.url}`, {
+ duration_ms: Math.round(new Date().valueOf() - startTime.valueOf() + Number.EPSILON),
+ status: response.status,
+ method: e.method,
+ path: request.url,
+ route: e.path
+ });
+ }
+ }
+ });
+
+ if (!(e.parse ?? true)) {
+ fastify.removeAllContentTypeParsers();
+ }
+
+ e.plugins?.forEach((plugin) => fastify.register(plugin));
+ });
+ }
+ }
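Note: the handler above normalizes whatever an endpoint returns: a RouterResponse instance passes through unchanged, async iterables and Buffers are sent with status 200 as-is, and any other value is wrapped as `{ data: ... }`. A minimal sketch of what that implies for a plain-object endpoint (the endpoint shape below is illustrative, not an API defined in this diff):

```ts
// Sketch only: given an endpoint whose handler returns a plain object...
const statusEndpoint = {
  path: '/api/status',
  method: 'GET',
  handler: async () => ({ healthy: true })
};

// ...the Fastify handler registered by registerFastifyRoutes responds with:
//   HTTP 200
//   { "data": { "healthy": true } }
// whereas a RouterResponse, async iterable, or Buffer would be sent through unwrapped.
```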
@@ -1,8 +1,10 @@
- import * as micro from '@journeyapps-platform/micro';
-
- import * as auth from '@/auth/auth-index.js';
+ import { router } from '@powersync/lib-services-framework';
+ import * as auth from '../auth/auth-index.js';
  import { CorePowerSyncSystem } from '../system/CorePowerSyncSystem.js';

+ /**
+ * Common context for routes
+ */
  export type Context = {
  user_id?: string;
  system: CorePowerSyncSystem;
@@ -11,8 +13,34 @@ export type Context = {
  token_errors?: string[];
  };

+ export type BasicRouterRequest = {
+ headers: Record<string, string | string[] | undefined>;
+ protocol: string;
+ hostname: string;
+ };
+
+ export type ContextProvider = (request: BasicRouterRequest) => Promise<Context>;
+
+ export type RequestEndpoint<
+ I,
+ O,
+ C = Context,
+ Payload = RequestEndpointHandlerPayload<I, C, BasicRouterRequest>
+ > = router.Endpoint<I, O, C, Payload> & {};
+
+ export type RequestEndpointHandlerPayload<
+ I = any,
+ C = Context,
+ Request = BasicRouterRequest
+ > = router.EndpointHandlerPayload<I, C> & {
+ request: Request;
+ };
+
  /**
- * Creates a route handler given a router instance
- * TODO move away from Fastify specific types
+ * Helper function for making generics work well when defining routes
  */
- export type RouteGenerator = (router: micro.fastify.FastifyRouter<Context>) => micro.router.Route;
+ export function routeDefinition<I, O, C = Context, Extension = {}>(
+ params: RequestEndpoint<I, O, C> & Extension
+ ): RequestEndpoint<I, O, C> & Extension {
+ return params;
+ }
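Note: a hedged sketch of how routeDefinition and the new registerFastifyRoutes helper could be combined. The import paths, the endpoint fields (handler in particular), and the way `system` is obtained are assumptions for illustration only, not definitions from this diff:

```ts
import fastify from 'fastify';
// Illustrative relative imports, matching the file layout in this diff.
import { routeDefinition, Context, BasicRouterRequest } from './router.js';
import { registerFastifyRoutes } from './route-register.js';

// Assumed to be created elsewhere in the service; only its type matters here.
declare const system: Context['system'];

// Hypothetical endpoint. `handler` and the payload shape are assumptions about the
// shared framework's Endpoint type; `path` and `method` are read by registerFastifyRoutes.
const echoRoute = routeDefinition({
  path: '/api/echo',
  method: 'POST',
  handler: async (payload: any) => ({ echo: payload.params })
} as any);

const app = fastify();

// Every request is resolved to a Context by the provider before the endpoint runs.
registerFastifyRoutes(app, async (request: BasicRouterRequest): Promise<Context> => ({ system }), [
  echoRoute as any
]);

await app.listen({ port: 8080 });
```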
@@ -1,5 +1,6 @@
- export * from './router.js';
- export * from './router-socket.js';
- export * from './route-generators.js';
-
  export * as auth from './auth.js';
+ export * as endpoints from './endpoints/route-endpoints-index.js';
+ export * as hooks from './hooks.js';
+ export * from './route-register.js';
+ export * from './router-socket.js';
+ export * from './router.js';
@@ -3,13 +3,13 @@
  // 1. The replication slots on the source postgres instance (if available).
  // 2. The mongo database.

- import * as micro from '@journeyapps-platform/micro';
  import * as timers from 'timers/promises';

  import * as db from '../db/db-index.js';
  import * as storage from '../storage/storage-index.js';
  import * as utils from '../util/util-index.js';
  import * as replication from '../replication/replication-index.js';
+ import { logger } from '@powersync/lib-services-framework';

  /**
  * Attempt to terminate a single sync rules instance.
@@ -34,9 +34,9 @@ async function terminateReplicator(
  lock
  });

- micro.logger.info(`Terminating replication slot ${stream.slot_name}`);
+ logger.info(`Terminating replication slot ${stream.slot_name}`);
  await stream.terminate();
- micro.logger.info(`Terminated replication slot ${stream.slot_name}`);
+ logger.info(`Terminated replication slot ${stream.slot_name}`);
  } finally {
  await lock.release();
  }
@@ -64,7 +64,7 @@ async function terminateReplicators(
  } catch (e) {
  retry = true;
  console.error(e);
- micro.logger.warn(`Failed to terminate ${syncRules.slot_name}`, e);
+ logger.warn(`Failed to terminate ${syncRules.slot_name}`, e);
  }
  }
  if (!retry) {
@@ -78,22 +78,22 @@ export async function teardown(runnerConfig: utils.RunnerConfig) {
  const config = await utils.loadConfig(runnerConfig);
  const mongoDB = storage.createPowerSyncMongo(config.storage);
  try {
- micro.logger.info(`Waiting for auth`);
+ logger.info(`Waiting for auth`);
  await db.mongo.waitForAuth(mongoDB.db);

  const bucketStorage = new storage.MongoBucketStorage(mongoDB, { slot_name_prefix: config.slot_name_prefix });
  const connection = config.connection;

- micro.logger.info(`Terminating replication slots`);
+ logger.info(`Terminating replication slots`);

  if (connection) {
  await terminateReplicators(bucketStorage, connection);
  }

  const database = mongoDB.db;
- micro.logger.info(`Dropping database ${database.namespace}`);
+ logger.info(`Dropping database ${database.namespace}`);
  await database.dropDatabase();
- micro.logger.info(`Done`);
+ logger.info(`Done`);
  await mongoDB.client.close();

  // If there was an error connecting to postgress, the process may stay open indefinitely.
@@ -101,7 +101,7 @@ export async function teardown(runnerConfig: utils.RunnerConfig) {
  // We do not consider those errors a teardown failure.
  process.exit(0);
  } catch (e) {
- micro.logger.error(`Teardown failure`, e);
+ logger.error(`Teardown failure`, e);
  await mongoDB.client.close();
  process.exit(1);
  }
@@ -8,8 +8,8 @@ import {
  ToastableSqliteRow
  } from '@powersync/service-sync-rules';

- import * as replication from '@/replication/replication-index.js';
- import * as util from '@/util/util-index.js';
+ import * as replication from '../replication/replication-index.js';
+ import * as util from '../util/util-index.js';
  import { SourceTable } from './SourceTable.js';

  export interface BucketStorageFactory {
@@ -367,7 +367,12 @@ export interface SaveInsert {
  export interface SaveUpdate {
  tag: 'update';
  sourceTable: SourceTable;
+
+ /**
+ * This is only present when the id has changed, and will only contain replica identity columns.
+ */
  before?: SqliteRow;
+
  /**
  * A null value means null column.
  *
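Note: the new doc comment pins down when `before` is populated on a SaveUpdate. Illustrative shapes (plain objects with made-up values, not typed against the actual interface) for the two cases it documents:

```ts
// Stand-in for a SourceTable instance; assumed to exist elsewhere.
declare const usersTable: any;

// Ordinary update: the replica id did not change, so no `before` image is included.
const update = {
  tag: 'update',
  sourceTable: usersTable,
  after: { id: 'user1', name: 'Alice' }
};

// Update that changed the id: `before` carries only the replica identity columns,
// so the old row can be removed under its previous key.
const idChanged = {
  tag: 'update',
  sourceTable: usersTable,
  before: { id: 'user1' },
  after: { id: 'user2', name: 'Alice' }
};
```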
@@ -1,5 +1,5 @@
- import { BucketChecksum, OpId } from '@/util/protocol-types.js';
- import { ChecksumMap, addBucketChecksums } from '@/util/utils.js';
+ import { BucketChecksum, OpId } from '../util/protocol-types.js';
+ import { ChecksumMap, addBucketChecksums } from '../util/utils.js';
  import { LRUCache } from 'lru-cache/min';
  import { OrderedSet } from '@js-sdsl/ordered-set';

@@ -1,13 +1,13 @@
  import * as mongo from 'mongodb';
  import * as timers from 'timers/promises';
  import { LRUCache } from 'lru-cache/min';
- import * as micro from '@journeyapps-platform/micro';
  import { SqlSyncRules } from '@powersync/service-sync-rules';
  import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';

- import * as replication from '@/replication/replication-index.js';
- import * as sync from '@/sync/sync-index.js';
- import * as util from '@/util/util-index.js';
+ import * as replication from '../replication/replication-index.js';
+ import * as sync from '../sync/sync-index.js';
+ import * as util from '../util/util-index.js';
+ import * as locks from '../locks/locks-index.js';

  import {
  ActiveCheckpoint,
@@ -23,8 +23,8 @@ import { MongoSyncBucketStorage } from './mongo/MongoSyncBucketStorage.js';
  import { PowerSyncMongo, PowerSyncMongoOptions } from './mongo/db.js';
  import { SyncRuleDocument, SyncRuleState } from './mongo/models.js';
  import { generateSlotName } from './mongo/util.js';
- import { locks } from '@journeyapps-platform/micro';
  import { v4 as uuid } from 'uuid';
+ import { logger } from '@powersync/lib-services-framework';

  export interface MongoBucketStorageOptions extends PowerSyncMongoOptions {}

@@ -74,13 +74,13 @@ export class MongoBucketStorage implements BucketStorageFactory {
  const active = await this.getActiveSyncRulesContent();

  if (next?.sync_rules_content == sync_rules) {
- micro.logger.info('Sync rules from configuration unchanged');
+ logger.info('Sync rules from configuration unchanged');
  return { updated: false };
  } else if (next == null && active?.sync_rules_content == sync_rules) {
- micro.logger.info('Sync rules from configuration unchanged');
+ logger.info('Sync rules from configuration unchanged');
  return { updated: false };
  } else {
- micro.logger.info('Sync rules updated from configuration');
+ logger.info('Sync rules updated from configuration');
  const persisted_sync_rules = await this.updateSyncRules({
  content: sync_rules,
  lock: options?.lock
@@ -1,7 +1,7 @@
  import { DEFAULT_SCHEMA, DEFAULT_TAG } from '@powersync/service-sync-rules';

- import * as replication from '@/replication/replication-index.js';
- import * as util from '@/util/util-index.js';
+ import * as replication from '../replication/replication-index.js';
+ import * as util from '../util/util-index.js';

  export class SourceTable {
  static readonly DEFAULT_SCHEMA = DEFAULT_SCHEMA;
@@ -1,10 +1,10 @@
- import * as micro from '@journeyapps-platform/micro';
  import { SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules';
  import * as bson from 'bson';
  import * as mongo from 'mongodb';

- import * as util from '@/util/util-index.js';
- import * as replication from '@/replication/replication-index.js';
+ import * as util from '../../util/util-index.js';
+ import * as replication from '../../replication/replication-index.js';
+ import { container, errors, logger } from '@powersync/lib-services-framework';
  import { BucketStorageBatch, FlushedResult, mergeToast, SaveOptions } from '../BucketStorage.js';
  import { SourceTable } from '../SourceTable.js';
  import { PowerSyncMongo } from './db.js';
@@ -187,6 +187,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
  }
  const currentData = current_data_lookup.get(op.internalBeforeKey) ?? null;
  if (currentData != null) {
+ // If it will be used again later, it will be set again using nextData below
  current_data_lookup.delete(op.internalBeforeKey);
  }
  const nextData = this.saveOperation(persistedBatch!, op, currentData, op_seq);
@@ -242,6 +243,10 @@ export class MongoBucketBatch implements BucketStorageBatch {
  // Not an error if we re-apply a transaction
  existing_buckets = [];
  existing_lookups = [];
+ // Log to help with debugging if there was a consistency issue
+ logger.warn(
+ `Cannot find previous record for update on ${record.sourceTable.qualifiedName}: ${beforeId} / ${record.before?.id}`
+ );
  } else {
  const data = bson.deserialize((result.data as mongo.Binary).buffer, BSON_DESERIALIZE_OPTIONS) as SqliteRow;
  existing_buckets = result.buckets;
@@ -254,6 +259,10 @@ export class MongoBucketBatch implements BucketStorageBatch {
  // Not an error if we re-apply a transaction
  existing_buckets = [];
  existing_lookups = [];
+ // Log to help with debugging if there was a consistency issue
+ logger.warn(
+ `Cannot find previous record for delete on ${record.sourceTable.qualifiedName}: ${beforeId} / ${record.before?.id}`
+ );
  } else {
  existing_buckets = result.buckets;
  existing_lookups = result.lookups;
@@ -278,10 +287,10 @@ export class MongoBucketBatch implements BucketStorageBatch {
  );
  afterData = new bson.Binary(bson.serialize(after!));

- micro.alerts.captureMessage(
+ container.reporter.captureMessage(
  `Data too big on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${e.message}`,
  {
- level: micro.errors.ErrorSeverity.WARNING,
+ level: errors.ErrorSeverity.WARNING,
  metadata: {
  replication_slot: this.slot_name,
  table: record.sourceTable.qualifiedName
@@ -292,7 +301,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
  }

  // 2. Save bucket data
- if (beforeId != null && beforeId != afterId) {
+ if (beforeId != null && (afterId == null || !beforeId.equals(afterId))) {
  // Source ID updated
  if (sourceTable.syncData) {
  // Delete old record
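Note: the switch from `beforeId != afterId` to an explicit null check plus `!beforeId.equals(afterId)` matters because the replica ids here are structured bson values, so `!=` compares object identity rather than content. A minimal illustration, assuming ObjectId-like ids with an `equals` method (the actual id type used by this code may differ):

```ts
import { ObjectId } from 'bson';

const beforeId = new ObjectId('64b7f0c2a1b2c3d4e5f60718');
const afterId = new ObjectId('64b7f0c2a1b2c3d4e5f60718'); // same id, different object

console.log(beforeId != afterId);       // true  -- identity comparison: looks like the id "changed"
console.log(!beforeId.equals(afterId)); // false -- value comparison: correctly treated as unchanged
```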
@@ -329,23 +338,23 @@ export class MongoBucketBatch implements BucketStorageBatch {
  if (afterId && after && util.isCompleteRow(after)) {
  // Insert or update
  if (sourceTable.syncData) {
- const { results: evaluated, errors } = this.sync_rules.evaluateRowWithErrors({
+ const { results: evaluated, errors: syncErrors } = this.sync_rules.evaluateRowWithErrors({
  record: after,
  sourceTable
  });

- for (let error of errors) {
- micro.alerts.captureMessage(
+ for (let error of syncErrors) {
+ container.reporter.captureMessage(
  `Failed to evaluate data query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}`,
  {
- level: micro.errors.ErrorSeverity.WARNING,
+ level: errors.ErrorSeverity.WARNING,
  metadata: {
  replication_slot: this.slot_name,
  table: record.sourceTable.qualifiedName
  }
  }
  );
- micro.logger.error(
+ logger.error(
  `Failed to evaluate data query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}`
  );
  }
@@ -375,17 +384,17 @@ export class MongoBucketBatch implements BucketStorageBatch {
  );

  for (let error of paramErrors) {
- micro.alerts.captureMessage(
+ container.reporter.captureMessage(
  `Failed to evaluate parameter query on ${record.sourceTable.qualifiedName}.${record.after?.id}: ${error.error}`,
  {
- level: micro.errors.ErrorSeverity.WARNING,
+ level: errors.ErrorSeverity.WARNING,
  metadata: {
  replication_slot: this.slot_name,
  table: record.sourceTable.qualifiedName
  }
  }
  );
- micro.logger.error(
+ logger.error(
  `Failed to evaluate parameter query on ${record.sourceTable.qualifiedName}.${after.id}: ${error.error}`
  );
  }
@@ -422,7 +431,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
  };
  }

- if (beforeId != afterId) {
+ if (afterId == null || !beforeId.equals(afterId)) {
  // Either a delete (afterId == null), or replaced the old replication id
  batch.deleteCurrentData(before_key);
  }
@@ -439,7 +448,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
  if (e instanceof mongo.MongoError && e.hasErrorLabel('TransientTransactionError')) {
  // Likely write conflict caused by concurrent write stream replicating
  } else {
- micro.logger.warn('Transaction error', e as Error);
+ logger.warn('Transaction error', e as Error);
  }
  await new Promise((resolve) => setTimeout(resolve, Math.random() * 50));
  throw e;
@@ -464,7 +473,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
  await this.withTransaction(async () => {
  flushTry += 1;
  if (flushTry % 10 == 0) {
- micro.logger.info(`${this.slot_name} ${description} - try ${flushTry}`);
+ logger.info(`${this.slot_name} ${description} - try ${flushTry}`);
  }
  if (flushTry > 20 && Date.now() > lastTry) {
  throw new Error('Max transaction tries exceeded');
@@ -529,13 +538,11 @@ export class MongoBucketBatch implements BucketStorageBatch {
  if (this.last_checkpoint_lsn != null && lsn <= this.last_checkpoint_lsn) {
  // When re-applying transactions, don't create a new checkpoint until
  // we are past the last transaction.
- micro.logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`);
+ logger.info(`Re-applied transaction ${lsn} - skipping checkpoint`);
  return false;
  }
  if (lsn < this.no_checkpoint_before_lsn) {
- micro.logger.info(
- `Waiting until ${this.no_checkpoint_before_lsn} before creating checkpoint, currently at ${lsn}`
- );
+ logger.info(`Waiting until ${this.no_checkpoint_before_lsn} before creating checkpoint, currently at ${lsn}`);
  return false;
  }

@@ -599,7 +606,7 @@ export class MongoBucketBatch implements BucketStorageBatch {
  }

  async save(record: SaveOptions): Promise<FlushedResult | null> {
- micro.logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`);
+ logger.debug(`Saving ${record.tag}:${record.before?.id}/${record.after?.id}`);

  this.batch ??= new OperationBatch();
  this.batch.push(new RecordOperation(record));
@@ -2,9 +2,9 @@ import { SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service
  import * as bson from 'bson';
  import * as mongo from 'mongodb';

- import * as db from '@/db/db-index.js';
- import * as replication from '@/replication/WalStream.js';
- import * as util from '@/util/util-index.js';
+ import * as db from '../../db/db-index.js';
+ import * as replication from '../../replication/WalStream.js';
+ import * as util from '../../util/util-index.js';
  import {
  BucketDataBatchOptions,
  BucketStorageBatch,
@@ -1,8 +1,8 @@
- import * as micro from '@journeyapps-platform/micro';
  import crypto from 'crypto';

  import { PersistedSyncRulesContent, ReplicationLock } from '../BucketStorage.js';
  import { PowerSyncMongo } from './db.js';
+ import { logger } from '@powersync/lib-services-framework';

  /**
  * Manages a lock on a sync rules document, so that only one process
@@ -40,7 +40,7 @@ export class MongoSyncRulesLock implements ReplicationLock {
  try {
  await this.refresh();
  } catch (e) {
- micro.logger.error('Failed to refresh lock', e);
+ logger.error('Failed to refresh lock', e);
  clearInterval(this.refreshInterval);
  }
  }, 30_130);
@@ -59,7 +59,7 @@ export class MongoSyncRulesLock implements ReplicationLock {
  );
  if (result.modifiedCount == 0) {
  // Log and ignore
- micro.logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`);
+ logger.warn(`Lock already released: ${this.sync_rules_id}/${this.lock_id}`);
  }
  }

@@ -1,7 +1,7 @@
  import * as bson from 'bson';
  import { ToastableSqliteRow } from '@powersync/service-sync-rules';

- import * as util from '@/util/util-index.js';
+ import * as util from '../../util/util-index.js';
  import { SaveOptions } from '../BucketStorage.js';

  /**
@@ -2,9 +2,8 @@ import { JSONBig } from '@powersync/service-jsonbig';
  import { EvaluatedParameters, EvaluatedRow } from '@powersync/service-sync-rules';
  import * as bson from 'bson';
  import * as mongo from 'mongodb';
- import * as micro from '@journeyapps-platform/micro';

- import * as util from '@/util/util-index.js';
+ import * as util from '../../util/util-index.js';
  import { SourceTable } from '../SourceTable.js';
  import { currentBucketKey } from './MongoBucketBatch.js';
  import { MongoIdSequence } from './MongoIdSequence.js';
@@ -17,6 +16,7 @@ import {
  SourceKey
  } from './models.js';
  import { serializeLookup } from './util.js';
+ import { logger } from '@powersync/lib-services-framework';

  /**
  * Maximum size of operations we write in a single transaction.
@@ -253,7 +253,7 @@ export class PersistedBatch {
  });
  }

- micro.logger.info(
+ logger.info(
  `powersync_${this.group_id} Flushed ${this.bucketData.length} + ${this.bucketParameters.length} + ${
  this.currentData.length
  } updates, ${Math.round(this.currentSize / 1024)}kb. Last op_id: ${this.debugLastOpId}`
@@ -1,8 +1,7 @@
  import * as mongo from 'mongodb';
- import * as micro from '@journeyapps-platform/micro';
-
- import * as db from '@/db/db-index.js';

+ import * as db from '../../db/db-index.js';
+ import * as locks from '../../locks/locks-index.js';
  import {
  BucketDataDocument,
  BucketParameterDocument,
@@ -36,7 +35,7 @@ export class PowerSyncMongo {
  readonly source_tables: mongo.Collection<SourceTableDocument>;
  readonly write_checkpoints: mongo.Collection<WriteCheckpointDocument>;
  readonly instance: mongo.Collection<InstanceDocument>;
- readonly locks: mongo.Collection<micro.locks.Lock>;
+ readonly locks: mongo.Collection<locks.Lock>;

  readonly client: mongo.MongoClient;
  readonly db: mongo.Db;
package/src/sync/sync.ts CHANGED
@@ -1,16 +1,16 @@
- import * as micro from '@journeyapps-platform/micro';
  import { JSONBig, JsonContainer } from '@powersync/service-jsonbig';
- import { SyncParameters } from '@powersync/service-sync-rules';
+ import { RequestParameters } from '@powersync/service-sync-rules';
  import { Semaphore } from 'async-mutex';
  import { AbortError } from 'ix/aborterror.js';

- import * as auth from '@/auth/auth-index.js';
- import * as storage from '@/storage/storage-index.js';
- import * as util from '@/util/util-index.js';
+ import * as auth from '../auth/auth-index.js';
+ import * as storage from '../storage/storage-index.js';
+ import * as util from '../util/util-index.js';

+ import { logger } from '@powersync/lib-services-framework';
+ import { Metrics } from '../metrics/Metrics.js';
  import { mergeAsyncIterables } from './merge.js';
  import { TokenStreamOptions, tokenStream } from './util.js';
- import { Metrics } from '@/metrics/Metrics.js';

  /**
  * Maximum number of connections actively fetching data.
@@ -21,7 +21,7 @@ const syncSemaphore = new Semaphore(MAX_ACTIVE_CONNECTIONS);
  export interface SyncStreamParameters {
  storage: storage.BucketStorageFactory;
  params: util.StreamingSyncRequest;
- syncParams: SyncParameters;
+ syncParams: RequestParameters;
  token: auth.JwtPayload;
  /**
  * If this signal is aborted, the stream response ends as soon as possible, without error.
@@ -71,7 +71,7 @@ export async function* streamResponse(
  async function* streamResponseInner(
  storage: storage.BucketStorageFactory,
  params: util.StreamingSyncRequest,
- syncParams: SyncParameters,
+ syncParams: RequestParameters,
  signal: AbortSignal
  ): AsyncGenerator<util.StreamingSyncLine | string | null> {
  // Bucket state of bucket id -> op_id.
@@ -141,7 +141,7 @@ async function* streamResponseInner(
  message += `buckets: ${allBuckets.length} | `;
  message += `updated: ${limitedBuckets(diff.updatedBuckets, 20)} | `;
  message += `removed: ${limitedBuckets(diff.removedBuckets, 20)} | `;
- micro.logger.info(message);
+ logger.info(message);

  const checksum_line: util.StreamingSyncCheckpointDiff = {
  checkpoint_diff: {
@@ -156,7 +156,7 @@ async function* streamResponseInner(
  } else {
  let message = `New checkpoint: ${checkpoint} | write: ${writeCheckpoint} | `;
  message += `buckets: ${allBuckets.length} ${limitedBuckets(allBuckets, 20)}`;
- micro.logger.info(message);
+ logger.info(message);
  bucketsToFetch = allBuckets;
  const checksum_line: util.StreamingSyncCheckpoint = {
  checkpoint: {
@@ -246,7 +246,7 @@ async function* bucketDataBatch(request: BucketDataRequest) {
  if (r.data.length == 0) {
  continue;
  }
- micro.logger.debug(`Sending data for ${r.bucket}`);
+ logger.debug(`Sending data for ${r.bucket}`);

  let send_data: any;
  if (binary_data) {