@powersync/service-core 0.0.0-dev-20240718134716 → 0.0.0-dev-20240918082156

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
Files changed (352)
  1. package/CHANGELOG.md +89 -6
  2. package/dist/api/RouteAPI.d.ts +68 -0
  3. package/dist/api/RouteAPI.js +2 -0
  4. package/dist/api/RouteAPI.js.map +1 -0
  5. package/dist/api/api-index.d.ts +1 -0
  6. package/dist/api/api-index.js +1 -0
  7. package/dist/api/api-index.js.map +1 -1
  8. package/dist/api/diagnostics.d.ts +4 -4
  9. package/dist/api/diagnostics.js +11 -65
  10. package/dist/api/diagnostics.js.map +1 -1
  11. package/dist/api/schema.d.ts +3 -5
  12. package/dist/api/schema.js +9 -79
  13. package/dist/api/schema.js.map +1 -1
  14. package/dist/auth/KeyStore.d.ts +7 -4
  15. package/dist/auth/KeyStore.js +1 -1
  16. package/dist/auth/KeyStore.js.map +1 -1
  17. package/dist/auth/auth-index.d.ts +0 -1
  18. package/dist/auth/auth-index.js +0 -1
  19. package/dist/auth/auth-index.js.map +1 -1
  20. package/dist/entry/cli-entry.js +4 -2
  21. package/dist/entry/cli-entry.js.map +1 -1
  22. package/dist/entry/commands/compact-action.d.ts +2 -0
  23. package/dist/entry/commands/compact-action.js +52 -0
  24. package/dist/entry/commands/compact-action.js.map +1 -0
  25. package/dist/entry/commands/migrate-action.js +4 -5
  26. package/dist/entry/commands/migrate-action.js.map +1 -1
  27. package/dist/entry/commands/teardown-action.js +2 -2
  28. package/dist/entry/commands/teardown-action.js.map +1 -1
  29. package/dist/entry/entry-index.d.ts +1 -0
  30. package/dist/entry/entry-index.js +1 -0
  31. package/dist/entry/entry-index.js.map +1 -1
  32. package/dist/index.d.ts +4 -2
  33. package/dist/index.js +4 -2
  34. package/dist/index.js.map +1 -1
  35. package/dist/metrics/Metrics.d.ts +6 -5
  36. package/dist/metrics/Metrics.js +53 -10
  37. package/dist/metrics/Metrics.js.map +1 -1
  38. package/dist/migrations/db/migrations/1684951997326-init.d.ts +2 -2
  39. package/dist/migrations/db/migrations/1684951997326-init.js +4 -2
  40. package/dist/migrations/db/migrations/1684951997326-init.js.map +1 -1
  41. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.d.ts +2 -2
  42. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +4 -2
  43. package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -1
  44. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.d.ts +2 -2
  45. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js +4 -2
  46. package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +1 -1
  47. package/dist/migrations/migrations.d.ts +8 -0
  48. package/dist/migrations/migrations.js +19 -7
  49. package/dist/migrations/migrations.js.map +1 -1
  50. package/dist/modules/AbstractModule.d.ts +26 -0
  51. package/dist/modules/AbstractModule.js +11 -0
  52. package/dist/modules/AbstractModule.js.map +1 -0
  53. package/dist/modules/ModuleManager.d.ts +11 -0
  54. package/dist/modules/ModuleManager.js +32 -0
  55. package/dist/modules/ModuleManager.js.map +1 -0
  56. package/dist/modules/modules-index.d.ts +2 -0
  57. package/dist/modules/modules-index.js +3 -0
  58. package/dist/modules/modules-index.js.map +1 -0
  59. package/dist/replication/AbstractReplicationJob.d.ts +38 -0
  60. package/dist/replication/AbstractReplicationJob.js +51 -0
  61. package/dist/replication/AbstractReplicationJob.js.map +1 -0
  62. package/dist/replication/AbstractReplicator.d.ts +53 -0
  63. package/dist/replication/AbstractReplicator.js +187 -0
  64. package/dist/replication/AbstractReplicator.js.map +1 -0
  65. package/dist/replication/ErrorRateLimiter.d.ts +0 -9
  66. package/dist/replication/ErrorRateLimiter.js +1 -42
  67. package/dist/replication/ErrorRateLimiter.js.map +1 -1
  68. package/dist/replication/ReplicationEngine.d.ts +18 -0
  69. package/dist/replication/ReplicationEngine.js +41 -0
  70. package/dist/replication/ReplicationEngine.js.map +1 -0
  71. package/dist/replication/ReplicationModule.d.ts +39 -0
  72. package/dist/replication/ReplicationModule.js +65 -0
  73. package/dist/replication/ReplicationModule.js.map +1 -0
  74. package/dist/replication/replication-index.d.ts +4 -6
  75. package/dist/replication/replication-index.js +4 -6
  76. package/dist/replication/replication-index.js.map +1 -1
  77. package/dist/routes/RouterEngine.d.ts +42 -0
  78. package/dist/routes/RouterEngine.js +80 -0
  79. package/dist/routes/RouterEngine.js.map +1 -0
  80. package/dist/routes/auth.d.ts +2 -2
  81. package/dist/routes/auth.js +11 -11
  82. package/dist/routes/auth.js.map +1 -1
  83. package/dist/routes/configure-fastify.d.ts +737 -0
  84. package/dist/routes/configure-fastify.js +57 -0
  85. package/dist/routes/configure-fastify.js.map +1 -0
  86. package/dist/routes/configure-rsocket.d.ts +13 -0
  87. package/dist/routes/configure-rsocket.js +47 -0
  88. package/dist/routes/configure-rsocket.js.map +1 -0
  89. package/dist/routes/endpoints/admin.d.ts +0 -34
  90. package/dist/routes/endpoints/admin.js +48 -89
  91. package/dist/routes/endpoints/admin.js.map +1 -1
  92. package/dist/routes/endpoints/checkpointing.d.ts +56 -16
  93. package/dist/routes/endpoints/checkpointing.js +33 -12
  94. package/dist/routes/endpoints/checkpointing.js.map +1 -1
  95. package/dist/routes/endpoints/route-endpoints-index.d.ts +0 -1
  96. package/dist/routes/endpoints/route-endpoints-index.js +0 -1
  97. package/dist/routes/endpoints/route-endpoints-index.js.map +1 -1
  98. package/dist/routes/endpoints/socket-route.js +46 -39
  99. package/dist/routes/endpoints/socket-route.js.map +1 -1
  100. package/dist/routes/endpoints/sync-rules.d.ts +1 -1
  101. package/dist/routes/endpoints/sync-rules.js +32 -23
  102. package/dist/routes/endpoints/sync-rules.js.map +1 -1
  103. package/dist/routes/endpoints/sync-stream.d.ts +10 -0
  104. package/dist/routes/endpoints/sync-stream.js +17 -13
  105. package/dist/routes/endpoints/sync-stream.js.map +1 -1
  106. package/dist/routes/route-register.d.ts +1 -1
  107. package/dist/routes/route-register.js +1 -1
  108. package/dist/routes/route-register.js.map +1 -1
  109. package/dist/routes/router-socket.d.ts +5 -4
  110. package/dist/routes/router-socket.js +2 -1
  111. package/dist/routes/router-socket.js.map +1 -1
  112. package/dist/routes/router.d.ts +7 -2
  113. package/dist/routes/router.js.map +1 -1
  114. package/dist/routes/routes-index.d.ts +3 -0
  115. package/dist/routes/routes-index.js +3 -0
  116. package/dist/routes/routes-index.js.map +1 -1
  117. package/dist/runner/teardown.js +47 -76
  118. package/dist/runner/teardown.js.map +1 -1
  119. package/dist/storage/BucketStorage.d.ts +61 -20
  120. package/dist/storage/BucketStorage.js +0 -10
  121. package/dist/storage/BucketStorage.js.map +1 -1
  122. package/dist/storage/MongoBucketStorage.d.ts +4 -4
  123. package/dist/storage/MongoBucketStorage.js +19 -24
  124. package/dist/storage/MongoBucketStorage.js.map +1 -1
  125. package/dist/storage/SourceEntity.d.ts +20 -0
  126. package/dist/storage/SourceEntity.js +2 -0
  127. package/dist/storage/SourceEntity.js.map +1 -0
  128. package/dist/storage/SourceTable.d.ts +4 -5
  129. package/dist/storage/SourceTable.js +3 -4
  130. package/dist/storage/SourceTable.js.map +1 -1
  131. package/dist/storage/StorageEngine.d.ts +24 -0
  132. package/dist/storage/StorageEngine.js +43 -0
  133. package/dist/storage/StorageEngine.js.map +1 -0
  134. package/dist/storage/StorageProvider.d.ts +21 -0
  135. package/dist/storage/StorageProvider.js +2 -0
  136. package/dist/storage/StorageProvider.js.map +1 -0
  137. package/dist/storage/mongo/MongoBucketBatch.d.ts +1 -1
  138. package/dist/storage/mongo/MongoBucketBatch.js +6 -7
  139. package/dist/storage/mongo/MongoBucketBatch.js.map +1 -1
  140. package/dist/storage/mongo/MongoCompactor.d.ts +40 -0
  141. package/dist/storage/mongo/MongoCompactor.js +293 -0
  142. package/dist/storage/mongo/MongoCompactor.js.map +1 -0
  143. package/dist/storage/mongo/MongoPersistedSyncRulesContent.d.ts +2 -2
  144. package/dist/storage/mongo/MongoPersistedSyncRulesContent.js +2 -2
  145. package/dist/storage/mongo/MongoPersistedSyncRulesContent.js.map +1 -1
  146. package/dist/storage/mongo/MongoStorageProvider.d.ts +5 -0
  147. package/dist/storage/mongo/MongoStorageProvider.js +26 -0
  148. package/dist/storage/mongo/MongoStorageProvider.js.map +1 -0
  149. package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +9 -7
  150. package/dist/storage/mongo/MongoSyncBucketStorage.js +43 -28
  151. package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
  152. package/dist/storage/mongo/MongoSyncRulesLock.js +1 -1
  153. package/dist/storage/mongo/MongoSyncRulesLock.js.map +1 -1
  154. package/dist/storage/mongo/OperationBatch.d.ts +7 -3
  155. package/dist/storage/mongo/OperationBatch.js +16 -7
  156. package/dist/storage/mongo/OperationBatch.js.map +1 -1
  157. package/dist/storage/mongo/PersistedBatch.d.ts +3 -3
  158. package/dist/storage/mongo/PersistedBatch.js +2 -2
  159. package/dist/storage/mongo/PersistedBatch.js.map +1 -1
  160. package/dist/storage/mongo/models.d.ts +17 -7
  161. package/dist/storage/mongo/models.js.map +1 -1
  162. package/dist/storage/mongo/util.d.ts +14 -0
  163. package/dist/storage/mongo/util.js +70 -0
  164. package/dist/storage/mongo/util.js.map +1 -1
  165. package/dist/storage/storage-index.d.ts +5 -2
  166. package/dist/storage/storage-index.js +5 -2
  167. package/dist/storage/storage-index.js.map +1 -1
  168. package/dist/sync/RequestTracker.js +2 -3
  169. package/dist/sync/RequestTracker.js.map +1 -1
  170. package/dist/sync/sync-index.d.ts +1 -0
  171. package/dist/sync/sync-index.js +1 -0
  172. package/dist/sync/sync-index.js.map +1 -1
  173. package/dist/sync/sync.d.ts +2 -1
  174. package/dist/sync/sync.js +56 -17
  175. package/dist/sync/sync.js.map +1 -1
  176. package/dist/system/ServiceContext.d.ts +37 -0
  177. package/dist/system/ServiceContext.js +48 -0
  178. package/dist/system/ServiceContext.js.map +1 -0
  179. package/dist/system/system-index.d.ts +1 -1
  180. package/dist/system/system-index.js +1 -1
  181. package/dist/system/system-index.js.map +1 -1
  182. package/dist/util/config/collectors/config-collector.d.ts +12 -0
  183. package/dist/util/config/collectors/config-collector.js +43 -0
  184. package/dist/util/config/collectors/config-collector.js.map +1 -1
  185. package/dist/util/config/compound-config-collector.d.ts +10 -29
  186. package/dist/util/config/compound-config-collector.js +28 -84
  187. package/dist/util/config/compound-config-collector.js.map +1 -1
  188. package/dist/util/config/sync-rules/sync-rules-provider.d.ts +9 -0
  189. package/dist/util/config/sync-rules/sync-rules-provider.js +15 -0
  190. package/dist/util/config/sync-rules/sync-rules-provider.js.map +1 -0
  191. package/dist/util/config/types.d.ts +6 -4
  192. package/dist/util/config/types.js.map +1 -1
  193. package/dist/util/config.d.ts +3 -4
  194. package/dist/util/config.js +5 -20
  195. package/dist/util/config.js.map +1 -1
  196. package/dist/util/protocol-types.d.ts +4 -0
  197. package/dist/util/protocol-types.js +5 -1
  198. package/dist/util/protocol-types.js.map +1 -1
  199. package/dist/util/util-index.d.ts +3 -6
  200. package/dist/util/util-index.js +3 -6
  201. package/dist/util/util-index.js.map +1 -1
  202. package/dist/util/utils.d.ts +10 -6
  203. package/dist/util/utils.js +45 -25
  204. package/dist/util/utils.js.map +1 -1
  205. package/package.json +7 -7
  206. package/src/api/RouteAPI.ts +78 -0
  207. package/src/api/api-index.ts +1 -0
  208. package/src/api/diagnostics.ts +16 -71
  209. package/src/api/schema.ts +13 -89
  210. package/src/auth/KeyStore.ts +9 -6
  211. package/src/auth/auth-index.ts +0 -1
  212. package/src/entry/cli-entry.ts +4 -2
  213. package/src/entry/commands/compact-action.ts +57 -0
  214. package/src/entry/commands/migrate-action.ts +5 -8
  215. package/src/entry/commands/teardown-action.ts +2 -2
  216. package/src/entry/entry-index.ts +1 -0
  217. package/src/index.ts +5 -2
  218. package/src/metrics/Metrics.ts +70 -15
  219. package/src/migrations/db/migrations/1684951997326-init.ts +9 -4
  220. package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +7 -4
  221. package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +6 -4
  222. package/src/migrations/migrations.ts +24 -8
  223. package/src/modules/AbstractModule.ts +37 -0
  224. package/src/modules/ModuleManager.ts +34 -0
  225. package/src/modules/modules-index.ts +2 -0
  226. package/src/replication/AbstractReplicationJob.ts +79 -0
  227. package/src/replication/AbstractReplicator.ts +227 -0
  228. package/src/replication/ErrorRateLimiter.ts +0 -44
  229. package/src/replication/ReplicationEngine.ts +43 -0
  230. package/src/replication/ReplicationModule.ts +101 -0
  231. package/src/replication/replication-index.ts +4 -6
  232. package/src/routes/RouterEngine.ts +120 -0
  233. package/src/routes/auth.ts +21 -12
  234. package/src/routes/configure-fastify.ts +101 -0
  235. package/src/routes/configure-rsocket.ts +60 -0
  236. package/src/routes/endpoints/admin.ts +74 -100
  237. package/src/routes/endpoints/checkpointing.ts +46 -12
  238. package/src/routes/endpoints/route-endpoints-index.ts +0 -1
  239. package/src/routes/endpoints/socket-route.ts +50 -42
  240. package/src/routes/endpoints/sync-rules.ts +41 -25
  241. package/src/routes/endpoints/sync-stream.ts +17 -13
  242. package/src/routes/route-register.ts +2 -2
  243. package/src/routes/router-socket.ts +6 -5
  244. package/src/routes/router.ts +7 -2
  245. package/src/routes/routes-index.ts +3 -0
  246. package/src/runner/teardown.ts +50 -88
  247. package/src/storage/BucketStorage.ts +74 -26
  248. package/src/storage/MongoBucketStorage.ts +23 -26
  249. package/src/storage/SourceEntity.ts +22 -0
  250. package/src/storage/SourceTable.ts +4 -6
  251. package/src/storage/StorageEngine.ts +55 -0
  252. package/src/storage/StorageProvider.ts +27 -0
  253. package/src/storage/mongo/MongoBucketBatch.ts +8 -8
  254. package/src/storage/mongo/MongoCompactor.ts +372 -0
  255. package/src/storage/mongo/MongoPersistedSyncRulesContent.ts +3 -3
  256. package/src/storage/mongo/MongoStorageProvider.ts +31 -0
  257. package/src/storage/mongo/MongoSyncBucketStorage.ts +64 -34
  258. package/src/storage/mongo/MongoSyncRulesLock.ts +1 -1
  259. package/src/storage/mongo/OperationBatch.ts +18 -11
  260. package/src/storage/mongo/PersistedBatch.ts +6 -5
  261. package/src/storage/mongo/models.ts +17 -7
  262. package/src/storage/mongo/util.ts +71 -1
  263. package/src/storage/storage-index.ts +5 -2
  264. package/src/sync/RequestTracker.ts +3 -3
  265. package/src/sync/sync-index.ts +1 -0
  266. package/src/sync/sync.ts +66 -17
  267. package/src/system/ServiceContext.ts +68 -0
  268. package/src/system/system-index.ts +1 -1
  269. package/src/util/config/collectors/config-collector.ts +48 -0
  270. package/src/util/config/compound-config-collector.ts +45 -110
  271. package/src/util/config/sync-rules/sync-rules-provider.ts +18 -0
  272. package/src/util/config/types.ts +6 -5
  273. package/src/util/config.ts +6 -23
  274. package/src/util/protocol-types.ts +6 -1
  275. package/src/util/util-index.ts +3 -6
  276. package/src/util/utils.ts +55 -39
  277. package/test/src/__snapshots__/sync.test.ts.snap +90 -5
  278. package/test/src/auth.test.ts +7 -7
  279. package/test/src/broadcast_iterable.test.ts +1 -1
  280. package/test/src/bucket_validation.test.ts +142 -0
  281. package/test/src/bucket_validation.ts +116 -0
  282. package/test/src/checksum_cache.test.ts +3 -3
  283. package/test/src/compacting.test.ts +216 -0
  284. package/test/src/data_storage.test.ts +275 -204
  285. package/test/src/env.ts +1 -3
  286. package/test/src/merge_iterable.test.ts +1 -6
  287. package/test/src/setup.ts +1 -1
  288. package/test/src/stream_utils.ts +42 -0
  289. package/test/src/sync.test.ts +209 -48
  290. package/test/src/util.ts +110 -55
  291. package/test/tsconfig.json +1 -1
  292. package/tsconfig.tsbuildinfo +1 -1
  293. package/dist/auth/SupabaseKeyCollector.d.ts +0 -22
  294. package/dist/auth/SupabaseKeyCollector.js +0 -61
  295. package/dist/auth/SupabaseKeyCollector.js.map +0 -1
  296. package/dist/replication/PgRelation.d.ts +0 -16
  297. package/dist/replication/PgRelation.js +0 -26
  298. package/dist/replication/PgRelation.js.map +0 -1
  299. package/dist/replication/WalConnection.d.ts +0 -34
  300. package/dist/replication/WalConnection.js +0 -190
  301. package/dist/replication/WalConnection.js.map +0 -1
  302. package/dist/replication/WalStream.d.ts +0 -57
  303. package/dist/replication/WalStream.js +0 -517
  304. package/dist/replication/WalStream.js.map +0 -1
  305. package/dist/replication/WalStreamManager.d.ts +0 -30
  306. package/dist/replication/WalStreamManager.js +0 -198
  307. package/dist/replication/WalStreamManager.js.map +0 -1
  308. package/dist/replication/WalStreamRunner.d.ts +0 -38
  309. package/dist/replication/WalStreamRunner.js +0 -155
  310. package/dist/replication/WalStreamRunner.js.map +0 -1
  311. package/dist/replication/util.d.ts +0 -9
  312. package/dist/replication/util.js +0 -62
  313. package/dist/replication/util.js.map +0 -1
  314. package/dist/routes/endpoints/dev.d.ts +0 -312
  315. package/dist/routes/endpoints/dev.js +0 -172
  316. package/dist/routes/endpoints/dev.js.map +0 -1
  317. package/dist/system/CorePowerSyncSystem.d.ts +0 -23
  318. package/dist/system/CorePowerSyncSystem.js +0 -52
  319. package/dist/system/CorePowerSyncSystem.js.map +0 -1
  320. package/dist/util/PgManager.d.ts +0 -24
  321. package/dist/util/PgManager.js +0 -55
  322. package/dist/util/PgManager.js.map +0 -1
  323. package/dist/util/migration_lib.d.ts +0 -11
  324. package/dist/util/migration_lib.js +0 -64
  325. package/dist/util/migration_lib.js.map +0 -1
  326. package/dist/util/pgwire_utils.d.ts +0 -24
  327. package/dist/util/pgwire_utils.js +0 -117
  328. package/dist/util/pgwire_utils.js.map +0 -1
  329. package/dist/util/populate_test_data.d.ts +0 -8
  330. package/dist/util/populate_test_data.js +0 -65
  331. package/dist/util/populate_test_data.js.map +0 -1
  332. package/src/auth/SupabaseKeyCollector.ts +0 -67
  333. package/src/replication/PgRelation.ts +0 -42
  334. package/src/replication/WalConnection.ts +0 -227
  335. package/src/replication/WalStream.ts +0 -628
  336. package/src/replication/WalStreamManager.ts +0 -213
  337. package/src/replication/WalStreamRunner.ts +0 -180
  338. package/src/replication/util.ts +0 -76
  339. package/src/routes/endpoints/dev.ts +0 -199
  340. package/src/system/CorePowerSyncSystem.ts +0 -64
  341. package/src/util/PgManager.ts +0 -64
  342. package/src/util/migration_lib.ts +0 -79
  343. package/src/util/pgwire_utils.ts +0 -139
  344. package/src/util/populate_test_data.ts +0 -78
  345. package/test/src/__snapshots__/pg_test.test.ts.snap +0 -256
  346. package/test/src/large_batch.test.ts +0 -194
  347. package/test/src/pg_test.test.ts +0 -450
  348. package/test/src/schema_changes.test.ts +0 -545
  349. package/test/src/slow_tests.test.ts +0 -296
  350. package/test/src/validation.test.ts +0 -63
  351. package/test/src/wal_stream.test.ts +0 -314
  352. package/test/src/wal_stream_utils.ts +0 -147
@@ -1,26 +1,25 @@
+ import {
+ BucketDataBatchOptions,
+ ParseSyncRulesOptions,
+ PersistedSyncRulesContent,
+ StartBatchOptions
+ } from '@/storage/BucketStorage.js';
  import { RequestParameters, SqlSyncRules } from '@powersync/service-sync-rules';
- import * as bson from 'bson';
  import { describe, expect, test } from 'vitest';
- import { SourceTable } from '../../src/storage/SourceTable.js';
- import { hashData } from '../../src/util/utils.js';
- import { MONGO_STORAGE_FACTORY, StorageFactory } from './util.js';
- import { SyncBucketData } from '../../src/util/protocol-types.js';
- import { BucketDataBatchOptions } from '../../src/storage/BucketStorage.js';
- import { fromAsync } from './wal_stream_utils.js';
-
- function makeTestTable(name: string, columns?: string[] | undefined) {
- const relId = hashData('table', name, (columns ?? ['id']).join(','));
- const id = new bson.ObjectId('6544e3899293153fa7b38331');
- return new SourceTable(
- id,
- SourceTable.DEFAULT_TAG,
- relId,
- SourceTable.DEFAULT_SCHEMA,
- name,
- (columns ?? ['id']).map((column) => ({ name: column, typeOid: 25 })),
- true
- );
- }
+ import { fromAsync, oneFromAsync } from './stream_utils.js';
+ import {
+ BATCH_OPTIONS,
+ getBatchData,
+ getBatchMeta,
+ makeTestTable,
+ MONGO_STORAGE_FACTORY,
+ PARSE_OPTIONS,
+ rid,
+ StorageFactory,
+ testRules,
+ ZERO_LSN
+ } from './util.js';
+ import { getUuidReplicaIdentityBson } from '@/util/util-index.js';

  const TEST_TABLE = makeTestTable('test', ['id']);

@@ -30,7 +29,7 @@ describe('store - mongodb', function () {

  function defineDataStorageTests(factory: StorageFactory) {
  test('save and load parameters', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(`
  bucket_definitions:
  mybucket:
  parameters:
@@ -38,9 +37,9 @@ bucket_definitions:
  data: []
  `);

- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ const storage = (await factory()).getInstance(sync_rules);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: 'insert',
@@ -49,7 +48,8 @@
  id1: 'user3',
  id2: 'user4',
  group_id: 'group2a'
- }
+ },
+ afterReplicaId: rid('t2')
  });

  await batch.save({
@@ -60,7 +60,8 @@ bucket_definitions:
  id1: 'user1',
  id2: 'user2',
  group_id: 'group1a'
- }
+ },
+ afterReplicaId: rid('t1')
  });
  });

@@ -73,34 +74,38 @@ bucket_definitions:
  });

  test('it should use the latest version', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE id = token_parameters.user_id
  data: []
- `);
+ `
+ );

- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ const storage = (await factory()).getInstance(sync_rules);

- const result1 = await storage.startBatch({}, async (batch) => {
+ const result1 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: 'insert',
  after: {
  id: 'user1',
  group_id: 'group1'
- }
+ },
+ afterReplicaId: rid('user1')
  });
  });
- const result2 = await storage.startBatch({}, async (batch) => {
+ const result2 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: 'insert',
  after: {
  id: 'user1',
  group_id: 'group2'
- }
+ },
+ afterReplicaId: rid('user1')
  });
  });

@@ -121,17 +126,19 @@ bucket_definitions:
  });

  test('save and load parameters with different number types', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
  data: []
- `);
+ `
+ );

- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ const storage = (await factory()).getInstance(sync_rules);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: 'insert',
@@ -141,7 +148,8 @@ bucket_definitions:
  n1: 314n,
  f2: 314,
  f3: 3.14
- }
+ },
+ afterReplicaId: rid('t1')
  });
  });

@@ -162,17 +170,19 @@ bucket_definitions:
  // This specific case tested here cannot happen with postgres in practice, but we still
  // test this to ensure correct deserialization.

- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE n1 = token_parameters.n1
  data: []
- `);
+ `
+ );

- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ const storage = (await factory()).getInstance(sync_rules);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: 'insert',
@@ -180,7 +190,8 @@ bucket_definitions:
  id: 't1',
  group_id: 'group1',
  n1: 1152921504606846976n // 2^60
- }
+ },
+ afterReplicaId: rid('t1')
  });

  await batch.save({
@@ -192,7 +203,8 @@ bucket_definitions:
  // Simulate a TOAST value, even though it can't happen for values like this
  // in practice.
  n1: undefined
- }
+ },
+ afterReplicaId: rid('t1')
  });
  });

@@ -205,15 +217,17 @@ bucket_definitions:
  });

  test('removing row', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ const storage = (await factory()).getInstance(sync_rules);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  await batch.save({
@@ -222,21 +236,20 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: 'test1'
- }
+ },
+ afterReplicaId: rid('test1')
  });
  await batch.save({
  sourceTable,
  tag: 'delete',
- before: {
- id: 'test1'
- }
+ beforeReplicaId: rid('test1')
  });
  });

  const checkpoint = result!.flushed_op;

  const batch = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
- const data = batch[0].data.map((d) => {
+ const data = batch[0].batch.data.map((d) => {
  return {
  op: d.op,
  object_id: d.object_id,
@@ -265,25 +278,29 @@ bucket_definitions:
  test('save and load parameters with workspaceId', async () => {
  const WORKSPACE_TABLE = makeTestTable('workspace', ['id']);

- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules_content = testRules(
+ `
  bucket_definitions:
  by_workspace:
  parameters:
  - SELECT id as workspace_id FROM workspace WHERE
  workspace."userId" = token_parameters.user_id
  data: []
- `);
+ `
+ );
+ const sync_rules = sync_rules_content.parsed(PARSE_OPTIONS).sync_rules;

- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ const storage = (await factory()).getInstance(sync_rules_content);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: WORKSPACE_TABLE,
  tag: 'insert',
  after: {
  id: 'workspace1',
  userId: 'u1'
- }
+ },
+ afterReplicaId: rid('workspace1')
  });
  });

@@ -311,25 +328,29 @@ bucket_definitions:
  test('save and load parameters with dynamic global buckets', async () => {
  const WORKSPACE_TABLE = makeTestTable('workspace');

- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules_content = testRules(
+ `
  bucket_definitions:
  by_public_workspace:
  parameters:
  - SELECT id as workspace_id FROM workspace WHERE
  workspace.visibility = 'public'
  data: []
- `);
+ `
+ );
+ const sync_rules = sync_rules_content.parsed(PARSE_OPTIONS).sync_rules;

- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ const storage = (await factory()).getInstance(sync_rules_content);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: WORKSPACE_TABLE,
  tag: 'insert',
  after: {
  id: 'workspace1',
  visibility: 'public'
- }
+ },
+ afterReplicaId: rid('workspace1')
  });

  await batch.save({
@@ -338,7 +359,8 @@ bucket_definitions:
  after: {
  id: 'workspace2',
  visibility: 'private'
- }
+ },
+ afterReplicaId: rid('workspace2')
  });

  await batch.save({
@@ -347,7 +369,8 @@ bucket_definitions:
  after: {
  id: 'workspace3',
  visibility: 'public'
- }
+ },
+ afterReplicaId: rid('workspace3')
  });
  });

@@ -377,7 +400,8 @@ bucket_definitions:
  test('multiple parameter queries', async () => {
  const WORKSPACE_TABLE = makeTestTable('workspace');

- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules_content = testRules(
+ `
  bucket_definitions:
  by_workspace:
  parameters:
@@ -386,18 +410,21 @@ bucket_definitions:
  - SELECT id as workspace_id FROM workspace WHERE
  workspace.user_id = token_parameters.user_id
  data: []
- `);
+ `
+ );
+ const sync_rules = sync_rules_content.parsed(PARSE_OPTIONS).sync_rules;

- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ const storage = (await factory()).getInstance(sync_rules_content);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: WORKSPACE_TABLE,
  tag: 'insert',
  after: {
  id: 'workspace1',
  visibility: 'public'
- }
+ },
+ afterReplicaId: rid('workspace1')
  });

  await batch.save({
@@ -406,7 +433,8 @@ bucket_definitions:
  after: {
  id: 'workspace2',
  visibility: 'private'
- }
+ },
+ afterReplicaId: rid('workspace2')
  });

  await batch.save({
@@ -416,7 +444,8 @@ bucket_definitions:
  id: 'workspace3',
  user_id: 'u1',
  visibility: 'private'
- }
+ },
+ afterReplicaId: rid('workspace3')
  });

  await batch.save({
@@ -426,7 +455,8 @@ bucket_definitions:
  id: 'workspace4',
  user_id: 'u2',
  visibility: 'private'
- }
+ },
+ afterReplicaId: rid('workspace4')
  });
  });

@@ -463,16 +493,18 @@ bucket_definitions:
  });

  test('changing client ids', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT client_id as id, description FROM "%"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ const storage = (await factory()).getInstance(sync_rules);

  const sourceTable = TEST_TABLE;
- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable,
  tag: 'insert',
@@ -480,7 +512,8 @@ bucket_definitions:
  id: 'test1',
  client_id: 'client1a',
  description: 'test1a'
- }
+ },
+ afterReplicaId: rid('test1')
  });
  await batch.save({
  sourceTable,
@@ -489,7 +522,8 @@ bucket_definitions:
  id: 'test1',
  client_id: 'client1b',
  description: 'test1b'
- }
+ },
+ afterReplicaId: rid('test1')
  });

  await batch.save({
@@ -499,12 +533,13 @@ bucket_definitions:
  id: 'test2',
  client_id: 'client2',
  description: 'test2'
- }
+ },
+ afterReplicaId: rid('test2')
  });
  });
  const checkpoint = result!.flushed_op;
  const batch = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
- const data = batch[0].data.map((d) => {
+ const data = batch[0].batch.data.map((d) => {
  return {
  op: d.op,
  object_id: d.object_id
@@ -520,15 +555,17 @@ bucket_definitions:
  });

  test('re-apply delete', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ const storage = (await factory()).getInstance(sync_rules);

- await storage.startBatch({}, async (batch) => {
+ await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  await batch.save({
@@ -537,38 +574,35 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: 'test1'
- }
+ },
+ afterReplicaId: rid('test1')
  });
  });

- await storage.startBatch({}, async (batch) => {
+ await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  await batch.save({
  sourceTable,
  tag: 'delete',
- before: {
- id: 'test1'
- }
+ beforeReplicaId: rid('test1')
  });
  });

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  await batch.save({
  sourceTable,
  tag: 'delete',
- before: {
- id: 'test1'
- }
+ beforeReplicaId: rid('test1')
  });
  });

  const checkpoint = result!.flushed_op;

  const batch = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
- const data = batch[0].data.map((d) => {
+ const data = batch[0].batch.data.map((d) => {
  return {
  op: d.op,
  object_id: d.object_id,
@@ -595,15 +629,17 @@ bucket_definitions:
  });

  test('re-apply update + delete', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ const storage = (await factory()).getInstance(sync_rules);

- await storage.startBatch({}, async (batch) => {
+ await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  await batch.save({
@@ -612,11 +648,12 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: 'test1'
- }
+ },
+ afterReplicaId: rid('test1')
  });
  });

- await storage.startBatch({}, async (batch) => {
+ await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  await batch.save({
@@ -625,7 +662,8 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: undefined
- }
+ },
+ afterReplicaId: rid('test1')
  });

  await batch.save({
@@ -634,19 +672,18 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: undefined
- }
+ },
+ afterReplicaId: rid('test1')
  });

  await batch.save({
  sourceTable,
  tag: 'delete',
- before: {
- id: 'test1'
- }
+ beforeReplicaId: rid('test1')
  });
  });

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  await batch.save({
@@ -655,7 +692,8 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: undefined
- }
+ },
+ afterReplicaId: rid('test1')
  });

  await batch.save({
@@ -664,15 +702,14 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: undefined
- }
+ },
+ afterReplicaId: rid('test1')
  });

  await batch.save({
  sourceTable,
  tag: 'delete',
- before: {
- id: 'test1'
- }
+ beforeReplicaId: rid('test1')
  });
  });

@@ -680,7 +717,7 @@ bucket_definitions:

  const batch = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));

- const data = batch[0].data.map((d) => {
+ const data = batch[0].batch.data.map((d) => {
  return {
  op: d.op,
  object_id: d.object_id,
@@ -709,17 +746,19 @@ bucket_definitions:
  });

  test('truncate parameters', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
  data: []
- `);
+ `
+ );

- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ const storage = (await factory()).getInstance(sync_rules);

- await storage.startBatch({}, async (batch) => {
+ await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: 'insert',
@@ -728,7 +767,8 @@ bucket_definitions:
  id1: 'user3',
  id2: 'user4',
  group_id: 'group2a'
- }
+ },
+ afterReplicaId: rid('t2')
  });

  await batch.truncate([TEST_TABLE]);
@@ -749,16 +789,18 @@ bucket_definitions:
  // 1. Not getting the correct "current_data" state for each operation.
  // 2. Output order not being correct.

- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "test"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ const storage = (await factory()).getInstance(sync_rules);

  // Pre-setup
- const result1 = await storage.startBatch({}, async (batch) => {
+ const result1 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  await batch.save({
@@ -767,7 +809,8 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: 'test1a'
- }
+ },
+ afterReplicaId: rid('test1')
  });

  await batch.save({
@@ -776,14 +819,15 @@ bucket_definitions:
  after: {
  id: 'test2',
  description: 'test2a'
- }
+ },
+ afterReplicaId: rid('test2')
  });
  });

  const checkpoint1 = result1?.flushed_op ?? '0';

  // Test batch
- const result2 = await storage.startBatch({}, async (batch) => {
+ const result2 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
  // b
  await batch.save({
@@ -792,7 +836,8 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: 'test1b'
- }
+ },
+ afterReplicaId: rid('test1')
  });

  await batch.save({
@@ -801,10 +846,12 @@ bucket_definitions:
  before: {
  id: 'test1'
  },
+ beforeReplicaId: rid('test1'),
  after: {
  id: 'test2',
  description: 'test2b'
- }
+ },
+ afterReplicaId: rid('test2')
  });

  await batch.save({
@@ -813,10 +860,13 @@ bucket_definitions:
  before: {
  id: 'test2'
  },
+ beforeReplicaId: rid('test2'),
  after: {
  id: 'test3',
  description: 'test3b'
- }
+ },
+
+ afterReplicaId: rid('test3')
  });

  // c
@@ -826,7 +876,8 @@ bucket_definitions:
  after: {
  id: 'test2',
  description: 'test2c'
- }
+ },
+ afterReplicaId: rid('test2')
  });

  // d
@@ -836,7 +887,8 @@ bucket_definitions:
  after: {
  id: 'test4',
  description: 'test4d'
- }
+ },
+ afterReplicaId: rid('test4')
  });

  await batch.save({
@@ -845,17 +897,19 @@ bucket_definitions:
  before: {
  id: 'test4'
  },
+ beforeReplicaId: rid('test4'),
  after: {
  id: 'test5',
  description: 'test5d'
- }
+ },
+ afterReplicaId: rid('test5')
  });
  });

  const checkpoint2 = result2!.flushed_op;

  const batch = await fromAsync(storage.getBucketDataBatch(checkpoint2, new Map([['global[]', checkpoint1]])));
- const data = batch[0].data.map((d) => {
+ const data = batch[0].batch.data.map((d) => {
  return {
  op: d.op,
  object_id: d.object_id,
@@ -883,31 +937,40 @@ bucket_definitions:
  });

  test('changed data with replica identity full', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "test"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ function rid2(id: string, description: string) {
+ return getUuidReplicaIdentityBson({ id, description }, [
+ { name: 'id', type: 'VARCHAR', typeId: 25 },
+ { name: 'description', type: 'VARCHAR', typeId: 25 }
+ ]);
+ }
+ const storage = (await factory()).getInstance(sync_rules);

  const sourceTable = makeTestTable('test', ['id', 'description']);

  // Pre-setup
- const result1 = await storage.startBatch({}, async (batch) => {
+ const result1 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable,
  tag: 'insert',
  after: {
  id: 'test1',
  description: 'test1a'
- }
+ },
+ afterReplicaId: rid2('test1', 'test1a')
  });
  });

  const checkpoint1 = result1?.flushed_op ?? '0';

- const result2 = await storage.startBatch({}, async (batch) => {
+ const result2 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  // Unchanged, but has a before id
  await batch.save({
  sourceTable,
@@ -916,14 +979,16 @@ bucket_definitions:
  id: 'test1',
  description: 'test1a'
  },
+ beforeReplicaId: rid2('test1', 'test1a'),
  after: {
  id: 'test1',
  description: 'test1b'
- }
+ },
+ afterReplicaId: rid2('test1', 'test1b')
  });
  });

- const result3 = await storage.startBatch({}, async (batch) => {
+ const result3 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  // Delete
  await batch.save({
  sourceTable,
@@ -932,6 +997,7 @@ bucket_definitions:
  id: 'test1',
  description: 'test1b'
  },
+ beforeReplicaId: rid2('test1', 'test1b'),
  after: undefined
  });
  });
@@ -939,7 +1005,7 @@ bucket_definitions:
  const checkpoint3 = result3!.flushed_op;

  const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
- const data = batch[0].data.map((d) => {
+ const data = batch[0].batch.data.map((d) => {
  return {
  op: d.op,
  object_id: d.object_id,
@@ -975,31 +1041,41 @@ bucket_definitions:
  });

  test('unchanged data with replica identity full', async () => {
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "test"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ function rid2(id: string, description: string) {
+ return getUuidReplicaIdentityBson({ id, description }, [
+ { name: 'id', type: 'VARCHAR', typeId: 25 },
+ { name: 'description', type: 'VARCHAR', typeId: 25 }
+ ]);
+ }
+
+ const storage = (await factory()).getInstance(sync_rules);

  const sourceTable = makeTestTable('test', ['id', 'description']);

  // Pre-setup
- const result1 = await storage.startBatch({}, async (batch) => {
+ const result1 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable,
  tag: 'insert',
  after: {
  id: 'test1',
  description: 'test1a'
- }
+ },
+ afterReplicaId: rid2('test1', 'test1a')
  });
  });

  const checkpoint1 = result1?.flushed_op ?? '0';

- const result2 = await storage.startBatch({}, async (batch) => {
+ const result2 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  // Unchanged, but has a before id
  await batch.save({
  sourceTable,
@@ -1008,14 +1084,16 @@ bucket_definitions:
  id: 'test1',
  description: 'test1a'
  },
+ beforeReplicaId: rid2('test1', 'test1a'),
  after: {
  id: 'test1',
  description: 'test1a'
- }
+ },
+ afterReplicaId: rid2('test1', 'test1a')
  });
  });

- const result3 = await storage.startBatch({}, async (batch) => {
+ const result3 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  // Delete
  await batch.save({
  sourceTable,
@@ -1024,6 +1102,7 @@ bucket_definitions:
  id: 'test1',
  description: 'test1a'
  },
+ beforeReplicaId: rid2('test1', 'test1a'),
  after: undefined
  });
  });
@@ -1031,7 +1110,7 @@ bucket_definitions:
  const checkpoint3 = result3!.flushed_op;

  const batch = await fromAsync(storage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]])));
- const data = batch[0].data.map((d) => {
+ const data = batch[0].batch.data.map((d) => {
  return {
  op: d.op,
  object_id: d.object_id,
@@ -1064,15 +1143,17 @@ bucket_definitions:
  // but large enough in size to be split over multiple returned batches.
  // The specific batch splits is an implementation detail of the storage driver,
  // and the test will have to updated when other implementations are added.
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ const storage = (await factory()).getInstance(sync_rules);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  const largeDescription = '0123456789'.repeat(12_000_00);
@@ -1083,7 +1164,8 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: 'test1'
- }
+ },
+ afterReplicaId: rid('test1')
  });

  await batch.save({
@@ -1092,7 +1174,8 @@ bucket_definitions:
  after: {
  id: 'large1',
  description: largeDescription
- }
+ },
+ afterReplicaId: rid('large1')
  });

  // Large enough to split the returned batch
@@ -1102,7 +1185,8 @@ bucket_definitions:
  after: {
  id: 'large2',
  description: largeDescription
- }
+ },
+ afterReplicaId: rid('large2')
  });

  await batch.save({
@@ -1111,7 +1195,8 @@ bucket_definitions:
  after: {
  id: 'test3',
  description: 'test3'
- }
+ },
+ afterReplicaId: rid('test3')
  });
  });

@@ -1133,7 +1218,7 @@ bucket_definitions:
  });

  const batch2 = await fromAsync(
- storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1[0].next_after]]), options)
+ storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1[0].batch.next_after]]), options)
  );
  expect(getBatchData(batch2)).toEqual([
  { op_id: '3', op: 'PUT', object_id: 'large2', checksum: 1795508474 },
@@ -1146,7 +1231,7 @@ bucket_definitions:
  });

  const batch3 = await fromAsync(
- storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2[0].next_after]]), options)
+ storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2[0].batch.next_after]]), options)
  );
  expect(getBatchData(batch3)).toEqual([]);
  expect(getBatchMeta(batch3)).toEqual(null);
@@ -1156,15 +1241,17 @@ bucket_definitions:
  // Test syncing a batch of data that is small in count,
  // but large enough in size to be split over multiple returned chunks.
  // Similar to the above test, but splits over 1MB chunks.
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ const storage = (await factory()).getInstance(sync_rules);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  const largeDescription = '0123456789'.repeat(2_000_00);
@@ -1175,7 +1262,8 @@ bucket_definitions:
  after: {
  id: 'test1',
  description: 'test1'
- }
+ },
+ afterReplicaId: rid('test1')
  });

  await batch.save({
@@ -1184,7 +1272,8 @@ bucket_definitions:
  after: {
  id: 'large1',
  description: largeDescription
- }
+ },
+ afterReplicaId: rid('large1')
  });

  // Large enough to split the returned batch
@@ -1194,7 +1283,8 @@ bucket_definitions:
  after: {
  id: 'large2',
  description: largeDescription
- }
+ },
+ afterReplicaId: rid('large2')
  });

  await batch.save({
@@ -1203,7 +1293,8 @@ bucket_definitions:
  after: {
  id: 'test3',
  description: 'test3'
- }
+ },
+ afterReplicaId: rid('test3')
  });
  });

@@ -1223,7 +1314,7 @@ bucket_definitions:
  });

  const batch2 = await fromAsync(
- storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1[0].next_after]]), options)
+ storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1[0].batch.next_after]]), options)
  );
  expect(getBatchData(batch2)).toEqual([{ op_id: '3', op: 'PUT', object_id: 'large2', checksum: 1607205872 }]);
  expect(getBatchMeta(batch2)).toEqual({
@@ -1233,7 +1324,7 @@ bucket_definitions:
  });

  const batch3 = await fromAsync(
- storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2[0].next_after]]), options)
+ storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2[0].batch.next_after]]), options)
  );
  expect(getBatchData(batch3)).toEqual([{ op_id: '4', op: 'PUT', object_id: 'test3', checksum: 1359888332 }]);
  expect(getBatchMeta(batch3)).toEqual({
@@ -1245,15 +1336,17 @@ bucket_definitions:

  test('long batch', async () => {
  // Test syncing a batch of data that is limited by count.
- const sync_rules = SqlSyncRules.fromYaml(`
+ const sync_rules = testRules(
+ `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+ `
+ );
+ const storage = (await factory()).getInstance(sync_rules);

- const result = await storage.startBatch({}, async (batch) => {
+ const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;

  for (let i = 1; i <= 6; i++) {
@@ -1263,14 +1356,17 @@ bucket_definitions:
  after: {
  id: `test${i}`,
  description: `test${i}`
- }
+ },
+ afterReplicaId: `test${i}`
  });
  }
  });

  const checkpoint = result!.flushed_op;

- const batch1 = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']]), { limit: 4 }));
+ const batch1 = await oneFromAsync(
+ storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']]), { limit: 4 })
+ );

  expect(getBatchData(batch1)).toEqual([
  { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 },
@@ -1285,8 +1381,8 @@ bucket_definitions:
  next_after: '4'
  });

- const batch2 = await fromAsync(
- storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1[0].next_after]]), {
+ const batch2 = await oneFromAsync(
+ storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1.batch.next_after]]), {
  limit: 4
  })
  );
@@ -1302,7 +1398,7 @@ bucket_definitions:
  });

  const batch3 = await fromAsync(
- storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2[0].next_after]]), {
+ storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2.batch.next_after]]), {
  limit: 4
  })
  );
@@ -1311,28 +1407,3 @@ bucket_definitions:
  expect(getBatchMeta(batch3)).toEqual(null);
  });
  }
-
- function getBatchData(batch: SyncBucketData[]) {
- if (batch.length == 0) {
- return [];
- }
- return batch[0].data.map((d) => {
- return {
- op_id: d.op_id,
- op: d.op,
- object_id: d.object_id,
- checksum: d.checksum
- };
- });
- }
-
- function getBatchMeta(batch: SyncBucketData[]) {
- if (batch.length == 0) {
- return null;
- }
- return {
- has_more: batch[0].has_more,
- after: batch[0].after,
- next_after: batch[0].next_after
- };
- }
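
Note on the hunks above (test/src/data_storage.test.ts): the storage tests move from locally defined helpers to shared utilities in test/src/util.ts and test/src/stream_utils.ts. Sync rules are built with testRules() instead of SqlSyncRules.fromYaml(), getInstance() now takes the persisted sync-rules content directly, startBatch() receives BATCH_OPTIONS, and every save() passes an explicit afterReplicaId/beforeReplicaId. The shared helpers themselves are not part of this excerpt; the following is only a minimal TypeScript sketch of what a single-column rid() helper could look like, inferred from the inline rid2() helper and the getUuidReplicaIdentityBson import shown above, not the package's actual implementation.

// Hypothetical sketch – the real helper is exported from test/src/util.ts and is not shown in this diff.
import { getUuidReplicaIdentityBson } from '@/util/util-index.js';

// Assumed: build a replica identity for a row keyed on a single text "id" column,
// mirroring how rid2() passes a column descriptor list for replica-identity-full tables.
export function rid(id: string) {
  return getUuidReplicaIdentityBson({ id }, [{ name: 'id', type: 'VARCHAR', typeId: 25 }]);
}

With a helper of this shape, calls such as afterReplicaId: rid('test1') and beforeReplicaId: rid('test1') in the updated tests identify the before/after row images explicitly rather than relying on the raw before/after values alone.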