@powersync/service-core 0.0.2
This diff shows the content of publicly available package versions as released to the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/.probes/.gitkeep +0 -0
- package/CHANGELOG.md +13 -0
- package/LICENSE +67 -0
- package/README.md +3 -0
- package/dist/api/api-index.d.ts +2 -0
- package/dist/api/api-index.js +3 -0
- package/dist/api/api-index.js.map +1 -0
- package/dist/api/diagnostics.d.ts +21 -0
- package/dist/api/diagnostics.js +183 -0
- package/dist/api/diagnostics.js.map +1 -0
- package/dist/api/schema.d.ts +5 -0
- package/dist/api/schema.js +88 -0
- package/dist/api/schema.js.map +1 -0
- package/dist/auth/CachedKeyCollector.d.ts +46 -0
- package/dist/auth/CachedKeyCollector.js +116 -0
- package/dist/auth/CachedKeyCollector.js.map +1 -0
- package/dist/auth/CompoundKeyCollector.d.ts +8 -0
- package/dist/auth/CompoundKeyCollector.js +23 -0
- package/dist/auth/CompoundKeyCollector.js.map +1 -0
- package/dist/auth/JwtPayload.d.ts +10 -0
- package/dist/auth/JwtPayload.js +2 -0
- package/dist/auth/JwtPayload.js.map +1 -0
- package/dist/auth/KeyCollector.d.ts +24 -0
- package/dist/auth/KeyCollector.js +2 -0
- package/dist/auth/KeyCollector.js.map +1 -0
- package/dist/auth/KeySpec.d.ts +26 -0
- package/dist/auth/KeySpec.js +49 -0
- package/dist/auth/KeySpec.js.map +1 -0
- package/dist/auth/KeyStore.d.ts +39 -0
- package/dist/auth/KeyStore.js +131 -0
- package/dist/auth/KeyStore.js.map +1 -0
- package/dist/auth/LeakyBucket.d.ts +39 -0
- package/dist/auth/LeakyBucket.js +57 -0
- package/dist/auth/LeakyBucket.js.map +1 -0
- package/dist/auth/RemoteJWKSCollector.d.ts +24 -0
- package/dist/auth/RemoteJWKSCollector.js +106 -0
- package/dist/auth/RemoteJWKSCollector.js.map +1 -0
- package/dist/auth/StaticKeyCollector.d.ts +14 -0
- package/dist/auth/StaticKeyCollector.js +19 -0
- package/dist/auth/StaticKeyCollector.js.map +1 -0
- package/dist/auth/SupabaseKeyCollector.d.ts +22 -0
- package/dist/auth/SupabaseKeyCollector.js +61 -0
- package/dist/auth/SupabaseKeyCollector.js.map +1 -0
- package/dist/auth/auth-index.d.ts +10 -0
- package/dist/auth/auth-index.js +11 -0
- package/dist/auth/auth-index.js.map +1 -0
- package/dist/db/db-index.d.ts +1 -0
- package/dist/db/db-index.js +2 -0
- package/dist/db/db-index.js.map +1 -0
- package/dist/db/mongo.d.ts +29 -0
- package/dist/db/mongo.js +65 -0
- package/dist/db/mongo.js.map +1 -0
- package/dist/entry/cli-entry.d.ts +15 -0
- package/dist/entry/cli-entry.js +36 -0
- package/dist/entry/cli-entry.js.map +1 -0
- package/dist/entry/commands/config-command.d.ts +10 -0
- package/dist/entry/commands/config-command.js +21 -0
- package/dist/entry/commands/config-command.js.map +1 -0
- package/dist/entry/commands/migrate-action.d.ts +2 -0
- package/dist/entry/commands/migrate-action.js +18 -0
- package/dist/entry/commands/migrate-action.js.map +1 -0
- package/dist/entry/commands/start-action.d.ts +3 -0
- package/dist/entry/commands/start-action.js +15 -0
- package/dist/entry/commands/start-action.js.map +1 -0
- package/dist/entry/commands/teardown-action.d.ts +2 -0
- package/dist/entry/commands/teardown-action.js +17 -0
- package/dist/entry/commands/teardown-action.js.map +1 -0
- package/dist/entry/entry-index.d.ts +5 -0
- package/dist/entry/entry-index.js +6 -0
- package/dist/entry/entry-index.js.map +1 -0
- package/dist/index.d.ts +24 -0
- package/dist/index.js +26 -0
- package/dist/index.js.map +1 -0
- package/dist/metrics/metrics.d.ts +16 -0
- package/dist/metrics/metrics.js +139 -0
- package/dist/metrics/metrics.js.map +1 -0
- package/dist/migrations/db/migrations/1684951997326-init.d.ts +3 -0
- package/dist/migrations/db/migrations/1684951997326-init.js +31 -0
- package/dist/migrations/db/migrations/1684951997326-init.js.map +1 -0
- package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.d.ts +2 -0
- package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js +5 -0
- package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js.map +1 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.d.ts +3 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +54 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -0
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.d.ts +3 -0
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js +27 -0
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +1 -0
- package/dist/migrations/db/store.d.ts +3 -0
- package/dist/migrations/db/store.js +10 -0
- package/dist/migrations/db/store.js.map +1 -0
- package/dist/migrations/migrations.d.ts +10 -0
- package/dist/migrations/migrations.js +94 -0
- package/dist/migrations/migrations.js.map +1 -0
- package/dist/replication/ErrorRateLimiter.d.ts +17 -0
- package/dist/replication/ErrorRateLimiter.js +42 -0
- package/dist/replication/ErrorRateLimiter.js.map +1 -0
- package/dist/replication/PgRelation.d.ts +16 -0
- package/dist/replication/PgRelation.js +26 -0
- package/dist/replication/PgRelation.js.map +1 -0
- package/dist/replication/WalConnection.d.ts +34 -0
- package/dist/replication/WalConnection.js +190 -0
- package/dist/replication/WalConnection.js.map +1 -0
- package/dist/replication/WalStream.d.ts +58 -0
- package/dist/replication/WalStream.js +517 -0
- package/dist/replication/WalStream.js.map +1 -0
- package/dist/replication/WalStreamManager.d.ts +30 -0
- package/dist/replication/WalStreamManager.js +199 -0
- package/dist/replication/WalStreamManager.js.map +1 -0
- package/dist/replication/WalStreamRunner.d.ts +38 -0
- package/dist/replication/WalStreamRunner.js +155 -0
- package/dist/replication/WalStreamRunner.js.map +1 -0
- package/dist/replication/replication-index.d.ts +7 -0
- package/dist/replication/replication-index.js +8 -0
- package/dist/replication/replication-index.js.map +1 -0
- package/dist/replication/util.d.ts +9 -0
- package/dist/replication/util.js +62 -0
- package/dist/replication/util.js.map +1 -0
- package/dist/routes/admin.d.ts +7 -0
- package/dist/routes/admin.js +192 -0
- package/dist/routes/admin.js.map +1 -0
- package/dist/routes/auth.d.ts +58 -0
- package/dist/routes/auth.js +182 -0
- package/dist/routes/auth.js.map +1 -0
- package/dist/routes/checkpointing.d.ts +3 -0
- package/dist/routes/checkpointing.js +30 -0
- package/dist/routes/checkpointing.js.map +1 -0
- package/dist/routes/dev.d.ts +6 -0
- package/dist/routes/dev.js +163 -0
- package/dist/routes/dev.js.map +1 -0
- package/dist/routes/route-generators.d.ts +15 -0
- package/dist/routes/route-generators.js +32 -0
- package/dist/routes/route-generators.js.map +1 -0
- package/dist/routes/router-socket.d.ts +10 -0
- package/dist/routes/router-socket.js +5 -0
- package/dist/routes/router-socket.js.map +1 -0
- package/dist/routes/router.d.ts +13 -0
- package/dist/routes/router.js +2 -0
- package/dist/routes/router.js.map +1 -0
- package/dist/routes/routes-index.d.ts +4 -0
- package/dist/routes/routes-index.js +5 -0
- package/dist/routes/routes-index.js.map +1 -0
- package/dist/routes/socket-route.d.ts +2 -0
- package/dist/routes/socket-route.js +119 -0
- package/dist/routes/socket-route.js.map +1 -0
- package/dist/routes/sync-rules.d.ts +6 -0
- package/dist/routes/sync-rules.js +182 -0
- package/dist/routes/sync-rules.js.map +1 -0
- package/dist/routes/sync-stream.d.ts +5 -0
- package/dist/routes/sync-stream.js +74 -0
- package/dist/routes/sync-stream.js.map +1 -0
- package/dist/runner/teardown.d.ts +2 -0
- package/dist/runner/teardown.js +79 -0
- package/dist/runner/teardown.js.map +1 -0
- package/dist/storage/BucketStorage.d.ts +298 -0
- package/dist/storage/BucketStorage.js +25 -0
- package/dist/storage/BucketStorage.js.map +1 -0
- package/dist/storage/MongoBucketStorage.d.ts +51 -0
- package/dist/storage/MongoBucketStorage.js +388 -0
- package/dist/storage/MongoBucketStorage.js.map +1 -0
- package/dist/storage/SourceTable.d.ts +39 -0
- package/dist/storage/SourceTable.js +50 -0
- package/dist/storage/SourceTable.js.map +1 -0
- package/dist/storage/mongo/MongoBucketBatch.d.ts +48 -0
- package/dist/storage/mongo/MongoBucketBatch.js +584 -0
- package/dist/storage/mongo/MongoBucketBatch.js.map +1 -0
- package/dist/storage/mongo/MongoIdSequence.d.ts +12 -0
- package/dist/storage/mongo/MongoIdSequence.js +21 -0
- package/dist/storage/mongo/MongoIdSequence.js.map +1 -0
- package/dist/storage/mongo/MongoPersistedSyncRules.d.ts +9 -0
- package/dist/storage/mongo/MongoPersistedSyncRules.js +9 -0
- package/dist/storage/mongo/MongoPersistedSyncRules.js.map +1 -0
- package/dist/storage/mongo/MongoPersistedSyncRulesContent.d.ts +20 -0
- package/dist/storage/mongo/MongoPersistedSyncRulesContent.js +26 -0
- package/dist/storage/mongo/MongoPersistedSyncRulesContent.js.map +1 -0
- package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +27 -0
- package/dist/storage/mongo/MongoSyncBucketStorage.js +379 -0
- package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -0
- package/dist/storage/mongo/MongoSyncRulesLock.d.ts +16 -0
- package/dist/storage/mongo/MongoSyncRulesLock.js +65 -0
- package/dist/storage/mongo/MongoSyncRulesLock.js.map +1 -0
- package/dist/storage/mongo/OperationBatch.d.ts +26 -0
- package/dist/storage/mongo/OperationBatch.js +101 -0
- package/dist/storage/mongo/OperationBatch.js.map +1 -0
- package/dist/storage/mongo/PersistedBatch.d.ts +42 -0
- package/dist/storage/mongo/PersistedBatch.js +200 -0
- package/dist/storage/mongo/PersistedBatch.js.map +1 -0
- package/dist/storage/mongo/db.d.ts +23 -0
- package/dist/storage/mongo/db.js +34 -0
- package/dist/storage/mongo/db.js.map +1 -0
- package/dist/storage/mongo/models.d.ts +137 -0
- package/dist/storage/mongo/models.js +27 -0
- package/dist/storage/mongo/models.js.map +1 -0
- package/dist/storage/mongo/util.d.ts +26 -0
- package/dist/storage/mongo/util.js +81 -0
- package/dist/storage/mongo/util.js.map +1 -0
- package/dist/storage/storage-index.d.ts +14 -0
- package/dist/storage/storage-index.js +15 -0
- package/dist/storage/storage-index.js.map +1 -0
- package/dist/sync/BroadcastIterable.d.ts +38 -0
- package/dist/sync/BroadcastIterable.js +153 -0
- package/dist/sync/BroadcastIterable.js.map +1 -0
- package/dist/sync/LastValueSink.d.ts +25 -0
- package/dist/sync/LastValueSink.js +84 -0
- package/dist/sync/LastValueSink.js.map +1 -0
- package/dist/sync/merge.d.ts +39 -0
- package/dist/sync/merge.js +175 -0
- package/dist/sync/merge.js.map +1 -0
- package/dist/sync/safeRace.d.ts +1 -0
- package/dist/sync/safeRace.js +91 -0
- package/dist/sync/safeRace.js.map +1 -0
- package/dist/sync/sync-index.d.ts +6 -0
- package/dist/sync/sync-index.js +7 -0
- package/dist/sync/sync-index.js.map +1 -0
- package/dist/sync/sync.d.ts +18 -0
- package/dist/sync/sync.js +248 -0
- package/dist/sync/sync.js.map +1 -0
- package/dist/sync/util.d.ts +26 -0
- package/dist/sync/util.js +73 -0
- package/dist/sync/util.js.map +1 -0
- package/dist/system/CorePowerSyncSystem.d.ts +18 -0
- package/dist/system/CorePowerSyncSystem.js +28 -0
- package/dist/system/CorePowerSyncSystem.js.map +1 -0
- package/dist/util/Mutex.d.ts +47 -0
- package/dist/util/Mutex.js +132 -0
- package/dist/util/Mutex.js.map +1 -0
- package/dist/util/PgManager.d.ts +24 -0
- package/dist/util/PgManager.js +55 -0
- package/dist/util/PgManager.js.map +1 -0
- package/dist/util/alerting.d.ts +4 -0
- package/dist/util/alerting.js +14 -0
- package/dist/util/alerting.js.map +1 -0
- package/dist/util/config/collectors/config-collector.d.ts +29 -0
- package/dist/util/config/collectors/config-collector.js +116 -0
- package/dist/util/config/collectors/config-collector.js.map +1 -0
- package/dist/util/config/collectors/impl/base64-config-collector.d.ts +6 -0
- package/dist/util/config/collectors/impl/base64-config-collector.js +15 -0
- package/dist/util/config/collectors/impl/base64-config-collector.js.map +1 -0
- package/dist/util/config/collectors/impl/fallback-config-collector.d.ts +11 -0
- package/dist/util/config/collectors/impl/fallback-config-collector.js +19 -0
- package/dist/util/config/collectors/impl/fallback-config-collector.js.map +1 -0
- package/dist/util/config/collectors/impl/filesystem-config-collector.d.ts +6 -0
- package/dist/util/config/collectors/impl/filesystem-config-collector.js +35 -0
- package/dist/util/config/collectors/impl/filesystem-config-collector.js.map +1 -0
- package/dist/util/config/compound-config-collector.d.ts +32 -0
- package/dist/util/config/compound-config-collector.js +126 -0
- package/dist/util/config/compound-config-collector.js.map +1 -0
- package/dist/util/config/sync-rules/impl/base64-sync-rules-collector.d.ts +7 -0
- package/dist/util/config/sync-rules/impl/base64-sync-rules-collector.js +17 -0
- package/dist/util/config/sync-rules/impl/base64-sync-rules-collector.js.map +1 -0
- package/dist/util/config/sync-rules/impl/filesystem-sync-rules-collector.d.ts +7 -0
- package/dist/util/config/sync-rules/impl/filesystem-sync-rules-collector.js +21 -0
- package/dist/util/config/sync-rules/impl/filesystem-sync-rules-collector.js.map +1 -0
- package/dist/util/config/sync-rules/impl/inline-sync-rules-collector.d.ts +7 -0
- package/dist/util/config/sync-rules/impl/inline-sync-rules-collector.js +17 -0
- package/dist/util/config/sync-rules/impl/inline-sync-rules-collector.js.map +1 -0
- package/dist/util/config/sync-rules/sync-collector.d.ts +6 -0
- package/dist/util/config/sync-rules/sync-collector.js +3 -0
- package/dist/util/config/sync-rules/sync-collector.js.map +1 -0
- package/dist/util/config/types.d.ts +53 -0
- package/dist/util/config/types.js +7 -0
- package/dist/util/config/types.js.map +1 -0
- package/dist/util/config.d.ts +7 -0
- package/dist/util/config.js +35 -0
- package/dist/util/config.js.map +1 -0
- package/dist/util/env.d.ts +10 -0
- package/dist/util/env.js +25 -0
- package/dist/util/env.js.map +1 -0
- package/dist/util/memory-tracking.d.ts +7 -0
- package/dist/util/memory-tracking.js +58 -0
- package/dist/util/memory-tracking.js.map +1 -0
- package/dist/util/migration_lib.d.ts +11 -0
- package/dist/util/migration_lib.js +64 -0
- package/dist/util/migration_lib.js.map +1 -0
- package/dist/util/pgwire_utils.d.ts +24 -0
- package/dist/util/pgwire_utils.js +117 -0
- package/dist/util/pgwire_utils.js.map +1 -0
- package/dist/util/populate_test_data.d.ts +8 -0
- package/dist/util/populate_test_data.js +65 -0
- package/dist/util/populate_test_data.js.map +1 -0
- package/dist/util/protocol-types.d.ts +178 -0
- package/dist/util/protocol-types.js +38 -0
- package/dist/util/protocol-types.js.map +1 -0
- package/dist/util/secs.d.ts +2 -0
- package/dist/util/secs.js +49 -0
- package/dist/util/secs.js.map +1 -0
- package/dist/util/util-index.d.ts +22 -0
- package/dist/util/util-index.js +23 -0
- package/dist/util/util-index.js.map +1 -0
- package/dist/util/utils.d.ts +14 -0
- package/dist/util/utils.js +75 -0
- package/dist/util/utils.js.map +1 -0
- package/package.json +55 -0
- package/src/api/api-index.ts +2 -0
- package/src/api/diagnostics.ts +221 -0
- package/src/api/schema.ts +99 -0
- package/src/auth/CachedKeyCollector.ts +132 -0
- package/src/auth/CompoundKeyCollector.ts +33 -0
- package/src/auth/JwtPayload.ts +11 -0
- package/src/auth/KeyCollector.ts +27 -0
- package/src/auth/KeySpec.ts +67 -0
- package/src/auth/KeyStore.ts +156 -0
- package/src/auth/LeakyBucket.ts +66 -0
- package/src/auth/RemoteJWKSCollector.ts +130 -0
- package/src/auth/StaticKeyCollector.ts +21 -0
- package/src/auth/SupabaseKeyCollector.ts +67 -0
- package/src/auth/auth-index.ts +10 -0
- package/src/db/db-index.ts +1 -0
- package/src/db/mongo.ts +72 -0
- package/src/entry/cli-entry.ts +41 -0
- package/src/entry/commands/config-command.ts +36 -0
- package/src/entry/commands/migrate-action.ts +25 -0
- package/src/entry/commands/start-action.ts +24 -0
- package/src/entry/commands/teardown-action.ts +23 -0
- package/src/entry/entry-index.ts +5 -0
- package/src/index.ts +37 -0
- package/src/metrics/metrics.ts +169 -0
- package/src/migrations/db/migrations/1684951997326-init.ts +33 -0
- package/src/migrations/db/migrations/1688556755264-initial-sync-rules.ts +5 -0
- package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +99 -0
- package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +32 -0
- package/src/migrations/db/store.ts +11 -0
- package/src/migrations/migrations.ts +122 -0
- package/src/replication/ErrorRateLimiter.ts +49 -0
- package/src/replication/PgRelation.ts +42 -0
- package/src/replication/WalConnection.ts +227 -0
- package/src/replication/WalStream.ts +626 -0
- package/src/replication/WalStreamManager.ts +214 -0
- package/src/replication/WalStreamRunner.ts +180 -0
- package/src/replication/replication-index.ts +7 -0
- package/src/replication/util.ts +76 -0
- package/src/routes/admin.ts +229 -0
- package/src/routes/auth.ts +209 -0
- package/src/routes/checkpointing.ts +38 -0
- package/src/routes/dev.ts +194 -0
- package/src/routes/route-generators.ts +39 -0
- package/src/routes/router-socket.ts +13 -0
- package/src/routes/router.ts +17 -0
- package/src/routes/routes-index.ts +5 -0
- package/src/routes/socket-route.ts +131 -0
- package/src/routes/sync-rules.ts +210 -0
- package/src/routes/sync-stream.ts +92 -0
- package/src/runner/teardown.ts +91 -0
- package/src/storage/BucketStorage.ts +386 -0
- package/src/storage/MongoBucketStorage.ts +493 -0
- package/src/storage/SourceTable.ts +60 -0
- package/src/storage/mongo/MongoBucketBatch.ts +756 -0
- package/src/storage/mongo/MongoIdSequence.ts +24 -0
- package/src/storage/mongo/MongoPersistedSyncRules.ts +16 -0
- package/src/storage/mongo/MongoPersistedSyncRulesContent.ts +47 -0
- package/src/storage/mongo/MongoSyncBucketStorage.ts +517 -0
- package/src/storage/mongo/MongoSyncRulesLock.ts +81 -0
- package/src/storage/mongo/OperationBatch.ts +115 -0
- package/src/storage/mongo/PersistedBatch.ts +245 -0
- package/src/storage/mongo/db.ts +69 -0
- package/src/storage/mongo/models.ts +157 -0
- package/src/storage/mongo/util.ts +88 -0
- package/src/storage/storage-index.ts +15 -0
- package/src/sync/BroadcastIterable.ts +161 -0
- package/src/sync/LastValueSink.ts +100 -0
- package/src/sync/merge.ts +200 -0
- package/src/sync/safeRace.ts +99 -0
- package/src/sync/sync-index.ts +6 -0
- package/src/sync/sync.ts +312 -0
- package/src/sync/util.ts +98 -0
- package/src/system/CorePowerSyncSystem.ts +43 -0
- package/src/util/Mutex.ts +159 -0
- package/src/util/PgManager.ts +64 -0
- package/src/util/alerting.ts +17 -0
- package/src/util/config/collectors/config-collector.ts +141 -0
- package/src/util/config/collectors/impl/base64-config-collector.ts +18 -0
- package/src/util/config/collectors/impl/fallback-config-collector.ts +22 -0
- package/src/util/config/collectors/impl/filesystem-config-collector.ts +41 -0
- package/src/util/config/compound-config-collector.ts +171 -0
- package/src/util/config/sync-rules/impl/base64-sync-rules-collector.ts +21 -0
- package/src/util/config/sync-rules/impl/filesystem-sync-rules-collector.ts +26 -0
- package/src/util/config/sync-rules/impl/inline-sync-rules-collector.ts +21 -0
- package/src/util/config/sync-rules/sync-collector.ts +8 -0
- package/src/util/config/types.ts +60 -0
- package/src/util/config.ts +39 -0
- package/src/util/env.ts +28 -0
- package/src/util/memory-tracking.ts +67 -0
- package/src/util/migration_lib.ts +79 -0
- package/src/util/pgwire_utils.ts +139 -0
- package/src/util/populate_test_data.ts +78 -0
- package/src/util/protocol-types.ts +223 -0
- package/src/util/secs.ts +54 -0
- package/src/util/util-index.ts +25 -0
- package/src/util/utils.ts +102 -0
- package/test/src/__snapshots__/pg_test.test.ts.snap +256 -0
- package/test/src/__snapshots__/sync.test.ts.snap +235 -0
- package/test/src/auth.test.ts +340 -0
- package/test/src/broadcast_iterable.test.ts +156 -0
- package/test/src/data_storage.test.ts +1176 -0
- package/test/src/env.ts +8 -0
- package/test/src/large_batch.test.ts +194 -0
- package/test/src/merge_iterable.test.ts +355 -0
- package/test/src/pg_test.test.ts +432 -0
- package/test/src/schema_changes.test.ts +545 -0
- package/test/src/slow_tests.test.ts +257 -0
- package/test/src/sql_functions.test.ts +254 -0
- package/test/src/sql_operators.test.ts +132 -0
- package/test/src/sync.test.ts +293 -0
- package/test/src/sync_rules.test.ts +1051 -0
- package/test/src/util.ts +67 -0
- package/test/src/validation.test.ts +63 -0
- package/test/src/wal_stream.test.ts +310 -0
- package/test/src/wal_stream_utils.ts +147 -0
- package/test/tsconfig.json +20 -0
- package/tsconfig.json +20 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/vitest.config.ts +11 -0

package/test/src/data_storage.test.ts
@@ -0,0 +1,1176 @@
+import { SqlSyncRules } from '@powersync/service-sync-rules';
+import * as bson from 'bson';
+import { describe, expect, test } from 'vitest';
+import { SourceTable } from '../../src/storage/SourceTable.js';
+import { hashData } from '../../src/util/utils.js';
+import { MONGO_STORAGE_FACTORY, StorageFactory } from './util.js';
+import { SyncBucketData } from '../../src/util/protocol-types.js';
+import { BucketDataBatchOptions } from '../../src/storage/BucketStorage.js';
+import { fromAsync } from './wal_stream_utils.js';
+
+function makeTestTable(name: string, columns?: string[] | undefined) {
+  const relId = hashData('table', name, (columns ?? ['id']).join(','));
+  const id = new bson.ObjectId('6544e3899293153fa7b38331');
+  return new SourceTable(
+    id,
+    SourceTable.DEFAULT_TAG,
+    relId,
+    SourceTable.DEFAULT_SCHEMA,
+    name,
+    (columns ?? ['id']).map((column) => ({ name: column, typeOid: 25 })),
+    true
+  );
+}
+
+const TEST_TABLE = makeTestTable('test', ['id']);
+
+describe('store - mongodb', function () {
+  defineDataStorageTests(MONGO_STORAGE_FACTORY);
+});
+
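The tests below repeatedly use the `fromAsync` helper imported from `./wal_stream_utils.js` to collect the async iterable returned by `storage.getBucketDataBatch` into a plain array. That helper's body falls outside this hunk; a minimal sketch of the contract the tests rely on (the import path is real, the implementation shown is an assumption):

    // Sketch: collect an async iterable into an array.
    // The actual helper is defined in package/test/src/wal_stream_utils.ts.
    async function fromAsync<T>(iterable: AsyncIterable<T>): Promise<T[]> {
      const items: T[] = [];
      for await (const item of iterable) {
        items.push(item);
      }
      return items;
    }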
+function defineDataStorageTests(factory: StorageFactory) {
+  test('save and load parameters', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  mybucket:
+    parameters:
+      - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
+    data: []
+`);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 't2',
+          id1: 'user3',
+          id2: 'user4',
+          group_id: 'group2a'
+        }
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 't1',
+          id1: 'user1',
+          id2: 'user2',
+          group_id: 'group1a'
+        }
+      });
+    });
+
+    const parameters = await storage.getParameterSets(result!.flushed_op, [['mybucket', '1', 'user1']]);
+    expect(parameters).toEqual([
+      {
+        group_id: 'group1a'
+      }
+    ]);
+  });
+
+  test('it should use the latest version', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  mybucket:
+    parameters:
+      - SELECT group_id FROM test WHERE id = token_parameters.user_id
+    data: []
+`);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result1 = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'user1',
+          group_id: 'group1'
+        }
+      });
+    });
+    const result2 = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'user1',
+          group_id: 'group2'
+        }
+      });
+    });
+
+    const parameters = await storage.getParameterSets(result2!.flushed_op, [['mybucket', '1', 'user1']]);
+    expect(parameters).toEqual([
+      {
+        group_id: 'group2'
+      }
+    ]);
+
+    // Use the checkpoint to get older data if relevant
+    const parameters2 = await storage.getParameterSets(result1!.flushed_op, [['mybucket', '1', 'user1']]);
+    expect(parameters2).toEqual([
+      {
+        group_id: 'group1'
+      }
+    ]);
+  });
+
+  test('save and load parameters with different number types', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  mybucket:
+    parameters:
+      - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
+    data: []
+`);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 't1',
+          group_id: 'group1',
+          n1: 314n,
+          f2: 314,
+          f3: 3.14
+        }
+      });
+    });
+
+    const TEST_PARAMS = { group_id: 'group1' };
+
+    const checkpoint = result!.flushed_op;
+
+    const parameters1 = await storage.getParameterSets(checkpoint, [['mybucket', '1', 314n, 314, 3.14]]);
+    expect(parameters1).toEqual([TEST_PARAMS]);
+    const parameters2 = await storage.getParameterSets(checkpoint, [['mybucket', '1', 314, 314n, 3.14]]);
+    expect(parameters2).toEqual([TEST_PARAMS]);
+    const parameters3 = await storage.getParameterSets(checkpoint, [['mybucket', '1', 314n, 314, 3]]);
+    expect(parameters3).toEqual([]);
+  });
+
+  test('save and load parameters with large numbers', async () => {
+    // This ensures serialization / deserialization of "current_data" is done correctly.
+    // The specific case tested here cannot happen with postgres in practice, but we still
+    // test it to ensure correct deserialization.
+
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  mybucket:
+    parameters:
+      - SELECT group_id FROM test WHERE n1 = token_parameters.n1
+    data: []
+`);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 't1',
+          group_id: 'group1',
+          n1: 1152921504606846976n // 2^60
+        }
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'update',
+        after: {
+          id: 't1',
+          group_id: 'group1',
+          // Simulate a TOAST value, even though it can't happen for values like this
+          // in practice.
+          n1: undefined
+        }
+      });
+    });
+
+    const TEST_PARAMS = { group_id: 'group1' };
+
+    const checkpoint = result!.flushed_op;
+
+    const parameters1 = await storage.getParameterSets(checkpoint, [['mybucket', '1', 1152921504606846976n]]);
+    expect(parameters1).toEqual([TEST_PARAMS]);
+  });
+
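The `n1: undefined` update above simulates a TOAST column: the replicated record omits the value, so the storage layer has to merge the incoming partial row with the persisted "current_data" for that row before re-evaluating parameter queries. A minimal sketch of such a merge step (illustrative only; the function name is not from this package):

    // Sketch: columns replicated as `undefined` (TOAST placeholders) keep their stored value.
    function mergeToastColumns(current: Record<string, any>, after: Record<string, any>) {
      const merged = { ...current };
      for (const [column, value] of Object.entries(after)) {
        if (value !== undefined) {
          merged[column] = value;
        }
      }
      return merged;
    }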
+  test('removing row', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "%"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          description: 'test1'
+        }
+      });
+      await batch.save({
+        sourceTable,
+        tag: 'delete',
+        before: {
+          id: 'test1'
+        }
+      });
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    const batch = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
+    const data = batch[0].data.map((d) => {
+      return {
+        op: d.op,
+        object_id: d.object_id,
+        checksum: d.checksum
+      };
+    });
+
+    const c1 = 2871785649;
+    const c2 = 2872534815;
+
+    expect(data).toEqual([
+      { op: 'PUT', object_id: 'test1', checksum: c1 },
+      { op: 'REMOVE', object_id: 'test1', checksum: c2 }
+    ]);
+
+    const checksums = await storage.getChecksums(checkpoint, ['global[]']);
+    expect(checksums).toEqual([
+      {
+        bucket: 'global[]',
+        checksum: (c1 + c2) & 0xffffffff,
+        count: 2
+      }
+    ]);
+  });
+
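The bucket checksum asserted here is just the per-operation checksums added together with 32-bit wrap-around: in JavaScript, `& 0xffffffff` coerces the running sum to a signed 32-bit integer, discarding any overflow. A standalone sketch of that aggregation (illustrative; not an export of this package):

    // Sketch: aggregate per-operation checksums into a bucket checksum.
    // `& 0xffffffff` truncates to a signed 32-bit integer, so overflow wraps.
    function aggregateChecksums(checksums: number[]): number {
      return checksums.reduce((total, checksum) => (total + checksum) & 0xffffffff, 0);
    }
    // For the values above: aggregateChecksums([2871785649, 2872534815]) === 1449353168.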
+  test('save and load parameters with workspaceId', async () => {
+    const WORKSPACE_TABLE = makeTestTable('workspace', ['id']);
+
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  by_workspace:
+    parameters:
+      - SELECT id as workspace_id FROM workspace WHERE
+        workspace."userId" = token_parameters.user_id
+    data: []
+`);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: WORKSPACE_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'workspace1',
+          userId: 'u1'
+        }
+      });
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    const parameters = {
+      token_parameters: {
+        user_id: 'u1'
+      },
+      user_parameters: {}
+    };
+
+    const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
+
+    const lookups = q1.getLookups(parameters);
+    expect(lookups).toEqual([['by_workspace', '1', 'u1']]);
+
+    const parameter_sets = await storage.getParameterSets(checkpoint, lookups);
+    expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
+
+    const buckets = await sync_rules.queryBucketIds({
+      getParameterSets(lookups) {
+        return storage.getParameterSets(checkpoint, lookups);
+      },
+      parameters
+    });
+    expect(buckets).toEqual(['by_workspace["workspace1"]']);
+  });
+
+  test('save and load parameters with dynamic global buckets', async () => {
+    const WORKSPACE_TABLE = makeTestTable('workspace');
+
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  by_public_workspace:
+    parameters:
+      - SELECT id as workspace_id FROM workspace WHERE
+        workspace.visibility = 'public'
+    data: []
+`);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: WORKSPACE_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'workspace1',
+          visibility: 'public'
+        }
+      });
+
+      await batch.save({
+        sourceTable: WORKSPACE_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'workspace2',
+          visibility: 'private'
+        }
+      });
+
+      await batch.save({
+        sourceTable: WORKSPACE_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'workspace3',
+          visibility: 'public'
+        }
+      });
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    const parameters = {
+      token_parameters: {
+        user_id: 'unknown'
+      },
+      user_parameters: {}
+    };
+
+    const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
+
+    const lookups = q1.getLookups(parameters);
+    expect(lookups).toEqual([['by_public_workspace', '1']]);
+
+    const parameter_sets = await storage.getParameterSets(checkpoint, lookups);
+    parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
+    expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
+
+    const buckets = await sync_rules.queryBucketIds({
+      getParameterSets(lookups) {
+        return storage.getParameterSets(checkpoint, lookups);
+      },
+      parameters
+    });
+    buckets.sort();
+    expect(buckets).toEqual(['by_public_workspace["workspace1"]', 'by_public_workspace["workspace3"]']);
+  });
+
+  test('multiple parameter queries', async () => {
+    const WORKSPACE_TABLE = makeTestTable('workspace');
+
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  by_workspace:
+    parameters:
+      - SELECT id as workspace_id FROM workspace WHERE
+        workspace.visibility = 'public'
+      - SELECT id as workspace_id FROM workspace WHERE
+        workspace.user_id = token_parameters.user_id
+    data: []
+`);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: WORKSPACE_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'workspace1',
+          visibility: 'public'
+        }
+      });
+
+      await batch.save({
+        sourceTable: WORKSPACE_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'workspace2',
+          visibility: 'private'
+        }
+      });
+
+      await batch.save({
+        sourceTable: WORKSPACE_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'workspace3',
+          user_id: 'u1',
+          visibility: 'private'
+        }
+      });
+
+      await batch.save({
+        sourceTable: WORKSPACE_TABLE,
+        tag: 'insert',
+        after: {
+          id: 'workspace4',
+          user_id: 'u2',
+          visibility: 'private'
+        }
+      });
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    const parameters = {
+      token_parameters: {
+        user_id: 'u1'
+      },
+      user_parameters: {}
+    };
+
+    // Test intermediate values - could be moved to sync_rules.test.ts
+    const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
+    const lookups1 = q1.getLookups(parameters);
+    expect(lookups1).toEqual([['by_workspace', '1']]);
+
+    const parameter_sets1 = await storage.getParameterSets(checkpoint, lookups1);
+    parameter_sets1.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
+    expect(parameter_sets1).toEqual([{ workspace_id: 'workspace1' }]);
+
+    const q2 = sync_rules.bucket_descriptors[0].parameter_queries[1];
+    const lookups2 = q2.getLookups(parameters);
+    expect(lookups2).toEqual([['by_workspace', '2', 'u1']]);
+
+    const parameter_sets2 = await storage.getParameterSets(checkpoint, lookups2);
+    parameter_sets2.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
+    expect(parameter_sets2).toEqual([{ workspace_id: 'workspace3' }]);
+
+    // Test final values - the important part
+    const buckets = await sync_rules.queryBucketIds({
+      getParameterSets(lookups) {
+        return storage.getParameterSets(checkpoint, lookups);
+      },
+      parameters
+    });
+    buckets.sort();
+    expect(buckets).toEqual(['by_workspace["workspace1"]', 'by_workspace["workspace3"]']);
+  });
+
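The assertions above suggest the shape of a parameter lookup: the bucket definition name, the 1-based index of the parameter query as a string, then the values extracted from the token parameters; the index keeps rows produced by different parameter queries of the same bucket definition apart. A sketch of that shape (inferred from the expected values, not a type exported by this package):

    // Sketch: lookup tuples as asserted in this test.
    type ParameterLookup = [bucketDefinition: string, queryIndex: string, ...values: unknown[]];
    const exampleLookups: ParameterLookup[] = [
      ['by_workspace', '1'],        // first query: uses no token parameters
      ['by_workspace', '2', 'u1']   // second query: filters on token_parameters.user_id
    ];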
+  test('changing client ids', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT client_id as id, description FROM "%"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const sourceTable = TEST_TABLE;
+    const result = await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          client_id: 'client1a',
+          description: 'test1a'
+        }
+      });
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        after: {
+          id: 'test1',
+          client_id: 'client1b',
+          description: 'test1b'
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test2',
+          client_id: 'client2',
+          description: 'test2'
+        }
+      });
+    });
+    const checkpoint = result!.flushed_op;
+    const batch = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
+    const data = batch[0].data.map((d) => {
+      return {
+        op: d.op,
+        object_id: d.object_id
+      };
+    });
+
+    expect(data).toEqual([
+      { op: 'PUT', object_id: 'client1a' },
+      { op: 'PUT', object_id: 'client1b' },
+      { op: 'REMOVE', object_id: 'client1a' },
+      { op: 'PUT', object_id: 'client2' }
+    ]);
+  });
+
+  test('re-apply delete', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "%"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          description: 'test1'
+        }
+      });
+    });
+
+    await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      await batch.save({
+        sourceTable,
+        tag: 'delete',
+        before: {
+          id: 'test1'
+        }
+      });
+    });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      await batch.save({
+        sourceTable,
+        tag: 'delete',
+        before: {
+          id: 'test1'
+        }
+      });
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    const batch = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
+    const data = batch[0].data.map((d) => {
+      return {
+        op: d.op,
+        object_id: d.object_id,
+        checksum: d.checksum
+      };
+    });
+
+    const c1 = 2871785649;
+    const c2 = 2872534815;
+
+    expect(data).toEqual([
+      { op: 'PUT', object_id: 'test1', checksum: c1 },
+      { op: 'REMOVE', object_id: 'test1', checksum: c2 }
+    ]);
+
+    const checksums = await storage.getChecksums(checkpoint, ['global[]']);
+    expect(checksums).toEqual([
+      {
+        bucket: 'global[]',
+        checksum: (c1 + c2) & 0xffffffff,
+        count: 2
+      }
+    ]);
+  });
+
+  test('re-apply update + delete', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "%"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          description: 'test1'
+        }
+      });
+    });
+
+    await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        after: {
+          id: 'test1',
+          description: undefined
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        after: {
+          id: 'test1',
+          description: undefined
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'delete',
+        before: {
+          id: 'test1'
+        }
+      });
+    });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        after: {
+          id: 'test1',
+          description: undefined
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        after: {
+          id: 'test1',
+          description: undefined
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'delete',
+        before: {
+          id: 'test1'
+        }
+      });
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    const batch = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
+
+    const data = batch[0].data.map((d) => {
+      return {
+        op: d.op,
+        object_id: d.object_id,
+        checksum: d.checksum
+      };
+    });
+
+    const c1 = 2871785649;
+    const c2 = 2872534815;
+
+    expect(data).toEqual([
+      { op: 'PUT', object_id: 'test1', checksum: c1 },
+      { op: 'PUT', object_id: 'test1', checksum: c1 },
+      { op: 'PUT', object_id: 'test1', checksum: c1 },
+      { op: 'REMOVE', object_id: 'test1', checksum: c2 }
+    ]);
+
+    const checksums = await storage.getChecksums(checkpoint, ['global[]']);
+    expect(checksums).toEqual([
+      {
+        bucket: 'global[]',
+        checksum: (c1 + c1 + c1 + c2) & 0xffffffff,
+        count: 4
+      }
+    ]);
+  });
+
+  test('truncate parameters', async () => {
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  mybucket:
+    parameters:
+      - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
+    data: []
+`);
+
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    await storage.startBatch({}, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: 'insert',
+        after: {
+          id: 't2',
+          id1: 'user3',
+          id2: 'user4',
+          group_id: 'group2a'
+        }
+      });
+
+      await batch.truncate([TEST_TABLE]);
+    });
+
+    const { checkpoint } = await storage.getCheckpoint();
+
+    const parameters = await storage.getParameterSets(checkpoint, [['mybucket', '1', 'user1']]);
+    expect(parameters).toEqual([]);
+  });
+
+  test('batch with overlapping replica ids', async () => {
+    // This test checks that we get the correct output when processing rows with:
+    // 1. changing replica ids
+    // 2. overlapping with replica ids of other rows in the same transaction (at different times)
+    // If operations are not processed in input order, this breaks easily.
+    // It can break at two places:
+    // 1. Not getting the correct "current_data" state for each operation.
+    // 2. Output order not being correct.
+
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "test"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    // Pre-setup
+    const result1 = await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          description: 'test1a'
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test2',
+          description: 'test2a'
+        }
+      });
+    });
+
+    const checkpoint1 = result1?.flushed_op ?? '0';
+
+    // Test batch
+    const result2 = await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+      // b
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          description: 'test1b'
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        before: {
+          id: 'test1'
+        },
+        after: {
+          id: 'test2',
+          description: 'test2b'
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        before: {
+          id: 'test2'
+        },
+        after: {
+          id: 'test3',
+          description: 'test3b'
+        }
+      });
+
+      // c
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        after: {
+          id: 'test2',
+          description: 'test2c'
+        }
+      });
+
+      // d
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test4',
+          description: 'test4d'
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'update',
+        before: {
+          id: 'test4'
+        },
+        after: {
+          id: 'test5',
+          description: 'test5d'
+        }
+      });
+    });
+
+    const checkpoint2 = result2!.flushed_op;
+
+    const batch = await fromAsync(storage.getBucketDataBatch(checkpoint2, new Map([['global[]', checkpoint1]])));
+    const data = batch[0].data.map((d) => {
+      return {
+        op: d.op,
+        object_id: d.object_id,
+        data: d.data
+      };
+    });
+
+    // Operations must be in this order
+    expect(data).toEqual([
+      // b
+      { op: 'PUT', object_id: 'test1', data: JSON.stringify({ id: 'test1', description: 'test1b' }) },
+      { op: 'REMOVE', object_id: 'test1', data: null },
+      { op: 'PUT', object_id: 'test2', data: JSON.stringify({ id: 'test2', description: 'test2b' }) },
+      { op: 'REMOVE', object_id: 'test2', data: null },
+      { op: 'PUT', object_id: 'test3', data: JSON.stringify({ id: 'test3', description: 'test3b' }) },
+
+      // c
+      { op: 'PUT', object_id: 'test2', data: JSON.stringify({ id: 'test2', description: 'test2c' }) },
+
+      // d
+      { op: 'PUT', object_id: 'test4', data: JSON.stringify({ id: 'test4', description: 'test4d' }) },
+      { op: 'REMOVE', object_id: 'test4', data: null },
+      { op: 'PUT', object_id: 'test5', data: JSON.stringify({ id: 'test5', description: 'test5d' }) }
+    ]);
+  });
+
+  test('large batch', async () => {
+    // Test syncing a batch of data that is small in count,
+    // but large enough in size to be split over multiple returned batches.
+    // The specific batch splits are an implementation detail of the storage driver,
+    // and the test will have to be updated when other implementations are added.
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "%"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      const largeDescription = '0123456789'.repeat(12_000_00);
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          description: 'test1'
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'large1',
+          description: largeDescription
+        }
+      });
+
+      // Large enough to split the returned batch
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'large2',
+          description: largeDescription
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test3',
+          description: 'test3'
+        }
+      });
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    const options: BucketDataBatchOptions = {
+      chunkLimitBytes: 16 * 1024 * 1024
+    };
+
+    const batch1 = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']]), options));
+    expect(getBatchData(batch1)).toEqual([
+      { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 },
+      { op_id: '2', op: 'PUT', object_id: 'large1', checksum: 454746904 }
+    ]);
+    expect(getBatchMeta(batch1)).toEqual({
+      after: '0',
+      has_more: true,
+      next_after: '2'
+    });
+
+    const batch2 = await fromAsync(
+      storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1[0].next_after]]), options)
+    );
+    expect(getBatchData(batch2)).toEqual([
+      { op_id: '3', op: 'PUT', object_id: 'large2', checksum: 1795508474 },
+      { op_id: '4', op: 'PUT', object_id: 'test3', checksum: 1359888332 }
+    ]);
+    expect(getBatchMeta(batch2)).toEqual({
+      after: '2',
+      has_more: false,
+      next_after: '4'
+    });
+
+    const batch3 = await fromAsync(
+      storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2[0].next_after]]), options)
+    );
+    expect(getBatchData(batch3)).toEqual([]);
+    expect(getBatchMeta(batch3)).toEqual(null);
+  });
+
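These assertions spell out the paging contract of `getBucketDataBatch`: each returned chunk carries `after`, `next_after` and `has_more`, and the next request passes `next_after` as the new cursor. A minimal consumer loop built on that contract (a sketch under those assumptions, not code from this package):

    // Sketch: drain all data for one bucket by following the next_after cursor.
    async function drainBucket(storage: any, checkpoint: any, bucket: string) {
      let after = '0';
      const ops: any[] = [];
      while (true) {
        const chunks = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([[bucket, after]])));
        if (chunks.length == 0) {
          break; // nothing past the cursor
        }
        for (const chunk of chunks) {
          ops.push(...chunk.data);
        }
        const last = chunks[chunks.length - 1];
        if (!last.has_more) {
          break;
        }
        after = last.next_after;
      }
      return ops;
    }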
+  test('large batch (2)', async () => {
+    // Test syncing a batch of data that is small in count,
+    // but large enough in size to be split over multiple returned chunks.
+    // Similar to the above test, but splits over 1MB chunks.
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "%"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      const largeDescription = '0123456789'.repeat(2_000_00);
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test1',
+          description: 'test1'
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'large1',
+          description: largeDescription
+        }
+      });
+
+      // Large enough to split the returned batch
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'large2',
+          description: largeDescription
+        }
+      });
+
+      await batch.save({
+        sourceTable,
+        tag: 'insert',
+        after: {
+          id: 'test3',
+          description: 'test3'
+        }
+      });
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    const options: BucketDataBatchOptions = {};
+
+    const batch1 = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']]), options));
+    expect(getBatchData(batch1)).toEqual([
+      { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 },
+      { op_id: '2', op: 'PUT', object_id: 'large1', checksum: 1178768505 }
+    ]);
+    expect(getBatchMeta(batch1)).toEqual({
+      after: '0',
+      has_more: true,
+      next_after: '2'
+    });
+
+    const batch2 = await fromAsync(
+      storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1[0].next_after]]), options)
+    );
+    expect(getBatchData(batch2)).toEqual([{ op_id: '3', op: 'PUT', object_id: 'large2', checksum: 1607205872 }]);
+    expect(getBatchMeta(batch2)).toEqual({
+      after: '2',
+      has_more: true,
+      next_after: '3'
+    });
+
+    const batch3 = await fromAsync(
+      storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2[0].next_after]]), options)
+    );
+    expect(getBatchData(batch3)).toEqual([{ op_id: '4', op: 'PUT', object_id: 'test3', checksum: 1359888332 }]);
+    expect(getBatchMeta(batch3)).toEqual({
+      after: '3',
+      has_more: false,
+      next_after: '4'
+    });
+  });
+
+  test('long batch', async () => {
+    // Test syncing a batch of data that is limited by count.
+    const sync_rules = SqlSyncRules.fromYaml(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "%"
+`);
+    const storage = (await factory()).getInstance({ id: 1, sync_rules, slot_name: 'test' });
+
+    const result = await storage.startBatch({}, async (batch) => {
+      const sourceTable = TEST_TABLE;
+
+      for (let i = 1; i <= 6; i++) {
+        await batch.save({
+          sourceTable,
+          tag: 'insert',
+          after: {
+            id: `test${i}`,
+            description: `test${i}`
+          }
+        });
+      }
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    const batch1 = await fromAsync(storage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']]), { limit: 4 }));
+
+    expect(getBatchData(batch1)).toEqual([
+      { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 },
+      { op_id: '2', op: 'PUT', object_id: 'test2', checksum: 730027011 },
+      { op_id: '3', op: 'PUT', object_id: 'test3', checksum: 1359888332 },
+      { op_id: '4', op: 'PUT', object_id: 'test4', checksum: 2049153252 }
+    ]);
+
+    expect(getBatchMeta(batch1)).toEqual({
+      after: '0',
+      has_more: true,
+      next_after: '4'
+    });
+
+    const batch2 = await fromAsync(
+      storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1[0].next_after]]), {
+        limit: 4
+      })
+    );
+    expect(getBatchData(batch2)).toEqual([
+      { op_id: '5', op: 'PUT', object_id: 'test5', checksum: 3686902721 },
+      { op_id: '6', op: 'PUT', object_id: 'test6', checksum: 1974820016 }
+    ]);
+
+    expect(getBatchMeta(batch2)).toEqual({
+      after: '4',
+      has_more: false,
+      next_after: '6'
+    });
+
+    const batch3 = await fromAsync(
+      storage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2[0].next_after]]), {
+        limit: 4
+      })
+    );
+    expect(getBatchData(batch3)).toEqual([]);
+
+    expect(getBatchMeta(batch3)).toEqual(null);
+  });
+}
+
+function getBatchData(batch: SyncBucketData[]) {
+  if (batch.length == 0) {
+    return [];
+  }
+  return batch[0].data.map((d) => {
+    return {
+      op_id: d.op_id,
+      op: d.op,
+      object_id: d.object_id,
+      checksum: d.checksum
+    };
+  });
+}
+
+function getBatchMeta(batch: SyncBucketData[]) {
+  if (batch.length == 0) {
+    return null;
+  }
+  return {
+    has_more: batch[0].has_more,
+    after: batch[0].after,
+    next_after: batch[0].next_after
+  };
+}