@powersync/service-core 0.8.8 → 0.9.0
This diff shows the published contents of these two package versions as they appear in their public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +31 -0
- package/dist/api/RouteAPI.d.ts +67 -0
- package/dist/api/RouteAPI.js +2 -0
- package/dist/api/RouteAPI.js.map +1 -0
- package/dist/api/api-index.d.ts +1 -0
- package/dist/api/api-index.js +1 -0
- package/dist/api/api-index.js.map +1 -1
- package/dist/api/diagnostics.d.ts +4 -4
- package/dist/api/diagnostics.js +170 -158
- package/dist/api/diagnostics.js.map +1 -1
- package/dist/api/schema.d.ts +3 -5
- package/dist/api/schema.js +14 -80
- package/dist/api/schema.js.map +1 -1
- package/dist/auth/CachedKeyCollector.js.map +1 -1
- package/dist/auth/KeySpec.js.map +1 -1
- package/dist/auth/KeyStore.d.ts +7 -4
- package/dist/auth/KeyStore.js +1 -1
- package/dist/auth/KeyStore.js.map +1 -1
- package/dist/auth/LeakyBucket.js.map +1 -1
- package/dist/auth/RemoteJWKSCollector.d.ts +0 -2
- package/dist/auth/RemoteJWKSCollector.js.map +1 -1
- package/dist/auth/auth-index.d.ts +0 -1
- package/dist/auth/auth-index.js +0 -1
- package/dist/auth/auth-index.js.map +1 -1
- package/dist/db/mongo.js +5 -3
- package/dist/db/mongo.js.map +1 -1
- package/dist/entry/cli-entry.js +3 -2
- package/dist/entry/cli-entry.js.map +1 -1
- package/dist/entry/commands/compact-action.js +90 -14
- package/dist/entry/commands/compact-action.js.map +1 -1
- package/dist/entry/commands/migrate-action.js +4 -5
- package/dist/entry/commands/migrate-action.js.map +1 -1
- package/dist/entry/commands/teardown-action.js +2 -2
- package/dist/entry/commands/teardown-action.js.map +1 -1
- package/dist/index.d.ts +4 -2
- package/dist/index.js +4 -2
- package/dist/index.js.map +1 -1
- package/dist/locks/MongoLocks.js.map +1 -1
- package/dist/metrics/Metrics.d.ts +2 -2
- package/dist/metrics/Metrics.js +5 -13
- package/dist/metrics/Metrics.js.map +1 -1
- package/dist/migrations/db/migrations/1684951997326-init.d.ts +2 -2
- package/dist/migrations/db/migrations/1684951997326-init.js +4 -2
- package/dist/migrations/db/migrations/1684951997326-init.js.map +1 -1
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.d.ts +2 -2
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +4 -2
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -1
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.d.ts +2 -2
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js +4 -2
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +1 -1
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.d.ts +3 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js +31 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js.map +1 -0
- package/dist/migrations/executor.js.map +1 -1
- package/dist/migrations/migrations.d.ts +8 -0
- package/dist/migrations/migrations.js +19 -7
- package/dist/migrations/migrations.js.map +1 -1
- package/dist/migrations/store/migration-store.js.map +1 -1
- package/dist/modules/AbstractModule.d.ts +26 -0
- package/dist/modules/AbstractModule.js +11 -0
- package/dist/modules/AbstractModule.js.map +1 -0
- package/dist/modules/ModuleManager.d.ts +11 -0
- package/dist/modules/ModuleManager.js +32 -0
- package/dist/modules/ModuleManager.js.map +1 -0
- package/dist/modules/modules-index.d.ts +2 -0
- package/dist/modules/modules-index.js +3 -0
- package/dist/modules/modules-index.js.map +1 -0
- package/dist/replication/AbstractReplicationJob.d.ts +37 -0
- package/dist/replication/AbstractReplicationJob.js +51 -0
- package/dist/replication/AbstractReplicationJob.js.map +1 -0
- package/dist/replication/AbstractReplicator.d.ts +53 -0
- package/dist/replication/AbstractReplicator.js +250 -0
- package/dist/replication/AbstractReplicator.js.map +1 -0
- package/dist/replication/ErrorRateLimiter.d.ts +0 -10
- package/dist/replication/ErrorRateLimiter.js +1 -42
- package/dist/replication/ErrorRateLimiter.js.map +1 -1
- package/dist/replication/ReplicationEngine.d.ts +18 -0
- package/dist/replication/ReplicationEngine.js +41 -0
- package/dist/replication/ReplicationEngine.js.map +1 -0
- package/dist/replication/ReplicationModule.d.ts +51 -0
- package/dist/replication/ReplicationModule.js +68 -0
- package/dist/replication/ReplicationModule.js.map +1 -0
- package/dist/replication/replication-index.d.ts +4 -6
- package/dist/replication/replication-index.js +4 -6
- package/dist/replication/replication-index.js.map +1 -1
- package/dist/routes/RouterEngine.d.ts +42 -0
- package/dist/routes/RouterEngine.js +80 -0
- package/dist/routes/RouterEngine.js.map +1 -0
- package/dist/routes/auth.d.ts +2 -2
- package/dist/routes/auth.js +11 -11
- package/dist/routes/auth.js.map +1 -1
- package/dist/routes/configure-fastify.d.ts +37 -23
- package/dist/routes/configure-fastify.js +18 -18
- package/dist/routes/configure-fastify.js.map +1 -1
- package/dist/routes/configure-rsocket.d.ts +3 -4
- package/dist/routes/configure-rsocket.js +7 -4
- package/dist/routes/configure-rsocket.js.map +1 -1
- package/dist/routes/endpoints/admin.d.ts +30 -0
- package/dist/routes/endpoints/admin.js +46 -67
- package/dist/routes/endpoints/admin.js.map +1 -1
- package/dist/routes/endpoints/checkpointing.js +103 -15
- package/dist/routes/endpoints/checkpointing.js.map +1 -1
- package/dist/routes/endpoints/socket-route.js +8 -6
- package/dist/routes/endpoints/socket-route.js.map +1 -1
- package/dist/routes/endpoints/sync-rules.d.ts +1 -1
- package/dist/routes/endpoints/sync-rules.js +32 -23
- package/dist/routes/endpoints/sync-rules.js.map +1 -1
- package/dist/routes/endpoints/sync-stream.d.ts +0 -1
- package/dist/routes/endpoints/sync-stream.js +8 -8
- package/dist/routes/endpoints/sync-stream.js.map +1 -1
- package/dist/routes/hooks.js.map +1 -1
- package/dist/routes/route-register.js.map +1 -1
- package/dist/routes/router.d.ts +9 -2
- package/dist/routes/router.js.map +1 -1
- package/dist/routes/routes-index.d.ts +1 -0
- package/dist/routes/routes-index.js +1 -0
- package/dist/routes/routes-index.js.map +1 -1
- package/dist/runner/teardown.js +109 -76
- package/dist/runner/teardown.js.map +1 -1
- package/dist/storage/BucketStorage.d.ts +86 -36
- package/dist/storage/BucketStorage.js +6 -10
- package/dist/storage/BucketStorage.js.map +1 -1
- package/dist/storage/ChecksumCache.js.map +1 -1
- package/dist/storage/MongoBucketStorage.d.ts +7 -11
- package/dist/storage/MongoBucketStorage.js +48 -41
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/ReplicationEventPayload.d.ts +14 -0
- package/dist/storage/ReplicationEventPayload.js +2 -0
- package/dist/storage/ReplicationEventPayload.js.map +1 -0
- package/dist/storage/SourceEntity.d.ts +20 -0
- package/dist/storage/SourceEntity.js +2 -0
- package/dist/storage/SourceEntity.js.map +1 -0
- package/dist/storage/SourceTable.d.ts +12 -5
- package/dist/storage/SourceTable.js +12 -5
- package/dist/storage/SourceTable.js.map +1 -1
- package/dist/storage/StorageEngine.d.ts +28 -0
- package/dist/storage/StorageEngine.js +45 -0
- package/dist/storage/StorageEngine.js.map +1 -0
- package/dist/storage/StorageProvider.d.ts +21 -0
- package/dist/storage/StorageProvider.js +2 -0
- package/dist/storage/StorageProvider.js.map +1 -0
- package/dist/storage/WriteCheckpointAPI.d.ts +74 -0
- package/dist/storage/WriteCheckpointAPI.js +16 -0
- package/dist/storage/WriteCheckpointAPI.js.map +1 -0
- package/dist/storage/mongo/MongoBucketBatch.d.ts +24 -5
- package/dist/storage/mongo/MongoBucketBatch.js +119 -62
- package/dist/storage/mongo/MongoBucketBatch.js.map +1 -1
- package/dist/storage/mongo/MongoCompactor.js +20 -3
- package/dist/storage/mongo/MongoCompactor.js.map +1 -1
- package/dist/storage/mongo/MongoIdSequence.js.map +1 -1
- package/dist/storage/mongo/MongoPersistedSyncRulesContent.d.ts +2 -2
- package/dist/storage/mongo/MongoPersistedSyncRulesContent.js +2 -2
- package/dist/storage/mongo/MongoPersistedSyncRulesContent.js.map +1 -1
- package/dist/storage/mongo/MongoStorageProvider.d.ts +5 -0
- package/dist/storage/mongo/MongoStorageProvider.js +26 -0
- package/dist/storage/mongo/MongoStorageProvider.js.map +1 -0
- package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +18 -10
- package/dist/storage/mongo/MongoSyncBucketStorage.js +140 -25
- package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/mongo/MongoSyncRulesLock.js +1 -1
- package/dist/storage/mongo/MongoSyncRulesLock.js.map +1 -1
- package/dist/storage/mongo/MongoWriteCheckpointAPI.d.ts +20 -0
- package/dist/storage/mongo/MongoWriteCheckpointAPI.js +103 -0
- package/dist/storage/mongo/MongoWriteCheckpointAPI.js.map +1 -0
- package/dist/storage/mongo/OperationBatch.d.ts +13 -4
- package/dist/storage/mongo/OperationBatch.js +25 -7
- package/dist/storage/mongo/OperationBatch.js.map +1 -1
- package/dist/storage/mongo/PersistedBatch.d.ts +3 -3
- package/dist/storage/mongo/PersistedBatch.js +2 -2
- package/dist/storage/mongo/PersistedBatch.js.map +1 -1
- package/dist/storage/mongo/config.d.ts +19 -0
- package/dist/storage/mongo/config.js +26 -0
- package/dist/storage/mongo/config.js.map +1 -0
- package/dist/storage/mongo/db.d.ts +3 -2
- package/dist/storage/mongo/db.js +1 -0
- package/dist/storage/mongo/db.js.map +1 -1
- package/dist/storage/mongo/models.d.ts +20 -5
- package/dist/storage/mongo/models.js.map +1 -1
- package/dist/storage/mongo/util.d.ts +12 -1
- package/dist/storage/mongo/util.js +50 -2
- package/dist/storage/mongo/util.js.map +1 -1
- package/dist/storage/storage-index.d.ts +8 -2
- package/dist/storage/storage-index.js +8 -2
- package/dist/storage/storage-index.js.map +1 -1
- package/dist/sync/BroadcastIterable.d.ts +0 -1
- package/dist/sync/BroadcastIterable.js.map +1 -1
- package/dist/sync/LastValueSink.d.ts +0 -1
- package/dist/sync/LastValueSink.js.map +1 -1
- package/dist/sync/merge.d.ts +0 -1
- package/dist/sync/merge.js.map +1 -1
- package/dist/sync/safeRace.js.map +1 -1
- package/dist/sync/sync.d.ts +1 -1
- package/dist/sync/sync.js +5 -5
- package/dist/sync/sync.js.map +1 -1
- package/dist/sync/util.d.ts +0 -2
- package/dist/sync/util.js.map +1 -1
- package/dist/system/ServiceContext.d.ts +37 -0
- package/dist/system/ServiceContext.js +48 -0
- package/dist/system/ServiceContext.js.map +1 -0
- package/dist/system/system-index.d.ts +1 -1
- package/dist/system/system-index.js +1 -1
- package/dist/system/system-index.js.map +1 -1
- package/dist/util/Mutex.js.map +1 -1
- package/dist/util/config/collectors/config-collector.js.map +1 -1
- package/dist/util/config/collectors/impl/base64-config-collector.js.map +1 -1
- package/dist/util/config/collectors/impl/filesystem-config-collector.js.map +1 -1
- package/dist/util/config/compound-config-collector.d.ts +9 -2
- package/dist/util/config/compound-config-collector.js +16 -24
- package/dist/util/config/compound-config-collector.js.map +1 -1
- package/dist/util/config/sync-rules/impl/base64-sync-rules-collector.js.map +1 -1
- package/dist/util/config/sync-rules/impl/filesystem-sync-rules-collector.js.map +1 -1
- package/dist/util/config/sync-rules/impl/inline-sync-rules-collector.js.map +1 -1
- package/dist/util/config/sync-rules/sync-rules-provider.d.ts +9 -0
- package/dist/util/config/sync-rules/sync-rules-provider.js +15 -0
- package/dist/util/config/sync-rules/sync-rules-provider.js.map +1 -0
- package/dist/util/config/types.d.ts +7 -4
- package/dist/util/config/types.js.map +1 -1
- package/dist/util/config.d.ts +3 -4
- package/dist/util/config.js +5 -20
- package/dist/util/config.js.map +1 -1
- package/dist/util/memory-tracking.js.map +1 -1
- package/dist/util/secs.js.map +1 -1
- package/dist/util/util-index.d.ts +3 -6
- package/dist/util/util-index.js +3 -6
- package/dist/util/util-index.js.map +1 -1
- package/dist/util/utils.d.ts +10 -7
- package/dist/util/utils.js +36 -25
- package/dist/util/utils.js.map +1 -1
- package/package.json +8 -12
- package/src/api/RouteAPI.ts +78 -0
- package/src/api/api-index.ts +1 -0
- package/src/api/diagnostics.ts +18 -70
- package/src/api/schema.ts +18 -90
- package/src/auth/KeyStore.ts +9 -6
- package/src/auth/RemoteJWKSCollector.ts +4 -1
- package/src/auth/auth-index.ts +0 -1
- package/src/db/mongo.ts +5 -3
- package/src/entry/cli-entry.ts +3 -2
- package/src/entry/commands/compact-action.ts +24 -12
- package/src/entry/commands/migrate-action.ts +5 -8
- package/src/entry/commands/teardown-action.ts +2 -2
- package/src/index.ts +5 -2
- package/src/metrics/Metrics.ts +6 -16
- package/src/migrations/db/migrations/1684951997326-init.ts +9 -4
- package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +7 -4
- package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +6 -4
- package/src/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.ts +37 -0
- package/src/migrations/migrations.ts +24 -8
- package/src/modules/AbstractModule.ts +37 -0
- package/src/modules/ModuleManager.ts +34 -0
- package/src/modules/modules-index.ts +2 -0
- package/src/replication/AbstractReplicationJob.ts +79 -0
- package/src/replication/AbstractReplicator.ts +228 -0
- package/src/replication/ErrorRateLimiter.ts +0 -44
- package/src/replication/ReplicationEngine.ts +43 -0
- package/src/replication/ReplicationModule.ts +122 -0
- package/src/replication/replication-index.ts +4 -6
- package/src/routes/RouterEngine.ts +120 -0
- package/src/routes/auth.ts +21 -12
- package/src/routes/configure-fastify.ts +26 -27
- package/src/routes/configure-rsocket.ts +13 -8
- package/src/routes/endpoints/admin.ts +72 -76
- package/src/routes/endpoints/checkpointing.ts +51 -11
- package/src/routes/endpoints/socket-route.ts +10 -6
- package/src/routes/endpoints/sync-rules.ts +41 -25
- package/src/routes/endpoints/sync-stream.ts +8 -8
- package/src/routes/router.ts +8 -3
- package/src/routes/routes-index.ts +1 -0
- package/src/runner/teardown.ts +50 -88
- package/src/storage/BucketStorage.ts +103 -41
- package/src/storage/MongoBucketStorage.ts +65 -53
- package/src/storage/ReplicationEventPayload.ts +16 -0
- package/src/storage/SourceEntity.ts +22 -0
- package/src/storage/SourceTable.ts +14 -7
- package/src/storage/StorageEngine.ts +62 -0
- package/src/storage/StorageProvider.ts +27 -0
- package/src/storage/WriteCheckpointAPI.ts +85 -0
- package/src/storage/mongo/MongoBucketBatch.ts +164 -84
- package/src/storage/mongo/MongoCompactor.ts +25 -4
- package/src/storage/mongo/MongoPersistedSyncRulesContent.ts +7 -4
- package/src/storage/mongo/MongoStorageProvider.ts +31 -0
- package/src/storage/mongo/MongoSyncBucketStorage.ts +118 -41
- package/src/storage/mongo/MongoSyncRulesLock.ts +7 -3
- package/src/storage/mongo/MongoWriteCheckpointAPI.ts +151 -0
- package/src/storage/mongo/OperationBatch.ts +28 -12
- package/src/storage/mongo/PersistedBatch.ts +10 -6
- package/src/storage/mongo/config.ts +40 -0
- package/src/storage/mongo/db.ts +4 -1
- package/src/storage/mongo/models.ts +21 -5
- package/src/storage/mongo/util.ts +48 -3
- package/src/storage/storage-index.ts +8 -2
- package/src/sync/sync.ts +7 -4
- package/src/sync/util.ts +0 -1
- package/src/system/ServiceContext.ts +68 -0
- package/src/system/system-index.ts +1 -1
- package/src/util/config/compound-config-collector.ts +31 -31
- package/src/util/config/sync-rules/sync-rules-provider.ts +18 -0
- package/src/util/config/types.ts +7 -5
- package/src/util/config.ts +6 -23
- package/src/util/util-index.ts +3 -6
- package/src/util/utils.ts +48 -41
- package/test/src/__snapshots__/sync.test.ts.snap +14 -14
- package/test/src/auth.test.ts +7 -7
- package/test/src/broadcast_iterable.test.ts +1 -1
- package/test/src/compacting.test.ts +50 -40
- package/test/src/data_storage.test.ts +382 -202
- package/test/src/env.ts +1 -3
- package/test/src/merge_iterable.test.ts +1 -6
- package/test/src/routes/probes.integration.test.ts +34 -30
- package/test/src/setup.ts +1 -1
- package/test/src/stream_utils.ts +42 -0
- package/test/src/sync.test.ts +115 -39
- package/test/src/util.ts +48 -51
- package/test/tsconfig.json +1 -1
- package/tsconfig.tsbuildinfo +1 -1
- package/vitest.config.ts +7 -1
- package/dist/auth/SupabaseKeyCollector.d.ts +0 -22
- package/dist/auth/SupabaseKeyCollector.js +0 -61
- package/dist/auth/SupabaseKeyCollector.js.map +0 -1
- package/dist/replication/PgRelation.d.ts +0 -16
- package/dist/replication/PgRelation.js +0 -26
- package/dist/replication/PgRelation.js.map +0 -1
- package/dist/replication/WalConnection.d.ts +0 -34
- package/dist/replication/WalConnection.js +0 -190
- package/dist/replication/WalConnection.js.map +0 -1
- package/dist/replication/WalStream.d.ts +0 -57
- package/dist/replication/WalStream.js +0 -519
- package/dist/replication/WalStream.js.map +0 -1
- package/dist/replication/WalStreamManager.d.ts +0 -30
- package/dist/replication/WalStreamManager.js +0 -198
- package/dist/replication/WalStreamManager.js.map +0 -1
- package/dist/replication/WalStreamRunner.d.ts +0 -38
- package/dist/replication/WalStreamRunner.js +0 -155
- package/dist/replication/WalStreamRunner.js.map +0 -1
- package/dist/replication/util.d.ts +0 -9
- package/dist/replication/util.js +0 -62
- package/dist/replication/util.js.map +0 -1
- package/dist/system/CorePowerSyncSystem.d.ts +0 -23
- package/dist/system/CorePowerSyncSystem.js +0 -52
- package/dist/system/CorePowerSyncSystem.js.map +0 -1
- package/dist/util/PgManager.d.ts +0 -24
- package/dist/util/PgManager.js +0 -55
- package/dist/util/PgManager.js.map +0 -1
- package/dist/util/migration_lib.d.ts +0 -11
- package/dist/util/migration_lib.js +0 -64
- package/dist/util/migration_lib.js.map +0 -1
- package/dist/util/pgwire_utils.d.ts +0 -24
- package/dist/util/pgwire_utils.js +0 -117
- package/dist/util/pgwire_utils.js.map +0 -1
- package/dist/util/populate_test_data.d.ts +0 -8
- package/dist/util/populate_test_data.js +0 -65
- package/dist/util/populate_test_data.js.map +0 -1
- package/src/auth/SupabaseKeyCollector.ts +0 -67
- package/src/replication/PgRelation.ts +0 -42
- package/src/replication/WalConnection.ts +0 -227
- package/src/replication/WalStream.ts +0 -631
- package/src/replication/WalStreamManager.ts +0 -213
- package/src/replication/WalStreamRunner.ts +0 -180
- package/src/replication/util.ts +0 -76
- package/src/system/CorePowerSyncSystem.ts +0 -64
- package/src/util/PgManager.ts +0 -64
- package/src/util/migration_lib.ts +0 -79
- package/src/util/pgwire_utils.ts +0 -139
- package/src/util/populate_test_data.ts +0 -78
- package/test/src/__snapshots__/pg_test.test.ts.snap +0 -256
- package/test/src/large_batch.test.ts +0 -194
- package/test/src/pg_test.test.ts +0 -450
- package/test/src/schema_changes.test.ts +0 -545
- package/test/src/slow_tests.test.ts +0 -338
- package/test/src/validation.test.ts +0 -63
- package/test/src/wal_stream.test.ts +0 -319
- package/test/src/wal_stream_utils.ts +0 -156
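
The file-level diff reproduced below comes from the reworked storage test suite, which now builds sync rules through `testRules`, starts batches with `BATCH_OPTIONS`, and passes explicit `SaveOperationTag` values plus `afterReplicaId` / `beforeReplicaId` fields (via the `rid` helper) on every save. As a rough orientation, here is a minimal TypeScript sketch of that save() call shape; every type in it is a local stand-in written for illustration only, not the package's real exports, so consult `package/dist/storage/BucketStorage.d.ts` for the actual definitions.

```ts
// Illustrative stand-ins only: the field names mirror the test diff below, while the
// types themselves are hypothetical simplifications of the real BucketStorage interfaces.
const SaveOperationTag = { INSERT: 'insert', UPDATE: 'update', DELETE: 'delete' } as const;
type Tag = (typeof SaveOperationTag)[keyof typeof SaveOperationTag];

interface SaveOperation {
  sourceTable: unknown;
  tag: Tag;
  before?: Record<string, unknown>;
  beforeReplicaId?: unknown; // replica identity of the row before the change
  after?: Record<string, unknown>;
  afterReplicaId?: unknown; // replica identity of the row after the change
}

interface SaveableBatch {
  save(op: SaveOperation): Promise<void>;
}

// Every write now names its replica id explicitly; deletes are addressed by
// beforeReplicaId rather than by an id column inside `before`.
async function example(batch: SaveableBatch, table: unknown) {
  await batch.save({
    sourceTable: table,
    tag: SaveOperationTag.INSERT,
    after: { id: 't1', description: 'hello' },
    afterReplicaId: 't1'
  });
  await batch.save({
    sourceTable: table,
    tag: SaveOperationTag.DELETE,
    beforeReplicaId: 't1'
  });
}
```
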
@@ -1,8 +1,19 @@
-import {
+import { BucketDataBatchOptions, SaveOperationTag } from '@/storage/BucketStorage.js';
+import { getUuidReplicaIdentityBson } from '@/util/util-index.js';
+import { RequestParameters } from '@powersync/service-sync-rules';
 import { describe, expect, test } from 'vitest';
-import {
-import {
-
+import { fromAsync, oneFromAsync } from './stream_utils.js';
+import {
+  BATCH_OPTIONS,
+  getBatchData,
+  getBatchMeta,
+  makeTestTable,
+  MONGO_STORAGE_FACTORY,
+  PARSE_OPTIONS,
+  rid,
+  StorageFactory,
+  testRules
+} from './util.js';
 
 const TEST_TABLE = makeTestTable('test', ['id']);
 
@@ -12,7 +23,7 @@ describe('store - mongodb', function () {
 
 function defineDataStorageTests(factory: StorageFactory) {
   test('save and load parameters', async () => {
-    const sync_rules =
+    const sync_rules = testRules(`
 bucket_definitions:
   mybucket:
     parameters:
@@ -20,29 +31,31 @@ bucket_definitions:
     data: []
 `);
 
-    const storage = (await factory()).getInstance(
+    const storage = (await factory()).getInstance(sync_rules);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 't2',
           id1: 'user3',
           id2: 'user4',
           group_id: 'group2a'
-        }
+        },
+        afterReplicaId: rid('t2')
       });
 
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 't1',
           id1: 'user1',
           id2: 'user2',
           group_id: 'group1a'
-        }
+        },
+        afterReplicaId: rid('t1')
       });
     });
 
@@ -55,34 +68,38 @@ bucket_definitions:
   });
 
   test('it should use the latest version', async () => {
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT group_id FROM test WHERE id = token_parameters.user_id
     data: []
-`
+`
+    );
 
-    const storage = (await factory()).getInstance(
+    const storage = (await factory()).getInstance(sync_rules);
 
-    const result1 = await storage.startBatch(
+    const result1 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'user1',
           group_id: 'group1'
-        }
+        },
+        afterReplicaId: rid('user1')
       });
     });
-    const result2 = await storage.startBatch(
+    const result2 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'user1',
           group_id: 'group2'
-        }
+        },
+        afterReplicaId: rid('user1')
       });
     });
 
@@ -103,27 +120,30 @@ bucket_definitions:
   });
 
   test('save and load parameters with different number types', async () => {
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
     data: []
-`
+`
+    );
 
-    const storage = (await factory()).getInstance(
+    const storage = (await factory()).getInstance(sync_rules);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 't1',
           group_id: 'group1',
           n1: 314n,
           f2: 314,
           f3: 3.14
-        }
+        },
+        afterReplicaId: rid('t1')
       });
     });
 
@@ -144,37 +164,41 @@ bucket_definitions:
     // This specific case tested here cannot happen with postgres in practice, but we still
     // test this to ensure correct deserialization.
 
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT group_id FROM test WHERE n1 = token_parameters.n1
     data: []
-`
+`
+    );
 
-    const storage = (await factory()).getInstance(
+    const storage = (await factory()).getInstance(sync_rules);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 't1',
           group_id: 'group1',
           n1: 1152921504606846976n // 2^60
-        }
+        },
+        afterReplicaId: rid('t1')
       });
 
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         after: {
           id: 't1',
           group_id: 'group1',
           // Simulate a TOAST value, even though it can't happen for values like this
           // in practice.
           n1: undefined
-        }
+        },
+        afterReplicaId: rid('t1')
       });
     });
 
@@ -187,31 +211,32 @@ bucket_definitions:
   });
 
   test('removing row', async () => {
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
-`
-
+`
+    );
+    const storage = (await factory()).getInstance(sync_rules);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           description: 'test1'
-        }
+        },
+        afterReplicaId: rid('test1')
       });
       await batch.save({
         sourceTable,
-        tag:
-
-          id: 'test1'
-        }
+        tag: SaveOperationTag.DELETE,
+        beforeReplicaId: rid('test1')
       });
     });
 
@@ -247,25 +272,29 @@ bucket_definitions:
   test('save and load parameters with workspaceId', async () => {
     const WORKSPACE_TABLE = makeTestTable('workspace', ['id']);
 
-    const
+    const sync_rules_content = testRules(
+      `
 bucket_definitions:
   by_workspace:
     parameters:
       - SELECT id as workspace_id FROM workspace WHERE
         workspace."userId" = token_parameters.user_id
     data: []
-`
+`
+    );
+    const sync_rules = sync_rules_content.parsed(PARSE_OPTIONS).sync_rules;
 
-    const storage = (await factory()).getInstance(
+    const storage = (await factory()).getInstance(sync_rules_content);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'workspace1',
           userId: 'u1'
-        }
+        },
+        afterReplicaId: rid('workspace1')
       });
     });
 
@@ -293,43 +322,49 @@ bucket_definitions:
   test('save and load parameters with dynamic global buckets', async () => {
     const WORKSPACE_TABLE = makeTestTable('workspace');
 
-    const
+    const sync_rules_content = testRules(
+      `
 bucket_definitions:
   by_public_workspace:
     parameters:
       - SELECT id as workspace_id FROM workspace WHERE
         workspace.visibility = 'public'
     data: []
-`
+`
+    );
+    const sync_rules = sync_rules_content.parsed(PARSE_OPTIONS).sync_rules;
 
-    const storage = (await factory()).getInstance(
+    const storage = (await factory()).getInstance(sync_rules_content);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'workspace1',
           visibility: 'public'
-        }
+        },
+        afterReplicaId: rid('workspace1')
       });
 
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'workspace2',
           visibility: 'private'
-        }
+        },
+        afterReplicaId: rid('workspace2')
       });
 
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'workspace3',
           visibility: 'public'
-        }
+        },
+        afterReplicaId: rid('workspace3')
      });
    });
 
@@ -359,7 +394,8 @@ bucket_definitions:
   test('multiple parameter queries', async () => {
     const WORKSPACE_TABLE = makeTestTable('workspace');
 
-    const
+    const sync_rules_content = testRules(
+      `
 bucket_definitions:
   by_workspace:
     parameters:
@@ -368,47 +404,53 @@ bucket_definitions:
       - SELECT id as workspace_id FROM workspace WHERE
         workspace.user_id = token_parameters.user_id
     data: []
-`
+`
+    );
+    const sync_rules = sync_rules_content.parsed(PARSE_OPTIONS).sync_rules;
 
-    const storage = (await factory()).getInstance(
+    const storage = (await factory()).getInstance(sync_rules_content);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'workspace1',
           visibility: 'public'
-        }
+        },
+        afterReplicaId: rid('workspace1')
       });
 
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'workspace2',
           visibility: 'private'
-        }
+        },
+        afterReplicaId: rid('workspace2')
       });
 
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'workspace3',
           user_id: 'u1',
           visibility: 'private'
-        }
+        },
+        afterReplicaId: rid('workspace3')
       });
 
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'workspace4',
           user_id: 'u2',
           visibility: 'private'
-        }
+        },
+        afterReplicaId: rid('workspace4')
       });
     });
 
@@ -445,43 +487,48 @@ bucket_definitions:
   });
 
   test('changing client ids', async () => {
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT client_id as id, description FROM "%"
-`
-
+`
+    );
+    const storage = (await factory()).getInstance(sync_rules);
 
     const sourceTable = TEST_TABLE;
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           client_id: 'client1a',
           description: 'test1a'
-        }
+        },
+        afterReplicaId: rid('test1')
       });
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         after: {
           id: 'test1',
           client_id: 'client1b',
           description: 'test1b'
-        }
+        },
+        afterReplicaId: rid('test1')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test2',
           client_id: 'client2',
           description: 'test2'
-        }
+        },
+        afterReplicaId: rid('test2')
       });
     });
     const checkpoint = result!.flushed_op;
@@ -502,48 +549,47 @@ bucket_definitions:
   });
 
   test('re-apply delete', async () => {
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
-`
-
+`
+    );
+    const storage = (await factory()).getInstance(sync_rules);
 
-    await storage.startBatch(
+    await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           description: 'test1'
-        }
+        },
+        afterReplicaId: rid('test1')
       });
     });
 
-    await storage.startBatch(
+    await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
         sourceTable,
-        tag:
-
-          id: 'test1'
-        }
+        tag: SaveOperationTag.DELETE,
+        beforeReplicaId: rid('test1')
       });
     });
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
         sourceTable,
-        tag:
-
-          id: 'test1'
-        }
+        tag: SaveOperationTag.DELETE,
+        beforeReplicaId: rid('test1')
       });
     });
 
@@ -577,84 +623,87 @@ bucket_definitions:
   });
 
   test('re-apply update + delete', async () => {
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
-`
-
+`
+    );
+    const storage = (await factory()).getInstance(sync_rules);
 
-    await storage.startBatch(
+    await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           description: 'test1'
-        }
+        },
+        afterReplicaId: rid('test1')
       });
     });
 
-    await storage.startBatch(
+    await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         after: {
           id: 'test1',
           description: undefined
-        }
+        },
+        afterReplicaId: rid('test1')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         after: {
           id: 'test1',
           description: undefined
-        }
+        },
+        afterReplicaId: rid('test1')
       });
 
       await batch.save({
         sourceTable,
-        tag:
-
-          id: 'test1'
-        }
+        tag: SaveOperationTag.DELETE,
+        beforeReplicaId: rid('test1')
       });
     });
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         after: {
           id: 'test1',
           description: undefined
-        }
+        },
+        afterReplicaId: rid('test1')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         after: {
           id: 'test1',
           description: undefined
-        }
+        },
+        afterReplicaId: rid('test1')
       });
 
       await batch.save({
         sourceTable,
-        tag:
-
-          id: 'test1'
-        }
+        tag: SaveOperationTag.DELETE,
+        beforeReplicaId: rid('test1')
       });
     });
 
@@ -691,26 +740,29 @@ bucket_definitions:
   });
 
   test('truncate parameters', async () => {
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
     data: []
-`
+`
+    );
 
-    const storage = (await factory()).getInstance(
+    const storage = (await factory()).getInstance(sync_rules);
 
-    await storage.startBatch(
+    await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 't2',
           id1: 'user3',
           id2: 'user4',
           group_id: 'group2a'
-        }
+        },
+        afterReplicaId: rid('t2')
       });
 
       await batch.truncate([TEST_TABLE]);
@@ -731,106 +783,120 @@ bucket_definitions:
     // 1. Not getting the correct "current_data" state for each operation.
     // 2. Output order not being correct.
 
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "test"
-`
-
+`
+    );
+    const storage = (await factory()).getInstance(sync_rules);
 
     // Pre-setup
-    const result1 = await storage.startBatch(
+    const result1 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           description: 'test1a'
-        }
+        },
+        afterReplicaId: rid('test1')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test2',
           description: 'test2a'
-        }
+        },
+        afterReplicaId: rid('test2')
       });
     });
 
     const checkpoint1 = result1?.flushed_op ?? '0';
 
     // Test batch
-    const result2 = await storage.startBatch(
+    const result2 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
       // b
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           description: 'test1b'
-        }
+        },
+        afterReplicaId: rid('test1')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         before: {
           id: 'test1'
         },
+        beforeReplicaId: rid('test1'),
         after: {
           id: 'test2',
           description: 'test2b'
-        }
+        },
+        afterReplicaId: rid('test2')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         before: {
           id: 'test2'
         },
+        beforeReplicaId: rid('test2'),
         after: {
           id: 'test3',
           description: 'test3b'
-        }
+        },
+
+        afterReplicaId: rid('test3')
       });
 
       // c
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         after: {
           id: 'test2',
           description: 'test2c'
-        }
+        },
+        afterReplicaId: rid('test2')
       });
 
       // d
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test4',
           description: 'test4d'
-        }
+        },
+        afterReplicaId: rid('test4')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         before: {
           id: 'test4'
         },
+        beforeReplicaId: rid('test4'),
         after: {
           id: 'test5',
           description: 'test5d'
-        }
+        },
+        afterReplicaId: rid('test5')
       });
     });
 
@@ -865,55 +931,67 @@ bucket_definitions:
   });
 
   test('changed data with replica identity full', async () => {
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "test"
-`
-
+`
+    );
+    function rid2(id: string, description: string) {
+      return getUuidReplicaIdentityBson({ id, description }, [
+        { name: 'id', type: 'VARCHAR', typeId: 25 },
+        { name: 'description', type: 'VARCHAR', typeId: 25 }
+      ]);
+    }
+    const storage = (await factory()).getInstance(sync_rules);
 
     const sourceTable = makeTestTable('test', ['id', 'description']);
 
     // Pre-setup
-    const result1 = await storage.startBatch(
+    const result1 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           description: 'test1a'
-        }
+        },
+        afterReplicaId: rid2('test1', 'test1a')
       });
     });
 
     const checkpoint1 = result1?.flushed_op ?? '0';
 
-    const result2 = await storage.startBatch(
+    const result2 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       // Unchanged, but has a before id
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         before: {
           id: 'test1',
           description: 'test1a'
         },
+        beforeReplicaId: rid2('test1', 'test1a'),
         after: {
           id: 'test1',
           description: 'test1b'
-        }
+        },
+        afterReplicaId: rid2('test1', 'test1b')
       });
     });
 
-    const result3 = await storage.startBatch(
+    const result3 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       // Delete
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.DELETE,
         before: {
           id: 'test1',
           description: 'test1b'
         },
+        beforeReplicaId: rid2('test1', 'test1b'),
         after: undefined
       });
     });
@@ -957,55 +1035,68 @@ bucket_definitions:
   });
 
   test('unchanged data with replica identity full', async () => {
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "test"
-`
-
+`
+    );
+    function rid2(id: string, description: string) {
+      return getUuidReplicaIdentityBson({ id, description }, [
+        { name: 'id', type: 'VARCHAR', typeId: 25 },
+        { name: 'description', type: 'VARCHAR', typeId: 25 }
+      ]);
+    }
+
+    const storage = (await factory()).getInstance(sync_rules);
 
     const sourceTable = makeTestTable('test', ['id', 'description']);
 
     // Pre-setup
-    const result1 = await storage.startBatch(
+    const result1 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           description: 'test1a'
-        }
+        },
+        afterReplicaId: rid2('test1', 'test1a')
       });
     });
 
     const checkpoint1 = result1?.flushed_op ?? '0';
 
-    const result2 = await storage.startBatch(
+    const result2 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       // Unchanged, but has a before id
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.UPDATE,
         before: {
           id: 'test1',
           description: 'test1a'
         },
+        beforeReplicaId: rid2('test1', 'test1a'),
         after: {
           id: 'test1',
           description: 'test1a'
-        }
+        },
+        afterReplicaId: rid2('test1', 'test1a')
       });
     });
 
-    const result3 = await storage.startBatch(
+    const result3 = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       // Delete
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.DELETE,
         before: {
           id: 'test1',
           description: 'test1a'
         },
+        beforeReplicaId: rid2('test1', 'test1a'),
         after: undefined
       });
     });
@@ -1046,54 +1137,60 @@ bucket_definitions:
     // but large enough in size to be split over multiple returned batches.
     // The specific batch splits is an implementation detail of the storage driver,
     // and the test will have to updated when other implementations are added.
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
-`
-
+`
+    );
+    const storage = (await factory()).getInstance(sync_rules);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       const largeDescription = '0123456789'.repeat(12_000_00);
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           description: 'test1'
-        }
+        },
+        afterReplicaId: rid('test1')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'large1',
           description: largeDescription
-        }
+        },
+        afterReplicaId: rid('large1')
      });
 
       // Large enough to split the returned batch
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'large2',
           description: largeDescription
-        }
+        },
+        afterReplicaId: rid('large2')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test3',
           description: 'test3'
-        }
+        },
+        afterReplicaId: rid('test3')
       });
     });
 
@@ -1138,54 +1235,60 @@ bucket_definitions:
     // Test syncing a batch of data that is small in count,
     // but large enough in size to be split over multiple returned chunks.
     // Similar to the above test, but splits over 1MB chunks.
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
-`
-
+`
+    );
+    const storage = (await factory()).getInstance(sync_rules);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       const largeDescription = '0123456789'.repeat(2_000_00);
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test1',
           description: 'test1'
-        }
+        },
+        afterReplicaId: rid('test1')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'large1',
           description: largeDescription
-        }
+        },
+        afterReplicaId: rid('large1')
       });
 
       // Large enough to split the returned batch
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'large2',
           description: largeDescription
-        }
+        },
+        afterReplicaId: rid('large2')
       });
 
       await batch.save({
         sourceTable,
-        tag:
+        tag: SaveOperationTag.INSERT,
         after: {
           id: 'test3',
           description: 'test3'
-        }
+        },
+        afterReplicaId: rid('test3')
       });
     });
 
@@ -1227,25 +1330,28 @@ bucket_definitions:
 
   test('long batch', async () => {
     // Test syncing a batch of data that is limited by count.
-    const sync_rules =
+    const sync_rules = testRules(
+      `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
-`
-
+`
+    );
+    const storage = (await factory()).getInstance(sync_rules);
 
-    const result = await storage.startBatch(
+    const result = await storage.startBatch(BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       for (let i = 1; i <= 6; i++) {
         await batch.save({
           sourceTable,
-          tag:
+          tag: SaveOperationTag.INSERT,
           after: {
             id: `test${i}`,
             description: `test${i}`
-          }
+          },
+          afterReplicaId: `test${i}`
         });
       }
     });
@@ -1295,6 +1401,44 @@ bucket_definitions:
     expect(getBatchMeta(batch3)).toEqual(null);
   });
 
+  test('batch should be disposed automatically', async () => {
+    const sync_rules = testRules(`
+bucket_definitions:
+  global:
+    data: []
+`);
+
+    const storage = (await factory()).getInstance(sync_rules);
+
+    let isDisposed = false;
+    await storage.startBatch(BATCH_OPTIONS, async (batch) => {
+      batch.registerListener({
+        disposed: () => {
+          isDisposed = true;
+        }
+      });
+    });
+    expect(isDisposed).true;
+
+    isDisposed = false;
+    let errorCaught = false;
+    try {
+      await storage.startBatch(BATCH_OPTIONS, async (batch) => {
+        batch.registerListener({
+          disposed: () => {
+            isDisposed = true;
+          }
+        });
+        throw new Error(`Testing exceptions`);
+      });
+    } catch (ex) {
+      errorCaught = true;
+      expect(ex.message.includes('Testing')).true;
+    }
+    expect(errorCaught).true;
+    expect(isDisposed).true;
+  });
+
   test('empty storage metrics', async () => {
     const f = await factory({ dropAll: true });
 
@@ -1306,7 +1450,7 @@ bucket_definitions:
     });
 
     const r = await f.configureSyncRules('bucket_definitions: {}');
-    const storage = f.getInstance(r.persisted_sync_rules
+    const storage = f.getInstance(r.persisted_sync_rules!);
     await storage.autoActivate();
 
     const metrics2 = await f.getStorageMetrics();
@@ -1316,4 +1460,40 @@ bucket_definitions:
       replication_size_bytes: 0
     });
   });
+
+  test('invalidate cached parsed sync rules', async () => {
+    const sync_rules_content = testRules(
+      `
+bucket_definitions:
+  by_workspace:
+    parameters:
+      - SELECT id as workspace_id FROM workspace WHERE
+        workspace."userId" = token_parameters.user_id
+    data: []
+`
+    );
+
+    const bucketStorageFactory = await factory();
+    const syncBucketStorage = bucketStorageFactory.getInstance(sync_rules_content);
+
+    const parsedSchema1 = syncBucketStorage.getParsedSyncRules({
+      defaultSchema: 'public'
+    });
+
+    const parsedSchema2 = syncBucketStorage.getParsedSyncRules({
+      defaultSchema: 'public'
+    });
+
+    // These should be cached, this will be the same instance
+    expect(parsedSchema2).equals(parsedSchema1);
+    expect(parsedSchema1.getSourceTables()[0].schema).equals('public');
+
+    const parsedSchema3 = syncBucketStorage.getParsedSyncRules({
+      defaultSchema: 'databasename'
+    });
+
+    // The cache should not be used
+    expect(parsedSchema3).not.equals(parsedSchema2);
+    expect(parsedSchema3.getSourceTables()[0].schema).equals('databasename');
+  });
 }