@effect/cluster 0.28.4 → 0.29.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ClusterError/package.json +6 -0
- package/ClusterMetrics/package.json +6 -0
- package/ClusterSchema/package.json +6 -0
- package/DeliverAt/package.json +6 -0
- package/Entity/package.json +6 -0
- package/EntityAddress/package.json +6 -0
- package/EntityId/package.json +6 -0
- package/EntityType/package.json +6 -0
- package/Envelope/package.json +6 -0
- package/HttpCommon/package.json +6 -0
- package/HttpRunner/package.json +6 -0
- package/HttpShardManager/package.json +6 -0
- package/MachineId/package.json +6 -0
- package/MessageStorage/package.json +6 -0
- package/README.md +2 -2
- package/Reply/package.json +6 -0
- package/Runner/package.json +6 -0
- package/RunnerAddress/package.json +6 -0
- package/RunnerHealth/package.json +6 -0
- package/RunnerServer/package.json +6 -0
- package/Runners/package.json +6 -0
- package/ShardStorage/package.json +6 -0
- package/Singleton/package.json +6 -0
- package/SingletonAddress/package.json +6 -0
- package/Snowflake/package.json +6 -0
- package/SocketRunner/package.json +6 -0
- package/SocketShardManager/package.json +6 -0
- package/SqlMessageStorage/package.json +6 -0
- package/SqlShardStorage/package.json +6 -0
- package/SynchronizedClock/package.json +6 -0
- package/dist/cjs/ClusterError.js +180 -0
- package/dist/cjs/ClusterError.js.map +1 -0
- package/dist/cjs/ClusterMetrics.js +63 -0
- package/dist/cjs/ClusterMetrics.js.map +1 -0
- package/dist/cjs/{Pods.js → ClusterSchema.js} +10 -22
- package/dist/cjs/ClusterSchema.js.map +1 -0
- package/dist/cjs/DeliverAt.js +30 -0
- package/dist/cjs/DeliverAt.js.map +1 -0
- package/dist/cjs/Entity.js +187 -0
- package/dist/cjs/Entity.js.map +1 -0
- package/dist/cjs/EntityAddress.js +54 -0
- package/dist/cjs/EntityAddress.js.map +1 -0
- package/dist/cjs/{AtLeastOnce.js → EntityId.js} +6 -7
- package/dist/cjs/EntityId.js.map +1 -0
- package/dist/cjs/{ShardManagerClient.js → EntityType.js} +5 -16
- package/dist/cjs/EntityType.js.map +1 -0
- package/dist/cjs/Envelope.js +168 -0
- package/dist/cjs/Envelope.js.map +1 -0
- package/dist/cjs/HttpCommon.js +49 -0
- package/dist/cjs/HttpCommon.js.map +1 -0
- package/dist/cjs/HttpRunner.js +108 -0
- package/dist/cjs/HttpRunner.js.map +1 -0
- package/dist/cjs/HttpShardManager.js +140 -0
- package/dist/cjs/HttpShardManager.js.map +1 -0
- package/dist/cjs/{AtLeastOnceStorage.js → MachineId.js} +11 -9
- package/dist/cjs/MachineId.js.map +1 -0
- package/dist/cjs/Message.js +99 -18
- package/dist/cjs/Message.js.map +1 -1
- package/dist/cjs/MessageStorage.js +356 -0
- package/dist/cjs/MessageStorage.js.map +1 -0
- package/dist/cjs/Reply.js +200 -0
- package/dist/cjs/Reply.js.map +1 -0
- package/dist/cjs/Runner.js +79 -0
- package/dist/cjs/Runner.js.map +1 -0
- package/dist/cjs/RunnerAddress.js +63 -0
- package/dist/cjs/RunnerAddress.js.map +1 -0
- package/dist/cjs/RunnerHealth.js +68 -0
- package/dist/cjs/RunnerHealth.js.map +1 -0
- package/dist/cjs/RunnerServer.js +125 -0
- package/dist/cjs/RunnerServer.js.map +1 -0
- package/dist/cjs/Runners.js +344 -0
- package/dist/cjs/Runners.js.map +1 -0
- package/dist/cjs/ShardId.js +7 -46
- package/dist/cjs/ShardId.js.map +1 -1
- package/dist/cjs/ShardManager.js +493 -8
- package/dist/cjs/ShardManager.js.map +1 -1
- package/dist/cjs/ShardStorage.js +139 -0
- package/dist/cjs/ShardStorage.js.map +1 -0
- package/dist/cjs/Sharding.js +732 -88
- package/dist/cjs/Sharding.js.map +1 -1
- package/dist/cjs/ShardingConfig.js +85 -18
- package/dist/cjs/ShardingConfig.js.map +1 -1
- package/dist/cjs/ShardingRegistrationEvent.js +26 -32
- package/dist/cjs/ShardingRegistrationEvent.js.map +1 -1
- package/dist/cjs/{ManagerConfig.js → Singleton.js} +11 -20
- package/dist/cjs/Singleton.js.map +1 -0
- package/dist/cjs/SingletonAddress.js +50 -0
- package/dist/cjs/SingletonAddress.js.map +1 -0
- package/dist/cjs/Snowflake.js +133 -0
- package/dist/cjs/Snowflake.js.map +1 -0
- package/dist/cjs/SocketRunner.js +40 -0
- package/dist/cjs/SocketRunner.js.map +1 -0
- package/dist/cjs/SocketShardManager.js +33 -0
- package/dist/cjs/SocketShardManager.js.map +1 -0
- package/dist/cjs/SqlMessageStorage.js +668 -0
- package/dist/cjs/SqlMessageStorage.js.map +1 -0
- package/dist/cjs/SqlShardStorage.js +228 -0
- package/dist/cjs/SqlShardStorage.js.map +1 -0
- package/dist/cjs/SynchronizedClock.js +66 -0
- package/dist/cjs/SynchronizedClock.js.map +1 -0
- package/dist/cjs/index.js +57 -45
- package/dist/cjs/internal/entityManager.js +311 -143
- package/dist/cjs/internal/entityManager.js.map +1 -1
- package/dist/cjs/internal/entityReaper.js +47 -0
- package/dist/cjs/internal/entityReaper.js.map +1 -0
- package/dist/cjs/internal/hash.js +20 -0
- package/dist/cjs/internal/hash.js.map +1 -0
- package/dist/cjs/internal/interruptors.js +9 -0
- package/dist/cjs/internal/interruptors.js.map +1 -0
- package/dist/cjs/internal/resourceMap.js +88 -0
- package/dist/cjs/internal/resourceMap.js.map +1 -0
- package/dist/cjs/internal/resourceRef.js +92 -0
- package/dist/cjs/internal/resourceRef.js.map +1 -0
- package/dist/cjs/internal/shardManager.js +219 -235
- package/dist/cjs/internal/shardManager.js.map +1 -1
- package/dist/dts/ClusterError.d.ts +169 -0
- package/dist/dts/ClusterError.d.ts.map +1 -0
- package/dist/dts/ClusterMetrics.d.ts +50 -0
- package/dist/dts/ClusterMetrics.d.ts.map +1 -0
- package/dist/dts/ClusterSchema.d.ts +13 -0
- package/dist/dts/ClusterSchema.d.ts.map +1 -0
- package/dist/dts/DeliverAt.d.ts +27 -0
- package/dist/dts/DeliverAt.d.ts.map +1 -0
- package/dist/dts/Entity.d.ts +180 -0
- package/dist/dts/Entity.d.ts.map +1 -0
- package/dist/dts/EntityAddress.d.ts +55 -0
- package/dist/dts/EntityAddress.d.ts.map +1 -0
- package/dist/dts/EntityId.d.ts +15 -0
- package/dist/dts/EntityId.d.ts.map +1 -0
- package/dist/dts/EntityType.d.ts +15 -0
- package/dist/dts/EntityType.d.ts.map +1 -0
- package/dist/dts/Envelope.d.ts +252 -0
- package/dist/dts/Envelope.d.ts.map +1 -0
- package/dist/dts/HttpCommon.d.ts +25 -0
- package/dist/dts/HttpCommon.d.ts.map +1 -0
- package/dist/dts/HttpRunner.d.ts +76 -0
- package/dist/dts/HttpRunner.d.ts.map +1 -0
- package/dist/dts/HttpShardManager.d.ts +119 -0
- package/dist/dts/HttpShardManager.d.ts.map +1 -0
- package/dist/dts/MachineId.d.ts +20 -0
- package/dist/dts/MachineId.d.ts.map +1 -0
- package/dist/dts/Message.d.ts +91 -74
- package/dist/dts/Message.d.ts.map +1 -1
- package/dist/dts/MessageStorage.d.ts +336 -0
- package/dist/dts/MessageStorage.d.ts.map +1 -0
- package/dist/dts/Reply.d.ts +171 -0
- package/dist/dts/Reply.d.ts.map +1 -0
- package/dist/dts/Runner.d.ts +81 -0
- package/dist/dts/Runner.d.ts.map +1 -0
- package/dist/dts/RunnerAddress.d.ts +56 -0
- package/dist/dts/RunnerAddress.d.ts.map +1 -0
- package/dist/dts/RunnerHealth.d.ts +54 -0
- package/dist/dts/RunnerHealth.d.ts.map +1 -0
- package/dist/dts/RunnerServer.d.ts +44 -0
- package/dist/dts/RunnerServer.d.ts.map +1 -0
- package/dist/dts/Runners.d.ts +161 -0
- package/dist/dts/Runners.d.ts.map +1 -0
- package/dist/dts/ShardId.d.ts +5 -55
- package/dist/dts/ShardId.d.ts.map +1 -1
- package/dist/dts/ShardManager.d.ts +435 -23
- package/dist/dts/ShardManager.d.ts.map +1 -1
- package/dist/dts/ShardStorage.d.ts +200 -0
- package/dist/dts/ShardStorage.d.ts.map +1 -0
- package/dist/dts/Sharding.d.ts +109 -131
- package/dist/dts/Sharding.d.ts.map +1 -1
- package/dist/dts/ShardingConfig.d.ts +147 -44
- package/dist/dts/ShardingConfig.d.ts.map +1 -1
- package/dist/dts/ShardingRegistrationEvent.d.ts +38 -23
- package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -1
- package/dist/dts/Singleton.d.ts +13 -0
- package/dist/dts/Singleton.d.ts.map +1 -0
- package/dist/dts/SingletonAddress.d.ts +49 -0
- package/dist/dts/SingletonAddress.d.ts.map +1 -0
- package/dist/dts/Snowflake.d.ts +121 -0
- package/dist/dts/Snowflake.d.ts.map +1 -0
- package/dist/dts/SocketRunner.d.ts +22 -0
- package/dist/dts/SocketRunner.d.ts.map +1 -0
- package/dist/dts/SocketShardManager.d.ts +17 -0
- package/dist/dts/SocketShardManager.d.ts.map +1 -0
- package/dist/dts/SqlMessageStorage.d.ts +43 -0
- package/dist/dts/SqlMessageStorage.d.ts.map +1 -0
- package/dist/dts/SqlShardStorage.d.ts +38 -0
- package/dist/dts/SqlShardStorage.d.ts.map +1 -0
- package/dist/dts/SynchronizedClock.d.ts +19 -0
- package/dist/dts/SynchronizedClock.d.ts.map +1 -0
- package/dist/dts/index.d.ts +48 -24
- package/dist/dts/index.d.ts.map +1 -1
- package/dist/dts/internal/entityReaper.d.ts +2 -0
- package/dist/dts/internal/entityReaper.d.ts.map +1 -0
- package/dist/dts/internal/hash.d.ts +2 -0
- package/dist/dts/internal/hash.d.ts.map +1 -0
- package/dist/dts/internal/interruptors.d.ts +2 -0
- package/dist/dts/internal/interruptors.d.ts.map +1 -0
- package/dist/dts/internal/resourceMap.d.ts +22 -0
- package/dist/dts/internal/resourceMap.d.ts.map +1 -0
- package/dist/dts/internal/resourceRef.d.ts +25 -0
- package/dist/dts/internal/resourceRef.d.ts.map +1 -0
- package/dist/dts/internal/shardManager.d.ts +1 -11
- package/dist/dts/internal/shardManager.d.ts.map +1 -1
- package/dist/esm/ClusterError.js +164 -0
- package/dist/esm/ClusterError.js.map +1 -0
- package/dist/esm/ClusterMetrics.js +54 -0
- package/dist/esm/ClusterMetrics.js.map +1 -0
- package/dist/esm/ClusterSchema.js +13 -0
- package/dist/esm/ClusterSchema.js.map +1 -0
- package/dist/esm/DeliverAt.js +22 -0
- package/dist/esm/DeliverAt.js.map +1 -0
- package/dist/esm/Entity.js +173 -0
- package/dist/esm/Entity.js.map +1 -0
- package/dist/esm/EntityAddress.js +44 -0
- package/dist/esm/EntityAddress.js.map +1 -0
- package/dist/esm/EntityId.js +10 -0
- package/dist/esm/EntityId.js.map +1 -0
- package/dist/esm/EntityType.js +10 -0
- package/dist/esm/EntityType.js.map +1 -0
- package/dist/esm/Envelope.js +154 -0
- package/dist/esm/Envelope.js.map +1 -0
- package/dist/esm/HttpCommon.js +38 -0
- package/dist/esm/HttpCommon.js.map +1 -0
- package/dist/esm/HttpRunner.js +98 -0
- package/dist/esm/HttpRunner.js.map +1 -0
- package/dist/esm/HttpShardManager.js +128 -0
- package/dist/esm/HttpShardManager.js.map +1 -0
- package/dist/esm/MachineId.js +17 -0
- package/dist/esm/MachineId.js.map +1 -0
- package/dist/esm/Message.js +88 -17
- package/dist/esm/Message.js.map +1 -1
- package/dist/esm/MessageStorage.js +345 -0
- package/dist/esm/MessageStorage.js.map +1 -0
- package/dist/esm/Reply.js +184 -0
- package/dist/esm/Reply.js.map +1 -0
- package/dist/esm/Runner.js +68 -0
- package/dist/esm/Runner.js.map +1 -0
- package/dist/esm/RunnerAddress.js +52 -0
- package/dist/esm/RunnerAddress.js.map +1 -0
- package/dist/esm/RunnerHealth.js +58 -0
- package/dist/esm/RunnerHealth.js.map +1 -0
- package/dist/esm/RunnerServer.js +116 -0
- package/dist/esm/RunnerServer.js.map +1 -0
- package/dist/esm/Runners.js +332 -0
- package/dist/esm/Runners.js.map +1 -0
- package/dist/esm/ShardId.js +5 -42
- package/dist/esm/ShardId.js.map +1 -1
- package/dist/esm/ShardManager.js +486 -7
- package/dist/esm/ShardManager.js.map +1 -1
- package/dist/esm/ShardStorage.js +129 -0
- package/dist/esm/ShardStorage.js.map +1 -0
- package/dist/esm/Sharding.js +730 -87
- package/dist/esm/Sharding.js.map +1 -1
- package/dist/esm/ShardingConfig.js +80 -17
- package/dist/esm/ShardingConfig.js.map +1 -1
- package/dist/esm/ShardingRegistrationEvent.js +19 -29
- package/dist/esm/ShardingRegistrationEvent.js.map +1 -1
- package/dist/esm/Singleton.js +15 -0
- package/dist/esm/Singleton.js.map +1 -0
- package/dist/esm/SingletonAddress.js +40 -0
- package/dist/esm/SingletonAddress.js.map +1 -0
- package/dist/esm/Snowflake.js +117 -0
- package/dist/esm/Snowflake.js.map +1 -0
- package/dist/esm/SocketRunner.js +31 -0
- package/dist/esm/SocketRunner.js.map +1 -0
- package/dist/esm/SocketShardManager.js +24 -0
- package/dist/esm/SocketShardManager.js.map +1 -0
- package/dist/esm/SqlMessageStorage.js +658 -0
- package/dist/esm/SqlMessageStorage.js.map +1 -0
- package/dist/esm/SqlShardStorage.js +218 -0
- package/dist/esm/SqlShardStorage.js.map +1 -0
- package/dist/esm/SynchronizedClock.js +57 -0
- package/dist/esm/SynchronizedClock.js.map +1 -0
- package/dist/esm/index.js +48 -24
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/internal/entityManager.js +311 -142
- package/dist/esm/internal/entityManager.js.map +1 -1
- package/dist/esm/internal/entityReaper.js +38 -0
- package/dist/esm/internal/entityReaper.js.map +1 -0
- package/dist/esm/internal/hash.js +12 -0
- package/dist/esm/internal/hash.js.map +1 -0
- package/dist/esm/internal/interruptors.js +3 -0
- package/dist/esm/internal/interruptors.js.map +1 -0
- package/dist/esm/internal/resourceMap.js +79 -0
- package/dist/esm/internal/resourceMap.js.map +1 -0
- package/dist/esm/internal/resourceRef.js +83 -0
- package/dist/esm/internal/resourceRef.js.map +1 -0
- package/dist/esm/internal/shardManager.js +217 -233
- package/dist/esm/internal/shardManager.js.map +1 -1
- package/package.json +212 -154
- package/src/ClusterError.ts +193 -0
- package/src/ClusterMetrics.ts +62 -0
- package/src/ClusterSchema.ts +13 -0
- package/src/DeliverAt.ts +36 -0
- package/src/Entity.ts +438 -0
- package/src/EntityAddress.ts +55 -0
- package/src/EntityId.ts +16 -0
- package/src/EntityType.ts +16 -0
- package/src/Envelope.ts +352 -0
- package/src/HttpCommon.ts +73 -0
- package/src/HttpRunner.ts +196 -0
- package/src/HttpShardManager.ts +273 -0
- package/src/MachineId.ts +27 -0
- package/src/Message.ts +143 -92
- package/src/MessageStorage.ts +697 -0
- package/src/Reply.ts +295 -0
- package/src/Runner.ts +84 -0
- package/src/RunnerAddress.ts +61 -0
- package/src/RunnerHealth.ts +87 -0
- package/src/RunnerServer.ts +156 -0
- package/src/Runners.ts +533 -0
- package/src/ShardId.ts +10 -62
- package/src/ShardManager.ts +780 -29
- package/src/ShardStorage.ts +289 -0
- package/src/Sharding.ts +1060 -183
- package/src/ShardingConfig.ts +186 -45
- package/src/ShardingRegistrationEvent.ts +38 -39
- package/src/Singleton.ts +20 -0
- package/src/SingletonAddress.ts +47 -0
- package/src/Snowflake.ts +194 -0
- package/src/SocketRunner.ts +59 -0
- package/src/SocketShardManager.ts +48 -0
- package/src/SqlMessageStorage.ts +833 -0
- package/src/SqlShardStorage.ts +292 -0
- package/src/SynchronizedClock.ts +82 -0
- package/src/index.ts +54 -24
- package/src/internal/entityManager.ts +464 -361
- package/src/internal/entityReaper.ts +53 -0
- package/src/internal/hash.ts +11 -0
- package/src/internal/interruptors.ts +4 -0
- package/src/internal/resourceMap.ts +89 -0
- package/src/internal/resourceRef.ts +88 -0
- package/src/internal/shardManager.ts +273 -546
- package/AtLeastOnce/package.json +0 -6
- package/AtLeastOnceStorage/package.json +0 -6
- package/Broadcaster/package.json +0 -6
- package/ManagerConfig/package.json +0 -6
- package/MessageState/package.json +0 -6
- package/Messenger/package.json +0 -6
- package/Pod/package.json +0 -6
- package/PodAddress/package.json +0 -6
- package/Pods/package.json +0 -6
- package/PodsHealth/package.json +0 -6
- package/PoisonPill/package.json +0 -6
- package/RecipientAddress/package.json +0 -6
- package/RecipientBehaviour/package.json +0 -6
- package/RecipientBehaviourContext/package.json +0 -6
- package/RecipientType/package.json +0 -6
- package/Serialization/package.json +0 -6
- package/SerializedEnvelope/package.json +0 -6
- package/SerializedMessage/package.json +0 -6
- package/ShardManagerClient/package.json +0 -6
- package/ShardingEvent/package.json +0 -6
- package/ShardingException/package.json +0 -6
- package/Storage/package.json +0 -6
- package/dist/cjs/AtLeastOnce.js.map +0 -1
- package/dist/cjs/AtLeastOnceStorage.js.map +0 -1
- package/dist/cjs/Broadcaster.js +0 -6
- package/dist/cjs/Broadcaster.js.map +0 -1
- package/dist/cjs/ManagerConfig.js.map +0 -1
- package/dist/cjs/MessageState.js +0 -55
- package/dist/cjs/MessageState.js.map +0 -1
- package/dist/cjs/Messenger.js +0 -6
- package/dist/cjs/Messenger.js.map +0 -1
- package/dist/cjs/Pod.js +0 -78
- package/dist/cjs/Pod.js.map +0 -1
- package/dist/cjs/PodAddress.js +0 -77
- package/dist/cjs/PodAddress.js.map +0 -1
- package/dist/cjs/Pods.js.map +0 -1
- package/dist/cjs/PodsHealth.js +0 -41
- package/dist/cjs/PodsHealth.js.map +0 -1
- package/dist/cjs/PoisonPill.js +0 -78
- package/dist/cjs/PoisonPill.js.map +0 -1
- package/dist/cjs/RecipientAddress.js +0 -79
- package/dist/cjs/RecipientAddress.js.map +0 -1
- package/dist/cjs/RecipientBehaviour.js +0 -38
- package/dist/cjs/RecipientBehaviour.js.map +0 -1
- package/dist/cjs/RecipientBehaviourContext.js +0 -64
- package/dist/cjs/RecipientBehaviourContext.js.map +0 -1
- package/dist/cjs/RecipientType.js +0 -123
- package/dist/cjs/RecipientType.js.map +0 -1
- package/dist/cjs/Serialization.js +0 -32
- package/dist/cjs/Serialization.js.map +0 -1
- package/dist/cjs/SerializedEnvelope.js +0 -87
- package/dist/cjs/SerializedEnvelope.js.map +0 -1
- package/dist/cjs/SerializedMessage.js +0 -64
- package/dist/cjs/SerializedMessage.js.map +0 -1
- package/dist/cjs/ShardManagerClient.js.map +0 -1
- package/dist/cjs/ShardingEvent.js +0 -72
- package/dist/cjs/ShardingEvent.js.map +0 -1
- package/dist/cjs/ShardingException.js +0 -107
- package/dist/cjs/ShardingException.js.map +0 -1
- package/dist/cjs/Storage.js +0 -40
- package/dist/cjs/Storage.js.map +0 -1
- package/dist/cjs/internal/atLeastOnce.js +0 -35
- package/dist/cjs/internal/atLeastOnce.js.map +0 -1
- package/dist/cjs/internal/atLeastOnceStorage.js +0 -163
- package/dist/cjs/internal/atLeastOnceStorage.js.map +0 -1
- package/dist/cjs/internal/entityState.js +0 -47
- package/dist/cjs/internal/entityState.js.map +0 -1
- package/dist/cjs/internal/managerConfig.js +0 -46
- package/dist/cjs/internal/managerConfig.js.map +0 -1
- package/dist/cjs/internal/message.js +0 -48
- package/dist/cjs/internal/message.js.map +0 -1
- package/dist/cjs/internal/messageState.js +0 -79
- package/dist/cjs/internal/messageState.js.map +0 -1
- package/dist/cjs/internal/podWithMetadata.js +0 -54
- package/dist/cjs/internal/podWithMetadata.js.map +0 -1
- package/dist/cjs/internal/pods.js +0 -35
- package/dist/cjs/internal/pods.js.map +0 -1
- package/dist/cjs/internal/podsHealth.js +0 -40
- package/dist/cjs/internal/podsHealth.js.map +0 -1
- package/dist/cjs/internal/recipientBehaviour.js +0 -52
- package/dist/cjs/internal/recipientBehaviour.js.map +0 -1
- package/dist/cjs/internal/recipientBehaviourContext.js +0 -36
- package/dist/cjs/internal/recipientBehaviourContext.js.map +0 -1
- package/dist/cjs/internal/serialization.js +0 -48
- package/dist/cjs/internal/serialization.js.map +0 -1
- package/dist/cjs/internal/shardManagerClient.js +0 -48
- package/dist/cjs/internal/shardManagerClient.js.map +0 -1
- package/dist/cjs/internal/shardManagerState.js +0 -44
- package/dist/cjs/internal/shardManagerState.js.map +0 -1
- package/dist/cjs/internal/sharding.js +0 -306
- package/dist/cjs/internal/sharding.js.map +0 -1
- package/dist/cjs/internal/shardingConfig.js +0 -56
- package/dist/cjs/internal/shardingConfig.js.map +0 -1
- package/dist/cjs/internal/storage.js +0 -52
- package/dist/cjs/internal/storage.js.map +0 -1
- package/dist/cjs/internal/utils.js +0 -69
- package/dist/cjs/internal/utils.js.map +0 -1
- package/dist/dts/AtLeastOnce.d.ts +0 -20
- package/dist/dts/AtLeastOnce.d.ts.map +0 -1
- package/dist/dts/AtLeastOnceStorage.d.ts +0 -75
- package/dist/dts/AtLeastOnceStorage.d.ts.map +0 -1
- package/dist/dts/Broadcaster.d.ts +0 -32
- package/dist/dts/Broadcaster.d.ts.map +0 -1
- package/dist/dts/ManagerConfig.d.ts +0 -61
- package/dist/dts/ManagerConfig.d.ts.map +0 -1
- package/dist/dts/MessageState.d.ts +0 -107
- package/dist/dts/MessageState.d.ts.map +0 -1
- package/dist/dts/Messenger.d.ts +0 -32
- package/dist/dts/Messenger.d.ts.map +0 -1
- package/dist/dts/Pod.d.ts +0 -81
- package/dist/dts/Pod.d.ts.map +0 -1
- package/dist/dts/PodAddress.d.ts +0 -80
- package/dist/dts/PodAddress.d.ts.map +0 -1
- package/dist/dts/Pods.d.ts +0 -78
- package/dist/dts/Pods.d.ts.map +0 -1
- package/dist/dts/PodsHealth.d.ts +0 -66
- package/dist/dts/PodsHealth.d.ts.map +0 -1
- package/dist/dts/PoisonPill.d.ts +0 -78
- package/dist/dts/PoisonPill.d.ts.map +0 -1
- package/dist/dts/RecipientAddress.d.ts +0 -57
- package/dist/dts/RecipientAddress.d.ts.map +0 -1
- package/dist/dts/RecipientBehaviour.d.ts +0 -72
- package/dist/dts/RecipientBehaviour.d.ts.map +0 -1
- package/dist/dts/RecipientBehaviourContext.d.ts +0 -83
- package/dist/dts/RecipientBehaviourContext.d.ts.map +0 -1
- package/dist/dts/RecipientType.d.ts +0 -93
- package/dist/dts/RecipientType.d.ts.map +0 -1
- package/dist/dts/Serialization.d.ts +0 -58
- package/dist/dts/Serialization.d.ts.map +0 -1
- package/dist/dts/SerializedEnvelope.d.ts +0 -86
- package/dist/dts/SerializedEnvelope.d.ts.map +0 -1
- package/dist/dts/SerializedMessage.d.ts +0 -66
- package/dist/dts/SerializedMessage.d.ts.map +0 -1
- package/dist/dts/ShardManagerClient.d.ts +0 -50
- package/dist/dts/ShardManagerClient.d.ts.map +0 -1
- package/dist/dts/ShardingEvent.d.ts +0 -90
- package/dist/dts/ShardingEvent.d.ts.map +0 -1
- package/dist/dts/ShardingException.d.ts +0 -125
- package/dist/dts/ShardingException.d.ts.map +0 -1
- package/dist/dts/Storage.d.ts +0 -78
- package/dist/dts/Storage.d.ts.map +0 -1
- package/dist/dts/internal/atLeastOnce.d.ts +0 -2
- package/dist/dts/internal/atLeastOnce.d.ts.map +0 -1
- package/dist/dts/internal/atLeastOnceStorage.d.ts +0 -2
- package/dist/dts/internal/atLeastOnceStorage.d.ts.map +0 -1
- package/dist/dts/internal/entityState.d.ts +0 -21
- package/dist/dts/internal/entityState.d.ts.map +0 -1
- package/dist/dts/internal/managerConfig.d.ts +0 -2
- package/dist/dts/internal/managerConfig.d.ts.map +0 -1
- package/dist/dts/internal/message.d.ts +0 -9
- package/dist/dts/internal/message.d.ts.map +0 -1
- package/dist/dts/internal/messageState.d.ts +0 -2
- package/dist/dts/internal/messageState.d.ts.map +0 -1
- package/dist/dts/internal/podWithMetadata.d.ts +0 -2
- package/dist/dts/internal/podWithMetadata.d.ts.map +0 -1
- package/dist/dts/internal/pods.d.ts +0 -2
- package/dist/dts/internal/pods.d.ts.map +0 -1
- package/dist/dts/internal/podsHealth.d.ts +0 -2
- package/dist/dts/internal/podsHealth.d.ts.map +0 -1
- package/dist/dts/internal/recipientBehaviour.d.ts +0 -2
- package/dist/dts/internal/recipientBehaviour.d.ts.map +0 -1
- package/dist/dts/internal/recipientBehaviourContext.d.ts +0 -2
- package/dist/dts/internal/recipientBehaviourContext.d.ts.map +0 -1
- package/dist/dts/internal/serialization.d.ts +0 -2
- package/dist/dts/internal/serialization.d.ts.map +0 -1
- package/dist/dts/internal/shardManagerClient.d.ts +0 -2
- package/dist/dts/internal/shardManagerClient.d.ts.map +0 -1
- package/dist/dts/internal/shardManagerState.d.ts +0 -26
- package/dist/dts/internal/shardManagerState.d.ts.map +0 -1
- package/dist/dts/internal/sharding.d.ts +0 -2
- package/dist/dts/internal/sharding.d.ts.map +0 -1
- package/dist/dts/internal/shardingConfig.d.ts +0 -2
- package/dist/dts/internal/shardingConfig.d.ts.map +0 -1
- package/dist/dts/internal/storage.d.ts +0 -2
- package/dist/dts/internal/storage.d.ts.map +0 -1
- package/dist/dts/internal/utils.d.ts +0 -2
- package/dist/dts/internal/utils.d.ts.map +0 -1
- package/dist/esm/AtLeastOnce.js +0 -12
- package/dist/esm/AtLeastOnce.js.map +0 -1
- package/dist/esm/AtLeastOnceStorage.js +0 -17
- package/dist/esm/AtLeastOnceStorage.js.map +0 -1
- package/dist/esm/Broadcaster.js +0 -2
- package/dist/esm/Broadcaster.js.map +0 -1
- package/dist/esm/ManagerConfig.js +0 -26
- package/dist/esm/ManagerConfig.js.map +0 -1
- package/dist/esm/MessageState.js +0 -47
- package/dist/esm/MessageState.js.map +0 -1
- package/dist/esm/Messenger.js +0 -2
- package/dist/esm/Messenger.js.map +0 -1
- package/dist/esm/Pod.js +0 -65
- package/dist/esm/Pod.js.map +0 -1
- package/dist/esm/PodAddress.js +0 -64
- package/dist/esm/PodAddress.js.map +0 -1
- package/dist/esm/Pods.js +0 -27
- package/dist/esm/Pods.js.map +0 -1
- package/dist/esm/PodsHealth.js +0 -33
- package/dist/esm/PodsHealth.js.map +0 -1
- package/dist/esm/PoisonPill.js +0 -65
- package/dist/esm/PoisonPill.js.map +0 -1
- package/dist/esm/RecipientAddress.js +0 -67
- package/dist/esm/RecipientAddress.js.map +0 -1
- package/dist/esm/RecipientBehaviour.js +0 -30
- package/dist/esm/RecipientBehaviour.js.map +0 -1
- package/dist/esm/RecipientBehaviourContext.js +0 -56
- package/dist/esm/RecipientBehaviourContext.js.map +0 -1
- package/dist/esm/RecipientType.js +0 -108
- package/dist/esm/RecipientType.js.map +0 -1
- package/dist/esm/Serialization.js +0 -24
- package/dist/esm/Serialization.js.map +0 -1
- package/dist/esm/SerializedEnvelope.js +0 -74
- package/dist/esm/SerializedEnvelope.js.map +0 -1
- package/dist/esm/SerializedMessage.js +0 -51
- package/dist/esm/SerializedMessage.js.map +0 -1
- package/dist/esm/ShardManagerClient.js +0 -22
- package/dist/esm/ShardManagerClient.js.map +0 -1
- package/dist/esm/ShardingEvent.js +0 -62
- package/dist/esm/ShardingEvent.js.map +0 -1
- package/dist/esm/ShardingException.js +0 -91
- package/dist/esm/ShardingException.js.map +0 -1
- package/dist/esm/Storage.js +0 -32
- package/dist/esm/Storage.js.map +0 -1
- package/dist/esm/internal/atLeastOnce.js +0 -26
- package/dist/esm/internal/atLeastOnce.js.map +0 -1
- package/dist/esm/internal/atLeastOnceStorage.js +0 -154
- package/dist/esm/internal/atLeastOnceStorage.js.map +0 -1
- package/dist/esm/internal/entityState.js +0 -35
- package/dist/esm/internal/entityState.js.map +0 -1
- package/dist/esm/internal/managerConfig.js +0 -38
- package/dist/esm/internal/managerConfig.js.map +0 -1
- package/dist/esm/internal/message.js +0 -35
- package/dist/esm/internal/message.js.map +0 -1
- package/dist/esm/internal/messageState.js +0 -66
- package/dist/esm/internal/messageState.js.map +0 -1
- package/dist/esm/internal/podWithMetadata.js +0 -41
- package/dist/esm/internal/podWithMetadata.js.map +0 -1
- package/dist/esm/internal/pods.js +0 -25
- package/dist/esm/internal/pods.js.map +0 -1
- package/dist/esm/internal/podsHealth.js +0 -30
- package/dist/esm/internal/podsHealth.js.map +0 -1
- package/dist/esm/internal/recipientBehaviour.js +0 -42
- package/dist/esm/internal/recipientBehaviour.js.map +0 -1
- package/dist/esm/internal/recipientBehaviourContext.js +0 -26
- package/dist/esm/internal/recipientBehaviourContext.js.map +0 -1
- package/dist/esm/internal/serialization.js +0 -38
- package/dist/esm/internal/serialization.js.map +0 -1
- package/dist/esm/internal/shardManagerClient.js +0 -38
- package/dist/esm/internal/shardManagerClient.js.map +0 -1
- package/dist/esm/internal/shardManagerState.js +0 -36
- package/dist/esm/internal/shardManagerState.js.map +0 -1
- package/dist/esm/internal/sharding.js +0 -288
- package/dist/esm/internal/sharding.js.map +0 -1
- package/dist/esm/internal/shardingConfig.js +0 -47
- package/dist/esm/internal/shardingConfig.js.map +0 -1
- package/dist/esm/internal/storage.js +0 -42
- package/dist/esm/internal/storage.js.map +0 -1
- package/dist/esm/internal/utils.js +0 -56
- package/dist/esm/internal/utils.js.map +0 -1
- package/src/AtLeastOnce.ts +0 -28
- package/src/AtLeastOnceStorage.ts +0 -96
- package/src/Broadcaster.ts +0 -48
- package/src/ManagerConfig.ts +0 -67
- package/src/MessageState.ts +0 -126
- package/src/Messenger.ts +0 -40
- package/src/Pod.ts +0 -95
- package/src/PodAddress.ts +0 -94
- package/src/Pods.ts +0 -100
- package/src/PodsHealth.ts +0 -74
- package/src/PoisonPill.ts +0 -105
- package/src/RecipientAddress.ts +0 -72
- package/src/RecipientBehaviour.ts +0 -108
- package/src/RecipientBehaviourContext.ts +0 -101
- package/src/RecipientType.ts +0 -134
- package/src/Serialization.ts +0 -72
- package/src/SerializedEnvelope.ts +0 -108
- package/src/SerializedMessage.ts +0 -82
- package/src/ShardManagerClient.ts +0 -57
- package/src/ShardingEvent.ts +0 -121
- package/src/ShardingException.ts +0 -151
- package/src/Storage.ts +0 -92
- package/src/internal/atLeastOnce.ts +0 -59
- package/src/internal/atLeastOnceStorage.ts +0 -218
- package/src/internal/entityState.ts +0 -64
- package/src/internal/managerConfig.ts +0 -84
- package/src/internal/message.ts +0 -63
- package/src/internal/messageState.ts +0 -98
- package/src/internal/podWithMetadata.ts +0 -72
- package/src/internal/pods.ts +0 -29
- package/src/internal/podsHealth.ts +0 -39
- package/src/internal/recipientBehaviour.ts +0 -133
- package/src/internal/recipientBehaviourContext.ts +0 -70
- package/src/internal/serialization.ts +0 -63
- package/src/internal/shardManagerClient.ts +0 -49
- package/src/internal/shardManagerState.ts +0 -80
- package/src/internal/sharding.ts +0 -789
- package/src/internal/shardingConfig.ts +0 -97
- package/src/internal/storage.ts +0 -60
- package/src/internal/utils.ts +0 -54
@@ -1,591 +1,318 @@
|
|
1
|
-
|
2
|
-
* @since 1.0.0
|
3
|
-
*/
|
4
|
-
import * as Chunk from "effect/Chunk"
|
1
|
+
import * as Arr from "effect/Array"
|
5
2
|
import * as Clock from "effect/Clock"
|
6
|
-
import { GenericTag } from "effect/Context"
|
7
3
|
import * as Effect from "effect/Effect"
|
8
|
-
import {
|
9
|
-
import
|
10
|
-
import * as
|
11
|
-
import * as HashSet from "effect/HashSet"
|
12
|
-
import * as Layer from "effect/Layer"
|
13
|
-
import * as List from "effect/List"
|
4
|
+
import { constFalse } from "effect/Function"
|
5
|
+
import * as MutableHashMap from "effect/MutableHashMap"
|
6
|
+
import * as MutableHashSet from "effect/MutableHashSet"
|
14
7
|
import * as Option from "effect/Option"
|
15
|
-
import * as
|
16
|
-
import
|
17
|
-
import type
|
18
|
-
import
|
19
|
-
import
|
20
|
-
import
|
21
|
-
import type * as Pod from "../Pod.js"
|
22
|
-
import type * as PodAddress from "../PodAddress.js"
|
23
|
-
import * as Pods from "../Pods.js"
|
24
|
-
import * as PodsHealth from "../PodsHealth.js"
|
25
|
-
import * as ShardId from "../ShardId.js"
|
26
|
-
import * as ShardingEvent from "../ShardingEvent.js"
|
27
|
-
import * as ShardingException from "../ShardingException.js"
|
28
|
-
import type * as ShardManager from "../ShardManager.js"
|
29
|
-
import * as Storage from "../Storage.js"
|
30
|
-
import * as PodWithMetadata from "./podWithMetadata.js"
|
31
|
-
import * as ShardManagerState from "./shardManagerState.js"
|
32
|
-
import { groupBy, minByOption } from "./utils.js"
|
8
|
+
import * as Order from "effect/Order"
|
9
|
+
import type { Runner } from "../Runner.js"
|
10
|
+
import type { RunnerAddress } from "../RunnerAddress.js"
|
11
|
+
import { RunnerHealth } from "../RunnerHealth.js"
|
12
|
+
import { ShardId } from "../ShardId.js"
|
13
|
+
import { ShardStorage } from "../ShardStorage.js"
|
33
14
|
|
34
15
|
/** @internal */
|
35
|
-
|
16
|
+
export class State {
|
17
|
+
static fromStorage = Effect.fnUntraced(function*(numberOfShards: number) {
|
18
|
+
const storage = yield* ShardStorage
|
19
|
+
const runnerHealth = yield* RunnerHealth
|
20
|
+
|
21
|
+
// Fetch registered runners and shard assignments from cluster storage
|
22
|
+
const storedRunners = yield* storage.getRunners
|
23
|
+
const storedAssignments = yield* storage.getAssignments
|
24
|
+
|
25
|
+
// Determine which runners are still alive
|
26
|
+
const deadRunners = Arr.empty<Runner>()
|
27
|
+
const aliveRunners = MutableHashMap.empty<RunnerAddress, Runner>()
|
28
|
+
yield* Effect.forEach(storedRunners, ([address, runner]) =>
|
29
|
+
Effect.map(runnerHealth.isAlive(address), (isAlive) => {
|
30
|
+
if (isAlive) {
|
31
|
+
MutableHashMap.set(aliveRunners, address, runner)
|
32
|
+
} else {
|
33
|
+
deadRunners.push(runner)
|
34
|
+
}
|
35
|
+
}), { concurrency: "unbounded", discard: true })
|
36
|
+
if (deadRunners.length > 0) {
|
37
|
+
yield* Effect.logWarning("Ignoring runners that are no longer considered alive:", deadRunners)
|
38
|
+
}
|
36
39
|
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
)
|
40
|
+
// Determine which shards remain unassigned to a runner
|
41
|
+
const assignedShards = new Map<ShardId, RunnerAddress>()
|
42
|
+
const invalidAssignments = Arr.empty<[ShardId, RunnerAddress]>()
|
43
|
+
for (const [shard, address] of storedAssignments) {
|
44
|
+
if (Option.isSome(address) && MutableHashMap.has(aliveRunners, address.value)) {
|
45
|
+
assignedShards.set(shard, address.value)
|
46
|
+
} else if (Option.isSome(address)) {
|
47
|
+
invalidAssignments.push([shard, address.value])
|
48
|
+
}
|
49
|
+
}
|
50
|
+
if (invalidAssignments.length > 0) {
|
51
|
+
yield* Effect.logWarning(
|
52
|
+
"Ignoring shard assignments for runners that are no longer considered alive: ",
|
53
|
+
invalidAssignments
|
54
|
+
)
|
55
|
+
}
|
41
56
|
|
42
|
-
|
43
|
-
|
57
|
+
// Construct the initial state
|
58
|
+
const now = yield* Clock.currentTimeMillis
|
59
|
+
const runnerState = MutableHashMap.empty<RunnerAddress, RunnerWithMetadata>()
|
60
|
+
for (const [address, runner] of aliveRunners) {
|
61
|
+
MutableHashMap.set(runnerState, address, RunnerWithMetadata({ runner, registeredAt: now }))
|
62
|
+
}
|
44
63
|
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
eventsHub: PubSub.PubSub<ShardingEvent.ShardingEvent>,
|
51
|
-
healthApi: PodsHealth.PodsHealth,
|
52
|
-
podApi: Pods.Pods,
|
53
|
-
stateRepository: Storage.Storage,
|
54
|
-
config: ManagerConfig.ManagerConfig
|
55
|
-
): ShardManager.ShardManager {
|
56
|
-
const getAssignments: Effect.Effect<HashMap.HashMap<ShardId.ShardId, Option.Option<PodAddress.PodAddress>>> = pipe(
|
57
|
-
RefSynchronized.get(stateRef),
|
58
|
-
Effect.map((_) => _.shards)
|
59
|
-
)
|
60
|
-
|
61
|
-
const getShardingEvents = Stream.fromPubSub(eventsHub)
|
62
|
-
|
63
|
-
function register(pod: Pod.Pod) {
|
64
|
-
return pipe(
|
65
|
-
Effect.logDebug("Registering " + (pod.address) + "@" + pod.version),
|
66
|
-
Effect.zipRight(
|
67
|
-
RefSynchronized.updateAndGetEffect(stateRef, (state) =>
|
68
|
-
pipe(
|
69
|
-
Effect.flatMap(Effect.clock, (_) => _.currentTimeMillis),
|
70
|
-
Effect.map((cdt) =>
|
71
|
-
ShardManagerState.make(
|
72
|
-
HashMap.set(state.pods, pod.address, PodWithMetadata.make(pod, cdt)),
|
73
|
-
state.shards
|
74
|
-
)
|
75
|
-
)
|
76
|
-
))
|
77
|
-
),
|
78
|
-
Effect.zipLeft(PubSub.publish(eventsHub, ShardingEvent.PodRegistered(pod.address))),
|
79
|
-
Effect.flatMap((state) => Effect.when(rebalance(false), () => HashSet.size(state.unassignedShards) > 0)),
|
80
|
-
Effect.zipRight(Effect.forkIn(layerScope)(persistPods)),
|
81
|
-
Effect.asVoid
|
82
|
-
)
|
83
|
-
}
|
64
|
+
const shardState = new Map<ShardId, Option.Option<RunnerAddress>>()
|
65
|
+
for (let n = 1; n <= numberOfShards; n++) {
|
66
|
+
const shardId = ShardId.make(n)
|
67
|
+
shardState.set(shardId, Option.fromNullable(assignedShards.get(shardId)))
|
68
|
+
}
|
84
69
|
|
85
|
-
|
86
|
-
|
87
|
-
RefSynchronized.get(stateRef),
|
88
|
-
Effect.map((_) => HashMap.has(_.pods, podAddress))
|
89
|
-
)
|
90
|
-
}
|
70
|
+
return new State(runnerState, shardState)
|
71
|
+
})
|
91
72
|
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
PubSub.publish(eventsHub, ShardingEvent.PodHealthChecked(podAddress)),
|
97
|
-
Effect.zipRight(
|
98
|
-
Effect.unlessEffect(
|
99
|
-
Effect.zipRight(
|
100
|
-
Effect.logWarning(`${podAddress} is not alive, unregistering`),
|
101
|
-
unregister(podAddress)
|
102
|
-
),
|
103
|
-
healthApi.isAlive(podAddress)
|
104
|
-
)
|
105
|
-
)
|
106
|
-
),
|
107
|
-
stateHasPod(podAddress)
|
108
|
-
),
|
109
|
-
Effect.asVoid
|
110
|
-
)
|
111
|
-
}
|
73
|
+
constructor(
|
74
|
+
readonly runners: MutableHashMap.MutableHashMap<RunnerAddress, RunnerWithMetadata>,
|
75
|
+
readonly shards: Map<ShardId, Option.Option<RunnerAddress>>
|
76
|
+
) {}
|
112
77
|
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
Effect.zipLeft(Effect.logDebug(`Unregistering ${podAddress}`)),
|
123
|
-
Effect.bind("unassignments", (_) =>
|
124
|
-
pipe(
|
125
|
-
stateRef,
|
126
|
-
RefSynchronized.modify((state) => [
|
127
|
-
pipe(
|
128
|
-
state.shards,
|
129
|
-
HashMap.filter((pod) => equals(pod)(Option.some(podAddress))),
|
130
|
-
HashMap.keySet
|
131
|
-
),
|
132
|
-
{
|
133
|
-
...state,
|
134
|
-
pods: HashMap.remove(state.pods, podAddress),
|
135
|
-
shards: HashMap.map(state.shards, (_) => equals(_)(Option.some(podAddress)) ? Option.none() : _)
|
136
|
-
}
|
137
|
-
])
|
138
|
-
)),
|
139
|
-
Effect.tap((_) => PubSub.publish(eventsHub, ShardingEvent.PodUnregistered(podAddress))),
|
140
|
-
Effect.tap((_) =>
|
141
|
-
Effect.when(
|
142
|
-
PubSub.publish(eventsHub, ShardingEvent.ShardsUnassigned(podAddress, _.unassignments)),
|
143
|
-
() => HashSet.size(_.unassignments) > 0
|
144
|
-
)
|
145
|
-
),
|
146
|
-
Effect.zipLeft(Effect.forkIn(layerScope)(persistPods)),
|
147
|
-
Effect.zipLeft(Effect.forkIn(layerScope)(rebalance(true)))
|
148
|
-
)
|
149
|
-
return Effect.asVoid(Effect.whenEffect(eff, stateHasPod(podAddress)))
|
78
|
+
get maxVersion(): Option.Option<number> {
|
79
|
+
if (MutableHashMap.size(this.runners) === 0) return Option.none()
|
80
|
+
let version: number | undefined = undefined
|
81
|
+
for (const [, meta] of this.runners) {
|
82
|
+
if (version === undefined || meta.runner.version > version) {
|
83
|
+
version = meta.runner.version
|
84
|
+
}
|
85
|
+
}
|
86
|
+
return Option.some(version!)
|
150
87
|
}
|
151
88
|
|
152
|
-
|
153
|
-
return pipe(
|
154
|
-
|
155
|
-
|
156
|
-
pipe(
|
157
|
-
Schedule.spaced(config.persistRetryInterval),
|
158
|
-
Schedule.andThen(Schedule.recurs(config.persistRetryCount))
|
159
|
-
)
|
160
|
-
),
|
161
|
-
Effect.ignore
|
89
|
+
allRunnersHaveVersion(version: Option.Option<number>): boolean {
|
90
|
+
return version.pipe(
|
91
|
+
Option.map((max) => Arr.every(this.runnerVersions, (version) => version === max)),
|
92
|
+
Option.getOrElse(constFalse)
|
162
93
|
)
|
163
94
|
}
|
164
95
|
|
165
|
-
|
166
|
-
|
167
|
-
RefSynchronized.get(stateRef),
|
168
|
-
Effect.flatMap((state) => stateRepository.saveAssignments(state.shards))
|
169
|
-
)
|
170
|
-
)
|
96
|
+
get shardsPerRunner(): MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>> {
|
97
|
+
const shards = MutableHashMap.empty<RunnerAddress, Set<ShardId>>()
|
171
98
|
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
Effect.flatMap((state) => stateRepository.savePods(HashMap.map(state.pods, (v) => v.pod)))
|
176
|
-
)
|
177
|
-
)
|
178
|
-
|
179
|
-
function updateShardsState(
|
180
|
-
shards: HashSet.HashSet<ShardId.ShardId>,
|
181
|
-
pod: Option.Option<PodAddress.PodAddress>
|
182
|
-
) {
|
183
|
-
return RefSynchronized.updateEffect(stateRef, (state) => {
|
184
|
-
if (Option.isSome(pod) && !HashMap.has(state.pods, pod.value)) {
|
185
|
-
return Effect.fail(new ShardingException.PodNoLongerRegisteredException({ podAddress: pod.value }))
|
186
|
-
}
|
187
|
-
return Effect.succeed({
|
188
|
-
...state,
|
189
|
-
shards: pipe(
|
190
|
-
state.shards,
|
191
|
-
HashMap.map((assignment, shard) => HashSet.has(shards, shard) ? pod : assignment)
|
192
|
-
)
|
193
|
-
})
|
99
|
+
if (MutableHashMap.isEmpty(this.runners)) return shards
|
100
|
+
MutableHashMap.forEach(this.runners, (_, address) => {
|
101
|
+
MutableHashMap.set(shards, address, new Set())
|
194
102
|
})
|
195
|
-
}
|
196
103
|
|
197
|
-
|
198
|
-
|
199
|
-
const
|
104
|
+
for (const [shard, address] of this.shards) {
|
105
|
+
if (Option.isNone(address)) continue
|
106
|
+
const shardIds = Option.getOrUndefined(MutableHashMap.get(shards, address.value))!
|
107
|
+
shardIds.add(shard)
|
108
|
+
}
|
200
109
|
|
201
|
-
|
202
|
-
|
203
|
-
: decideAssignmentsForUnbalancedShards(state, config.rebalanceRate)
|
110
|
+
return shards
|
111
|
+
}
|
204
112
|
|
205
|
-
|
113
|
+
get averageShardsPerRunner(): number {
|
114
|
+
const runnerCount = MutableHashMap.size(this.runners)
|
115
|
+
return runnerCount > 0 ? this.shards.size / runnerCount : 0
|
116
|
+
}
|
206
117
|
|
207
|
-
|
208
|
-
|
209
|
-
|
210
|
-
|
118
|
+
get unassignedShards(): Array<ShardId> {
|
119
|
+
const shardIds: Array<ShardId> = []
|
120
|
+
for (const [shard, address] of this.shards) {
|
121
|
+
if (Option.isNone(address)) {
|
122
|
+
shardIds.push(shard)
|
211
123
|
}
|
124
|
+
}
|
125
|
+
return shardIds
|
126
|
+
}
|
212
127
|
|
213
|
-
|
214
|
-
|
215
|
-
|
216
|
-
|
217
|
-
|
218
|
-
|
219
|
-
|
220
|
-
|
221
|
-
onFailure: () => Chunk.fromIterable([pod]),
|
222
|
-
onSuccess: () => Chunk.empty<PodAddress.PodAddress>()
|
223
|
-
})
|
224
|
-
),
|
225
|
-
{ concurrency: "inherit" }
|
226
|
-
),
|
227
|
-
Effect.map(Chunk.fromIterable),
|
228
|
-
Effect.map((_) => Chunk.flatten(_)),
|
229
|
-
Effect.map(HashSet.fromIterable)
|
230
|
-
)
|
128
|
+
private get runnerVersions(): Array<number> {
|
129
|
+
const runnerVersions: Array<number> = []
|
130
|
+
for (const [, meta] of this.runners) {
|
131
|
+
runnerVersions.push(meta.runner.version)
|
132
|
+
}
|
133
|
+
return runnerVersions
|
134
|
+
}
|
135
|
+
}
|
231
136
|
|
232
|
-
|
233
|
-
|
234
|
-
|
235
|
-
|
236
|
-
|
237
|
-
|
238
|
-
|
239
|
-
)
|
137
|
+
/** @internal */
|
138
|
+
export interface RunnerWithMetadata {
|
139
|
+
readonly runner: Runner
|
140
|
+
readonly registeredAt: number
|
141
|
+
}
|
142
|
+
/** @internal */
|
143
|
+
export const RunnerWithMetadata = (runner: RunnerWithMetadata): RunnerWithMetadata => runner
|
240
144
|
|
241
|
-
|
242
|
-
|
243
|
-
|
244
|
-
|
245
|
-
|
145
|
+
/** @internal */
|
146
|
+
export function decideAssignmentsForUnassignedShards(state: State): readonly [
|
147
|
+
assignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
|
148
|
+
unassignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
|
149
|
+
changes: MutableHashSet.MutableHashSet<RunnerAddress>
|
150
|
+
] {
|
151
|
+
return pickNewRunners(state.unassignedShards, state, true, 1)
|
152
|
+
}
|
246
153
|
|
247
|
-
|
248
|
-
|
249
|
-
|
250
|
-
|
251
|
-
)
|
154
|
+
const allocationOrder: Order.Order<[ShardId, number, number]> = Order.combine(
|
155
|
+
Order.mapInput(Order.number, ([, shards]) => shards),
|
156
|
+
Order.mapInput(Order.number, ([, , registeredAt]) => registeredAt)
|
157
|
+
)
|
252
158
|
|
253
|
-
|
254
|
-
|
255
|
-
|
256
|
-
|
257
|
-
|
258
|
-
|
259
|
-
|
260
|
-
|
261
|
-
|
262
|
-
|
263
|
-
|
264
|
-
|
265
|
-
|
266
|
-
|
267
|
-
|
268
|
-
|
269
|
-
|
270
|
-
|
271
|
-
|
272
|
-
|
273
|
-
|
274
|
-
|
275
|
-
|
276
|
-
|
277
|
-
|
278
|
-
|
279
|
-
|
280
|
-
|
281
|
-
|
282
|
-
|
283
|
-
|
284
|
-
|
159
|
+
/**
 * Computes a rebalancing plan that moves shards away from over-loaded
 * runners toward under-loaded ones.
 *
 * Shards are only collected for reallocation when every runner is on the
 * maximum version (i.e. no rolling upgrade is in progress); otherwise the
 * candidate list stays empty and `pickNewRunners` receives nothing to move.
 *
 * @param state - Current shard manager state.
 * @param rate - Fraction of the total shard count that may be newly assigned
 *   to any single runner in this rebalance pass (throttle for non-immediate
 *   rebalancing).
 * @returns `[assignments, unassignments, changes]` as produced by
 *   `pickNewRunners`.
 *
 * @internal
 */
export function decideAssignmentsForUnbalancedShards(state: State, rate: number): readonly [
  assignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
  unassignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
  changes: MutableHashSet.MutableHashSet<RunnerAddress>
] {
  // Snapshot reused (and mutated) by pickNewRunners below.
  const shardsPerRunner = state.shardsPerRunner
  const maxVersion = state.maxVersion
  // Candidates: [shardId, negated shard count of current runner, runner registeredAt]
  const extraShardsToAllocate = Arr.empty<[ShardId, shardsInverse: number, registeredAt: number]>()

  if (state.allRunnersHaveVersion(maxVersion)) {
    const averageShardsPerRunner = state.averageShardsPerRunner
    MutableHashMap.forEach(shardsPerRunner, (shards) => {
      // Count how many extra shards there are compared to the average
      // (may be fractional; takeRandom truncates via Array.prototype.slice).
      const extraShards = Math.max(0, shards.size - averageShardsPerRunner)
      // Pick the surplus shards at random so repeated rebalances do not
      // always move the same shards.
      for (const shard of takeRandom(shards, extraShards)) {
        const maybeAddress = state.shards.get(shard) ?? Option.none()
        if (Option.isNone(maybeAddress)) {
          // Unassigned shard: MIN_SAFE_INTEGER sorts it ahead of everything.
          extraShardsToAllocate.push([shard, Number.MIN_SAFE_INTEGER, Number.MIN_SAFE_INTEGER])
          continue
        }
        const address = maybeAddress.value
        extraShardsToAllocate.push([
          shard,
          // Negated size: shards on the most-loaded runners sort first.
          Option.match(MutableHashMap.get(shardsPerRunner, address), {
            onNone: () => Number.MIN_SAFE_INTEGER,
            onSome: (shards) => -shards.size
          }),
          // Earlier-registered runners are rebalanced first on ties.
          Option.match(MutableHashMap.get(state.runners, address), {
            onNone: () => Number.MIN_SAFE_INTEGER,
            onSome: (meta) => meta.registeredAt
          })
        ])
      }
    })
  }

  // In-place sort of the candidate list, then keep just the shard ids.
  const sortedShardsToRebalance = extraShardsToAllocate.sort(allocationOrder).map(([shard]) => shard)

  return pickNewRunners(sortedShardsToRebalance, state, false, rate, shardsPerRunner, maxVersion)
}
|
311
200
|
|
312
|
-
|
313
|
-
|
314
|
-
|
315
|
-
|
201
|
+
/**
 * Core placement routine: for each shard in `shardsToRebalance`, finds the
 * least-loaded eligible runner and records the resulting assignment and
 * (if the shard was previously placed elsewhere) unassignment.
 *
 * Mutates `shardsPerRunner` as it goes so that load comparisons reflect
 * decisions already made within this pass.
 *
 * @param shardsToRebalance - Shards to (re)place, in priority order.
 * @param state - Current shard manager state (runners consulted, not mutated).
 * @param immediate - When `true`, skip the per-runner assignment throttle.
 * @param rate - Max fraction of all shards newly assignable to one runner
 *   (only enforced when `immediate` is `false`).
 * @param shardsPerRunner - Working load map; defaults to a fresh snapshot.
 * @param maybeMaxVersion - Runner version to target; `None` short-circuits
 *   to an empty plan.
 * @returns `[assignments, unassignments, changes]` — shards to assign per
 *   runner, shards to unassign per runner, and all touched runner addresses.
 */
function pickNewRunners(
  shardsToRebalance: ReadonlyArray<ShardId>,
  state: State,
  immediate: boolean,
  rate: number,
  shardsPerRunner = state.shardsPerRunner,
  maybeMaxVersion = state.maxVersion
): readonly [
  assignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
  unassignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
  changes: MutableHashSet.MutableHashSet<RunnerAddress>
] {
  const addressAssignments = MutableHashMap.empty<RunnerAddress, Set<ShardId>>()
  const unassignments = MutableHashMap.empty<RunnerAddress, Set<ShardId>>()
  const changes = MutableHashSet.empty<RunnerAddress>()

  // No max version means no runners at all — nothing can be placed.
  if (Option.isNone(maybeMaxVersion)) {
    return [addressAssignments, unassignments, changes]
  }
  const maxVersion = maybeMaxVersion.value

  for (const shardId of shardsToRebalance) {
    // Find the runner with the fewest assigned shards
    let candidate: RunnerAddress | undefined
    let candidateShards: Set<ShardId> | undefined

    for (const [address, shards] of shardsPerRunner) {
      // Keep only runners with the maximum version
      const maybeRunnerMeta = MutableHashMap.get(state.runners, address)
      if (Option.isNone(maybeRunnerMeta)) continue
      const runnerMeta = maybeRunnerMeta.value
      if (runnerMeta.runner.version !== maxVersion) continue

      // Do not assign to a runner that has unassignments in the same rebalance
      if (MutableHashMap.has(unassignments, address)) continue

      // Do not assign too many shards to each runner unless rebalancing must
      // occur immediately
      if (!immediate) {
        const assignmentCount = Option.getOrUndefined(MutableHashMap.get(addressAssignments, address))?.size ?? 0
        if (assignmentCount >= state.shards.size * rate) continue
      }

      // Track the least-loaded eligible runner seen so far.
      if (candidate === undefined || shards.size < candidateShards!.size) {
        candidate = address
        candidateShards = shards
      }
    }
    // No eligible runner: later shards cannot do better, so stop entirely
    // (break, not continue).
    if (!candidate || !candidateShards) break

    // If the old runner is the same as the new runner, do nothing
    // NOTE(review): addresses are compared via toString() — assumes
    // RunnerAddress has a stable, canonical string form; confirm.
    const oldRunner = Option.getOrUndefined(state.shards.get(shardId) ?? Option.none())
    if (oldRunner && oldRunner.toString() === candidate.toString()) {
      continue
    }
    const oldShards = oldRunner && Option.getOrUndefined(MutableHashMap.get(shardsPerRunner, oldRunner))

    // If the new runner has one less, as many, or more shards than the
    // old runner, do not change anything
    if (oldShards && candidateShards.size + 1 >= oldShards.size) continue

    // Otherwise create a new assignment
    MutableHashMap.modifyAt(
      addressAssignments,
      candidate,
      Option.match({
        onNone: () => Option.some(new Set([shardId])),
        onSome: (shards) => {
          shards.add(shardId)
          return Option.some(shards)
        }
      })
    )
    if (oldRunner) {
      MutableHashMap.modifyAt(
        unassignments,
        oldRunner,
        Option.match({
          onNone: () => Option.some(new Set([shardId])),
          onSome: (shards) => {
            shards.add(shardId)
            return Option.some(shards)
          }
        })
      )
    }

    // Move the shard to the new runner (mutates the working load map so the
    // next iteration sees updated sizes)
    candidateShards.add(shardId)
    if (oldShards) {
      oldShards.delete(shardId)
    }

    // Track changes
    MutableHashSet.add(changes, candidate)
    if (oldRunner) MutableHashSet.add(changes, oldRunner)
  }

  return [addressAssignments, unassignments, changes]
}
|
364
301
|
|
365
|
-
|
366
|
-
|
367
|
-
|
368
|
-
|
369
|
-
)
|
370
|
-
|
371
|
-
|
372
|
-
|
373
|
-
|
374
|
-
HashMap.flatMap((shards, _) => {
|
375
|
-
// count how many extra shards compared to the average
|
376
|
-
const extraShards = Math.max(HashSet.size(shards) - state.averageShardsPerPod.value, 0)
|
377
|
-
return pipe(
|
378
|
-
HashMap.empty(),
|
379
|
-
HashMap.set(_, HashSet.fromIterable(List.take(List.fromIterable(shards), extraShards)))
|
380
|
-
)
|
381
|
-
}),
|
382
|
-
HashSet.fromIterable,
|
383
|
-
HashSet.map((_) => _[1]),
|
384
|
-
HashSet.flatMap((_) => _)
|
385
|
-
)
|
386
|
-
: HashSet.empty()
|
387
|
-
|
388
|
-
/*
|
389
|
-
TODO: port sortBy
|
390
|
-
|
391
|
-
val sortedShardsToRebalance = extraShardsToAllocate.toList.sortBy { shard =>
|
392
|
-
// handle unassigned shards first, then shards on the pods with most shards, then shards on old pods
|
393
|
-
state.shards.get(shard).flatten.fold((Int.MinValue, OffsetDateTime.MIN)) { pod =>
|
394
|
-
(
|
395
|
-
state.shardsPerPod.get(pod).fold(Int.MinValue)(-_.size),
|
396
|
-
state.pods.get(pod).fold(OffsetDateTime.MIN)(_.registered)
|
397
|
-
)
|
398
|
-
}
|
399
|
-
}
|
400
|
-
* */
|
401
|
-
const sortedShardsToRebalance = List.fromIterable(extraShardsToAllocate)
|
402
|
-
return pickNewPods(sortedShardsToRebalance, state, false, rebalanceRate)
|
302
|
+
function takeRandom<A>(self: Iterable<A>, n: number): ReadonlyArray<A> {
|
303
|
+
const array = Array.from(self)
|
304
|
+
let currentIndex = array.length
|
305
|
+
while (currentIndex != 0) {
|
306
|
+
const randomIndex = Math.floor(Math.random() * currentIndex)
|
307
|
+
currentIndex = currentIndex - 1
|
308
|
+
swap(array, currentIndex, randomIndex)
|
309
|
+
}
|
310
|
+
return n < array.length ? array.slice(0, n) : array
|
403
311
|
}
|
404
312
|
|
405
|
-
function
|
406
|
-
|
407
|
-
|
408
|
-
|
409
|
-
|
410
|
-
): readonly [
|
411
|
-
assignments: HashMap.HashMap<PodAddress.PodAddress, HashSet.HashSet<ShardId.ShardId>>,
|
412
|
-
unassignments: HashMap.HashMap<PodAddress.PodAddress, HashSet.HashSet<ShardId.ShardId>>
|
413
|
-
] {
|
414
|
-
const [_, assignments] = pipe(
|
415
|
-
List.reduce(
|
416
|
-
shardsToRebalance,
|
417
|
-
[
|
418
|
-
state.shardsPerPod,
|
419
|
-
List.empty<readonly [ShardId.ShardId, PodAddress.PodAddress]>()
|
420
|
-
] as const,
|
421
|
-
([shardsPerPod, assignments], shard) => {
|
422
|
-
const unassignedPods = pipe(
|
423
|
-
assignments,
|
424
|
-
List.flatMap(([shard, _]) =>
|
425
|
-
pipe(
|
426
|
-
HashMap.get(state.shards, shard),
|
427
|
-
Option.flatten,
|
428
|
-
Option.toArray,
|
429
|
-
List.fromIterable
|
430
|
-
)
|
431
|
-
)
|
432
|
-
)
|
433
|
-
|
434
|
-
// find pod with least amount of shards
|
435
|
-
return pipe(
|
436
|
-
// keep only pods with the max version
|
437
|
-
HashMap.filter(shardsPerPod, (_, pod) => {
|
438
|
-
const maxVersion = state.maxVersion
|
439
|
-
if (Option.isNone(maxVersion)) return true
|
440
|
-
return pipe(
|
441
|
-
HashMap.get(state.pods, pod),
|
442
|
-
Option.map(PodWithMetadata.extractVersion),
|
443
|
-
Option.map((_) => PodWithMetadata.compareVersion(_, maxVersion.value) === 0),
|
444
|
-
Option.getOrElse(() => false)
|
445
|
-
)
|
446
|
-
}),
|
447
|
-
// don't assign too many shards to the same pods, unless we need rebalance immediately
|
448
|
-
HashMap.filter((_, pod) => {
|
449
|
-
if (rebalanceImmediately) return true
|
450
|
-
return (
|
451
|
-
pipe(
|
452
|
-
assignments,
|
453
|
-
List.filter(([_, p]) => equals(p)(pod)),
|
454
|
-
List.size
|
455
|
-
) <
|
456
|
-
HashMap.size(state.shards) * rebalanceRate
|
457
|
-
)
|
458
|
-
}),
|
459
|
-
// don't assign to a pod that was unassigned in the same rebalance
|
460
|
-
HashMap.filter(
|
461
|
-
(_, pod) => !Option.isSome(List.findFirst(unassignedPods, equals(pod)))
|
462
|
-
),
|
463
|
-
minByOption(([_, pods]) => HashSet.size(pods)),
|
464
|
-
Option.match({
|
465
|
-
onNone: () => [shardsPerPod, assignments] as const,
|
466
|
-
onSome: ([pod, shards]) => {
|
467
|
-
const oldPod = Option.flatten(HashMap.get(state.shards, shard))
|
468
|
-
// if old pod is same as new pod, don't change anything
|
469
|
-
if (equals(oldPod)(pod)) {
|
470
|
-
return [shardsPerPod, assignments] as const
|
471
|
-
// if the new pod has more, as much, or only 1 less shard than the old pod, don't change anything
|
472
|
-
} else if (
|
473
|
-
Option.match(HashMap.get(shardsPerPod, pod), { onNone: () => 0, onSome: HashSet.size }) + 1 >=
|
474
|
-
Option.match(
|
475
|
-
oldPod,
|
476
|
-
{
|
477
|
-
onNone: () => Number.MAX_SAFE_INTEGER,
|
478
|
-
onSome: (_) =>
|
479
|
-
Option.match(HashMap.get(shardsPerPod, _), { onNone: () => 0, onSome: HashSet.size })
|
480
|
-
}
|
481
|
-
)
|
482
|
-
) {
|
483
|
-
return [shardsPerPod, assignments] as const
|
484
|
-
|
485
|
-
// otherwise, create a new assignment
|
486
|
-
} else {
|
487
|
-
const unassigned = Option.match(
|
488
|
-
oldPod,
|
489
|
-
{
|
490
|
-
onNone: () => shardsPerPod,
|
491
|
-
onSome: (oldPod) => HashMap.modify(shardsPerPod, oldPod, HashSet.remove(shard))
|
492
|
-
}
|
493
|
-
)
|
494
|
-
return [
|
495
|
-
HashMap.modify(unassigned, pod, (_) => HashSet.add(shards, shard)),
|
496
|
-
List.prepend(assignments, [shard, pod] as const)
|
497
|
-
] as const
|
498
|
-
}
|
499
|
-
}
|
500
|
-
})
|
501
|
-
)
|
502
|
-
}
|
503
|
-
)
|
504
|
-
)
|
505
|
-
|
506
|
-
const unassignments = List.flatMap(assignments, ([shard, _]) =>
|
507
|
-
pipe(
|
508
|
-
Option.flatten(HashMap.get(state.shards, shard)),
|
509
|
-
Option.map((_) => [shard, _] as const),
|
510
|
-
Option.match({ onNone: List.empty, onSome: List.of })
|
511
|
-
))
|
512
|
-
|
513
|
-
const assignmentsPerPod = pipe(
|
514
|
-
assignments,
|
515
|
-
groupBy(([_, pod]) => pod),
|
516
|
-
HashMap.map(HashSet.map(([shardId, _]) => shardId))
|
517
|
-
)
|
518
|
-
const unassignmentsPerPod = pipe(
|
519
|
-
unassignments,
|
520
|
-
groupBy(([_, pod]) => pod),
|
521
|
-
HashMap.map(HashSet.map(([shardId, _]) => shardId))
|
522
|
-
)
|
523
|
-
return [assignmentsPerPod, unassignmentsPerPod] as const
|
313
|
+
function swap<A>(array: Array<A>, i: number, j: number): ReadonlyArray<A> {
|
314
|
+
const tmp = array[i]
|
315
|
+
array[i] = array[j]
|
316
|
+
array[j] = tmp
|
317
|
+
return array
|
524
318
|
}
|
525
|
-
|
526
|
-
/**
|
527
|
-
* @since 1.0.0
|
528
|
-
* @category layers
|
529
|
-
*/
|
530
|
-
export const live = Effect.gen(function*() {
|
531
|
-
const config = yield* ManagerConfig.ManagerConfig
|
532
|
-
const stateRepository = yield* Storage.Storage
|
533
|
-
const healthApi = yield* PodsHealth.PodsHealth
|
534
|
-
const podsApi = yield* Pods.Pods
|
535
|
-
const layerScope = yield* Effect.scope
|
536
|
-
|
537
|
-
const pods = yield* stateRepository.getPods
|
538
|
-
const assignments = yield* stateRepository.getAssignments
|
539
|
-
|
540
|
-
const filteredPods = yield* pipe(
|
541
|
-
Effect.filter(pods, ([podAddress]) => healthApi.isAlive(podAddress), { concurrency: "inherit" }),
|
542
|
-
Effect.map(HashMap.fromIterable)
|
543
|
-
)
|
544
|
-
const filteredAssignments = HashMap.filter(
|
545
|
-
assignments,
|
546
|
-
(pod) => Option.isSome(pod) && HashMap.has(filteredPods, pod.value)
|
547
|
-
)
|
548
|
-
const cdt = yield* Clock.currentTimeMillis
|
549
|
-
const initialState = ShardManagerState.make(
|
550
|
-
HashMap.map(filteredPods, (pod) => PodWithMetadata.make(pod, cdt)),
|
551
|
-
HashMap.union(
|
552
|
-
filteredAssignments,
|
553
|
-
pipe(
|
554
|
-
Chunk.range(1, config.numberOfShards),
|
555
|
-
Chunk.map((n) => [ShardId.make(n), Option.none()] as const),
|
556
|
-
HashMap.fromIterable
|
557
|
-
)
|
558
|
-
)
|
559
|
-
)
|
560
|
-
const state = yield* RefSynchronized.make(initialState)
|
561
|
-
const rebalanceSemaphore = yield* Effect.makeSemaphore(1)
|
562
|
-
const eventsHub = yield* PubSub.unbounded<ShardingEvent.ShardingEvent>()
|
563
|
-
const shardManager = make(
|
564
|
-
layerScope,
|
565
|
-
state,
|
566
|
-
rebalanceSemaphore,
|
567
|
-
eventsHub,
|
568
|
-
healthApi,
|
569
|
-
podsApi,
|
570
|
-
stateRepository,
|
571
|
-
config
|
572
|
-
)
|
573
|
-
yield* Effect.forkIn(layerScope)(shardManager.persistPods)
|
574
|
-
// rebalance immediately if there are unassigned shards
|
575
|
-
yield* shardManager.rebalance(HashSet.size(initialState.unassignedShards) > 0)
|
576
|
-
// start a regular rebalance at the given interval
|
577
|
-
yield* pipe(
|
578
|
-
shardManager.rebalance(false),
|
579
|
-
Effect.repeat(Schedule.spaced(config.rebalanceInterval)),
|
580
|
-
Effect.forkIn(layerScope)
|
581
|
-
)
|
582
|
-
// log info events
|
583
|
-
yield* pipe(
|
584
|
-
shardManager.getShardingEvents,
|
585
|
-
Stream.mapEffect((_) => Effect.logDebug(JSON.stringify(_))),
|
586
|
-
Stream.runDrain,
|
587
|
-
Effect.forkIn(layerScope)
|
588
|
-
)
|
589
|
-
yield* Effect.logDebug("Shard Manager loaded")
|
590
|
-
return shardManager
|
591
|
-
}).pipe(Layer.scoped(shardManagerTag))
|