@effect/cluster 0.0.0-snapshot-d33d8b050b8e3c87dcde9587083e6c1cf733f72b
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AtLeastOnce/package.json +6 -0
- package/AtLeastOnceStorage/package.json +6 -0
- package/Broadcaster/package.json +6 -0
- package/LICENSE +21 -0
- package/ManagerConfig/package.json +6 -0
- package/Message/package.json +6 -0
- package/MessageState/package.json +6 -0
- package/Messenger/package.json +6 -0
- package/Pod/package.json +6 -0
- package/PodAddress/package.json +6 -0
- package/Pods/package.json +6 -0
- package/PodsHealth/package.json +6 -0
- package/PoisonPill/package.json +6 -0
- package/README.md +3 -0
- package/RecipientAddress/package.json +6 -0
- package/RecipientBehaviour/package.json +6 -0
- package/RecipientBehaviourContext/package.json +6 -0
- package/RecipientType/package.json +6 -0
- package/Serialization/package.json +6 -0
- package/SerializedEnvelope/package.json +6 -0
- package/SerializedMessage/package.json +6 -0
- package/ShardId/package.json +6 -0
- package/ShardManager/package.json +6 -0
- package/ShardManagerClient/package.json +6 -0
- package/Sharding/package.json +6 -0
- package/ShardingConfig/package.json +6 -0
- package/ShardingEvent/package.json +6 -0
- package/ShardingException/package.json +6 -0
- package/ShardingRegistrationEvent/package.json +6 -0
- package/Storage/package.json +6 -0
- package/dist/cjs/AtLeastOnce.js +43 -0
- package/dist/cjs/AtLeastOnce.js.map +1 -0
- package/dist/cjs/AtLeastOnceStorage.js +48 -0
- package/dist/cjs/AtLeastOnceStorage.js.map +1 -0
- package/dist/cjs/Broadcaster.js +6 -0
- package/dist/cjs/Broadcaster.js.map +1 -0
- package/dist/cjs/ManagerConfig.js +57 -0
- package/dist/cjs/ManagerConfig.js.map +1 -0
- package/dist/cjs/Message.js +64 -0
- package/dist/cjs/Message.js.map +1 -0
- package/dist/cjs/MessageState.js +78 -0
- package/dist/cjs/MessageState.js.map +1 -0
- package/dist/cjs/Messenger.js +6 -0
- package/dist/cjs/Messenger.js.map +1 -0
- package/dist/cjs/Pod.js +101 -0
- package/dist/cjs/Pod.js.map +1 -0
- package/dist/cjs/PodAddress.js +100 -0
- package/dist/cjs/PodAddress.js.map +1 -0
- package/dist/cjs/Pods.js +58 -0
- package/dist/cjs/Pods.js.map +1 -0
- package/dist/cjs/PodsHealth.js +64 -0
- package/dist/cjs/PodsHealth.js.map +1 -0
- package/dist/cjs/PoisonPill.js +101 -0
- package/dist/cjs/PoisonPill.js.map +1 -0
- package/dist/cjs/RecipientAddress.js +102 -0
- package/dist/cjs/RecipientAddress.js.map +1 -0
- package/dist/cjs/RecipientBehaviour.js +61 -0
- package/dist/cjs/RecipientBehaviour.js.map +1 -0
- package/dist/cjs/RecipientBehaviourContext.js +87 -0
- package/dist/cjs/RecipientBehaviourContext.js.map +1 -0
- package/dist/cjs/RecipientType.js +142 -0
- package/dist/cjs/RecipientType.js.map +1 -0
- package/dist/cjs/Serialization.js +55 -0
- package/dist/cjs/Serialization.js.map +1 -0
- package/dist/cjs/SerializedEnvelope.js +111 -0
- package/dist/cjs/SerializedEnvelope.js.map +1 -0
- package/dist/cjs/SerializedMessage.js +87 -0
- package/dist/cjs/SerializedMessage.js.map +1 -0
- package/dist/cjs/ShardId.js +89 -0
- package/dist/cjs/ShardId.js.map +1 -0
- package/dist/cjs/ShardManager.js +48 -0
- package/dist/cjs/ShardManager.js.map +1 -0
- package/dist/cjs/ShardManagerClient.js +53 -0
- package/dist/cjs/ShardManagerClient.js.map +1 -0
- package/dist/cjs/Sharding.js +129 -0
- package/dist/cjs/Sharding.js.map +1 -0
- package/dist/cjs/ShardingConfig.js +64 -0
- package/dist/cjs/ShardingConfig.js.map +1 -0
- package/dist/cjs/ShardingEvent.js +72 -0
- package/dist/cjs/ShardingEvent.js.map +1 -0
- package/dist/cjs/ShardingException.js +130 -0
- package/dist/cjs/ShardingException.js.map +1 -0
- package/dist/cjs/ShardingRegistrationEvent.js +44 -0
- package/dist/cjs/ShardingRegistrationEvent.js.map +1 -0
- package/dist/cjs/Storage.js +63 -0
- package/dist/cjs/Storage.js.map +1 -0
- package/dist/cjs/index.js +88 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/cjs/internal/atLeastOnce.js +58 -0
- package/dist/cjs/internal/atLeastOnce.js.map +1 -0
- package/dist/cjs/internal/atLeastOnceStorage.js +186 -0
- package/dist/cjs/internal/atLeastOnceStorage.js.map +1 -0
- package/dist/cjs/internal/entityManager.js +191 -0
- package/dist/cjs/internal/entityManager.js.map +1 -0
- package/dist/cjs/internal/entityState.js +70 -0
- package/dist/cjs/internal/entityState.js.map +1 -0
- package/dist/cjs/internal/managerConfig.js +69 -0
- package/dist/cjs/internal/managerConfig.js.map +1 -0
- package/dist/cjs/internal/message.js +68 -0
- package/dist/cjs/internal/message.js.map +1 -0
- package/dist/cjs/internal/messageState.js +102 -0
- package/dist/cjs/internal/messageState.js.map +1 -0
- package/dist/cjs/internal/podWithMetadata.js +77 -0
- package/dist/cjs/internal/podWithMetadata.js.map +1 -0
- package/dist/cjs/internal/pods.js +58 -0
- package/dist/cjs/internal/pods.js.map +1 -0
- package/dist/cjs/internal/podsHealth.js +63 -0
- package/dist/cjs/internal/podsHealth.js.map +1 -0
- package/dist/cjs/internal/recipientBehaviour.js +75 -0
- package/dist/cjs/internal/recipientBehaviour.js.map +1 -0
- package/dist/cjs/internal/recipientBehaviourContext.js +59 -0
- package/dist/cjs/internal/recipientBehaviourContext.js.map +1 -0
- package/dist/cjs/internal/serialization.js +72 -0
- package/dist/cjs/internal/serialization.js.map +1 -0
- package/dist/cjs/internal/shardManager.js +291 -0
- package/dist/cjs/internal/shardManager.js.map +1 -0
- package/dist/cjs/internal/shardManagerClient.js +71 -0
- package/dist/cjs/internal/shardManagerClient.js.map +1 -0
- package/dist/cjs/internal/shardManagerState.js +67 -0
- package/dist/cjs/internal/shardManagerState.js.map +1 -0
- package/dist/cjs/internal/sharding.js +329 -0
- package/dist/cjs/internal/sharding.js.map +1 -0
- package/dist/cjs/internal/shardingConfig.js +79 -0
- package/dist/cjs/internal/shardingConfig.js.map +1 -0
- package/dist/cjs/internal/storage.js +75 -0
- package/dist/cjs/internal/storage.js.map +1 -0
- package/dist/cjs/internal/utils.js +92 -0
- package/dist/cjs/internal/utils.js.map +1 -0
- package/dist/dts/AtLeastOnce.d.ts +20 -0
- package/dist/dts/AtLeastOnce.d.ts.map +1 -0
- package/dist/dts/AtLeastOnceStorage.d.ts +75 -0
- package/dist/dts/AtLeastOnceStorage.d.ts.map +1 -0
- package/dist/dts/Broadcaster.d.ts +32 -0
- package/dist/dts/Broadcaster.d.ts.map +1 -0
- package/dist/dts/ManagerConfig.d.ts +61 -0
- package/dist/dts/ManagerConfig.d.ts.map +1 -0
- package/dist/dts/Message.d.ts +106 -0
- package/dist/dts/Message.d.ts.map +1 -0
- package/dist/dts/MessageState.d.ts +107 -0
- package/dist/dts/MessageState.d.ts.map +1 -0
- package/dist/dts/Messenger.d.ts +32 -0
- package/dist/dts/Messenger.d.ts.map +1 -0
- package/dist/dts/Pod.d.ts +81 -0
- package/dist/dts/Pod.d.ts.map +1 -0
- package/dist/dts/PodAddress.d.ts +80 -0
- package/dist/dts/PodAddress.d.ts.map +1 -0
- package/dist/dts/Pods.d.ts +78 -0
- package/dist/dts/Pods.d.ts.map +1 -0
- package/dist/dts/PodsHealth.d.ts +66 -0
- package/dist/dts/PodsHealth.d.ts.map +1 -0
- package/dist/dts/PoisonPill.d.ts +78 -0
- package/dist/dts/PoisonPill.d.ts.map +1 -0
- package/dist/dts/RecipientAddress.d.ts +57 -0
- package/dist/dts/RecipientAddress.d.ts.map +1 -0
- package/dist/dts/RecipientBehaviour.d.ts +72 -0
- package/dist/dts/RecipientBehaviour.d.ts.map +1 -0
- package/dist/dts/RecipientBehaviourContext.d.ts +83 -0
- package/dist/dts/RecipientBehaviourContext.d.ts.map +1 -0
- package/dist/dts/RecipientType.d.ts +96 -0
- package/dist/dts/RecipientType.d.ts.map +1 -0
- package/dist/dts/Serialization.d.ts +58 -0
- package/dist/dts/Serialization.d.ts.map +1 -0
- package/dist/dts/SerializedEnvelope.d.ts +87 -0
- package/dist/dts/SerializedEnvelope.d.ts.map +1 -0
- package/dist/dts/SerializedMessage.d.ts +66 -0
- package/dist/dts/SerializedMessage.d.ts.map +1 -0
- package/dist/dts/ShardId.d.ts +70 -0
- package/dist/dts/ShardId.d.ts.map +1 -0
- package/dist/dts/ShardManager.d.ts +44 -0
- package/dist/dts/ShardManager.d.ts.map +1 -0
- package/dist/dts/ShardManagerClient.d.ts +50 -0
- package/dist/dts/ShardManagerClient.d.ts.map +1 -0
- package/dist/dts/Sharding.d.ts +146 -0
- package/dist/dts/Sharding.d.ts.map +1 -0
- package/dist/dts/ShardingConfig.d.ts +69 -0
- package/dist/dts/ShardingConfig.d.ts.map +1 -0
- package/dist/dts/ShardingEvent.d.ts +90 -0
- package/dist/dts/ShardingEvent.d.ts.map +1 -0
- package/dist/dts/ShardingException.d.ts +125 -0
- package/dist/dts/ShardingException.d.ts.map +1 -0
- package/dist/dts/ShardingRegistrationEvent.d.ts +44 -0
- package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -0
- package/dist/dts/Storage.d.ts +78 -0
- package/dist/dts/Storage.d.ts.map +1 -0
- package/dist/dts/index.d.ts +113 -0
- package/dist/dts/index.d.ts.map +1 -0
- package/dist/dts/internal/atLeastOnce.d.ts +2 -0
- package/dist/dts/internal/atLeastOnce.d.ts.map +1 -0
- package/dist/dts/internal/atLeastOnceStorage.d.ts +2 -0
- package/dist/dts/internal/atLeastOnceStorage.d.ts.map +1 -0
- package/dist/dts/internal/entityManager.d.ts +2 -0
- package/dist/dts/internal/entityManager.d.ts.map +1 -0
- package/dist/dts/internal/entityState.d.ts +21 -0
- package/dist/dts/internal/entityState.d.ts.map +1 -0
- package/dist/dts/internal/managerConfig.d.ts +2 -0
- package/dist/dts/internal/managerConfig.d.ts.map +1 -0
- package/dist/dts/internal/message.d.ts +9 -0
- package/dist/dts/internal/message.d.ts.map +1 -0
- package/dist/dts/internal/messageState.d.ts +2 -0
- package/dist/dts/internal/messageState.d.ts.map +1 -0
- package/dist/dts/internal/podWithMetadata.d.ts +2 -0
- package/dist/dts/internal/podWithMetadata.d.ts.map +1 -0
- package/dist/dts/internal/pods.d.ts +2 -0
- package/dist/dts/internal/pods.d.ts.map +1 -0
- package/dist/dts/internal/podsHealth.d.ts +2 -0
- package/dist/dts/internal/podsHealth.d.ts.map +1 -0
- package/dist/dts/internal/recipientBehaviour.d.ts +2 -0
- package/dist/dts/internal/recipientBehaviour.d.ts.map +1 -0
- package/dist/dts/internal/recipientBehaviourContext.d.ts +2 -0
- package/dist/dts/internal/recipientBehaviourContext.d.ts.map +1 -0
- package/dist/dts/internal/serialization.d.ts +2 -0
- package/dist/dts/internal/serialization.d.ts.map +1 -0
- package/dist/dts/internal/shardManager.d.ts +12 -0
- package/dist/dts/internal/shardManager.d.ts.map +1 -0
- package/dist/dts/internal/shardManagerClient.d.ts +2 -0
- package/dist/dts/internal/shardManagerClient.d.ts.map +1 -0
- package/dist/dts/internal/shardManagerState.d.ts +26 -0
- package/dist/dts/internal/shardManagerState.d.ts.map +1 -0
- package/dist/dts/internal/sharding.d.ts +2 -0
- package/dist/dts/internal/sharding.d.ts.map +1 -0
- package/dist/dts/internal/shardingConfig.d.ts +2 -0
- package/dist/dts/internal/shardingConfig.d.ts.map +1 -0
- package/dist/dts/internal/storage.d.ts +2 -0
- package/dist/dts/internal/storage.d.ts.map +1 -0
- package/dist/dts/internal/utils.d.ts +2 -0
- package/dist/dts/internal/utils.d.ts.map +1 -0
- package/dist/esm/AtLeastOnce.js +12 -0
- package/dist/esm/AtLeastOnce.js.map +1 -0
- package/dist/esm/AtLeastOnceStorage.js +17 -0
- package/dist/esm/AtLeastOnceStorage.js.map +1 -0
- package/dist/esm/Broadcaster.js +2 -0
- package/dist/esm/Broadcaster.js.map +1 -0
- package/dist/esm/ManagerConfig.js +26 -0
- package/dist/esm/ManagerConfig.js.map +1 -0
- package/dist/esm/Message.js +33 -0
- package/dist/esm/Message.js.map +1 -0
- package/dist/esm/MessageState.js +47 -0
- package/dist/esm/MessageState.js.map +1 -0
- package/dist/esm/Messenger.js +2 -0
- package/dist/esm/Messenger.js.map +1 -0
- package/dist/esm/Pod.js +65 -0
- package/dist/esm/Pod.js.map +1 -0
- package/dist/esm/PodAddress.js +64 -0
- package/dist/esm/PodAddress.js.map +1 -0
- package/dist/esm/Pods.js +27 -0
- package/dist/esm/Pods.js.map +1 -0
- package/dist/esm/PodsHealth.js +33 -0
- package/dist/esm/PodsHealth.js.map +1 -0
- package/dist/esm/PoisonPill.js +65 -0
- package/dist/esm/PoisonPill.js.map +1 -0
- package/dist/esm/RecipientAddress.js +67 -0
- package/dist/esm/RecipientAddress.js.map +1 -0
- package/dist/esm/RecipientBehaviour.js +30 -0
- package/dist/esm/RecipientBehaviour.js.map +1 -0
- package/dist/esm/RecipientBehaviourContext.js +56 -0
- package/dist/esm/RecipientBehaviourContext.js.map +1 -0
- package/dist/esm/RecipientType.js +105 -0
- package/dist/esm/RecipientType.js.map +1 -0
- package/dist/esm/Serialization.js +24 -0
- package/dist/esm/Serialization.js.map +1 -0
- package/dist/esm/SerializedEnvelope.js +75 -0
- package/dist/esm/SerializedEnvelope.js.map +1 -0
- package/dist/esm/SerializedMessage.js +51 -0
- package/dist/esm/SerializedMessage.js.map +1 -0
- package/dist/esm/ShardId.js +54 -0
- package/dist/esm/ShardId.js.map +1 -0
- package/dist/esm/ShardManager.js +17 -0
- package/dist/esm/ShardManager.js.map +1 -0
- package/dist/esm/ShardManagerClient.js +22 -0
- package/dist/esm/ShardManagerClient.js.map +1 -0
- package/dist/esm/Sharding.js +98 -0
- package/dist/esm/Sharding.js.map +1 -0
- package/dist/esm/ShardingConfig.js +33 -0
- package/dist/esm/ShardingConfig.js.map +1 -0
- package/dist/esm/ShardingEvent.js +62 -0
- package/dist/esm/ShardingEvent.js.map +1 -0
- package/dist/esm/ShardingException.js +91 -0
- package/dist/esm/ShardingException.js.map +1 -0
- package/dist/esm/ShardingRegistrationEvent.js +36 -0
- package/dist/esm/ShardingRegistrationEvent.js.map +1 -0
- package/dist/esm/Storage.js +32 -0
- package/dist/esm/Storage.js.map +1 -0
- package/dist/esm/index.js +113 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/internal/atLeastOnce.js +26 -0
- package/dist/esm/internal/atLeastOnce.js.map +1 -0
- package/dist/esm/internal/atLeastOnceStorage.js +154 -0
- package/dist/esm/internal/atLeastOnceStorage.js.map +1 -0
- package/dist/esm/internal/entityManager.js +159 -0
- package/dist/esm/internal/entityManager.js.map +1 -0
- package/dist/esm/internal/entityState.js +35 -0
- package/dist/esm/internal/entityState.js.map +1 -0
- package/dist/esm/internal/managerConfig.js +38 -0
- package/dist/esm/internal/managerConfig.js.map +1 -0
- package/dist/esm/internal/message.js +32 -0
- package/dist/esm/internal/message.js.map +1 -0
- package/dist/esm/internal/messageState.js +66 -0
- package/dist/esm/internal/messageState.js.map +1 -0
- package/dist/esm/internal/podWithMetadata.js +41 -0
- package/dist/esm/internal/podWithMetadata.js.map +1 -0
- package/dist/esm/internal/pods.js +25 -0
- package/dist/esm/internal/pods.js.map +1 -0
- package/dist/esm/internal/podsHealth.js +30 -0
- package/dist/esm/internal/podsHealth.js.map +1 -0
- package/dist/esm/internal/recipientBehaviour.js +42 -0
- package/dist/esm/internal/recipientBehaviour.js.map +1 -0
- package/dist/esm/internal/recipientBehaviourContext.js +26 -0
- package/dist/esm/internal/recipientBehaviourContext.js.map +1 -0
- package/dist/esm/internal/serialization.js +39 -0
- package/dist/esm/internal/serialization.js.map +1 -0
- package/dist/esm/internal/shardManager.js +256 -0
- package/dist/esm/internal/shardManager.js.map +1 -0
- package/dist/esm/internal/shardManagerClient.js +38 -0
- package/dist/esm/internal/shardManagerClient.js.map +1 -0
- package/dist/esm/internal/shardManagerState.js +36 -0
- package/dist/esm/internal/shardManagerState.js.map +1 -0
- package/dist/esm/internal/sharding.js +288 -0
- package/dist/esm/internal/sharding.js.map +1 -0
- package/dist/esm/internal/shardingConfig.js +47 -0
- package/dist/esm/internal/shardingConfig.js.map +1 -0
- package/dist/esm/internal/storage.js +42 -0
- package/dist/esm/internal/storage.js.map +1 -0
- package/dist/esm/internal/utils.js +56 -0
- package/dist/esm/internal/utils.js.map +1 -0
- package/dist/esm/package.json +4 -0
- package/package.json +259 -0
- package/src/AtLeastOnce.ts +28 -0
- package/src/AtLeastOnceStorage.ts +96 -0
- package/src/Broadcaster.ts +48 -0
- package/src/ManagerConfig.ts +67 -0
- package/src/Message.ts +132 -0
- package/src/MessageState.ts +126 -0
- package/src/Messenger.ts +40 -0
- package/src/Pod.ts +95 -0
- package/src/PodAddress.ts +94 -0
- package/src/Pods.ts +100 -0
- package/src/PodsHealth.ts +74 -0
- package/src/PoisonPill.ts +105 -0
- package/src/RecipientAddress.ts +72 -0
- package/src/RecipientBehaviour.ts +108 -0
- package/src/RecipientBehaviourContext.ts +101 -0
- package/src/RecipientType.ts +134 -0
- package/src/Serialization.ts +72 -0
- package/src/SerializedEnvelope.ts +109 -0
- package/src/SerializedMessage.ts +82 -0
- package/src/ShardId.ts +79 -0
- package/src/ShardManager.ts +53 -0
- package/src/ShardManagerClient.ts +57 -0
- package/src/Sharding.ts +214 -0
- package/src/ShardingConfig.ts +76 -0
- package/src/ShardingEvent.ts +121 -0
- package/src/ShardingException.ts +151 -0
- package/src/ShardingRegistrationEvent.ts +62 -0
- package/src/Storage.ts +92 -0
- package/src/index.ts +139 -0
- package/src/internal/atLeastOnce.ts +59 -0
- package/src/internal/atLeastOnceStorage.ts +218 -0
- package/src/internal/entityManager.ts +404 -0
- package/src/internal/entityState.ts +64 -0
- package/src/internal/managerConfig.ts +84 -0
- package/src/internal/message.ts +64 -0
- package/src/internal/messageState.ts +98 -0
- package/src/internal/podWithMetadata.ts +72 -0
- package/src/internal/pods.ts +29 -0
- package/src/internal/podsHealth.ts +39 -0
- package/src/internal/recipientBehaviour.ts +133 -0
- package/src/internal/recipientBehaviourContext.ts +70 -0
- package/src/internal/serialization.ts +59 -0
- package/src/internal/shardManager.ts +593 -0
- package/src/internal/shardManagerClient.ts +49 -0
- package/src/internal/shardManagerState.ts +80 -0
- package/src/internal/sharding.ts +793 -0
- package/src/internal/shardingConfig.ts +97 -0
- package/src/internal/storage.ts +60 -0
- package/src/internal/utils.ts +54 -0
package/src/internal/shardManager.ts
@@ -0,0 +1,593 @@
```ts
/**
 * @since 1.0.0
 */
import * as Chunk from "effect/Chunk"
import * as Clock from "effect/Clock"
import { GenericTag } from "effect/Context"
import * as Effect from "effect/Effect"
import { equals } from "effect/Equal"
import { pipe } from "effect/Function"
import * as HashMap from "effect/HashMap"
import * as HashSet from "effect/HashSet"
import * as Layer from "effect/Layer"
import * as List from "effect/List"
import * as Option from "effect/Option"
import * as PubSub from "effect/PubSub"
import * as Schedule from "effect/Schedule"
import type * as Scope from "effect/Scope"
import * as Stream from "effect/Stream"
import * as RefSynchronized from "effect/SynchronizedRef"
import * as ManagerConfig from "../ManagerConfig.js"
import type * as Pod from "../Pod.js"
import type * as PodAddress from "../PodAddress.js"
import * as Pods from "../Pods.js"
import * as PodsHealth from "../PodsHealth.js"
import * as ShardId from "../ShardId.js"
import * as ShardingEvent from "../ShardingEvent.js"
import * as ShardingException from "../ShardingException.js"
import type * as ShardManager from "../ShardManager.js"
import * as Storage from "../Storage.js"
import * as PodWithMetadata from "./podWithMetadata.js"
import * as ShardManagerState from "./shardManagerState.js"
import { groupBy, minByOption } from "./utils.js"

/** @internal */
const ShardManagerSymbolKey = "@effect/cluster/ShardManager"

/** @internal */
export const ShardManagerTypeId: ShardManager.ShardManagerTypeId = Symbol.for(
  ShardManagerSymbolKey
) as ShardManager.ShardManagerTypeId

/** @internal */
export const shardManagerTag = GenericTag<ShardManager.ShardManager>(ShardManagerSymbolKey)

/** @internal */
function make(
  layerScope: Scope.Scope,
  stateRef: RefSynchronized.SynchronizedRef<ShardManagerState.ShardManagerState>,
  rebalanceSemaphore: Effect.Semaphore,
  eventsHub: PubSub.PubSub<ShardingEvent.ShardingEvent>,
  healthApi: PodsHealth.PodsHealth,
  podApi: Pods.Pods,
  stateRepository: Storage.Storage,
  config: ManagerConfig.ManagerConfig
): ShardManager.ShardManager {
  const getAssignments: Effect.Effect<HashMap.HashMap<ShardId.ShardId, Option.Option<PodAddress.PodAddress>>> = pipe(
    RefSynchronized.get(stateRef),
    Effect.map((_) => _.shards)
  )

  const getShardingEvents = Stream.fromPubSub(eventsHub)

  function register(pod: Pod.Pod) {
    return pipe(
      Effect.logDebug("Registering " + (pod.address) + "@" + pod.version),
      Effect.zipRight(
        RefSynchronized.updateAndGetEffect(stateRef, (state) =>
          pipe(
            Effect.flatMap(Effect.clock, (_) => _.currentTimeMillis),
            Effect.map((cdt) =>
              ShardManagerState.make(
                HashMap.set(state.pods, pod.address, PodWithMetadata.make(pod, cdt)),
                state.shards
              )
            )
          ))
      ),
      Effect.zipLeft(PubSub.publish(eventsHub, ShardingEvent.PodRegistered(pod.address))),
      Effect.flatMap((state) => Effect.when(rebalance(false), () => HashSet.size(state.unassignedShards) > 0)),
      Effect.zipRight(Effect.forkIn(layerScope)(persistPods)),
      Effect.asVoid
    )
  }

  function stateHasPod(podAddress: PodAddress.PodAddress) {
    return pipe(
      RefSynchronized.get(stateRef),
      Effect.map((_) => HashMap.has(_.pods, podAddress))
    )
  }

  function notifyUnhealthyPod(podAddress: PodAddress.PodAddress) {
    return pipe(
      Effect.whenEffect(
        pipe(
          PubSub.publish(eventsHub, ShardingEvent.PodHealthChecked(podAddress)),
          Effect.zipRight(
            Effect.unlessEffect(
              Effect.zipRight(
                Effect.logWarning(`${podAddress} is not alive, unregistering`),
                unregister(podAddress)
              ),
              healthApi.isAlive(podAddress)
            )
          )
        ),
        stateHasPod(podAddress)
      ),
      Effect.asVoid
    )
  }

  const checkAllPodsHealth = pipe(
    RefSynchronized.get(stateRef),
    Effect.map((_) => HashMap.keySet(_.pods)),
    Effect.flatMap((_) => (Effect.forEach(_, notifyUnhealthyPod, { concurrency: 4, discard: true })))
  )

  function unregister(podAddress: PodAddress.PodAddress) {
    const eff = pipe(
      Effect.Do,
      Effect.zipLeft(Effect.logDebug(`Unregistering ${podAddress}`)),
      Effect.bind("unassignments", (_) =>
        pipe(
          stateRef,
          RefSynchronized.modify((state) => [
            pipe(
              state.shards,
              HashMap.filter((pod) => equals(pod)(Option.some(podAddress))),
              HashMap.keySet
            ),
            {
              ...state,
              pods: HashMap.remove(state.pods, podAddress),
              shards: HashMap.map(state.shards, (_) => equals(_)(Option.some(podAddress)) ? Option.none() : _)
            }
          ])
        )),
      Effect.tap((_) => PubSub.publish(eventsHub, ShardingEvent.PodUnregistered(podAddress))),
      Effect.tap((_) =>
        Effect.when(
          PubSub.publish(eventsHub, ShardingEvent.ShardsUnassigned(podAddress, _.unassignments)),
          () => HashSet.size(_.unassignments) > 0
        )
      ),
      Effect.zipLeft(Effect.forkIn(layerScope)(persistPods)),
      Effect.zipLeft(Effect.forkIn(layerScope)(rebalance(true)))
    )
    return Effect.asVoid(Effect.whenEffect(eff, stateHasPod(podAddress)))
  }

  function withRetry<A, E>(zio: Effect.Effect<A, E>): Effect.Effect<void> {
    return pipe(
      zio,
      Effect.retry(
        pipe(
          Schedule.spaced(config.persistRetryInterval),
          Schedule.andThen(Schedule.recurs(config.persistRetryCount))
        )
      ),
      Effect.ignore
    )
  }

  const persistAssignments = withRetry(
    pipe(
      RefSynchronized.get(stateRef),
      Effect.flatMap((state) => stateRepository.saveAssignments(state.shards))
    )
  )

  const persistPods = withRetry(
    pipe(
      RefSynchronized.get(stateRef),
      Effect.flatMap((state) => stateRepository.savePods(HashMap.map(state.pods, (v) => v.pod)))
    )
  )

  function updateShardsState(
    shards: HashSet.HashSet<ShardId.ShardId>,
    pod: Option.Option<PodAddress.PodAddress>
  ) {
    return RefSynchronized.updateEffect(stateRef, (state) => {
      if (Option.isSome(pod) && !HashMap.has(state.pods, pod.value)) {
        return Effect.fail(new ShardingException.PodNoLongerRegisteredException({ podAddress: pod.value }))
      }
      return Effect.succeed({
        ...state,
        shards: pipe(
          state.shards,
          HashMap.map((assignment, shard) => HashSet.has(shards, shard) ? pod : assignment)
        )
      })
    })
  }

  function rebalance(rebalanceImmediately: boolean): Effect.Effect<void> {
    const algo = Effect.gen(function*(_) {
      const state = yield* _(RefSynchronized.get(stateRef))

      const [assignments, unassignments] = rebalanceImmediately || HashSet.size(state.unassignedShards) > 0
        ? decideAssignmentsForUnassignedShards(state)
        : decideAssignmentsForUnbalancedShards(state, config.rebalanceRate)

      const areChanges = HashMap.size(assignments) > 0 || HashMap.size(unassignments) > 0

      if (areChanges) {
        yield* _(Effect.logDebug(
          "Rebalance (rebalanceImmidiately=" + JSON.stringify(rebalanceImmediately) + ")"
        ))
      }

      const failedPingedPods = yield* _(
        HashSet.union(HashMap.keySet(assignments), HashMap.keySet(unassignments)),
        Effect.forEach(
          (pod) =>
            pipe(
              podApi.ping(pod),
              Effect.timeout(config.pingTimeout),
              Effect.match({
                onFailure: () => Chunk.fromIterable([pod]),
                onSuccess: () => Chunk.empty<PodAddress.PodAddress>()
              })
            ),
          { concurrency: "inherit" }
        ),
        Effect.map(Chunk.fromIterable),
        Effect.map((_) => Chunk.flatten(_)),
        Effect.map(HashSet.fromIterable)
      )

      const shardsToRemove = pipe(
        List.fromIterable(assignments),
        List.appendAll(List.fromIterable(unassignments)),
        List.filter(([pod, __]) => HashSet.has(failedPingedPods, pod)),
        List.map(([_, shards]) => List.fromIterable(shards)),
        List.flatMap((_) => _), // TODO: List is missing flatMap
        HashSet.fromIterable
      )

      const readyAssignments = pipe(
        assignments,
        HashMap.map(HashSet.difference(shardsToRemove)),
        HashMap.filter((__) => HashSet.size(__) > 0)
      )

      const readyUnassignments = pipe(
        unassignments,
        HashMap.map(HashSet.difference(shardsToRemove)),
        HashMap.filter((__) => HashSet.size(__) > 0)
      )

      const [failedUnassignedPods, failedUnassignedShards] = yield* _(
        Effect.forEach(readyUnassignments, ([pod, shards]) =>
          pipe(
            podApi.unassignShards(pod, shards),
            Effect.zipRight(updateShardsState(shards, Option.none())),
            Effect.matchEffect({
              onFailure: () => Effect.succeed([HashSet.fromIterable([pod]), shards] as const),
              onSuccess: () =>
                pipe(
                  PubSub.publish(eventsHub, ShardingEvent.ShardsUnassigned(pod, shards)),
                  Effect.as(
                    [
                      HashSet.empty<PodAddress.PodAddress>(),
                      HashSet.empty<ShardId.ShardId>()
                    ] as const
                  )
                )
            })
          ), { concurrency: "inherit" }),
        Effect.map(Chunk.fromIterable),
        Effect.map((_) => Chunk.unzip(_)),
        Effect.map(
          ([pods, shards]) => [Chunk.map(pods, Chunk.fromIterable), Chunk.map(shards, Chunk.fromIterable)] as const
        ),
        Effect.map(
          ([pods, shards]) =>
            [
              HashSet.fromIterable(Chunk.flatten(pods)),
              HashSet.fromIterable(Chunk.flatten(shards))
            ] as const
        )
      )

      // remove assignments of shards that couldn't be unassigned, as well as faulty pods.
      const filteredAssignments = pipe(
        HashMap.removeMany(readyAssignments, failedUnassignedPods),
        HashMap.map((shards, __) => HashSet.difference(shards, failedUnassignedShards))
      )

      // then do the assignments
      const failedAssignedPods = yield* _(
        Effect.forEach(filteredAssignments, ([pod, shards]) =>
          pipe(
            podApi.assignShards(pod, shards),
            Effect.zipRight(updateShardsState(shards, Option.some(pod))),
            Effect.matchEffect({
              onFailure: () => Effect.succeed(Chunk.fromIterable([pod])),
              onSuccess: () =>
                pipe(
                  PubSub.publish(eventsHub, ShardingEvent.ShardsAssigned(pod, shards)),
                  Effect.as(Chunk.empty())
                )
            })
          ), { concurrency: "inherit" }),
        Effect.map(Chunk.fromIterable),
        Effect.map((_) => Chunk.flatten(_)),
        Effect.map(HashSet.fromIterable)
      )

      const failedPods = HashSet.union(
        HashSet.union(failedPingedPods, failedUnassignedPods),
        failedAssignedPods
      )

      // check if failing pods are still up
      yield* _(Effect.forkIn(layerScope)(Effect.forEach(failedPods, (_) => notifyUnhealthyPod(_), { discard: true })))

      if (HashSet.size(failedPods) > 0) {
        yield* _(
          Effect.logDebug(
            "Failed to rebalance pods: " +
              (failedPods) +
              " failed pinged: " + (failedPingedPods) +
              " failed assigned: " + (failedAssignedPods) +
              " failed unassigned: " + (failedUnassignedPods)
          )
        )
      }

      // retry rebalancing later if there was any failure
      if (HashSet.size(failedPods) > 0 && rebalanceImmediately) {
        yield* _(
          Effect.sleep(config.rebalanceRetryInterval),
          Effect.zipRight(rebalance(rebalanceImmediately)),
          Effect.forkIn(layerScope)
        )
      }

      // persist state changes to Redis
      if (areChanges) {
        yield* _(Effect.forkIn(layerScope)(persistAssignments))
      }
    })

    return rebalanceSemaphore.withPermits(1)(algo)
  }

  return {
    getAssignments,
    getShardingEvents,
    register,
    unregister,
    persistPods,
    rebalance,
    notifyUnhealthyPod,
    checkAllPodsHealth
  }
}

/** @internal */
export function decideAssignmentsForUnassignedShards(state: ShardManagerState.ShardManagerState) {
  return pickNewPods(List.fromIterable(state.unassignedShards), state, true, 1)
}

/** @internal */
export function decideAssignmentsForUnbalancedShards(
  state: ShardManagerState.ShardManagerState,
  rebalanceRate: number
) {
  // don't do regular rebalance in the middle of a rolling update
  const extraShardsToAllocate = state.allPodsHaveMaxVersion
    ? pipe(
      state.shardsPerPod,
      HashMap.flatMap((shards, _) => {
        // count how many extra shards compared to the average
        const extraShards = Math.max(HashSet.size(shards) - state.averageShardsPerPod.value, 0)
        return pipe(
          HashMap.empty(),
          HashMap.set(_, HashSet.fromIterable(List.take(List.fromIterable(shards), extraShards)))
        )
      }),
      HashSet.fromIterable,
      HashSet.map((_) => _[1]),
      HashSet.flatMap((_) => _)
    )
    : HashSet.empty()

  /*
    TODO: port sortBy

    val sortedShardsToRebalance = extraShardsToAllocate.toList.sortBy { shard =>
      // handle unassigned shards first, then shards on the pods with most shards, then shards on old pods
      state.shards.get(shard).flatten.fold((Int.MinValue, OffsetDateTime.MIN)) { pod =>
        (
          state.shardsPerPod.get(pod).fold(Int.MinValue)(-_.size),
          state.pods.get(pod).fold(OffsetDateTime.MIN)(_.registered)
        )
      }
    }
   * */
  const sortedShardsToRebalance = List.fromIterable(extraShardsToAllocate)
  return pickNewPods(sortedShardsToRebalance, state, false, rebalanceRate)
}

function pickNewPods(
  shardsToRebalance: List.List<ShardId.ShardId>,
  state: ShardManagerState.ShardManagerState,
  rebalanceImmediately: boolean,
  rebalanceRate: number
): readonly [
  assignments: HashMap.HashMap<PodAddress.PodAddress, HashSet.HashSet<ShardId.ShardId>>,
  unassignments: HashMap.HashMap<PodAddress.PodAddress, HashSet.HashSet<ShardId.ShardId>>
] {
  const [_, assignments] = pipe(
    List.reduce(
      shardsToRebalance,
      [
        state.shardsPerPod,
        List.empty<readonly [ShardId.ShardId, PodAddress.PodAddress]>()
      ] as const,
      ([shardsPerPod, assignments], shard) => {
        const unassignedPods = pipe(
          assignments,
          List.flatMap(([shard, _]) =>
            pipe(
              HashMap.get(state.shards, shard),
              Option.flatten,
              Option.toArray,
              List.fromIterable
            )
          )
        )

        // find pod with least amount of shards
        return pipe(
          // keep only pods with the max version
          HashMap.filter(shardsPerPod, (_, pod) => {
            const maxVersion = state.maxVersion
            if (Option.isNone(maxVersion)) return true
            return pipe(
              HashMap.get(state.pods, pod),
              Option.map(PodWithMetadata.extractVersion),
              Option.map((_) => PodWithMetadata.compareVersion(_, maxVersion.value) === 0),
              Option.getOrElse(() => false)
            )
          }),
          // don't assign too many shards to the same pods, unless we need rebalance immediately
          HashMap.filter((_, pod) => {
            if (rebalanceImmediately) return true
            return (
              pipe(
                assignments,
                List.filter(([_, p]) => equals(p)(pod)),
                List.size
              ) <
                HashMap.size(state.shards) * rebalanceRate
            )
          }),
          // don't assign to a pod that was unassigned in the same rebalance
          HashMap.filter(
            (_, pod) => !Option.isSome(List.findFirst(unassignedPods, equals(pod)))
          ),
          minByOption(([_, pods]) => HashSet.size(pods)),
          Option.match({
            onNone: () => [shardsPerPod, assignments] as const,
            onSome: ([pod, shards]) => {
              const oldPod = Option.flatten(HashMap.get(state.shards, shard))
              // if old pod is same as new pod, don't change anything
              if (equals(oldPod)(pod)) {
                return [shardsPerPod, assignments] as const
                // if the new pod has more, as much, or only 1 less shard than the old pod, don't change anything
              } else if (
                Option.match(HashMap.get(shardsPerPod, pod), { onNone: () => 0, onSome: HashSet.size }) + 1 >=
                  Option.match(
                    oldPod,
                    {
                      onNone: () => Number.MAX_SAFE_INTEGER,
                      onSome: (_) =>
                        Option.match(HashMap.get(shardsPerPod, _), { onNone: () => 0, onSome: HashSet.size })
                    }
                  )
              ) {
                return [shardsPerPod, assignments] as const

                // otherwise, create a new assignment
              } else {
                const unassigned = Option.match(
                  oldPod,
                  {
                    onNone: () => shardsPerPod,
                    onSome: (oldPod) => HashMap.modify(shardsPerPod, oldPod, HashSet.remove(shard))
                  }
                )
                return [
                  HashMap.modify(unassigned, pod, (_) => HashSet.add(shards, shard)),
                  List.prepend(assignments, [shard, pod] as const)
                ] as const
              }
            }
          })
        )
      }
    )
  )

  const unassignments = List.flatMap(assignments, ([shard, _]) =>
    pipe(
      Option.flatten(HashMap.get(state.shards, shard)),
      Option.map((_) => [shard, _] as const),
      Option.match({ onNone: List.empty, onSome: List.of })
    ))

  const assignmentsPerPod = pipe(
    assignments,
    groupBy(([_, pod]) => pod),
    HashMap.map(HashSet.map(([shardId, _]) => shardId))
  )
  const unassignmentsPerPod = pipe(
    unassignments,
    groupBy(([_, pod]) => pod),
    HashMap.map(HashSet.map(([shardId, _]) => shardId))
  )
  return [assignmentsPerPod, unassignmentsPerPod] as const
}

/**
 * @since 1.0.0
 * @category layers
 */
export const live = Effect.gen(function*(_) {
  const config = yield* _(ManagerConfig.ManagerConfig)
  const stateRepository = yield* _(Storage.Storage)
  const healthApi = yield* _(PodsHealth.PodsHealth)
  const podsApi = yield* _(Pods.Pods)
  const layerScope = yield* _(Effect.scope)

  const pods = yield* _(stateRepository.getPods)
  const assignments = yield* _(stateRepository.getAssignments)

  const filteredPods = yield* _(
    Effect.filter(pods, ([podAddress]) => healthApi.isAlive(podAddress), { concurrency: "inherit" }),
    Effect.map(HashMap.fromIterable)
  )
  const filteredAssignments = HashMap.filter(
    assignments,
    (pod) => Option.isSome(pod) && HashMap.has(filteredPods, pod.value)
  )
  const cdt = yield* _(Clock.currentTimeMillis)
  const initialState = ShardManagerState.make(
    HashMap.map(filteredPods, (pod) => PodWithMetadata.make(pod, cdt)),
    HashMap.union(
      filteredAssignments,
      pipe(
        Chunk.range(1, config.numberOfShards),
        Chunk.map((n) => [ShardId.make(n), Option.none()] as const),
        HashMap.fromIterable
      )
    )
  )
  const state = yield* _(RefSynchronized.make(initialState))
  const rebalanceSemaphore = yield* _(Effect.makeSemaphore(1))
  const eventsHub = yield* _(PubSub.unbounded<ShardingEvent.ShardingEvent>())
  const shardManager = make(
    layerScope,
    state,
    rebalanceSemaphore,
    eventsHub,
    healthApi,
    podsApi,
    stateRepository,
    config
  )
  yield* _(Effect.forkIn(layerScope)(shardManager.persistPods))
  // rebalance immediately if there are unassigned shards
  yield* _(shardManager.rebalance(HashSet.size(initialState.unassignedShards) > 0))
  // start a regular rebalance at the given interval
  yield* _(
    shardManager.rebalance(false),
    Effect.repeat(Schedule.spaced(config.rebalanceInterval)),
    Effect.forkIn(layerScope)
  )
  // log info events
  yield* _(
    shardManager.getShardingEvents,
    Stream.mapEffect((_) => Effect.logDebug(JSON.stringify(_))),
    Stream.runDrain,
    Effect.forkIn(layerScope)
  )
  yield* _(Effect.logDebug("Shard Manager loaded"))
  return shardManager
}).pipe(Layer.scoped(shardManagerTag))
```
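For orientation, a minimal consumption sketch follows. It is not part of the package: it assumes it sits next to the internal module above (hence the relative import), and it only uses the `shardManagerTag` and `live` exports shown in this diff plus core `effect` functions.

```ts
import * as Effect from "effect/Effect"
import * as HashMap from "effect/HashMap"
import { live, shardManagerTag } from "./shardManager.js"

// Resolve the ShardManager service, trigger a health sweep and report how many
// shards are currently tracked.
const report = Effect.gen(function*(_) {
  const manager = yield* _(shardManagerTag)
  yield* _(manager.checkAllPodsHealth)
  const assignments = yield* _(manager.getAssignments)
  yield* _(Effect.logInfo(`tracking ${HashMap.size(assignments)} shards`))
})

// Running `report` requires the `live` layer plus concrete ManagerConfig, Storage,
// PodsHealth and Pods layers supplied by the application, e.g.:
//   report.pipe(Effect.provide(live), Effect.provide(/* dependency layers */), Effect.runPromise)
```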
package/src/internal/shardManagerClient.ts
@@ -0,0 +1,49 @@
```ts
import * as Context from "effect/Context"
import * as Effect from "effect/Effect"
import { pipe } from "effect/Function"
import * as HashMap from "effect/HashMap"
import * as Layer from "effect/Layer"
import * as Option from "effect/Option"
import * as PodAddress from "../PodAddress.js"
import * as ShardId from "../ShardId.js"
import * as ShardingConfig from "../ShardingConfig.js"
import type * as ShardManagerClient from "../ShardManagerClient.js"

/** @internal */
const ShardManagerSymbolKey = "@effect/cluster/ShardManagerClient"

/** @internal */
export const ShardManagerClientTypeId: ShardManagerClient.ShardManagerClientTypeId = Symbol.for(
  ShardManagerSymbolKey
) as ShardManagerClient.ShardManagerClientTypeId

/** @internal */
export const shardManagerClientTag = Context.GenericTag<ShardManagerClient.ShardManagerClient>(ShardManagerSymbolKey)

/** @internal */
export function make(
  args: Omit<ShardManagerClient.ShardManagerClient, ShardManagerClient.ShardManagerClientTypeId>
): ShardManagerClient.ShardManagerClient {
  return ({ [ShardManagerClientTypeId]: ShardManagerClientTypeId, ...args })
}

/** @internal */
export const local = pipe(
  Layer.effect(
    shardManagerClientTag,
    Effect.gen(function*($) {
      const config = yield* $(ShardingConfig.ShardingConfig)
      const pod = PodAddress.make(config.selfHost, config.shardingPort)
      let shards = HashMap.empty<ShardId.ShardId, Option.Option<PodAddress.PodAddress>>()
      for (let i = 1; i <= config.numberOfShards; i++) {
        shards = HashMap.set(shards, ShardId.make(i), Option.some(pod))
      }
      return make({
        register: () => Effect.void,
        unregister: () => Effect.void,
        notifyUnhealthyPod: () => Effect.void,
        getAssignments: Effect.succeed(shards)
      })
    })
  )
)
```