@effect/cluster 0.28.3 → 0.29.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ClusterError/package.json +6 -0
- package/ClusterMetrics/package.json +6 -0
- package/ClusterSchema/package.json +6 -0
- package/DeliverAt/package.json +6 -0
- package/Entity/package.json +6 -0
- package/EntityAddress/package.json +6 -0
- package/EntityId/package.json +6 -0
- package/EntityType/package.json +6 -0
- package/Envelope/package.json +6 -0
- package/HttpCommon/package.json +6 -0
- package/HttpRunner/package.json +6 -0
- package/HttpShardManager/package.json +6 -0
- package/MachineId/package.json +6 -0
- package/MessageStorage/package.json +6 -0
- package/README.md +2 -2
- package/Reply/package.json +6 -0
- package/Runner/package.json +6 -0
- package/RunnerAddress/package.json +6 -0
- package/RunnerHealth/package.json +6 -0
- package/RunnerServer/package.json +6 -0
- package/Runners/package.json +6 -0
- package/ShardStorage/package.json +6 -0
- package/Singleton/package.json +6 -0
- package/SingletonAddress/package.json +6 -0
- package/Snowflake/package.json +6 -0
- package/SocketRunner/package.json +6 -0
- package/SocketShardManager/package.json +6 -0
- package/SqlMessageStorage/package.json +6 -0
- package/SqlShardStorage/package.json +6 -0
- package/SynchronizedClock/package.json +6 -0
- package/dist/cjs/ClusterError.js +180 -0
- package/dist/cjs/ClusterError.js.map +1 -0
- package/dist/cjs/ClusterMetrics.js +63 -0
- package/dist/cjs/ClusterMetrics.js.map +1 -0
- package/dist/cjs/{Pods.js → ClusterSchema.js} +10 -22
- package/dist/cjs/ClusterSchema.js.map +1 -0
- package/dist/cjs/DeliverAt.js +30 -0
- package/dist/cjs/DeliverAt.js.map +1 -0
- package/dist/cjs/Entity.js +187 -0
- package/dist/cjs/Entity.js.map +1 -0
- package/dist/cjs/EntityAddress.js +54 -0
- package/dist/cjs/EntityAddress.js.map +1 -0
- package/dist/cjs/{AtLeastOnce.js → EntityId.js} +6 -7
- package/dist/cjs/EntityId.js.map +1 -0
- package/dist/cjs/{ShardManagerClient.js → EntityType.js} +5 -16
- package/dist/cjs/EntityType.js.map +1 -0
- package/dist/cjs/Envelope.js +168 -0
- package/dist/cjs/Envelope.js.map +1 -0
- package/dist/cjs/HttpCommon.js +49 -0
- package/dist/cjs/HttpCommon.js.map +1 -0
- package/dist/cjs/HttpRunner.js +108 -0
- package/dist/cjs/HttpRunner.js.map +1 -0
- package/dist/cjs/HttpShardManager.js +140 -0
- package/dist/cjs/HttpShardManager.js.map +1 -0
- package/dist/cjs/{AtLeastOnceStorage.js → MachineId.js} +11 -9
- package/dist/cjs/MachineId.js.map +1 -0
- package/dist/cjs/Message.js +99 -18
- package/dist/cjs/Message.js.map +1 -1
- package/dist/cjs/MessageStorage.js +356 -0
- package/dist/cjs/MessageStorage.js.map +1 -0
- package/dist/cjs/Reply.js +200 -0
- package/dist/cjs/Reply.js.map +1 -0
- package/dist/cjs/Runner.js +79 -0
- package/dist/cjs/Runner.js.map +1 -0
- package/dist/cjs/RunnerAddress.js +63 -0
- package/dist/cjs/RunnerAddress.js.map +1 -0
- package/dist/cjs/RunnerHealth.js +68 -0
- package/dist/cjs/RunnerHealth.js.map +1 -0
- package/dist/cjs/RunnerServer.js +125 -0
- package/dist/cjs/RunnerServer.js.map +1 -0
- package/dist/cjs/Runners.js +344 -0
- package/dist/cjs/Runners.js.map +1 -0
- package/dist/cjs/ShardId.js +7 -46
- package/dist/cjs/ShardId.js.map +1 -1
- package/dist/cjs/ShardManager.js +493 -8
- package/dist/cjs/ShardManager.js.map +1 -1
- package/dist/cjs/ShardStorage.js +139 -0
- package/dist/cjs/ShardStorage.js.map +1 -0
- package/dist/cjs/Sharding.js +732 -88
- package/dist/cjs/Sharding.js.map +1 -1
- package/dist/cjs/ShardingConfig.js +85 -18
- package/dist/cjs/ShardingConfig.js.map +1 -1
- package/dist/cjs/ShardingRegistrationEvent.js +26 -32
- package/dist/cjs/ShardingRegistrationEvent.js.map +1 -1
- package/dist/cjs/{ManagerConfig.js → Singleton.js} +11 -20
- package/dist/cjs/Singleton.js.map +1 -0
- package/dist/cjs/SingletonAddress.js +50 -0
- package/dist/cjs/SingletonAddress.js.map +1 -0
- package/dist/cjs/Snowflake.js +133 -0
- package/dist/cjs/Snowflake.js.map +1 -0
- package/dist/cjs/SocketRunner.js +40 -0
- package/dist/cjs/SocketRunner.js.map +1 -0
- package/dist/cjs/SocketShardManager.js +33 -0
- package/dist/cjs/SocketShardManager.js.map +1 -0
- package/dist/cjs/SqlMessageStorage.js +668 -0
- package/dist/cjs/SqlMessageStorage.js.map +1 -0
- package/dist/cjs/SqlShardStorage.js +228 -0
- package/dist/cjs/SqlShardStorage.js.map +1 -0
- package/dist/cjs/SynchronizedClock.js +66 -0
- package/dist/cjs/SynchronizedClock.js.map +1 -0
- package/dist/cjs/index.js +57 -45
- package/dist/cjs/internal/entityManager.js +311 -143
- package/dist/cjs/internal/entityManager.js.map +1 -1
- package/dist/cjs/internal/entityReaper.js +47 -0
- package/dist/cjs/internal/entityReaper.js.map +1 -0
- package/dist/cjs/internal/hash.js +20 -0
- package/dist/cjs/internal/hash.js.map +1 -0
- package/dist/cjs/internal/interruptors.js +9 -0
- package/dist/cjs/internal/interruptors.js.map +1 -0
- package/dist/cjs/internal/resourceMap.js +88 -0
- package/dist/cjs/internal/resourceMap.js.map +1 -0
- package/dist/cjs/internal/resourceRef.js +92 -0
- package/dist/cjs/internal/resourceRef.js.map +1 -0
- package/dist/cjs/internal/shardManager.js +219 -235
- package/dist/cjs/internal/shardManager.js.map +1 -1
- package/dist/dts/ClusterError.d.ts +169 -0
- package/dist/dts/ClusterError.d.ts.map +1 -0
- package/dist/dts/ClusterMetrics.d.ts +50 -0
- package/dist/dts/ClusterMetrics.d.ts.map +1 -0
- package/dist/dts/ClusterSchema.d.ts +13 -0
- package/dist/dts/ClusterSchema.d.ts.map +1 -0
- package/dist/dts/DeliverAt.d.ts +27 -0
- package/dist/dts/DeliverAt.d.ts.map +1 -0
- package/dist/dts/Entity.d.ts +180 -0
- package/dist/dts/Entity.d.ts.map +1 -0
- package/dist/dts/EntityAddress.d.ts +55 -0
- package/dist/dts/EntityAddress.d.ts.map +1 -0
- package/dist/dts/EntityId.d.ts +15 -0
- package/dist/dts/EntityId.d.ts.map +1 -0
- package/dist/dts/EntityType.d.ts +15 -0
- package/dist/dts/EntityType.d.ts.map +1 -0
- package/dist/dts/Envelope.d.ts +252 -0
- package/dist/dts/Envelope.d.ts.map +1 -0
- package/dist/dts/HttpCommon.d.ts +25 -0
- package/dist/dts/HttpCommon.d.ts.map +1 -0
- package/dist/dts/HttpRunner.d.ts +76 -0
- package/dist/dts/HttpRunner.d.ts.map +1 -0
- package/dist/dts/HttpShardManager.d.ts +119 -0
- package/dist/dts/HttpShardManager.d.ts.map +1 -0
- package/dist/dts/MachineId.d.ts +20 -0
- package/dist/dts/MachineId.d.ts.map +1 -0
- package/dist/dts/Message.d.ts +91 -74
- package/dist/dts/Message.d.ts.map +1 -1
- package/dist/dts/MessageStorage.d.ts +336 -0
- package/dist/dts/MessageStorage.d.ts.map +1 -0
- package/dist/dts/Reply.d.ts +171 -0
- package/dist/dts/Reply.d.ts.map +1 -0
- package/dist/dts/Runner.d.ts +81 -0
- package/dist/dts/Runner.d.ts.map +1 -0
- package/dist/dts/RunnerAddress.d.ts +56 -0
- package/dist/dts/RunnerAddress.d.ts.map +1 -0
- package/dist/dts/RunnerHealth.d.ts +54 -0
- package/dist/dts/RunnerHealth.d.ts.map +1 -0
- package/dist/dts/RunnerServer.d.ts +44 -0
- package/dist/dts/RunnerServer.d.ts.map +1 -0
- package/dist/dts/Runners.d.ts +161 -0
- package/dist/dts/Runners.d.ts.map +1 -0
- package/dist/dts/ShardId.d.ts +5 -55
- package/dist/dts/ShardId.d.ts.map +1 -1
- package/dist/dts/ShardManager.d.ts +435 -23
- package/dist/dts/ShardManager.d.ts.map +1 -1
- package/dist/dts/ShardStorage.d.ts +200 -0
- package/dist/dts/ShardStorage.d.ts.map +1 -0
- package/dist/dts/Sharding.d.ts +109 -131
- package/dist/dts/Sharding.d.ts.map +1 -1
- package/dist/dts/ShardingConfig.d.ts +147 -44
- package/dist/dts/ShardingConfig.d.ts.map +1 -1
- package/dist/dts/ShardingRegistrationEvent.d.ts +38 -23
- package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -1
- package/dist/dts/Singleton.d.ts +13 -0
- package/dist/dts/Singleton.d.ts.map +1 -0
- package/dist/dts/SingletonAddress.d.ts +49 -0
- package/dist/dts/SingletonAddress.d.ts.map +1 -0
- package/dist/dts/Snowflake.d.ts +121 -0
- package/dist/dts/Snowflake.d.ts.map +1 -0
- package/dist/dts/SocketRunner.d.ts +22 -0
- package/dist/dts/SocketRunner.d.ts.map +1 -0
- package/dist/dts/SocketShardManager.d.ts +17 -0
- package/dist/dts/SocketShardManager.d.ts.map +1 -0
- package/dist/dts/SqlMessageStorage.d.ts +43 -0
- package/dist/dts/SqlMessageStorage.d.ts.map +1 -0
- package/dist/dts/SqlShardStorage.d.ts +38 -0
- package/dist/dts/SqlShardStorage.d.ts.map +1 -0
- package/dist/dts/SynchronizedClock.d.ts +19 -0
- package/dist/dts/SynchronizedClock.d.ts.map +1 -0
- package/dist/dts/index.d.ts +48 -24
- package/dist/dts/index.d.ts.map +1 -1
- package/dist/dts/internal/entityReaper.d.ts +2 -0
- package/dist/dts/internal/entityReaper.d.ts.map +1 -0
- package/dist/dts/internal/hash.d.ts +2 -0
- package/dist/dts/internal/hash.d.ts.map +1 -0
- package/dist/dts/internal/interruptors.d.ts +2 -0
- package/dist/dts/internal/interruptors.d.ts.map +1 -0
- package/dist/dts/internal/resourceMap.d.ts +22 -0
- package/dist/dts/internal/resourceMap.d.ts.map +1 -0
- package/dist/dts/internal/resourceRef.d.ts +25 -0
- package/dist/dts/internal/resourceRef.d.ts.map +1 -0
- package/dist/dts/internal/shardManager.d.ts +1 -11
- package/dist/dts/internal/shardManager.d.ts.map +1 -1
- package/dist/esm/ClusterError.js +164 -0
- package/dist/esm/ClusterError.js.map +1 -0
- package/dist/esm/ClusterMetrics.js +54 -0
- package/dist/esm/ClusterMetrics.js.map +1 -0
- package/dist/esm/ClusterSchema.js +13 -0
- package/dist/esm/ClusterSchema.js.map +1 -0
- package/dist/esm/DeliverAt.js +22 -0
- package/dist/esm/DeliverAt.js.map +1 -0
- package/dist/esm/Entity.js +173 -0
- package/dist/esm/Entity.js.map +1 -0
- package/dist/esm/EntityAddress.js +44 -0
- package/dist/esm/EntityAddress.js.map +1 -0
- package/dist/esm/EntityId.js +10 -0
- package/dist/esm/EntityId.js.map +1 -0
- package/dist/esm/EntityType.js +10 -0
- package/dist/esm/EntityType.js.map +1 -0
- package/dist/esm/Envelope.js +154 -0
- package/dist/esm/Envelope.js.map +1 -0
- package/dist/esm/HttpCommon.js +38 -0
- package/dist/esm/HttpCommon.js.map +1 -0
- package/dist/esm/HttpRunner.js +98 -0
- package/dist/esm/HttpRunner.js.map +1 -0
- package/dist/esm/HttpShardManager.js +128 -0
- package/dist/esm/HttpShardManager.js.map +1 -0
- package/dist/esm/MachineId.js +17 -0
- package/dist/esm/MachineId.js.map +1 -0
- package/dist/esm/Message.js +88 -17
- package/dist/esm/Message.js.map +1 -1
- package/dist/esm/MessageStorage.js +345 -0
- package/dist/esm/MessageStorage.js.map +1 -0
- package/dist/esm/Reply.js +184 -0
- package/dist/esm/Reply.js.map +1 -0
- package/dist/esm/Runner.js +68 -0
- package/dist/esm/Runner.js.map +1 -0
- package/dist/esm/RunnerAddress.js +52 -0
- package/dist/esm/RunnerAddress.js.map +1 -0
- package/dist/esm/RunnerHealth.js +58 -0
- package/dist/esm/RunnerHealth.js.map +1 -0
- package/dist/esm/RunnerServer.js +116 -0
- package/dist/esm/RunnerServer.js.map +1 -0
- package/dist/esm/Runners.js +332 -0
- package/dist/esm/Runners.js.map +1 -0
- package/dist/esm/ShardId.js +5 -42
- package/dist/esm/ShardId.js.map +1 -1
- package/dist/esm/ShardManager.js +486 -7
- package/dist/esm/ShardManager.js.map +1 -1
- package/dist/esm/ShardStorage.js +129 -0
- package/dist/esm/ShardStorage.js.map +1 -0
- package/dist/esm/Sharding.js +730 -87
- package/dist/esm/Sharding.js.map +1 -1
- package/dist/esm/ShardingConfig.js +80 -17
- package/dist/esm/ShardingConfig.js.map +1 -1
- package/dist/esm/ShardingRegistrationEvent.js +19 -29
- package/dist/esm/ShardingRegistrationEvent.js.map +1 -1
- package/dist/esm/Singleton.js +15 -0
- package/dist/esm/Singleton.js.map +1 -0
- package/dist/esm/SingletonAddress.js +40 -0
- package/dist/esm/SingletonAddress.js.map +1 -0
- package/dist/esm/Snowflake.js +117 -0
- package/dist/esm/Snowflake.js.map +1 -0
- package/dist/esm/SocketRunner.js +31 -0
- package/dist/esm/SocketRunner.js.map +1 -0
- package/dist/esm/SocketShardManager.js +24 -0
- package/dist/esm/SocketShardManager.js.map +1 -0
- package/dist/esm/SqlMessageStorage.js +658 -0
- package/dist/esm/SqlMessageStorage.js.map +1 -0
- package/dist/esm/SqlShardStorage.js +218 -0
- package/dist/esm/SqlShardStorage.js.map +1 -0
- package/dist/esm/SynchronizedClock.js +57 -0
- package/dist/esm/SynchronizedClock.js.map +1 -0
- package/dist/esm/index.js +48 -24
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/internal/entityManager.js +311 -142
- package/dist/esm/internal/entityManager.js.map +1 -1
- package/dist/esm/internal/entityReaper.js +38 -0
- package/dist/esm/internal/entityReaper.js.map +1 -0
- package/dist/esm/internal/hash.js +12 -0
- package/dist/esm/internal/hash.js.map +1 -0
- package/dist/esm/internal/interruptors.js +3 -0
- package/dist/esm/internal/interruptors.js.map +1 -0
- package/dist/esm/internal/resourceMap.js +79 -0
- package/dist/esm/internal/resourceMap.js.map +1 -0
- package/dist/esm/internal/resourceRef.js +83 -0
- package/dist/esm/internal/resourceRef.js.map +1 -0
- package/dist/esm/internal/shardManager.js +217 -233
- package/dist/esm/internal/shardManager.js.map +1 -1
- package/package.json +212 -154
- package/src/ClusterError.ts +193 -0
- package/src/ClusterMetrics.ts +62 -0
- package/src/ClusterSchema.ts +13 -0
- package/src/DeliverAt.ts +36 -0
- package/src/Entity.ts +438 -0
- package/src/EntityAddress.ts +55 -0
- package/src/EntityId.ts +16 -0
- package/src/EntityType.ts +16 -0
- package/src/Envelope.ts +352 -0
- package/src/HttpCommon.ts +73 -0
- package/src/HttpRunner.ts +196 -0
- package/src/HttpShardManager.ts +273 -0
- package/src/MachineId.ts +27 -0
- package/src/Message.ts +143 -92
- package/src/MessageStorage.ts +697 -0
- package/src/Reply.ts +295 -0
- package/src/Runner.ts +84 -0
- package/src/RunnerAddress.ts +61 -0
- package/src/RunnerHealth.ts +87 -0
- package/src/RunnerServer.ts +156 -0
- package/src/Runners.ts +533 -0
- package/src/ShardId.ts +10 -62
- package/src/ShardManager.ts +780 -29
- package/src/ShardStorage.ts +289 -0
- package/src/Sharding.ts +1060 -183
- package/src/ShardingConfig.ts +186 -45
- package/src/ShardingRegistrationEvent.ts +38 -39
- package/src/Singleton.ts +20 -0
- package/src/SingletonAddress.ts +47 -0
- package/src/Snowflake.ts +194 -0
- package/src/SocketRunner.ts +59 -0
- package/src/SocketShardManager.ts +48 -0
- package/src/SqlMessageStorage.ts +833 -0
- package/src/SqlShardStorage.ts +292 -0
- package/src/SynchronizedClock.ts +82 -0
- package/src/index.ts +54 -24
- package/src/internal/entityManager.ts +464 -361
- package/src/internal/entityReaper.ts +53 -0
- package/src/internal/hash.ts +11 -0
- package/src/internal/interruptors.ts +4 -0
- package/src/internal/resourceMap.ts +89 -0
- package/src/internal/resourceRef.ts +88 -0
- package/src/internal/shardManager.ts +273 -546
- package/AtLeastOnce/package.json +0 -6
- package/AtLeastOnceStorage/package.json +0 -6
- package/Broadcaster/package.json +0 -6
- package/ManagerConfig/package.json +0 -6
- package/MessageState/package.json +0 -6
- package/Messenger/package.json +0 -6
- package/Pod/package.json +0 -6
- package/PodAddress/package.json +0 -6
- package/Pods/package.json +0 -6
- package/PodsHealth/package.json +0 -6
- package/PoisonPill/package.json +0 -6
- package/RecipientAddress/package.json +0 -6
- package/RecipientBehaviour/package.json +0 -6
- package/RecipientBehaviourContext/package.json +0 -6
- package/RecipientType/package.json +0 -6
- package/Serialization/package.json +0 -6
- package/SerializedEnvelope/package.json +0 -6
- package/SerializedMessage/package.json +0 -6
- package/ShardManagerClient/package.json +0 -6
- package/ShardingEvent/package.json +0 -6
- package/ShardingException/package.json +0 -6
- package/Storage/package.json +0 -6
- package/dist/cjs/AtLeastOnce.js.map +0 -1
- package/dist/cjs/AtLeastOnceStorage.js.map +0 -1
- package/dist/cjs/Broadcaster.js +0 -6
- package/dist/cjs/Broadcaster.js.map +0 -1
- package/dist/cjs/ManagerConfig.js.map +0 -1
- package/dist/cjs/MessageState.js +0 -55
- package/dist/cjs/MessageState.js.map +0 -1
- package/dist/cjs/Messenger.js +0 -6
- package/dist/cjs/Messenger.js.map +0 -1
- package/dist/cjs/Pod.js +0 -78
- package/dist/cjs/Pod.js.map +0 -1
- package/dist/cjs/PodAddress.js +0 -77
- package/dist/cjs/PodAddress.js.map +0 -1
- package/dist/cjs/Pods.js.map +0 -1
- package/dist/cjs/PodsHealth.js +0 -41
- package/dist/cjs/PodsHealth.js.map +0 -1
- package/dist/cjs/PoisonPill.js +0 -78
- package/dist/cjs/PoisonPill.js.map +0 -1
- package/dist/cjs/RecipientAddress.js +0 -79
- package/dist/cjs/RecipientAddress.js.map +0 -1
- package/dist/cjs/RecipientBehaviour.js +0 -38
- package/dist/cjs/RecipientBehaviour.js.map +0 -1
- package/dist/cjs/RecipientBehaviourContext.js +0 -64
- package/dist/cjs/RecipientBehaviourContext.js.map +0 -1
- package/dist/cjs/RecipientType.js +0 -123
- package/dist/cjs/RecipientType.js.map +0 -1
- package/dist/cjs/Serialization.js +0 -32
- package/dist/cjs/Serialization.js.map +0 -1
- package/dist/cjs/SerializedEnvelope.js +0 -87
- package/dist/cjs/SerializedEnvelope.js.map +0 -1
- package/dist/cjs/SerializedMessage.js +0 -64
- package/dist/cjs/SerializedMessage.js.map +0 -1
- package/dist/cjs/ShardManagerClient.js.map +0 -1
- package/dist/cjs/ShardingEvent.js +0 -72
- package/dist/cjs/ShardingEvent.js.map +0 -1
- package/dist/cjs/ShardingException.js +0 -107
- package/dist/cjs/ShardingException.js.map +0 -1
- package/dist/cjs/Storage.js +0 -40
- package/dist/cjs/Storage.js.map +0 -1
- package/dist/cjs/internal/atLeastOnce.js +0 -35
- package/dist/cjs/internal/atLeastOnce.js.map +0 -1
- package/dist/cjs/internal/atLeastOnceStorage.js +0 -163
- package/dist/cjs/internal/atLeastOnceStorage.js.map +0 -1
- package/dist/cjs/internal/entityState.js +0 -47
- package/dist/cjs/internal/entityState.js.map +0 -1
- package/dist/cjs/internal/managerConfig.js +0 -46
- package/dist/cjs/internal/managerConfig.js.map +0 -1
- package/dist/cjs/internal/message.js +0 -48
- package/dist/cjs/internal/message.js.map +0 -1
- package/dist/cjs/internal/messageState.js +0 -79
- package/dist/cjs/internal/messageState.js.map +0 -1
- package/dist/cjs/internal/podWithMetadata.js +0 -54
- package/dist/cjs/internal/podWithMetadata.js.map +0 -1
- package/dist/cjs/internal/pods.js +0 -35
- package/dist/cjs/internal/pods.js.map +0 -1
- package/dist/cjs/internal/podsHealth.js +0 -40
- package/dist/cjs/internal/podsHealth.js.map +0 -1
- package/dist/cjs/internal/recipientBehaviour.js +0 -52
- package/dist/cjs/internal/recipientBehaviour.js.map +0 -1
- package/dist/cjs/internal/recipientBehaviourContext.js +0 -36
- package/dist/cjs/internal/recipientBehaviourContext.js.map +0 -1
- package/dist/cjs/internal/serialization.js +0 -48
- package/dist/cjs/internal/serialization.js.map +0 -1
- package/dist/cjs/internal/shardManagerClient.js +0 -48
- package/dist/cjs/internal/shardManagerClient.js.map +0 -1
- package/dist/cjs/internal/shardManagerState.js +0 -44
- package/dist/cjs/internal/shardManagerState.js.map +0 -1
- package/dist/cjs/internal/sharding.js +0 -306
- package/dist/cjs/internal/sharding.js.map +0 -1
- package/dist/cjs/internal/shardingConfig.js +0 -56
- package/dist/cjs/internal/shardingConfig.js.map +0 -1
- package/dist/cjs/internal/storage.js +0 -52
- package/dist/cjs/internal/storage.js.map +0 -1
- package/dist/cjs/internal/utils.js +0 -69
- package/dist/cjs/internal/utils.js.map +0 -1
- package/dist/dts/AtLeastOnce.d.ts +0 -20
- package/dist/dts/AtLeastOnce.d.ts.map +0 -1
- package/dist/dts/AtLeastOnceStorage.d.ts +0 -75
- package/dist/dts/AtLeastOnceStorage.d.ts.map +0 -1
- package/dist/dts/Broadcaster.d.ts +0 -32
- package/dist/dts/Broadcaster.d.ts.map +0 -1
- package/dist/dts/ManagerConfig.d.ts +0 -61
- package/dist/dts/ManagerConfig.d.ts.map +0 -1
- package/dist/dts/MessageState.d.ts +0 -107
- package/dist/dts/MessageState.d.ts.map +0 -1
- package/dist/dts/Messenger.d.ts +0 -32
- package/dist/dts/Messenger.d.ts.map +0 -1
- package/dist/dts/Pod.d.ts +0 -81
- package/dist/dts/Pod.d.ts.map +0 -1
- package/dist/dts/PodAddress.d.ts +0 -80
- package/dist/dts/PodAddress.d.ts.map +0 -1
- package/dist/dts/Pods.d.ts +0 -78
- package/dist/dts/Pods.d.ts.map +0 -1
- package/dist/dts/PodsHealth.d.ts +0 -66
- package/dist/dts/PodsHealth.d.ts.map +0 -1
- package/dist/dts/PoisonPill.d.ts +0 -78
- package/dist/dts/PoisonPill.d.ts.map +0 -1
- package/dist/dts/RecipientAddress.d.ts +0 -57
- package/dist/dts/RecipientAddress.d.ts.map +0 -1
- package/dist/dts/RecipientBehaviour.d.ts +0 -72
- package/dist/dts/RecipientBehaviour.d.ts.map +0 -1
- package/dist/dts/RecipientBehaviourContext.d.ts +0 -83
- package/dist/dts/RecipientBehaviourContext.d.ts.map +0 -1
- package/dist/dts/RecipientType.d.ts +0 -93
- package/dist/dts/RecipientType.d.ts.map +0 -1
- package/dist/dts/Serialization.d.ts +0 -58
- package/dist/dts/Serialization.d.ts.map +0 -1
- package/dist/dts/SerializedEnvelope.d.ts +0 -86
- package/dist/dts/SerializedEnvelope.d.ts.map +0 -1
- package/dist/dts/SerializedMessage.d.ts +0 -66
- package/dist/dts/SerializedMessage.d.ts.map +0 -1
- package/dist/dts/ShardManagerClient.d.ts +0 -50
- package/dist/dts/ShardManagerClient.d.ts.map +0 -1
- package/dist/dts/ShardingEvent.d.ts +0 -90
- package/dist/dts/ShardingEvent.d.ts.map +0 -1
- package/dist/dts/ShardingException.d.ts +0 -125
- package/dist/dts/ShardingException.d.ts.map +0 -1
- package/dist/dts/Storage.d.ts +0 -78
- package/dist/dts/Storage.d.ts.map +0 -1
- package/dist/dts/internal/atLeastOnce.d.ts +0 -2
- package/dist/dts/internal/atLeastOnce.d.ts.map +0 -1
- package/dist/dts/internal/atLeastOnceStorage.d.ts +0 -2
- package/dist/dts/internal/atLeastOnceStorage.d.ts.map +0 -1
- package/dist/dts/internal/entityState.d.ts +0 -21
- package/dist/dts/internal/entityState.d.ts.map +0 -1
- package/dist/dts/internal/managerConfig.d.ts +0 -2
- package/dist/dts/internal/managerConfig.d.ts.map +0 -1
- package/dist/dts/internal/message.d.ts +0 -9
- package/dist/dts/internal/message.d.ts.map +0 -1
- package/dist/dts/internal/messageState.d.ts +0 -2
- package/dist/dts/internal/messageState.d.ts.map +0 -1
- package/dist/dts/internal/podWithMetadata.d.ts +0 -2
- package/dist/dts/internal/podWithMetadata.d.ts.map +0 -1
- package/dist/dts/internal/pods.d.ts +0 -2
- package/dist/dts/internal/pods.d.ts.map +0 -1
- package/dist/dts/internal/podsHealth.d.ts +0 -2
- package/dist/dts/internal/podsHealth.d.ts.map +0 -1
- package/dist/dts/internal/recipientBehaviour.d.ts +0 -2
- package/dist/dts/internal/recipientBehaviour.d.ts.map +0 -1
- package/dist/dts/internal/recipientBehaviourContext.d.ts +0 -2
- package/dist/dts/internal/recipientBehaviourContext.d.ts.map +0 -1
- package/dist/dts/internal/serialization.d.ts +0 -2
- package/dist/dts/internal/serialization.d.ts.map +0 -1
- package/dist/dts/internal/shardManagerClient.d.ts +0 -2
- package/dist/dts/internal/shardManagerClient.d.ts.map +0 -1
- package/dist/dts/internal/shardManagerState.d.ts +0 -26
- package/dist/dts/internal/shardManagerState.d.ts.map +0 -1
- package/dist/dts/internal/sharding.d.ts +0 -2
- package/dist/dts/internal/sharding.d.ts.map +0 -1
- package/dist/dts/internal/shardingConfig.d.ts +0 -2
- package/dist/dts/internal/shardingConfig.d.ts.map +0 -1
- package/dist/dts/internal/storage.d.ts +0 -2
- package/dist/dts/internal/storage.d.ts.map +0 -1
- package/dist/dts/internal/utils.d.ts +0 -2
- package/dist/dts/internal/utils.d.ts.map +0 -1
- package/dist/esm/AtLeastOnce.js +0 -12
- package/dist/esm/AtLeastOnce.js.map +0 -1
- package/dist/esm/AtLeastOnceStorage.js +0 -17
- package/dist/esm/AtLeastOnceStorage.js.map +0 -1
- package/dist/esm/Broadcaster.js +0 -2
- package/dist/esm/Broadcaster.js.map +0 -1
- package/dist/esm/ManagerConfig.js +0 -26
- package/dist/esm/ManagerConfig.js.map +0 -1
- package/dist/esm/MessageState.js +0 -47
- package/dist/esm/MessageState.js.map +0 -1
- package/dist/esm/Messenger.js +0 -2
- package/dist/esm/Messenger.js.map +0 -1
- package/dist/esm/Pod.js +0 -65
- package/dist/esm/Pod.js.map +0 -1
- package/dist/esm/PodAddress.js +0 -64
- package/dist/esm/PodAddress.js.map +0 -1
- package/dist/esm/Pods.js +0 -27
- package/dist/esm/Pods.js.map +0 -1
- package/dist/esm/PodsHealth.js +0 -33
- package/dist/esm/PodsHealth.js.map +0 -1
- package/dist/esm/PoisonPill.js +0 -65
- package/dist/esm/PoisonPill.js.map +0 -1
- package/dist/esm/RecipientAddress.js +0 -67
- package/dist/esm/RecipientAddress.js.map +0 -1
- package/dist/esm/RecipientBehaviour.js +0 -30
- package/dist/esm/RecipientBehaviour.js.map +0 -1
- package/dist/esm/RecipientBehaviourContext.js +0 -56
- package/dist/esm/RecipientBehaviourContext.js.map +0 -1
- package/dist/esm/RecipientType.js +0 -108
- package/dist/esm/RecipientType.js.map +0 -1
- package/dist/esm/Serialization.js +0 -24
- package/dist/esm/Serialization.js.map +0 -1
- package/dist/esm/SerializedEnvelope.js +0 -74
- package/dist/esm/SerializedEnvelope.js.map +0 -1
- package/dist/esm/SerializedMessage.js +0 -51
- package/dist/esm/SerializedMessage.js.map +0 -1
- package/dist/esm/ShardManagerClient.js +0 -22
- package/dist/esm/ShardManagerClient.js.map +0 -1
- package/dist/esm/ShardingEvent.js +0 -62
- package/dist/esm/ShardingEvent.js.map +0 -1
- package/dist/esm/ShardingException.js +0 -91
- package/dist/esm/ShardingException.js.map +0 -1
- package/dist/esm/Storage.js +0 -32
- package/dist/esm/Storage.js.map +0 -1
- package/dist/esm/internal/atLeastOnce.js +0 -26
- package/dist/esm/internal/atLeastOnce.js.map +0 -1
- package/dist/esm/internal/atLeastOnceStorage.js +0 -154
- package/dist/esm/internal/atLeastOnceStorage.js.map +0 -1
- package/dist/esm/internal/entityState.js +0 -35
- package/dist/esm/internal/entityState.js.map +0 -1
- package/dist/esm/internal/managerConfig.js +0 -38
- package/dist/esm/internal/managerConfig.js.map +0 -1
- package/dist/esm/internal/message.js +0 -35
- package/dist/esm/internal/message.js.map +0 -1
- package/dist/esm/internal/messageState.js +0 -66
- package/dist/esm/internal/messageState.js.map +0 -1
- package/dist/esm/internal/podWithMetadata.js +0 -41
- package/dist/esm/internal/podWithMetadata.js.map +0 -1
- package/dist/esm/internal/pods.js +0 -25
- package/dist/esm/internal/pods.js.map +0 -1
- package/dist/esm/internal/podsHealth.js +0 -30
- package/dist/esm/internal/podsHealth.js.map +0 -1
- package/dist/esm/internal/recipientBehaviour.js +0 -42
- package/dist/esm/internal/recipientBehaviour.js.map +0 -1
- package/dist/esm/internal/recipientBehaviourContext.js +0 -26
- package/dist/esm/internal/recipientBehaviourContext.js.map +0 -1
- package/dist/esm/internal/serialization.js +0 -38
- package/dist/esm/internal/serialization.js.map +0 -1
- package/dist/esm/internal/shardManagerClient.js +0 -38
- package/dist/esm/internal/shardManagerClient.js.map +0 -1
- package/dist/esm/internal/shardManagerState.js +0 -36
- package/dist/esm/internal/shardManagerState.js.map +0 -1
- package/dist/esm/internal/sharding.js +0 -288
- package/dist/esm/internal/sharding.js.map +0 -1
- package/dist/esm/internal/shardingConfig.js +0 -47
- package/dist/esm/internal/shardingConfig.js.map +0 -1
- package/dist/esm/internal/storage.js +0 -42
- package/dist/esm/internal/storage.js.map +0 -1
- package/dist/esm/internal/utils.js +0 -56
- package/dist/esm/internal/utils.js.map +0 -1
- package/src/AtLeastOnce.ts +0 -28
- package/src/AtLeastOnceStorage.ts +0 -96
- package/src/Broadcaster.ts +0 -48
- package/src/ManagerConfig.ts +0 -67
- package/src/MessageState.ts +0 -126
- package/src/Messenger.ts +0 -40
- package/src/Pod.ts +0 -95
- package/src/PodAddress.ts +0 -94
- package/src/Pods.ts +0 -100
- package/src/PodsHealth.ts +0 -74
- package/src/PoisonPill.ts +0 -105
- package/src/RecipientAddress.ts +0 -72
- package/src/RecipientBehaviour.ts +0 -108
- package/src/RecipientBehaviourContext.ts +0 -101
- package/src/RecipientType.ts +0 -134
- package/src/Serialization.ts +0 -72
- package/src/SerializedEnvelope.ts +0 -108
- package/src/SerializedMessage.ts +0 -82
- package/src/ShardManagerClient.ts +0 -57
- package/src/ShardingEvent.ts +0 -121
- package/src/ShardingException.ts +0 -151
- package/src/Storage.ts +0 -92
- package/src/internal/atLeastOnce.ts +0 -59
- package/src/internal/atLeastOnceStorage.ts +0 -218
- package/src/internal/entityState.ts +0 -64
- package/src/internal/managerConfig.ts +0 -84
- package/src/internal/message.ts +0 -63
- package/src/internal/messageState.ts +0 -98
- package/src/internal/podWithMetadata.ts +0 -72
- package/src/internal/pods.ts +0 -29
- package/src/internal/podsHealth.ts +0 -39
- package/src/internal/recipientBehaviour.ts +0 -133
- package/src/internal/recipientBehaviourContext.ts +0 -70
- package/src/internal/serialization.ts +0 -63
- package/src/internal/shardManagerClient.ts +0 -49
- package/src/internal/shardManagerState.ts +0 -80
- package/src/internal/sharding.ts +0 -789
- package/src/internal/shardingConfig.ts +0 -97
- package/src/internal/storage.ts +0 -60
- package/src/internal/utils.ts +0 -54
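Taken together, the file list shows a wholesale renaming of the runtime model: every `Pod*` entrypoint from 0.28.x (`Pod`, `PodAddress`, `Pods`, `PodsHealth`) is removed, and a parallel `Runner*` family (`Runner`, `RunnerAddress`, `Runners`, `RunnerHealth`) is added, alongside new storage, snowflake, and clock modules. Below is a sketch of what that implies for imports; the Pod → Runner mapping is inferred from the file names and the `ShardManager.ts` diff that follows, not from an official migration guide, so treat it as an approximation:

```ts
// 0.28.x entrypoints that no longer exist in 0.29.0:
// import * as Pods from "@effect/cluster/Pods"
// import * as PodAddress from "@effect/cluster/PodAddress"
// import * as ShardManagerClient from "@effect/cluster/ShardManagerClient"

// Closest 0.29.0 equivalents, per the added files:
import { Runners } from "@effect/cluster/Runners"
import { RunnerAddress } from "@effect/cluster/RunnerAddress"
// ShardManagerClient is now exported from the ShardManager module
// (see the diff below):
import { ShardManagerClient } from "@effect/cluster/ShardManager"
```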
package/src/ShardManager.ts
CHANGED
@@ -1,53 +1,804 @@
 /**
  * @since 1.0.0
  */
-import
-import
-import
-import
-import * as
-import
-import
-import type
-import
+import * as Rpc from "@effect/rpc/Rpc"
+import * as RpcClient from "@effect/rpc/RpcClient"
+import * as RpcGroup from "@effect/rpc/RpcGroup"
+import * as RpcServer from "@effect/rpc/RpcServer"
+import * as Arr from "effect/Array"
+import * as Clock from "effect/Clock"
+import * as Config_ from "effect/Config"
+import type { ConfigError } from "effect/ConfigError"
+import * as ConfigProvider from "effect/ConfigProvider"
+import * as Context from "effect/Context"
+import * as Data from "effect/Data"
+import * as Deferred from "effect/Deferred"
+import * as Duration from "effect/Duration"
+import * as Effect from "effect/Effect"
+import * as Equal from "effect/Equal"
+import * as FiberSet from "effect/FiberSet"
+import { identity } from "effect/Function"
+import * as Iterable from "effect/Iterable"
+import * as Layer from "effect/Layer"
+import * as Mailbox from "effect/Mailbox"
+import * as Metric from "effect/Metric"
+import * as MutableHashMap from "effect/MutableHashMap"
+import * as MutableHashSet from "effect/MutableHashSet"
+import * as Option from "effect/Option"
+import * as PubSub from "effect/PubSub"
+import * as Queue from "effect/Queue"
+import * as Schedule from "effect/Schedule"
+import * as Schema from "effect/Schema"
+import type { Scope } from "effect/Scope"
+import { RunnerNotRegistered } from "./ClusterError.js"
+import * as ClusterMetrics from "./ClusterMetrics.js"
+import {
+  decideAssignmentsForUnassignedShards,
+  decideAssignmentsForUnbalancedShards,
+  RunnerWithMetadata,
+  State
+} from "./internal/shardManager.js"
+import * as MachineId from "./MachineId.js"
+import { Runner } from "./Runner.js"
+import { RunnerAddress } from "./RunnerAddress.js"
+import { RunnerHealth } from "./RunnerHealth.js"
+import { RpcClientProtocol, Runners } from "./Runners.js"
+import { ShardId } from "./ShardId.js"
+import { ShardingConfig } from "./ShardingConfig.js"
+import { ShardStorage } from "./ShardStorage.js"
 
 /**
  * @since 1.0.0
- * @category
+ * @category models
+ */
+export class ShardManager extends Context.Tag("@effect/cluster/ShardManager")<ShardManager, {
+  /**
+   * Get all shard assignments.
+   */
+  readonly getAssignments: Effect.Effect<
+    ReadonlyMap<ShardId, Option.Option<RunnerAddress>>
+  >
+  /**
+   * Get a stream of sharding events emit by the shard manager.
+   */
+  readonly shardingEvents: Effect.Effect<Queue.Dequeue<ShardingEvent>, never, Scope>
+  /**
+   * Register a new runner with the cluster.
+   */
+  readonly register: (runner: Runner) => Effect.Effect<MachineId.MachineId>
+  /**
+   * Unregister a runner from the cluster.
+   */
+  readonly unregister: (address: RunnerAddress) => Effect.Effect<void>
+  /**
+   * Rebalance shards assigned to runners within the cluster.
+   */
+  readonly rebalance: (immediate: boolean) => Effect.Effect<void>
+  /**
+   * Notify the cluster of an unhealthy runner.
+   */
+  readonly notifyUnhealthyRunner: (address: RunnerAddress) => Effect.Effect<void>
+  /**
+   * Check and repot on the health of all runners in the cluster.
+   */
+  readonly checkRunnerHealth: Effect.Effect<void>
+}>() {}
+
+/**
+ * @since 1.0.0
+ * @category Config
+ */
+export class Config extends Context.Tag("@effect/cluster/ShardManager/Config")<Config, {
+  /**
+   * The duration to wait before rebalancing shards after a change.
+   */
+  readonly rebalanceDebounce: Duration.DurationInput
+  /**
+   * The interval on which regular rebalancing of shards will occur.
+   */
+  readonly rebalanceInterval: Duration.DurationInput
+  /**
+   * The interval on which rebalancing of shards which failed to be
+   * rebalanced will be retried.
+   */
+  readonly rebalanceRetryInterval: Duration.DurationInput
+  /**
+   * The maximum ratio of shards to rebalance at once.
+   *
+   * **Note**: this value should be a number between `0` and `1`.
+   */
+  readonly rebalanceRate: number
+  /**
+   * The interval on which persistence of Runners will be retried if it fails.
+   */
+  readonly persistRetryInterval: Duration.DurationInput
+  /**
+   * The number of times persistence of Runners will be retried if it fails.
+   */
+  readonly persistRetryCount: number
+  /**
+   * The interval on which Runner health will be checked.
+   */
+  readonly runnerHealthCheckInterval: Duration.DurationInput
+  /**
+   * The length of time to wait for a Runner to respond to a ping.
+   */
+  readonly runnerPingTimeout: Duration.DurationInput
+}>() {
+  /**
+   * @since 1.0.0
+   */
+  static readonly defaults: Config["Type"] = {
+    rebalanceDebounce: Duration.millis(500),
+    rebalanceInterval: Duration.seconds(20),
+    rebalanceRetryInterval: Duration.seconds(10),
+    rebalanceRate: 2 / 100,
+    persistRetryCount: 100,
+    persistRetryInterval: Duration.seconds(3),
+    runnerHealthCheckInterval: Duration.minutes(1),
+    runnerPingTimeout: Duration.seconds(3)
+  }
+}
+
+/**
+ * @since 1.0.0
+ * @category Config
+ */
+export const configConfig: Config_.Config<Config["Type"]> = Config_.all({
+  rebalanceDebounce: Config_.duration("rebalanceDebounce").pipe(
+    Config_.withDefault(Config.defaults.rebalanceDebounce),
+    Config_.withDescription("The duration to wait before rebalancing shards after a change.")
+  ),
+  rebalanceInterval: Config_.duration("rebalanceInterval").pipe(
+    Config_.withDefault(Config.defaults.rebalanceInterval),
+    Config_.withDescription("The interval on which regular rebalancing of shards will occur.")
+  ),
+  rebalanceRetryInterval: Config_.duration("rebalanceRetryInterval").pipe(
+    Config_.withDefault(Config.defaults.rebalanceRetryInterval),
+    Config_.withDescription(
+      "The interval on which rebalancing of shards which failed to be rebalanced will be retried."
+    )
+  ),
+  rebalanceRate: Config_.number("rebalanceRate").pipe(
+    Config_.withDefault(Config.defaults.rebalanceRate),
+    Config_.withDescription("The maximum ratio of shards to rebalance at once.")
+  ),
+  persistRetryCount: Config_.integer("persistRetryCount").pipe(
+    Config_.withDefault(Config.defaults.persistRetryCount),
+    Config_.withDescription("The number of times persistence of runners will be retried if it fails.")
+  ),
+  persistRetryInterval: Config_.duration("persistRetryInterval").pipe(
+    Config_.withDefault(Config.defaults.persistRetryInterval),
+    Config_.withDescription("The interval on which persistence of runners will be retried if it fails.")
+  ),
+  runnerHealthCheckInterval: Config_.duration("runnerHealthCheckInterval").pipe(
+    Config_.withDefault(Config.defaults.runnerHealthCheckInterval),
+    Config_.withDescription("The interval on which runner health will be checked.")
+  ),
+  runnerPingTimeout: Config_.duration("runnerPingTimeout").pipe(
+    Config_.withDefault(Config.defaults.runnerPingTimeout),
+    Config_.withDescription("The length of time to wait for a runner to respond to a ping.")
+  )
+})
+
+/**
+ * @since 1.0.0
+ * @category Config
  */
-export const
+export const configFromEnv: Effect.Effect<Config["Type"], ConfigError> = configConfig.pipe(
+  Effect.withConfigProvider(
+    ConfigProvider.fromEnv().pipe(
+      ConfigProvider.constantCase
+    )
+  )
+)
 
 /**
  * @since 1.0.0
- * @category
+ * @category Config
  */
-export
+export const layerConfig = (config?: Partial<Config["Type"]>): Layer.Layer<Config> =>
+  Layer.succeed(Config, {
+    ...Config.defaults,
+    ...config
+  })
 
 /**
  * @since 1.0.0
- * @category
+ * @category Config
  */
-export const
+export const layerConfigFromEnv: Layer.Layer<Config, ConfigError> = Layer.effect(Config, configFromEnv)
+
+/**
+ * Represents a client which can be used to communicate with the
+ * `ShardManager`.
+ *
+ * @since 1.0.0
+ * @category Client
+ */
+export class ShardManagerClient
+  extends Context.Tag("@effect/cluster/ShardManager/ShardManagerClient")<ShardManagerClient, {
+    /**
+     * Register a new runner with the cluster.
+     */
+    readonly register: (address: RunnerAddress) => Effect.Effect<MachineId.MachineId>
+    /**
+     * Unregister a runner from the cluster.
+     */
+    readonly unregister: (address: RunnerAddress) => Effect.Effect<void>
+    /**
+     * Notify the cluster of an unhealthy runner.
+     */
+    readonly notifyUnhealthyRunner: (address: RunnerAddress) => Effect.Effect<void>
+    /**
+     * Get all shard assignments.
+     */
+    readonly getAssignments: Effect.Effect<
+      ReadonlyMap<ShardId, Option.Option<RunnerAddress>>
+    >
+    /**
+     * Get a stream of sharding events emit by the shard manager.
+     */
+    readonly shardingEvents: Effect.Effect<Mailbox.ReadonlyMailbox<ShardingEvent>, never, Scope>
+    /**
+     * Get the current time on the shard manager.
+     */
+    readonly getTime: Effect.Effect<number>
+  }>()
+{}
 
 /**
  * @since 1.0.0
  * @category models
  */
-export
-
-
-
-
-
-
-
-
-
-
-
+export const ShardingEventSchema = Schema.Union(
+  Schema.TaggedStruct("StreamStarted", {}),
+  Schema.TaggedStruct("ShardsAssigned", {
+    address: RunnerAddress,
+    shards: Schema.Array(ShardId)
+  }),
+  Schema.TaggedStruct("ShardsUnassigned", {
+    address: RunnerAddress,
+    shards: Schema.Array(ShardId)
+  }),
+  Schema.TaggedStruct("RunnerRegistered", {
+    address: RunnerAddress
+  }),
+  Schema.TaggedStruct("RunnerUnregistered", {
+    address: RunnerAddress
+  })
+) satisfies Schema.Schema<ShardingEvent, any>
+
+/**
+ * The messaging protocol for the `ShardManager`.
+ *
+ * @since 1.0.0
+ * @category Rpcs
+ */
+export class Rpcs extends RpcGroup.make(
+  Rpc.make("Register", {
+    payload: { runner: Runner },
+    success: MachineId.MachineId
+  }),
+  Rpc.make("Unregister", {
+    payload: { address: RunnerAddress }
+  }),
+  Rpc.make("NotifyUnhealthyRunner", {
+    payload: { address: RunnerAddress }
+  }),
+  Rpc.make("GetAssignments", {
+    success: Schema.ReadonlyMap({ key: ShardId, value: Schema.Option(RunnerAddress) })
+  }),
+  Rpc.make("ShardingEvents", {
+    success: ShardingEventSchema,
+    stream: true
+  }),
+  Rpc.make("GetTime", {
+    success: Schema.Number
+  })
+) {}
+
+/**
+ * @since 1.0.0
+ * @category models
+ */
+export type ShardingEvent = Data.TaggedEnum<{
+  StreamStarted: {}
+  ShardsAssigned: {
+    address: RunnerAddress
+    shards: ReadonlyArray<ShardId>
+  }
+  ShardsUnassigned: {
+    address: RunnerAddress
+    shards: ReadonlyArray<ShardId>
+  }
+  RunnerRegistered: { address: RunnerAddress }
+  RunnerUnregistered: { address: RunnerAddress }
+}>
+
+/**
+ * @since 1.0.0
+ * @category models
+ */
+export const ShardingEvent = Data.taggedEnum<ShardingEvent>()
+
+/**
+ * @since 1.0.0
+ * @category Client
+ */
+export const makeClientLocal = Effect.gen(function*() {
+  const runnerAddress = yield* ShardingConfig
+  const clock = yield* Effect.clock
+
+  const shards = new Map<ShardId, Option.Option<RunnerAddress>>()
+  for (let n = 1; n <= runnerAddress.numberOfShards; n++) {
+    shards.set(ShardId.make(n), runnerAddress.runnerAddress)
+  }
+
+  let machineId = 0
+
+  return ShardManagerClient.of({
+    register: () => Effect.sync(() => MachineId.make(++machineId)),
+    unregister: () => Effect.void,
+    notifyUnhealthyRunner: () => Effect.void,
+    getAssignments: Effect.succeed(shards),
+    shardingEvents: Effect.gen(function*() {
+      const mailbox = yield* Mailbox.make<ShardingEvent>()
+      yield* mailbox.offer(ShardingEvent.StreamStarted())
+      return mailbox
+    }),
+    getTime: clock.currentTimeMillis
+  })
+})
+
+/**
+ * @since 1.0.0
+ * @category Client
+ */
+export const makeClientRpc: Effect.Effect<
+  ShardManagerClient["Type"],
+  never,
+  ShardingConfig | RpcClient.Protocol | Scope
+> = Effect.gen(function*() {
+  const config = yield* ShardingConfig
+  const client = yield* RpcClient.make(Rpcs, {
+    spanPrefix: "ShardManagerClient",
+    disableTracing: true
+  })
+
+  return ShardManagerClient.of({
+    register: (address) => client.Register({ runner: Runner.make({ address, version: config.serverVersion }) }),
+    unregister: (address) => client.Unregister({ address }),
+    notifyUnhealthyRunner: (address) => client.NotifyUnhealthyRunner({ address }),
+    getAssignments: client.GetAssignments(),
+    shardingEvents: client.ShardingEvents({}, { asMailbox: true }),
+    getTime: client.GetTime()
+  })
+})
+
+/**
+ * @since 1.0.0
+ * @category Client
+ */
+export const layerClientLocal: Layer.Layer<
+  ShardManagerClient,
+  never,
+  ShardingConfig
+> = Layer.effect(ShardManagerClient, makeClientLocal)
+
+/**
+ * @since 1.0.0
+ * @category Client
+ */
+export const layerClientRpc: Layer.Layer<
+  ShardManagerClient,
+  never,
+  ShardingConfig | RpcClientProtocol
+> = Layer.scoped(ShardManagerClient, makeClientRpc).pipe(
+  Layer.provide(Layer.scoped(
+    RpcClient.Protocol,
+    Effect.gen(function*() {
+      const config = yield* ShardingConfig
+      const clientProtocol = yield* RpcClientProtocol
+      return yield* clientProtocol(config.shardManagerAddress)
+    })
+  ))
+)
+
+/**
+ * @since 1.0.0
+ * @category Constructors
+ */
+export const make = Effect.gen(function*() {
+  const storage = yield* ShardStorage
+  const runnersApi = yield* Runners
+  const runnerHealthApi = yield* RunnerHealth
+  const clock = yield* Effect.clock
+  const config = yield* Config
+  const shardingConfig = yield* ShardingConfig
+
+  const state = yield* Effect.orDie(State.fromStorage(shardingConfig.numberOfShards))
+  const scope = yield* Effect.scope
+  const events = yield* PubSub.unbounded<ShardingEvent>()
+
+  yield* Metric.incrementBy(ClusterMetrics.runners, MutableHashMap.size(state.runners))
+
+  for (const address of state.shards.values()) {
+    const metric = Option.isSome(address) ?
+      Metric.tagged(ClusterMetrics.assignedShards, "address", address.toString()) :
+      ClusterMetrics.unassignedShards
+    yield* Metric.increment(metric)
+  }
+
+  function withRetry<A, E, R>(effect: Effect.Effect<A, E, R>): Effect.Effect<void, never, R> {
+    return effect.pipe(
+      Effect.retry({
+        schedule: Schedule.spaced(config.persistRetryCount),
+        times: config.persistRetryCount
+      }),
+      Effect.ignore
+    )
+  }
+
+  const persistRunners = Effect.unsafeMakeSemaphore(1).withPermits(1)(withRetry(
+    Effect.suspend(() =>
+      storage.saveRunners(
+        Iterable.map(state.runners, ([address, runner]) => [address, runner.runner])
+      )
+    )
+  ))
+
+  const persistAssignments = Effect.unsafeMakeSemaphore(1).withPermits(1)(withRetry(
+    Effect.suspend(() => storage.saveAssignments(state.shards))
+  ))
+
+  const notifyUnhealthyRunner = Effect.fnUntraced(function*(address: RunnerAddress) {
+    if (!MutableHashMap.has(state.runners, address)) return
+
+    yield* Metric.increment(
+      Metric.tagged(ClusterMetrics.runnerHealthChecked, "runner_address", address.toString())
+    )
+
+    if (!(yield* runnerHealthApi.isAlive(address))) {
+      yield* Effect.logWarning(`Runner at address '${address.toString()}' is not alive`)
+      yield* unregister(address)
+    }
+  })
+
+  function updateShardsState(
+    shards: Iterable<ShardId>,
+    address: Option.Option<RunnerAddress>
+  ): Effect.Effect<void, RunnerNotRegistered> {
+    return Effect.suspend(() => {
+      if (Option.isSome(address) && !MutableHashMap.has(state.runners, address.value)) {
+        return Effect.fail(new RunnerNotRegistered({ address: address.value }))
+      }
+      for (const shardId of shards) {
+        if (!state.shards.has(shardId)) continue
+        state.shards.set(shardId, address)
+      }
+      return Effect.void
+    })
+  }
+
+  const getAssignments = Effect.sync(() => state.shards)
+
+  let machineId = 0
+  const register = Effect.fnUntraced(function*(runner: Runner) {
+    yield* Effect.logInfo(`Registering runner ${Runner.pretty(runner)}`)
+    const now = clock.unsafeCurrentTimeMillis()
+    MutableHashMap.set(state.runners, runner.address, RunnerWithMetadata({ runner, registeredAt: now }))
+
+    yield* Metric.increment(ClusterMetrics.runners)
+    yield* PubSub.publish(events, ShardingEvent.RunnerRegistered({ address: runner.address }))
+    if (state.unassignedShards.length > 0) {
+      yield* rebalance(false)
+    }
+    yield* Effect.forkIn(persistRunners, scope)
+    return MachineId.make(++machineId)
+  })
+
+  const unregister = Effect.fnUntraced(function*(address: RunnerAddress) {
+    if (!MutableHashMap.has(state.runners, address)) return
+
+    yield* Effect.logInfo("Unregistering runner at address:", address)
+    const unassignments = Arr.empty<ShardId>()
+    for (const [shard, runner] of state.shards) {
+      if (Option.isSome(runner) && Equal.equals(runner.value, address)) {
+        unassignments.push(shard)
+        state.shards.set(shard, Option.none())
+      }
+    }
+
+    MutableHashMap.remove(state.runners, address)
+    yield* Metric.incrementBy(ClusterMetrics.runners, -1)
+
+    if (unassignments.length > 0) {
+      yield* Metric.incrementBy(
+        Metric.tagged(ClusterMetrics.unassignedShards, "runner_address", address.toString()),
+        unassignments.length
+      )
+      yield* PubSub.publish(events, ShardingEvent.RunnerUnregistered({ address }))
+    }
+
+    yield* Effect.forkIn(persistRunners, scope)
+    yield* Effect.forkIn(rebalance(true), scope)
+  })
+
+  let rebalancing = false
+  let nextRebalanceImmediate = false
+  let rebalanceDeferred: Deferred.Deferred<void> | undefined
+  const rebalanceFibers = yield* FiberSet.make()
+
+  const rebalance = (immmediate: boolean): Effect.Effect<void> =>
+    Effect.withFiberRuntime<void>((fiber) => {
+      if (!rebalancing) {
+        rebalancing = true
+        return rebalanceLoop(immmediate)
+      }
+      if (immmediate) {
+        nextRebalanceImmediate = true
+      }
+      if (!rebalanceDeferred) {
+        rebalanceDeferred = Deferred.unsafeMake(fiber.id())
+      }
+      return Deferred.await(rebalanceDeferred)
+    })
+
+  const rebalanceLoop = (immediate?: boolean): Effect.Effect<void> =>
+    Effect.suspend(() => {
+      const deferred = rebalanceDeferred
+      rebalanceDeferred = undefined
+      if (!immediate) {
+        immediate = nextRebalanceImmediate
+        nextRebalanceImmediate = false
+      }
+      return runRebalance(immediate).pipe(
+        deferred ? Effect.intoDeferred(deferred) : identity,
+        Effect.onExit(() => {
+          if (!rebalanceDeferred) {
+            rebalancing = false
+            return Effect.void
+          }
+          return Effect.forkIn(rebalanceLoop(), scope)
+        })
+      )
+    })
+
+  const runRebalance = Effect.fn("ShardManager.rebalance")(function*(immediate: boolean) {
+    yield* Effect.annotateCurrentSpan("immmediate", immediate)
+
+    yield* Effect.sleep(config.rebalanceDebounce)
+
+    // Determine which shards to assign and unassign
+    const [assignments, unassignments, changes] = immediate || (state.unassignedShards.length > 0)
+      ? decideAssignmentsForUnassignedShards(state)
+      : decideAssignmentsForUnbalancedShards(state, config.rebalanceRate)
+
+    yield* Effect.logDebug(`Rebalancing shards (immediate = ${immediate})`)
+
+    if (MutableHashSet.size(changes) === 0) return
+
+    yield* Metric.increment(ClusterMetrics.rebalances)
+
+    // Ping runners first and remove unhealthy ones
+    const failedRunners = MutableHashSet.empty<RunnerAddress>()
+    for (const address of changes) {
+      yield* FiberSet.run(
+        rebalanceFibers,
+        runnersApi.ping(address).pipe(
+          Effect.timeout(config.runnerPingTimeout),
+          Effect.catchAll(() => {
+            MutableHashSet.add(failedRunners, address)
+            MutableHashMap.remove(assignments, address)
+            MutableHashMap.remove(unassignments, address)
+            return Effect.void
+          })
+        )
+      )
+    }
+    yield* FiberSet.awaitEmpty(rebalanceFibers)
+
+    const failedUnassignments = new Set<ShardId>()
+    for (const [address, shards] of unassignments) {
+      yield* FiberSet.run(
+        rebalanceFibers,
+        updateShardsState(shards, Option.none()).pipe(
+          Effect.matchEffect({
+            onFailure: () => {
+              MutableHashSet.add(failedRunners, address)
+              for (const shard of shards) {
+                failedUnassignments.add(shard)
+              }
+              // Remove failed runners from the assignments
+              MutableHashMap.remove(assignments, address)
+              return Effect.void
+            },
+            onSuccess: () => {
+              const shardCount = shards.size
+              return Metric.incrementBy(
+                Metric.tagged(ClusterMetrics.assignedShards, "runner_address", address.toString()),
+                -shardCount
+              ).pipe(
+                Effect.zipRight(Metric.incrementBy(ClusterMetrics.unassignedShards, shardCount)),
+                Effect.zipRight(
+                  PubSub.publish(events, ShardingEvent.ShardsUnassigned({ address, shards: Array.from(shards) }))
+                )
+              )
+            }
+          })
+        )
+      )
+    }
+    yield* FiberSet.awaitEmpty(rebalanceFibers)
+
+    // Remove failed shard unassignments from the assignments
+    MutableHashMap.forEach(assignments, (shards, address) => {
+      for (const shard of failedUnassignments) {
+        shards.delete(shard)
+      }
+      if (shards.size === 0) {
+        MutableHashMap.remove(assignments, address)
+      }
+    })
+
+    // Perform the assignments
+    for (const [address, shards] of assignments) {
+      yield* FiberSet.run(
+        rebalanceFibers,
+        updateShardsState(shards, Option.some(address)).pipe(
+          Effect.matchEffect({
+            onFailure: () => {
+              MutableHashSet.add(failedRunners, address)
+              return Effect.void
+            },
+            onSuccess: () => {
+              const shardCount = shards.size
+              return Metric.incrementBy(
+                Metric.tagged(ClusterMetrics.assignedShards, "runner_address", address.toString()),
+                -shardCount
+              ).pipe(
+                Effect.zipRight(Metric.incrementBy(ClusterMetrics.unassignedShards, -shardCount)),
+                Effect.zipRight(
+                  PubSub.publish(events, ShardingEvent.ShardsAssigned({ address, shards: Array.from(shards) }))
+                )
+              )
+            }
+          })
+        )
+      )
+    }
+    yield* FiberSet.awaitEmpty(rebalanceFibers)
+
+    const wereFailures = MutableHashSet.size(failedRunners) > 0
+    if (wereFailures) {
+      // Check if the failing runners are still reachable
+      yield* Effect.forEach(failedRunners, notifyUnhealthyRunner, { discard: true }).pipe(
+        Effect.forkIn(scope)
+      )
+      yield* Effect.logWarning("Failed to rebalance runners: ", failedRunners)
+    }
+
+    if (wereFailures && immediate) {
+      // Try rebalancing again later if there were any failures
+      yield* Clock.sleep(config.rebalanceRetryInterval).pipe(
+        Effect.zipRight(rebalance(immediate)),
+        Effect.forkIn(scope)
+      )
+    }
+
+    yield* persistAssignments
+  })
+
+  const checkRunnerHealth: Effect.Effect<void> = Effect.suspend(() =>
+    Effect.forEach(MutableHashMap.keys(state.runners), notifyUnhealthyRunner, {
+      concurrency: "inherit",
+      discard: true
+    })
+  ).pipe(
+    Effect.withConcurrency(4),
+    Effect.asVoid
+  )
+
+  yield* Effect.addFinalizer(() =>
+    persistAssignments.pipe(
+      Effect.catchAllCause((cause) => Effect.logWarning("Failed to persist assignments on shutdown", cause)),
+      Effect.zipRight(persistRunners.pipe(
+        Effect.catchAllCause((cause) => Effect.logWarning("Failed to persist runners on shutdown", cause))
+      ))
+    )
+  )
+
+  yield* Effect.forkIn(persistRunners, scope)
+
+  // Rebalance immediately if there are unassigned shards
+  yield* Effect.forkIn(
+    rebalance(state.unassignedShards.length > 0),
+    scope
+  )
+
+  // Start a regular cluster rebalance at the configured interval
+  yield* rebalance(false).pipe(
+    Effect.andThen(Effect.sleep(config.rebalanceInterval)),
+    Effect.forever,
+    Effect.forkIn(scope)
+  )
+
+  yield* checkRunnerHealth.pipe(
+    Effect.andThen(Effect.sleep(config.runnerHealthCheckInterval)),
+    Effect.forever,
+    Effect.forkIn(scope)
+  )
+
+  yield* Effect.gen(function*() {
+    const queue = yield* PubSub.subscribe(events)
+    while (true) {
+      yield* Effect.logInfo("Shard manager event:", yield* Queue.take(queue))
+    }
+  }).pipe(Effect.forkIn(scope))
+
+  yield* Effect.logInfo("Shard manager initialized")
+
+  return ShardManager.of({
+    getAssignments,
+    shardingEvents: PubSub.subscribe(events),
+    register,
+    unregister,
+    rebalance,
+    notifyUnhealthyRunner,
+    checkRunnerHealth
+  })
+})
+
+/**
+ * @since 1.0.0
+ * @category layer
+ */
+export const layer: Layer.Layer<
+  ShardManager,
+  never,
+  ShardStorage | RunnerHealth | Runners | Config | ShardingConfig
+> = Layer.scoped(ShardManager, make)
+
+/**
+ * @since 1.0.0
+ * @category Server
+ */
+export const layerServerHandlers = Rpcs.toLayer(Effect.gen(function*() {
+  const shardManager = yield* ShardManager
+  const clock = yield* Effect.clock
+  return {
+    Register: ({ runner }) => shardManager.register(runner),
+    Unregister: ({ address }) => shardManager.unregister(address),
+    NotifyUnhealthyRunner: ({ address }) => shardManager.notifyUnhealthyRunner(address),
+    GetAssignments: () => shardManager.getAssignments,
+    ShardingEvents: Effect.fnUntraced(function*() {
+      const queue = yield* shardManager.shardingEvents
+      const mailbox = yield* Mailbox.make<ShardingEvent>()
+
+      yield* mailbox.offer(ShardingEvent.StreamStarted())
+
+      yield* Queue.takeBetween(queue, 1, Number.MAX_SAFE_INTEGER).pipe(
+        Effect.flatMap((events) => mailbox.offerAll(events)),
+        Effect.forever,
+        Effect.forkScoped
+      )
+
+      return mailbox
+    }),
+    GetTime: () => clock.currentTimeMillis
+  }
+}))
 
 /**
  * @since 1.0.0
- * @category
+ * @category Server
  */
-export const
+export const layerServer: Layer.Layer<
+  never,
+  never,
+  ShardManager | RpcServer.Protocol
+> = RpcServer.layer(Rpcs, {
+  spanPrefix: "ShardManager",
+  disableTracing: true
+}).pipe(Layer.provide(layerServerHandlers))
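For orientation, here is a minimal sketch of how the new pieces compose, using only the layers visible in the diff above (`layer`, `layerConfig`, `layerClientLocal`). The override value and the local-client usage are illustrative; the remaining dependencies (`ShardStorage`, `RunnerHealth`, `Runners`, `ShardingConfig`) must be satisfied by constructors from their own modules, which are not shown here.

```ts
import * as Effect from "effect/Effect"
import * as Layer from "effect/Layer"
import * as ShardManager from "@effect/cluster/ShardManager"

// ShardManager.layer requires ShardStorage | RunnerHealth | Runners | Config |
// ShardingConfig. layerConfig() fills in Config from Config.defaults, and any
// field can be overridden (rebalanceInterval defaults to 20 seconds):
const ManagerLive = ShardManager.layer.pipe(
  Layer.provide(ShardManager.layerConfig({ rebalanceInterval: "30 seconds" }))
  // ...ShardStorage, RunnerHealth, Runners and ShardingConfig layers still
  // need to be provided from their respective modules.
)

// For single-process setups, layerClientLocal provides a ShardManagerClient
// that assigns every shard to the local runner (it only needs ShardingConfig):
const program = Effect.gen(function*() {
  const client = yield* ShardManager.ShardManagerClient
  const assignments = yield* client.getAssignments
  yield* Effect.log(`assigned shards: ${assignments.size}`)
})
```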