@effect/cluster 0.28.3 → 0.29.0
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- package/ClusterError/package.json +6 -0
- package/ClusterMetrics/package.json +6 -0
- package/ClusterSchema/package.json +6 -0
- package/DeliverAt/package.json +6 -0
- package/Entity/package.json +6 -0
- package/EntityAddress/package.json +6 -0
- package/EntityId/package.json +6 -0
- package/EntityType/package.json +6 -0
- package/Envelope/package.json +6 -0
- package/HttpCommon/package.json +6 -0
- package/HttpRunner/package.json +6 -0
- package/HttpShardManager/package.json +6 -0
- package/MachineId/package.json +6 -0
- package/MessageStorage/package.json +6 -0
- package/README.md +2 -2
- package/Reply/package.json +6 -0
- package/Runner/package.json +6 -0
- package/RunnerAddress/package.json +6 -0
- package/RunnerHealth/package.json +6 -0
- package/RunnerServer/package.json +6 -0
- package/Runners/package.json +6 -0
- package/ShardStorage/package.json +6 -0
- package/Singleton/package.json +6 -0
- package/SingletonAddress/package.json +6 -0
- package/Snowflake/package.json +6 -0
- package/SocketRunner/package.json +6 -0
- package/SocketShardManager/package.json +6 -0
- package/SqlMessageStorage/package.json +6 -0
- package/SqlShardStorage/package.json +6 -0
- package/SynchronizedClock/package.json +6 -0
- package/dist/cjs/ClusterError.js +180 -0
- package/dist/cjs/ClusterError.js.map +1 -0
- package/dist/cjs/ClusterMetrics.js +63 -0
- package/dist/cjs/ClusterMetrics.js.map +1 -0
- package/dist/cjs/{Pods.js → ClusterSchema.js} +10 -22
- package/dist/cjs/ClusterSchema.js.map +1 -0
- package/dist/cjs/DeliverAt.js +30 -0
- package/dist/cjs/DeliverAt.js.map +1 -0
- package/dist/cjs/Entity.js +187 -0
- package/dist/cjs/Entity.js.map +1 -0
- package/dist/cjs/EntityAddress.js +54 -0
- package/dist/cjs/EntityAddress.js.map +1 -0
- package/dist/cjs/{AtLeastOnce.js → EntityId.js} +6 -7
- package/dist/cjs/EntityId.js.map +1 -0
- package/dist/cjs/{ShardManagerClient.js → EntityType.js} +5 -16
- package/dist/cjs/EntityType.js.map +1 -0
- package/dist/cjs/Envelope.js +168 -0
- package/dist/cjs/Envelope.js.map +1 -0
- package/dist/cjs/HttpCommon.js +49 -0
- package/dist/cjs/HttpCommon.js.map +1 -0
- package/dist/cjs/HttpRunner.js +108 -0
- package/dist/cjs/HttpRunner.js.map +1 -0
- package/dist/cjs/HttpShardManager.js +140 -0
- package/dist/cjs/HttpShardManager.js.map +1 -0
- package/dist/cjs/{AtLeastOnceStorage.js → MachineId.js} +11 -9
- package/dist/cjs/MachineId.js.map +1 -0
- package/dist/cjs/Message.js +99 -18
- package/dist/cjs/Message.js.map +1 -1
- package/dist/cjs/MessageStorage.js +356 -0
- package/dist/cjs/MessageStorage.js.map +1 -0
- package/dist/cjs/Reply.js +200 -0
- package/dist/cjs/Reply.js.map +1 -0
- package/dist/cjs/Runner.js +79 -0
- package/dist/cjs/Runner.js.map +1 -0
- package/dist/cjs/RunnerAddress.js +63 -0
- package/dist/cjs/RunnerAddress.js.map +1 -0
- package/dist/cjs/RunnerHealth.js +68 -0
- package/dist/cjs/RunnerHealth.js.map +1 -0
- package/dist/cjs/RunnerServer.js +125 -0
- package/dist/cjs/RunnerServer.js.map +1 -0
- package/dist/cjs/Runners.js +344 -0
- package/dist/cjs/Runners.js.map +1 -0
- package/dist/cjs/ShardId.js +7 -46
- package/dist/cjs/ShardId.js.map +1 -1
- package/dist/cjs/ShardManager.js +493 -8
- package/dist/cjs/ShardManager.js.map +1 -1
- package/dist/cjs/ShardStorage.js +139 -0
- package/dist/cjs/ShardStorage.js.map +1 -0
- package/dist/cjs/Sharding.js +732 -88
- package/dist/cjs/Sharding.js.map +1 -1
- package/dist/cjs/ShardingConfig.js +85 -18
- package/dist/cjs/ShardingConfig.js.map +1 -1
- package/dist/cjs/ShardingRegistrationEvent.js +26 -32
- package/dist/cjs/ShardingRegistrationEvent.js.map +1 -1
- package/dist/cjs/{ManagerConfig.js → Singleton.js} +11 -20
- package/dist/cjs/Singleton.js.map +1 -0
- package/dist/cjs/SingletonAddress.js +50 -0
- package/dist/cjs/SingletonAddress.js.map +1 -0
- package/dist/cjs/Snowflake.js +133 -0
- package/dist/cjs/Snowflake.js.map +1 -0
- package/dist/cjs/SocketRunner.js +40 -0
- package/dist/cjs/SocketRunner.js.map +1 -0
- package/dist/cjs/SocketShardManager.js +33 -0
- package/dist/cjs/SocketShardManager.js.map +1 -0
- package/dist/cjs/SqlMessageStorage.js +668 -0
- package/dist/cjs/SqlMessageStorage.js.map +1 -0
- package/dist/cjs/SqlShardStorage.js +228 -0
- package/dist/cjs/SqlShardStorage.js.map +1 -0
- package/dist/cjs/SynchronizedClock.js +66 -0
- package/dist/cjs/SynchronizedClock.js.map +1 -0
- package/dist/cjs/index.js +57 -45
- package/dist/cjs/internal/entityManager.js +311 -143
- package/dist/cjs/internal/entityManager.js.map +1 -1
- package/dist/cjs/internal/entityReaper.js +47 -0
- package/dist/cjs/internal/entityReaper.js.map +1 -0
- package/dist/cjs/internal/hash.js +20 -0
- package/dist/cjs/internal/hash.js.map +1 -0
- package/dist/cjs/internal/interruptors.js +9 -0
- package/dist/cjs/internal/interruptors.js.map +1 -0
- package/dist/cjs/internal/resourceMap.js +88 -0
- package/dist/cjs/internal/resourceMap.js.map +1 -0
- package/dist/cjs/internal/resourceRef.js +92 -0
- package/dist/cjs/internal/resourceRef.js.map +1 -0
- package/dist/cjs/internal/shardManager.js +219 -235
- package/dist/cjs/internal/shardManager.js.map +1 -1
- package/dist/dts/ClusterError.d.ts +169 -0
- package/dist/dts/ClusterError.d.ts.map +1 -0
- package/dist/dts/ClusterMetrics.d.ts +50 -0
- package/dist/dts/ClusterMetrics.d.ts.map +1 -0
- package/dist/dts/ClusterSchema.d.ts +13 -0
- package/dist/dts/ClusterSchema.d.ts.map +1 -0
- package/dist/dts/DeliverAt.d.ts +27 -0
- package/dist/dts/DeliverAt.d.ts.map +1 -0
- package/dist/dts/Entity.d.ts +180 -0
- package/dist/dts/Entity.d.ts.map +1 -0
- package/dist/dts/EntityAddress.d.ts +55 -0
- package/dist/dts/EntityAddress.d.ts.map +1 -0
- package/dist/dts/EntityId.d.ts +15 -0
- package/dist/dts/EntityId.d.ts.map +1 -0
- package/dist/dts/EntityType.d.ts +15 -0
- package/dist/dts/EntityType.d.ts.map +1 -0
- package/dist/dts/Envelope.d.ts +252 -0
- package/dist/dts/Envelope.d.ts.map +1 -0
- package/dist/dts/HttpCommon.d.ts +25 -0
- package/dist/dts/HttpCommon.d.ts.map +1 -0
- package/dist/dts/HttpRunner.d.ts +76 -0
- package/dist/dts/HttpRunner.d.ts.map +1 -0
- package/dist/dts/HttpShardManager.d.ts +119 -0
- package/dist/dts/HttpShardManager.d.ts.map +1 -0
- package/dist/dts/MachineId.d.ts +20 -0
- package/dist/dts/MachineId.d.ts.map +1 -0
- package/dist/dts/Message.d.ts +91 -74
- package/dist/dts/Message.d.ts.map +1 -1
- package/dist/dts/MessageStorage.d.ts +336 -0
- package/dist/dts/MessageStorage.d.ts.map +1 -0
- package/dist/dts/Reply.d.ts +171 -0
- package/dist/dts/Reply.d.ts.map +1 -0
- package/dist/dts/Runner.d.ts +81 -0
- package/dist/dts/Runner.d.ts.map +1 -0
- package/dist/dts/RunnerAddress.d.ts +56 -0
- package/dist/dts/RunnerAddress.d.ts.map +1 -0
- package/dist/dts/RunnerHealth.d.ts +54 -0
- package/dist/dts/RunnerHealth.d.ts.map +1 -0
- package/dist/dts/RunnerServer.d.ts +44 -0
- package/dist/dts/RunnerServer.d.ts.map +1 -0
- package/dist/dts/Runners.d.ts +161 -0
- package/dist/dts/Runners.d.ts.map +1 -0
- package/dist/dts/ShardId.d.ts +5 -55
- package/dist/dts/ShardId.d.ts.map +1 -1
- package/dist/dts/ShardManager.d.ts +435 -23
- package/dist/dts/ShardManager.d.ts.map +1 -1
- package/dist/dts/ShardStorage.d.ts +200 -0
- package/dist/dts/ShardStorage.d.ts.map +1 -0
- package/dist/dts/Sharding.d.ts +109 -131
- package/dist/dts/Sharding.d.ts.map +1 -1
- package/dist/dts/ShardingConfig.d.ts +147 -44
- package/dist/dts/ShardingConfig.d.ts.map +1 -1
- package/dist/dts/ShardingRegistrationEvent.d.ts +38 -23
- package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -1
- package/dist/dts/Singleton.d.ts +13 -0
- package/dist/dts/Singleton.d.ts.map +1 -0
- package/dist/dts/SingletonAddress.d.ts +49 -0
- package/dist/dts/SingletonAddress.d.ts.map +1 -0
- package/dist/dts/Snowflake.d.ts +121 -0
- package/dist/dts/Snowflake.d.ts.map +1 -0
- package/dist/dts/SocketRunner.d.ts +22 -0
- package/dist/dts/SocketRunner.d.ts.map +1 -0
- package/dist/dts/SocketShardManager.d.ts +17 -0
- package/dist/dts/SocketShardManager.d.ts.map +1 -0
- package/dist/dts/SqlMessageStorage.d.ts +43 -0
- package/dist/dts/SqlMessageStorage.d.ts.map +1 -0
- package/dist/dts/SqlShardStorage.d.ts +38 -0
- package/dist/dts/SqlShardStorage.d.ts.map +1 -0
- package/dist/dts/SynchronizedClock.d.ts +19 -0
- package/dist/dts/SynchronizedClock.d.ts.map +1 -0
- package/dist/dts/index.d.ts +48 -24
- package/dist/dts/index.d.ts.map +1 -1
- package/dist/dts/internal/entityReaper.d.ts +2 -0
- package/dist/dts/internal/entityReaper.d.ts.map +1 -0
- package/dist/dts/internal/hash.d.ts +2 -0
- package/dist/dts/internal/hash.d.ts.map +1 -0
- package/dist/dts/internal/interruptors.d.ts +2 -0
- package/dist/dts/internal/interruptors.d.ts.map +1 -0
- package/dist/dts/internal/resourceMap.d.ts +22 -0
- package/dist/dts/internal/resourceMap.d.ts.map +1 -0
- package/dist/dts/internal/resourceRef.d.ts +25 -0
- package/dist/dts/internal/resourceRef.d.ts.map +1 -0
- package/dist/dts/internal/shardManager.d.ts +1 -11
- package/dist/dts/internal/shardManager.d.ts.map +1 -1
- package/dist/esm/ClusterError.js +164 -0
- package/dist/esm/ClusterError.js.map +1 -0
- package/dist/esm/ClusterMetrics.js +54 -0
- package/dist/esm/ClusterMetrics.js.map +1 -0
- package/dist/esm/ClusterSchema.js +13 -0
- package/dist/esm/ClusterSchema.js.map +1 -0
- package/dist/esm/DeliverAt.js +22 -0
- package/dist/esm/DeliverAt.js.map +1 -0
- package/dist/esm/Entity.js +173 -0
- package/dist/esm/Entity.js.map +1 -0
- package/dist/esm/EntityAddress.js +44 -0
- package/dist/esm/EntityAddress.js.map +1 -0
- package/dist/esm/EntityId.js +10 -0
- package/dist/esm/EntityId.js.map +1 -0
- package/dist/esm/EntityType.js +10 -0
- package/dist/esm/EntityType.js.map +1 -0
- package/dist/esm/Envelope.js +154 -0
- package/dist/esm/Envelope.js.map +1 -0
- package/dist/esm/HttpCommon.js +38 -0
- package/dist/esm/HttpCommon.js.map +1 -0
- package/dist/esm/HttpRunner.js +98 -0
- package/dist/esm/HttpRunner.js.map +1 -0
- package/dist/esm/HttpShardManager.js +128 -0
- package/dist/esm/HttpShardManager.js.map +1 -0
- package/dist/esm/MachineId.js +17 -0
- package/dist/esm/MachineId.js.map +1 -0
- package/dist/esm/Message.js +88 -17
- package/dist/esm/Message.js.map +1 -1
- package/dist/esm/MessageStorage.js +345 -0
- package/dist/esm/MessageStorage.js.map +1 -0
- package/dist/esm/Reply.js +184 -0
- package/dist/esm/Reply.js.map +1 -0
- package/dist/esm/Runner.js +68 -0
- package/dist/esm/Runner.js.map +1 -0
- package/dist/esm/RunnerAddress.js +52 -0
- package/dist/esm/RunnerAddress.js.map +1 -0
- package/dist/esm/RunnerHealth.js +58 -0
- package/dist/esm/RunnerHealth.js.map +1 -0
- package/dist/esm/RunnerServer.js +116 -0
- package/dist/esm/RunnerServer.js.map +1 -0
- package/dist/esm/Runners.js +332 -0
- package/dist/esm/Runners.js.map +1 -0
- package/dist/esm/ShardId.js +5 -42
- package/dist/esm/ShardId.js.map +1 -1
- package/dist/esm/ShardManager.js +486 -7
- package/dist/esm/ShardManager.js.map +1 -1
- package/dist/esm/ShardStorage.js +129 -0
- package/dist/esm/ShardStorage.js.map +1 -0
- package/dist/esm/Sharding.js +730 -87
- package/dist/esm/Sharding.js.map +1 -1
- package/dist/esm/ShardingConfig.js +80 -17
- package/dist/esm/ShardingConfig.js.map +1 -1
- package/dist/esm/ShardingRegistrationEvent.js +19 -29
- package/dist/esm/ShardingRegistrationEvent.js.map +1 -1
- package/dist/esm/Singleton.js +15 -0
- package/dist/esm/Singleton.js.map +1 -0
- package/dist/esm/SingletonAddress.js +40 -0
- package/dist/esm/SingletonAddress.js.map +1 -0
- package/dist/esm/Snowflake.js +117 -0
- package/dist/esm/Snowflake.js.map +1 -0
- package/dist/esm/SocketRunner.js +31 -0
- package/dist/esm/SocketRunner.js.map +1 -0
- package/dist/esm/SocketShardManager.js +24 -0
- package/dist/esm/SocketShardManager.js.map +1 -0
- package/dist/esm/SqlMessageStorage.js +658 -0
- package/dist/esm/SqlMessageStorage.js.map +1 -0
- package/dist/esm/SqlShardStorage.js +218 -0
- package/dist/esm/SqlShardStorage.js.map +1 -0
- package/dist/esm/SynchronizedClock.js +57 -0
- package/dist/esm/SynchronizedClock.js.map +1 -0
- package/dist/esm/index.js +48 -24
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/internal/entityManager.js +311 -142
- package/dist/esm/internal/entityManager.js.map +1 -1
- package/dist/esm/internal/entityReaper.js +38 -0
- package/dist/esm/internal/entityReaper.js.map +1 -0
- package/dist/esm/internal/hash.js +12 -0
- package/dist/esm/internal/hash.js.map +1 -0
- package/dist/esm/internal/interruptors.js +3 -0
- package/dist/esm/internal/interruptors.js.map +1 -0
- package/dist/esm/internal/resourceMap.js +79 -0
- package/dist/esm/internal/resourceMap.js.map +1 -0
- package/dist/esm/internal/resourceRef.js +83 -0
- package/dist/esm/internal/resourceRef.js.map +1 -0
- package/dist/esm/internal/shardManager.js +217 -233
- package/dist/esm/internal/shardManager.js.map +1 -1
- package/package.json +212 -154
- package/src/ClusterError.ts +193 -0
- package/src/ClusterMetrics.ts +62 -0
- package/src/ClusterSchema.ts +13 -0
- package/src/DeliverAt.ts +36 -0
- package/src/Entity.ts +438 -0
- package/src/EntityAddress.ts +55 -0
- package/src/EntityId.ts +16 -0
- package/src/EntityType.ts +16 -0
- package/src/Envelope.ts +352 -0
- package/src/HttpCommon.ts +73 -0
- package/src/HttpRunner.ts +196 -0
- package/src/HttpShardManager.ts +273 -0
- package/src/MachineId.ts +27 -0
- package/src/Message.ts +143 -92
- package/src/MessageStorage.ts +697 -0
- package/src/Reply.ts +295 -0
- package/src/Runner.ts +84 -0
- package/src/RunnerAddress.ts +61 -0
- package/src/RunnerHealth.ts +87 -0
- package/src/RunnerServer.ts +156 -0
- package/src/Runners.ts +533 -0
- package/src/ShardId.ts +10 -62
- package/src/ShardManager.ts +780 -29
- package/src/ShardStorage.ts +289 -0
- package/src/Sharding.ts +1060 -183
- package/src/ShardingConfig.ts +186 -45
- package/src/ShardingRegistrationEvent.ts +38 -39
- package/src/Singleton.ts +20 -0
- package/src/SingletonAddress.ts +47 -0
- package/src/Snowflake.ts +194 -0
- package/src/SocketRunner.ts +59 -0
- package/src/SocketShardManager.ts +48 -0
- package/src/SqlMessageStorage.ts +833 -0
- package/src/SqlShardStorage.ts +292 -0
- package/src/SynchronizedClock.ts +82 -0
- package/src/index.ts +54 -24
- package/src/internal/entityManager.ts +464 -361
- package/src/internal/entityReaper.ts +53 -0
- package/src/internal/hash.ts +11 -0
- package/src/internal/interruptors.ts +4 -0
- package/src/internal/resourceMap.ts +89 -0
- package/src/internal/resourceRef.ts +88 -0
- package/src/internal/shardManager.ts +273 -546
- package/AtLeastOnce/package.json +0 -6
- package/AtLeastOnceStorage/package.json +0 -6
- package/Broadcaster/package.json +0 -6
- package/ManagerConfig/package.json +0 -6
- package/MessageState/package.json +0 -6
- package/Messenger/package.json +0 -6
- package/Pod/package.json +0 -6
- package/PodAddress/package.json +0 -6
- package/Pods/package.json +0 -6
- package/PodsHealth/package.json +0 -6
- package/PoisonPill/package.json +0 -6
- package/RecipientAddress/package.json +0 -6
- package/RecipientBehaviour/package.json +0 -6
- package/RecipientBehaviourContext/package.json +0 -6
- package/RecipientType/package.json +0 -6
- package/Serialization/package.json +0 -6
- package/SerializedEnvelope/package.json +0 -6
- package/SerializedMessage/package.json +0 -6
- package/ShardManagerClient/package.json +0 -6
- package/ShardingEvent/package.json +0 -6
- package/ShardingException/package.json +0 -6
- package/Storage/package.json +0 -6
- package/dist/cjs/AtLeastOnce.js.map +0 -1
- package/dist/cjs/AtLeastOnceStorage.js.map +0 -1
- package/dist/cjs/Broadcaster.js +0 -6
- package/dist/cjs/Broadcaster.js.map +0 -1
- package/dist/cjs/ManagerConfig.js.map +0 -1
- package/dist/cjs/MessageState.js +0 -55
- package/dist/cjs/MessageState.js.map +0 -1
- package/dist/cjs/Messenger.js +0 -6
- package/dist/cjs/Messenger.js.map +0 -1
- package/dist/cjs/Pod.js +0 -78
- package/dist/cjs/Pod.js.map +0 -1
- package/dist/cjs/PodAddress.js +0 -77
- package/dist/cjs/PodAddress.js.map +0 -1
- package/dist/cjs/Pods.js.map +0 -1
- package/dist/cjs/PodsHealth.js +0 -41
- package/dist/cjs/PodsHealth.js.map +0 -1
- package/dist/cjs/PoisonPill.js +0 -78
- package/dist/cjs/PoisonPill.js.map +0 -1
- package/dist/cjs/RecipientAddress.js +0 -79
- package/dist/cjs/RecipientAddress.js.map +0 -1
- package/dist/cjs/RecipientBehaviour.js +0 -38
- package/dist/cjs/RecipientBehaviour.js.map +0 -1
- package/dist/cjs/RecipientBehaviourContext.js +0 -64
- package/dist/cjs/RecipientBehaviourContext.js.map +0 -1
- package/dist/cjs/RecipientType.js +0 -123
- package/dist/cjs/RecipientType.js.map +0 -1
- package/dist/cjs/Serialization.js +0 -32
- package/dist/cjs/Serialization.js.map +0 -1
- package/dist/cjs/SerializedEnvelope.js +0 -87
- package/dist/cjs/SerializedEnvelope.js.map +0 -1
- package/dist/cjs/SerializedMessage.js +0 -64
- package/dist/cjs/SerializedMessage.js.map +0 -1
- package/dist/cjs/ShardManagerClient.js.map +0 -1
- package/dist/cjs/ShardingEvent.js +0 -72
- package/dist/cjs/ShardingEvent.js.map +0 -1
- package/dist/cjs/ShardingException.js +0 -107
- package/dist/cjs/ShardingException.js.map +0 -1
- package/dist/cjs/Storage.js +0 -40
- package/dist/cjs/Storage.js.map +0 -1
- package/dist/cjs/internal/atLeastOnce.js +0 -35
- package/dist/cjs/internal/atLeastOnce.js.map +0 -1
- package/dist/cjs/internal/atLeastOnceStorage.js +0 -163
- package/dist/cjs/internal/atLeastOnceStorage.js.map +0 -1
- package/dist/cjs/internal/entityState.js +0 -47
- package/dist/cjs/internal/entityState.js.map +0 -1
- package/dist/cjs/internal/managerConfig.js +0 -46
- package/dist/cjs/internal/managerConfig.js.map +0 -1
- package/dist/cjs/internal/message.js +0 -48
- package/dist/cjs/internal/message.js.map +0 -1
- package/dist/cjs/internal/messageState.js +0 -79
- package/dist/cjs/internal/messageState.js.map +0 -1
- package/dist/cjs/internal/podWithMetadata.js +0 -54
- package/dist/cjs/internal/podWithMetadata.js.map +0 -1
- package/dist/cjs/internal/pods.js +0 -35
- package/dist/cjs/internal/pods.js.map +0 -1
- package/dist/cjs/internal/podsHealth.js +0 -40
- package/dist/cjs/internal/podsHealth.js.map +0 -1
- package/dist/cjs/internal/recipientBehaviour.js +0 -52
- package/dist/cjs/internal/recipientBehaviour.js.map +0 -1
- package/dist/cjs/internal/recipientBehaviourContext.js +0 -36
- package/dist/cjs/internal/recipientBehaviourContext.js.map +0 -1
- package/dist/cjs/internal/serialization.js +0 -48
- package/dist/cjs/internal/serialization.js.map +0 -1
- package/dist/cjs/internal/shardManagerClient.js +0 -48
- package/dist/cjs/internal/shardManagerClient.js.map +0 -1
- package/dist/cjs/internal/shardManagerState.js +0 -44
- package/dist/cjs/internal/shardManagerState.js.map +0 -1
- package/dist/cjs/internal/sharding.js +0 -306
- package/dist/cjs/internal/sharding.js.map +0 -1
- package/dist/cjs/internal/shardingConfig.js +0 -56
- package/dist/cjs/internal/shardingConfig.js.map +0 -1
- package/dist/cjs/internal/storage.js +0 -52
- package/dist/cjs/internal/storage.js.map +0 -1
- package/dist/cjs/internal/utils.js +0 -69
- package/dist/cjs/internal/utils.js.map +0 -1
- package/dist/dts/AtLeastOnce.d.ts +0 -20
- package/dist/dts/AtLeastOnce.d.ts.map +0 -1
- package/dist/dts/AtLeastOnceStorage.d.ts +0 -75
- package/dist/dts/AtLeastOnceStorage.d.ts.map +0 -1
- package/dist/dts/Broadcaster.d.ts +0 -32
- package/dist/dts/Broadcaster.d.ts.map +0 -1
- package/dist/dts/ManagerConfig.d.ts +0 -61
- package/dist/dts/ManagerConfig.d.ts.map +0 -1
- package/dist/dts/MessageState.d.ts +0 -107
- package/dist/dts/MessageState.d.ts.map +0 -1
- package/dist/dts/Messenger.d.ts +0 -32
- package/dist/dts/Messenger.d.ts.map +0 -1
- package/dist/dts/Pod.d.ts +0 -81
- package/dist/dts/Pod.d.ts.map +0 -1
- package/dist/dts/PodAddress.d.ts +0 -80
- package/dist/dts/PodAddress.d.ts.map +0 -1
- package/dist/dts/Pods.d.ts +0 -78
- package/dist/dts/Pods.d.ts.map +0 -1
- package/dist/dts/PodsHealth.d.ts +0 -66
- package/dist/dts/PodsHealth.d.ts.map +0 -1
- package/dist/dts/PoisonPill.d.ts +0 -78
- package/dist/dts/PoisonPill.d.ts.map +0 -1
- package/dist/dts/RecipientAddress.d.ts +0 -57
- package/dist/dts/RecipientAddress.d.ts.map +0 -1
- package/dist/dts/RecipientBehaviour.d.ts +0 -72
- package/dist/dts/RecipientBehaviour.d.ts.map +0 -1
- package/dist/dts/RecipientBehaviourContext.d.ts +0 -83
- package/dist/dts/RecipientBehaviourContext.d.ts.map +0 -1
- package/dist/dts/RecipientType.d.ts +0 -93
- package/dist/dts/RecipientType.d.ts.map +0 -1
- package/dist/dts/Serialization.d.ts +0 -58
- package/dist/dts/Serialization.d.ts.map +0 -1
- package/dist/dts/SerializedEnvelope.d.ts +0 -86
- package/dist/dts/SerializedEnvelope.d.ts.map +0 -1
- package/dist/dts/SerializedMessage.d.ts +0 -66
- package/dist/dts/SerializedMessage.d.ts.map +0 -1
- package/dist/dts/ShardManagerClient.d.ts +0 -50
- package/dist/dts/ShardManagerClient.d.ts.map +0 -1
- package/dist/dts/ShardingEvent.d.ts +0 -90
- package/dist/dts/ShardingEvent.d.ts.map +0 -1
- package/dist/dts/ShardingException.d.ts +0 -125
- package/dist/dts/ShardingException.d.ts.map +0 -1
- package/dist/dts/Storage.d.ts +0 -78
- package/dist/dts/Storage.d.ts.map +0 -1
- package/dist/dts/internal/atLeastOnce.d.ts +0 -2
- package/dist/dts/internal/atLeastOnce.d.ts.map +0 -1
- package/dist/dts/internal/atLeastOnceStorage.d.ts +0 -2
- package/dist/dts/internal/atLeastOnceStorage.d.ts.map +0 -1
- package/dist/dts/internal/entityState.d.ts +0 -21
- package/dist/dts/internal/entityState.d.ts.map +0 -1
- package/dist/dts/internal/managerConfig.d.ts +0 -2
- package/dist/dts/internal/managerConfig.d.ts.map +0 -1
- package/dist/dts/internal/message.d.ts +0 -9
- package/dist/dts/internal/message.d.ts.map +0 -1
- package/dist/dts/internal/messageState.d.ts +0 -2
- package/dist/dts/internal/messageState.d.ts.map +0 -1
- package/dist/dts/internal/podWithMetadata.d.ts +0 -2
- package/dist/dts/internal/podWithMetadata.d.ts.map +0 -1
- package/dist/dts/internal/pods.d.ts +0 -2
- package/dist/dts/internal/pods.d.ts.map +0 -1
- package/dist/dts/internal/podsHealth.d.ts +0 -2
- package/dist/dts/internal/podsHealth.d.ts.map +0 -1
- package/dist/dts/internal/recipientBehaviour.d.ts +0 -2
- package/dist/dts/internal/recipientBehaviour.d.ts.map +0 -1
- package/dist/dts/internal/recipientBehaviourContext.d.ts +0 -2
- package/dist/dts/internal/recipientBehaviourContext.d.ts.map +0 -1
- package/dist/dts/internal/serialization.d.ts +0 -2
- package/dist/dts/internal/serialization.d.ts.map +0 -1
- package/dist/dts/internal/shardManagerClient.d.ts +0 -2
- package/dist/dts/internal/shardManagerClient.d.ts.map +0 -1
- package/dist/dts/internal/shardManagerState.d.ts +0 -26
- package/dist/dts/internal/shardManagerState.d.ts.map +0 -1
- package/dist/dts/internal/sharding.d.ts +0 -2
- package/dist/dts/internal/sharding.d.ts.map +0 -1
- package/dist/dts/internal/shardingConfig.d.ts +0 -2
- package/dist/dts/internal/shardingConfig.d.ts.map +0 -1
- package/dist/dts/internal/storage.d.ts +0 -2
- package/dist/dts/internal/storage.d.ts.map +0 -1
- package/dist/dts/internal/utils.d.ts +0 -2
- package/dist/dts/internal/utils.d.ts.map +0 -1
- package/dist/esm/AtLeastOnce.js +0 -12
- package/dist/esm/AtLeastOnce.js.map +0 -1
- package/dist/esm/AtLeastOnceStorage.js +0 -17
- package/dist/esm/AtLeastOnceStorage.js.map +0 -1
- package/dist/esm/Broadcaster.js +0 -2
- package/dist/esm/Broadcaster.js.map +0 -1
- package/dist/esm/ManagerConfig.js +0 -26
- package/dist/esm/ManagerConfig.js.map +0 -1
- package/dist/esm/MessageState.js +0 -47
- package/dist/esm/MessageState.js.map +0 -1
- package/dist/esm/Messenger.js +0 -2
- package/dist/esm/Messenger.js.map +0 -1
- package/dist/esm/Pod.js +0 -65
- package/dist/esm/Pod.js.map +0 -1
- package/dist/esm/PodAddress.js +0 -64
- package/dist/esm/PodAddress.js.map +0 -1
- package/dist/esm/Pods.js +0 -27
- package/dist/esm/Pods.js.map +0 -1
- package/dist/esm/PodsHealth.js +0 -33
- package/dist/esm/PodsHealth.js.map +0 -1
- package/dist/esm/PoisonPill.js +0 -65
- package/dist/esm/PoisonPill.js.map +0 -1
- package/dist/esm/RecipientAddress.js +0 -67
- package/dist/esm/RecipientAddress.js.map +0 -1
- package/dist/esm/RecipientBehaviour.js +0 -30
- package/dist/esm/RecipientBehaviour.js.map +0 -1
- package/dist/esm/RecipientBehaviourContext.js +0 -56
- package/dist/esm/RecipientBehaviourContext.js.map +0 -1
- package/dist/esm/RecipientType.js +0 -108
- package/dist/esm/RecipientType.js.map +0 -1
- package/dist/esm/Serialization.js +0 -24
- package/dist/esm/Serialization.js.map +0 -1
- package/dist/esm/SerializedEnvelope.js +0 -74
- package/dist/esm/SerializedEnvelope.js.map +0 -1
- package/dist/esm/SerializedMessage.js +0 -51
- package/dist/esm/SerializedMessage.js.map +0 -1
- package/dist/esm/ShardManagerClient.js +0 -22
- package/dist/esm/ShardManagerClient.js.map +0 -1
- package/dist/esm/ShardingEvent.js +0 -62
- package/dist/esm/ShardingEvent.js.map +0 -1
- package/dist/esm/ShardingException.js +0 -91
- package/dist/esm/ShardingException.js.map +0 -1
- package/dist/esm/Storage.js +0 -32
- package/dist/esm/Storage.js.map +0 -1
- package/dist/esm/internal/atLeastOnce.js +0 -26
- package/dist/esm/internal/atLeastOnce.js.map +0 -1
- package/dist/esm/internal/atLeastOnceStorage.js +0 -154
- package/dist/esm/internal/atLeastOnceStorage.js.map +0 -1
- package/dist/esm/internal/entityState.js +0 -35
- package/dist/esm/internal/entityState.js.map +0 -1
- package/dist/esm/internal/managerConfig.js +0 -38
- package/dist/esm/internal/managerConfig.js.map +0 -1
- package/dist/esm/internal/message.js +0 -35
- package/dist/esm/internal/message.js.map +0 -1
- package/dist/esm/internal/messageState.js +0 -66
- package/dist/esm/internal/messageState.js.map +0 -1
- package/dist/esm/internal/podWithMetadata.js +0 -41
- package/dist/esm/internal/podWithMetadata.js.map +0 -1
- package/dist/esm/internal/pods.js +0 -25
- package/dist/esm/internal/pods.js.map +0 -1
- package/dist/esm/internal/podsHealth.js +0 -30
- package/dist/esm/internal/podsHealth.js.map +0 -1
- package/dist/esm/internal/recipientBehaviour.js +0 -42
- package/dist/esm/internal/recipientBehaviour.js.map +0 -1
- package/dist/esm/internal/recipientBehaviourContext.js +0 -26
- package/dist/esm/internal/recipientBehaviourContext.js.map +0 -1
- package/dist/esm/internal/serialization.js +0 -38
- package/dist/esm/internal/serialization.js.map +0 -1
- package/dist/esm/internal/shardManagerClient.js +0 -38
- package/dist/esm/internal/shardManagerClient.js.map +0 -1
- package/dist/esm/internal/shardManagerState.js +0 -36
- package/dist/esm/internal/shardManagerState.js.map +0 -1
- package/dist/esm/internal/sharding.js +0 -288
- package/dist/esm/internal/sharding.js.map +0 -1
- package/dist/esm/internal/shardingConfig.js +0 -47
- package/dist/esm/internal/shardingConfig.js.map +0 -1
- package/dist/esm/internal/storage.js +0 -42
- package/dist/esm/internal/storage.js.map +0 -1
- package/dist/esm/internal/utils.js +0 -56
- package/dist/esm/internal/utils.js.map +0 -1
- package/src/AtLeastOnce.ts +0 -28
- package/src/AtLeastOnceStorage.ts +0 -96
- package/src/Broadcaster.ts +0 -48
- package/src/ManagerConfig.ts +0 -67
- package/src/MessageState.ts +0 -126
- package/src/Messenger.ts +0 -40
- package/src/Pod.ts +0 -95
- package/src/PodAddress.ts +0 -94
- package/src/Pods.ts +0 -100
- package/src/PodsHealth.ts +0 -74
- package/src/PoisonPill.ts +0 -105
- package/src/RecipientAddress.ts +0 -72
- package/src/RecipientBehaviour.ts +0 -108
- package/src/RecipientBehaviourContext.ts +0 -101
- package/src/RecipientType.ts +0 -134
- package/src/Serialization.ts +0 -72
- package/src/SerializedEnvelope.ts +0 -108
- package/src/SerializedMessage.ts +0 -82
- package/src/ShardManagerClient.ts +0 -57
- package/src/ShardingEvent.ts +0 -121
- package/src/ShardingException.ts +0 -151
- package/src/Storage.ts +0 -92
- package/src/internal/atLeastOnce.ts +0 -59
- package/src/internal/atLeastOnceStorage.ts +0 -218
- package/src/internal/entityState.ts +0 -64
- package/src/internal/managerConfig.ts +0 -84
- package/src/internal/message.ts +0 -63
- package/src/internal/messageState.ts +0 -98
- package/src/internal/podWithMetadata.ts +0 -72
- package/src/internal/pods.ts +0 -29
- package/src/internal/podsHealth.ts +0 -39
- package/src/internal/recipientBehaviour.ts +0 -133
- package/src/internal/recipientBehaviourContext.ts +0 -70
- package/src/internal/serialization.ts +0 -63
- package/src/internal/shardManagerClient.ts +0 -49
- package/src/internal/shardManagerState.ts +0 -80
- package/src/internal/sharding.ts +0 -789
- package/src/internal/shardingConfig.ts +0 -97
- package/src/internal/storage.ts +0 -60
- package/src/internal/utils.ts +0 -54
package/src/internal/entityManager.ts
@@ -1,402 +1,505 @@
-import * as
+import type * as Rpc from "@effect/rpc/Rpc"
+import { RequestId } from "@effect/rpc/RpcMessage"
+import * as RpcServer from "@effect/rpc/RpcServer"
+import * as Arr from "effect/Array"
+import * as Cause from "effect/Cause"
+import * as Context from "effect/Context"
 import * as Duration from "effect/Duration"
+import type { DurationInput } from "effect/Duration"
 import * as Effect from "effect/Effect"
 import * as Exit from "effect/Exit"
-import * as
-import {
+import * as FiberRef from "effect/FiberRef"
+import { identity } from "effect/Function"
 import * as HashMap from "effect/HashMap"
-import * as
+import * as Metric from "effect/Metric"
 import * as Option from "effect/Option"
+import * as Schedule from "effect/Schedule"
+import * as Schema from "effect/Schema"
 import * as Scope from "effect/Scope"
-import
+import { AlreadyProcessingMessage, EntityNotManagedByRunner, MailboxFull, MalformedMessage } from "../ClusterError.js"
+import * as ClusterMetrics from "../ClusterMetrics.js"
+import { Persisted } from "../ClusterSchema.js"
+import type { Entity, HandlersFrom } from "../Entity.js"
+import { CurrentAddress, CurrentRunnerAddress, Request } from "../Entity.js"
+import type { EntityAddress } from "../EntityAddress.js"
+import type { EntityId } from "../EntityId.js"
+import * as Envelope from "../Envelope.js"
 import * as Message from "../Message.js"
-import * as
-import
-import type
-import
-import type
-import
-import
-import
-import
-import
-import
-import * as ShardingException from "../ShardingException.js"
-import * as EntityState from "./entityState.js"
+import * as MessageStorage from "../MessageStorage.js"
+import * as Reply from "../Reply.js"
+import type { RunnerAddress } from "../RunnerAddress.js"
+import type { ShardId } from "../ShardId.js"
+import type { Sharding } from "../Sharding.js"
+import { ShardingConfig } from "../ShardingConfig.js"
+import * as Snowflake from "../Snowflake.js"
+import { EntityReaper } from "./entityReaper.js"
+import { internalInterruptors } from "./interruptors.js"
+import { ResourceMap } from "./resourceMap.js"
+import { ResourceRef } from "./resourceRef.js"

 /** @internal */
-
+export interface EntityManager {
+  readonly sendLocal: <R extends Rpc.Any>(
+    message: Message.IncomingLocal<R>
+  ) => Effect.Effect<void, EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage>

-
-
-
-  )
+  readonly send: (
+    message: Message.Incoming<any>
+  ) => Effect.Effect<void, EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage>

-
-export type EntityManagerTypeId = typeof EntityManagerTypeId
+  readonly isProcessingFor: (message: Message.Incoming<any>) => boolean

+  readonly interruptShard: (shardId: ShardId) => Effect.Effect<void>
+}
+
+// Represents the entities managed by this entity manager
 /** @internal */
-export
-  readonly
-
-
-
-
-
-
-
-
-
-    | ShardingException.SerializationException
-  >
-
-  /** @internal */
-  readonly terminateEntitiesOnShards: (
-    shards: HashSet.HashSet<ShardId.ShardId>
-  ) => Effect.Effect<void>
-
-  /** @internal */
-  readonly terminateAllEntities: Effect.Effect<void>
+export type EntityState = {
+  readonly address: EntityAddress
+  readonly mailboxGauge: Metric.Metric.Gauge<bigint>
+  readonly activeRequests: Map<bigint, {
+    readonly rpc: Rpc.AnyWithProps
+    readonly message: Message.IncomingRequestLocal<any>
+    lastSentChunk: Option.Option<Reply.Chunk<Rpc.Any>>
+    sequence: number
+  }>
+  lastActiveCheck: number
+  write: RpcServer.RpcServer<any>["write"]
 }

 /** @internal */
-export
-
-
-
-
-
-
+export const make = Effect.fnUntraced(function*<
+  Rpcs extends Rpc.Any,
+  Handlers extends HandlersFrom<Rpcs>,
+  RX
+>(
+  entity: Entity<Rpcs>,
+  buildHandlers: Effect.Effect<Handlers, never, RX>,
+  options: {
+    readonly sharding: Sharding["Type"]
+    readonly storage: MessageStorage.MessageStorage["Type"]
+    readonly runnerAddress: RunnerAddress
+    readonly maxIdleTime?: DurationInput | undefined
+    readonly concurrency?: number | "unbounded" | undefined
+    readonly mailboxCapacity?: number | "unbounded" | undefined
+  }
 ) {
-
-
-
-
-
-
-
-  >
-  >(HashMap.empty())
-
-  function startExpirationFiber(recipientAddress: RecipientAddress.RecipientAddress) {
-    const maxIdleMillis = pipe(
-      entityMaxIdle,
-      Option.getOrElse(() => config.entityMaxIdleTime),
-      Duration.toMillis
-    )
+  const config = yield* ShardingConfig
+  const snowflakeGen = yield* Snowflake.Generator
+  const managerScope = yield* Effect.scope
+  const storageEnabled = options.storage !== MessageStorage.noop
+  const mailboxCapacity = options.mailboxCapacity ?? config.entityMailboxCapacity
+  const clock = yield* Effect.clock
+  const context = yield* Effect.context<Rpc.Context<Rpcs> | Rpc.Middleware<Rpcs> | RX>()

-
-    return pipe(
-      Effect.Do,
-      Effect.zipLeft(Clock.sleep(Duration.millis(duration))),
-      Effect.bind("cdt", () => Clock.currentTimeMillis),
-      Effect.bind("map", () => RefSynchronized.get(entityStates)),
-      Effect.let("lastReceivedAt", ({ map }) =>
-        pipe(
-          HashMap.get(map, recipientAddress),
-          Option.map((_) => _.lastReceivedAt),
-          Option.getOrElse(() => 0)
-        )),
-      Effect.let("remaining", ({ cdt, lastReceivedAt }) => (maxIdleMillis - cdt + lastReceivedAt)),
-      Effect.tap((_) => _.remaining > 0 ? sleep(_.remaining) : Effect.void)
-    )
-  }
+  const activeServers = new Map<EntityId, EntityState>()

-
-
-
-
-
-
-
-      Effect.forkDaemon
-    )
+  const entities: ResourceMap<
+    EntityAddress,
+    EntityState,
+    EntityNotManagedByRunner
+  > = yield* ResourceMap.make(Effect.fnUntraced(function*(address) {
+    if (yield* options.sharding.isShutdown) {
+      return yield* new EntityNotManagedByRunner({ address })
     }

-
-
-   */
-  function terminateEntity(recipientAddress: RecipientAddress.RecipientAddress) {
-    return pipe(
-      // get the things to cleanup
-      RefSynchronized.get(
-        entityStates
-      ),
-      Effect.map(HashMap.get(recipientAddress)),
-      Effect.flatMap(Option.match({
-        // there is no entity state to cleanup
-        onNone: () => Effect.void,
-        // found it!
-        onSome: (entityState) =>
-          pipe(
-            // interrupt the expiration timer
-            Fiber.interrupt(entityState.expirationFiber),
-            // close the scope of the entity,
-            Effect.ensuring(Scope.close(entityState.executionScope, Exit.void)),
-            // remove the entry from the map
-            Effect.ensuring(RefSynchronized.update(entityStates, HashMap.remove(recipientAddress))),
-            // log error if happens
-            Effect.catchAllCause(Effect.logError),
-            Effect.asVoid,
-            Effect.annotateLogs("entityId", recipientAddress.entityId),
-            Effect.annotateLogs("recipientType", recipientAddress.recipientTypeName)
-          )
-      }))
-    )
-  }
+    const scope = yield* Effect.scope
+    const endLatch = yield* Effect.makeLatch()

-
-
-
-
-
-  ): Effect.Effect<Option.Option<Fiber.RuntimeFiber<void, never>>> {
-    return RefSynchronized.modifyEffect(entityStates, (entityStatesMap) =>
-      pipe(
-        HashMap.get(entityStatesMap, recipientAddress),
-        Option.match({
-          // if no entry is found, the entity has succefully shut down
-          onNone: () => Effect.succeed([Option.none(), entityStatesMap] as const),
-          // there is an entry, so we should begin termination
-          onSome: (entityState) =>
-            pipe(
-              entityState.terminationFiber,
-              Option.match({
-                // termination has already begun, keep everything as-is
-                onSome: () => Effect.succeed([entityState.terminationFiber, entityStatesMap] as const),
-                // begin to terminate the queue
-                onNone: () =>
-                  pipe(
-                    terminateEntity(recipientAddress),
-                    Effect.forkDaemon,
-                    Effect.map((terminationFiber) =>
-                      [
-                        Option.some(terminationFiber),
-                        HashMap.modify(
-                          entityStatesMap,
-                          recipientAddress,
-                          EntityState.withTerminationFiber(terminationFiber)
-                        )
-                      ] as const
-                    )
-                  )
-              })
-            )
-        })
-      ))
-  }
+    // on shutdown, reset the storage for the entity
+    yield* Scope.addFinalizer(
+      scope,
+      Effect.ignore(options.storage.resetAddress(address))
+    )

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    return Effect.
-
-
-
-
-
-
-
-
-
-
-          serialization.decode(recipientType.schema, envelope.body),
-          Effect.flatMap((message) =>
-            pipe(
-              offer(message),
-              Effect.flatMap((_) =>
-                MessageState.mapEffect(
-                  _,
-                  (value) => serialization.encode(Message.exitSchema(message), value)
-                )
-              )
-            )
-          )
-        )
-      ),
-      Scope.extend(executionScope),
-      Effect.provideService(
-        RecipientBehaviourContext.RecipientBehaviourContext,
-        RecipientBehaviourContext.make({
-          recipientAddress,
-          shardId,
-          recipientType: recipientType as any,
-          forkShutdown
-        })
-      ),
-      Effect.provide(env)
+    const activeRequests: EntityState["activeRequests"] = new Map()
+    let defectRequestIds: Array<bigint> = []
+
+    // the server is stored in a ref, so if there is a defect, we can
+    // swap the server without losing the active requests
+    const writeRef = yield* ResourceRef.from(
+      scope,
+      Effect.fnUntraced(function*(scope) {
+        let isShuttingDown = false
+
+        // Initiate the behavior for the entity
+        const handlers = yield* (entity.protocol.toHandlersContext(buildHandlers).pipe(
+          Effect.provide(context.pipe(
+            Context.add(CurrentAddress, address),
+            Context.add(CurrentRunnerAddress, options.runnerAddress),
+            Context.add(Scope.Scope, scope)
+          )),
+          Effect.locally(FiberRef.currentLogAnnotations, HashMap.empty())
+        ) as Effect.Effect<Context.Context<Rpc.ToHandler<Rpcs>>>)
+
+        const server = yield* RpcServer.makeNoSerialization(entity.protocol, {
+          spanPrefix: `${entity.type}(${address.entityId})`,
+          concurrency: options.concurrency ?? 1,
+          onFromServer(response): Effect.Effect<void> {
+            switch (response._tag) {
+              case "Exit": {
+                const request = activeRequests.get(response.requestId)
+                if (!request) return Effect.void
+
+                // For durable messages, ignore interrupts during shutdown.
+                // They will be retried when the entity is restarted.
+                if (
+                  storageEnabled &&
+                  isShuttingDown &&
+                  Context.get(request.rpc.annotations, Persisted) &&
+                  Exit.isInterrupted(response.exit)
+                ) {
+                  return Effect.void
+                }
+                return retryRespond(
+                  4,
+                  Effect.suspend(() =>
+                    request.message.respond(
+                      new Reply.WithExit({
+                        requestId: Snowflake.Snowflake(response.requestId),
+                        id: snowflakeGen.unsafeNext(),
+                        exit: response.exit
+                      })
                     )
+                  )
+                ).pipe(
+                  Effect.flatMap(() => {
+                    activeRequests.delete(response.requestId)

-
-
-
-
-
-          lastReceivedAt: cdt
-        })
+                    // ensure that the reaper does not remove the entity as we haven't
+                    // been "idle" yet
+                    if (activeRequests.size === 0) {
+                      state.lastActiveCheck = clock.unsafeCurrentTimeMillis()
+                    }

-    return
-
-
-
-
-
-
-
+                    return Effect.void
+                  }),
+                  Effect.orDie
+                )
+              }
+              case "Chunk": {
+                const request = activeRequests.get(response.requestId)
+                if (!request) return Effect.void
+                const sequence = request.sequence
+                request.sequence++
+                return Effect.orDie(retryRespond(
+                  4,
+                  Effect.suspend(() => {
+                    const reply = new Reply.Chunk({
+                      requestId: Snowflake.Snowflake(response.requestId),
+                      id: snowflakeGen.unsafeNext(),
+                      sequence,
+                      values: response.values
+                    })
+                    request.lastSentChunk = Option.some(reply)
+                    return request.message.respond(reply)
                  })
-
-  }
+                ))
+              }
+              case "Defect": {
+                const effect = writeRef.unsafeRebuild()
+                defectRequestIds = Array.from(activeRequests.keys())
+                return Effect.logError("Defect in entity, restarting", Cause.die(response.defect)).pipe(
+                  Effect.andThen(effect.pipe(
+                    Effect.tapErrorCause(Effect.logError),
+                    Effect.retry(Schedule.spaced(500))
+                  )),
+                  Effect.annotateLogs({
+                    module: "EntityManager",
+                    address,
+                    runner: options.runnerAddress
+                  })
+                )
+              }
+              case "ClientEnd": {
+                return endLatch.open
+              }
+            }
+          }
+        }).pipe(
+          Scope.extend(scope),
+          Effect.provide(handlers)
+        )
+
+        yield* Scope.addFinalizer(
+          scope,
+          Effect.sync(() => {
+            isShuttingDown = true
+          })
+        )
+
+        for (const id of defectRequestIds) {
+          const { lastSentChunk, message } = activeRequests.get(id)!
+          yield* server.write(0, {
+            ...message.envelope,
+            id: RequestId(message.envelope.requestId),
+            tag: message.envelope.tag as any,
+            payload: new Request({
+              ...message.envelope,
+              lastSentChunk
+            } as any) as any
          })
-
+        }
+        defectRequestIds = []
+
+        return server.write
+      })
+    )
+
+    const state: EntityState = {
+      address,
+      mailboxGauge: ClusterMetrics.mailboxSize.pipe(
+        Metric.tagged("type", entity.type),
+        Metric.tagged("entityId", address.entityId)
+      ),
+      write(clientId, message) {
+        if (writeRef.state.current._tag !== "Acquired") {
+          return Effect.flatMap(writeRef.await, (write) => write(clientId, message))
+        }
+        return writeRef.state.current.value(clientId, message)
+      },
+      activeRequests,
+      lastActiveCheck: clock.unsafeCurrentTimeMillis()
    }

-
-
-
-
-
-
-
-
-
-
-
-
-      // first, verify that this entity should be handled by this pod
-      if (recipientType._tag === "EntityType") {
-        return Effect.asVoid(Effect.unlessEffect(
-          Effect.fail(
-            new ShardingException.EntityNotManagedByThisPodException({
-              recipientAddress: envelope.recipientAddress
-            })
-          ),
-          sharding.isEntityOnLocalShards(envelope.recipientAddress)
-        ))
-      } else if (recipientType._tag === "TopicType") {
-        return Effect.void
-      }
-      return Effect.die("Unhandled recipientType")
-    }),
-    Effect.bind("maybeEntityState", () => getOrCreateEntityState(envelope.recipientAddress)),
-    Effect.flatMap((_) =>
-      pipe(
-        _.maybeEntityState,
-        Option.match({
-          onNone: () =>
-            pipe(
-              Effect.sleep(Duration.millis(100)),
-              Effect.flatMap(() => sendAndGetState(envelope))
-            ),
-          onSome: (entityState) => {
-            return entityState.sendAndGetState(envelope)
-          }
-        })
-      )
+    // During shutdown, signal that no more messages will be processed
+    // and wait for the fiber to complete.
+    //
+    // If the termination timeout is reached, let the server clean itself up
+    yield* Scope.addFinalizer(
+      scope,
+      Effect.withFiberRuntime((fiber) => {
+        activeServers.delete(address.entityId)
+        internalInterruptors.add(fiber.id())
+        return state.write(0, { _tag: "Eof" }).pipe(
+          Effect.andThen(Effect.interruptible(endLatch.await)),
+          Effect.timeoutOption(config.entityTerminationTimeout)
        )
-    )
+      })
+    )
+    activeServers.set(address.entityId, state)
+
+    return state
+  }, Effect.locally(FiberRef.currentLogAnnotations, HashMap.empty())))
+
+  const reaper = yield* EntityReaper
+  const maxIdleTime = Duration.toMillis(options.maxIdleTime ?? config.entityMaxIdleTime)
+  if (Number.isFinite(maxIdleTime)) {
+    yield* reaper.register({
+      maxIdleTime,
+      servers: activeServers,
+      entities
+    })
+  }
+
+  // update metrics for active servers
+  const gauge = ClusterMetrics.entities.pipe(Metric.tagged("type", entity.type))
+  yield* Effect.sync(() => {
+    gauge.unsafeUpdate(BigInt(activeServers.size), [])
+    for (const state of activeServers.values()) {
+      state.mailboxGauge.unsafeUpdate(BigInt(state.activeRequests.size), [])
    }
+  }).pipe(
+    Effect.andThen(Effect.sleep(1000)),
+    Effect.forever,
+    Effect.forkIn(managerScope)
+  )

-
-
-
-
-
+  function sendLocal<R extends Rpc.Any>(
+    message: Message.IncomingLocal<R>
+  ): Effect.Effect<void, EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage> {
+    return Effect.locally(
+      Effect.flatMap(
+        entities.get(message.envelope.address),
+        (server): Effect.Effect<void, EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage> => {
+          switch (message._tag) {
+            case "IncomingRequestLocal": {
+              // If the request is already running, then we might have more than
+              // one sender for the same request. In this case, the other senders
+              // should resume from storage only.
+              let entry = server.activeRequests.get(message.envelope.requestId)
+              if (entry) {
+                return Effect.fail(
+                  new AlreadyProcessingMessage({
+                    envelopeId: message.envelope.requestId,
+                    address: message.envelope.address
+                  })
+                )
+              }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        Effect.logError(
-          `Entity ${recipientAddress} termination is taking more than expected entityTerminationTimeout (${
-            Duration.toMillis(config.entityTerminationTimeout)
-          }ms).`
-        ),
-      onSuccess: () =>
-        Effect.logDebug(
-          `Entity ${recipientAddress} cleaned up.`
-        )
-    }),
-    Effect.asVoid
-  )
+              if (mailboxCapacity !== "unbounded" && server.activeRequests.size >= mailboxCapacity) {
+                return Effect.fail(new MailboxFull({ address: message.envelope.address }))
+              }
+
+              entry = {
+                rpc: entity.protocol.requests.get(message.envelope.tag)! as any as Rpc.AnyWithProps,
+                message,
+                lastSentChunk: message.lastSentReply as any,
+                sequence: Option.match(message.lastSentReply, {
+                  onNone: () => 0,
+                  onSome: (reply) => reply._tag === "Chunk" ? reply.sequence + 1 : 0
+                })
+              }
+              server.activeRequests.set(message.envelope.requestId, entry)
+              return server.write(0, {
+                ...message.envelope,
+                id: RequestId(message.envelope.requestId),
+                payload: new Request({
+                  ...message.envelope,
+                  lastSentChunk: message.lastSentReply as any
                })
+              })
+            }
+            case "IncomingEnvelope": {
+              const entry = server.activeRequests.get(message.envelope.requestId)
+              if (!entry) {
+                return Effect.fail(new EntityNotManagedByRunner({ address: message.envelope.address }))
+              } else if (
+                message.envelope._tag === "AckChunk" &&
+                Option.isSome(entry.lastSentChunk) &&
+                message.envelope.replyId !== entry.lastSentChunk.value.id
+              ) {
+                return Effect.void
+              }
+              return server.write(
+                0,
+                message.envelope._tag === "AckChunk"
+                  ? { _tag: "Ack", requestId: RequestId(message.envelope.requestId) }
+                  : { _tag: "Interrupt", requestId: RequestId(message.envelope.requestId), interruptors: [] }
              )
-
-
-
-
-
-
+            }
+          }
+        }
+      ),
+      FiberRef.currentLogAnnotations,
+      HashMap.empty()
+    )
+  }

-
-
-
-
-
-
-
-
-
-  Effect.
-
+  const interruptShard = (shardId: ShardId) =>
+    Effect.suspend(function loop(): Effect.Effect<void> {
+      const toInterrupt = new Set<EntityState>()
+      for (const state of activeServers.values()) {
+        if (shardId === state.address.shardId) {
+          toInterrupt.add(state)
+        }
+      }
+      if (toInterrupt.size === 0) {
+        return Effect.void
+      }
+      return Effect.flatMap(
+        Effect.forEach(toInterrupt, (state) => entities.removeIgnore(state.address), {
+          concurrency: "unbounded",
+          discard: true
+        }),
+        loop
      )
-  }
+    })

-
-
-
-
-
-
-
+  const decodeMessage = Schema.decode(makeMessageSchema(entity))
+
+  return identity<EntityManager>({
+    interruptShard,
+    isProcessingFor(message) {
+      const state = activeServers.get(message.envelope.address.entityId)
+      if (!state) return false
+      return state.activeRequests.has(message.envelope.requestId)
+    },
+    sendLocal,
+    send: (message) =>
+      decodeMessage(message).pipe(
+        Effect.matchEffect({
+          onFailure: (cause) => {
+            if (message._tag === "IncomingEnvelope") {
+              return Effect.die(new MalformedMessage({ cause }))
+            }
+            return Effect.orDie(message.respond(
+              new Reply.ReplyWithContext({
+                reply: new Reply.WithExit({
+                  id: snowflakeGen.unsafeNext(),
+                  requestId: message.envelope.requestId,
+                  exit: Exit.die(new MalformedMessage({ cause }))
+                }),
+                rpc: entity.protocol.requests.get(message.envelope.tag)!,
+                context
+              })
+            ))
+          },
+          onSuccess: (decoded) => {
+            if (decoded._tag === "IncomingEnvelope") {
+              return sendLocal(
+                new Message.IncomingEnvelope(decoded)
+              )
+            }
+            const request = message as Message.IncomingRequest<any>
+            const rpc = entity.protocol.requests.get(decoded.envelope.tag)!
+            return sendLocal(
+              new Message.IncomingRequestLocal({
+                envelope: decoded.envelope,
+                lastSentReply: decoded.lastSentReply,
+                respond: (reply) =>
+                  request.respond(
+                    new Reply.ReplyWithContext({
+                      reply,
+                      rpc,
+                      context
+                    })
+                  )
+              })
+            )
+          }
+        }),
+        Effect.provide(context as Context.Context<unknown>)
+      )
  })
+})
+
+const makeMessageSchema = <Rpcs extends Rpc.Any>(entity: Entity<Rpcs>): Schema.Schema<
+  {
+    readonly _tag: "IncomingRequest"
+    readonly envelope: Envelope.Request.Any
+    readonly lastSentReply: Option.Option<Reply.Reply<Rpcs>>
+  } | {
+    readonly _tag: "IncomingEnvelope"
+    readonly envelope: Envelope.AckChunk | Envelope.Interrupt
+  },
+  Message.Incoming<Rpcs>,
+  Rpc.Context<Rpcs>
+> => {
+  const requests = Arr.empty<Schema.Schema.Any>()
+
+  for (const rpc of entity.protocol.requests.values()) {
+    requests.push(
+      Schema.TaggedStruct("IncomingRequest", {
+        envelope: Schema.transform(
+          Schema.Struct({
+            ...Envelope.PartialEncodedRequestFromSelf.fields,
+            tag: Schema.Literal(rpc._tag),
+            payload: (rpc as any as Rpc.AnyWithProps).payloadSchema
+          }),
+          Envelope.RequestFromSelf,
+          {
+            decode: (encoded) => Envelope.makeRequest(encoded),
+            encode: identity
+          }
+        ),
+        lastSentReply: Schema.OptionFromSelf(Reply.Reply(rpc))
+      })
+    )
+  }
+
+  return Schema.Union(
+    ...requests,
+    Schema.TaggedStruct("IncomingEnvelope", {
+      envelope: Schema.Union(
+        Schema.typeSchema(Envelope.AckChunk),
+        Schema.typeSchema(Envelope.Interrupt)
+      )
+    })
+  ) as any
 }
+
+const retryRespond = <A, E, R>(times: number, effect: Effect.Effect<A, E, R>): Effect.Effect<A, E, R> =>
+  times === 0 ?
+    effect :
+    Effect.catchAll(effect, () => Effect.delay(retryRespond(times - 1, effect), 200))