@effect/cluster 0.28.3 → 0.29.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (626)
  1. package/ClusterError/package.json +6 -0
  2. package/ClusterMetrics/package.json +6 -0
  3. package/ClusterSchema/package.json +6 -0
  4. package/DeliverAt/package.json +6 -0
  5. package/Entity/package.json +6 -0
  6. package/EntityAddress/package.json +6 -0
  7. package/EntityId/package.json +6 -0
  8. package/EntityType/package.json +6 -0
  9. package/Envelope/package.json +6 -0
  10. package/HttpCommon/package.json +6 -0
  11. package/HttpRunner/package.json +6 -0
  12. package/HttpShardManager/package.json +6 -0
  13. package/MachineId/package.json +6 -0
  14. package/MessageStorage/package.json +6 -0
  15. package/README.md +2 -2
  16. package/Reply/package.json +6 -0
  17. package/Runner/package.json +6 -0
  18. package/RunnerAddress/package.json +6 -0
  19. package/RunnerHealth/package.json +6 -0
  20. package/RunnerServer/package.json +6 -0
  21. package/Runners/package.json +6 -0
  22. package/ShardStorage/package.json +6 -0
  23. package/Singleton/package.json +6 -0
  24. package/SingletonAddress/package.json +6 -0
  25. package/Snowflake/package.json +6 -0
  26. package/SocketRunner/package.json +6 -0
  27. package/SocketShardManager/package.json +6 -0
  28. package/SqlMessageStorage/package.json +6 -0
  29. package/SqlShardStorage/package.json +6 -0
  30. package/SynchronizedClock/package.json +6 -0
  31. package/dist/cjs/ClusterError.js +180 -0
  32. package/dist/cjs/ClusterError.js.map +1 -0
  33. package/dist/cjs/ClusterMetrics.js +63 -0
  34. package/dist/cjs/ClusterMetrics.js.map +1 -0
  35. package/dist/cjs/{Pods.js → ClusterSchema.js} +10 -22
  36. package/dist/cjs/ClusterSchema.js.map +1 -0
  37. package/dist/cjs/DeliverAt.js +30 -0
  38. package/dist/cjs/DeliverAt.js.map +1 -0
  39. package/dist/cjs/Entity.js +187 -0
  40. package/dist/cjs/Entity.js.map +1 -0
  41. package/dist/cjs/EntityAddress.js +54 -0
  42. package/dist/cjs/EntityAddress.js.map +1 -0
  43. package/dist/cjs/{AtLeastOnce.js → EntityId.js} +6 -7
  44. package/dist/cjs/EntityId.js.map +1 -0
  45. package/dist/cjs/{ShardManagerClient.js → EntityType.js} +5 -16
  46. package/dist/cjs/EntityType.js.map +1 -0
  47. package/dist/cjs/Envelope.js +168 -0
  48. package/dist/cjs/Envelope.js.map +1 -0
  49. package/dist/cjs/HttpCommon.js +49 -0
  50. package/dist/cjs/HttpCommon.js.map +1 -0
  51. package/dist/cjs/HttpRunner.js +108 -0
  52. package/dist/cjs/HttpRunner.js.map +1 -0
  53. package/dist/cjs/HttpShardManager.js +140 -0
  54. package/dist/cjs/HttpShardManager.js.map +1 -0
  55. package/dist/cjs/{AtLeastOnceStorage.js → MachineId.js} +11 -9
  56. package/dist/cjs/MachineId.js.map +1 -0
  57. package/dist/cjs/Message.js +99 -18
  58. package/dist/cjs/Message.js.map +1 -1
  59. package/dist/cjs/MessageStorage.js +356 -0
  60. package/dist/cjs/MessageStorage.js.map +1 -0
  61. package/dist/cjs/Reply.js +200 -0
  62. package/dist/cjs/Reply.js.map +1 -0
  63. package/dist/cjs/Runner.js +79 -0
  64. package/dist/cjs/Runner.js.map +1 -0
  65. package/dist/cjs/RunnerAddress.js +63 -0
  66. package/dist/cjs/RunnerAddress.js.map +1 -0
  67. package/dist/cjs/RunnerHealth.js +68 -0
  68. package/dist/cjs/RunnerHealth.js.map +1 -0
  69. package/dist/cjs/RunnerServer.js +125 -0
  70. package/dist/cjs/RunnerServer.js.map +1 -0
  71. package/dist/cjs/Runners.js +344 -0
  72. package/dist/cjs/Runners.js.map +1 -0
  73. package/dist/cjs/ShardId.js +7 -46
  74. package/dist/cjs/ShardId.js.map +1 -1
  75. package/dist/cjs/ShardManager.js +493 -8
  76. package/dist/cjs/ShardManager.js.map +1 -1
  77. package/dist/cjs/ShardStorage.js +139 -0
  78. package/dist/cjs/ShardStorage.js.map +1 -0
  79. package/dist/cjs/Sharding.js +732 -88
  80. package/dist/cjs/Sharding.js.map +1 -1
  81. package/dist/cjs/ShardingConfig.js +85 -18
  82. package/dist/cjs/ShardingConfig.js.map +1 -1
  83. package/dist/cjs/ShardingRegistrationEvent.js +26 -32
  84. package/dist/cjs/ShardingRegistrationEvent.js.map +1 -1
  85. package/dist/cjs/{ManagerConfig.js → Singleton.js} +11 -20
  86. package/dist/cjs/Singleton.js.map +1 -0
  87. package/dist/cjs/SingletonAddress.js +50 -0
  88. package/dist/cjs/SingletonAddress.js.map +1 -0
  89. package/dist/cjs/Snowflake.js +133 -0
  90. package/dist/cjs/Snowflake.js.map +1 -0
  91. package/dist/cjs/SocketRunner.js +40 -0
  92. package/dist/cjs/SocketRunner.js.map +1 -0
  93. package/dist/cjs/SocketShardManager.js +33 -0
  94. package/dist/cjs/SocketShardManager.js.map +1 -0
  95. package/dist/cjs/SqlMessageStorage.js +668 -0
  96. package/dist/cjs/SqlMessageStorage.js.map +1 -0
  97. package/dist/cjs/SqlShardStorage.js +228 -0
  98. package/dist/cjs/SqlShardStorage.js.map +1 -0
  99. package/dist/cjs/SynchronizedClock.js +66 -0
  100. package/dist/cjs/SynchronizedClock.js.map +1 -0
  101. package/dist/cjs/index.js +57 -45
  102. package/dist/cjs/internal/entityManager.js +311 -143
  103. package/dist/cjs/internal/entityManager.js.map +1 -1
  104. package/dist/cjs/internal/entityReaper.js +47 -0
  105. package/dist/cjs/internal/entityReaper.js.map +1 -0
  106. package/dist/cjs/internal/hash.js +20 -0
  107. package/dist/cjs/internal/hash.js.map +1 -0
  108. package/dist/cjs/internal/interruptors.js +9 -0
  109. package/dist/cjs/internal/interruptors.js.map +1 -0
  110. package/dist/cjs/internal/resourceMap.js +88 -0
  111. package/dist/cjs/internal/resourceMap.js.map +1 -0
  112. package/dist/cjs/internal/resourceRef.js +92 -0
  113. package/dist/cjs/internal/resourceRef.js.map +1 -0
  114. package/dist/cjs/internal/shardManager.js +219 -235
  115. package/dist/cjs/internal/shardManager.js.map +1 -1
  116. package/dist/dts/ClusterError.d.ts +169 -0
  117. package/dist/dts/ClusterError.d.ts.map +1 -0
  118. package/dist/dts/ClusterMetrics.d.ts +50 -0
  119. package/dist/dts/ClusterMetrics.d.ts.map +1 -0
  120. package/dist/dts/ClusterSchema.d.ts +13 -0
  121. package/dist/dts/ClusterSchema.d.ts.map +1 -0
  122. package/dist/dts/DeliverAt.d.ts +27 -0
  123. package/dist/dts/DeliverAt.d.ts.map +1 -0
  124. package/dist/dts/Entity.d.ts +180 -0
  125. package/dist/dts/Entity.d.ts.map +1 -0
  126. package/dist/dts/EntityAddress.d.ts +55 -0
  127. package/dist/dts/EntityAddress.d.ts.map +1 -0
  128. package/dist/dts/EntityId.d.ts +15 -0
  129. package/dist/dts/EntityId.d.ts.map +1 -0
  130. package/dist/dts/EntityType.d.ts +15 -0
  131. package/dist/dts/EntityType.d.ts.map +1 -0
  132. package/dist/dts/Envelope.d.ts +252 -0
  133. package/dist/dts/Envelope.d.ts.map +1 -0
  134. package/dist/dts/HttpCommon.d.ts +25 -0
  135. package/dist/dts/HttpCommon.d.ts.map +1 -0
  136. package/dist/dts/HttpRunner.d.ts +76 -0
  137. package/dist/dts/HttpRunner.d.ts.map +1 -0
  138. package/dist/dts/HttpShardManager.d.ts +119 -0
  139. package/dist/dts/HttpShardManager.d.ts.map +1 -0
  140. package/dist/dts/MachineId.d.ts +20 -0
  141. package/dist/dts/MachineId.d.ts.map +1 -0
  142. package/dist/dts/Message.d.ts +91 -74
  143. package/dist/dts/Message.d.ts.map +1 -1
  144. package/dist/dts/MessageStorage.d.ts +336 -0
  145. package/dist/dts/MessageStorage.d.ts.map +1 -0
  146. package/dist/dts/Reply.d.ts +171 -0
  147. package/dist/dts/Reply.d.ts.map +1 -0
  148. package/dist/dts/Runner.d.ts +81 -0
  149. package/dist/dts/Runner.d.ts.map +1 -0
  150. package/dist/dts/RunnerAddress.d.ts +56 -0
  151. package/dist/dts/RunnerAddress.d.ts.map +1 -0
  152. package/dist/dts/RunnerHealth.d.ts +54 -0
  153. package/dist/dts/RunnerHealth.d.ts.map +1 -0
  154. package/dist/dts/RunnerServer.d.ts +44 -0
  155. package/dist/dts/RunnerServer.d.ts.map +1 -0
  156. package/dist/dts/Runners.d.ts +161 -0
  157. package/dist/dts/Runners.d.ts.map +1 -0
  158. package/dist/dts/ShardId.d.ts +5 -55
  159. package/dist/dts/ShardId.d.ts.map +1 -1
  160. package/dist/dts/ShardManager.d.ts +435 -23
  161. package/dist/dts/ShardManager.d.ts.map +1 -1
  162. package/dist/dts/ShardStorage.d.ts +200 -0
  163. package/dist/dts/ShardStorage.d.ts.map +1 -0
  164. package/dist/dts/Sharding.d.ts +109 -131
  165. package/dist/dts/Sharding.d.ts.map +1 -1
  166. package/dist/dts/ShardingConfig.d.ts +147 -44
  167. package/dist/dts/ShardingConfig.d.ts.map +1 -1
  168. package/dist/dts/ShardingRegistrationEvent.d.ts +38 -23
  169. package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -1
  170. package/dist/dts/Singleton.d.ts +13 -0
  171. package/dist/dts/Singleton.d.ts.map +1 -0
  172. package/dist/dts/SingletonAddress.d.ts +49 -0
  173. package/dist/dts/SingletonAddress.d.ts.map +1 -0
  174. package/dist/dts/Snowflake.d.ts +121 -0
  175. package/dist/dts/Snowflake.d.ts.map +1 -0
  176. package/dist/dts/SocketRunner.d.ts +22 -0
  177. package/dist/dts/SocketRunner.d.ts.map +1 -0
  178. package/dist/dts/SocketShardManager.d.ts +17 -0
  179. package/dist/dts/SocketShardManager.d.ts.map +1 -0
  180. package/dist/dts/SqlMessageStorage.d.ts +43 -0
  181. package/dist/dts/SqlMessageStorage.d.ts.map +1 -0
  182. package/dist/dts/SqlShardStorage.d.ts +38 -0
  183. package/dist/dts/SqlShardStorage.d.ts.map +1 -0
  184. package/dist/dts/SynchronizedClock.d.ts +19 -0
  185. package/dist/dts/SynchronizedClock.d.ts.map +1 -0
  186. package/dist/dts/index.d.ts +48 -24
  187. package/dist/dts/index.d.ts.map +1 -1
  188. package/dist/dts/internal/entityReaper.d.ts +2 -0
  189. package/dist/dts/internal/entityReaper.d.ts.map +1 -0
  190. package/dist/dts/internal/hash.d.ts +2 -0
  191. package/dist/dts/internal/hash.d.ts.map +1 -0
  192. package/dist/dts/internal/interruptors.d.ts +2 -0
  193. package/dist/dts/internal/interruptors.d.ts.map +1 -0
  194. package/dist/dts/internal/resourceMap.d.ts +22 -0
  195. package/dist/dts/internal/resourceMap.d.ts.map +1 -0
  196. package/dist/dts/internal/resourceRef.d.ts +25 -0
  197. package/dist/dts/internal/resourceRef.d.ts.map +1 -0
  198. package/dist/dts/internal/shardManager.d.ts +1 -11
  199. package/dist/dts/internal/shardManager.d.ts.map +1 -1
  200. package/dist/esm/ClusterError.js +164 -0
  201. package/dist/esm/ClusterError.js.map +1 -0
  202. package/dist/esm/ClusterMetrics.js +54 -0
  203. package/dist/esm/ClusterMetrics.js.map +1 -0
  204. package/dist/esm/ClusterSchema.js +13 -0
  205. package/dist/esm/ClusterSchema.js.map +1 -0
  206. package/dist/esm/DeliverAt.js +22 -0
  207. package/dist/esm/DeliverAt.js.map +1 -0
  208. package/dist/esm/Entity.js +173 -0
  209. package/dist/esm/Entity.js.map +1 -0
  210. package/dist/esm/EntityAddress.js +44 -0
  211. package/dist/esm/EntityAddress.js.map +1 -0
  212. package/dist/esm/EntityId.js +10 -0
  213. package/dist/esm/EntityId.js.map +1 -0
  214. package/dist/esm/EntityType.js +10 -0
  215. package/dist/esm/EntityType.js.map +1 -0
  216. package/dist/esm/Envelope.js +154 -0
  217. package/dist/esm/Envelope.js.map +1 -0
  218. package/dist/esm/HttpCommon.js +38 -0
  219. package/dist/esm/HttpCommon.js.map +1 -0
  220. package/dist/esm/HttpRunner.js +98 -0
  221. package/dist/esm/HttpRunner.js.map +1 -0
  222. package/dist/esm/HttpShardManager.js +128 -0
  223. package/dist/esm/HttpShardManager.js.map +1 -0
  224. package/dist/esm/MachineId.js +17 -0
  225. package/dist/esm/MachineId.js.map +1 -0
  226. package/dist/esm/Message.js +88 -17
  227. package/dist/esm/Message.js.map +1 -1
  228. package/dist/esm/MessageStorage.js +345 -0
  229. package/dist/esm/MessageStorage.js.map +1 -0
  230. package/dist/esm/Reply.js +184 -0
  231. package/dist/esm/Reply.js.map +1 -0
  232. package/dist/esm/Runner.js +68 -0
  233. package/dist/esm/Runner.js.map +1 -0
  234. package/dist/esm/RunnerAddress.js +52 -0
  235. package/dist/esm/RunnerAddress.js.map +1 -0
  236. package/dist/esm/RunnerHealth.js +58 -0
  237. package/dist/esm/RunnerHealth.js.map +1 -0
  238. package/dist/esm/RunnerServer.js +116 -0
  239. package/dist/esm/RunnerServer.js.map +1 -0
  240. package/dist/esm/Runners.js +332 -0
  241. package/dist/esm/Runners.js.map +1 -0
  242. package/dist/esm/ShardId.js +5 -42
  243. package/dist/esm/ShardId.js.map +1 -1
  244. package/dist/esm/ShardManager.js +486 -7
  245. package/dist/esm/ShardManager.js.map +1 -1
  246. package/dist/esm/ShardStorage.js +129 -0
  247. package/dist/esm/ShardStorage.js.map +1 -0
  248. package/dist/esm/Sharding.js +730 -87
  249. package/dist/esm/Sharding.js.map +1 -1
  250. package/dist/esm/ShardingConfig.js +80 -17
  251. package/dist/esm/ShardingConfig.js.map +1 -1
  252. package/dist/esm/ShardingRegistrationEvent.js +19 -29
  253. package/dist/esm/ShardingRegistrationEvent.js.map +1 -1
  254. package/dist/esm/Singleton.js +15 -0
  255. package/dist/esm/Singleton.js.map +1 -0
  256. package/dist/esm/SingletonAddress.js +40 -0
  257. package/dist/esm/SingletonAddress.js.map +1 -0
  258. package/dist/esm/Snowflake.js +117 -0
  259. package/dist/esm/Snowflake.js.map +1 -0
  260. package/dist/esm/SocketRunner.js +31 -0
  261. package/dist/esm/SocketRunner.js.map +1 -0
  262. package/dist/esm/SocketShardManager.js +24 -0
  263. package/dist/esm/SocketShardManager.js.map +1 -0
  264. package/dist/esm/SqlMessageStorage.js +658 -0
  265. package/dist/esm/SqlMessageStorage.js.map +1 -0
  266. package/dist/esm/SqlShardStorage.js +218 -0
  267. package/dist/esm/SqlShardStorage.js.map +1 -0
  268. package/dist/esm/SynchronizedClock.js +57 -0
  269. package/dist/esm/SynchronizedClock.js.map +1 -0
  270. package/dist/esm/index.js +48 -24
  271. package/dist/esm/index.js.map +1 -1
  272. package/dist/esm/internal/entityManager.js +311 -142
  273. package/dist/esm/internal/entityManager.js.map +1 -1
  274. package/dist/esm/internal/entityReaper.js +38 -0
  275. package/dist/esm/internal/entityReaper.js.map +1 -0
  276. package/dist/esm/internal/hash.js +12 -0
  277. package/dist/esm/internal/hash.js.map +1 -0
  278. package/dist/esm/internal/interruptors.js +3 -0
  279. package/dist/esm/internal/interruptors.js.map +1 -0
  280. package/dist/esm/internal/resourceMap.js +79 -0
  281. package/dist/esm/internal/resourceMap.js.map +1 -0
  282. package/dist/esm/internal/resourceRef.js +83 -0
  283. package/dist/esm/internal/resourceRef.js.map +1 -0
  284. package/dist/esm/internal/shardManager.js +217 -233
  285. package/dist/esm/internal/shardManager.js.map +1 -1
  286. package/package.json +212 -154
  287. package/src/ClusterError.ts +193 -0
  288. package/src/ClusterMetrics.ts +62 -0
  289. package/src/ClusterSchema.ts +13 -0
  290. package/src/DeliverAt.ts +36 -0
  291. package/src/Entity.ts +438 -0
  292. package/src/EntityAddress.ts +55 -0
  293. package/src/EntityId.ts +16 -0
  294. package/src/EntityType.ts +16 -0
  295. package/src/Envelope.ts +352 -0
  296. package/src/HttpCommon.ts +73 -0
  297. package/src/HttpRunner.ts +196 -0
  298. package/src/HttpShardManager.ts +273 -0
  299. package/src/MachineId.ts +27 -0
  300. package/src/Message.ts +143 -92
  301. package/src/MessageStorage.ts +697 -0
  302. package/src/Reply.ts +295 -0
  303. package/src/Runner.ts +84 -0
  304. package/src/RunnerAddress.ts +61 -0
  305. package/src/RunnerHealth.ts +87 -0
  306. package/src/RunnerServer.ts +156 -0
  307. package/src/Runners.ts +533 -0
  308. package/src/ShardId.ts +10 -62
  309. package/src/ShardManager.ts +780 -29
  310. package/src/ShardStorage.ts +289 -0
  311. package/src/Sharding.ts +1060 -183
  312. package/src/ShardingConfig.ts +186 -45
  313. package/src/ShardingRegistrationEvent.ts +38 -39
  314. package/src/Singleton.ts +20 -0
  315. package/src/SingletonAddress.ts +47 -0
  316. package/src/Snowflake.ts +194 -0
  317. package/src/SocketRunner.ts +59 -0
  318. package/src/SocketShardManager.ts +48 -0
  319. package/src/SqlMessageStorage.ts +833 -0
  320. package/src/SqlShardStorage.ts +292 -0
  321. package/src/SynchronizedClock.ts +82 -0
  322. package/src/index.ts +54 -24
  323. package/src/internal/entityManager.ts +464 -361
  324. package/src/internal/entityReaper.ts +53 -0
  325. package/src/internal/hash.ts +11 -0
  326. package/src/internal/interruptors.ts +4 -0
  327. package/src/internal/resourceMap.ts +89 -0
  328. package/src/internal/resourceRef.ts +88 -0
  329. package/src/internal/shardManager.ts +273 -546
  330. package/AtLeastOnce/package.json +0 -6
  331. package/AtLeastOnceStorage/package.json +0 -6
  332. package/Broadcaster/package.json +0 -6
  333. package/ManagerConfig/package.json +0 -6
  334. package/MessageState/package.json +0 -6
  335. package/Messenger/package.json +0 -6
  336. package/Pod/package.json +0 -6
  337. package/PodAddress/package.json +0 -6
  338. package/Pods/package.json +0 -6
  339. package/PodsHealth/package.json +0 -6
  340. package/PoisonPill/package.json +0 -6
  341. package/RecipientAddress/package.json +0 -6
  342. package/RecipientBehaviour/package.json +0 -6
  343. package/RecipientBehaviourContext/package.json +0 -6
  344. package/RecipientType/package.json +0 -6
  345. package/Serialization/package.json +0 -6
  346. package/SerializedEnvelope/package.json +0 -6
  347. package/SerializedMessage/package.json +0 -6
  348. package/ShardManagerClient/package.json +0 -6
  349. package/ShardingEvent/package.json +0 -6
  350. package/ShardingException/package.json +0 -6
  351. package/Storage/package.json +0 -6
  352. package/dist/cjs/AtLeastOnce.js.map +0 -1
  353. package/dist/cjs/AtLeastOnceStorage.js.map +0 -1
  354. package/dist/cjs/Broadcaster.js +0 -6
  355. package/dist/cjs/Broadcaster.js.map +0 -1
  356. package/dist/cjs/ManagerConfig.js.map +0 -1
  357. package/dist/cjs/MessageState.js +0 -55
  358. package/dist/cjs/MessageState.js.map +0 -1
  359. package/dist/cjs/Messenger.js +0 -6
  360. package/dist/cjs/Messenger.js.map +0 -1
  361. package/dist/cjs/Pod.js +0 -78
  362. package/dist/cjs/Pod.js.map +0 -1
  363. package/dist/cjs/PodAddress.js +0 -77
  364. package/dist/cjs/PodAddress.js.map +0 -1
  365. package/dist/cjs/Pods.js.map +0 -1
  366. package/dist/cjs/PodsHealth.js +0 -41
  367. package/dist/cjs/PodsHealth.js.map +0 -1
  368. package/dist/cjs/PoisonPill.js +0 -78
  369. package/dist/cjs/PoisonPill.js.map +0 -1
  370. package/dist/cjs/RecipientAddress.js +0 -79
  371. package/dist/cjs/RecipientAddress.js.map +0 -1
  372. package/dist/cjs/RecipientBehaviour.js +0 -38
  373. package/dist/cjs/RecipientBehaviour.js.map +0 -1
  374. package/dist/cjs/RecipientBehaviourContext.js +0 -64
  375. package/dist/cjs/RecipientBehaviourContext.js.map +0 -1
  376. package/dist/cjs/RecipientType.js +0 -123
  377. package/dist/cjs/RecipientType.js.map +0 -1
  378. package/dist/cjs/Serialization.js +0 -32
  379. package/dist/cjs/Serialization.js.map +0 -1
  380. package/dist/cjs/SerializedEnvelope.js +0 -87
  381. package/dist/cjs/SerializedEnvelope.js.map +0 -1
  382. package/dist/cjs/SerializedMessage.js +0 -64
  383. package/dist/cjs/SerializedMessage.js.map +0 -1
  384. package/dist/cjs/ShardManagerClient.js.map +0 -1
  385. package/dist/cjs/ShardingEvent.js +0 -72
  386. package/dist/cjs/ShardingEvent.js.map +0 -1
  387. package/dist/cjs/ShardingException.js +0 -107
  388. package/dist/cjs/ShardingException.js.map +0 -1
  389. package/dist/cjs/Storage.js +0 -40
  390. package/dist/cjs/Storage.js.map +0 -1
  391. package/dist/cjs/internal/atLeastOnce.js +0 -35
  392. package/dist/cjs/internal/atLeastOnce.js.map +0 -1
  393. package/dist/cjs/internal/atLeastOnceStorage.js +0 -163
  394. package/dist/cjs/internal/atLeastOnceStorage.js.map +0 -1
  395. package/dist/cjs/internal/entityState.js +0 -47
  396. package/dist/cjs/internal/entityState.js.map +0 -1
  397. package/dist/cjs/internal/managerConfig.js +0 -46
  398. package/dist/cjs/internal/managerConfig.js.map +0 -1
  399. package/dist/cjs/internal/message.js +0 -48
  400. package/dist/cjs/internal/message.js.map +0 -1
  401. package/dist/cjs/internal/messageState.js +0 -79
  402. package/dist/cjs/internal/messageState.js.map +0 -1
  403. package/dist/cjs/internal/podWithMetadata.js +0 -54
  404. package/dist/cjs/internal/podWithMetadata.js.map +0 -1
  405. package/dist/cjs/internal/pods.js +0 -35
  406. package/dist/cjs/internal/pods.js.map +0 -1
  407. package/dist/cjs/internal/podsHealth.js +0 -40
  408. package/dist/cjs/internal/podsHealth.js.map +0 -1
  409. package/dist/cjs/internal/recipientBehaviour.js +0 -52
  410. package/dist/cjs/internal/recipientBehaviour.js.map +0 -1
  411. package/dist/cjs/internal/recipientBehaviourContext.js +0 -36
  412. package/dist/cjs/internal/recipientBehaviourContext.js.map +0 -1
  413. package/dist/cjs/internal/serialization.js +0 -48
  414. package/dist/cjs/internal/serialization.js.map +0 -1
  415. package/dist/cjs/internal/shardManagerClient.js +0 -48
  416. package/dist/cjs/internal/shardManagerClient.js.map +0 -1
  417. package/dist/cjs/internal/shardManagerState.js +0 -44
  418. package/dist/cjs/internal/shardManagerState.js.map +0 -1
  419. package/dist/cjs/internal/sharding.js +0 -306
  420. package/dist/cjs/internal/sharding.js.map +0 -1
  421. package/dist/cjs/internal/shardingConfig.js +0 -56
  422. package/dist/cjs/internal/shardingConfig.js.map +0 -1
  423. package/dist/cjs/internal/storage.js +0 -52
  424. package/dist/cjs/internal/storage.js.map +0 -1
  425. package/dist/cjs/internal/utils.js +0 -69
  426. package/dist/cjs/internal/utils.js.map +0 -1
  427. package/dist/dts/AtLeastOnce.d.ts +0 -20
  428. package/dist/dts/AtLeastOnce.d.ts.map +0 -1
  429. package/dist/dts/AtLeastOnceStorage.d.ts +0 -75
  430. package/dist/dts/AtLeastOnceStorage.d.ts.map +0 -1
  431. package/dist/dts/Broadcaster.d.ts +0 -32
  432. package/dist/dts/Broadcaster.d.ts.map +0 -1
  433. package/dist/dts/ManagerConfig.d.ts +0 -61
  434. package/dist/dts/ManagerConfig.d.ts.map +0 -1
  435. package/dist/dts/MessageState.d.ts +0 -107
  436. package/dist/dts/MessageState.d.ts.map +0 -1
  437. package/dist/dts/Messenger.d.ts +0 -32
  438. package/dist/dts/Messenger.d.ts.map +0 -1
  439. package/dist/dts/Pod.d.ts +0 -81
  440. package/dist/dts/Pod.d.ts.map +0 -1
  441. package/dist/dts/PodAddress.d.ts +0 -80
  442. package/dist/dts/PodAddress.d.ts.map +0 -1
  443. package/dist/dts/Pods.d.ts +0 -78
  444. package/dist/dts/Pods.d.ts.map +0 -1
  445. package/dist/dts/PodsHealth.d.ts +0 -66
  446. package/dist/dts/PodsHealth.d.ts.map +0 -1
  447. package/dist/dts/PoisonPill.d.ts +0 -78
  448. package/dist/dts/PoisonPill.d.ts.map +0 -1
  449. package/dist/dts/RecipientAddress.d.ts +0 -57
  450. package/dist/dts/RecipientAddress.d.ts.map +0 -1
  451. package/dist/dts/RecipientBehaviour.d.ts +0 -72
  452. package/dist/dts/RecipientBehaviour.d.ts.map +0 -1
  453. package/dist/dts/RecipientBehaviourContext.d.ts +0 -83
  454. package/dist/dts/RecipientBehaviourContext.d.ts.map +0 -1
  455. package/dist/dts/RecipientType.d.ts +0 -93
  456. package/dist/dts/RecipientType.d.ts.map +0 -1
  457. package/dist/dts/Serialization.d.ts +0 -58
  458. package/dist/dts/Serialization.d.ts.map +0 -1
  459. package/dist/dts/SerializedEnvelope.d.ts +0 -86
  460. package/dist/dts/SerializedEnvelope.d.ts.map +0 -1
  461. package/dist/dts/SerializedMessage.d.ts +0 -66
  462. package/dist/dts/SerializedMessage.d.ts.map +0 -1
  463. package/dist/dts/ShardManagerClient.d.ts +0 -50
  464. package/dist/dts/ShardManagerClient.d.ts.map +0 -1
  465. package/dist/dts/ShardingEvent.d.ts +0 -90
  466. package/dist/dts/ShardingEvent.d.ts.map +0 -1
  467. package/dist/dts/ShardingException.d.ts +0 -125
  468. package/dist/dts/ShardingException.d.ts.map +0 -1
  469. package/dist/dts/Storage.d.ts +0 -78
  470. package/dist/dts/Storage.d.ts.map +0 -1
  471. package/dist/dts/internal/atLeastOnce.d.ts +0 -2
  472. package/dist/dts/internal/atLeastOnce.d.ts.map +0 -1
  473. package/dist/dts/internal/atLeastOnceStorage.d.ts +0 -2
  474. package/dist/dts/internal/atLeastOnceStorage.d.ts.map +0 -1
  475. package/dist/dts/internal/entityState.d.ts +0 -21
  476. package/dist/dts/internal/entityState.d.ts.map +0 -1
  477. package/dist/dts/internal/managerConfig.d.ts +0 -2
  478. package/dist/dts/internal/managerConfig.d.ts.map +0 -1
  479. package/dist/dts/internal/message.d.ts +0 -9
  480. package/dist/dts/internal/message.d.ts.map +0 -1
  481. package/dist/dts/internal/messageState.d.ts +0 -2
  482. package/dist/dts/internal/messageState.d.ts.map +0 -1
  483. package/dist/dts/internal/podWithMetadata.d.ts +0 -2
  484. package/dist/dts/internal/podWithMetadata.d.ts.map +0 -1
  485. package/dist/dts/internal/pods.d.ts +0 -2
  486. package/dist/dts/internal/pods.d.ts.map +0 -1
  487. package/dist/dts/internal/podsHealth.d.ts +0 -2
  488. package/dist/dts/internal/podsHealth.d.ts.map +0 -1
  489. package/dist/dts/internal/recipientBehaviour.d.ts +0 -2
  490. package/dist/dts/internal/recipientBehaviour.d.ts.map +0 -1
  491. package/dist/dts/internal/recipientBehaviourContext.d.ts +0 -2
  492. package/dist/dts/internal/recipientBehaviourContext.d.ts.map +0 -1
  493. package/dist/dts/internal/serialization.d.ts +0 -2
  494. package/dist/dts/internal/serialization.d.ts.map +0 -1
  495. package/dist/dts/internal/shardManagerClient.d.ts +0 -2
  496. package/dist/dts/internal/shardManagerClient.d.ts.map +0 -1
  497. package/dist/dts/internal/shardManagerState.d.ts +0 -26
  498. package/dist/dts/internal/shardManagerState.d.ts.map +0 -1
  499. package/dist/dts/internal/sharding.d.ts +0 -2
  500. package/dist/dts/internal/sharding.d.ts.map +0 -1
  501. package/dist/dts/internal/shardingConfig.d.ts +0 -2
  502. package/dist/dts/internal/shardingConfig.d.ts.map +0 -1
  503. package/dist/dts/internal/storage.d.ts +0 -2
  504. package/dist/dts/internal/storage.d.ts.map +0 -1
  505. package/dist/dts/internal/utils.d.ts +0 -2
  506. package/dist/dts/internal/utils.d.ts.map +0 -1
  507. package/dist/esm/AtLeastOnce.js +0 -12
  508. package/dist/esm/AtLeastOnce.js.map +0 -1
  509. package/dist/esm/AtLeastOnceStorage.js +0 -17
  510. package/dist/esm/AtLeastOnceStorage.js.map +0 -1
  511. package/dist/esm/Broadcaster.js +0 -2
  512. package/dist/esm/Broadcaster.js.map +0 -1
  513. package/dist/esm/ManagerConfig.js +0 -26
  514. package/dist/esm/ManagerConfig.js.map +0 -1
  515. package/dist/esm/MessageState.js +0 -47
  516. package/dist/esm/MessageState.js.map +0 -1
  517. package/dist/esm/Messenger.js +0 -2
  518. package/dist/esm/Messenger.js.map +0 -1
  519. package/dist/esm/Pod.js +0 -65
  520. package/dist/esm/Pod.js.map +0 -1
  521. package/dist/esm/PodAddress.js +0 -64
  522. package/dist/esm/PodAddress.js.map +0 -1
  523. package/dist/esm/Pods.js +0 -27
  524. package/dist/esm/Pods.js.map +0 -1
  525. package/dist/esm/PodsHealth.js +0 -33
  526. package/dist/esm/PodsHealth.js.map +0 -1
  527. package/dist/esm/PoisonPill.js +0 -65
  528. package/dist/esm/PoisonPill.js.map +0 -1
  529. package/dist/esm/RecipientAddress.js +0 -67
  530. package/dist/esm/RecipientAddress.js.map +0 -1
  531. package/dist/esm/RecipientBehaviour.js +0 -30
  532. package/dist/esm/RecipientBehaviour.js.map +0 -1
  533. package/dist/esm/RecipientBehaviourContext.js +0 -56
  534. package/dist/esm/RecipientBehaviourContext.js.map +0 -1
  535. package/dist/esm/RecipientType.js +0 -108
  536. package/dist/esm/RecipientType.js.map +0 -1
  537. package/dist/esm/Serialization.js +0 -24
  538. package/dist/esm/Serialization.js.map +0 -1
  539. package/dist/esm/SerializedEnvelope.js +0 -74
  540. package/dist/esm/SerializedEnvelope.js.map +0 -1
  541. package/dist/esm/SerializedMessage.js +0 -51
  542. package/dist/esm/SerializedMessage.js.map +0 -1
  543. package/dist/esm/ShardManagerClient.js +0 -22
  544. package/dist/esm/ShardManagerClient.js.map +0 -1
  545. package/dist/esm/ShardingEvent.js +0 -62
  546. package/dist/esm/ShardingEvent.js.map +0 -1
  547. package/dist/esm/ShardingException.js +0 -91
  548. package/dist/esm/ShardingException.js.map +0 -1
  549. package/dist/esm/Storage.js +0 -32
  550. package/dist/esm/Storage.js.map +0 -1
  551. package/dist/esm/internal/atLeastOnce.js +0 -26
  552. package/dist/esm/internal/atLeastOnce.js.map +0 -1
  553. package/dist/esm/internal/atLeastOnceStorage.js +0 -154
  554. package/dist/esm/internal/atLeastOnceStorage.js.map +0 -1
  555. package/dist/esm/internal/entityState.js +0 -35
  556. package/dist/esm/internal/entityState.js.map +0 -1
  557. package/dist/esm/internal/managerConfig.js +0 -38
  558. package/dist/esm/internal/managerConfig.js.map +0 -1
  559. package/dist/esm/internal/message.js +0 -35
  560. package/dist/esm/internal/message.js.map +0 -1
  561. package/dist/esm/internal/messageState.js +0 -66
  562. package/dist/esm/internal/messageState.js.map +0 -1
  563. package/dist/esm/internal/podWithMetadata.js +0 -41
  564. package/dist/esm/internal/podWithMetadata.js.map +0 -1
  565. package/dist/esm/internal/pods.js +0 -25
  566. package/dist/esm/internal/pods.js.map +0 -1
  567. package/dist/esm/internal/podsHealth.js +0 -30
  568. package/dist/esm/internal/podsHealth.js.map +0 -1
  569. package/dist/esm/internal/recipientBehaviour.js +0 -42
  570. package/dist/esm/internal/recipientBehaviour.js.map +0 -1
  571. package/dist/esm/internal/recipientBehaviourContext.js +0 -26
  572. package/dist/esm/internal/recipientBehaviourContext.js.map +0 -1
  573. package/dist/esm/internal/serialization.js +0 -38
  574. package/dist/esm/internal/serialization.js.map +0 -1
  575. package/dist/esm/internal/shardManagerClient.js +0 -38
  576. package/dist/esm/internal/shardManagerClient.js.map +0 -1
  577. package/dist/esm/internal/shardManagerState.js +0 -36
  578. package/dist/esm/internal/shardManagerState.js.map +0 -1
  579. package/dist/esm/internal/sharding.js +0 -288
  580. package/dist/esm/internal/sharding.js.map +0 -1
  581. package/dist/esm/internal/shardingConfig.js +0 -47
  582. package/dist/esm/internal/shardingConfig.js.map +0 -1
  583. package/dist/esm/internal/storage.js +0 -42
  584. package/dist/esm/internal/storage.js.map +0 -1
  585. package/dist/esm/internal/utils.js +0 -56
  586. package/dist/esm/internal/utils.js.map +0 -1
  587. package/src/AtLeastOnce.ts +0 -28
  588. package/src/AtLeastOnceStorage.ts +0 -96
  589. package/src/Broadcaster.ts +0 -48
  590. package/src/ManagerConfig.ts +0 -67
  591. package/src/MessageState.ts +0 -126
  592. package/src/Messenger.ts +0 -40
  593. package/src/Pod.ts +0 -95
  594. package/src/PodAddress.ts +0 -94
  595. package/src/Pods.ts +0 -100
  596. package/src/PodsHealth.ts +0 -74
  597. package/src/PoisonPill.ts +0 -105
  598. package/src/RecipientAddress.ts +0 -72
  599. package/src/RecipientBehaviour.ts +0 -108
  600. package/src/RecipientBehaviourContext.ts +0 -101
  601. package/src/RecipientType.ts +0 -134
  602. package/src/Serialization.ts +0 -72
  603. package/src/SerializedEnvelope.ts +0 -108
  604. package/src/SerializedMessage.ts +0 -82
  605. package/src/ShardManagerClient.ts +0 -57
  606. package/src/ShardingEvent.ts +0 -121
  607. package/src/ShardingException.ts +0 -151
  608. package/src/Storage.ts +0 -92
  609. package/src/internal/atLeastOnce.ts +0 -59
  610. package/src/internal/atLeastOnceStorage.ts +0 -218
  611. package/src/internal/entityState.ts +0 -64
  612. package/src/internal/managerConfig.ts +0 -84
  613. package/src/internal/message.ts +0 -63
  614. package/src/internal/messageState.ts +0 -98
  615. package/src/internal/podWithMetadata.ts +0 -72
  616. package/src/internal/pods.ts +0 -29
  617. package/src/internal/podsHealth.ts +0 -39
  618. package/src/internal/recipientBehaviour.ts +0 -133
  619. package/src/internal/recipientBehaviourContext.ts +0 -70
  620. package/src/internal/serialization.ts +0 -63
  621. package/src/internal/shardManagerClient.ts +0 -49
  622. package/src/internal/shardManagerState.ts +0 -80
  623. package/src/internal/sharding.ts +0 -789
  624. package/src/internal/shardingConfig.ts +0 -97
  625. package/src/internal/storage.ts +0 -60
  626. package/src/internal/utils.ts +0 -54
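
The list above is dominated by a module-level rename of the package surface: the Pod* modules (Pod, PodAddress, Pods, PodsHealth) are removed and Runner* modules (Runner, RunnerAddress, Runners, RunnerHealth) appear in their place, ShardingException is replaced by ClusterError, and the AtLeastOnce/AtLeastOnceStorage/Storage modules are superseded by MessageStorage and ShardStorage. A rough import-migration sketch, assuming only the rename pairs visible in the file list (the subpath imports correspond to the per-module package.json entries added and removed above; the APIs inside each module changed as well, as the entityManager.ts diff below shows):

    // Before (0.28.3) -- these subpaths are removed in 0.29.0:
    // import * as Pods from "@effect/cluster/Pods"
    // import * as PodsHealth from "@effect/cluster/PodsHealth"
    // import * as ShardingException from "@effect/cluster/ShardingException"

    // After (0.29.0) -- closest equivalents by name in the new file list:
    import * as Runners from "@effect/cluster/Runners"
    import * as RunnerHealth from "@effect/cluster/RunnerHealth"
    import * as ClusterError from "@effect/cluster/ClusterError"
    import * as MessageStorage from "@effect/cluster/MessageStorage"
    import * as ShardStorage from "@effect/cluster/ShardStorage"
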
package/src/internal/entityManager.ts
@@ -1,402 +1,505 @@
-import * as Clock from "effect/Clock"
+import type * as Rpc from "@effect/rpc/Rpc"
+import { RequestId } from "@effect/rpc/RpcMessage"
+import * as RpcServer from "@effect/rpc/RpcServer"
+import * as Arr from "effect/Array"
+import * as Cause from "effect/Cause"
+import * as Context from "effect/Context"
 import * as Duration from "effect/Duration"
+import type { DurationInput } from "effect/Duration"
 import * as Effect from "effect/Effect"
 import * as Exit from "effect/Exit"
-import * as Fiber from "effect/Fiber"
-import { pipe } from "effect/Function"
+import * as FiberRef from "effect/FiberRef"
+import { identity } from "effect/Function"
 import * as HashMap from "effect/HashMap"
-import * as HashSet from "effect/HashSet"
+import * as Metric from "effect/Metric"
 import * as Option from "effect/Option"
+import * as Schedule from "effect/Schedule"
+import * as Schema from "effect/Schema"
 import * as Scope from "effect/Scope"
-import * as RefSynchronized from "effect/SynchronizedRef"
+import { AlreadyProcessingMessage, EntityNotManagedByRunner, MailboxFull, MalformedMessage } from "../ClusterError.js"
+import * as ClusterMetrics from "../ClusterMetrics.js"
+import { Persisted } from "../ClusterSchema.js"
+import type { Entity, HandlersFrom } from "../Entity.js"
+import { CurrentAddress, CurrentRunnerAddress, Request } from "../Entity.js"
+import type { EntityAddress } from "../EntityAddress.js"
+import type { EntityId } from "../EntityId.js"
+import * as Envelope from "../Envelope.js"
 import * as Message from "../Message.js"
-import * as MessageState from "../MessageState.js"
-import type * as RecipientAddress from "../RecipientAddress.js"
-import type * as RecipientBehaviour from "../RecipientBehaviour.js"
-import * as RecipientBehaviourContext from "../RecipientBehaviourContext.js"
-import type * as RecipientType from "../RecipientType.js"
-import type * as Serialization from "../Serialization.js"
-import type * as SerializedEnvelope from "../SerializedEnvelope.js"
-import type * as SerializedMessage from "../SerializedMessage.js"
-import type * as ShardId from "../ShardId.js"
-import type * as Sharding from "../Sharding.js"
-import type * as ShardingConfig from "../ShardingConfig.js"
-import * as ShardingException from "../ShardingException.js"
-import * as EntityState from "./entityState.js"
+import * as MessageStorage from "../MessageStorage.js"
+import * as Reply from "../Reply.js"
+import type { RunnerAddress } from "../RunnerAddress.js"
+import type { ShardId } from "../ShardId.js"
+import type { Sharding } from "../Sharding.js"
+import { ShardingConfig } from "../ShardingConfig.js"
+import * as Snowflake from "../Snowflake.js"
+import { EntityReaper } from "./entityReaper.js"
+import { internalInterruptors } from "./interruptors.js"
+import { ResourceMap } from "./resourceMap.js"
+import { ResourceRef } from "./resourceRef.js"
 
 /** @internal */
-const EntityManagerSymbolKey = "@effect/cluster/EntityManager"
+export interface EntityManager {
+  readonly sendLocal: <R extends Rpc.Any>(
+    message: Message.IncomingLocal<R>
+  ) => Effect.Effect<void, EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage>
 
-/** @internal */
-export const EntityManagerTypeId = Symbol.for(
-  EntityManagerSymbolKey
-)
+  readonly send: (
+    message: Message.Incoming<any>
+  ) => Effect.Effect<void, EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage>
 
-/** @internal */
-export type EntityManagerTypeId = typeof EntityManagerTypeId
+  readonly isProcessingFor: (message: Message.Incoming<any>) => boolean
 
+  readonly interruptShard: (shardId: ShardId) => Effect.Effect<void>
+}
+
+// Represents the entities managed by this entity manager
 /** @internal */
-export interface EntityManager {
-  readonly [EntityManagerTypeId]: EntityManagerTypeId
-
-  /** @internal */
-  readonly sendAndGetState: (
-    envelope: SerializedEnvelope.SerializedEnvelope
-  ) => Effect.Effect<
-    MessageState.MessageState<SerializedMessage.SerializedMessage>,
-    | ShardingException.EntityNotManagedByThisPodException
-    | ShardingException.PodUnavailableException
-    | ShardingException.ExceptionWhileOfferingMessageException
-    | ShardingException.SerializationException
-  >
-
-  /** @internal */
-  readonly terminateEntitiesOnShards: (
-    shards: HashSet.HashSet<ShardId.ShardId>
-  ) => Effect.Effect<void>
-
-  /** @internal */
-  readonly terminateAllEntities: Effect.Effect<void>
+export type EntityState = {
+  readonly address: EntityAddress
+  readonly mailboxGauge: Metric.Metric.Gauge<bigint>
+  readonly activeRequests: Map<bigint, {
+    readonly rpc: Rpc.AnyWithProps
+    readonly message: Message.IncomingRequestLocal<any>
+    lastSentChunk: Option.Option<Reply.Chunk<Rpc.Any>>
+    sequence: number
+  }>
+  lastActiveCheck: number
+  write: RpcServer.RpcServer<any>["write"]
 }
 
 /** @internal */
-export function make<Msg extends Message.Message.Any, R>(
-  recipientType: RecipientType.RecipientType<Msg>,
-  recipientBehaviour: RecipientBehaviour.RecipientBehaviour<Msg, R>,
-  sharding: Sharding.Sharding,
-  config: ShardingConfig.ShardingConfig,
-  serialization: Serialization.Serialization,
-  options: RecipientBehaviour.EntityBehaviourOptions = {}
+export const make = Effect.fnUntraced(function*<
+  Rpcs extends Rpc.Any,
+  Handlers extends HandlersFrom<Rpcs>,
+  RX
+>(
+  entity: Entity<Rpcs>,
+  buildHandlers: Effect.Effect<Handlers, never, RX>,
+  options: {
+    readonly sharding: Sharding["Type"]
+    readonly storage: MessageStorage.MessageStorage["Type"]
+    readonly runnerAddress: RunnerAddress
+    readonly maxIdleTime?: DurationInput | undefined
+    readonly concurrency?: number | "unbounded" | undefined
+    readonly mailboxCapacity?: number | "unbounded" | undefined
+  }
 ) {
-  return Effect.gen(function*() {
-    const entityMaxIdle = options.entityMaxIdleTime || Option.none()
-    const env = yield* Effect.context<Exclude<R, RecipientBehaviourContext.RecipientBehaviourContext>>()
-    const entityStates = yield* RefSynchronized.make<
-      HashMap.HashMap<
-        RecipientAddress.RecipientAddress,
-        EntityState.EntityState
-      >
-    >(HashMap.empty())
-
-    function startExpirationFiber(recipientAddress: RecipientAddress.RecipientAddress) {
-      const maxIdleMillis = pipe(
-        entityMaxIdle,
-        Option.getOrElse(() => config.entityMaxIdleTime),
-        Duration.toMillis
-      )
+  const config = yield* ShardingConfig
+  const snowflakeGen = yield* Snowflake.Generator
+  const managerScope = yield* Effect.scope
+  const storageEnabled = options.storage !== MessageStorage.noop
+  const mailboxCapacity = options.mailboxCapacity ?? config.entityMailboxCapacity
+  const clock = yield* Effect.clock
+  const context = yield* Effect.context<Rpc.Context<Rpcs> | Rpc.Middleware<Rpcs> | RX>()
 
-      function sleep(duration: number): Effect.Effect<void> {
-        return pipe(
-          Effect.Do,
-          Effect.zipLeft(Clock.sleep(Duration.millis(duration))),
-          Effect.bind("cdt", () => Clock.currentTimeMillis),
-          Effect.bind("map", () => RefSynchronized.get(entityStates)),
-          Effect.let("lastReceivedAt", ({ map }) =>
-            pipe(
-              HashMap.get(map, recipientAddress),
-              Option.map((_) => _.lastReceivedAt),
-              Option.getOrElse(() => 0)
-            )),
-          Effect.let("remaining", ({ cdt, lastReceivedAt }) => (maxIdleMillis - cdt + lastReceivedAt)),
-          Effect.tap((_) => _.remaining > 0 ? sleep(_.remaining) : Effect.void)
-        )
-      }
+  const activeServers = new Map<EntityId, EntityState>()
 
-      return pipe(
-        sleep(maxIdleMillis),
-        Effect.zipRight(forkEntityTermination(recipientAddress)),
-        Effect.asVoid,
-        Effect.interruptible,
-        Effect.annotateLogs("entityId", recipientAddress),
-        Effect.annotateLogs("recipientType", recipientType.name),
-        Effect.forkDaemon
-      )
+  const entities: ResourceMap<
+    EntityAddress,
+    EntityState,
+    EntityNotManagedByRunner
+  > = yield* ResourceMap.make(Effect.fnUntraced(function*(address) {
+    if (yield* options.sharding.isShutdown) {
+      return yield* new EntityNotManagedByRunner({ address })
    }
 
-    /**
-     * Performs proper termination of the entity, interrupting the expiration timer, closing the scope and failing pending replies
-     */
-    function terminateEntity(recipientAddress: RecipientAddress.RecipientAddress) {
-      return pipe(
-        // get the things to cleanup
-        RefSynchronized.get(
-          entityStates
-        ),
-        Effect.map(HashMap.get(recipientAddress)),
-        Effect.flatMap(Option.match({
-          // there is no entity state to cleanup
-          onNone: () => Effect.void,
-          // found it!
-          onSome: (entityState) =>
-            pipe(
-              // interrupt the expiration timer
-              Fiber.interrupt(entityState.expirationFiber),
-              // close the scope of the entity,
-              Effect.ensuring(Scope.close(entityState.executionScope, Exit.void)),
-              // remove the entry from the map
-              Effect.ensuring(RefSynchronized.update(entityStates, HashMap.remove(recipientAddress))),
-              // log error if happens
-              Effect.catchAllCause(Effect.logError),
-              Effect.asVoid,
-              Effect.annotateLogs("entityId", recipientAddress.entityId),
-              Effect.annotateLogs("recipientType", recipientAddress.recipientTypeName)
-            )
-        }))
-      )
-    }
+    const scope = yield* Effect.scope
+    const endLatch = yield* Effect.makeLatch()
 
-    /**
-     * Begins entity termination (if needed) and return the fiber to wait for completed termination (if any)
-     */
-    function forkEntityTermination(
-      recipientAddress: RecipientAddress.RecipientAddress
-    ): Effect.Effect<Option.Option<Fiber.RuntimeFiber<void, never>>> {
-      return RefSynchronized.modifyEffect(entityStates, (entityStatesMap) =>
-        pipe(
-          HashMap.get(entityStatesMap, recipientAddress),
-          Option.match({
-            // if no entry is found, the entity has succefully shut down
-            onNone: () => Effect.succeed([Option.none(), entityStatesMap] as const),
-            // there is an entry, so we should begin termination
-            onSome: (entityState) =>
-              pipe(
-                entityState.terminationFiber,
-                Option.match({
-                  // termination has already begun, keep everything as-is
-                  onSome: () => Effect.succeed([entityState.terminationFiber, entityStatesMap] as const),
-                  // begin to terminate the queue
-                  onNone: () =>
-                    pipe(
-                      terminateEntity(recipientAddress),
-                      Effect.forkDaemon,
-                      Effect.map((terminationFiber) =>
-                        [
-                          Option.some(terminationFiber),
-                          HashMap.modify(
-                            entityStatesMap,
-                            recipientAddress,
-                            EntityState.withTerminationFiber(terminationFiber)
-                          )
-                        ] as const
-                      )
-                    )
-                })
-              )
-          })
-        ))
-    }
+    // on shutdown, reset the storage for the entity
+    yield* Scope.addFinalizer(
+      scope,
+      Effect.ignore(options.storage.resetAddress(address))
+    )
 
-    function getOrCreateEntityState(
-      recipientAddress: RecipientAddress.RecipientAddress
-    ): Effect.Effect<
-      Option.Option<EntityState.EntityState>,
-      ShardingException.EntityNotManagedByThisPodException
-    > {
-      return RefSynchronized.modifyEffect(entityStates, (map) =>
-        pipe(
-          HashMap.get(map, recipientAddress),
-          Option.match({
-            onSome: (entityState) =>
-              pipe(
-                entityState.terminationFiber,
-                Option.match({
-                  // offer exists, delay the interruption fiber and return the offer
-                  onNone: () =>
-                    pipe(
-                      Clock.currentTimeMillis,
-                      Effect.map(
-                        (cdt) =>
-                          [
-                            Option.some(entityState),
-                            HashMap.modify(map, recipientAddress, EntityState.withLastReceivedAd(cdt))
-                          ] as const
-                      )
-                    ),
-                  // the queue is shutting down, stash and retry
-                  onSome: () => Effect.succeed([Option.none(), map] as const)
-                })
-              ),
-            onNone: () =>
-              Effect.flatMap(sharding.isShuttingDown, (isGoingDown) => {
-                if (isGoingDown) {
-                  // don't start any fiber while sharding is shutting down
-                  return Effect.fail(new ShardingException.EntityNotManagedByThisPodException({ recipientAddress }))
-                } else {
-                  // offer doesn't exist, create a new one
-                  return Effect.gen(function*() {
-                    const executionScope = yield* Scope.make()
-                    const expirationFiber = yield* startExpirationFiber(recipientAddress)
-                    const cdt = yield* Clock.currentTimeMillis
-                    const forkShutdown = pipe(forkEntityTermination(recipientAddress), Effect.asVoid)
-                    const shardId = sharding.getShardId(recipientAddress)
-
-                    const sendAndGetState = yield* pipe(
-                      recipientBehaviour,
-                      Effect.map((offer) => (envelope: SerializedEnvelope.SerializedEnvelope) =>
-                        pipe(
-                          serialization.decode(recipientType.schema, envelope.body),
-                          Effect.flatMap((message) =>
-                            pipe(
-                              offer(message),
-                              Effect.flatMap((_) =>
-                                MessageState.mapEffect(
-                                  _,
-                                  (value) => serialization.encode(Message.exitSchema(message), value)
-                                )
-                              )
-                            )
-                          )
-                        )
-                      ),
-                      Scope.extend(executionScope),
-                      Effect.provideService(
-                        RecipientBehaviourContext.RecipientBehaviourContext,
-                        RecipientBehaviourContext.make({
-                          recipientAddress,
-                          shardId,
-                          recipientType: recipientType as any,
-                          forkShutdown
-                        })
-                      ),
-                      Effect.provide(env)
+    const activeRequests: EntityState["activeRequests"] = new Map()
+    let defectRequestIds: Array<bigint> = []
+
+    // the server is stored in a ref, so if there is a defect, we can
+    // swap the server without losing the active requests
+    const writeRef = yield* ResourceRef.from(
+      scope,
+      Effect.fnUntraced(function*(scope) {
+        let isShuttingDown = false
+
+        // Initiate the behavior for the entity
+        const handlers = yield* (entity.protocol.toHandlersContext(buildHandlers).pipe(
+          Effect.provide(context.pipe(
+            Context.add(CurrentAddress, address),
+            Context.add(CurrentRunnerAddress, options.runnerAddress),
+            Context.add(Scope.Scope, scope)
+          )),
+          Effect.locally(FiberRef.currentLogAnnotations, HashMap.empty())
+        ) as Effect.Effect<Context.Context<Rpc.ToHandler<Rpcs>>>)
+
+        const server = yield* RpcServer.makeNoSerialization(entity.protocol, {
+          spanPrefix: `${entity.type}(${address.entityId})`,
+          concurrency: options.concurrency ?? 1,
+          onFromServer(response): Effect.Effect<void> {
+            switch (response._tag) {
+              case "Exit": {
+                const request = activeRequests.get(response.requestId)
+                if (!request) return Effect.void
+
+                // For durable messages, ignore interrupts during shutdown.
+                // They will be retried when the entity is restarted.
+                if (
+                  storageEnabled &&
+                  isShuttingDown &&
+                  Context.get(request.rpc.annotations, Persisted) &&
+                  Exit.isInterrupted(response.exit)
+                ) {
+                  return Effect.void
+                }
+                return retryRespond(
+                  4,
+                  Effect.suspend(() =>
+                    request.message.respond(
+                      new Reply.WithExit({
+                        requestId: Snowflake.Snowflake(response.requestId),
+                        id: snowflakeGen.unsafeNext(),
+                        exit: response.exit
+                      })
                     )
+                  )
+                ).pipe(
+                  Effect.flatMap(() => {
+                    activeRequests.delete(response.requestId)
 
-                    const entityState = EntityState.make({
-                      sendAndGetState,
-                      expirationFiber,
-                      executionScope,
-                      terminationFiber: Option.none(),
-                      lastReceivedAt: cdt
-                    })
+                    // ensure that the reaper does not remove the entity as we haven't
+                    // been "idle" yet
+                    if (activeRequests.size === 0) {
+                      state.lastActiveCheck = clock.unsafeCurrentTimeMillis()
+                    }
 
-                    return [
-                      Option.some(entityState),
-                      HashMap.set(
-                        map,
-                        recipientAddress,
-                        entityState
-                      )
-                    ] as const
+                    return Effect.void
+                  }),
+                  Effect.orDie
+                )
+              }
+              case "Chunk": {
+                const request = activeRequests.get(response.requestId)
+                if (!request) return Effect.void
+                const sequence = request.sequence
+                request.sequence++
+                return Effect.orDie(retryRespond(
+                  4,
+                  Effect.suspend(() => {
+                    const reply = new Reply.Chunk({
+                      requestId: Snowflake.Snowflake(response.requestId),
+                      id: snowflakeGen.unsafeNext(),
+                      sequence,
+                      values: response.values
+                    })
+                    request.lastSentChunk = Option.some(reply)
+                    return request.message.respond(reply)
                   })
-                }
-              })
+                ))
+              }
+              case "Defect": {
+                const effect = writeRef.unsafeRebuild()
+                defectRequestIds = Array.from(activeRequests.keys())
+                return Effect.logError("Defect in entity, restarting", Cause.die(response.defect)).pipe(
+                  Effect.andThen(effect.pipe(
+                    Effect.tapErrorCause(Effect.logError),
+                    Effect.retry(Schedule.spaced(500))
+                  )),
+                  Effect.annotateLogs({
+                    module: "EntityManager",
+                    address,
+                    runner: options.runnerAddress
+                  })
+                )
+              }
+              case "ClientEnd": {
+                return endLatch.open
+              }
+            }
+          }
+        }).pipe(
+          Scope.extend(scope),
+          Effect.provide(handlers)
+        )
+
+        yield* Scope.addFinalizer(
+          scope,
+          Effect.sync(() => {
+            isShuttingDown = true
+          })
+        )
+
+        for (const id of defectRequestIds) {
+          const { lastSentChunk, message } = activeRequests.get(id)!
+          yield* server.write(0, {
+            ...message.envelope,
+            id: RequestId(message.envelope.requestId),
+            tag: message.envelope.tag as any,
+            payload: new Request({
+              ...message.envelope,
+              lastSentChunk
+            } as any) as any
          })
-        ))
+        }
+        defectRequestIds = []
+
+        return server.write
+      })
+    )
+
+    const state: EntityState = {
+      address,
+      mailboxGauge: ClusterMetrics.mailboxSize.pipe(
+        Metric.tagged("type", entity.type),
+        Metric.tagged("entityId", address.entityId)
+      ),
+      write(clientId, message) {
+        if (writeRef.state.current._tag !== "Acquired") {
+          return Effect.flatMap(writeRef.await, (write) => write(clientId, message))
+        }
+        return writeRef.state.current.value(clientId, message)
+      },
+      activeRequests,
+      lastActiveCheck: clock.unsafeCurrentTimeMillis()
    }
 
-    function sendAndGetState(
-      envelope: SerializedEnvelope.SerializedEnvelope
-    ): Effect.Effect<
-      MessageState.MessageState<SerializedMessage.SerializedMessage>,
-      | ShardingException.EntityNotManagedByThisPodException
-      | ShardingException.PodUnavailableException
-      | ShardingException.ExceptionWhileOfferingMessageException
-      | ShardingException.SerializationException
-    > {
-      return pipe(
-        Effect.Do,
-        Effect.tap(() => {
-          // first, verify that this entity should be handled by this pod
-          if (recipientType._tag === "EntityType") {
-            return Effect.asVoid(Effect.unlessEffect(
-              Effect.fail(
-                new ShardingException.EntityNotManagedByThisPodException({
-                  recipientAddress: envelope.recipientAddress
-                })
-              ),
-              sharding.isEntityOnLocalShards(envelope.recipientAddress)
-            ))
-          } else if (recipientType._tag === "TopicType") {
-            return Effect.void
-          }
-          return Effect.die("Unhandled recipientType")
-        }),
-        Effect.bind("maybeEntityState", () => getOrCreateEntityState(envelope.recipientAddress)),
-        Effect.flatMap((_) =>
-          pipe(
-            _.maybeEntityState,
-            Option.match({
-              onNone: () =>
-                pipe(
-                  Effect.sleep(Duration.millis(100)),
-                  Effect.flatMap(() => sendAndGetState(envelope))
-                ),
-              onSome: (entityState) => {
-                return entityState.sendAndGetState(envelope)
-              }
-            })
-          )
+    // During shutdown, signal that no more messages will be processed
+    // and wait for the fiber to complete.
+    //
+    // If the termination timeout is reached, let the server clean itself up
+    yield* Scope.addFinalizer(
+      scope,
+      Effect.withFiberRuntime((fiber) => {
+        activeServers.delete(address.entityId)
+        internalInterruptors.add(fiber.id())
+        return state.write(0, { _tag: "Eof" }).pipe(
+          Effect.andThen(Effect.interruptible(endLatch.await)),
+          Effect.timeoutOption(config.entityTerminationTimeout)
        )
-      )
+      })
+    )
+    activeServers.set(address.entityId, state)
+
+    return state
+  }, Effect.locally(FiberRef.currentLogAnnotations, HashMap.empty())))
+
+  const reaper = yield* EntityReaper
+  const maxIdleTime = Duration.toMillis(options.maxIdleTime ?? config.entityMaxIdleTime)
+  if (Number.isFinite(maxIdleTime)) {
+    yield* reaper.register({
+      maxIdleTime,
+      servers: activeServers,
+      entities
+    })
+  }
+
+  // update metrics for active servers
+  const gauge = ClusterMetrics.entities.pipe(Metric.tagged("type", entity.type))
+  yield* Effect.sync(() => {
+    gauge.unsafeUpdate(BigInt(activeServers.size), [])
+    for (const state of activeServers.values()) {
+      state.mailboxGauge.unsafeUpdate(BigInt(state.activeRequests.size), [])
    }
+  }).pipe(
+    Effect.andThen(Effect.sleep(1000)),
+    Effect.forever,
+    Effect.forkIn(managerScope)
+  )
 
-    const terminateAllEntities = pipe(
-      RefSynchronized.get(entityStates),
-      Effect.map(HashMap.keySet),
-      Effect.flatMap(terminateEntities)
-    )
+  function sendLocal<R extends Rpc.Any>(
+    message: Message.IncomingLocal<R>
+  ): Effect.Effect<void, EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage> {
+    return Effect.locally(
+      Effect.flatMap(
+        entities.get(message.envelope.address),
+        (server): Effect.Effect<void, EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage> => {
+          switch (message._tag) {
+            case "IncomingRequestLocal": {
+              // If the request is already running, then we might have more than
+              // one sender for the same request. In this case, the other senders
+              // should resume from storage only.
+              let entry = server.activeRequests.get(message.envelope.requestId)
+              if (entry) {
+                return Effect.fail(
+                  new AlreadyProcessingMessage({
+                    envelopeId: message.envelope.requestId,
+                    address: message.envelope.address
+                  })
+                )
+              }
 
-    function terminateEntities(
-      entitiesToTerminate: HashSet.HashSet<
-        RecipientAddress.RecipientAddress
-      >
-    ) {
-      return pipe(
-        entitiesToTerminate,
-        Effect.forEach(
-          (recipientAddress) =>
-            pipe(
-              forkEntityTermination(recipientAddress),
-              Effect.flatMap((_) =>
-                Option.match(_, {
-                  onNone: () => Effect.void,
-                  onSome: (terminationFiber) =>
-                    pipe(
-                      Fiber.await(terminationFiber),
-                      Effect.timeout(config.entityTerminationTimeout),
-                      Effect.match({
-                        onFailure: () =>
-                          Effect.logError(
-                            `Entity ${recipientAddress} termination is taking more than expected entityTerminationTimeout (${
-                              Duration.toMillis(config.entityTerminationTimeout)
-                            }ms).`
-                          ),
-                        onSuccess: () =>
-                          Effect.logDebug(
-                            `Entity ${recipientAddress} cleaned up.`
-                          )
-                      }),
-                      Effect.asVoid
-                    )
+              if (mailboxCapacity !== "unbounded" && server.activeRequests.size >= mailboxCapacity) {
+                return Effect.fail(new MailboxFull({ address: message.envelope.address }))
+              }
+
+              entry = {
+                rpc: entity.protocol.requests.get(message.envelope.tag)! as any as Rpc.AnyWithProps,
+                message,
+                lastSentChunk: message.lastSentReply as any,
+                sequence: Option.match(message.lastSentReply, {
+                  onNone: () => 0,
+                  onSome: (reply) => reply._tag === "Chunk" ? reply.sequence + 1 : 0
                })
+              }
+              server.activeRequests.set(message.envelope.requestId, entry)
+              return server.write(0, {
+                ...message.envelope,
+                id: RequestId(message.envelope.requestId),
+                payload: new Request({
+                  ...message.envelope,
+                  lastSentChunk: message.lastSentReply as any
                })
+              })
+            }
+            case "IncomingEnvelope": {
+              const entry = server.activeRequests.get(message.envelope.requestId)
+              if (!entry) {
+                return Effect.fail(new EntityNotManagedByRunner({ address: message.envelope.address }))
+              } else if (
+                message.envelope._tag === "AckChunk" &&
+                Option.isSome(entry.lastSentChunk) &&
+                message.envelope.replyId !== entry.lastSentChunk.value.id
+              ) {
+                return Effect.void
+              }
+              return server.write(
+                0,
+                message.envelope._tag === "AckChunk"
+                  ? { _tag: "Ack", requestId: RequestId(message.envelope.requestId) }
+                  : { _tag: "Interrupt", requestId: RequestId(message.envelope.requestId), interruptors: [] }
              )
-            ),
-          { concurrency: "inherit" }
-        ),
-        Effect.asVoid
-      )
-    }
+            }
+          }
+        }
+      ),
+      FiberRef.currentLogAnnotations,
+      HashMap.empty()
+    )
+  }
 
-    function terminateEntitiesOnShards(shards: HashSet.HashSet<ShardId.ShardId>) {
-      return pipe(
-        RefSynchronized.modify(entityStates, (entities) => [
-          HashMap.filter(
-            entities,
-            (_, recipientAddress) => HashSet.has(shards, sharding.getShardId(recipientAddress))
-          ),
-          entities
-        ]),
-        Effect.map(HashMap.keySet),
-        Effect.flatMap(terminateEntities)
+  const interruptShard = (shardId: ShardId) =>
+    Effect.suspend(function loop(): Effect.Effect<void> {
+      const toInterrupt = new Set<EntityState>()
+      for (const state of activeServers.values()) {
+        if (shardId === state.address.shardId) {
+          toInterrupt.add(state)
+        }
+      }
+      if (toInterrupt.size === 0) {
+        return Effect.void
+      }
+      return Effect.flatMap(
+        Effect.forEach(toInterrupt, (state) => entities.removeIgnore(state.address), {
+          concurrency: "unbounded",
+          discard: true
+        }),
+        loop
      )
-    }
+    })
 
-    const self: EntityManager = {
-      [EntityManagerTypeId]: EntityManagerTypeId,
-      sendAndGetState,
-      terminateAllEntities,
-      terminateEntitiesOnShards
-    }
-    return self
+  const decodeMessage = Schema.decode(makeMessageSchema(entity))
+
+  return identity<EntityManager>({
+    interruptShard,
+    isProcessingFor(message) {
+      const state = activeServers.get(message.envelope.address.entityId)
+      if (!state) return false
+      return state.activeRequests.has(message.envelope.requestId)
+    },
+    sendLocal,
+    send: (message) =>
+      decodeMessage(message).pipe(
+        Effect.matchEffect({
+          onFailure: (cause) => {
+            if (message._tag === "IncomingEnvelope") {
+              return Effect.die(new MalformedMessage({ cause }))
+            }
+            return Effect.orDie(message.respond(
+              new Reply.ReplyWithContext({
+                reply: new Reply.WithExit({
+                  id: snowflakeGen.unsafeNext(),
+                  requestId: message.envelope.requestId,
+                  exit: Exit.die(new MalformedMessage({ cause }))
+                }),
+                rpc: entity.protocol.requests.get(message.envelope.tag)!,
+                context
+              })
+            ))
+          },
+          onSuccess: (decoded) => {
+            if (decoded._tag === "IncomingEnvelope") {
+              return sendLocal(
+                new Message.IncomingEnvelope(decoded)
+              )
+            }
+            const request = message as Message.IncomingRequest<any>
+            const rpc = entity.protocol.requests.get(decoded.envelope.tag)!
+            return sendLocal(
+              new Message.IncomingRequestLocal({
+                envelope: decoded.envelope,
+                lastSentReply: decoded.lastSentReply,
+                respond: (reply) =>
+                  request.respond(
+                    new Reply.ReplyWithContext({
+                      reply,
+                      rpc,
+                      context
+                    })
+                  )
+              })
+            )
+          }
+        }),
+        Effect.provide(context as Context.Context<unknown>)
+      )
  })
+})
+
+const makeMessageSchema = <Rpcs extends Rpc.Any>(entity: Entity<Rpcs>): Schema.Schema<
+  {
+    readonly _tag: "IncomingRequest"
+    readonly envelope: Envelope.Request.Any
+    readonly lastSentReply: Option.Option<Reply.Reply<Rpcs>>
+  } | {
+    readonly _tag: "IncomingEnvelope"
+    readonly envelope: Envelope.AckChunk | Envelope.Interrupt
+  },
+  Message.Incoming<Rpcs>,
+  Rpc.Context<Rpcs>
+> => {
+  const requests = Arr.empty<Schema.Schema.Any>()
+
+  for (const rpc of entity.protocol.requests.values()) {
+    requests.push(
+      Schema.TaggedStruct("IncomingRequest", {
+        envelope: Schema.transform(
+          Schema.Struct({
+            ...Envelope.PartialEncodedRequestFromSelf.fields,
+            tag: Schema.Literal(rpc._tag),
+            payload: (rpc as any as Rpc.AnyWithProps).payloadSchema
+          }),
+          Envelope.RequestFromSelf,
+          {
+            decode: (encoded) => Envelope.makeRequest(encoded),
+            encode: identity
+          }
+        ),
+        lastSentReply: Schema.OptionFromSelf(Reply.Reply(rpc))
+      })
+    )
+  }
+
+  return Schema.Union(
+    ...requests,
+    Schema.TaggedStruct("IncomingEnvelope", {
+      envelope: Schema.Union(
+        Schema.typeSchema(Envelope.AckChunk),
+        Schema.typeSchema(Envelope.Interrupt)
+      )
+    })
+  ) as any
 }
+
+const retryRespond = <A, E, R>(times: number, effect: Effect.Effect<A, E, R>): Effect.Effect<A, E, R> =>
+  times === 0 ?
+    effect :
+    Effect.catchAll(effect, () => Effect.delay(retryRespond(times - 1, effect), 200))