@effect/cluster 0.28.4 → 0.29.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (626)
  1. package/ClusterError/package.json +6 -0
  2. package/ClusterMetrics/package.json +6 -0
  3. package/ClusterSchema/package.json +6 -0
  4. package/DeliverAt/package.json +6 -0
  5. package/Entity/package.json +6 -0
  6. package/EntityAddress/package.json +6 -0
  7. package/EntityId/package.json +6 -0
  8. package/EntityType/package.json +6 -0
  9. package/Envelope/package.json +6 -0
  10. package/HttpCommon/package.json +6 -0
  11. package/HttpRunner/package.json +6 -0
  12. package/HttpShardManager/package.json +6 -0
  13. package/MachineId/package.json +6 -0
  14. package/MessageStorage/package.json +6 -0
  15. package/README.md +2 -2
  16. package/Reply/package.json +6 -0
  17. package/Runner/package.json +6 -0
  18. package/RunnerAddress/package.json +6 -0
  19. package/RunnerHealth/package.json +6 -0
  20. package/RunnerServer/package.json +6 -0
  21. package/Runners/package.json +6 -0
  22. package/ShardStorage/package.json +6 -0
  23. package/Singleton/package.json +6 -0
  24. package/SingletonAddress/package.json +6 -0
  25. package/Snowflake/package.json +6 -0
  26. package/SocketRunner/package.json +6 -0
  27. package/SocketShardManager/package.json +6 -0
  28. package/SqlMessageStorage/package.json +6 -0
  29. package/SqlShardStorage/package.json +6 -0
  30. package/SynchronizedClock/package.json +6 -0
  31. package/dist/cjs/ClusterError.js +180 -0
  32. package/dist/cjs/ClusterError.js.map +1 -0
  33. package/dist/cjs/ClusterMetrics.js +63 -0
  34. package/dist/cjs/ClusterMetrics.js.map +1 -0
  35. package/dist/cjs/{Pods.js → ClusterSchema.js} +10 -22
  36. package/dist/cjs/ClusterSchema.js.map +1 -0
  37. package/dist/cjs/DeliverAt.js +30 -0
  38. package/dist/cjs/DeliverAt.js.map +1 -0
  39. package/dist/cjs/Entity.js +187 -0
  40. package/dist/cjs/Entity.js.map +1 -0
  41. package/dist/cjs/EntityAddress.js +54 -0
  42. package/dist/cjs/EntityAddress.js.map +1 -0
  43. package/dist/cjs/{AtLeastOnce.js → EntityId.js} +6 -7
  44. package/dist/cjs/EntityId.js.map +1 -0
  45. package/dist/cjs/{ShardManagerClient.js → EntityType.js} +5 -16
  46. package/dist/cjs/EntityType.js.map +1 -0
  47. package/dist/cjs/Envelope.js +168 -0
  48. package/dist/cjs/Envelope.js.map +1 -0
  49. package/dist/cjs/HttpCommon.js +49 -0
  50. package/dist/cjs/HttpCommon.js.map +1 -0
  51. package/dist/cjs/HttpRunner.js +108 -0
  52. package/dist/cjs/HttpRunner.js.map +1 -0
  53. package/dist/cjs/HttpShardManager.js +140 -0
  54. package/dist/cjs/HttpShardManager.js.map +1 -0
  55. package/dist/cjs/{AtLeastOnceStorage.js → MachineId.js} +11 -9
  56. package/dist/cjs/MachineId.js.map +1 -0
  57. package/dist/cjs/Message.js +99 -18
  58. package/dist/cjs/Message.js.map +1 -1
  59. package/dist/cjs/MessageStorage.js +356 -0
  60. package/dist/cjs/MessageStorage.js.map +1 -0
  61. package/dist/cjs/Reply.js +200 -0
  62. package/dist/cjs/Reply.js.map +1 -0
  63. package/dist/cjs/Runner.js +79 -0
  64. package/dist/cjs/Runner.js.map +1 -0
  65. package/dist/cjs/RunnerAddress.js +63 -0
  66. package/dist/cjs/RunnerAddress.js.map +1 -0
  67. package/dist/cjs/RunnerHealth.js +68 -0
  68. package/dist/cjs/RunnerHealth.js.map +1 -0
  69. package/dist/cjs/RunnerServer.js +125 -0
  70. package/dist/cjs/RunnerServer.js.map +1 -0
  71. package/dist/cjs/Runners.js +344 -0
  72. package/dist/cjs/Runners.js.map +1 -0
  73. package/dist/cjs/ShardId.js +7 -46
  74. package/dist/cjs/ShardId.js.map +1 -1
  75. package/dist/cjs/ShardManager.js +493 -8
  76. package/dist/cjs/ShardManager.js.map +1 -1
  77. package/dist/cjs/ShardStorage.js +139 -0
  78. package/dist/cjs/ShardStorage.js.map +1 -0
  79. package/dist/cjs/Sharding.js +732 -88
  80. package/dist/cjs/Sharding.js.map +1 -1
  81. package/dist/cjs/ShardingConfig.js +85 -18
  82. package/dist/cjs/ShardingConfig.js.map +1 -1
  83. package/dist/cjs/ShardingRegistrationEvent.js +26 -32
  84. package/dist/cjs/ShardingRegistrationEvent.js.map +1 -1
  85. package/dist/cjs/{ManagerConfig.js → Singleton.js} +11 -20
  86. package/dist/cjs/Singleton.js.map +1 -0
  87. package/dist/cjs/SingletonAddress.js +50 -0
  88. package/dist/cjs/SingletonAddress.js.map +1 -0
  89. package/dist/cjs/Snowflake.js +133 -0
  90. package/dist/cjs/Snowflake.js.map +1 -0
  91. package/dist/cjs/SocketRunner.js +40 -0
  92. package/dist/cjs/SocketRunner.js.map +1 -0
  93. package/dist/cjs/SocketShardManager.js +33 -0
  94. package/dist/cjs/SocketShardManager.js.map +1 -0
  95. package/dist/cjs/SqlMessageStorage.js +668 -0
  96. package/dist/cjs/SqlMessageStorage.js.map +1 -0
  97. package/dist/cjs/SqlShardStorage.js +228 -0
  98. package/dist/cjs/SqlShardStorage.js.map +1 -0
  99. package/dist/cjs/SynchronizedClock.js +66 -0
  100. package/dist/cjs/SynchronizedClock.js.map +1 -0
  101. package/dist/cjs/index.js +57 -45
  102. package/dist/cjs/internal/entityManager.js +311 -143
  103. package/dist/cjs/internal/entityManager.js.map +1 -1
  104. package/dist/cjs/internal/entityReaper.js +47 -0
  105. package/dist/cjs/internal/entityReaper.js.map +1 -0
  106. package/dist/cjs/internal/hash.js +20 -0
  107. package/dist/cjs/internal/hash.js.map +1 -0
  108. package/dist/cjs/internal/interruptors.js +9 -0
  109. package/dist/cjs/internal/interruptors.js.map +1 -0
  110. package/dist/cjs/internal/resourceMap.js +88 -0
  111. package/dist/cjs/internal/resourceMap.js.map +1 -0
  112. package/dist/cjs/internal/resourceRef.js +92 -0
  113. package/dist/cjs/internal/resourceRef.js.map +1 -0
  114. package/dist/cjs/internal/shardManager.js +219 -235
  115. package/dist/cjs/internal/shardManager.js.map +1 -1
  116. package/dist/dts/ClusterError.d.ts +169 -0
  117. package/dist/dts/ClusterError.d.ts.map +1 -0
  118. package/dist/dts/ClusterMetrics.d.ts +50 -0
  119. package/dist/dts/ClusterMetrics.d.ts.map +1 -0
  120. package/dist/dts/ClusterSchema.d.ts +13 -0
  121. package/dist/dts/ClusterSchema.d.ts.map +1 -0
  122. package/dist/dts/DeliverAt.d.ts +27 -0
  123. package/dist/dts/DeliverAt.d.ts.map +1 -0
  124. package/dist/dts/Entity.d.ts +180 -0
  125. package/dist/dts/Entity.d.ts.map +1 -0
  126. package/dist/dts/EntityAddress.d.ts +55 -0
  127. package/dist/dts/EntityAddress.d.ts.map +1 -0
  128. package/dist/dts/EntityId.d.ts +15 -0
  129. package/dist/dts/EntityId.d.ts.map +1 -0
  130. package/dist/dts/EntityType.d.ts +15 -0
  131. package/dist/dts/EntityType.d.ts.map +1 -0
  132. package/dist/dts/Envelope.d.ts +252 -0
  133. package/dist/dts/Envelope.d.ts.map +1 -0
  134. package/dist/dts/HttpCommon.d.ts +25 -0
  135. package/dist/dts/HttpCommon.d.ts.map +1 -0
  136. package/dist/dts/HttpRunner.d.ts +76 -0
  137. package/dist/dts/HttpRunner.d.ts.map +1 -0
  138. package/dist/dts/HttpShardManager.d.ts +119 -0
  139. package/dist/dts/HttpShardManager.d.ts.map +1 -0
  140. package/dist/dts/MachineId.d.ts +20 -0
  141. package/dist/dts/MachineId.d.ts.map +1 -0
  142. package/dist/dts/Message.d.ts +91 -74
  143. package/dist/dts/Message.d.ts.map +1 -1
  144. package/dist/dts/MessageStorage.d.ts +336 -0
  145. package/dist/dts/MessageStorage.d.ts.map +1 -0
  146. package/dist/dts/Reply.d.ts +171 -0
  147. package/dist/dts/Reply.d.ts.map +1 -0
  148. package/dist/dts/Runner.d.ts +81 -0
  149. package/dist/dts/Runner.d.ts.map +1 -0
  150. package/dist/dts/RunnerAddress.d.ts +56 -0
  151. package/dist/dts/RunnerAddress.d.ts.map +1 -0
  152. package/dist/dts/RunnerHealth.d.ts +54 -0
  153. package/dist/dts/RunnerHealth.d.ts.map +1 -0
  154. package/dist/dts/RunnerServer.d.ts +44 -0
  155. package/dist/dts/RunnerServer.d.ts.map +1 -0
  156. package/dist/dts/Runners.d.ts +161 -0
  157. package/dist/dts/Runners.d.ts.map +1 -0
  158. package/dist/dts/ShardId.d.ts +5 -55
  159. package/dist/dts/ShardId.d.ts.map +1 -1
  160. package/dist/dts/ShardManager.d.ts +435 -23
  161. package/dist/dts/ShardManager.d.ts.map +1 -1
  162. package/dist/dts/ShardStorage.d.ts +200 -0
  163. package/dist/dts/ShardStorage.d.ts.map +1 -0
  164. package/dist/dts/Sharding.d.ts +109 -131
  165. package/dist/dts/Sharding.d.ts.map +1 -1
  166. package/dist/dts/ShardingConfig.d.ts +147 -44
  167. package/dist/dts/ShardingConfig.d.ts.map +1 -1
  168. package/dist/dts/ShardingRegistrationEvent.d.ts +38 -23
  169. package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -1
  170. package/dist/dts/Singleton.d.ts +13 -0
  171. package/dist/dts/Singleton.d.ts.map +1 -0
  172. package/dist/dts/SingletonAddress.d.ts +49 -0
  173. package/dist/dts/SingletonAddress.d.ts.map +1 -0
  174. package/dist/dts/Snowflake.d.ts +121 -0
  175. package/dist/dts/Snowflake.d.ts.map +1 -0
  176. package/dist/dts/SocketRunner.d.ts +22 -0
  177. package/dist/dts/SocketRunner.d.ts.map +1 -0
  178. package/dist/dts/SocketShardManager.d.ts +17 -0
  179. package/dist/dts/SocketShardManager.d.ts.map +1 -0
  180. package/dist/dts/SqlMessageStorage.d.ts +43 -0
  181. package/dist/dts/SqlMessageStorage.d.ts.map +1 -0
  182. package/dist/dts/SqlShardStorage.d.ts +38 -0
  183. package/dist/dts/SqlShardStorage.d.ts.map +1 -0
  184. package/dist/dts/SynchronizedClock.d.ts +19 -0
  185. package/dist/dts/SynchronizedClock.d.ts.map +1 -0
  186. package/dist/dts/index.d.ts +48 -24
  187. package/dist/dts/index.d.ts.map +1 -1
  188. package/dist/dts/internal/entityReaper.d.ts +2 -0
  189. package/dist/dts/internal/entityReaper.d.ts.map +1 -0
  190. package/dist/dts/internal/hash.d.ts +2 -0
  191. package/dist/dts/internal/hash.d.ts.map +1 -0
  192. package/dist/dts/internal/interruptors.d.ts +2 -0
  193. package/dist/dts/internal/interruptors.d.ts.map +1 -0
  194. package/dist/dts/internal/resourceMap.d.ts +22 -0
  195. package/dist/dts/internal/resourceMap.d.ts.map +1 -0
  196. package/dist/dts/internal/resourceRef.d.ts +25 -0
  197. package/dist/dts/internal/resourceRef.d.ts.map +1 -0
  198. package/dist/dts/internal/shardManager.d.ts +1 -11
  199. package/dist/dts/internal/shardManager.d.ts.map +1 -1
  200. package/dist/esm/ClusterError.js +164 -0
  201. package/dist/esm/ClusterError.js.map +1 -0
  202. package/dist/esm/ClusterMetrics.js +54 -0
  203. package/dist/esm/ClusterMetrics.js.map +1 -0
  204. package/dist/esm/ClusterSchema.js +13 -0
  205. package/dist/esm/ClusterSchema.js.map +1 -0
  206. package/dist/esm/DeliverAt.js +22 -0
  207. package/dist/esm/DeliverAt.js.map +1 -0
  208. package/dist/esm/Entity.js +173 -0
  209. package/dist/esm/Entity.js.map +1 -0
  210. package/dist/esm/EntityAddress.js +44 -0
  211. package/dist/esm/EntityAddress.js.map +1 -0
  212. package/dist/esm/EntityId.js +10 -0
  213. package/dist/esm/EntityId.js.map +1 -0
  214. package/dist/esm/EntityType.js +10 -0
  215. package/dist/esm/EntityType.js.map +1 -0
  216. package/dist/esm/Envelope.js +154 -0
  217. package/dist/esm/Envelope.js.map +1 -0
  218. package/dist/esm/HttpCommon.js +38 -0
  219. package/dist/esm/HttpCommon.js.map +1 -0
  220. package/dist/esm/HttpRunner.js +98 -0
  221. package/dist/esm/HttpRunner.js.map +1 -0
  222. package/dist/esm/HttpShardManager.js +128 -0
  223. package/dist/esm/HttpShardManager.js.map +1 -0
  224. package/dist/esm/MachineId.js +17 -0
  225. package/dist/esm/MachineId.js.map +1 -0
  226. package/dist/esm/Message.js +88 -17
  227. package/dist/esm/Message.js.map +1 -1
  228. package/dist/esm/MessageStorage.js +345 -0
  229. package/dist/esm/MessageStorage.js.map +1 -0
  230. package/dist/esm/Reply.js +184 -0
  231. package/dist/esm/Reply.js.map +1 -0
  232. package/dist/esm/Runner.js +68 -0
  233. package/dist/esm/Runner.js.map +1 -0
  234. package/dist/esm/RunnerAddress.js +52 -0
  235. package/dist/esm/RunnerAddress.js.map +1 -0
  236. package/dist/esm/RunnerHealth.js +58 -0
  237. package/dist/esm/RunnerHealth.js.map +1 -0
  238. package/dist/esm/RunnerServer.js +116 -0
  239. package/dist/esm/RunnerServer.js.map +1 -0
  240. package/dist/esm/Runners.js +332 -0
  241. package/dist/esm/Runners.js.map +1 -0
  242. package/dist/esm/ShardId.js +5 -42
  243. package/dist/esm/ShardId.js.map +1 -1
  244. package/dist/esm/ShardManager.js +486 -7
  245. package/dist/esm/ShardManager.js.map +1 -1
  246. package/dist/esm/ShardStorage.js +129 -0
  247. package/dist/esm/ShardStorage.js.map +1 -0
  248. package/dist/esm/Sharding.js +730 -87
  249. package/dist/esm/Sharding.js.map +1 -1
  250. package/dist/esm/ShardingConfig.js +80 -17
  251. package/dist/esm/ShardingConfig.js.map +1 -1
  252. package/dist/esm/ShardingRegistrationEvent.js +19 -29
  253. package/dist/esm/ShardingRegistrationEvent.js.map +1 -1
  254. package/dist/esm/Singleton.js +15 -0
  255. package/dist/esm/Singleton.js.map +1 -0
  256. package/dist/esm/SingletonAddress.js +40 -0
  257. package/dist/esm/SingletonAddress.js.map +1 -0
  258. package/dist/esm/Snowflake.js +117 -0
  259. package/dist/esm/Snowflake.js.map +1 -0
  260. package/dist/esm/SocketRunner.js +31 -0
  261. package/dist/esm/SocketRunner.js.map +1 -0
  262. package/dist/esm/SocketShardManager.js +24 -0
  263. package/dist/esm/SocketShardManager.js.map +1 -0
  264. package/dist/esm/SqlMessageStorage.js +658 -0
  265. package/dist/esm/SqlMessageStorage.js.map +1 -0
  266. package/dist/esm/SqlShardStorage.js +218 -0
  267. package/dist/esm/SqlShardStorage.js.map +1 -0
  268. package/dist/esm/SynchronizedClock.js +57 -0
  269. package/dist/esm/SynchronizedClock.js.map +1 -0
  270. package/dist/esm/index.js +48 -24
  271. package/dist/esm/index.js.map +1 -1
  272. package/dist/esm/internal/entityManager.js +311 -142
  273. package/dist/esm/internal/entityManager.js.map +1 -1
  274. package/dist/esm/internal/entityReaper.js +38 -0
  275. package/dist/esm/internal/entityReaper.js.map +1 -0
  276. package/dist/esm/internal/hash.js +12 -0
  277. package/dist/esm/internal/hash.js.map +1 -0
  278. package/dist/esm/internal/interruptors.js +3 -0
  279. package/dist/esm/internal/interruptors.js.map +1 -0
  280. package/dist/esm/internal/resourceMap.js +79 -0
  281. package/dist/esm/internal/resourceMap.js.map +1 -0
  282. package/dist/esm/internal/resourceRef.js +83 -0
  283. package/dist/esm/internal/resourceRef.js.map +1 -0
  284. package/dist/esm/internal/shardManager.js +217 -233
  285. package/dist/esm/internal/shardManager.js.map +1 -1
  286. package/package.json +212 -154
  287. package/src/ClusterError.ts +193 -0
  288. package/src/ClusterMetrics.ts +62 -0
  289. package/src/ClusterSchema.ts +13 -0
  290. package/src/DeliverAt.ts +36 -0
  291. package/src/Entity.ts +438 -0
  292. package/src/EntityAddress.ts +55 -0
  293. package/src/EntityId.ts +16 -0
  294. package/src/EntityType.ts +16 -0
  295. package/src/Envelope.ts +352 -0
  296. package/src/HttpCommon.ts +73 -0
  297. package/src/HttpRunner.ts +196 -0
  298. package/src/HttpShardManager.ts +273 -0
  299. package/src/MachineId.ts +27 -0
  300. package/src/Message.ts +143 -92
  301. package/src/MessageStorage.ts +697 -0
  302. package/src/Reply.ts +295 -0
  303. package/src/Runner.ts +84 -0
  304. package/src/RunnerAddress.ts +61 -0
  305. package/src/RunnerHealth.ts +87 -0
  306. package/src/RunnerServer.ts +156 -0
  307. package/src/Runners.ts +533 -0
  308. package/src/ShardId.ts +10 -62
  309. package/src/ShardManager.ts +780 -29
  310. package/src/ShardStorage.ts +289 -0
  311. package/src/Sharding.ts +1060 -183
  312. package/src/ShardingConfig.ts +186 -45
  313. package/src/ShardingRegistrationEvent.ts +38 -39
  314. package/src/Singleton.ts +20 -0
  315. package/src/SingletonAddress.ts +47 -0
  316. package/src/Snowflake.ts +194 -0
  317. package/src/SocketRunner.ts +59 -0
  318. package/src/SocketShardManager.ts +48 -0
  319. package/src/SqlMessageStorage.ts +833 -0
  320. package/src/SqlShardStorage.ts +292 -0
  321. package/src/SynchronizedClock.ts +82 -0
  322. package/src/index.ts +54 -24
  323. package/src/internal/entityManager.ts +464 -361
  324. package/src/internal/entityReaper.ts +53 -0
  325. package/src/internal/hash.ts +11 -0
  326. package/src/internal/interruptors.ts +4 -0
  327. package/src/internal/resourceMap.ts +89 -0
  328. package/src/internal/resourceRef.ts +88 -0
  329. package/src/internal/shardManager.ts +273 -546
  330. package/AtLeastOnce/package.json +0 -6
  331. package/AtLeastOnceStorage/package.json +0 -6
  332. package/Broadcaster/package.json +0 -6
  333. package/ManagerConfig/package.json +0 -6
  334. package/MessageState/package.json +0 -6
  335. package/Messenger/package.json +0 -6
  336. package/Pod/package.json +0 -6
  337. package/PodAddress/package.json +0 -6
  338. package/Pods/package.json +0 -6
  339. package/PodsHealth/package.json +0 -6
  340. package/PoisonPill/package.json +0 -6
  341. package/RecipientAddress/package.json +0 -6
  342. package/RecipientBehaviour/package.json +0 -6
  343. package/RecipientBehaviourContext/package.json +0 -6
  344. package/RecipientType/package.json +0 -6
  345. package/Serialization/package.json +0 -6
  346. package/SerializedEnvelope/package.json +0 -6
  347. package/SerializedMessage/package.json +0 -6
  348. package/ShardManagerClient/package.json +0 -6
  349. package/ShardingEvent/package.json +0 -6
  350. package/ShardingException/package.json +0 -6
  351. package/Storage/package.json +0 -6
  352. package/dist/cjs/AtLeastOnce.js.map +0 -1
  353. package/dist/cjs/AtLeastOnceStorage.js.map +0 -1
  354. package/dist/cjs/Broadcaster.js +0 -6
  355. package/dist/cjs/Broadcaster.js.map +0 -1
  356. package/dist/cjs/ManagerConfig.js.map +0 -1
  357. package/dist/cjs/MessageState.js +0 -55
  358. package/dist/cjs/MessageState.js.map +0 -1
  359. package/dist/cjs/Messenger.js +0 -6
  360. package/dist/cjs/Messenger.js.map +0 -1
  361. package/dist/cjs/Pod.js +0 -78
  362. package/dist/cjs/Pod.js.map +0 -1
  363. package/dist/cjs/PodAddress.js +0 -77
  364. package/dist/cjs/PodAddress.js.map +0 -1
  365. package/dist/cjs/Pods.js.map +0 -1
  366. package/dist/cjs/PodsHealth.js +0 -41
  367. package/dist/cjs/PodsHealth.js.map +0 -1
  368. package/dist/cjs/PoisonPill.js +0 -78
  369. package/dist/cjs/PoisonPill.js.map +0 -1
  370. package/dist/cjs/RecipientAddress.js +0 -79
  371. package/dist/cjs/RecipientAddress.js.map +0 -1
  372. package/dist/cjs/RecipientBehaviour.js +0 -38
  373. package/dist/cjs/RecipientBehaviour.js.map +0 -1
  374. package/dist/cjs/RecipientBehaviourContext.js +0 -64
  375. package/dist/cjs/RecipientBehaviourContext.js.map +0 -1
  376. package/dist/cjs/RecipientType.js +0 -123
  377. package/dist/cjs/RecipientType.js.map +0 -1
  378. package/dist/cjs/Serialization.js +0 -32
  379. package/dist/cjs/Serialization.js.map +0 -1
  380. package/dist/cjs/SerializedEnvelope.js +0 -87
  381. package/dist/cjs/SerializedEnvelope.js.map +0 -1
  382. package/dist/cjs/SerializedMessage.js +0 -64
  383. package/dist/cjs/SerializedMessage.js.map +0 -1
  384. package/dist/cjs/ShardManagerClient.js.map +0 -1
  385. package/dist/cjs/ShardingEvent.js +0 -72
  386. package/dist/cjs/ShardingEvent.js.map +0 -1
  387. package/dist/cjs/ShardingException.js +0 -107
  388. package/dist/cjs/ShardingException.js.map +0 -1
  389. package/dist/cjs/Storage.js +0 -40
  390. package/dist/cjs/Storage.js.map +0 -1
  391. package/dist/cjs/internal/atLeastOnce.js +0 -35
  392. package/dist/cjs/internal/atLeastOnce.js.map +0 -1
  393. package/dist/cjs/internal/atLeastOnceStorage.js +0 -163
  394. package/dist/cjs/internal/atLeastOnceStorage.js.map +0 -1
  395. package/dist/cjs/internal/entityState.js +0 -47
  396. package/dist/cjs/internal/entityState.js.map +0 -1
  397. package/dist/cjs/internal/managerConfig.js +0 -46
  398. package/dist/cjs/internal/managerConfig.js.map +0 -1
  399. package/dist/cjs/internal/message.js +0 -48
  400. package/dist/cjs/internal/message.js.map +0 -1
  401. package/dist/cjs/internal/messageState.js +0 -79
  402. package/dist/cjs/internal/messageState.js.map +0 -1
  403. package/dist/cjs/internal/podWithMetadata.js +0 -54
  404. package/dist/cjs/internal/podWithMetadata.js.map +0 -1
  405. package/dist/cjs/internal/pods.js +0 -35
  406. package/dist/cjs/internal/pods.js.map +0 -1
  407. package/dist/cjs/internal/podsHealth.js +0 -40
  408. package/dist/cjs/internal/podsHealth.js.map +0 -1
  409. package/dist/cjs/internal/recipientBehaviour.js +0 -52
  410. package/dist/cjs/internal/recipientBehaviour.js.map +0 -1
  411. package/dist/cjs/internal/recipientBehaviourContext.js +0 -36
  412. package/dist/cjs/internal/recipientBehaviourContext.js.map +0 -1
  413. package/dist/cjs/internal/serialization.js +0 -48
  414. package/dist/cjs/internal/serialization.js.map +0 -1
  415. package/dist/cjs/internal/shardManagerClient.js +0 -48
  416. package/dist/cjs/internal/shardManagerClient.js.map +0 -1
  417. package/dist/cjs/internal/shardManagerState.js +0 -44
  418. package/dist/cjs/internal/shardManagerState.js.map +0 -1
  419. package/dist/cjs/internal/sharding.js +0 -306
  420. package/dist/cjs/internal/sharding.js.map +0 -1
  421. package/dist/cjs/internal/shardingConfig.js +0 -56
  422. package/dist/cjs/internal/shardingConfig.js.map +0 -1
  423. package/dist/cjs/internal/storage.js +0 -52
  424. package/dist/cjs/internal/storage.js.map +0 -1
  425. package/dist/cjs/internal/utils.js +0 -69
  426. package/dist/cjs/internal/utils.js.map +0 -1
  427. package/dist/dts/AtLeastOnce.d.ts +0 -20
  428. package/dist/dts/AtLeastOnce.d.ts.map +0 -1
  429. package/dist/dts/AtLeastOnceStorage.d.ts +0 -75
  430. package/dist/dts/AtLeastOnceStorage.d.ts.map +0 -1
  431. package/dist/dts/Broadcaster.d.ts +0 -32
  432. package/dist/dts/Broadcaster.d.ts.map +0 -1
  433. package/dist/dts/ManagerConfig.d.ts +0 -61
  434. package/dist/dts/ManagerConfig.d.ts.map +0 -1
  435. package/dist/dts/MessageState.d.ts +0 -107
  436. package/dist/dts/MessageState.d.ts.map +0 -1
  437. package/dist/dts/Messenger.d.ts +0 -32
  438. package/dist/dts/Messenger.d.ts.map +0 -1
  439. package/dist/dts/Pod.d.ts +0 -81
  440. package/dist/dts/Pod.d.ts.map +0 -1
  441. package/dist/dts/PodAddress.d.ts +0 -80
  442. package/dist/dts/PodAddress.d.ts.map +0 -1
  443. package/dist/dts/Pods.d.ts +0 -78
  444. package/dist/dts/Pods.d.ts.map +0 -1
  445. package/dist/dts/PodsHealth.d.ts +0 -66
  446. package/dist/dts/PodsHealth.d.ts.map +0 -1
  447. package/dist/dts/PoisonPill.d.ts +0 -78
  448. package/dist/dts/PoisonPill.d.ts.map +0 -1
  449. package/dist/dts/RecipientAddress.d.ts +0 -57
  450. package/dist/dts/RecipientAddress.d.ts.map +0 -1
  451. package/dist/dts/RecipientBehaviour.d.ts +0 -72
  452. package/dist/dts/RecipientBehaviour.d.ts.map +0 -1
  453. package/dist/dts/RecipientBehaviourContext.d.ts +0 -83
  454. package/dist/dts/RecipientBehaviourContext.d.ts.map +0 -1
  455. package/dist/dts/RecipientType.d.ts +0 -93
  456. package/dist/dts/RecipientType.d.ts.map +0 -1
  457. package/dist/dts/Serialization.d.ts +0 -58
  458. package/dist/dts/Serialization.d.ts.map +0 -1
  459. package/dist/dts/SerializedEnvelope.d.ts +0 -86
  460. package/dist/dts/SerializedEnvelope.d.ts.map +0 -1
  461. package/dist/dts/SerializedMessage.d.ts +0 -66
  462. package/dist/dts/SerializedMessage.d.ts.map +0 -1
  463. package/dist/dts/ShardManagerClient.d.ts +0 -50
  464. package/dist/dts/ShardManagerClient.d.ts.map +0 -1
  465. package/dist/dts/ShardingEvent.d.ts +0 -90
  466. package/dist/dts/ShardingEvent.d.ts.map +0 -1
  467. package/dist/dts/ShardingException.d.ts +0 -125
  468. package/dist/dts/ShardingException.d.ts.map +0 -1
  469. package/dist/dts/Storage.d.ts +0 -78
  470. package/dist/dts/Storage.d.ts.map +0 -1
  471. package/dist/dts/internal/atLeastOnce.d.ts +0 -2
  472. package/dist/dts/internal/atLeastOnce.d.ts.map +0 -1
  473. package/dist/dts/internal/atLeastOnceStorage.d.ts +0 -2
  474. package/dist/dts/internal/atLeastOnceStorage.d.ts.map +0 -1
  475. package/dist/dts/internal/entityState.d.ts +0 -21
  476. package/dist/dts/internal/entityState.d.ts.map +0 -1
  477. package/dist/dts/internal/managerConfig.d.ts +0 -2
  478. package/dist/dts/internal/managerConfig.d.ts.map +0 -1
  479. package/dist/dts/internal/message.d.ts +0 -9
  480. package/dist/dts/internal/message.d.ts.map +0 -1
  481. package/dist/dts/internal/messageState.d.ts +0 -2
  482. package/dist/dts/internal/messageState.d.ts.map +0 -1
  483. package/dist/dts/internal/podWithMetadata.d.ts +0 -2
  484. package/dist/dts/internal/podWithMetadata.d.ts.map +0 -1
  485. package/dist/dts/internal/pods.d.ts +0 -2
  486. package/dist/dts/internal/pods.d.ts.map +0 -1
  487. package/dist/dts/internal/podsHealth.d.ts +0 -2
  488. package/dist/dts/internal/podsHealth.d.ts.map +0 -1
  489. package/dist/dts/internal/recipientBehaviour.d.ts +0 -2
  490. package/dist/dts/internal/recipientBehaviour.d.ts.map +0 -1
  491. package/dist/dts/internal/recipientBehaviourContext.d.ts +0 -2
  492. package/dist/dts/internal/recipientBehaviourContext.d.ts.map +0 -1
  493. package/dist/dts/internal/serialization.d.ts +0 -2
  494. package/dist/dts/internal/serialization.d.ts.map +0 -1
  495. package/dist/dts/internal/shardManagerClient.d.ts +0 -2
  496. package/dist/dts/internal/shardManagerClient.d.ts.map +0 -1
  497. package/dist/dts/internal/shardManagerState.d.ts +0 -26
  498. package/dist/dts/internal/shardManagerState.d.ts.map +0 -1
  499. package/dist/dts/internal/sharding.d.ts +0 -2
  500. package/dist/dts/internal/sharding.d.ts.map +0 -1
  501. package/dist/dts/internal/shardingConfig.d.ts +0 -2
  502. package/dist/dts/internal/shardingConfig.d.ts.map +0 -1
  503. package/dist/dts/internal/storage.d.ts +0 -2
  504. package/dist/dts/internal/storage.d.ts.map +0 -1
  505. package/dist/dts/internal/utils.d.ts +0 -2
  506. package/dist/dts/internal/utils.d.ts.map +0 -1
  507. package/dist/esm/AtLeastOnce.js +0 -12
  508. package/dist/esm/AtLeastOnce.js.map +0 -1
  509. package/dist/esm/AtLeastOnceStorage.js +0 -17
  510. package/dist/esm/AtLeastOnceStorage.js.map +0 -1
  511. package/dist/esm/Broadcaster.js +0 -2
  512. package/dist/esm/Broadcaster.js.map +0 -1
  513. package/dist/esm/ManagerConfig.js +0 -26
  514. package/dist/esm/ManagerConfig.js.map +0 -1
  515. package/dist/esm/MessageState.js +0 -47
  516. package/dist/esm/MessageState.js.map +0 -1
  517. package/dist/esm/Messenger.js +0 -2
  518. package/dist/esm/Messenger.js.map +0 -1
  519. package/dist/esm/Pod.js +0 -65
  520. package/dist/esm/Pod.js.map +0 -1
  521. package/dist/esm/PodAddress.js +0 -64
  522. package/dist/esm/PodAddress.js.map +0 -1
  523. package/dist/esm/Pods.js +0 -27
  524. package/dist/esm/Pods.js.map +0 -1
  525. package/dist/esm/PodsHealth.js +0 -33
  526. package/dist/esm/PodsHealth.js.map +0 -1
  527. package/dist/esm/PoisonPill.js +0 -65
  528. package/dist/esm/PoisonPill.js.map +0 -1
  529. package/dist/esm/RecipientAddress.js +0 -67
  530. package/dist/esm/RecipientAddress.js.map +0 -1
  531. package/dist/esm/RecipientBehaviour.js +0 -30
  532. package/dist/esm/RecipientBehaviour.js.map +0 -1
  533. package/dist/esm/RecipientBehaviourContext.js +0 -56
  534. package/dist/esm/RecipientBehaviourContext.js.map +0 -1
  535. package/dist/esm/RecipientType.js +0 -108
  536. package/dist/esm/RecipientType.js.map +0 -1
  537. package/dist/esm/Serialization.js +0 -24
  538. package/dist/esm/Serialization.js.map +0 -1
  539. package/dist/esm/SerializedEnvelope.js +0 -74
  540. package/dist/esm/SerializedEnvelope.js.map +0 -1
  541. package/dist/esm/SerializedMessage.js +0 -51
  542. package/dist/esm/SerializedMessage.js.map +0 -1
  543. package/dist/esm/ShardManagerClient.js +0 -22
  544. package/dist/esm/ShardManagerClient.js.map +0 -1
  545. package/dist/esm/ShardingEvent.js +0 -62
  546. package/dist/esm/ShardingEvent.js.map +0 -1
  547. package/dist/esm/ShardingException.js +0 -91
  548. package/dist/esm/ShardingException.js.map +0 -1
  549. package/dist/esm/Storage.js +0 -32
  550. package/dist/esm/Storage.js.map +0 -1
  551. package/dist/esm/internal/atLeastOnce.js +0 -26
  552. package/dist/esm/internal/atLeastOnce.js.map +0 -1
  553. package/dist/esm/internal/atLeastOnceStorage.js +0 -154
  554. package/dist/esm/internal/atLeastOnceStorage.js.map +0 -1
  555. package/dist/esm/internal/entityState.js +0 -35
  556. package/dist/esm/internal/entityState.js.map +0 -1
  557. package/dist/esm/internal/managerConfig.js +0 -38
  558. package/dist/esm/internal/managerConfig.js.map +0 -1
  559. package/dist/esm/internal/message.js +0 -35
  560. package/dist/esm/internal/message.js.map +0 -1
  561. package/dist/esm/internal/messageState.js +0 -66
  562. package/dist/esm/internal/messageState.js.map +0 -1
  563. package/dist/esm/internal/podWithMetadata.js +0 -41
  564. package/dist/esm/internal/podWithMetadata.js.map +0 -1
  565. package/dist/esm/internal/pods.js +0 -25
  566. package/dist/esm/internal/pods.js.map +0 -1
  567. package/dist/esm/internal/podsHealth.js +0 -30
  568. package/dist/esm/internal/podsHealth.js.map +0 -1
  569. package/dist/esm/internal/recipientBehaviour.js +0 -42
  570. package/dist/esm/internal/recipientBehaviour.js.map +0 -1
  571. package/dist/esm/internal/recipientBehaviourContext.js +0 -26
  572. package/dist/esm/internal/recipientBehaviourContext.js.map +0 -1
  573. package/dist/esm/internal/serialization.js +0 -38
  574. package/dist/esm/internal/serialization.js.map +0 -1
  575. package/dist/esm/internal/shardManagerClient.js +0 -38
  576. package/dist/esm/internal/shardManagerClient.js.map +0 -1
  577. package/dist/esm/internal/shardManagerState.js +0 -36
  578. package/dist/esm/internal/shardManagerState.js.map +0 -1
  579. package/dist/esm/internal/sharding.js +0 -288
  580. package/dist/esm/internal/sharding.js.map +0 -1
  581. package/dist/esm/internal/shardingConfig.js +0 -47
  582. package/dist/esm/internal/shardingConfig.js.map +0 -1
  583. package/dist/esm/internal/storage.js +0 -42
  584. package/dist/esm/internal/storage.js.map +0 -1
  585. package/dist/esm/internal/utils.js +0 -56
  586. package/dist/esm/internal/utils.js.map +0 -1
  587. package/src/AtLeastOnce.ts +0 -28
  588. package/src/AtLeastOnceStorage.ts +0 -96
  589. package/src/Broadcaster.ts +0 -48
  590. package/src/ManagerConfig.ts +0 -67
  591. package/src/MessageState.ts +0 -126
  592. package/src/Messenger.ts +0 -40
  593. package/src/Pod.ts +0 -95
  594. package/src/PodAddress.ts +0 -94
  595. package/src/Pods.ts +0 -100
  596. package/src/PodsHealth.ts +0 -74
  597. package/src/PoisonPill.ts +0 -105
  598. package/src/RecipientAddress.ts +0 -72
  599. package/src/RecipientBehaviour.ts +0 -108
  600. package/src/RecipientBehaviourContext.ts +0 -101
  601. package/src/RecipientType.ts +0 -134
  602. package/src/Serialization.ts +0 -72
  603. package/src/SerializedEnvelope.ts +0 -108
  604. package/src/SerializedMessage.ts +0 -82
  605. package/src/ShardManagerClient.ts +0 -57
  606. package/src/ShardingEvent.ts +0 -121
  607. package/src/ShardingException.ts +0 -151
  608. package/src/Storage.ts +0 -92
  609. package/src/internal/atLeastOnce.ts +0 -59
  610. package/src/internal/atLeastOnceStorage.ts +0 -218
  611. package/src/internal/entityState.ts +0 -64
  612. package/src/internal/managerConfig.ts +0 -84
  613. package/src/internal/message.ts +0 -63
  614. package/src/internal/messageState.ts +0 -98
  615. package/src/internal/podWithMetadata.ts +0 -72
  616. package/src/internal/pods.ts +0 -29
  617. package/src/internal/podsHealth.ts +0 -39
  618. package/src/internal/recipientBehaviour.ts +0 -133
  619. package/src/internal/recipientBehaviourContext.ts +0 -70
  620. package/src/internal/serialization.ts +0 -63
  621. package/src/internal/shardManagerClient.ts +0 -49
  622. package/src/internal/shardManagerState.ts +0 -80
  623. package/src/internal/sharding.ts +0 -789
  624. package/src/internal/shardingConfig.ts +0 -97
  625. package/src/internal/storage.ts +0 -60
  626. package/src/internal/utils.ts +0 -54
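The listing amounts to a wholesale rewrite: the pod-oriented modules (Pod, Pods, PodsHealth, Messenger, Broadcaster, RecipientType and related internals) are removed, and runner-oriented modules (Runner, Runners, RunnerHealth, Entity, MessageStorage, ShardStorage, Snowflake) replace them. The Sharding.ts diff below shows the new service surface. As a rough, hypothetical sketch of client code against that surface (the `Counter` entity is a stand-in; entity construction lives in the new Entity.ts, which this diff only lists):

```ts
import type { Entity } from "@effect/cluster/Entity"
import { Sharding } from "@effect/cluster/Sharding"
import type * as Rpc from "@effect/rpc/Rpc"
import * as Effect from "effect/Effect"

// hypothetical entity definition -- not part of this diff
declare const Counter: Entity<Rpc.Any>

const program = Effect.gen(function*() {
  const sharding = yield* Sharding

  // `makeClient` (per the Sharding.ts diff below) yields a function from
  // an entity id to an RpcClient for that entity
  const clientFor = yield* sharding.makeClient(Counter)
  const client = clientFor("counter-1")
  // calls on `client` may fail with MailboxFull, AlreadyProcessingMessage
  // or PersistenceError
  void client
})
```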
package/src/Sharding.ts CHANGED
@@ -1,214 +1,1091 @@
1
1
  /**
2
2
  * @since 1.0.0
3
3
  */
4
- import type * as Effect from "effect/Effect"
5
- import type * as HashSet from "effect/HashSet"
6
- import type * as Scope from "effect/Scope"
7
- import type * as Stream from "effect/Stream"
8
- import type { Broadcaster } from "./Broadcaster.js"
9
- import * as internal from "./internal/sharding.js"
10
- import type * as Message from "./Message.js"
11
- import type * as MessageState from "./MessageState.js"
12
- import type { Messenger } from "./Messenger.js"
13
- import type * as PodAddress from "./PodAddress.js"
14
- import type * as RecipientAddress from "./RecipientAddress.js"
15
- import type * as RecipientBehaviour from "./RecipientBehaviour.js"
16
- import type * as RecipientBehaviourContext from "./RecipientBehaviourContext.js"
17
- import type * as RecipentType from "./RecipientType.js"
18
- import type * as SerializedEnvelope from "./SerializedEnvelope.js"
19
- import type * as SerializedMessage from "./SerializedMessage.js"
20
- import type * as ShardId from "./ShardId.js"
21
- import type * as ShardingException from "./ShardingException.js"
22
- import type * as ShardingRegistrationEvent from "./ShardingRegistrationEvent.js"
4
+ import type * as Rpc from "@effect/rpc/Rpc"
5
+ import * as RpcClient from "@effect/rpc/RpcClient"
6
+ import { type FromServer, RequestId } from "@effect/rpc/RpcMessage"
7
+ import * as Arr from "effect/Array"
8
+ import * as Cause from "effect/Cause"
9
+ import * as Context from "effect/Context"
10
+ import type { DurationInput } from "effect/Duration"
11
+ import * as Effect from "effect/Effect"
12
+ import * as Equal from "effect/Equal"
13
+ import * as Fiber from "effect/Fiber"
14
+ import * as FiberHandle from "effect/FiberHandle"
15
+ import * as FiberMap from "effect/FiberMap"
16
+ import * as FiberRef from "effect/FiberRef"
17
+ import { constant } from "effect/Function"
18
+ import * as HashMap from "effect/HashMap"
19
+ import * as Iterable from "effect/Iterable"
20
+ import * as Layer from "effect/Layer"
21
+ import * as MutableHashMap from "effect/MutableHashMap"
22
+ import * as MutableRef from "effect/MutableRef"
23
+ import * as Option from "effect/Option"
24
+ import * as Predicate from "effect/Predicate"
25
+ import * as PubSub from "effect/PubSub"
26
+ import * as Schedule from "effect/Schedule"
27
+ import * as Scope from "effect/Scope"
28
+ import * as Stream from "effect/Stream"
29
+ import type { AlreadyProcessingMessage, MailboxFull, PersistenceError } from "./ClusterError.js"
30
+ import { EntityNotManagedByRunner, RunnerUnavailable } from "./ClusterError.js"
31
+ import { Persisted } from "./ClusterSchema.js"
32
+ import type { CurrentAddress, Entity, HandlersFrom } from "./Entity.js"
33
+ import { EntityAddress } from "./EntityAddress.js"
34
+ import { EntityId } from "./EntityId.js"
35
+ import type { EntityType } from "./EntityType.js"
36
+ import * as Envelope from "./Envelope.js"
37
+ import * as EntityManager from "./internal/entityManager.js"
38
+ import { EntityReaper } from "./internal/entityReaper.js"
39
+ import { hashString } from "./internal/hash.js"
40
+ import { internalInterruptors } from "./internal/interruptors.js"
41
+ import { ResourceMap } from "./internal/resourceMap.js"
42
+ import * as Message from "./Message.js"
43
+ import * as MessageStorage from "./MessageStorage.js"
44
+ import * as Reply from "./Reply.js"
45
+ import type { RunnerAddress } from "./RunnerAddress.js"
46
+ import { Runners } from "./Runners.js"
47
+ import { ShardId } from "./ShardId.js"
48
+ import { ShardingConfig } from "./ShardingConfig.js"
49
+ import { EntityRegistered, type ShardingRegistrationEvent, SingletonRegistered } from "./ShardingRegistrationEvent.js"
50
+ import { ShardManagerClient } from "./ShardManager.js"
51
+ import { ShardStorage } from "./ShardStorage.js"
52
+ import { SingletonAddress } from "./SingletonAddress.js"
53
+ import * as Snowflake from "./Snowflake.js"
23
54
 
24
55
  /**
25
56
  * @since 1.0.0
26
- * @category symbols
57
+ * @category models
27
58
  */
28
- export const ShardingTypeId: unique symbol = internal.ShardingTypeId
59
+ export class Sharding extends Context.Tag("@effect/cluster/Sharding")<Sharding, {
60
+ /**
61
+ * Returns a stream of events that occur when the runner registers entities or
62
+ * singletons.
63
+ */
64
+ readonly getRegistrationEvents: Stream.Stream<ShardingRegistrationEvent>
29
65
 
30
- /**
31
- * @since 1.0.0
32
- * @category symbols
33
- */
34
- export type ShardingTypeId = typeof ShardingTypeId
66
+ /**
67
+ * Returns the `ShardId` of the shard to which the entity at the specified
68
+ * `address` is assigned.
69
+ */
70
+ readonly getShardId: (entityId: EntityId) => ShardId
35
71
 
36
- /**
37
- * @since 1.0.0
38
- * @category models
39
- */
40
- export interface Sharding {
41
- readonly [ShardingTypeId]: ShardingTypeId
42
- readonly register: Effect.Effect<void>
43
- readonly unregister: Effect.Effect<void>
44
- readonly messenger: <Msg extends Message.Message.Any>(
45
- entityType: RecipentType.EntityType<Msg>
46
- ) => Messenger<Msg>
47
- readonly broadcaster: <Msg extends Message.Message.Any>(
48
- topicType: RecipentType.TopicType<Msg>
49
- ) => Broadcaster<Msg>
50
- readonly isEntityOnLocalShards: (
51
- recipientAddress: RecipientAddress.RecipientAddress
52
- ) => Effect.Effect<boolean>
53
- readonly isShuttingDown: Effect.Effect<boolean>
54
-
55
- readonly registerScoped: Effect.Effect<void, never, Scope.Scope>
56
- readonly registerEntity: <Msg extends Message.Message.Any>(
57
- entityType: RecipentType.EntityType<Msg>
58
- ) => <R>(
59
- behaviour: RecipientBehaviour.RecipientBehaviour<Msg, R>,
60
- options?: RecipientBehaviour.EntityBehaviourOptions
61
- ) => Effect.Effect<void, never, Exclude<R, RecipientBehaviourContext.RecipientBehaviourContext>>
62
- readonly registerTopic: <Msg extends Message.Message.Any>(
63
- topicType: RecipentType.TopicType<Msg>
64
- ) => <R>(
65
- behaviour: RecipientBehaviour.RecipientBehaviour<Msg, R>,
66
- options?: RecipientBehaviour.EntityBehaviourOptions
67
- ) => Effect.Effect<void, never, Exclude<R, RecipientBehaviourContext.RecipientBehaviourContext>>
68
- readonly getShardingRegistrationEvents: Stream.Stream<ShardingRegistrationEvent.ShardingRegistrationEvent>
69
- readonly registerSingleton: <R>(name: string, run: Effect.Effect<void, never, R>) => Effect.Effect<void, never, R>
70
- readonly assign: (shards: HashSet.HashSet<ShardId.ShardId>) => Effect.Effect<void>
71
- readonly unassign: (shards: HashSet.HashSet<ShardId.ShardId>) => Effect.Effect<void>
72
- readonly sendMessageToLocalEntityManagerWithoutRetries: (
73
- message: SerializedEnvelope.SerializedEnvelope
72
+ /**
73
+ * Returns `true` if sharding is shutting down, `false` otherwise.
74
+ */
75
+ readonly isShutdown: Effect.Effect<boolean>
76
+
77
+ /**
78
+ * Constructs a `RpcClient` which can be used to send messages to the
79
+ * specified `Entity`.
80
+ */
81
+ readonly makeClient: <Rpcs extends Rpc.Any>(
82
+ entity: Entity<Rpcs>
74
83
  ) => Effect.Effect<
75
- MessageState.MessageState<SerializedMessage.SerializedMessage>,
76
- ShardingException.ShardingException
84
+ (entityId: string) => RpcClient.RpcClient<Rpcs, MailboxFull | AlreadyProcessingMessage | PersistenceError>
77
85
  >
78
- readonly getPods: Effect.Effect<HashSet.HashSet<PodAddress.PodAddress>>
79
- readonly getAssignedShardIds: Effect.Effect<HashSet.HashSet<ShardId.ShardId>>
80
- /** @internal */
81
- readonly refreshAssignments: Effect.Effect<void, never, Scope.Scope>
82
- /** @internal */
83
- readonly getShardId: (recipientAddress: RecipientAddress.RecipientAddress) => ShardId.ShardId
86
+
87
+ /**
88
+ * Registers a new entity with the runner.
89
+ */
90
+ readonly registerEntity: <Rpcs extends Rpc.Any, Handlers extends HandlersFrom<Rpcs>, RX>(
91
+ entity: Entity<Rpcs>,
92
+ handlers: Effect.Effect<Handlers, never, RX>,
93
+ options?: {
94
+ readonly maxIdleTime?: DurationInput | undefined
95
+ readonly concurrency?: number | "unbounded" | undefined
96
+ readonly mailboxCapacity?: number | "unbounded" | undefined
97
+ }
98
+ ) => Effect.Effect<void, never, Rpc.Context<Rpcs> | Rpc.Middleware<Rpcs> | Exclude<RX, Scope.Scope | CurrentAddress>>
99
+
100
+ /**
101
+ * Registers a new singleton with the runner.
102
+ */
103
+ readonly registerSingleton: <E, R>(
104
+ name: string,
105
+ run: Effect.Effect<void, E, R>
106
+ ) => Effect.Effect<void, never, Exclude<R, Scope.Scope>>
107
+
108
+ /**
109
+ * Sends a message to the specified entity.
110
+ */
111
+ readonly send: (message: Message.Incoming<any>) => Effect.Effect<
112
+ void,
113
+ EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage
114
+ >
115
+
116
+ /**
117
+ * Notify sharding that a message has been persisted to storage.
118
+ */
119
+ readonly notify: (message: Message.Incoming<any>) => Effect.Effect<
120
+ void,
121
+ EntityNotManagedByRunner
122
+ >
123
+ }>() {}
124
+
125
+ // -----------------------------------------------------------------------------
126
+ // Implementation
127
+ // -----------------------------------------------------------------------------
128
+
129
+ interface EntityManagerState {
130
+ readonly entity: Entity<any>
131
+ readonly scope: Scope.CloseableScope
132
+ readonly manager: EntityManager.EntityManager
84
133
  }
85
134
 
86
135
  /**
87
136
  * @since 1.0.0
88
- * @category context
137
+ * @category constructors
89
138
  */
90
- export const Tag = internal.shardingTag
139
+ export const make = Effect.gen(function*() {
140
+ const config = yield* ShardingConfig
91
141
 
92
- /**
93
- * @since 1.0.0
94
- * @category layers
95
- */
96
- export const live = internal.live
142
+ const runners = yield* Runners
143
+ const shardManager = yield* ShardManagerClient
144
+ const snowflakeGen = yield* Snowflake.Generator
145
+ const shardingScope = yield* Effect.scope
146
+ const isShutdown = MutableRef.make(false)
97
147
 
98
- /**
99
- * Notify the shard manager that shards can now be assigned to this pod.
100
- *
101
- * @since 1.0.0
102
- * @category utils
103
- */
104
- export const register: Effect.Effect<void, never, Sharding> = internal.register
148
+ const storage = yield* MessageStorage.MessageStorage
149
+ const storageEnabled = storage !== MessageStorage.noop
150
+ const shardStorage = yield* ShardStorage
105
151
 
106
- /**
107
- * Notify the shard manager that shards must be unassigned from this pod.
108
- *
109
- * @since 1.0.0
110
- * @category utils
111
- */
112
- export const unregister: Effect.Effect<void, never, Sharding> = internal.unregister
152
+ const entityManagers = new Map<EntityType, EntityManagerState>()
113
153
 
114
- /**
115
- * Same as `register`, but will automatically call `unregister` when the `Scope` is terminated.
116
- *
117
- * @since 1.0.0
118
- * @category utils
119
- */
120
- export const registerScoped: Effect.Effect<void, never, Scope.Scope | Sharding> = internal.registerScoped
154
+ const shardAssignments = MutableHashMap.empty<ShardId, RunnerAddress>()
155
+ const selfShards = new Set<ShardId>()
121
156
 
122
- /**
123
- * Start a computation that is guaranteed to run only on a single pod.
124
- * Each pod should call `registerSingleton` but only a single pod will actually run it at any given time.
125
- *
126
- * @since 1.0.0
127
- * @category utils
128
- */
129
- export const registerSingleton: <R>(
130
- name: string,
131
- run: Effect.Effect<void, never, R>
132
- ) => Effect.Effect<void, never, Sharding | R> = internal.registerSingleton
157
+ // the active shards are the ones that we have acquired the lock for
158
+ const acquiredShards = new Set<ShardId>()
159
+ const activeShardsLatch = yield* Effect.makeLatch(false)
133
160
 
134
- /**
135
- * Register a new entity type, allowing pods to send messages to entities of this type.
136
- *
137
- * @since 1.0.0
138
- * @category utils
139
- */
140
- export const registerEntity: <Msg extends Message.Message.Any>(
141
- entityType: RecipentType.EntityType<Msg>
142
- ) => <R>(
143
- behavior: RecipientBehaviour.RecipientBehaviour<Msg, R>,
144
- options?: RecipientBehaviour.EntityBehaviourOptions | undefined
145
- ) => Effect.Effect<void, never, Sharding | Exclude<R, RecipientBehaviourContext.RecipientBehaviourContext>> =
146
- internal.registerEntity
161
+ const events = yield* PubSub.unbounded<ShardingRegistrationEvent>()
162
+ const getRegistrationEvents: Stream.Stream<ShardingRegistrationEvent> = Stream.fromPubSub(events)
147
163
 
148
- /**
149
- * Register a new topic type, allowing pods to broadcast messages to subscribers.
150
- *
151
- * @since 1.0.0
152
- * @category utils
153
- */
154
- export const registerTopic: <Msg extends Message.Message.Any>(
155
- topicType: RecipentType.TopicType<Msg>
156
- ) => <R>(
157
- behavior: RecipientBehaviour.RecipientBehaviour<Msg, R>,
158
- options?: RecipientBehaviour.EntityBehaviourOptions | undefined
159
- ) => Effect.Effect<void, never, Sharding | Exclude<R, RecipientBehaviourContext.RecipientBehaviourContext>> =
160
- internal.registerTopic
164
+ const isLocalRunner = (address: RunnerAddress) =>
165
+ Option.isSome(config.runnerAddress) && Equal.equals(address, config.runnerAddress.value)
161
166
 
162
- /**
163
- * Get an object that allows sending messages to a given entity type.
164
- * You can provide a custom send timeout to override the one globally defined.
165
- *
166
- * @since 1.0.0
167
- * @category utils
168
- */
169
- export const messenger: <Msg extends Message.Message.Any>(
170
- entityType: RecipentType.EntityType<Msg>
171
- ) => Effect.Effect<Messenger<Msg>, never, Sharding> = internal.messenger
167
+ function getShardId(entityId: EntityId): ShardId {
168
+ return ShardId.make((Math.abs(hashString(entityId) % config.numberOfShards)) + 1)
169
+ }
172
170
 
173
- /**
174
- * Get an object that allows broadcasting messages to a given topic type.
175
- * You can provide a custom send timeout to override the one globally defined.
176
- *
177
- * @since 1.0.0
178
- * @category utils
179
- */
180
- export const broadcaster: <Msg extends Message.Message.Any>(
181
- topicType: RecipentType.TopicType<Msg>
182
- ) => Effect.Effect<Broadcaster<Msg>, never, Sharding> = internal.broadcaster
171
+ function isEntityOnLocalShards(address: EntityAddress): boolean {
172
+ return acquiredShards.has(address.shardId)
173
+ }
183
174
 
184
- /**
185
- * Get the list of pods currently registered to the Shard Manager
186
- *
187
- * @since 1.0.0
188
- * @category utils
189
- */
190
- export const getPods: Effect.Effect<HashSet.HashSet<PodAddress.PodAddress>, never, Sharding> = internal.getPods
175
+ // --- Shard acquisition ---
191
176
 
192
- /**
193
- * Sends a raw message to the local entity manager without performing reties.
194
- * Those are up to the caller.
195
- *
196
- * @since 1.0.0
197
- * @category utils
198
- */
199
- export const sendMessageToLocalEntityManagerWithoutRetries: (
200
- message: SerializedEnvelope.SerializedEnvelope
201
- ) => Effect.Effect<
202
- MessageState.MessageState<SerializedMessage.SerializedMessage>,
203
- ShardingException.ShardingException,
204
- Sharding
205
- > = internal.sendMessageToLocalEntityManagerWithoutRetries
177
+ if (Option.isSome(config.runnerAddress)) {
178
+ const selfAddress = config.runnerAddress.value
179
+ yield* Scope.addFinalizerExit(shardingScope, () => {
180
+ // the locks expire over time, so if this fails we ignore it
181
+ return Effect.ignore(shardStorage.releaseAll(selfAddress))
182
+ })
183
+
184
+ const releasingShards = new Set<ShardId>()
185
+ yield* Effect.gen(function*() {
186
+ while (true) {
187
+ yield* activeShardsLatch.await
188
+
189
+ // if a shard is no longer assigned to this runner, we release it
190
+ for (const shardId of acquiredShards) {
191
+ if (selfShards.has(shardId)) continue
192
+ acquiredShards.delete(shardId)
193
+ releasingShards.add(shardId)
194
+ }
195
+ // if a shard has been assigned to this runner, we acquire it
196
+ const unacquiredShards = new Set<ShardId>()
197
+ for (const shardId of selfShards) {
198
+ if (acquiredShards.has(shardId) || releasingShards.has(shardId)) continue
199
+ unacquiredShards.add(shardId)
200
+ }
201
+
202
+ if (releasingShards.size > 0) {
203
+ yield* Effect.forkIn(syncSingletons, shardingScope)
204
+ yield* releaseShards
205
+ }
206
+
207
+ if (unacquiredShards.size === 0) {
208
+ yield* activeShardsLatch.close
209
+ continue
210
+ }
211
+
212
+ const acquired = yield* shardStorage.acquire(selfAddress, unacquiredShards)
213
+ for (const shardId of acquired) {
214
+ acquiredShards.add(shardId)
215
+ }
216
+ if (acquired.length > 0) {
217
+ yield* storageReadLatch.open
218
+ yield* Effect.forkIn(syncSingletons, shardingScope)
219
+ }
220
+ yield* Effect.sleep(1000)
221
+ }
222
+ }).pipe(
223
+ Effect.catchAllCause((cause) => Effect.logWarning("Could not acquire/release shards", cause)),
224
+ Effect.repeat(Schedule.spaced(config.entityMessagePollInterval)),
225
+ Effect.annotateLogs({
226
+ package: "@effect/cluster",
227
+ module: "Sharding",
228
+ fiber: "Shard acquisition loop",
229
+ runner: selfAddress
230
+ }),
231
+ Effect.interruptible,
232
+ Effect.forkIn(shardingScope)
233
+ )
234
+
235
+ // refresh the shard locks every minute
236
+ yield* Effect.suspend(() =>
237
+ shardStorage.refresh(selfAddress, [
238
+ ...acquiredShards,
239
+ ...releasingShards
240
+ ])
241
+ ).pipe(
242
+ Effect.flatMap((acquired) => {
243
+ for (const shardId of acquiredShards) {
244
+ if (!acquired.includes(shardId)) {
245
+ acquiredShards.delete(shardId)
246
+ releasingShards.add(shardId)
247
+ }
248
+ }
249
+ return releasingShards.size > 0 ?
250
+ Effect.andThen(
251
+ Effect.forkIn(syncSingletons, shardingScope),
252
+ releaseShards
253
+ ) :
254
+ Effect.void
255
+ }),
256
+ Effect.retry({
257
+ times: 5,
258
+ schedule: Schedule.spaced(250)
259
+ }),
260
+ Effect.catchAllCause((cause) =>
261
+ Effect.logError("Could not refresh shard locks", cause).pipe(
262
+ Effect.andThen(clearSelfShards)
263
+ )
264
+ ),
265
+ Effect.delay("1 minute"),
266
+ Effect.forever,
267
+ Effect.interruptible,
268
+ Effect.forkIn(shardingScope)
269
+ )
270
+
271
+ const releaseShardsLock = Effect.unsafeMakeSemaphore(1).withPermits(1)
272
+ const releaseShards = releaseShardsLock(
273
+ Effect.suspend(() =>
274
+ Effect.forEach(
275
+ releasingShards,
276
+ (shardId) =>
277
+ Effect.forEach(
278
+ entityManagers.values(),
279
+ (state) => state.manager.interruptShard(shardId),
280
+ { concurrency: "unbounded", discard: true }
281
+ ).pipe(
282
+ Effect.andThen(shardStorage.release(selfAddress, shardId)),
283
+ Effect.annotateLogs({
284
+ runner: selfAddress
285
+ }),
286
+ Effect.andThen(() => {
287
+ releasingShards.delete(shardId)
288
+ })
289
+ ),
290
+ { concurrency: "unbounded", discard: true }
291
+ )
292
+ )
293
+ )
294
+ }
295
+
296
+ const clearSelfShards = Effect.suspend(() => {
297
+ selfShards.clear()
298
+ return activeShardsLatch.open
299
+ })
300
+
301
+ // --- Singletons ---
302
+
303
+ const singletons = new Map<ShardId, MutableHashMap.MutableHashMap<SingletonAddress, Effect.Effect<void>>>()
304
+ const singletonFibers = yield* FiberMap.make<SingletonAddress>()
305
+ const withSingletonLock = Effect.unsafeMakeSemaphore(1).withPermits(1)
306
+
307
+ const registerSingleton: Sharding["Type"]["registerSingleton"] = Effect.fnUntraced(
308
+ function*(name, run) {
309
+ const address = new SingletonAddress({
310
+ shardId: getShardId(EntityId.make(name)),
311
+ name
312
+ })
313
+
314
+ let map = singletons.get(address.shardId)
315
+ if (!map) {
316
+ map = MutableHashMap.empty()
317
+ singletons.set(address.shardId, map)
318
+ }
319
+ if (MutableHashMap.has(map, address)) {
320
+ return yield* Effect.dieMessage(`Singleton '${name}' is already registered`)
321
+ }
322
+
323
+ const context = yield* Effect.context<never>()
324
+ const wrappedRun = run.pipe(
325
+ Effect.locally(FiberRef.currentLogAnnotations, HashMap.empty()),
326
+ Effect.andThen(Effect.never),
327
+ Effect.scoped,
328
+ Effect.provide(context),
329
+ Effect.orDie,
330
+ Effect.interruptible
331
+ ) as Effect.Effect<never>
332
+ MutableHashMap.set(map, address, wrappedRun)
333
+
334
+ yield* PubSub.publish(events, SingletonRegistered({ address }))
335
+
336
+ // start if we are on the right shard
337
+ if (acquiredShards.has(address.shardId)) {
338
+ yield* Effect.logDebug("Starting singleton", address)
339
+ yield* FiberMap.run(singletonFibers, address, wrappedRun)
340
+ }
341
+ },
342
+ withSingletonLock
343
+ )
344
+
345
+ const syncSingletons = withSingletonLock(Effect.gen(function*() {
346
+ for (const [shardId, map] of singletons) {
347
+ for (const [address, run] of map) {
348
+ const running = FiberMap.unsafeHas(singletonFibers, address)
349
+ const shouldBeRunning = acquiredShards.has(shardId)
350
+ if (running && !shouldBeRunning) {
351
+ yield* Effect.logDebug("Stopping singleton", address)
352
+ internalInterruptors.add(yield* Effect.fiberId)
353
+ yield* FiberMap.remove(singletonFibers, address)
354
+ } else if (!running && shouldBeRunning) {
355
+ yield* Effect.logDebug("Starting singleton", address)
356
+ yield* FiberMap.run(singletonFibers, address, run)
357
+ }
358
+ }
359
+ }
360
+ }))
361
+
362
+ // --- Storage inbox ---
363
+
364
+ const storageReadLatch = yield* Effect.makeLatch(true)
365
+ const openStorageReadLatch = constant(storageReadLatch.open)
366
+
367
+ const storageReadLock = Effect.unsafeMakeSemaphore(1)
368
+ const withStorageReadLock = storageReadLock.withPermits(1)
369
+
+ if (storageEnabled && Option.isSome(config.runnerAddress)) {
+ const selfAddress = config.runnerAddress.value
+
+ yield* Effect.gen(function*() {
+ yield* Effect.logDebug("Starting")
+ yield* Effect.addFinalizer(() => Effect.logDebug("Shutting down"))
+
+ // keep track of recently sent request ids to avoid duplicates
+ // only the last 30 sets are kept, to bound memory usage
+ const sentRequestIds = new Set<Snowflake.Snowflake>()
+ const sentRequestIdSets = new Set<Set<Snowflake.Snowflake>>()
+
+ while (true) {
+ // wait until the next poll interval, or until we are notified of a change
+ yield* storageReadLatch.await
+
+ // close the latch now, so that a notification arriving during the read
+ // triggers another pass immediately on the next iteration
+ storageReadLatch.unsafeClose()
+
+ // the lock guarantees resuming entities that no more items are added to
+ // the unprocessed set while the semaphore is held
+ yield* storageReadLock.take(1)
+
+ const messages = yield* storage.unprocessedMessages(acquiredShards)
+ const currentSentRequestIds = new Set<Snowflake.Snowflake>()
+ sentRequestIdSets.add(currentSentRequestIds)
+
+ const send = Effect.catchAllCause(
+ Effect.suspend(() => {
+ const message = messages[index]
+ if (message._tag === "IncomingRequest") {
+ if (sentRequestIds.has(message.envelope.requestId)) {
+ return Effect.void
+ }
+ sentRequestIds.add(message.envelope.requestId)
+ currentSentRequestIds.add(message.envelope.requestId)
+ }
+ const address = message.envelope.address
+ const state = entityManagers.get(address.entityType)
+ if (!state || !acquiredShards.has(address.shardId)) {
+ return Effect.void
+ }
+
+ const isProcessing = state.manager.isProcessingFor(message)
+
+ // If the message might affect a currently processing request, we
+ // send it to the entity manager to be processed.
+ if (message._tag === "IncomingEnvelope" && isProcessing) {
+ return state.manager.send(message)
+ } else if (isProcessing) {
+ return Effect.void
+ }
+
+ // If the entity was resuming in another fiber, we add the message
+ // id to the unprocessed set.
+ const resumptionState = MutableHashMap.get(entityResumptionState, address)
+ if (Option.isSome(resumptionState)) {
+ resumptionState.value.unprocessed.add(message.envelope.requestId)
+ if (message.envelope._tag === "Interrupt") {
+ resumptionState.value.interrupts.set(message.envelope.requestId, message as Message.IncomingEnvelope)
+ }
+ return Effect.void
+ }
+ return state.manager.send(message)
+ }),
+ (cause) => {
+ const message = messages[index]
+ const error = Cause.failureOption(cause)
+ // if we get a defect, record it in storage as a reply
+ if (Option.isNone(error)) {
+ return storage.saveReply(Reply.ReplyWithContext.fromDefect({
+ id: snowflakeGen.unsafeNext(),
+ requestId: message.envelope.requestId,
+ defect: Cause.squash(cause)
+ }))
+ }
+ if (error.value._tag === "MailboxFull") {
+ // MailboxFull can only happen for requests, so this cast is safe
+ return resumeEntityFromStorage(message as Message.IncomingRequest<any>)
+ }
+ return Effect.void
+ }
+ )
+
+ let index = 0
+ yield* Effect.whileLoop({
+ while: () => index < messages.length,
+ step: () => index++,
+ body: constant(send)
+ })
+
+ // let the resuming entities check if they are done
+ yield* storageReadLock.release(1)
+
+ while (sentRequestIdSets.size > 30) {
+ const oldest = Iterable.unsafeHead(sentRequestIdSets)
+ sentRequestIdSets.delete(oldest)
+ for (const id of oldest) {
+ sentRequestIds.delete(id)
+ }
+ }
+ }
+ }).pipe(
+ Effect.scoped,
+ Effect.ensuring(storageReadLock.releaseAll),
+ Effect.catchAllCause((cause) => Effect.logWarning("Could not read messages from storage", cause)),
+ Effect.repeat(Schedule.spaced(config.entityMessagePollInterval)),
+ Effect.annotateLogs({
+ package: "@effect/cluster",
+ module: "Sharding",
+ fiber: "Storage read loop",
+ runner: selfAddress
+ }),
+ Effect.interruptible,
+ Effect.forkIn(shardingScope)
+ )
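// A sketch of the bounded de-duplication window above (plain TypeScript,
// illustrative types): each poll records its ids in a fresh generation
// set, and once more than 30 generations are retained the oldest is
// evicted together with its ids, keeping memory bounded. Sets iterate in
// insertion order, which is what makes the eviction FIFO.
const seen = new Set<bigint>()
const generations = new Set<Set<bigint>>()

function beginGeneration(): Set<bigint> {
  const generation = new Set<bigint>()
  generations.add(generation)
  return generation
}

function firstSeen(generation: Set<bigint>, id: bigint): boolean {
  if (seen.has(id)) return false // duplicate within the window
  seen.add(id)
  generation.add(id)
  return true
}

function evictOldGenerations(): void {
  for (const oldest of generations) {
    if (generations.size <= 30) break
    generations.delete(oldest)
    for (const id of oldest) seen.delete(id)
  }
}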
+
+ // open the storage latch every poll interval
+ yield* storageReadLatch.open.pipe(
+ Effect.delay(config.entityMessagePollInterval),
+ Effect.forever,
+ Effect.interruptible,
+ Effect.forkIn(shardingScope)
+ )
+
+ // Resume unprocessed messages for entities that reached a full mailbox.
+ const entityResumptionState = MutableHashMap.empty<EntityAddress, {
+ unprocessed: Set<Snowflake.Snowflake>
+ interrupts: Map<Snowflake.Snowflake, Message.IncomingEnvelope>
+ }>()
+ const resumeEntityFromStorage = (lastReceivedMessage: Message.IncomingRequest<any>) => {
+ const address = lastReceivedMessage.envelope.address
+ const resumptionState = MutableHashMap.get(entityResumptionState, address)
+ if (Option.isSome(resumptionState)) {
+ resumptionState.value.unprocessed.add(lastReceivedMessage.envelope.requestId)
+ return Effect.void
+ }
+ MutableHashMap.set(entityResumptionState, address, {
+ unprocessed: new Set([lastReceivedMessage.envelope.requestId]),
+ interrupts: new Map()
+ })
+ return resumeEntityFromStorageImpl(address)
+ }
+ const resumeEntityFromStorageImpl = Effect.fnUntraced(
+ function*(address: EntityAddress) {
+ const state = entityManagers.get(address.entityType)
+ if (!state) {
+ MutableHashMap.remove(entityResumptionState, address)
+ return
+ }
+
+ const resumptionState = Option.getOrThrow(MutableHashMap.get(entityResumptionState, address))
+ let done = false
+
+ while (!done) {
+ // if the shard is no longer assigned to this runner, we stop
+ if (!acquiredShards.has(address.shardId)) {
+ return
+ }
+
+ // take a batch of unprocessed message ids
+ const messageIds = Arr.empty<Snowflake.Snowflake>()
+ for (const id of resumptionState.unprocessed) {
+ if (messageIds.length === 1024) break
+ messageIds.push(id)
+ }
+
+ const messages = yield* storage.unprocessedMessagesById(messageIds)
+
+ // this should not happen, but we handle it just in case
+ if (messages.length === 0) {
+ yield* Effect.sleep(config.entityMessagePollInterval)
+ continue
+ }
+
+ let index = 0
+
+ const sendWithRetry: Effect.Effect<
+ void,
+ EntityNotManagedByRunner
+ > = Effect.catchTags(
+ Effect.suspend(() => {
+ if (!acquiredShards.has(address.shardId)) {
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
+ }
+
+ const message = messages[index]
+ // check if this is a request that was interrupted
+ const interrupt = message._tag === "IncomingRequest" &&
+ resumptionState.interrupts.get(message.envelope.requestId)
+ return interrupt ?
+ Effect.flatMap(state.manager.send(message), () => {
+ resumptionState.interrupts.delete(message.envelope.requestId)
+ return state.manager.send(interrupt)
+ }) :
+ state.manager.send(message)
+ }),
+ {
+ MailboxFull: () => Effect.delay(sendWithRetry, config.sendRetryInterval),
+ AlreadyProcessingMessage: () => Effect.void
+ }
+ )
+
+ yield* Effect.whileLoop({
+ while: () => index < messages.length,
+ body: constant(sendWithRetry),
+ step: () => index++
+ })
+
+ for (const id of messageIds) {
+ resumptionState.unprocessed.delete(id)
+ }
+ if (resumptionState.unprocessed.size > 0) continue
+
+ // if we have caught up to the main storage loop, we let it take over
+ yield* withStorageReadLock(Effect.sync(() => {
+ if (resumptionState.unprocessed.size === 0) {
+ MutableHashMap.remove(entityResumptionState, address)
+ done = true
+ }
+ }))
+ }
+ },
+ Effect.retry({
+ while: (e) => e._tag === "PersistenceError",
+ schedule: Schedule.spaced(config.entityMessagePollInterval)
+ }),
+ Effect.catchAllCause((cause) => Effect.logError("Could not resume unprocessed messages", cause)),
+ (effect, address) =>
+ Effect.annotateLogs(effect, {
+ package: "@effect/cluster",
+ module: "Sharding",
+ fiber: "Resuming unprocessed messages",
+ runner: selfAddress,
+ entity: address
+ }),
+ (effect, address) =>
+ Effect.ensuring(
+ effect,
+ Effect.sync(() => MutableHashMap.remove(entityResumptionState, address))
+ ),
+ Effect.interruptible,
+ Effect.forkIn(shardingScope)
+ )
+ }
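// A sketch of the retry-until-accepted technique used by `sendWithRetry`
// above, reduced to plain `effect` (the error class, the stub `offer`,
// and the delay are illustrative): a full mailbox is treated as a
// transient condition and the same send is re-attempted after a pause.
import { Data, Effect } from "effect"

class MailboxFull extends Data.TaggedError("MailboxFull")<{}> {}

// a stand-in send that succeeds on the third attempt
let attempts = 0
const offer: Effect.Effect<void, MailboxFull> = Effect.suspend(() =>
  ++attempts < 3 ? Effect.fail(new MailboxFull()) : Effect.void
)

const offerWithRetry: Effect.Effect<void> = offer.pipe(
  Effect.catchTag("MailboxFull", () => Effect.delay(offerWithRetry, "100 millis"))
)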
+
+ // --- Sending messages ---
+
+ const sendLocal = (
+ message: Message.Outgoing<any> | Message.Incoming<any>
+ ): Effect.Effect<
+ void,
+ EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage
+ > =>
+ Effect.suspend(() => {
+ const address = message.envelope.address
+ if (!isEntityOnLocalShards(address)) {
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
+ }
+ const state = entityManagers.get(address.entityType)
+ if (!state) {
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
+ }
+
+ return message._tag === "IncomingRequest" || message._tag === "IncomingEnvelope" ?
+ state.manager.send(message) :
+ runners.sendLocal({
+ message,
+ send: state.manager.sendLocal,
+ simulateRemoteSerialization: config.simulateRemoteSerialization
+ })
+ })
+
+ const notifyLocal = (message: Message.Outgoing<any> | Message.Incoming<any>, discard: boolean) =>
+ Effect.suspend(() => {
+ const address = message.envelope.address
+ if (!isEntityOnLocalShards(address)) {
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
+ }
+
+ const notify = storageEnabled
+ ? openStorageReadLatch
+ : () => Effect.dieMessage("Sharding.notifyLocal: storage is disabled")
+
+ return message._tag === "IncomingRequest" || message._tag === "IncomingEnvelope"
+ ? notify()
+ : runners.notifyLocal({ message, notify, discard })
+ })
+
+ const isTransientError = Predicate.or(RunnerUnavailable.is, EntityNotManagedByRunner.is)
+ function sendOutgoing(
+ message: Message.Outgoing<any>,
+ discard: boolean,
+ retries?: number
+ ): Effect.Effect<void, MailboxFull | AlreadyProcessingMessage | PersistenceError> {
+ return Effect.catchIf(
+ Effect.suspend(() => {
+ const address = message.envelope.address
+ const maybeRunner = MutableHashMap.get(shardAssignments, address.shardId)
+ const isPersisted = storageEnabled && Context.get(message.rpc.annotations, Persisted)
+ const runnerIsLocal = Option.isSome(maybeRunner) && isLocalRunner(maybeRunner.value)
+ if (isPersisted) {
+ return runnerIsLocal
+ ? notifyLocal(message, discard)
+ : runners.notify({ address: maybeRunner, message, discard })
+ } else if (Option.isNone(maybeRunner)) {
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
+ }
+ return runnerIsLocal
+ ? sendLocal(message)
+ : runners.send({ address: maybeRunner.value, message })
+ }),
+ isTransientError,
+ (error) => {
+ if (retries === 0) {
+ return Effect.die(error)
+ }
+ return Effect.delay(sendOutgoing(message, discard, retries && retries - 1), config.sendRetryInterval)
+ }
+ )
+ }
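// A sketch of the retry-budget convention in `sendOutgoing` above
// (plain `effect`; the delay is illustrative): an undefined budget
// retries forever, a positive budget counts down via `retries && retries - 1`,
// and a budget of zero escalates the transient error into a defect.
import { Effect } from "effect"

function withBudget<A, E>(
  attempt: Effect.Effect<A, E>,
  retries?: number
): Effect.Effect<A> {
  return Effect.catchAll(attempt, (error) =>
    retries === 0
      ? Effect.die(error)
      : Effect.delay(withBudget(attempt, retries && retries - 1), "200 millis")
  )
}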
+
+ // --- Shard Manager sync ---
+
+ const shardManagerTimeoutFiber = yield* FiberHandle.make().pipe(
+ Scope.extend(shardingScope)
+ )
+ const startShardManagerTimeout = FiberHandle.run(
+ shardManagerTimeoutFiber,
+ Effect.flatMap(Effect.sleep(config.shardManagerUnavailableTimeout), () => {
+ MutableHashMap.clear(shardAssignments)
+ return clearSelfShards
+ }),
+ { onlyIfMissing: true }
+ )
+ const stopShardManagerTimeout = FiberHandle.clear(shardManagerTimeoutFiber)
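// A sketch of the watchdog built above with FiberHandle (plain `effect`,
// illustrative duration and message): `onlyIfMissing: true` makes
// repeated arming idempotent while a timeout is already pending, and
// `clear` disarms it.
import { Effect, FiberHandle } from "effect"

const watchdogSketch = Effect.gen(function*() {
  const handle = yield* FiberHandle.make()
  const arm = FiberHandle.run(
    handle,
    Effect.delay(Effect.log("shard manager unavailable for too long"), "5 seconds"),
    { onlyIfMissing: true }
  )
  const disarm = FiberHandle.clear(handle)

  yield* arm    // starts the timeout fiber
  yield* arm    // no-op: a fiber is already present
  yield* disarm // interrupts the pending timeout
}).pipe(Effect.scoped)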
+
+ // Every time the link to the shard manager is lost, we re-register the runner
+ // and re-subscribe to sharding events
+ yield* Effect.gen(function*() {
+ yield* Effect.logDebug("Registering with shard manager")
+ if (Option.isSome(config.runnerAddress)) {
+ const machineId = yield* shardManager.register(config.runnerAddress.value)
+ yield* snowflakeGen.setMachineId(machineId)
+ }
+
+ yield* stopShardManagerTimeout
+
+ yield* Effect.logDebug("Subscribing to sharding events")
+ const mailbox = yield* shardManager.shardingEvents
+ const startedLatch = yield* Effect.makeLatch(false)
+
+ const eventsFiber = yield* Effect.gen(function*() {
+ while (true) {
+ const [events] = yield* mailbox.takeAll
+ for (const event of events) {
+ yield* Effect.logDebug("Received sharding event", event)
+
+ switch (event._tag) {
+ case "StreamStarted": {
+ yield* startedLatch.open
+ break
+ }
+ case "ShardsAssigned": {
+ for (const shard of event.shards) {
+ MutableHashMap.set(shardAssignments, shard, event.address)
+ }
+ if (!MutableRef.get(isShutdown) && isLocalRunner(event.address)) {
+ for (const shardId of event.shards) {
+ if (selfShards.has(shardId)) continue
+ selfShards.add(shardId)
+ }
+ yield* activeShardsLatch.open
+ }
+ break
+ }
+ case "ShardsUnassigned": {
+ for (const shard of event.shards) {
+ MutableHashMap.remove(shardAssignments, shard)
+ }
+ if (isLocalRunner(event.address)) {
+ for (const shard of event.shards) {
+ selfShards.delete(shard)
+ }
+ yield* activeShardsLatch.open
+ }
+ break
+ }
+ }
+ }
+ }
+ }).pipe(Effect.forkScoped)
+
+ // Wait for the stream to be established
+ yield* startedLatch.await
+
+ // perform a full sync every config.refreshAssignmentsInterval
+ const syncFiber = yield* syncAssignments.pipe(
+ Effect.andThen(Effect.sleep(config.refreshAssignmentsInterval)),
+ Effect.forever,
+ Effect.forkScoped
+ )
+
+ yield* Fiber.joinAll([eventsFiber, syncFiber])
+ }).pipe(
+ Effect.scoped,
+ Effect.catchAllCause((cause) => Effect.logDebug(cause)),
+ Effect.zipRight(startShardManagerTimeout),
+ Effect.repeat(
+ Schedule.exponential(1000).pipe(
+ Schedule.union(Schedule.spaced(10_000))
+ )
+ ),
+ Effect.annotateLogs({
+ package: "@effect/cluster",
+ module: "Sharding",
+ fiber: "ShardManager sync",
+ runner: config.runnerAddress
+ }),
+ Effect.interruptible,
+ Effect.forkIn(shardingScope)
+ )
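// A sketch of the reconnect cadence above: `Schedule.union` recurs with
// the shorter of the two delays, so the exponential backoff (1s, 2s, 4s,
// 8s, ...) is effectively capped by the 10s spaced schedule.
import { Effect, Schedule } from "effect"

const reconnectCadence = Schedule.exponential(1000).pipe(
  Schedule.union(Schedule.spaced(10_000))
)
// resulting delays: 1s, 2s, 4s, 8s, 10s, 10s, ...

const reconnectLoopSketch = Effect.log("re-registering with shard manager").pipe(
  Effect.repeat(reconnectCadence)
)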
+
+ const syncAssignments = Effect.gen(function*() {
+ const assignments = yield* shardManager.getAssignments
+ yield* Effect.logDebug("Received shard assignments", assignments)
+
+ for (const [shardId, runner] of assignments) {
+ if (Option.isNone(runner)) {
+ MutableHashMap.remove(shardAssignments, shardId)
+ selfShards.delete(shardId)
+ continue
+ }
+
+ MutableHashMap.set(shardAssignments, shardId, runner.value)
+
+ if (!isLocalRunner(runner.value)) {
+ selfShards.delete(shardId)
+ continue
+ }
+ if (MutableRef.get(isShutdown) || selfShards.has(shardId)) {
+ continue
+ }
+ selfShards.add(shardId)
+ }
+
+ yield* activeShardsLatch.open
+ })
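// A sketch of the reconciliation rule in `syncAssignments` (plain
// TypeScript, illustrative types): the fetched snapshot is authoritative,
// so unowned shards are dropped locally and the local "self" set keeps
// exactly the shards assigned to this runner.
function reconcile(
  snapshot: ReadonlyMap<number, string | undefined>,
  assignments: Map<number, string>,
  selfShards: Set<number>,
  selfRunner: string
): void {
  for (const [shardId, owner] of snapshot) {
    if (owner === undefined) {
      assignments.delete(shardId)
      selfShards.delete(shardId)
      continue
    }
    assignments.set(shardId, owner)
    if (owner === selfRunner) selfShards.add(shardId)
    else selfShards.delete(shardId)
  }
}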
+
+ // --- Clients ---
+
+ type ClientRequestEntry = {
+ readonly rpc: Rpc.AnyWithProps
+ readonly context: Context.Context<never>
+ lastChunkId?: Snowflake.Snowflake
+ }
+ const clientRequests = new Map<Snowflake.Snowflake, ClientRequestEntry>()
+
+ const clients: ResourceMap<
+ Entity<any>,
+ (entityId: string) => RpcClient.RpcClient<any, MailboxFull | AlreadyProcessingMessage>,
+ never
+ > = yield* ResourceMap.make(Effect.fnUntraced(function*(entity: Entity<any>) {
+ const client = yield* RpcClient.makeNoSerialization(entity.protocol, {
+ supportsAck: true,
+ generateRequestId: () => RequestId(snowflakeGen.unsafeNext()),
+ onFromClient(options): Effect.Effect<void, MailboxFull | AlreadyProcessingMessage | PersistenceError> {
+ const address = Context.unsafeGet(options.context, ClientAddressTag)
+ switch (options.message._tag) {
+ case "Request": {
+ const fiber = Option.getOrThrow(Fiber.getCurrentFiber())
+ const id = Snowflake.Snowflake(options.message.id)
+ const rpc = entity.protocol.requests.get(options.message.tag)!
+ let respond: (reply: Reply.Reply<any>) => Effect.Effect<void>
+ if (!options.discard) {
+ const entry: ClientRequestEntry = {
+ rpc: rpc as any,
+ context: fiber.currentContext
+ }
+ clientRequests.set(id, entry)
+ respond = makeClientRespond(entry, client.write)
+ } else {
+ respond = clientRespondDiscard
+ }
+ return sendOutgoing(
+ new Message.OutgoingRequest({
+ envelope: Envelope.makeRequest({
+ requestId: id,
+ address,
+ tag: options.message.tag,
+ payload: options.message.payload,
+ headers: options.message.headers,
+ traceId: options.message.traceId,
+ spanId: options.message.spanId,
+ sampled: options.message.sampled
+ }),
+ lastReceivedReply: Option.none(),
+ rpc,
+ context: fiber.currentContext as Context.Context<any>,
+ respond
+ }),
+ options.discard
+ )
+ }
+ case "Ack": {
+ const requestId = Snowflake.Snowflake(options.message.requestId)
+ const entry = clientRequests.get(requestId)
+ if (!entry) return Effect.void
+ return sendOutgoing(
+ new Message.OutgoingEnvelope({
+ envelope: new Envelope.AckChunk({
+ id: snowflakeGen.unsafeNext(),
+ address,
+ requestId,
+ replyId: entry.lastChunkId!
+ }),
+ rpc: entry.rpc
+ }),
+ false
+ )
+ }
+ case "Interrupt": {
+ const requestId = Snowflake.Snowflake(options.message.requestId)
+ const entry = clientRequests.get(requestId)
+ if (!entry) return Effect.void
+ clientRequests.delete(requestId)
+ // for durable messages, we ignore interrupts on shutdown or as a
+ // result of a shard being reassigned
+ const isTransientInterrupt = MutableRef.get(isShutdown) ||
+ options.message.interruptors.some((id) => internalInterruptors.has(id))
+ if (isTransientInterrupt && storageEnabled && Context.get(entry.rpc.annotations, Persisted)) {
+ return Effect.void
+ }
+ return Effect.ignore(sendOutgoing(
+ new Message.OutgoingEnvelope({
+ envelope: new Envelope.Interrupt({
+ id: snowflakeGen.unsafeNext(),
+ address,
+ requestId
+ }),
+ rpc: entry.rpc
+ }),
+ false,
+ 3
+ ))
+ }
+ }
+ return Effect.void
+ }
+ })
+
+ const wrappedClient: any = {}
+ for (const method of Object.keys(client.client)) {
+ wrappedClient[method] = function(this: any, payload: any, options?: {
+ readonly context?: Context.Context<never>
+ }) {
+ return (client as any).client[method](payload, {
+ ...options,
+ context: options?.context
+ ? Context.merge(options.context, this[currentClientAddress])
+ : this[currentClientAddress]
+ })
+ }
+ }
+
+ yield* Scope.addFinalizer(
+ yield* Effect.scope,
+ Effect.withFiberRuntime((fiber) => {
+ internalInterruptors.add(fiber.id())
+ return Effect.void
+ })
+ )
+
+ return (entityId: string) => {
+ const id = EntityId.make(entityId)
+ return {
+ ...wrappedClient,
+ [currentClientAddress]: ClientAddressTag.context(EntityAddress.make({
+ shardId: getShardId(id),
+ entityId: id,
+ entityType: entity.type
+ }))
+ }
+ }
+ }))
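// A hedged usage sketch for the per-entity clients built above. `Counter`
// and its `increment` rpc are hypothetical stand-ins for an Entity defined
// elsewhere; only the overall shape follows from `makeClient` below:
//
//   const makeCounterClient = yield* sharding.makeClient(Counter)
//   const counter = makeCounterClient("counter-1")
//   // requests are routed by the entity's shard id; MailboxFull and
//   // AlreadyProcessingMessage surface in the error channel
//   yield* counter.increment({ amount: 1 })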
+
+ const makeClient = <Rpcs extends Rpc.Any>(entity: Entity<Rpcs>): Effect.Effect<
+ (entityId: string) => RpcClient.RpcClient<Rpcs, MailboxFull | AlreadyProcessingMessage>
+ > => clients.get(entity)
+
+ const clientRespondDiscard = (_reply: Reply.Reply<any>) => Effect.void
+
+ const makeClientRespond = (
+ entry: ClientRequestEntry,
+ write: (reply: FromServer<any>) => Effect.Effect<void>
+ ) =>
+ (reply: Reply.Reply<any>) => {
+ switch (reply._tag) {
+ case "Chunk": {
+ entry.lastChunkId = reply.id
+ return write({
+ _tag: "Chunk",
+ clientId: 0,
+ requestId: RequestId(reply.requestId),
+ values: reply.values
+ })
+ }
+ case "WithExit": {
+ clientRequests.delete(reply.requestId)
+ return write({
+ _tag: "Exit",
+ clientId: 0,
+ requestId: RequestId(reply.requestId),
+ exit: reply.exit
+ })
+ }
+ }
+ }
+
+ // --- Entities ---
+
+ const context = yield* Effect.context<ShardingConfig>()
+ const reaper = yield* EntityReaper
+ const registerEntity: Sharding["Type"]["registerEntity"] = Effect.fnUntraced(
+ function*(entity, build, options) {
+ if (entityManagers.has(entity.type)) return
+ const scope = yield* Scope.make()
+ const manager = yield* EntityManager.make(entity, build, {
+ ...options,
+ storage,
+ runnerAddress: Option.getOrThrow(config.runnerAddress),
+ sharding
+ }).pipe(
+ Effect.provide(context.pipe(
+ Context.add(EntityReaper, reaper),
+ Context.add(Scope.Scope, scope),
+ Context.add(Snowflake.Generator, snowflakeGen)
+ ))
+ ) as Effect.Effect<EntityManager.EntityManager>
+ entityManagers.set(entity.type, {
+ entity,
+ scope,
+ manager
+ })
+
+ yield* Scope.addFinalizer(scope, Effect.sync(() => entityManagers.delete(entity.type)))
+ yield* PubSub.publish(events, EntityRegistered({ entity }))
+ }
+ )
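// A hedged usage sketch for `registerEntity` above (the `Counter` entity
// and its behaviour are hypothetical; only the call shape follows from
// the signature in this file):
//
//   yield* sharding.registerEntity(
//     Counter,          // an Entity<...> definition
//     CounterBehaviour  // the `build` argument used per entity instance
//     // , options      // optional EntityManager options
//   )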
+
+ yield* Scope.addFinalizerExit(
+ shardingScope,
+ (exit) =>
+ Effect.forEach(
+ entityManagers.values(),
+ (state) =>
+ Effect.catchAllCause(Scope.close(state.scope, exit), (cause) =>
+ Effect.annotateLogs(Effect.logError("Error closing entity manager", cause), {
+ entity: state.entity.type
+ })),
+ { concurrency: "unbounded", discard: true }
+ )
+ )
+
+ // --- Finalization ---
+
+ if (Option.isSome(config.runnerAddress)) {
+ const selfAddress = config.runnerAddress.value
+ // Unregister runner from shard manager when scope is closed
+ yield* Scope.addFinalizer(
+ shardingScope,
+ Effect.gen(function*() {
+ yield* Effect.logDebug("Unregistering runner from shard manager", selfAddress)
+ yield* shardManager.unregister(selfAddress).pipe(
+ Effect.catchAllCause((cause) => Effect.logError("Error calling unregister with shard manager", cause))
+ )
+ yield* clearSelfShards
+ })
+ )
+ }
+
+ yield* Scope.addFinalizer(
+ shardingScope,
+ Effect.withFiberRuntime((fiber) => {
+ MutableRef.set(isShutdown, true)
+ internalInterruptors.add(fiber.id())
+ return Effect.void
+ })
+ )
+
+ const sharding = Sharding.of({
+ getRegistrationEvents,
+ getShardId,
+ isShutdown: Effect.sync(() => MutableRef.get(isShutdown)),
+ registerEntity,
+ registerSingleton,
+ makeClient,
+ send: sendLocal,
+ notify: (message) => notifyLocal(message, false)
+ })
+
+ return sharding
+ })

  /**
- * Gets the list of shardIds assigned to the current Pod
- *
  * @since 1.0.0
- * @category utils
+ * @category layers
  */
- export const getAssignedShardIds: Effect.Effect<HashSet.HashSet<ShardId.ShardId>, never, Sharding> =
- internal.getAssignedShardIds
+ export const layer: Layer.Layer<
+ Sharding,
+ never,
+ ShardingConfig | Runners | ShardManagerClient | MessageStorage.MessageStorage | ShardStorage
+ > = Layer.scoped(Sharding, make).pipe(
+ Layer.provide([Snowflake.layerGenerator, EntityReaper.Default])
+ )
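// A hedged wiring sketch for the `layer` export above: the four
// requirements in its signature must be provided by the host application.
// The concrete layer names below are illustrative stand-ins, not exports
// verified by this diff:
//
//   const ShardingLive = layer.pipe(
//     Layer.provide(RunnersLive),            // Runners
//     Layer.provide(ShardManagerClientLive), // ShardManagerClient
//     Layer.provide(MessageStorageLive),     // MessageStorage
//     Layer.provide(ShardStorageLive),       // ShardStorage
//     Layer.provide(ShardingConfigLive)      // ShardingConfig
//   )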
+
+ // Utilities
+
+ const ClientAddressTag = Context.GenericTag<EntityAddress>("@effect/cluster/Sharding/ClientAddress")
+ const currentClientAddress = Symbol.for(ClientAddressTag.key)