@effect/cluster 0.28.4 → 0.29.0

This diff compares publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (626)
  1. package/ClusterError/package.json +6 -0
  2. package/ClusterMetrics/package.json +6 -0
  3. package/ClusterSchema/package.json +6 -0
  4. package/DeliverAt/package.json +6 -0
  5. package/Entity/package.json +6 -0
  6. package/EntityAddress/package.json +6 -0
  7. package/EntityId/package.json +6 -0
  8. package/EntityType/package.json +6 -0
  9. package/Envelope/package.json +6 -0
  10. package/HttpCommon/package.json +6 -0
  11. package/HttpRunner/package.json +6 -0
  12. package/HttpShardManager/package.json +6 -0
  13. package/MachineId/package.json +6 -0
  14. package/MessageStorage/package.json +6 -0
  15. package/README.md +2 -2
  16. package/Reply/package.json +6 -0
  17. package/Runner/package.json +6 -0
  18. package/RunnerAddress/package.json +6 -0
  19. package/RunnerHealth/package.json +6 -0
  20. package/RunnerServer/package.json +6 -0
  21. package/Runners/package.json +6 -0
  22. package/ShardStorage/package.json +6 -0
  23. package/Singleton/package.json +6 -0
  24. package/SingletonAddress/package.json +6 -0
  25. package/Snowflake/package.json +6 -0
  26. package/SocketRunner/package.json +6 -0
  27. package/SocketShardManager/package.json +6 -0
  28. package/SqlMessageStorage/package.json +6 -0
  29. package/SqlShardStorage/package.json +6 -0
  30. package/SynchronizedClock/package.json +6 -0
  31. package/dist/cjs/ClusterError.js +180 -0
  32. package/dist/cjs/ClusterError.js.map +1 -0
  33. package/dist/cjs/ClusterMetrics.js +63 -0
  34. package/dist/cjs/ClusterMetrics.js.map +1 -0
  35. package/dist/cjs/{Pods.js → ClusterSchema.js} +10 -22
  36. package/dist/cjs/ClusterSchema.js.map +1 -0
  37. package/dist/cjs/DeliverAt.js +30 -0
  38. package/dist/cjs/DeliverAt.js.map +1 -0
  39. package/dist/cjs/Entity.js +187 -0
  40. package/dist/cjs/Entity.js.map +1 -0
  41. package/dist/cjs/EntityAddress.js +54 -0
  42. package/dist/cjs/EntityAddress.js.map +1 -0
  43. package/dist/cjs/{AtLeastOnce.js → EntityId.js} +6 -7
  44. package/dist/cjs/EntityId.js.map +1 -0
  45. package/dist/cjs/{ShardManagerClient.js → EntityType.js} +5 -16
  46. package/dist/cjs/EntityType.js.map +1 -0
  47. package/dist/cjs/Envelope.js +168 -0
  48. package/dist/cjs/Envelope.js.map +1 -0
  49. package/dist/cjs/HttpCommon.js +49 -0
  50. package/dist/cjs/HttpCommon.js.map +1 -0
  51. package/dist/cjs/HttpRunner.js +108 -0
  52. package/dist/cjs/HttpRunner.js.map +1 -0
  53. package/dist/cjs/HttpShardManager.js +140 -0
  54. package/dist/cjs/HttpShardManager.js.map +1 -0
  55. package/dist/cjs/{AtLeastOnceStorage.js → MachineId.js} +11 -9
  56. package/dist/cjs/MachineId.js.map +1 -0
  57. package/dist/cjs/Message.js +99 -18
  58. package/dist/cjs/Message.js.map +1 -1
  59. package/dist/cjs/MessageStorage.js +356 -0
  60. package/dist/cjs/MessageStorage.js.map +1 -0
  61. package/dist/cjs/Reply.js +200 -0
  62. package/dist/cjs/Reply.js.map +1 -0
  63. package/dist/cjs/Runner.js +79 -0
  64. package/dist/cjs/Runner.js.map +1 -0
  65. package/dist/cjs/RunnerAddress.js +63 -0
  66. package/dist/cjs/RunnerAddress.js.map +1 -0
  67. package/dist/cjs/RunnerHealth.js +68 -0
  68. package/dist/cjs/RunnerHealth.js.map +1 -0
  69. package/dist/cjs/RunnerServer.js +125 -0
  70. package/dist/cjs/RunnerServer.js.map +1 -0
  71. package/dist/cjs/Runners.js +344 -0
  72. package/dist/cjs/Runners.js.map +1 -0
  73. package/dist/cjs/ShardId.js +7 -46
  74. package/dist/cjs/ShardId.js.map +1 -1
  75. package/dist/cjs/ShardManager.js +493 -8
  76. package/dist/cjs/ShardManager.js.map +1 -1
  77. package/dist/cjs/ShardStorage.js +139 -0
  78. package/dist/cjs/ShardStorage.js.map +1 -0
  79. package/dist/cjs/Sharding.js +732 -88
  80. package/dist/cjs/Sharding.js.map +1 -1
  81. package/dist/cjs/ShardingConfig.js +85 -18
  82. package/dist/cjs/ShardingConfig.js.map +1 -1
  83. package/dist/cjs/ShardingRegistrationEvent.js +26 -32
  84. package/dist/cjs/ShardingRegistrationEvent.js.map +1 -1
  85. package/dist/cjs/{ManagerConfig.js → Singleton.js} +11 -20
  86. package/dist/cjs/Singleton.js.map +1 -0
  87. package/dist/cjs/SingletonAddress.js +50 -0
  88. package/dist/cjs/SingletonAddress.js.map +1 -0
  89. package/dist/cjs/Snowflake.js +133 -0
  90. package/dist/cjs/Snowflake.js.map +1 -0
  91. package/dist/cjs/SocketRunner.js +40 -0
  92. package/dist/cjs/SocketRunner.js.map +1 -0
  93. package/dist/cjs/SocketShardManager.js +33 -0
  94. package/dist/cjs/SocketShardManager.js.map +1 -0
  95. package/dist/cjs/SqlMessageStorage.js +668 -0
  96. package/dist/cjs/SqlMessageStorage.js.map +1 -0
  97. package/dist/cjs/SqlShardStorage.js +228 -0
  98. package/dist/cjs/SqlShardStorage.js.map +1 -0
  99. package/dist/cjs/SynchronizedClock.js +66 -0
  100. package/dist/cjs/SynchronizedClock.js.map +1 -0
  101. package/dist/cjs/index.js +57 -45
  102. package/dist/cjs/internal/entityManager.js +311 -143
  103. package/dist/cjs/internal/entityManager.js.map +1 -1
  104. package/dist/cjs/internal/entityReaper.js +47 -0
  105. package/dist/cjs/internal/entityReaper.js.map +1 -0
  106. package/dist/cjs/internal/hash.js +20 -0
  107. package/dist/cjs/internal/hash.js.map +1 -0
  108. package/dist/cjs/internal/interruptors.js +9 -0
  109. package/dist/cjs/internal/interruptors.js.map +1 -0
  110. package/dist/cjs/internal/resourceMap.js +88 -0
  111. package/dist/cjs/internal/resourceMap.js.map +1 -0
  112. package/dist/cjs/internal/resourceRef.js +92 -0
  113. package/dist/cjs/internal/resourceRef.js.map +1 -0
  114. package/dist/cjs/internal/shardManager.js +219 -235
  115. package/dist/cjs/internal/shardManager.js.map +1 -1
  116. package/dist/dts/ClusterError.d.ts +169 -0
  117. package/dist/dts/ClusterError.d.ts.map +1 -0
  118. package/dist/dts/ClusterMetrics.d.ts +50 -0
  119. package/dist/dts/ClusterMetrics.d.ts.map +1 -0
  120. package/dist/dts/ClusterSchema.d.ts +13 -0
  121. package/dist/dts/ClusterSchema.d.ts.map +1 -0
  122. package/dist/dts/DeliverAt.d.ts +27 -0
  123. package/dist/dts/DeliverAt.d.ts.map +1 -0
  124. package/dist/dts/Entity.d.ts +180 -0
  125. package/dist/dts/Entity.d.ts.map +1 -0
  126. package/dist/dts/EntityAddress.d.ts +55 -0
  127. package/dist/dts/EntityAddress.d.ts.map +1 -0
  128. package/dist/dts/EntityId.d.ts +15 -0
  129. package/dist/dts/EntityId.d.ts.map +1 -0
  130. package/dist/dts/EntityType.d.ts +15 -0
  131. package/dist/dts/EntityType.d.ts.map +1 -0
  132. package/dist/dts/Envelope.d.ts +252 -0
  133. package/dist/dts/Envelope.d.ts.map +1 -0
  134. package/dist/dts/HttpCommon.d.ts +25 -0
  135. package/dist/dts/HttpCommon.d.ts.map +1 -0
  136. package/dist/dts/HttpRunner.d.ts +76 -0
  137. package/dist/dts/HttpRunner.d.ts.map +1 -0
  138. package/dist/dts/HttpShardManager.d.ts +119 -0
  139. package/dist/dts/HttpShardManager.d.ts.map +1 -0
  140. package/dist/dts/MachineId.d.ts +20 -0
  141. package/dist/dts/MachineId.d.ts.map +1 -0
  142. package/dist/dts/Message.d.ts +91 -74
  143. package/dist/dts/Message.d.ts.map +1 -1
  144. package/dist/dts/MessageStorage.d.ts +336 -0
  145. package/dist/dts/MessageStorage.d.ts.map +1 -0
  146. package/dist/dts/Reply.d.ts +171 -0
  147. package/dist/dts/Reply.d.ts.map +1 -0
  148. package/dist/dts/Runner.d.ts +81 -0
  149. package/dist/dts/Runner.d.ts.map +1 -0
  150. package/dist/dts/RunnerAddress.d.ts +56 -0
  151. package/dist/dts/RunnerAddress.d.ts.map +1 -0
  152. package/dist/dts/RunnerHealth.d.ts +54 -0
  153. package/dist/dts/RunnerHealth.d.ts.map +1 -0
  154. package/dist/dts/RunnerServer.d.ts +44 -0
  155. package/dist/dts/RunnerServer.d.ts.map +1 -0
  156. package/dist/dts/Runners.d.ts +161 -0
  157. package/dist/dts/Runners.d.ts.map +1 -0
  158. package/dist/dts/ShardId.d.ts +5 -55
  159. package/dist/dts/ShardId.d.ts.map +1 -1
  160. package/dist/dts/ShardManager.d.ts +435 -23
  161. package/dist/dts/ShardManager.d.ts.map +1 -1
  162. package/dist/dts/ShardStorage.d.ts +200 -0
  163. package/dist/dts/ShardStorage.d.ts.map +1 -0
  164. package/dist/dts/Sharding.d.ts +109 -131
  165. package/dist/dts/Sharding.d.ts.map +1 -1
  166. package/dist/dts/ShardingConfig.d.ts +147 -44
  167. package/dist/dts/ShardingConfig.d.ts.map +1 -1
  168. package/dist/dts/ShardingRegistrationEvent.d.ts +38 -23
  169. package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -1
  170. package/dist/dts/Singleton.d.ts +13 -0
  171. package/dist/dts/Singleton.d.ts.map +1 -0
  172. package/dist/dts/SingletonAddress.d.ts +49 -0
  173. package/dist/dts/SingletonAddress.d.ts.map +1 -0
  174. package/dist/dts/Snowflake.d.ts +121 -0
  175. package/dist/dts/Snowflake.d.ts.map +1 -0
  176. package/dist/dts/SocketRunner.d.ts +22 -0
  177. package/dist/dts/SocketRunner.d.ts.map +1 -0
  178. package/dist/dts/SocketShardManager.d.ts +17 -0
  179. package/dist/dts/SocketShardManager.d.ts.map +1 -0
  180. package/dist/dts/SqlMessageStorage.d.ts +43 -0
  181. package/dist/dts/SqlMessageStorage.d.ts.map +1 -0
  182. package/dist/dts/SqlShardStorage.d.ts +38 -0
  183. package/dist/dts/SqlShardStorage.d.ts.map +1 -0
  184. package/dist/dts/SynchronizedClock.d.ts +19 -0
  185. package/dist/dts/SynchronizedClock.d.ts.map +1 -0
  186. package/dist/dts/index.d.ts +48 -24
  187. package/dist/dts/index.d.ts.map +1 -1
  188. package/dist/dts/internal/entityReaper.d.ts +2 -0
  189. package/dist/dts/internal/entityReaper.d.ts.map +1 -0
  190. package/dist/dts/internal/hash.d.ts +2 -0
  191. package/dist/dts/internal/hash.d.ts.map +1 -0
  192. package/dist/dts/internal/interruptors.d.ts +2 -0
  193. package/dist/dts/internal/interruptors.d.ts.map +1 -0
  194. package/dist/dts/internal/resourceMap.d.ts +22 -0
  195. package/dist/dts/internal/resourceMap.d.ts.map +1 -0
  196. package/dist/dts/internal/resourceRef.d.ts +25 -0
  197. package/dist/dts/internal/resourceRef.d.ts.map +1 -0
  198. package/dist/dts/internal/shardManager.d.ts +1 -11
  199. package/dist/dts/internal/shardManager.d.ts.map +1 -1
  200. package/dist/esm/ClusterError.js +164 -0
  201. package/dist/esm/ClusterError.js.map +1 -0
  202. package/dist/esm/ClusterMetrics.js +54 -0
  203. package/dist/esm/ClusterMetrics.js.map +1 -0
  204. package/dist/esm/ClusterSchema.js +13 -0
  205. package/dist/esm/ClusterSchema.js.map +1 -0
  206. package/dist/esm/DeliverAt.js +22 -0
  207. package/dist/esm/DeliverAt.js.map +1 -0
  208. package/dist/esm/Entity.js +173 -0
  209. package/dist/esm/Entity.js.map +1 -0
  210. package/dist/esm/EntityAddress.js +44 -0
  211. package/dist/esm/EntityAddress.js.map +1 -0
  212. package/dist/esm/EntityId.js +10 -0
  213. package/dist/esm/EntityId.js.map +1 -0
  214. package/dist/esm/EntityType.js +10 -0
  215. package/dist/esm/EntityType.js.map +1 -0
  216. package/dist/esm/Envelope.js +154 -0
  217. package/dist/esm/Envelope.js.map +1 -0
  218. package/dist/esm/HttpCommon.js +38 -0
  219. package/dist/esm/HttpCommon.js.map +1 -0
  220. package/dist/esm/HttpRunner.js +98 -0
  221. package/dist/esm/HttpRunner.js.map +1 -0
  222. package/dist/esm/HttpShardManager.js +128 -0
  223. package/dist/esm/HttpShardManager.js.map +1 -0
  224. package/dist/esm/MachineId.js +17 -0
  225. package/dist/esm/MachineId.js.map +1 -0
  226. package/dist/esm/Message.js +88 -17
  227. package/dist/esm/Message.js.map +1 -1
  228. package/dist/esm/MessageStorage.js +345 -0
  229. package/dist/esm/MessageStorage.js.map +1 -0
  230. package/dist/esm/Reply.js +184 -0
  231. package/dist/esm/Reply.js.map +1 -0
  232. package/dist/esm/Runner.js +68 -0
  233. package/dist/esm/Runner.js.map +1 -0
  234. package/dist/esm/RunnerAddress.js +52 -0
  235. package/dist/esm/RunnerAddress.js.map +1 -0
  236. package/dist/esm/RunnerHealth.js +58 -0
  237. package/dist/esm/RunnerHealth.js.map +1 -0
  238. package/dist/esm/RunnerServer.js +116 -0
  239. package/dist/esm/RunnerServer.js.map +1 -0
  240. package/dist/esm/Runners.js +332 -0
  241. package/dist/esm/Runners.js.map +1 -0
  242. package/dist/esm/ShardId.js +5 -42
  243. package/dist/esm/ShardId.js.map +1 -1
  244. package/dist/esm/ShardManager.js +486 -7
  245. package/dist/esm/ShardManager.js.map +1 -1
  246. package/dist/esm/ShardStorage.js +129 -0
  247. package/dist/esm/ShardStorage.js.map +1 -0
  248. package/dist/esm/Sharding.js +730 -87
  249. package/dist/esm/Sharding.js.map +1 -1
  250. package/dist/esm/ShardingConfig.js +80 -17
  251. package/dist/esm/ShardingConfig.js.map +1 -1
  252. package/dist/esm/ShardingRegistrationEvent.js +19 -29
  253. package/dist/esm/ShardingRegistrationEvent.js.map +1 -1
  254. package/dist/esm/Singleton.js +15 -0
  255. package/dist/esm/Singleton.js.map +1 -0
  256. package/dist/esm/SingletonAddress.js +40 -0
  257. package/dist/esm/SingletonAddress.js.map +1 -0
  258. package/dist/esm/Snowflake.js +117 -0
  259. package/dist/esm/Snowflake.js.map +1 -0
  260. package/dist/esm/SocketRunner.js +31 -0
  261. package/dist/esm/SocketRunner.js.map +1 -0
  262. package/dist/esm/SocketShardManager.js +24 -0
  263. package/dist/esm/SocketShardManager.js.map +1 -0
  264. package/dist/esm/SqlMessageStorage.js +658 -0
  265. package/dist/esm/SqlMessageStorage.js.map +1 -0
  266. package/dist/esm/SqlShardStorage.js +218 -0
  267. package/dist/esm/SqlShardStorage.js.map +1 -0
  268. package/dist/esm/SynchronizedClock.js +57 -0
  269. package/dist/esm/SynchronizedClock.js.map +1 -0
  270. package/dist/esm/index.js +48 -24
  271. package/dist/esm/index.js.map +1 -1
  272. package/dist/esm/internal/entityManager.js +311 -142
  273. package/dist/esm/internal/entityManager.js.map +1 -1
  274. package/dist/esm/internal/entityReaper.js +38 -0
  275. package/dist/esm/internal/entityReaper.js.map +1 -0
  276. package/dist/esm/internal/hash.js +12 -0
  277. package/dist/esm/internal/hash.js.map +1 -0
  278. package/dist/esm/internal/interruptors.js +3 -0
  279. package/dist/esm/internal/interruptors.js.map +1 -0
  280. package/dist/esm/internal/resourceMap.js +79 -0
  281. package/dist/esm/internal/resourceMap.js.map +1 -0
  282. package/dist/esm/internal/resourceRef.js +83 -0
  283. package/dist/esm/internal/resourceRef.js.map +1 -0
  284. package/dist/esm/internal/shardManager.js +217 -233
  285. package/dist/esm/internal/shardManager.js.map +1 -1
  286. package/package.json +212 -154
  287. package/src/ClusterError.ts +193 -0
  288. package/src/ClusterMetrics.ts +62 -0
  289. package/src/ClusterSchema.ts +13 -0
  290. package/src/DeliverAt.ts +36 -0
  291. package/src/Entity.ts +438 -0
  292. package/src/EntityAddress.ts +55 -0
  293. package/src/EntityId.ts +16 -0
  294. package/src/EntityType.ts +16 -0
  295. package/src/Envelope.ts +352 -0
  296. package/src/HttpCommon.ts +73 -0
  297. package/src/HttpRunner.ts +196 -0
  298. package/src/HttpShardManager.ts +273 -0
  299. package/src/MachineId.ts +27 -0
  300. package/src/Message.ts +143 -92
  301. package/src/MessageStorage.ts +697 -0
  302. package/src/Reply.ts +295 -0
  303. package/src/Runner.ts +84 -0
  304. package/src/RunnerAddress.ts +61 -0
  305. package/src/RunnerHealth.ts +87 -0
  306. package/src/RunnerServer.ts +156 -0
  307. package/src/Runners.ts +533 -0
  308. package/src/ShardId.ts +10 -62
  309. package/src/ShardManager.ts +780 -29
  310. package/src/ShardStorage.ts +289 -0
  311. package/src/Sharding.ts +1060 -183
  312. package/src/ShardingConfig.ts +186 -45
  313. package/src/ShardingRegistrationEvent.ts +38 -39
  314. package/src/Singleton.ts +20 -0
  315. package/src/SingletonAddress.ts +47 -0
  316. package/src/Snowflake.ts +194 -0
  317. package/src/SocketRunner.ts +59 -0
  318. package/src/SocketShardManager.ts +48 -0
  319. package/src/SqlMessageStorage.ts +833 -0
  320. package/src/SqlShardStorage.ts +292 -0
  321. package/src/SynchronizedClock.ts +82 -0
  322. package/src/index.ts +54 -24
  323. package/src/internal/entityManager.ts +464 -361
  324. package/src/internal/entityReaper.ts +53 -0
  325. package/src/internal/hash.ts +11 -0
  326. package/src/internal/interruptors.ts +4 -0
  327. package/src/internal/resourceMap.ts +89 -0
  328. package/src/internal/resourceRef.ts +88 -0
  329. package/src/internal/shardManager.ts +273 -546
  330. package/AtLeastOnce/package.json +0 -6
  331. package/AtLeastOnceStorage/package.json +0 -6
  332. package/Broadcaster/package.json +0 -6
  333. package/ManagerConfig/package.json +0 -6
  334. package/MessageState/package.json +0 -6
  335. package/Messenger/package.json +0 -6
  336. package/Pod/package.json +0 -6
  337. package/PodAddress/package.json +0 -6
  338. package/Pods/package.json +0 -6
  339. package/PodsHealth/package.json +0 -6
  340. package/PoisonPill/package.json +0 -6
  341. package/RecipientAddress/package.json +0 -6
  342. package/RecipientBehaviour/package.json +0 -6
  343. package/RecipientBehaviourContext/package.json +0 -6
  344. package/RecipientType/package.json +0 -6
  345. package/Serialization/package.json +0 -6
  346. package/SerializedEnvelope/package.json +0 -6
  347. package/SerializedMessage/package.json +0 -6
  348. package/ShardManagerClient/package.json +0 -6
  349. package/ShardingEvent/package.json +0 -6
  350. package/ShardingException/package.json +0 -6
  351. package/Storage/package.json +0 -6
  352. package/dist/cjs/AtLeastOnce.js.map +0 -1
  353. package/dist/cjs/AtLeastOnceStorage.js.map +0 -1
  354. package/dist/cjs/Broadcaster.js +0 -6
  355. package/dist/cjs/Broadcaster.js.map +0 -1
  356. package/dist/cjs/ManagerConfig.js.map +0 -1
  357. package/dist/cjs/MessageState.js +0 -55
  358. package/dist/cjs/MessageState.js.map +0 -1
  359. package/dist/cjs/Messenger.js +0 -6
  360. package/dist/cjs/Messenger.js.map +0 -1
  361. package/dist/cjs/Pod.js +0 -78
  362. package/dist/cjs/Pod.js.map +0 -1
  363. package/dist/cjs/PodAddress.js +0 -77
  364. package/dist/cjs/PodAddress.js.map +0 -1
  365. package/dist/cjs/Pods.js.map +0 -1
  366. package/dist/cjs/PodsHealth.js +0 -41
  367. package/dist/cjs/PodsHealth.js.map +0 -1
  368. package/dist/cjs/PoisonPill.js +0 -78
  369. package/dist/cjs/PoisonPill.js.map +0 -1
  370. package/dist/cjs/RecipientAddress.js +0 -79
  371. package/dist/cjs/RecipientAddress.js.map +0 -1
  372. package/dist/cjs/RecipientBehaviour.js +0 -38
  373. package/dist/cjs/RecipientBehaviour.js.map +0 -1
  374. package/dist/cjs/RecipientBehaviourContext.js +0 -64
  375. package/dist/cjs/RecipientBehaviourContext.js.map +0 -1
  376. package/dist/cjs/RecipientType.js +0 -123
  377. package/dist/cjs/RecipientType.js.map +0 -1
  378. package/dist/cjs/Serialization.js +0 -32
  379. package/dist/cjs/Serialization.js.map +0 -1
  380. package/dist/cjs/SerializedEnvelope.js +0 -87
  381. package/dist/cjs/SerializedEnvelope.js.map +0 -1
  382. package/dist/cjs/SerializedMessage.js +0 -64
  383. package/dist/cjs/SerializedMessage.js.map +0 -1
  384. package/dist/cjs/ShardManagerClient.js.map +0 -1
  385. package/dist/cjs/ShardingEvent.js +0 -72
  386. package/dist/cjs/ShardingEvent.js.map +0 -1
  387. package/dist/cjs/ShardingException.js +0 -107
  388. package/dist/cjs/ShardingException.js.map +0 -1
  389. package/dist/cjs/Storage.js +0 -40
  390. package/dist/cjs/Storage.js.map +0 -1
  391. package/dist/cjs/internal/atLeastOnce.js +0 -35
  392. package/dist/cjs/internal/atLeastOnce.js.map +0 -1
  393. package/dist/cjs/internal/atLeastOnceStorage.js +0 -163
  394. package/dist/cjs/internal/atLeastOnceStorage.js.map +0 -1
  395. package/dist/cjs/internal/entityState.js +0 -47
  396. package/dist/cjs/internal/entityState.js.map +0 -1
  397. package/dist/cjs/internal/managerConfig.js +0 -46
  398. package/dist/cjs/internal/managerConfig.js.map +0 -1
  399. package/dist/cjs/internal/message.js +0 -48
  400. package/dist/cjs/internal/message.js.map +0 -1
  401. package/dist/cjs/internal/messageState.js +0 -79
  402. package/dist/cjs/internal/messageState.js.map +0 -1
  403. package/dist/cjs/internal/podWithMetadata.js +0 -54
  404. package/dist/cjs/internal/podWithMetadata.js.map +0 -1
  405. package/dist/cjs/internal/pods.js +0 -35
  406. package/dist/cjs/internal/pods.js.map +0 -1
  407. package/dist/cjs/internal/podsHealth.js +0 -40
  408. package/dist/cjs/internal/podsHealth.js.map +0 -1
  409. package/dist/cjs/internal/recipientBehaviour.js +0 -52
  410. package/dist/cjs/internal/recipientBehaviour.js.map +0 -1
  411. package/dist/cjs/internal/recipientBehaviourContext.js +0 -36
  412. package/dist/cjs/internal/recipientBehaviourContext.js.map +0 -1
  413. package/dist/cjs/internal/serialization.js +0 -48
  414. package/dist/cjs/internal/serialization.js.map +0 -1
  415. package/dist/cjs/internal/shardManagerClient.js +0 -48
  416. package/dist/cjs/internal/shardManagerClient.js.map +0 -1
  417. package/dist/cjs/internal/shardManagerState.js +0 -44
  418. package/dist/cjs/internal/shardManagerState.js.map +0 -1
  419. package/dist/cjs/internal/sharding.js +0 -306
  420. package/dist/cjs/internal/sharding.js.map +0 -1
  421. package/dist/cjs/internal/shardingConfig.js +0 -56
  422. package/dist/cjs/internal/shardingConfig.js.map +0 -1
  423. package/dist/cjs/internal/storage.js +0 -52
  424. package/dist/cjs/internal/storage.js.map +0 -1
  425. package/dist/cjs/internal/utils.js +0 -69
  426. package/dist/cjs/internal/utils.js.map +0 -1
  427. package/dist/dts/AtLeastOnce.d.ts +0 -20
  428. package/dist/dts/AtLeastOnce.d.ts.map +0 -1
  429. package/dist/dts/AtLeastOnceStorage.d.ts +0 -75
  430. package/dist/dts/AtLeastOnceStorage.d.ts.map +0 -1
  431. package/dist/dts/Broadcaster.d.ts +0 -32
  432. package/dist/dts/Broadcaster.d.ts.map +0 -1
  433. package/dist/dts/ManagerConfig.d.ts +0 -61
  434. package/dist/dts/ManagerConfig.d.ts.map +0 -1
  435. package/dist/dts/MessageState.d.ts +0 -107
  436. package/dist/dts/MessageState.d.ts.map +0 -1
  437. package/dist/dts/Messenger.d.ts +0 -32
  438. package/dist/dts/Messenger.d.ts.map +0 -1
  439. package/dist/dts/Pod.d.ts +0 -81
  440. package/dist/dts/Pod.d.ts.map +0 -1
  441. package/dist/dts/PodAddress.d.ts +0 -80
  442. package/dist/dts/PodAddress.d.ts.map +0 -1
  443. package/dist/dts/Pods.d.ts +0 -78
  444. package/dist/dts/Pods.d.ts.map +0 -1
  445. package/dist/dts/PodsHealth.d.ts +0 -66
  446. package/dist/dts/PodsHealth.d.ts.map +0 -1
  447. package/dist/dts/PoisonPill.d.ts +0 -78
  448. package/dist/dts/PoisonPill.d.ts.map +0 -1
  449. package/dist/dts/RecipientAddress.d.ts +0 -57
  450. package/dist/dts/RecipientAddress.d.ts.map +0 -1
  451. package/dist/dts/RecipientBehaviour.d.ts +0 -72
  452. package/dist/dts/RecipientBehaviour.d.ts.map +0 -1
  453. package/dist/dts/RecipientBehaviourContext.d.ts +0 -83
  454. package/dist/dts/RecipientBehaviourContext.d.ts.map +0 -1
  455. package/dist/dts/RecipientType.d.ts +0 -93
  456. package/dist/dts/RecipientType.d.ts.map +0 -1
  457. package/dist/dts/Serialization.d.ts +0 -58
  458. package/dist/dts/Serialization.d.ts.map +0 -1
  459. package/dist/dts/SerializedEnvelope.d.ts +0 -86
  460. package/dist/dts/SerializedEnvelope.d.ts.map +0 -1
  461. package/dist/dts/SerializedMessage.d.ts +0 -66
  462. package/dist/dts/SerializedMessage.d.ts.map +0 -1
  463. package/dist/dts/ShardManagerClient.d.ts +0 -50
  464. package/dist/dts/ShardManagerClient.d.ts.map +0 -1
  465. package/dist/dts/ShardingEvent.d.ts +0 -90
  466. package/dist/dts/ShardingEvent.d.ts.map +0 -1
  467. package/dist/dts/ShardingException.d.ts +0 -125
  468. package/dist/dts/ShardingException.d.ts.map +0 -1
  469. package/dist/dts/Storage.d.ts +0 -78
  470. package/dist/dts/Storage.d.ts.map +0 -1
  471. package/dist/dts/internal/atLeastOnce.d.ts +0 -2
  472. package/dist/dts/internal/atLeastOnce.d.ts.map +0 -1
  473. package/dist/dts/internal/atLeastOnceStorage.d.ts +0 -2
  474. package/dist/dts/internal/atLeastOnceStorage.d.ts.map +0 -1
  475. package/dist/dts/internal/entityState.d.ts +0 -21
  476. package/dist/dts/internal/entityState.d.ts.map +0 -1
  477. package/dist/dts/internal/managerConfig.d.ts +0 -2
  478. package/dist/dts/internal/managerConfig.d.ts.map +0 -1
  479. package/dist/dts/internal/message.d.ts +0 -9
  480. package/dist/dts/internal/message.d.ts.map +0 -1
  481. package/dist/dts/internal/messageState.d.ts +0 -2
  482. package/dist/dts/internal/messageState.d.ts.map +0 -1
  483. package/dist/dts/internal/podWithMetadata.d.ts +0 -2
  484. package/dist/dts/internal/podWithMetadata.d.ts.map +0 -1
  485. package/dist/dts/internal/pods.d.ts +0 -2
  486. package/dist/dts/internal/pods.d.ts.map +0 -1
  487. package/dist/dts/internal/podsHealth.d.ts +0 -2
  488. package/dist/dts/internal/podsHealth.d.ts.map +0 -1
  489. package/dist/dts/internal/recipientBehaviour.d.ts +0 -2
  490. package/dist/dts/internal/recipientBehaviour.d.ts.map +0 -1
  491. package/dist/dts/internal/recipientBehaviourContext.d.ts +0 -2
  492. package/dist/dts/internal/recipientBehaviourContext.d.ts.map +0 -1
  493. package/dist/dts/internal/serialization.d.ts +0 -2
  494. package/dist/dts/internal/serialization.d.ts.map +0 -1
  495. package/dist/dts/internal/shardManagerClient.d.ts +0 -2
  496. package/dist/dts/internal/shardManagerClient.d.ts.map +0 -1
  497. package/dist/dts/internal/shardManagerState.d.ts +0 -26
  498. package/dist/dts/internal/shardManagerState.d.ts.map +0 -1
  499. package/dist/dts/internal/sharding.d.ts +0 -2
  500. package/dist/dts/internal/sharding.d.ts.map +0 -1
  501. package/dist/dts/internal/shardingConfig.d.ts +0 -2
  502. package/dist/dts/internal/shardingConfig.d.ts.map +0 -1
  503. package/dist/dts/internal/storage.d.ts +0 -2
  504. package/dist/dts/internal/storage.d.ts.map +0 -1
  505. package/dist/dts/internal/utils.d.ts +0 -2
  506. package/dist/dts/internal/utils.d.ts.map +0 -1
  507. package/dist/esm/AtLeastOnce.js +0 -12
  508. package/dist/esm/AtLeastOnce.js.map +0 -1
  509. package/dist/esm/AtLeastOnceStorage.js +0 -17
  510. package/dist/esm/AtLeastOnceStorage.js.map +0 -1
  511. package/dist/esm/Broadcaster.js +0 -2
  512. package/dist/esm/Broadcaster.js.map +0 -1
  513. package/dist/esm/ManagerConfig.js +0 -26
  514. package/dist/esm/ManagerConfig.js.map +0 -1
  515. package/dist/esm/MessageState.js +0 -47
  516. package/dist/esm/MessageState.js.map +0 -1
  517. package/dist/esm/Messenger.js +0 -2
  518. package/dist/esm/Messenger.js.map +0 -1
  519. package/dist/esm/Pod.js +0 -65
  520. package/dist/esm/Pod.js.map +0 -1
  521. package/dist/esm/PodAddress.js +0 -64
  522. package/dist/esm/PodAddress.js.map +0 -1
  523. package/dist/esm/Pods.js +0 -27
  524. package/dist/esm/Pods.js.map +0 -1
  525. package/dist/esm/PodsHealth.js +0 -33
  526. package/dist/esm/PodsHealth.js.map +0 -1
  527. package/dist/esm/PoisonPill.js +0 -65
  528. package/dist/esm/PoisonPill.js.map +0 -1
  529. package/dist/esm/RecipientAddress.js +0 -67
  530. package/dist/esm/RecipientAddress.js.map +0 -1
  531. package/dist/esm/RecipientBehaviour.js +0 -30
  532. package/dist/esm/RecipientBehaviour.js.map +0 -1
  533. package/dist/esm/RecipientBehaviourContext.js +0 -56
  534. package/dist/esm/RecipientBehaviourContext.js.map +0 -1
  535. package/dist/esm/RecipientType.js +0 -108
  536. package/dist/esm/RecipientType.js.map +0 -1
  537. package/dist/esm/Serialization.js +0 -24
  538. package/dist/esm/Serialization.js.map +0 -1
  539. package/dist/esm/SerializedEnvelope.js +0 -74
  540. package/dist/esm/SerializedEnvelope.js.map +0 -1
  541. package/dist/esm/SerializedMessage.js +0 -51
  542. package/dist/esm/SerializedMessage.js.map +0 -1
  543. package/dist/esm/ShardManagerClient.js +0 -22
  544. package/dist/esm/ShardManagerClient.js.map +0 -1
  545. package/dist/esm/ShardingEvent.js +0 -62
  546. package/dist/esm/ShardingEvent.js.map +0 -1
  547. package/dist/esm/ShardingException.js +0 -91
  548. package/dist/esm/ShardingException.js.map +0 -1
  549. package/dist/esm/Storage.js +0 -32
  550. package/dist/esm/Storage.js.map +0 -1
  551. package/dist/esm/internal/atLeastOnce.js +0 -26
  552. package/dist/esm/internal/atLeastOnce.js.map +0 -1
  553. package/dist/esm/internal/atLeastOnceStorage.js +0 -154
  554. package/dist/esm/internal/atLeastOnceStorage.js.map +0 -1
  555. package/dist/esm/internal/entityState.js +0 -35
  556. package/dist/esm/internal/entityState.js.map +0 -1
  557. package/dist/esm/internal/managerConfig.js +0 -38
  558. package/dist/esm/internal/managerConfig.js.map +0 -1
  559. package/dist/esm/internal/message.js +0 -35
  560. package/dist/esm/internal/message.js.map +0 -1
  561. package/dist/esm/internal/messageState.js +0 -66
  562. package/dist/esm/internal/messageState.js.map +0 -1
  563. package/dist/esm/internal/podWithMetadata.js +0 -41
  564. package/dist/esm/internal/podWithMetadata.js.map +0 -1
  565. package/dist/esm/internal/pods.js +0 -25
  566. package/dist/esm/internal/pods.js.map +0 -1
  567. package/dist/esm/internal/podsHealth.js +0 -30
  568. package/dist/esm/internal/podsHealth.js.map +0 -1
  569. package/dist/esm/internal/recipientBehaviour.js +0 -42
  570. package/dist/esm/internal/recipientBehaviour.js.map +0 -1
  571. package/dist/esm/internal/recipientBehaviourContext.js +0 -26
  572. package/dist/esm/internal/recipientBehaviourContext.js.map +0 -1
  573. package/dist/esm/internal/serialization.js +0 -38
  574. package/dist/esm/internal/serialization.js.map +0 -1
  575. package/dist/esm/internal/shardManagerClient.js +0 -38
  576. package/dist/esm/internal/shardManagerClient.js.map +0 -1
  577. package/dist/esm/internal/shardManagerState.js +0 -36
  578. package/dist/esm/internal/shardManagerState.js.map +0 -1
  579. package/dist/esm/internal/sharding.js +0 -288
  580. package/dist/esm/internal/sharding.js.map +0 -1
  581. package/dist/esm/internal/shardingConfig.js +0 -47
  582. package/dist/esm/internal/shardingConfig.js.map +0 -1
  583. package/dist/esm/internal/storage.js +0 -42
  584. package/dist/esm/internal/storage.js.map +0 -1
  585. package/dist/esm/internal/utils.js +0 -56
  586. package/dist/esm/internal/utils.js.map +0 -1
  587. package/src/AtLeastOnce.ts +0 -28
  588. package/src/AtLeastOnceStorage.ts +0 -96
  589. package/src/Broadcaster.ts +0 -48
  590. package/src/ManagerConfig.ts +0 -67
  591. package/src/MessageState.ts +0 -126
  592. package/src/Messenger.ts +0 -40
  593. package/src/Pod.ts +0 -95
  594. package/src/PodAddress.ts +0 -94
  595. package/src/Pods.ts +0 -100
  596. package/src/PodsHealth.ts +0 -74
  597. package/src/PoisonPill.ts +0 -105
  598. package/src/RecipientAddress.ts +0 -72
  599. package/src/RecipientBehaviour.ts +0 -108
  600. package/src/RecipientBehaviourContext.ts +0 -101
  601. package/src/RecipientType.ts +0 -134
  602. package/src/Serialization.ts +0 -72
  603. package/src/SerializedEnvelope.ts +0 -108
  604. package/src/SerializedMessage.ts +0 -82
  605. package/src/ShardManagerClient.ts +0 -57
  606. package/src/ShardingEvent.ts +0 -121
  607. package/src/ShardingException.ts +0 -151
  608. package/src/Storage.ts +0 -92
  609. package/src/internal/atLeastOnce.ts +0 -59
  610. package/src/internal/atLeastOnceStorage.ts +0 -218
  611. package/src/internal/entityState.ts +0 -64
  612. package/src/internal/managerConfig.ts +0 -84
  613. package/src/internal/message.ts +0 -63
  614. package/src/internal/messageState.ts +0 -98
  615. package/src/internal/podWithMetadata.ts +0 -72
  616. package/src/internal/pods.ts +0 -29
  617. package/src/internal/podsHealth.ts +0 -39
  618. package/src/internal/recipientBehaviour.ts +0 -133
  619. package/src/internal/recipientBehaviourContext.ts +0 -70
  620. package/src/internal/serialization.ts +0 -63
  621. package/src/internal/shardManagerClient.ts +0 -49
  622. package/src/internal/shardManagerState.ts +0 -80
  623. package/src/internal/sharding.ts +0 -789
  624. package/src/internal/shardingConfig.ts +0 -97
  625. package/src/internal/storage.ts +0 -60
  626. package/src/internal/utils.ts +0 -54
@@ -1,98 +1,741 @@
1
- import * as internal from "./internal/sharding.js";
1
+ import * as RpcClient from "@effect/rpc/RpcClient";
2
+ import { RequestId } from "@effect/rpc/RpcMessage";
3
+ import * as Arr from "effect/Array";
4
+ import * as Cause from "effect/Cause";
5
+ import * as Context from "effect/Context";
6
+ import * as Effect from "effect/Effect";
7
+ import * as Equal from "effect/Equal";
8
+ import * as Fiber from "effect/Fiber";
9
+ import * as FiberHandle from "effect/FiberHandle";
10
+ import * as FiberMap from "effect/FiberMap";
11
+ import * as FiberRef from "effect/FiberRef";
12
+ import { constant } from "effect/Function";
13
+ import * as HashMap from "effect/HashMap";
14
+ import * as Iterable from "effect/Iterable";
15
+ import * as Layer from "effect/Layer";
16
+ import * as MutableHashMap from "effect/MutableHashMap";
17
+ import * as MutableRef from "effect/MutableRef";
18
+ import * as Option from "effect/Option";
19
+ import * as Predicate from "effect/Predicate";
20
+ import * as PubSub from "effect/PubSub";
21
+ import * as Schedule from "effect/Schedule";
22
+ import * as Scope from "effect/Scope";
23
+ import * as Stream from "effect/Stream";
24
+ import { EntityNotManagedByRunner, RunnerUnavailable } from "./ClusterError.js";
25
+ import { Persisted } from "./ClusterSchema.js";
26
+ import { EntityAddress } from "./EntityAddress.js";
27
+ import { EntityId } from "./EntityId.js";
28
+ import * as Envelope from "./Envelope.js";
29
+ import * as EntityManager from "./internal/entityManager.js";
30
+ import { EntityReaper } from "./internal/entityReaper.js";
31
+ import { hashString } from "./internal/hash.js";
32
+ import { internalInterruptors } from "./internal/interruptors.js";
33
+ import { ResourceMap } from "./internal/resourceMap.js";
34
+ import * as Message from "./Message.js";
35
+ import * as MessageStorage from "./MessageStorage.js";
36
+ import * as Reply from "./Reply.js";
37
+ import { Runners } from "./Runners.js";
38
+ import { ShardId } from "./ShardId.js";
39
+ import { ShardingConfig } from "./ShardingConfig.js";
40
+ import { EntityRegistered, SingletonRegistered } from "./ShardingRegistrationEvent.js";
41
+ import { ShardManagerClient } from "./ShardManager.js";
42
+ import { ShardStorage } from "./ShardStorage.js";
43
+ import { SingletonAddress } from "./SingletonAddress.js";
44
+ import * as Snowflake from "./Snowflake.js";
2
45
  /**
3
46
  * @since 1.0.0
4
- * @category symbols
47
+ * @category models
5
48
  */
6
- export const ShardingTypeId = internal.ShardingTypeId;
49
+ export class Sharding extends /*#__PURE__*/Context.Tag("@effect/cluster/Sharding")() {}
7
50
  /**
8
51
  * @since 1.0.0
9
- * @category context
52
+ * @category constructors
10
53
  */
11
- export const Tag = internal.shardingTag;
54
+ export const make = /*#__PURE__*/Effect.gen(function* () {
55
+ const config = yield* ShardingConfig;
56
+ const runners = yield* Runners;
57
+ const shardManager = yield* ShardManagerClient;
58
+ const snowflakeGen = yield* Snowflake.Generator;
59
+ const shardingScope = yield* Effect.scope;
60
+ const isShutdown = MutableRef.make(false);
61
+ const storage = yield* MessageStorage.MessageStorage;
62
+ const storageEnabled = storage !== MessageStorage.noop;
63
+ const shardStorage = yield* ShardStorage;
64
+ const entityManagers = new Map();
65
+ const shardAssignments = MutableHashMap.empty();
66
+ const selfShards = new Set();
67
+ // the active shards are the ones that we have acquired the lock for
68
+ const acquiredShards = new Set();
69
+ const activeShardsLatch = yield* Effect.makeLatch(false);
70
+ const events = yield* PubSub.unbounded();
71
+ const getRegistrationEvents = Stream.fromPubSub(events);
72
+ const isLocalRunner = address => Option.isSome(config.runnerAddress) && Equal.equals(address, config.runnerAddress.value);
73
+ function getShardId(entityId) {
74
+ return ShardId.make(Math.abs(hashString(entityId) % config.numberOfShards) + 1);
75
+ }
76
+ function isEntityOnLocalShards(address) {
77
+ return acquiredShards.has(address.shardId);
78
+ }
79
+ // --- Shard acquisition ---
80
+ if (Option.isSome(config.runnerAddress)) {
81
+ const selfAddress = config.runnerAddress.value;
82
+ yield* Scope.addFinalizerExit(shardingScope, () => {
83
+ // the locks expire over time, so if this fails we ignore it
84
+ return Effect.ignore(shardStorage.releaseAll(selfAddress));
85
+ });
86
+ const releasingShards = new Set();
87
+ yield* Effect.gen(function* () {
88
+ while (true) {
89
+ yield* activeShardsLatch.await;
90
+ // if a shard is no longer assigned to this runner, we release it
91
+ for (const shardId of acquiredShards) {
92
+ if (selfShards.has(shardId)) continue;
93
+ acquiredShards.delete(shardId);
94
+ releasingShards.add(shardId);
95
+ }
96
+ // if a shard has been assigned to this runner, we acquire it
97
+ const unacquiredShards = new Set();
98
+ for (const shardId of selfShards) {
99
+ if (acquiredShards.has(shardId) || releasingShards.has(shardId)) continue;
100
+ unacquiredShards.add(shardId);
101
+ }
102
+ if (releasingShards.size > 0) {
103
+ yield* Effect.forkIn(syncSingletons, shardingScope);
104
+ yield* releaseShards;
105
+ }
106
+ if (unacquiredShards.size === 0) {
107
+ yield* activeShardsLatch.close;
108
+ continue;
109
+ }
110
+ const acquired = yield* shardStorage.acquire(selfAddress, unacquiredShards);
111
+ for (const shardId of acquired) {
112
+ acquiredShards.add(shardId);
113
+ }
114
+ if (acquired.length > 0) {
115
+ yield* storageReadLatch.open;
116
+ yield* Effect.forkIn(syncSingletons, shardingScope);
117
+ }
118
+ yield* Effect.sleep(1000);
119
+ }
120
+ }).pipe(Effect.catchAllCause(cause => Effect.logWarning("Could not acquire/release shards", cause)), Effect.repeat(Schedule.spaced(config.entityMessagePollInterval)), Effect.annotateLogs({
121
+ package: "@effect/cluster",
122
+ module: "Sharding",
123
+ fiber: "Shard acquisition loop",
124
+ runner: selfAddress
125
+ }), Effect.interruptible, Effect.forkIn(shardingScope));
126
+ // refresh the shard locks every minute
127
+ yield* Effect.suspend(() => shardStorage.refresh(selfAddress, [...acquiredShards, ...releasingShards])).pipe(Effect.flatMap(acquired => {
128
+ for (const shardId of acquiredShards) {
129
+ if (!acquired.includes(shardId)) {
130
+ acquiredShards.delete(shardId);
131
+ releasingShards.add(shardId);
132
+ }
133
+ }
134
+ return releasingShards.size > 0 ? Effect.andThen(Effect.forkIn(syncSingletons, shardingScope), releaseShards) : Effect.void;
135
+ }), Effect.retry({
136
+ times: 5,
137
+ schedule: Schedule.spaced(250)
138
+ }), Effect.catchAllCause(cause => Effect.logError("Could not refresh shard locks", cause).pipe(Effect.andThen(clearSelfShards))), Effect.delay("1 minute"), Effect.forever, Effect.interruptible, Effect.forkIn(shardingScope));
139
+ const releaseShardsLock = Effect.unsafeMakeSemaphore(1).withPermits(1);
140
+ const releaseShards = releaseShardsLock(Effect.suspend(() => Effect.forEach(releasingShards, shardId => Effect.forEach(entityManagers.values(), state => state.manager.interruptShard(shardId), {
141
+ concurrency: "unbounded",
142
+ discard: true
143
+ }).pipe(Effect.andThen(shardStorage.release(selfAddress, shardId)), Effect.annotateLogs({
144
+ runner: selfAddress
145
+ }), Effect.andThen(() => {
146
+ releasingShards.delete(shardId);
147
+ })), {
148
+ concurrency: "unbounded",
149
+ discard: true
150
+ })));
151
+ }
152
+ const clearSelfShards = Effect.suspend(() => {
153
+ selfShards.clear();
154
+ return activeShardsLatch.open;
155
+ });
156
+ // --- Singletons ---
157
+ const singletons = new Map();
158
+ const singletonFibers = yield* FiberMap.make();
159
+ const withSingletonLock = Effect.unsafeMakeSemaphore(1).withPermits(1);
160
+ const registerSingleton = Effect.fnUntraced(function* (name, run) {
161
+ const address = new SingletonAddress({
162
+ shardId: getShardId(EntityId.make(name)),
163
+ name
164
+ });
165
+ let map = singletons.get(address.shardId);
166
+ if (!map) {
167
+ map = MutableHashMap.empty();
168
+ singletons.set(address.shardId, map);
169
+ }
170
+ if (MutableHashMap.has(map, address)) {
171
+ return yield* Effect.dieMessage(`Singleton '${name}' is already registered`);
172
+ }
173
+ const context = yield* Effect.context();
174
+ const wrappedRun = run.pipe(Effect.locally(FiberRef.currentLogAnnotations, HashMap.empty()), Effect.andThen(Effect.never), Effect.scoped, Effect.provide(context), Effect.orDie, Effect.interruptible);
175
+ MutableHashMap.set(map, address, wrappedRun);
176
+ yield* PubSub.publish(events, SingletonRegistered({
177
+ address
178
+ }));
179
+ // start if we are on the right shard
180
+ if (acquiredShards.has(address.shardId)) {
181
+ yield* Effect.logDebug("Starting singleton", address);
182
+ yield* FiberMap.run(singletonFibers, address, wrappedRun);
183
+ }
184
+ }, withSingletonLock);
185
+ const syncSingletons = withSingletonLock(Effect.gen(function* () {
186
+ for (const [shardId, map] of singletons) {
187
+ for (const [address, run] of map) {
188
+ const running = FiberMap.unsafeHas(singletonFibers, address);
189
+ const shouldBeRunning = acquiredShards.has(shardId);
190
+ if (running && !shouldBeRunning) {
191
+ yield* Effect.logDebug("Stopping singleton", address);
192
+ internalInterruptors.add(yield* Effect.fiberId);
193
+ yield* FiberMap.remove(singletonFibers, address);
194
+ } else if (!running && shouldBeRunning) {
195
+ yield* Effect.logDebug("Starting singleton", address);
196
+ yield* FiberMap.run(singletonFibers, address, run);
197
+ }
198
+ }
199
+ }
200
+ }));
201
+ // --- Storage inbox ---
202
+ const storageReadLatch = yield* Effect.makeLatch(true);
203
+ const openStorageReadLatch = constant(storageReadLatch.open);
204
+ const storageReadLock = Effect.unsafeMakeSemaphore(1);
205
+ const withStorageReadLock = storageReadLock.withPermits(1);
206
+ if (storageEnabled && Option.isSome(config.runnerAddress)) {
207
+ const selfAddress = config.runnerAddress.value;
208
+ yield* Effect.gen(function* () {
209
+ yield* Effect.logDebug("Starting");
210
+ yield* Effect.addFinalizer(() => Effect.logDebug("Shutting down"));
211
+ // keep track of the last sent request ids to avoid duplicates
212
+ // we only keep the last 30 sets to avoid memory leaks
213
+ const sentRequestIds = new Set();
214
+ const sentRequestIdSets = new Set();
215
+ while (true) {
216
+ // wait for the next poll interval, or if we get notified of a change
217
+ yield* storageReadLatch.await;
218
+ // if we get notified of a change, ensure we start a read immediately
219
+ // next iteration
220
+ storageReadLatch.unsafeClose();
221
+ // the lock is used to ensure resuming entities have a garantee that no
222
+ // more items are added to the unprocessed set while the semaphore is
223
+ // acquired.
224
+ yield* storageReadLock.take(1);
225
+ const messages = yield* storage.unprocessedMessages(acquiredShards);
226
+ const currentSentRequestIds = new Set();
227
+ sentRequestIdSets.add(currentSentRequestIds);
228
+ const send = Effect.catchAllCause(Effect.suspend(() => {
229
+ const message = messages[index];
230
+ if (message._tag === "IncomingRequest") {
231
+ if (sentRequestIds.has(message.envelope.requestId)) {
232
+ return Effect.void;
233
+ }
234
+ sentRequestIds.add(message.envelope.requestId);
235
+ currentSentRequestIds.add(message.envelope.requestId);
236
+ }
237
+ const address = message.envelope.address;
238
+ const state = entityManagers.get(address.entityType);
239
+ if (!state || !acquiredShards.has(address.shardId)) {
240
+ return Effect.void;
241
+ }
242
+ const isProcessing = state.manager.isProcessingFor(message);
243
+ // If the message might affect a currently processing request, we
244
+ // send it to the entity manager to be processed.
245
+ if (message._tag === "IncomingEnvelope" && isProcessing) {
246
+ return state.manager.send(message);
247
+ } else if (isProcessing) {
248
+ return Effect.void;
249
+ }
250
+ // If the entity was resuming in another fiber, we add the message
251
+ // id to the unprocessed set.
252
+ const resumptionState = MutableHashMap.get(entityResumptionState, address);
253
+ if (Option.isSome(resumptionState)) {
254
+ resumptionState.value.unprocessed.add(message.envelope.requestId);
255
+ if (message.envelope._tag === "Interrupt") {
256
+ resumptionState.value.interrupts.set(message.envelope.requestId, message);
257
+ }
258
+ return Effect.void;
259
+ }
260
+ return state.manager.send(message);
261
+ }), cause => {
262
+ const message = messages[index];
263
+ const error = Cause.failureOption(cause);
264
+ // if we get a defect, then update storage
265
+ if (Option.isNone(error)) {
266
+ return storage.saveReply(Reply.ReplyWithContext.fromDefect({
267
+ id: snowflakeGen.unsafeNext(),
268
+ requestId: message.envelope.requestId,
269
+ defect: Cause.squash(cause)
270
+ }));
271
+ }
272
+ if (error.value._tag === "MailboxFull") {
273
+ // MailboxFull can only happen for requests, so this cast is safe
274
+ return resumeEntityFromStorage(message);
275
+ }
276
+ return Effect.void;
277
+ });
278
+ let index = 0;
279
+ yield* Effect.whileLoop({
280
+ while: () => index < messages.length,
281
+ step: () => index++,
282
+ body: constant(send)
283
+ });
284
+ // let the resuming entities check if they are done
285
+ yield* storageReadLock.release(1);
286
+ while (sentRequestIdSets.size > 30) {
287
+ const oldest = Iterable.unsafeHead(sentRequestIdSets);
288
+ sentRequestIdSets.delete(oldest);
289
+ for (const id of oldest) {
290
+ sentRequestIds.delete(id);
291
+ }
292
+ }
293
+ }
294
+ }).pipe(Effect.scoped, Effect.ensuring(storageReadLock.releaseAll), Effect.catchAllCause(cause => Effect.logWarning("Could not read messages from storage", cause)), Effect.repeat(Schedule.spaced(config.entityMessagePollInterval)), Effect.annotateLogs({
295
+ package: "@effect/cluster",
296
+ module: "Sharding",
297
+ fiber: "Storage read loop",
298
+ runner: selfAddress
299
+ }), Effect.interruptible, Effect.forkIn(shardingScope));
300
+ // open the storage latch every poll interval
301
+ yield* storageReadLatch.open.pipe(Effect.delay(config.entityMessagePollInterval), Effect.forever, Effect.interruptible, Effect.forkIn(shardingScope));
302
+ // Resume unprocessed messages for entities that reached a full mailbox.
303
+ const entityResumptionState = MutableHashMap.empty();
304
+ const resumeEntityFromStorage = lastReceivedMessage => {
305
+ const address = lastReceivedMessage.envelope.address;
306
+ const resumptionState = MutableHashMap.get(entityResumptionState, address);
307
+ if (Option.isSome(resumptionState)) {
308
+ resumptionState.value.unprocessed.add(lastReceivedMessage.envelope.requestId);
309
+ return Effect.void;
310
+ }
311
+ MutableHashMap.set(entityResumptionState, address, {
312
+ unprocessed: new Set([lastReceivedMessage.envelope.requestId]),
313
+ interrupts: new Map()
314
+ });
315
+ return resumeEntityFromStorageImpl(address);
316
+ };
317
+ const resumeEntityFromStorageImpl = Effect.fnUntraced(function* (address) {
318
+ const state = entityManagers.get(address.entityType);
319
+ if (!state) {
320
+ MutableHashMap.remove(entityResumptionState, address);
321
+ return;
322
+ }
323
+ const resumptionState = Option.getOrThrow(MutableHashMap.get(entityResumptionState, address));
324
+ let done = false;
325
+ while (!done) {
326
+ // if the shard is no longer assigned to this runner, we stop
327
+ if (!acquiredShards.has(address.shardId)) {
328
+ return;
329
+ }
330
+ // take a batch of unprocessed messages ids
331
+ const messageIds = Arr.empty();
332
+ for (const id of resumptionState.unprocessed) {
333
+ if (messageIds.length === 1024) break;
334
+ messageIds.push(id);
335
+ }
336
+ const messages = yield* storage.unprocessedMessagesById(messageIds);
337
+ // this should not happen, but we handle it just in case
338
+ if (messages.length === 0) {
339
+ yield* Effect.sleep(config.entityMessagePollInterval);
340
+ continue;
341
+ }
342
+ let index = 0;
343
+ const sendWithRetry = Effect.catchTags(Effect.suspend(() => {
344
+ if (!acquiredShards.has(address.shardId)) {
345
+ return Effect.fail(new EntityNotManagedByRunner({
346
+ address
347
+ }));
348
+ }
349
+ const message = messages[index];
350
+ // check if this is a request that was interrupted
351
+ const interrupt = message._tag === "IncomingRequest" && resumptionState.interrupts.get(message.envelope.requestId);
352
+ return interrupt ? Effect.flatMap(state.manager.send(message), () => {
353
+ resumptionState.interrupts.delete(message.envelope.requestId);
354
+ return state.manager.send(interrupt);
355
+ }) : state.manager.send(message);
356
+ }), {
357
+ MailboxFull: () => Effect.delay(sendWithRetry, config.sendRetryInterval),
358
+ AlreadyProcessingMessage: () => Effect.void
359
+ });
360
+ yield* Effect.whileLoop({
361
+ while: () => index < messages.length,
362
+ body: constant(sendWithRetry),
363
+ step: () => index++
364
+ });
365
+ for (const id of messageIds) {
366
+ resumptionState.unprocessed.delete(id);
367
+ }
368
+ if (resumptionState.unprocessed.size > 0) continue;
369
+ // if we have caught up to the main storage loop, we let it take over
370
+ yield* withStorageReadLock(Effect.sync(() => {
371
+ if (resumptionState.unprocessed.size === 0) {
372
+ MutableHashMap.remove(entityResumptionState, address);
373
+ done = true;
374
+ }
375
+ }));
376
+ }
377
+ }, Effect.retry({
378
+ while: e => e._tag === "PersistenceError",
379
+ schedule: Schedule.spaced(config.entityMessagePollInterval)
380
+ }), Effect.catchAllCause(cause => Effect.logError("Could not resume unprocessed messages", cause)), (effect, address) => Effect.annotateLogs(effect, {
381
+ package: "@effect/cluster",
382
+ module: "Sharding",
383
+ fiber: "Resuming unprocessed messages",
384
+ runner: selfAddress,
385
+ entity: address
386
+ }), (effect, address) => Effect.ensuring(effect, Effect.sync(() => MutableHashMap.remove(entityResumptionState, address))), Effect.interruptible, Effect.forkIn(shardingScope));
387
+ }
388
+ // --- Sending messages ---
389
+ const sendLocal = message => Effect.suspend(() => {
390
+ const address = message.envelope.address;
391
+ if (!isEntityOnLocalShards(address)) {
392
+ return Effect.fail(new EntityNotManagedByRunner({
393
+ address
394
+ }));
395
+ }
396
+ const state = entityManagers.get(address.entityType);
397
+ if (!state) {
398
+ return Effect.fail(new EntityNotManagedByRunner({
399
+ address
400
+ }));
401
+ }
402
+ return message._tag === "IncomingRequest" || message._tag === "IncomingEnvelope" ? state.manager.send(message) : runners.sendLocal({
403
+ message,
404
+ send: state.manager.sendLocal,
405
+ simulateRemoteSerialization: config.simulateRemoteSerialization
406
+ });
407
+ });
408
+ const notifyLocal = (message, discard) => Effect.suspend(() => {
409
+ const address = message.envelope.address;
410
+ if (!isEntityOnLocalShards(address)) {
411
+ return Effect.fail(new EntityNotManagedByRunner({
412
+ address
413
+ }));
414
+ }
415
+ const notify = storageEnabled ? openStorageReadLatch : () => Effect.dieMessage("Sharding.notifyLocal: storage is disabled");
416
+ return message._tag === "IncomingRequest" || message._tag === "IncomingEnvelope" ? notify() : runners.notifyLocal({
417
+ message,
418
+ notify,
419
+ discard
420
+ });
421
+ });
422
+ const isTransientError = Predicate.or(RunnerUnavailable.is, EntityNotManagedByRunner.is);
423
+ function sendOutgoing(message, discard, retries) {
424
+ return Effect.catchIf(Effect.suspend(() => {
425
+ const address = message.envelope.address;
426
+ const maybeRunner = MutableHashMap.get(shardAssignments, address.shardId);
427
+ const isPersisted = storageEnabled && Context.get(message.rpc.annotations, Persisted);
428
+ const runnerIsLocal = Option.isSome(maybeRunner) && isLocalRunner(maybeRunner.value);
429
+ if (isPersisted) {
430
+ return runnerIsLocal ? notifyLocal(message, discard) : runners.notify({
431
+ address: maybeRunner,
432
+ message,
433
+ discard
434
+ });
435
+ } else if (Option.isNone(maybeRunner)) {
436
+ return Effect.fail(new EntityNotManagedByRunner({
437
+ address
438
+ }));
439
+ }
440
+ return runnerIsLocal ? sendLocal(message) : runners.send({
441
+ address: maybeRunner.value,
442
+ message
443
+ });
444
+ }), isTransientError, error => {
445
+ if (retries === 0) {
446
+ return Effect.die(error);
447
+ }
448
+ return Effect.delay(sendOutgoing(message, discard, retries && retries - 1), config.sendRetryInterval);
449
+ });
450
+ }
451
+ // --- Shard Manager sync ---
452
+ const shardManagerTimeoutFiber = yield* FiberHandle.make().pipe(Scope.extend(shardingScope));
453
+ const startShardManagerTimeout = FiberHandle.run(shardManagerTimeoutFiber, Effect.flatMap(Effect.sleep(config.shardManagerUnavailableTimeout), () => {
454
+ MutableHashMap.clear(shardAssignments);
455
+ return clearSelfShards;
456
+ }), {
457
+ onlyIfMissing: true
458
+ });
459
+ const stopShardManagerTimeout = FiberHandle.clear(shardManagerTimeoutFiber);
460
+ // Every time the link to the shard manager is lost, we re-register the runner
461
+ // and re-subscribe to sharding events
462
+ yield* Effect.gen(function* () {
463
+ yield* Effect.logDebug("Registering with shard manager");
464
+ if (Option.isSome(config.runnerAddress)) {
465
+ const machineId = yield* shardManager.register(config.runnerAddress.value);
466
+ yield* snowflakeGen.setMachineId(machineId);
467
+ }
468
+ yield* stopShardManagerTimeout;
469
+ yield* Effect.logDebug("Subscribing to sharding events");
470
+ const mailbox = yield* shardManager.shardingEvents;
471
+ const startedLatch = yield* Effect.makeLatch(false);
472
+ const eventsFiber = yield* Effect.gen(function* () {
473
+ while (true) {
474
+ const [events] = yield* mailbox.takeAll;
475
+ for (const event of events) {
476
+ yield* Effect.logDebug("Received sharding event", event);
477
+ switch (event._tag) {
478
+ case "StreamStarted":
479
+ {
480
+ yield* startedLatch.open;
481
+ break;
482
+ }
483
+ case "ShardsAssigned":
484
+ {
485
+ for (const shard of event.shards) {
486
+ MutableHashMap.set(shardAssignments, shard, event.address);
487
+ }
488
+ if (!MutableRef.get(isShutdown) && isLocalRunner(event.address)) {
489
+ for (const shardId of event.shards) {
490
+ if (selfShards.has(shardId)) continue;
491
+ selfShards.add(shardId);
492
+ }
493
+ yield* activeShardsLatch.open;
494
+ }
495
+ break;
496
+ }
497
+ case "ShardsUnassigned":
498
+ {
499
+ for (const shard of event.shards) {
500
+ MutableHashMap.remove(shardAssignments, shard);
501
+ }
502
+ if (isLocalRunner(event.address)) {
503
+ for (const shard of event.shards) {
504
+ selfShards.delete(shard);
505
+ }
506
+ yield* activeShardsLatch.open;
507
+ }
508
+ break;
509
+ }
510
+ }
511
+ }
512
+ }
513
+ }).pipe(Effect.forkScoped);
514
+ // Wait for the stream to be established
515
+ yield* startedLatch.await;
516
+ // perform a full sync every config.refreshAssignmentsInterval
517
+ const syncFiber = yield* syncAssignments.pipe(Effect.andThen(Effect.sleep(config.refreshAssignmentsInterval)), Effect.forever, Effect.forkScoped);
518
+ yield* Fiber.joinAll([eventsFiber, syncFiber]);
519
+ }).pipe(Effect.scoped, Effect.catchAllCause(cause => Effect.logDebug(cause)), Effect.zipRight(startShardManagerTimeout), Effect.repeat(Schedule.exponential(1000).pipe(Schedule.union(Schedule.spaced(10_000)))), Effect.annotateLogs({
520
+ package: "@effect/cluster",
521
+ module: "Sharding",
522
+ fiber: "ShardManager sync",
523
+ runner: config.runnerAddress
524
+ }), Effect.interruptible, Effect.forkIn(shardingScope));
525
+ const syncAssignments = Effect.gen(function* () {
526
+ const assignments = yield* shardManager.getAssignments;
527
+ yield* Effect.logDebug("Received shard assignments", assignments);
528
+ for (const [shardId, runner] of assignments) {
529
+ if (Option.isNone(runner)) {
530
+ MutableHashMap.remove(shardAssignments, shardId);
531
+ selfShards.delete(shardId);
532
+ continue;
533
+ }
534
+ MutableHashMap.set(shardAssignments, shardId, runner.value);
535
+ if (!isLocalRunner(runner.value)) {
536
+ selfShards.delete(shardId);
537
+ continue;
538
+ }
539
+ if (MutableRef.get(isShutdown) || selfShards.has(shardId)) {
540
+ continue;
541
+ }
542
+ selfShards.add(shardId);
543
+ }
544
+ yield* activeShardsLatch.open;
545
+ });
546
+ const clientRequests = new Map();
+ const clients = yield* ResourceMap.make(Effect.fnUntraced(function* (entity) {
+ const client = yield* RpcClient.makeNoSerialization(entity.protocol, {
+ supportsAck: true,
+ generateRequestId: () => RequestId(snowflakeGen.unsafeNext()),
+ onFromClient(options) {
+ const address = Context.unsafeGet(options.context, ClientAddressTag);
+ switch (options.message._tag) {
+ case "Request":
+ {
+ const fiber = Option.getOrThrow(Fiber.getCurrentFiber());
+ const id = Snowflake.Snowflake(options.message.id);
+ const rpc = entity.protocol.requests.get(options.message.tag);
+ let respond;
+ if (!options.discard) {
+ const entry = {
+ rpc: rpc,
+ context: fiber.currentContext
+ };
+ clientRequests.set(id, entry);
+ respond = makeClientRespond(entry, client.write);
+ } else {
+ respond = clientRespondDiscard;
+ }
+ return sendOutgoing(new Message.OutgoingRequest({
+ envelope: Envelope.makeRequest({
+ requestId: id,
+ address,
+ tag: options.message.tag,
+ payload: options.message.payload,
+ headers: options.message.headers,
+ traceId: options.message.traceId,
+ spanId: options.message.spanId,
+ sampled: options.message.sampled
+ }),
+ lastReceivedReply: Option.none(),
+ rpc,
+ context: fiber.currentContext,
+ respond
+ }), options.discard);
+ }
+ case "Ack":
+ {
+ const requestId = Snowflake.Snowflake(options.message.requestId);
+ const entry = clientRequests.get(requestId);
+ if (!entry) return Effect.void;
+ return sendOutgoing(new Message.OutgoingEnvelope({
+ envelope: new Envelope.AckChunk({
+ id: snowflakeGen.unsafeNext(),
+ address,
+ requestId,
+ replyId: entry.lastChunkId
+ }),
+ rpc: entry.rpc
+ }), false);
+ }
+ case "Interrupt":
+ {
+ const requestId = Snowflake.Snowflake(options.message.requestId);
+ const entry = clientRequests.get(requestId);
+ if (!entry) return Effect.void;
+ clientRequests.delete(requestId);
+ // for durable messages, we ignore interrupts on shutdown or as a
+ // result of a shard being reassigned
+ const isTransientInterrupt = MutableRef.get(isShutdown) || options.message.interruptors.some(id => internalInterruptors.has(id));
+ if (isTransientInterrupt && storageEnabled && Context.get(entry.rpc.annotations, Persisted)) {
+ return Effect.void;
+ }
+ return Effect.ignore(sendOutgoing(new Message.OutgoingEnvelope({
+ envelope: new Envelope.Interrupt({
+ id: snowflakeGen.unsafeNext(),
+ address,
+ requestId
+ }),
+ rpc: entry.rpc
+ }), false, 3));
+ }
+ }
+ return Effect.void;
+ }
+ });
+ const wrappedClient = {};
+ for (const method of Object.keys(client.client)) {
+ wrappedClient[method] = function (payload, options) {
+ return client.client[method](payload, {
+ ...options,
+ context: options?.context ? Context.merge(options.context, this[currentClientAddress]) : this[currentClientAddress]
+ });
+ };
+ }
+ yield* Scope.addFinalizer(yield* Effect.scope, Effect.withFiberRuntime(fiber => {
+ internalInterruptors.add(fiber.id());
+ return Effect.void;
+ }));
+ return entityId => {
+ const id = EntityId.make(entityId);
+ return {
+ ...wrappedClient,
+ [currentClientAddress]: ClientAddressTag.context(EntityAddress.make({
+ shardId: getShardId(id),
+ entityId: id,
+ entityType: entity.type
+ }))
+ };
+ };
+ }));
+ const makeClient = entity => clients.get(entity);
+ const clientRespondDiscard = _reply => Effect.void;
+ const makeClientRespond = (entry, write) => reply => {
+ switch (reply._tag) {
+ case "Chunk":
+ {
+ entry.lastChunkId = reply.id;
+ return write({
+ _tag: "Chunk",
+ clientId: 0,
+ requestId: RequestId(reply.requestId),
+ values: reply.values
+ });
+ }
+ case "WithExit":
+ {
+ clientRequests.delete(reply.requestId);
+ return write({
+ _tag: "Exit",
+ clientId: 0,
+ requestId: RequestId(reply.requestId),
+ exit: reply.exit
+ });
+ }
+ }
+ };
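Because the `ResourceMap` caches one `RpcClient` per entity type and the returned function only swaps in a per-entity `ClientAddress`, `makeClient` is cheap to call per entity id. A hedged sketch of the resulting calling pattern, assuming the `Sharding` tag exported by this module and a hypothetical `Counter` entity whose protocol has an `Increment` rpc:

```ts
import { Effect } from "effect"
import * as Sharding from "@effect/cluster/Sharding"

// Hypothetical entity defined elsewhere in the application.
declare const Counter: any

const program = Effect.gen(function* () {
  const sharding = yield* Sharding.Sharding
  // makeClient resolves to a function from entity id to a client that is
  // already addressed at the correct shard (see wrappedClient above).
  const counterFor = yield* sharding.makeClient(Counter)
  yield* counterFor("user-123").Increment({ amount: 1 })
})
```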
+ // --- Entities ---
+ const context = yield* Effect.context();
+ const reaper = yield* EntityReaper;
+ const registerEntity = Effect.fnUntraced(function* (entity, build, options) {
+ if (entityManagers.has(entity.type)) return;
+ const scope = yield* Scope.make();
+ const manager = yield* EntityManager.make(entity, build, {
+ ...options,
+ storage,
+ runnerAddress: Option.getOrThrow(config.runnerAddress),
+ sharding
+ }).pipe(Effect.provide(context.pipe(Context.add(EntityReaper, reaper), Context.add(Scope.Scope, scope), Context.add(Snowflake.Generator, snowflakeGen))));
+ entityManagers.set(entity.type, {
+ entity,
+ scope,
+ manager
+ });
+ yield* Scope.addFinalizer(scope, Effect.sync(() => entityManagers.delete(entity.type)));
+ yield* PubSub.publish(events, EntityRegistered({
+ entity
+ }));
+ });
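`registerEntity` builds an `EntityManager` in its own scope, records it in `entityManagers`, publishes an `EntityRegistered` event, and silently returns if the entity type is already registered. A rough sketch of wiring this into application start-up, where `Counter` and its behaviour are hypothetical placeholders rather than part of this diff:

```ts
import { Effect, Layer } from "effect"
import * as Sharding from "@effect/cluster/Sharding"

declare const Counter: any          // hypothetical entity definition
declare const CounterBehaviour: any // hypothetical `build` argument

// Registering an entity is an effect on the Sharding service, so it is
// typically performed once from a start-up Layer.
const CounterLive = Layer.effectDiscard(
  Effect.gen(function* () {
    const sharding = yield* Sharding.Sharding
    yield* sharding.registerEntity(Counter, CounterBehaviour)
  })
)
```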
+ yield* Scope.addFinalizerExit(shardingScope, exit => Effect.forEach(entityManagers.values(), state => Effect.catchAllCause(Scope.close(state.scope, exit), cause => Effect.annotateLogs(Effect.logError("Error closing entity manager", cause), {
+ entity: state.entity.type
+ })), {
+ concurrency: "unbounded",
+ discard: true
+ }));
+ // --- Finalization ---
+ if (Option.isSome(config.runnerAddress)) {
+ const selfAddress = config.runnerAddress.value;
+ // Unregister runner from shard manager when scope is closed
+ yield* Scope.addFinalizer(shardingScope, Effect.gen(function* () {
+ yield* Effect.logDebug("Unregistering runner from shard manager", selfAddress);
+ yield* shardManager.unregister(selfAddress).pipe(Effect.catchAllCause(cause => Effect.logError("Error calling unregister with shard manager", cause)));
+ yield* clearSelfShards;
+ }));
+ }
+ yield* Scope.addFinalizer(shardingScope, Effect.withFiberRuntime(fiber => {
+ MutableRef.set(isShutdown, true);
+ internalInterruptors.add(fiber.id());
+ return Effect.void;
+ }));
+ const sharding = Sharding.of({
+ getRegistrationEvents,
+ getShardId,
+ isShutdown: Effect.sync(() => MutableRef.get(isShutdown)),
+ registerEntity,
+ registerSingleton,
+ makeClient,
+ send: sendLocal,
+ notify: message => notifyLocal(message, false)
+ });
+ return sharding;
+ });
  /**
   * @since 1.0.0
   * @category layers
   */
- export const live = internal.live;
- /**
- * Notify the shard manager that shards can now be assigned to this pod.
- *
- * @since 1.0.0
- * @category utils
- */
- export const register = internal.register;
- /**
- * Notify the shard manager that shards must be unassigned from this pod.
- *
- * @since 1.0.0
- * @category utils
- */
- export const unregister = internal.unregister;
- /**
- * Same as `register`, but will automatically call `unregister` when the `Scope` is terminated.
- *
- * @since 1.0.0
- * @category utils
- */
- export const registerScoped = internal.registerScoped;
- /**
- * Start a computation that is guaranteed to run only on a single pod.
- * Each pod should call `registerSingleton` but only a single pod will actually run it at any given time.
- *
- * @since 1.0.0
- * @category utils
- */
- export const registerSingleton = internal.registerSingleton;
- /**
- * Register a new entity type, allowing pods to send messages to entities of this type.
- *
- * @since 1.0.0
- * @category utils
- */
- export const registerEntity = internal.registerEntity;
- /**
- * Register a new topic type, allowing pods to broadcast messages to subscribers.
- *
- * @since 1.0.0
- * @category utils
- */
- export const registerTopic = internal.registerTopic;
- /**
- * Get an object that allows sending messages to a given entity type.
- * You can provide a custom send timeout to override the one globally defined.
- *
- * @since 1.0.0
- * @category utils
- */
- export const messenger = internal.messenger;
- /**
- * Get an object that allows broadcasting messages to a given topic type.
- * You can provide a custom send timeout to override the one globally defined.
- *
- * @since 1.0.0
- * @category utils
- */
- export const broadcaster = internal.broadcaster;
- /**
- * Get the list of pods currently registered to the Shard Manager.
- *
- * @since 1.0.0
- * @category utils
- */
- export const getPods = internal.getPods;
- /**
- * Sends a raw message to the local entity manager without performing retries.
- * Those are up to the caller.
- *
- * @since 1.0.0
- * @category utils
- */
- export const sendMessageToLocalEntityManagerWithoutRetries = internal.sendMessageToLocalEntityManagerWithoutRetries;
- /**
- * Gets the list of shardIds assigned to the current Pod.
- *
- * @since 1.0.0
- * @category utils
- */
- export const getAssignedShardIds = internal.getAssignedShardIds;
+ export const layer = /*#__PURE__*/Layer.scoped(Sharding, make).pipe( /*#__PURE__*/Layer.provide([Snowflake.layerGenerator, EntityReaper.Default]));
+ // Utilities
+ const ClientAddressTag = /*#__PURE__*/Context.GenericTag("@effect/cluster/Sharding/ClientAddress");
+ const currentClientAddress = /*#__PURE__*/Symbol.for(ClientAddressTag.key);
  //# sourceMappingURL=Sharding.js.map
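Taken together, this file replaces the 0.28.x free-standing exports (`register`, `registerScoped`, `registerEntity`, `messenger`, `broadcaster`, `getPods`, ...) with a single `Sharding` service plus a `layer` that constructs it; the old capabilities now live on the service itself (`registerEntity`, `registerSingleton`, `makeClient`, `send`, `notify`). A hedged sketch of the new wiring, with the layers that satisfy `Sharding.layer`'s remaining requirements (runner config, storage, shard-manager client) left abstract since they are defined in other modules of this release:

```ts
import { Effect, Layer } from "effect"
import * as Sharding from "@effect/cluster/Sharding"

// Placeholder for the rest of the cluster wiring; the concrete layers
// (runners, storage, shard manager client, ...) are outside this file's diff.
declare const ClusterInfrastructure: Layer.Layer<any>

const program = Effect.gen(function* () {
  const sharding = yield* Sharding.Sharding
  const down = yield* sharding.isShutdown
  yield* Effect.logInfo("sharding shutting down?", down)
})

// Sharding.layer already bundles Snowflake.layerGenerator and
// EntityReaper.Default via the Layer.provide call shown above.
const main = program.pipe(
  Effect.provide(Sharding.layer.pipe(Layer.provide(ClusterInfrastructure)))
)
```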