@effect/cluster 0.28.4 → 0.29.1

This diff shows the changes between publicly available package versions as published to one of the supported registries. It is provided for informational purposes only.
Files changed (626)
  1. package/ClusterError/package.json +6 -0
  2. package/ClusterMetrics/package.json +6 -0
  3. package/ClusterSchema/package.json +6 -0
  4. package/DeliverAt/package.json +6 -0
  5. package/Entity/package.json +6 -0
  6. package/EntityAddress/package.json +6 -0
  7. package/EntityId/package.json +6 -0
  8. package/EntityType/package.json +6 -0
  9. package/Envelope/package.json +6 -0
  10. package/HttpCommon/package.json +6 -0
  11. package/HttpRunner/package.json +6 -0
  12. package/HttpShardManager/package.json +6 -0
  13. package/MachineId/package.json +6 -0
  14. package/MessageStorage/package.json +6 -0
  15. package/README.md +2 -2
  16. package/Reply/package.json +6 -0
  17. package/Runner/package.json +6 -0
  18. package/RunnerAddress/package.json +6 -0
  19. package/RunnerHealth/package.json +6 -0
  20. package/RunnerServer/package.json +6 -0
  21. package/Runners/package.json +6 -0
  22. package/ShardStorage/package.json +6 -0
  23. package/Singleton/package.json +6 -0
  24. package/SingletonAddress/package.json +6 -0
  25. package/Snowflake/package.json +6 -0
  26. package/SocketRunner/package.json +6 -0
  27. package/SocketShardManager/package.json +6 -0
  28. package/SqlMessageStorage/package.json +6 -0
  29. package/SqlShardStorage/package.json +6 -0
  30. package/SynchronizedClock/package.json +6 -0
  31. package/dist/cjs/ClusterError.js +180 -0
  32. package/dist/cjs/ClusterError.js.map +1 -0
  33. package/dist/cjs/ClusterMetrics.js +63 -0
  34. package/dist/cjs/ClusterMetrics.js.map +1 -0
  35. package/dist/cjs/{Pods.js → ClusterSchema.js} +10 -22
  36. package/dist/cjs/ClusterSchema.js.map +1 -0
  37. package/dist/cjs/DeliverAt.js +30 -0
  38. package/dist/cjs/DeliverAt.js.map +1 -0
  39. package/dist/cjs/Entity.js +187 -0
  40. package/dist/cjs/Entity.js.map +1 -0
  41. package/dist/cjs/EntityAddress.js +54 -0
  42. package/dist/cjs/EntityAddress.js.map +1 -0
  43. package/dist/cjs/{AtLeastOnce.js → EntityId.js} +6 -7
  44. package/dist/cjs/EntityId.js.map +1 -0
  45. package/dist/cjs/{ShardManagerClient.js → EntityType.js} +5 -16
  46. package/dist/cjs/EntityType.js.map +1 -0
  47. package/dist/cjs/Envelope.js +168 -0
  48. package/dist/cjs/Envelope.js.map +1 -0
  49. package/dist/cjs/HttpCommon.js +49 -0
  50. package/dist/cjs/HttpCommon.js.map +1 -0
  51. package/dist/cjs/HttpRunner.js +108 -0
  52. package/dist/cjs/HttpRunner.js.map +1 -0
  53. package/dist/cjs/HttpShardManager.js +140 -0
  54. package/dist/cjs/HttpShardManager.js.map +1 -0
  55. package/dist/cjs/{AtLeastOnceStorage.js → MachineId.js} +11 -9
  56. package/dist/cjs/MachineId.js.map +1 -0
  57. package/dist/cjs/Message.js +99 -18
  58. package/dist/cjs/Message.js.map +1 -1
  59. package/dist/cjs/MessageStorage.js +356 -0
  60. package/dist/cjs/MessageStorage.js.map +1 -0
  61. package/dist/cjs/Reply.js +200 -0
  62. package/dist/cjs/Reply.js.map +1 -0
  63. package/dist/cjs/Runner.js +79 -0
  64. package/dist/cjs/Runner.js.map +1 -0
  65. package/dist/cjs/RunnerAddress.js +63 -0
  66. package/dist/cjs/RunnerAddress.js.map +1 -0
  67. package/dist/cjs/RunnerHealth.js +68 -0
  68. package/dist/cjs/RunnerHealth.js.map +1 -0
  69. package/dist/cjs/RunnerServer.js +125 -0
  70. package/dist/cjs/RunnerServer.js.map +1 -0
  71. package/dist/cjs/Runners.js +344 -0
  72. package/dist/cjs/Runners.js.map +1 -0
  73. package/dist/cjs/ShardId.js +7 -46
  74. package/dist/cjs/ShardId.js.map +1 -1
  75. package/dist/cjs/ShardManager.js +493 -8
  76. package/dist/cjs/ShardManager.js.map +1 -1
  77. package/dist/cjs/ShardStorage.js +139 -0
  78. package/dist/cjs/ShardStorage.js.map +1 -0
  79. package/dist/cjs/Sharding.js +731 -91
  80. package/dist/cjs/Sharding.js.map +1 -1
  81. package/dist/cjs/ShardingConfig.js +85 -18
  82. package/dist/cjs/ShardingConfig.js.map +1 -1
  83. package/dist/cjs/ShardingRegistrationEvent.js +26 -32
  84. package/dist/cjs/ShardingRegistrationEvent.js.map +1 -1
  85. package/dist/cjs/{ManagerConfig.js → Singleton.js} +11 -20
  86. package/dist/cjs/Singleton.js.map +1 -0
  87. package/dist/cjs/SingletonAddress.js +50 -0
  88. package/dist/cjs/SingletonAddress.js.map +1 -0
  89. package/dist/cjs/Snowflake.js +133 -0
  90. package/dist/cjs/Snowflake.js.map +1 -0
  91. package/dist/cjs/SocketRunner.js +40 -0
  92. package/dist/cjs/SocketRunner.js.map +1 -0
  93. package/dist/cjs/SocketShardManager.js +33 -0
  94. package/dist/cjs/SocketShardManager.js.map +1 -0
  95. package/dist/cjs/SqlMessageStorage.js +668 -0
  96. package/dist/cjs/SqlMessageStorage.js.map +1 -0
  97. package/dist/cjs/SqlShardStorage.js +228 -0
  98. package/dist/cjs/SqlShardStorage.js.map +1 -0
  99. package/dist/cjs/SynchronizedClock.js +66 -0
  100. package/dist/cjs/SynchronizedClock.js.map +1 -0
  101. package/dist/cjs/index.js +57 -45
  102. package/dist/cjs/internal/entityManager.js +311 -143
  103. package/dist/cjs/internal/entityManager.js.map +1 -1
  104. package/dist/cjs/internal/entityReaper.js +47 -0
  105. package/dist/cjs/internal/entityReaper.js.map +1 -0
  106. package/dist/cjs/internal/hash.js +20 -0
  107. package/dist/cjs/internal/hash.js.map +1 -0
  108. package/dist/cjs/internal/interruptors.js +9 -0
  109. package/dist/cjs/internal/interruptors.js.map +1 -0
  110. package/dist/cjs/internal/resourceMap.js +88 -0
  111. package/dist/cjs/internal/resourceMap.js.map +1 -0
  112. package/dist/cjs/internal/resourceRef.js +92 -0
  113. package/dist/cjs/internal/resourceRef.js.map +1 -0
  114. package/dist/cjs/internal/shardManager.js +219 -235
  115. package/dist/cjs/internal/shardManager.js.map +1 -1
  116. package/dist/dts/ClusterError.d.ts +169 -0
  117. package/dist/dts/ClusterError.d.ts.map +1 -0
  118. package/dist/dts/ClusterMetrics.d.ts +50 -0
  119. package/dist/dts/ClusterMetrics.d.ts.map +1 -0
  120. package/dist/dts/ClusterSchema.d.ts +13 -0
  121. package/dist/dts/ClusterSchema.d.ts.map +1 -0
  122. package/dist/dts/DeliverAt.d.ts +27 -0
  123. package/dist/dts/DeliverAt.d.ts.map +1 -0
  124. package/dist/dts/Entity.d.ts +180 -0
  125. package/dist/dts/Entity.d.ts.map +1 -0
  126. package/dist/dts/EntityAddress.d.ts +55 -0
  127. package/dist/dts/EntityAddress.d.ts.map +1 -0
  128. package/dist/dts/EntityId.d.ts +15 -0
  129. package/dist/dts/EntityId.d.ts.map +1 -0
  130. package/dist/dts/EntityType.d.ts +15 -0
  131. package/dist/dts/EntityType.d.ts.map +1 -0
  132. package/dist/dts/Envelope.d.ts +252 -0
  133. package/dist/dts/Envelope.d.ts.map +1 -0
  134. package/dist/dts/HttpCommon.d.ts +25 -0
  135. package/dist/dts/HttpCommon.d.ts.map +1 -0
  136. package/dist/dts/HttpRunner.d.ts +76 -0
  137. package/dist/dts/HttpRunner.d.ts.map +1 -0
  138. package/dist/dts/HttpShardManager.d.ts +119 -0
  139. package/dist/dts/HttpShardManager.d.ts.map +1 -0
  140. package/dist/dts/MachineId.d.ts +20 -0
  141. package/dist/dts/MachineId.d.ts.map +1 -0
  142. package/dist/dts/Message.d.ts +91 -74
  143. package/dist/dts/Message.d.ts.map +1 -1
  144. package/dist/dts/MessageStorage.d.ts +336 -0
  145. package/dist/dts/MessageStorage.d.ts.map +1 -0
  146. package/dist/dts/Reply.d.ts +171 -0
  147. package/dist/dts/Reply.d.ts.map +1 -0
  148. package/dist/dts/Runner.d.ts +81 -0
  149. package/dist/dts/Runner.d.ts.map +1 -0
  150. package/dist/dts/RunnerAddress.d.ts +56 -0
  151. package/dist/dts/RunnerAddress.d.ts.map +1 -0
  152. package/dist/dts/RunnerHealth.d.ts +54 -0
  153. package/dist/dts/RunnerHealth.d.ts.map +1 -0
  154. package/dist/dts/RunnerServer.d.ts +44 -0
  155. package/dist/dts/RunnerServer.d.ts.map +1 -0
  156. package/dist/dts/Runners.d.ts +161 -0
  157. package/dist/dts/Runners.d.ts.map +1 -0
  158. package/dist/dts/ShardId.d.ts +5 -55
  159. package/dist/dts/ShardId.d.ts.map +1 -1
  160. package/dist/dts/ShardManager.d.ts +435 -23
  161. package/dist/dts/ShardManager.d.ts.map +1 -1
  162. package/dist/dts/ShardStorage.d.ts +200 -0
  163. package/dist/dts/ShardStorage.d.ts.map +1 -0
  164. package/dist/dts/Sharding.d.ts +64 -133
  165. package/dist/dts/Sharding.d.ts.map +1 -1
  166. package/dist/dts/ShardingConfig.d.ts +147 -44
  167. package/dist/dts/ShardingConfig.d.ts.map +1 -1
  168. package/dist/dts/ShardingRegistrationEvent.d.ts +38 -23
  169. package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -1
  170. package/dist/dts/Singleton.d.ts +13 -0
  171. package/dist/dts/Singleton.d.ts.map +1 -0
  172. package/dist/dts/SingletonAddress.d.ts +49 -0
  173. package/dist/dts/SingletonAddress.d.ts.map +1 -0
  174. package/dist/dts/Snowflake.d.ts +121 -0
  175. package/dist/dts/Snowflake.d.ts.map +1 -0
  176. package/dist/dts/SocketRunner.d.ts +22 -0
  177. package/dist/dts/SocketRunner.d.ts.map +1 -0
  178. package/dist/dts/SocketShardManager.d.ts +17 -0
  179. package/dist/dts/SocketShardManager.d.ts.map +1 -0
  180. package/dist/dts/SqlMessageStorage.d.ts +43 -0
  181. package/dist/dts/SqlMessageStorage.d.ts.map +1 -0
  182. package/dist/dts/SqlShardStorage.d.ts +38 -0
  183. package/dist/dts/SqlShardStorage.d.ts.map +1 -0
  184. package/dist/dts/SynchronizedClock.d.ts +19 -0
  185. package/dist/dts/SynchronizedClock.d.ts.map +1 -0
  186. package/dist/dts/index.d.ts +48 -24
  187. package/dist/dts/index.d.ts.map +1 -1
  188. package/dist/dts/internal/entityReaper.d.ts +2 -0
  189. package/dist/dts/internal/entityReaper.d.ts.map +1 -0
  190. package/dist/dts/internal/hash.d.ts +2 -0
  191. package/dist/dts/internal/hash.d.ts.map +1 -0
  192. package/dist/dts/internal/interruptors.d.ts +2 -0
  193. package/dist/dts/internal/interruptors.d.ts.map +1 -0
  194. package/dist/dts/internal/resourceMap.d.ts +22 -0
  195. package/dist/dts/internal/resourceMap.d.ts.map +1 -0
  196. package/dist/dts/internal/resourceRef.d.ts +25 -0
  197. package/dist/dts/internal/resourceRef.d.ts.map +1 -0
  198. package/dist/dts/internal/shardManager.d.ts +1 -11
  199. package/dist/dts/internal/shardManager.d.ts.map +1 -1
  200. package/dist/esm/ClusterError.js +164 -0
  201. package/dist/esm/ClusterError.js.map +1 -0
  202. package/dist/esm/ClusterMetrics.js +54 -0
  203. package/dist/esm/ClusterMetrics.js.map +1 -0
  204. package/dist/esm/ClusterSchema.js +13 -0
  205. package/dist/esm/ClusterSchema.js.map +1 -0
  206. package/dist/esm/DeliverAt.js +22 -0
  207. package/dist/esm/DeliverAt.js.map +1 -0
  208. package/dist/esm/Entity.js +173 -0
  209. package/dist/esm/Entity.js.map +1 -0
  210. package/dist/esm/EntityAddress.js +44 -0
  211. package/dist/esm/EntityAddress.js.map +1 -0
  212. package/dist/esm/EntityId.js +10 -0
  213. package/dist/esm/EntityId.js.map +1 -0
  214. package/dist/esm/EntityType.js +10 -0
  215. package/dist/esm/EntityType.js.map +1 -0
  216. package/dist/esm/Envelope.js +154 -0
  217. package/dist/esm/Envelope.js.map +1 -0
  218. package/dist/esm/HttpCommon.js +38 -0
  219. package/dist/esm/HttpCommon.js.map +1 -0
  220. package/dist/esm/HttpRunner.js +98 -0
  221. package/dist/esm/HttpRunner.js.map +1 -0
  222. package/dist/esm/HttpShardManager.js +128 -0
  223. package/dist/esm/HttpShardManager.js.map +1 -0
  224. package/dist/esm/MachineId.js +17 -0
  225. package/dist/esm/MachineId.js.map +1 -0
  226. package/dist/esm/Message.js +88 -17
  227. package/dist/esm/Message.js.map +1 -1
  228. package/dist/esm/MessageStorage.js +345 -0
  229. package/dist/esm/MessageStorage.js.map +1 -0
  230. package/dist/esm/Reply.js +184 -0
  231. package/dist/esm/Reply.js.map +1 -0
  232. package/dist/esm/Runner.js +68 -0
  233. package/dist/esm/Runner.js.map +1 -0
  234. package/dist/esm/RunnerAddress.js +52 -0
  235. package/dist/esm/RunnerAddress.js.map +1 -0
  236. package/dist/esm/RunnerHealth.js +58 -0
  237. package/dist/esm/RunnerHealth.js.map +1 -0
  238. package/dist/esm/RunnerServer.js +116 -0
  239. package/dist/esm/RunnerServer.js.map +1 -0
  240. package/dist/esm/Runners.js +332 -0
  241. package/dist/esm/Runners.js.map +1 -0
  242. package/dist/esm/ShardId.js +5 -42
  243. package/dist/esm/ShardId.js.map +1 -1
  244. package/dist/esm/ShardManager.js +486 -7
  245. package/dist/esm/ShardManager.js.map +1 -1
  246. package/dist/esm/ShardStorage.js +129 -0
  247. package/dist/esm/ShardStorage.js.map +1 -0
  248. package/dist/esm/Sharding.js +729 -90
  249. package/dist/esm/Sharding.js.map +1 -1
  250. package/dist/esm/ShardingConfig.js +80 -17
  251. package/dist/esm/ShardingConfig.js.map +1 -1
  252. package/dist/esm/ShardingRegistrationEvent.js +19 -29
  253. package/dist/esm/ShardingRegistrationEvent.js.map +1 -1
  254. package/dist/esm/Singleton.js +15 -0
  255. package/dist/esm/Singleton.js.map +1 -0
  256. package/dist/esm/SingletonAddress.js +40 -0
  257. package/dist/esm/SingletonAddress.js.map +1 -0
  258. package/dist/esm/Snowflake.js +117 -0
  259. package/dist/esm/Snowflake.js.map +1 -0
  260. package/dist/esm/SocketRunner.js +31 -0
  261. package/dist/esm/SocketRunner.js.map +1 -0
  262. package/dist/esm/SocketShardManager.js +24 -0
  263. package/dist/esm/SocketShardManager.js.map +1 -0
  264. package/dist/esm/SqlMessageStorage.js +658 -0
  265. package/dist/esm/SqlMessageStorage.js.map +1 -0
  266. package/dist/esm/SqlShardStorage.js +218 -0
  267. package/dist/esm/SqlShardStorage.js.map +1 -0
  268. package/dist/esm/SynchronizedClock.js +57 -0
  269. package/dist/esm/SynchronizedClock.js.map +1 -0
  270. package/dist/esm/index.js +48 -24
  271. package/dist/esm/index.js.map +1 -1
  272. package/dist/esm/internal/entityManager.js +311 -142
  273. package/dist/esm/internal/entityManager.js.map +1 -1
  274. package/dist/esm/internal/entityReaper.js +38 -0
  275. package/dist/esm/internal/entityReaper.js.map +1 -0
  276. package/dist/esm/internal/hash.js +12 -0
  277. package/dist/esm/internal/hash.js.map +1 -0
  278. package/dist/esm/internal/interruptors.js +3 -0
  279. package/dist/esm/internal/interruptors.js.map +1 -0
  280. package/dist/esm/internal/resourceMap.js +79 -0
  281. package/dist/esm/internal/resourceMap.js.map +1 -0
  282. package/dist/esm/internal/resourceRef.js +83 -0
  283. package/dist/esm/internal/resourceRef.js.map +1 -0
  284. package/dist/esm/internal/shardManager.js +217 -233
  285. package/dist/esm/internal/shardManager.js.map +1 -1
  286. package/package.json +212 -154
  287. package/src/ClusterError.ts +193 -0
  288. package/src/ClusterMetrics.ts +62 -0
  289. package/src/ClusterSchema.ts +13 -0
  290. package/src/DeliverAt.ts +36 -0
  291. package/src/Entity.ts +438 -0
  292. package/src/EntityAddress.ts +55 -0
  293. package/src/EntityId.ts +16 -0
  294. package/src/EntityType.ts +16 -0
  295. package/src/Envelope.ts +352 -0
  296. package/src/HttpCommon.ts +73 -0
  297. package/src/HttpRunner.ts +196 -0
  298. package/src/HttpShardManager.ts +273 -0
  299. package/src/MachineId.ts +27 -0
  300. package/src/Message.ts +143 -92
  301. package/src/MessageStorage.ts +697 -0
  302. package/src/Reply.ts +295 -0
  303. package/src/Runner.ts +84 -0
  304. package/src/RunnerAddress.ts +61 -0
  305. package/src/RunnerHealth.ts +87 -0
  306. package/src/RunnerServer.ts +156 -0
  307. package/src/Runners.ts +533 -0
  308. package/src/ShardId.ts +10 -62
  309. package/src/ShardManager.ts +780 -29
  310. package/src/ShardStorage.ts +289 -0
  311. package/src/Sharding.ts +1059 -186
  312. package/src/ShardingConfig.ts +186 -45
  313. package/src/ShardingRegistrationEvent.ts +38 -39
  314. package/src/Singleton.ts +20 -0
  315. package/src/SingletonAddress.ts +47 -0
  316. package/src/Snowflake.ts +194 -0
  317. package/src/SocketRunner.ts +59 -0
  318. package/src/SocketShardManager.ts +48 -0
  319. package/src/SqlMessageStorage.ts +833 -0
  320. package/src/SqlShardStorage.ts +292 -0
  321. package/src/SynchronizedClock.ts +82 -0
  322. package/src/index.ts +54 -24
  323. package/src/internal/entityManager.ts +464 -361
  324. package/src/internal/entityReaper.ts +53 -0
  325. package/src/internal/hash.ts +11 -0
  326. package/src/internal/interruptors.ts +4 -0
  327. package/src/internal/resourceMap.ts +89 -0
  328. package/src/internal/resourceRef.ts +88 -0
  329. package/src/internal/shardManager.ts +273 -546
  330. package/AtLeastOnce/package.json +0 -6
  331. package/AtLeastOnceStorage/package.json +0 -6
  332. package/Broadcaster/package.json +0 -6
  333. package/ManagerConfig/package.json +0 -6
  334. package/MessageState/package.json +0 -6
  335. package/Messenger/package.json +0 -6
  336. package/Pod/package.json +0 -6
  337. package/PodAddress/package.json +0 -6
  338. package/Pods/package.json +0 -6
  339. package/PodsHealth/package.json +0 -6
  340. package/PoisonPill/package.json +0 -6
  341. package/RecipientAddress/package.json +0 -6
  342. package/RecipientBehaviour/package.json +0 -6
  343. package/RecipientBehaviourContext/package.json +0 -6
  344. package/RecipientType/package.json +0 -6
  345. package/Serialization/package.json +0 -6
  346. package/SerializedEnvelope/package.json +0 -6
  347. package/SerializedMessage/package.json +0 -6
  348. package/ShardManagerClient/package.json +0 -6
  349. package/ShardingEvent/package.json +0 -6
  350. package/ShardingException/package.json +0 -6
  351. package/Storage/package.json +0 -6
  352. package/dist/cjs/AtLeastOnce.js.map +0 -1
  353. package/dist/cjs/AtLeastOnceStorage.js.map +0 -1
  354. package/dist/cjs/Broadcaster.js +0 -6
  355. package/dist/cjs/Broadcaster.js.map +0 -1
  356. package/dist/cjs/ManagerConfig.js.map +0 -1
  357. package/dist/cjs/MessageState.js +0 -55
  358. package/dist/cjs/MessageState.js.map +0 -1
  359. package/dist/cjs/Messenger.js +0 -6
  360. package/dist/cjs/Messenger.js.map +0 -1
  361. package/dist/cjs/Pod.js +0 -78
  362. package/dist/cjs/Pod.js.map +0 -1
  363. package/dist/cjs/PodAddress.js +0 -77
  364. package/dist/cjs/PodAddress.js.map +0 -1
  365. package/dist/cjs/Pods.js.map +0 -1
  366. package/dist/cjs/PodsHealth.js +0 -41
  367. package/dist/cjs/PodsHealth.js.map +0 -1
  368. package/dist/cjs/PoisonPill.js +0 -78
  369. package/dist/cjs/PoisonPill.js.map +0 -1
  370. package/dist/cjs/RecipientAddress.js +0 -79
  371. package/dist/cjs/RecipientAddress.js.map +0 -1
  372. package/dist/cjs/RecipientBehaviour.js +0 -38
  373. package/dist/cjs/RecipientBehaviour.js.map +0 -1
  374. package/dist/cjs/RecipientBehaviourContext.js +0 -64
  375. package/dist/cjs/RecipientBehaviourContext.js.map +0 -1
  376. package/dist/cjs/RecipientType.js +0 -123
  377. package/dist/cjs/RecipientType.js.map +0 -1
  378. package/dist/cjs/Serialization.js +0 -32
  379. package/dist/cjs/Serialization.js.map +0 -1
  380. package/dist/cjs/SerializedEnvelope.js +0 -87
  381. package/dist/cjs/SerializedEnvelope.js.map +0 -1
  382. package/dist/cjs/SerializedMessage.js +0 -64
  383. package/dist/cjs/SerializedMessage.js.map +0 -1
  384. package/dist/cjs/ShardManagerClient.js.map +0 -1
  385. package/dist/cjs/ShardingEvent.js +0 -72
  386. package/dist/cjs/ShardingEvent.js.map +0 -1
  387. package/dist/cjs/ShardingException.js +0 -107
  388. package/dist/cjs/ShardingException.js.map +0 -1
  389. package/dist/cjs/Storage.js +0 -40
  390. package/dist/cjs/Storage.js.map +0 -1
  391. package/dist/cjs/internal/atLeastOnce.js +0 -35
  392. package/dist/cjs/internal/atLeastOnce.js.map +0 -1
  393. package/dist/cjs/internal/atLeastOnceStorage.js +0 -163
  394. package/dist/cjs/internal/atLeastOnceStorage.js.map +0 -1
  395. package/dist/cjs/internal/entityState.js +0 -47
  396. package/dist/cjs/internal/entityState.js.map +0 -1
  397. package/dist/cjs/internal/managerConfig.js +0 -46
  398. package/dist/cjs/internal/managerConfig.js.map +0 -1
  399. package/dist/cjs/internal/message.js +0 -48
  400. package/dist/cjs/internal/message.js.map +0 -1
  401. package/dist/cjs/internal/messageState.js +0 -79
  402. package/dist/cjs/internal/messageState.js.map +0 -1
  403. package/dist/cjs/internal/podWithMetadata.js +0 -54
  404. package/dist/cjs/internal/podWithMetadata.js.map +0 -1
  405. package/dist/cjs/internal/pods.js +0 -35
  406. package/dist/cjs/internal/pods.js.map +0 -1
  407. package/dist/cjs/internal/podsHealth.js +0 -40
  408. package/dist/cjs/internal/podsHealth.js.map +0 -1
  409. package/dist/cjs/internal/recipientBehaviour.js +0 -52
  410. package/dist/cjs/internal/recipientBehaviour.js.map +0 -1
  411. package/dist/cjs/internal/recipientBehaviourContext.js +0 -36
  412. package/dist/cjs/internal/recipientBehaviourContext.js.map +0 -1
  413. package/dist/cjs/internal/serialization.js +0 -48
  414. package/dist/cjs/internal/serialization.js.map +0 -1
  415. package/dist/cjs/internal/shardManagerClient.js +0 -48
  416. package/dist/cjs/internal/shardManagerClient.js.map +0 -1
  417. package/dist/cjs/internal/shardManagerState.js +0 -44
  418. package/dist/cjs/internal/shardManagerState.js.map +0 -1
  419. package/dist/cjs/internal/sharding.js +0 -306
  420. package/dist/cjs/internal/sharding.js.map +0 -1
  421. package/dist/cjs/internal/shardingConfig.js +0 -56
  422. package/dist/cjs/internal/shardingConfig.js.map +0 -1
  423. package/dist/cjs/internal/storage.js +0 -52
  424. package/dist/cjs/internal/storage.js.map +0 -1
  425. package/dist/cjs/internal/utils.js +0 -69
  426. package/dist/cjs/internal/utils.js.map +0 -1
  427. package/dist/dts/AtLeastOnce.d.ts +0 -20
  428. package/dist/dts/AtLeastOnce.d.ts.map +0 -1
  429. package/dist/dts/AtLeastOnceStorage.d.ts +0 -75
  430. package/dist/dts/AtLeastOnceStorage.d.ts.map +0 -1
  431. package/dist/dts/Broadcaster.d.ts +0 -32
  432. package/dist/dts/Broadcaster.d.ts.map +0 -1
  433. package/dist/dts/ManagerConfig.d.ts +0 -61
  434. package/dist/dts/ManagerConfig.d.ts.map +0 -1
  435. package/dist/dts/MessageState.d.ts +0 -107
  436. package/dist/dts/MessageState.d.ts.map +0 -1
  437. package/dist/dts/Messenger.d.ts +0 -32
  438. package/dist/dts/Messenger.d.ts.map +0 -1
  439. package/dist/dts/Pod.d.ts +0 -81
  440. package/dist/dts/Pod.d.ts.map +0 -1
  441. package/dist/dts/PodAddress.d.ts +0 -80
  442. package/dist/dts/PodAddress.d.ts.map +0 -1
  443. package/dist/dts/Pods.d.ts +0 -78
  444. package/dist/dts/Pods.d.ts.map +0 -1
  445. package/dist/dts/PodsHealth.d.ts +0 -66
  446. package/dist/dts/PodsHealth.d.ts.map +0 -1
  447. package/dist/dts/PoisonPill.d.ts +0 -78
  448. package/dist/dts/PoisonPill.d.ts.map +0 -1
  449. package/dist/dts/RecipientAddress.d.ts +0 -57
  450. package/dist/dts/RecipientAddress.d.ts.map +0 -1
  451. package/dist/dts/RecipientBehaviour.d.ts +0 -72
  452. package/dist/dts/RecipientBehaviour.d.ts.map +0 -1
  453. package/dist/dts/RecipientBehaviourContext.d.ts +0 -83
  454. package/dist/dts/RecipientBehaviourContext.d.ts.map +0 -1
  455. package/dist/dts/RecipientType.d.ts +0 -93
  456. package/dist/dts/RecipientType.d.ts.map +0 -1
  457. package/dist/dts/Serialization.d.ts +0 -58
  458. package/dist/dts/Serialization.d.ts.map +0 -1
  459. package/dist/dts/SerializedEnvelope.d.ts +0 -86
  460. package/dist/dts/SerializedEnvelope.d.ts.map +0 -1
  461. package/dist/dts/SerializedMessage.d.ts +0 -66
  462. package/dist/dts/SerializedMessage.d.ts.map +0 -1
  463. package/dist/dts/ShardManagerClient.d.ts +0 -50
  464. package/dist/dts/ShardManagerClient.d.ts.map +0 -1
  465. package/dist/dts/ShardingEvent.d.ts +0 -90
  466. package/dist/dts/ShardingEvent.d.ts.map +0 -1
  467. package/dist/dts/ShardingException.d.ts +0 -125
  468. package/dist/dts/ShardingException.d.ts.map +0 -1
  469. package/dist/dts/Storage.d.ts +0 -78
  470. package/dist/dts/Storage.d.ts.map +0 -1
  471. package/dist/dts/internal/atLeastOnce.d.ts +0 -2
  472. package/dist/dts/internal/atLeastOnce.d.ts.map +0 -1
  473. package/dist/dts/internal/atLeastOnceStorage.d.ts +0 -2
  474. package/dist/dts/internal/atLeastOnceStorage.d.ts.map +0 -1
  475. package/dist/dts/internal/entityState.d.ts +0 -21
  476. package/dist/dts/internal/entityState.d.ts.map +0 -1
  477. package/dist/dts/internal/managerConfig.d.ts +0 -2
  478. package/dist/dts/internal/managerConfig.d.ts.map +0 -1
  479. package/dist/dts/internal/message.d.ts +0 -9
  480. package/dist/dts/internal/message.d.ts.map +0 -1
  481. package/dist/dts/internal/messageState.d.ts +0 -2
  482. package/dist/dts/internal/messageState.d.ts.map +0 -1
  483. package/dist/dts/internal/podWithMetadata.d.ts +0 -2
  484. package/dist/dts/internal/podWithMetadata.d.ts.map +0 -1
  485. package/dist/dts/internal/pods.d.ts +0 -2
  486. package/dist/dts/internal/pods.d.ts.map +0 -1
  487. package/dist/dts/internal/podsHealth.d.ts +0 -2
  488. package/dist/dts/internal/podsHealth.d.ts.map +0 -1
  489. package/dist/dts/internal/recipientBehaviour.d.ts +0 -2
  490. package/dist/dts/internal/recipientBehaviour.d.ts.map +0 -1
  491. package/dist/dts/internal/recipientBehaviourContext.d.ts +0 -2
  492. package/dist/dts/internal/recipientBehaviourContext.d.ts.map +0 -1
  493. package/dist/dts/internal/serialization.d.ts +0 -2
  494. package/dist/dts/internal/serialization.d.ts.map +0 -1
  495. package/dist/dts/internal/shardManagerClient.d.ts +0 -2
  496. package/dist/dts/internal/shardManagerClient.d.ts.map +0 -1
  497. package/dist/dts/internal/shardManagerState.d.ts +0 -26
  498. package/dist/dts/internal/shardManagerState.d.ts.map +0 -1
  499. package/dist/dts/internal/sharding.d.ts +0 -2
  500. package/dist/dts/internal/sharding.d.ts.map +0 -1
  501. package/dist/dts/internal/shardingConfig.d.ts +0 -2
  502. package/dist/dts/internal/shardingConfig.d.ts.map +0 -1
  503. package/dist/dts/internal/storage.d.ts +0 -2
  504. package/dist/dts/internal/storage.d.ts.map +0 -1
  505. package/dist/dts/internal/utils.d.ts +0 -2
  506. package/dist/dts/internal/utils.d.ts.map +0 -1
  507. package/dist/esm/AtLeastOnce.js +0 -12
  508. package/dist/esm/AtLeastOnce.js.map +0 -1
  509. package/dist/esm/AtLeastOnceStorage.js +0 -17
  510. package/dist/esm/AtLeastOnceStorage.js.map +0 -1
  511. package/dist/esm/Broadcaster.js +0 -2
  512. package/dist/esm/Broadcaster.js.map +0 -1
  513. package/dist/esm/ManagerConfig.js +0 -26
  514. package/dist/esm/ManagerConfig.js.map +0 -1
  515. package/dist/esm/MessageState.js +0 -47
  516. package/dist/esm/MessageState.js.map +0 -1
  517. package/dist/esm/Messenger.js +0 -2
  518. package/dist/esm/Messenger.js.map +0 -1
  519. package/dist/esm/Pod.js +0 -65
  520. package/dist/esm/Pod.js.map +0 -1
  521. package/dist/esm/PodAddress.js +0 -64
  522. package/dist/esm/PodAddress.js.map +0 -1
  523. package/dist/esm/Pods.js +0 -27
  524. package/dist/esm/Pods.js.map +0 -1
  525. package/dist/esm/PodsHealth.js +0 -33
  526. package/dist/esm/PodsHealth.js.map +0 -1
  527. package/dist/esm/PoisonPill.js +0 -65
  528. package/dist/esm/PoisonPill.js.map +0 -1
  529. package/dist/esm/RecipientAddress.js +0 -67
  530. package/dist/esm/RecipientAddress.js.map +0 -1
  531. package/dist/esm/RecipientBehaviour.js +0 -30
  532. package/dist/esm/RecipientBehaviour.js.map +0 -1
  533. package/dist/esm/RecipientBehaviourContext.js +0 -56
  534. package/dist/esm/RecipientBehaviourContext.js.map +0 -1
  535. package/dist/esm/RecipientType.js +0 -108
  536. package/dist/esm/RecipientType.js.map +0 -1
  537. package/dist/esm/Serialization.js +0 -24
  538. package/dist/esm/Serialization.js.map +0 -1
  539. package/dist/esm/SerializedEnvelope.js +0 -74
  540. package/dist/esm/SerializedEnvelope.js.map +0 -1
  541. package/dist/esm/SerializedMessage.js +0 -51
  542. package/dist/esm/SerializedMessage.js.map +0 -1
  543. package/dist/esm/ShardManagerClient.js +0 -22
  544. package/dist/esm/ShardManagerClient.js.map +0 -1
  545. package/dist/esm/ShardingEvent.js +0 -62
  546. package/dist/esm/ShardingEvent.js.map +0 -1
  547. package/dist/esm/ShardingException.js +0 -91
  548. package/dist/esm/ShardingException.js.map +0 -1
  549. package/dist/esm/Storage.js +0 -32
  550. package/dist/esm/Storage.js.map +0 -1
  551. package/dist/esm/internal/atLeastOnce.js +0 -26
  552. package/dist/esm/internal/atLeastOnce.js.map +0 -1
  553. package/dist/esm/internal/atLeastOnceStorage.js +0 -154
  554. package/dist/esm/internal/atLeastOnceStorage.js.map +0 -1
  555. package/dist/esm/internal/entityState.js +0 -35
  556. package/dist/esm/internal/entityState.js.map +0 -1
  557. package/dist/esm/internal/managerConfig.js +0 -38
  558. package/dist/esm/internal/managerConfig.js.map +0 -1
  559. package/dist/esm/internal/message.js +0 -35
  560. package/dist/esm/internal/message.js.map +0 -1
  561. package/dist/esm/internal/messageState.js +0 -66
  562. package/dist/esm/internal/messageState.js.map +0 -1
  563. package/dist/esm/internal/podWithMetadata.js +0 -41
  564. package/dist/esm/internal/podWithMetadata.js.map +0 -1
  565. package/dist/esm/internal/pods.js +0 -25
  566. package/dist/esm/internal/pods.js.map +0 -1
  567. package/dist/esm/internal/podsHealth.js +0 -30
  568. package/dist/esm/internal/podsHealth.js.map +0 -1
  569. package/dist/esm/internal/recipientBehaviour.js +0 -42
  570. package/dist/esm/internal/recipientBehaviour.js.map +0 -1
  571. package/dist/esm/internal/recipientBehaviourContext.js +0 -26
  572. package/dist/esm/internal/recipientBehaviourContext.js.map +0 -1
  573. package/dist/esm/internal/serialization.js +0 -38
  574. package/dist/esm/internal/serialization.js.map +0 -1
  575. package/dist/esm/internal/shardManagerClient.js +0 -38
  576. package/dist/esm/internal/shardManagerClient.js.map +0 -1
  577. package/dist/esm/internal/shardManagerState.js +0 -36
  578. package/dist/esm/internal/shardManagerState.js.map +0 -1
  579. package/dist/esm/internal/sharding.js +0 -288
  580. package/dist/esm/internal/sharding.js.map +0 -1
  581. package/dist/esm/internal/shardingConfig.js +0 -47
  582. package/dist/esm/internal/shardingConfig.js.map +0 -1
  583. package/dist/esm/internal/storage.js +0 -42
  584. package/dist/esm/internal/storage.js.map +0 -1
  585. package/dist/esm/internal/utils.js +0 -56
  586. package/dist/esm/internal/utils.js.map +0 -1
  587. package/src/AtLeastOnce.ts +0 -28
  588. package/src/AtLeastOnceStorage.ts +0 -96
  589. package/src/Broadcaster.ts +0 -48
  590. package/src/ManagerConfig.ts +0 -67
  591. package/src/MessageState.ts +0 -126
  592. package/src/Messenger.ts +0 -40
  593. package/src/Pod.ts +0 -95
  594. package/src/PodAddress.ts +0 -94
  595. package/src/Pods.ts +0 -100
  596. package/src/PodsHealth.ts +0 -74
  597. package/src/PoisonPill.ts +0 -105
  598. package/src/RecipientAddress.ts +0 -72
  599. package/src/RecipientBehaviour.ts +0 -108
  600. package/src/RecipientBehaviourContext.ts +0 -101
  601. package/src/RecipientType.ts +0 -134
  602. package/src/Serialization.ts +0 -72
  603. package/src/SerializedEnvelope.ts +0 -108
  604. package/src/SerializedMessage.ts +0 -82
  605. package/src/ShardManagerClient.ts +0 -57
  606. package/src/ShardingEvent.ts +0 -121
  607. package/src/ShardingException.ts +0 -151
  608. package/src/Storage.ts +0 -92
  609. package/src/internal/atLeastOnce.ts +0 -59
  610. package/src/internal/atLeastOnceStorage.ts +0 -218
  611. package/src/internal/entityState.ts +0 -64
  612. package/src/internal/managerConfig.ts +0 -84
  613. package/src/internal/message.ts +0 -63
  614. package/src/internal/messageState.ts +0 -98
  615. package/src/internal/podWithMetadata.ts +0 -72
  616. package/src/internal/pods.ts +0 -29
  617. package/src/internal/podsHealth.ts +0 -39
  618. package/src/internal/recipientBehaviour.ts +0 -133
  619. package/src/internal/recipientBehaviourContext.ts +0 -70
  620. package/src/internal/serialization.ts +0 -63
  621. package/src/internal/shardManagerClient.ts +0 -49
  622. package/src/internal/shardManagerState.ts +0 -80
  623. package/src/internal/sharding.ts +0 -789
  624. package/src/internal/shardingConfig.ts +0 -97
  625. package/src/internal/storage.ts +0 -60
  626. package/src/internal/utils.ts +0 -54
@@ -3,104 +3,744 @@
3
3
  Object.defineProperty(exports, "__esModule", {
4
4
  value: true
5
5
  });
6
- exports.unregister = exports.sendMessageToLocalEntityManagerWithoutRetries = exports.registerTopic = exports.registerSingleton = exports.registerScoped = exports.registerEntity = exports.register = exports.messenger = exports.live = exports.getPods = exports.getAssignedShardIds = exports.broadcaster = exports.Tag = exports.ShardingTypeId = void 0;
7
- var internal = _interopRequireWildcard(require("./internal/sharding.js"));
6
+ exports.layer = exports.Sharding = void 0;
7
+ var RpcClient = _interopRequireWildcard(require("@effect/rpc/RpcClient"));
8
+ var _RpcMessage = require("@effect/rpc/RpcMessage");
9
+ var Arr = _interopRequireWildcard(require("effect/Array"));
10
+ var Cause = _interopRequireWildcard(require("effect/Cause"));
11
+ var Context = _interopRequireWildcard(require("effect/Context"));
12
+ var Effect = _interopRequireWildcard(require("effect/Effect"));
13
+ var Equal = _interopRequireWildcard(require("effect/Equal"));
14
+ var Fiber = _interopRequireWildcard(require("effect/Fiber"));
15
+ var FiberHandle = _interopRequireWildcard(require("effect/FiberHandle"));
16
+ var FiberMap = _interopRequireWildcard(require("effect/FiberMap"));
17
+ var FiberRef = _interopRequireWildcard(require("effect/FiberRef"));
18
+ var _Function = require("effect/Function");
19
+ var HashMap = _interopRequireWildcard(require("effect/HashMap"));
20
+ var Iterable = _interopRequireWildcard(require("effect/Iterable"));
21
+ var Layer = _interopRequireWildcard(require("effect/Layer"));
22
+ var MutableHashMap = _interopRequireWildcard(require("effect/MutableHashMap"));
23
+ var MutableRef = _interopRequireWildcard(require("effect/MutableRef"));
24
+ var Option = _interopRequireWildcard(require("effect/Option"));
25
+ var Predicate = _interopRequireWildcard(require("effect/Predicate"));
26
+ var PubSub = _interopRequireWildcard(require("effect/PubSub"));
27
+ var Schedule = _interopRequireWildcard(require("effect/Schedule"));
28
+ var Scope = _interopRequireWildcard(require("effect/Scope"));
29
+ var Stream = _interopRequireWildcard(require("effect/Stream"));
30
+ var _ClusterError = require("./ClusterError.js");
31
+ var _ClusterSchema = require("./ClusterSchema.js");
32
+ var _EntityAddress = require("./EntityAddress.js");
33
+ var _EntityId = require("./EntityId.js");
34
+ var Envelope = _interopRequireWildcard(require("./Envelope.js"));
35
+ var EntityManager = _interopRequireWildcard(require("./internal/entityManager.js"));
36
+ var _entityReaper = require("./internal/entityReaper.js");
37
+ var _hash = require("./internal/hash.js");
38
+ var _interruptors = require("./internal/interruptors.js");
39
+ var _resourceMap = require("./internal/resourceMap.js");
40
+ var Message = _interopRequireWildcard(require("./Message.js"));
41
+ var MessageStorage = _interopRequireWildcard(require("./MessageStorage.js"));
42
+ var Reply = _interopRequireWildcard(require("./Reply.js"));
43
+ var _Runners = require("./Runners.js");
44
+ var _ShardId = require("./ShardId.js");
45
+ var _ShardingConfig = require("./ShardingConfig.js");
46
+ var _ShardingRegistrationEvent = require("./ShardingRegistrationEvent.js");
47
+ var _ShardManager = require("./ShardManager.js");
48
+ var _ShardStorage = require("./ShardStorage.js");
49
+ var _SingletonAddress = require("./SingletonAddress.js");
50
+ var Snowflake = _interopRequireWildcard(require("./Snowflake.js"));
8
51
  function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
9
52
  function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
10
53
  /**
11
54
  * @since 1.0.0
12
- * @category symbols
55
+ * @category models
13
56
  */
14
- const ShardingTypeId = exports.ShardingTypeId = internal.ShardingTypeId;
15
- /**
16
- * @since 1.0.0
17
- * @category context
18
- */
19
- const Tag = exports.Tag = internal.shardingTag;
57
+ class Sharding extends /*#__PURE__*/Context.Tag("@effect/cluster/Sharding")() {}
58
+ exports.Sharding = Sharding;
59
+ const make = /*#__PURE__*/Effect.gen(function* () {
60
+ const config = yield* _ShardingConfig.ShardingConfig;
61
+ const runners = yield* _Runners.Runners;
62
+ const shardManager = yield* _ShardManager.ShardManagerClient;
63
+ const snowflakeGen = yield* Snowflake.Generator;
64
+ const shardingScope = yield* Effect.scope;
65
+ const isShutdown = MutableRef.make(false);
66
+ const storage = yield* MessageStorage.MessageStorage;
67
+ const storageEnabled = storage !== MessageStorage.noop;
68
+ const shardStorage = yield* _ShardStorage.ShardStorage;
69
+ const entityManagers = new Map();
70
+ const shardAssignments = MutableHashMap.empty();
71
+ const selfShards = new Set();
72
+ // the active shards are the ones that we have acquired the lock for
73
+ const acquiredShards = new Set();
74
+ const activeShardsLatch = yield* Effect.makeLatch(false);
75
+ const events = yield* PubSub.unbounded();
76
+ const getRegistrationEvents = Stream.fromPubSub(events);
77
+ const isLocalRunner = address => Option.isSome(config.runnerAddress) && Equal.equals(address, config.runnerAddress.value);
78
+ function getShardId(entityId) {
79
+ return _ShardId.ShardId.make(Math.abs((0, _hash.hashString)(entityId) % config.numberOfShards) + 1);
80
+ }
81
+ function isEntityOnLocalShards(address) {
82
+ return acquiredShards.has(address.shardId);
83
+ }
84
+ // --- Shard acquisition ---
85
+ if (Option.isSome(config.runnerAddress)) {
86
+ const selfAddress = config.runnerAddress.value;
87
+ yield* Scope.addFinalizerExit(shardingScope, () => {
88
+ // the locks expire over time, so if this fails we ignore it
89
+ return Effect.ignore(shardStorage.releaseAll(selfAddress));
90
+ });
91
+ const releasingShards = new Set();
92
+ yield* Effect.gen(function* () {
93
+ while (true) {
94
+ yield* activeShardsLatch.await;
95
+ // if a shard is no longer assigned to this runner, we release it
96
+ for (const shardId of acquiredShards) {
97
+ if (selfShards.has(shardId)) continue;
98
+ acquiredShards.delete(shardId);
99
+ releasingShards.add(shardId);
100
+ }
101
+ // if a shard has been assigned to this runner, we acquire it
102
+ const unacquiredShards = new Set();
103
+ for (const shardId of selfShards) {
104
+ if (acquiredShards.has(shardId) || releasingShards.has(shardId)) continue;
105
+ unacquiredShards.add(shardId);
106
+ }
107
+ if (releasingShards.size > 0) {
108
+ yield* Effect.forkIn(syncSingletons, shardingScope);
109
+ yield* releaseShards;
110
+ }
111
+ if (unacquiredShards.size === 0) {
112
+ yield* activeShardsLatch.close;
113
+ continue;
114
+ }
115
+ const acquired = yield* shardStorage.acquire(selfAddress, unacquiredShards);
116
+ for (const shardId of acquired) {
117
+ acquiredShards.add(shardId);
118
+ }
119
+ if (acquired.length > 0) {
120
+ yield* storageReadLatch.open;
121
+ yield* Effect.forkIn(syncSingletons, shardingScope);
122
+ }
123
+ yield* Effect.sleep(1000);
124
+ }
125
+ }).pipe(Effect.catchAllCause(cause => Effect.logWarning("Could not acquire/release shards", cause)), Effect.repeat(Schedule.spaced(config.entityMessagePollInterval)), Effect.annotateLogs({
126
+ package: "@effect/cluster",
127
+ module: "Sharding",
128
+ fiber: "Shard acquisition loop",
129
+ runner: selfAddress
130
+ }), Effect.interruptible, Effect.forkIn(shardingScope));
131
+ // refresh the shard locks every minute
132
+ yield* Effect.suspend(() => shardStorage.refresh(selfAddress, [...acquiredShards, ...releasingShards])).pipe(Effect.flatMap(acquired => {
133
+ for (const shardId of acquiredShards) {
134
+ if (!acquired.includes(shardId)) {
135
+ acquiredShards.delete(shardId);
136
+ releasingShards.add(shardId);
137
+ }
138
+ }
139
+ return releasingShards.size > 0 ? Effect.andThen(Effect.forkIn(syncSingletons, shardingScope), releaseShards) : Effect.void;
140
+ }), Effect.retry({
141
+ times: 5,
142
+ schedule: Schedule.spaced(250)
143
+ }), Effect.catchAllCause(cause => Effect.logError("Could not refresh shard locks", cause).pipe(Effect.andThen(clearSelfShards))), Effect.delay("1 minute"), Effect.forever, Effect.interruptible, Effect.forkIn(shardingScope));
144
+ const releaseShardsLock = Effect.unsafeMakeSemaphore(1).withPermits(1);
145
+ const releaseShards = releaseShardsLock(Effect.suspend(() => Effect.forEach(releasingShards, shardId => Effect.forEach(entityManagers.values(), state => state.manager.interruptShard(shardId), {
146
+ concurrency: "unbounded",
147
+ discard: true
148
+ }).pipe(Effect.andThen(shardStorage.release(selfAddress, shardId)), Effect.annotateLogs({
149
+ runner: selfAddress
150
+ }), Effect.andThen(() => {
151
+ releasingShards.delete(shardId);
152
+ })), {
153
+ concurrency: "unbounded",
154
+ discard: true
155
+ })));
156
+ }
157
+ const clearSelfShards = Effect.suspend(() => {
158
+ selfShards.clear();
159
+ return activeShardsLatch.open;
160
+ });
161
+ // --- Singletons ---
162
+ const singletons = new Map();
163
+ const singletonFibers = yield* FiberMap.make();
164
+ const withSingletonLock = Effect.unsafeMakeSemaphore(1).withPermits(1);
165
+ const registerSingleton = Effect.fnUntraced(function* (name, run) {
166
+ const address = new _SingletonAddress.SingletonAddress({
167
+ shardId: getShardId(_EntityId.EntityId.make(name)),
168
+ name
169
+ });
170
+ let map = singletons.get(address.shardId);
171
+ if (!map) {
172
+ map = MutableHashMap.empty();
173
+ singletons.set(address.shardId, map);
174
+ }
175
+ if (MutableHashMap.has(map, address)) {
176
+ return yield* Effect.dieMessage(`Singleton '${name}' is already registered`);
177
+ }
178
+ const context = yield* Effect.context();
179
+ const wrappedRun = run.pipe(Effect.locally(FiberRef.currentLogAnnotations, HashMap.empty()), Effect.andThen(Effect.never), Effect.scoped, Effect.provide(context), Effect.orDie, Effect.interruptible);
180
+ MutableHashMap.set(map, address, wrappedRun);
181
+ yield* PubSub.publish(events, (0, _ShardingRegistrationEvent.SingletonRegistered)({
182
+ address
183
+ }));
184
+ // start if we are on the right shard
185
+ if (acquiredShards.has(address.shardId)) {
186
+ yield* Effect.logDebug("Starting singleton", address);
187
+ yield* FiberMap.run(singletonFibers, address, wrappedRun);
188
+ }
189
+ }, withSingletonLock);
190
+ const syncSingletons = withSingletonLock(Effect.gen(function* () {
191
+ for (const [shardId, map] of singletons) {
192
+ for (const [address, run] of map) {
193
+ const running = FiberMap.unsafeHas(singletonFibers, address);
194
+ const shouldBeRunning = acquiredShards.has(shardId);
195
+ if (running && !shouldBeRunning) {
196
+ yield* Effect.logDebug("Stopping singleton", address);
197
+ _interruptors.internalInterruptors.add(yield* Effect.fiberId);
198
+ yield* FiberMap.remove(singletonFibers, address);
199
+ } else if (!running && shouldBeRunning) {
200
+ yield* Effect.logDebug("Starting singleton", address);
201
+ yield* FiberMap.run(singletonFibers, address, run);
202
+ }
203
+ }
204
+ }
205
+ }));
206
+ // --- Storage inbox ---
207
+ const storageReadLatch = yield* Effect.makeLatch(true);
208
+ const openStorageReadLatch = (0, _Function.constant)(storageReadLatch.open);
209
+ const storageReadLock = Effect.unsafeMakeSemaphore(1);
210
+ const withStorageReadLock = storageReadLock.withPermits(1);
211
+ if (storageEnabled && Option.isSome(config.runnerAddress)) {
212
+ const selfAddress = config.runnerAddress.value;
213
+ yield* Effect.gen(function* () {
214
+ yield* Effect.logDebug("Starting");
215
+ yield* Effect.addFinalizer(() => Effect.logDebug("Shutting down"));
216
+ // keep track of the last sent request ids to avoid duplicates
217
+ // we only keep the last 30 sets to avoid memory leaks
218
+ const sentRequestIds = new Set();
219
+ const sentRequestIdSets = new Set();
220
+ while (true) {
221
+ // wait for the next poll interval, or if we get notified of a change
222
+ yield* storageReadLatch.await;
223
+ // if we get notified of a change, ensure we start a read immediately
224
+ // next iteration
225
+ storageReadLatch.unsafeClose();
226
+ // the lock is used to ensure resuming entities have a garantee that no
227
+ // more items are added to the unprocessed set while the semaphore is
228
+ // acquired.
229
+ yield* storageReadLock.take(1);
230
+ const messages = yield* storage.unprocessedMessages(acquiredShards);
231
+ const currentSentRequestIds = new Set();
232
+ sentRequestIdSets.add(currentSentRequestIds);
233
+ const send = Effect.catchAllCause(Effect.suspend(() => {
234
+ const message = messages[index];
235
+ if (message._tag === "IncomingRequest") {
236
+ if (sentRequestIds.has(message.envelope.requestId)) {
237
+ return Effect.void;
238
+ }
239
+ sentRequestIds.add(message.envelope.requestId);
240
+ currentSentRequestIds.add(message.envelope.requestId);
241
+ }
242
+ const address = message.envelope.address;
243
+ const state = entityManagers.get(address.entityType);
244
+ if (!state || !acquiredShards.has(address.shardId)) {
245
+ return Effect.void;
246
+ }
247
+ const isProcessing = state.manager.isProcessingFor(message);
248
+ // If the message might affect a currently processing request, we
249
+ // send it to the entity manager to be processed.
250
+ if (message._tag === "IncomingEnvelope" && isProcessing) {
251
+ return state.manager.send(message);
252
+ } else if (isProcessing) {
253
+ return Effect.void;
254
+ }
255
+ // If the entity was resuming in another fiber, we add the message
256
+ // id to the unprocessed set.
257
+ const resumptionState = MutableHashMap.get(entityResumptionState, address);
258
+ if (Option.isSome(resumptionState)) {
259
+ resumptionState.value.unprocessed.add(message.envelope.requestId);
260
+ if (message.envelope._tag === "Interrupt") {
261
+ resumptionState.value.interrupts.set(message.envelope.requestId, message);
262
+ }
263
+ return Effect.void;
264
+ }
265
+ return state.manager.send(message);
266
+ }), cause => {
267
+ const message = messages[index];
268
+ const error = Cause.failureOption(cause);
269
+ // if we get a defect, then update storage
270
+ if (Option.isNone(error)) {
271
+ return storage.saveReply(Reply.ReplyWithContext.fromDefect({
272
+ id: snowflakeGen.unsafeNext(),
273
+ requestId: message.envelope.requestId,
274
+ defect: Cause.squash(cause)
275
+ }));
276
+ }
277
+ if (error.value._tag === "MailboxFull") {
278
+ // MailboxFull can only happen for requests, so this cast is safe
279
+ return resumeEntityFromStorage(message);
280
+ }
281
+ return Effect.void;
282
+ });
283
+ let index = 0;
284
+ yield* Effect.whileLoop({
285
+ while: () => index < messages.length,
286
+ step: () => index++,
287
+ body: (0, _Function.constant)(send)
288
+ });
289
+ // let the resuming entities check if they are done
290
+ yield* storageReadLock.release(1);
291
+ while (sentRequestIdSets.size > 30) {
292
+ const oldest = Iterable.unsafeHead(sentRequestIdSets);
293
+ sentRequestIdSets.delete(oldest);
294
+ for (const id of oldest) {
295
+ sentRequestIds.delete(id);
296
+ }
297
+ }
298
+ }
299
+ }).pipe(Effect.scoped, Effect.ensuring(storageReadLock.releaseAll), Effect.catchAllCause(cause => Effect.logWarning("Could not read messages from storage", cause)), Effect.repeat(Schedule.spaced(config.entityMessagePollInterval)), Effect.annotateLogs({
300
+ package: "@effect/cluster",
301
+ module: "Sharding",
302
+ fiber: "Storage read loop",
303
+ runner: selfAddress
304
+ }), Effect.interruptible, Effect.forkIn(shardingScope));
305
+ // open the storage latch every poll interval
306
+ yield* storageReadLatch.open.pipe(Effect.delay(config.entityMessagePollInterval), Effect.forever, Effect.interruptible, Effect.forkIn(shardingScope));
307
+ // Resume unprocessed messages for entities that reached a full mailbox.
308
+ const entityResumptionState = MutableHashMap.empty();
309
+ const resumeEntityFromStorage = lastReceivedMessage => {
310
+ const address = lastReceivedMessage.envelope.address;
311
+ const resumptionState = MutableHashMap.get(entityResumptionState, address);
312
+ if (Option.isSome(resumptionState)) {
313
+ resumptionState.value.unprocessed.add(lastReceivedMessage.envelope.requestId);
314
+ return Effect.void;
315
+ }
316
+ MutableHashMap.set(entityResumptionState, address, {
317
+ unprocessed: new Set([lastReceivedMessage.envelope.requestId]),
318
+ interrupts: new Map()
319
+ });
320
+ return resumeEntityFromStorageImpl(address);
321
+ };
322
+ const resumeEntityFromStorageImpl = Effect.fnUntraced(function* (address) {
323
+ const state = entityManagers.get(address.entityType);
324
+ if (!state) {
325
+ MutableHashMap.remove(entityResumptionState, address);
326
+ return;
327
+ }
328
+ const resumptionState = Option.getOrThrow(MutableHashMap.get(entityResumptionState, address));
329
+ let done = false;
330
+ while (!done) {
331
+ // if the shard is no longer assigned to this runner, we stop
332
+ if (!acquiredShards.has(address.shardId)) {
333
+ return;
334
+ }
335
+ // take a batch of unprocessed messages ids
336
+ const messageIds = Arr.empty();
337
+ for (const id of resumptionState.unprocessed) {
338
+ if (messageIds.length === 1024) break;
339
+ messageIds.push(id);
340
+ }
341
+ const messages = yield* storage.unprocessedMessagesById(messageIds);
342
+ // this should not happen, but we handle it just in case
343
+ if (messages.length === 0) {
344
+ yield* Effect.sleep(config.entityMessagePollInterval);
345
+ continue;
346
+ }
347
+ let index = 0;
348
+ const sendWithRetry = Effect.catchTags(Effect.suspend(() => {
349
+ if (!acquiredShards.has(address.shardId)) {
350
+ return Effect.fail(new _ClusterError.EntityNotManagedByRunner({
351
+ address
352
+ }));
353
+ }
354
+ const message = messages[index];
355
+ // check if this is a request that was interrupted
356
+ const interrupt = message._tag === "IncomingRequest" && resumptionState.interrupts.get(message.envelope.requestId);
357
+ return interrupt ? Effect.flatMap(state.manager.send(message), () => {
358
+ resumptionState.interrupts.delete(message.envelope.requestId);
359
+ return state.manager.send(interrupt);
360
+ }) : state.manager.send(message);
361
+ }), {
362
+ MailboxFull: () => Effect.delay(sendWithRetry, config.sendRetryInterval),
363
+ AlreadyProcessingMessage: () => Effect.void
364
+ });
365
+ yield* Effect.whileLoop({
366
+ while: () => index < messages.length,
367
+ body: (0, _Function.constant)(sendWithRetry),
368
+ step: () => index++
369
+ });
370
+ for (const id of messageIds) {
371
+ resumptionState.unprocessed.delete(id);
372
+ }
373
+ if (resumptionState.unprocessed.size > 0) continue;
374
+ // if we have caught up to the main storage loop, we let it take over
375
+ yield* withStorageReadLock(Effect.sync(() => {
376
+ if (resumptionState.unprocessed.size === 0) {
377
+ MutableHashMap.remove(entityResumptionState, address);
378
+ done = true;
379
+ }
380
+ }));
381
+ }
382
+ }, Effect.retry({
383
+ while: e => e._tag === "PersistenceError",
384
+ schedule: Schedule.spaced(config.entityMessagePollInterval)
385
+ }), Effect.catchAllCause(cause => Effect.logError("Could not resume unprocessed messages", cause)), (effect, address) => Effect.annotateLogs(effect, {
386
+ package: "@effect/cluster",
387
+ module: "Sharding",
388
+ fiber: "Resuming unprocessed messages",
389
+ runner: selfAddress,
390
+ entity: address
391
+ }), (effect, address) => Effect.ensuring(effect, Effect.sync(() => MutableHashMap.remove(entityResumptionState, address))), Effect.interruptible, Effect.forkIn(shardingScope));
392
+ }
393
+ // --- Sending messages ---
394
+ const sendLocal = message => Effect.suspend(() => {
395
+ const address = message.envelope.address;
396
+ if (!isEntityOnLocalShards(address)) {
397
+ return Effect.fail(new _ClusterError.EntityNotManagedByRunner({
398
+ address
399
+ }));
400
+ }
401
+ const state = entityManagers.get(address.entityType);
402
+ if (!state) {
403
+ return Effect.fail(new _ClusterError.EntityNotManagedByRunner({
404
+ address
405
+ }));
406
+ }
407
+ return message._tag === "IncomingRequest" || message._tag === "IncomingEnvelope" ? state.manager.send(message) : runners.sendLocal({
408
+ message,
409
+ send: state.manager.sendLocal,
410
+ simulateRemoteSerialization: config.simulateRemoteSerialization
411
+ });
412
+ });
413
+ const notifyLocal = (message, discard) => Effect.suspend(() => {
414
+ const address = message.envelope.address;
415
+ if (!isEntityOnLocalShards(address)) {
416
+ return Effect.fail(new _ClusterError.EntityNotManagedByRunner({
417
+ address
418
+ }));
419
+ }
420
+ const notify = storageEnabled ? openStorageReadLatch : () => Effect.dieMessage("Sharding.notifyLocal: storage is disabled");
421
+ return message._tag === "IncomingRequest" || message._tag === "IncomingEnvelope" ? notify() : runners.notifyLocal({
422
+ message,
423
+ notify,
424
+ discard
425
+ });
426
+ });
427
+ const isTransientError = Predicate.or(_ClusterError.RunnerUnavailable.is, _ClusterError.EntityNotManagedByRunner.is);
428
+ function sendOutgoing(message, discard, retries) {
429
+ return Effect.catchIf(Effect.suspend(() => {
430
+ const address = message.envelope.address;
431
+ const maybeRunner = MutableHashMap.get(shardAssignments, address.shardId);
432
+ const isPersisted = storageEnabled && Context.get(message.rpc.annotations, _ClusterSchema.Persisted);
433
+ const runnerIsLocal = Option.isSome(maybeRunner) && isLocalRunner(maybeRunner.value);
434
+ if (isPersisted) {
435
+ return runnerIsLocal ? notifyLocal(message, discard) : runners.notify({
436
+ address: maybeRunner,
437
+ message,
438
+ discard
439
+ });
440
+ } else if (Option.isNone(maybeRunner)) {
441
+ return Effect.fail(new _ClusterError.EntityNotManagedByRunner({
442
+ address
443
+ }));
444
+ }
445
+ return runnerIsLocal ? sendLocal(message) : runners.send({
446
+ address: maybeRunner.value,
447
+ message
448
+ });
449
+ }), isTransientError, error => {
450
+ if (retries === 0) {
451
+ return Effect.die(error);
452
+ }
453
+ return Effect.delay(sendOutgoing(message, discard, retries && retries - 1), config.sendRetryInterval);
454
+ });
455
+ }
456
+ // --- Shard Manager sync ---
457
+ const shardManagerTimeoutFiber = yield* FiberHandle.make().pipe(Scope.extend(shardingScope));
458
+ const startShardManagerTimeout = FiberHandle.run(shardManagerTimeoutFiber, Effect.flatMap(Effect.sleep(config.shardManagerUnavailableTimeout), () => {
459
+ MutableHashMap.clear(shardAssignments);
460
+ return clearSelfShards;
461
+ }), {
462
+ onlyIfMissing: true
463
+ });
464
+ const stopShardManagerTimeout = FiberHandle.clear(shardManagerTimeoutFiber);
465
+ // Every time the link to the shard manager is lost, we re-register the runner
+ // and re-subscribe to sharding events
+ yield* Effect.gen(function* () {
+ yield* Effect.logDebug("Registering with shard manager");
+ if (Option.isSome(config.runnerAddress)) {
+ const machineId = yield* shardManager.register(config.runnerAddress.value);
+ yield* snowflakeGen.setMachineId(machineId);
+ }
+ yield* stopShardManagerTimeout;
+ yield* Effect.logDebug("Subscribing to sharding events");
+ const mailbox = yield* shardManager.shardingEvents;
+ const startedLatch = yield* Effect.makeLatch(false);
+ const eventsFiber = yield* Effect.gen(function* () {
+ while (true) {
+ const [events] = yield* mailbox.takeAll;
+ for (const event of events) {
+ yield* Effect.logDebug("Received sharding event", event);
+ switch (event._tag) {
+ case "StreamStarted":
+ {
+ yield* startedLatch.open;
+ break;
+ }
+ case "ShardsAssigned":
+ {
+ for (const shard of event.shards) {
+ MutableHashMap.set(shardAssignments, shard, event.address);
+ }
+ if (!MutableRef.get(isShutdown) && isLocalRunner(event.address)) {
+ for (const shardId of event.shards) {
+ if (selfShards.has(shardId)) continue;
+ selfShards.add(shardId);
+ }
+ yield* activeShardsLatch.open;
+ }
+ break;
+ }
+ case "ShardsUnassigned":
+ {
+ for (const shard of event.shards) {
+ MutableHashMap.remove(shardAssignments, shard);
+ }
+ if (isLocalRunner(event.address)) {
+ for (const shard of event.shards) {
+ selfShards.delete(shard);
+ }
+ yield* activeShardsLatch.open;
+ }
+ break;
+ }
+ }
+ }
+ }
+ }).pipe(Effect.forkScoped);
+ // Wait for the stream to be established
+ yield* startedLatch.await;
+ // perform a full sync every config.refreshAssignmentsInterval
+ const syncFiber = yield* syncAssignments.pipe(Effect.andThen(Effect.sleep(config.refreshAssignmentsInterval)), Effect.forever, Effect.forkScoped);
+ yield* Fiber.joinAll([eventsFiber, syncFiber]);
+ }).pipe(Effect.scoped, Effect.catchAllCause(cause => Effect.logDebug(cause)), Effect.zipRight(startShardManagerTimeout), Effect.repeat(Schedule.exponential(1000).pipe(Schedule.union(Schedule.spaced(10_000)))), Effect.annotateLogs({
+ package: "@effect/cluster",
+ module: "Sharding",
+ fiber: "ShardManager sync",
+ runner: config.runnerAddress
+ }), Effect.interruptible, Effect.forkIn(shardingScope));
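The reconnect loop's repeat schedule is worth calling out: `Schedule.union` steps both schedules and wakes when the earlier of the two delays elapses, so unioning an exponential backoff with `Schedule.spaced(10_000)` caps the wait between re-registration attempts at ten seconds while still backing off quickly at first. A minimal sketch of the same idea, where `connectOnce` is an invented stand-in for the shard-manager subscription:

    import { Effect, Schedule } from "effect"

    // Exponential backoff starting at 1s, but never waiting longer than
    // 10s between attempts: the union picks the earlier wake-up time.
    const reconnectPolicy = Schedule.exponential(1000).pipe(
      Schedule.union(Schedule.spaced(10_000))
    )

    const connectOnce = Effect.log("connect and stream events until the link drops")

    const supervised = connectOnce.pipe(Effect.repeat(reconnectPolicy))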
+ const syncAssignments = Effect.gen(function* () {
+ const assignments = yield* shardManager.getAssignments;
+ yield* Effect.logDebug("Received shard assignments", assignments);
+ for (const [shardId, runner] of assignments) {
+ if (Option.isNone(runner)) {
+ MutableHashMap.remove(shardAssignments, shardId);
+ selfShards.delete(shardId);
+ continue;
+ }
+ MutableHashMap.set(shardAssignments, shardId, runner.value);
+ if (!isLocalRunner(runner.value)) {
+ selfShards.delete(shardId);
+ continue;
+ }
+ if (MutableRef.get(isShutdown) || selfShards.has(shardId)) {
+ continue;
+ }
+ selfShards.add(shardId);
+ }
+ yield* activeShardsLatch.open;
+ });
+ const clientRequests = new Map();
+ const clients = yield* _resourceMap.ResourceMap.make(Effect.fnUntraced(function* (entity) {
+ const client = yield* RpcClient.makeNoSerialization(entity.protocol, {
+ supportsAck: true,
+ generateRequestId: () => (0, _RpcMessage.RequestId)(snowflakeGen.unsafeNext()),
+ onFromClient(options) {
+ const address = Context.unsafeGet(options.context, ClientAddressTag);
+ switch (options.message._tag) {
+ case "Request":
+ {
+ const fiber = Option.getOrThrow(Fiber.getCurrentFiber());
+ const id = Snowflake.Snowflake(options.message.id);
+ const rpc = entity.protocol.requests.get(options.message.tag);
+ let respond;
+ if (!options.discard) {
+ const entry = {
+ rpc: rpc,
+ context: fiber.currentContext
+ };
+ clientRequests.set(id, entry);
+ respond = makeClientRespond(entry, client.write);
+ } else {
+ respond = clientRespondDiscard;
+ }
+ return sendOutgoing(new Message.OutgoingRequest({
+ envelope: Envelope.makeRequest({
+ requestId: id,
+ address,
+ tag: options.message.tag,
+ payload: options.message.payload,
+ headers: options.message.headers,
+ traceId: options.message.traceId,
+ spanId: options.message.spanId,
+ sampled: options.message.sampled
+ }),
+ lastReceivedReply: Option.none(),
+ rpc,
+ context: fiber.currentContext,
+ respond
+ }), options.discard);
+ }
+ case "Ack":
+ {
+ const requestId = Snowflake.Snowflake(options.message.requestId);
+ const entry = clientRequests.get(requestId);
+ if (!entry) return Effect.void;
+ return sendOutgoing(new Message.OutgoingEnvelope({
+ envelope: new Envelope.AckChunk({
+ id: snowflakeGen.unsafeNext(),
+ address,
+ requestId,
+ replyId: entry.lastChunkId
+ }),
+ rpc: entry.rpc
+ }), false);
+ }
+ case "Interrupt":
+ {
+ const requestId = Snowflake.Snowflake(options.message.requestId);
+ const entry = clientRequests.get(requestId);
+ if (!entry) return Effect.void;
+ clientRequests.delete(requestId);
+ // for durable messages, we ignore interrupts on shutdown or as a
+ // result of a shard being reassigned
+ const isTransientInterrupt = MutableRef.get(isShutdown) || options.message.interruptors.some(id => _interruptors.internalInterruptors.has(id));
+ if (isTransientInterrupt && storageEnabled && Context.get(entry.rpc.annotations, _ClusterSchema.Persisted)) {
+ return Effect.void;
+ }
+ return Effect.ignore(sendOutgoing(new Message.OutgoingEnvelope({
+ envelope: new Envelope.Interrupt({
+ id: snowflakeGen.unsafeNext(),
+ address,
+ requestId
+ }),
+ rpc: entry.rpc
+ }), false, 3));
+ }
+ }
+ return Effect.void;
+ }
+ });
+ const wrappedClient = {};
+ for (const method of Object.keys(client.client)) {
+ wrappedClient[method] = function (payload, options) {
+ return client.client[method](payload, {
+ ...options,
+ context: options?.context ? Context.merge(options.context, this[currentClientAddress]) : this[currentClientAddress]
+ });
+ };
+ }
+ yield* Scope.addFinalizer(yield* Effect.scope, Effect.withFiberRuntime(fiber => {
+ _interruptors.internalInterruptors.add(fiber.id());
+ return Effect.void;
+ }));
+ return entityId => {
+ const id = _EntityId.EntityId.make(entityId);
+ return {
+ ...wrappedClient,
+ [currentClientAddress]: ClientAddressTag.context(_EntityAddress.EntityAddress.make({
+ shardId: getShardId(id),
+ entityId: id,
+ entityType: entity.type
+ }))
+ };
+ };
+ }));
+ const makeClient = entity => clients.get(entity);
+ const clientRespondDiscard = _reply => Effect.void;
+ const makeClientRespond = (entry, write) => reply => {
+ switch (reply._tag) {
+ case "Chunk":
+ {
+ entry.lastChunkId = reply.id;
+ return write({
+ _tag: "Chunk",
+ clientId: 0,
+ requestId: (0, _RpcMessage.RequestId)(reply.requestId),
+ values: reply.values
+ });
+ }
+ case "WithExit":
+ {
+ clientRequests.delete(reply.requestId);
+ return write({
+ _tag: "Exit",
+ clientId: 0,
+ requestId: (0, _RpcMessage.RequestId)(reply.requestId),
+ exit: reply.exit
+ });
+ }
+ }
+ };
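`makeClient` resolves, per entity type, to a function from an entity id to a wrapped RPC client; every method call silently merges the entity's `EntityAddress` into the request context so `sendOutgoing` can route it. A hedged usage sketch — the `Counter` entity, its `Increment` rpc, and the `Sharding.Sharding` tag access are assumptions for illustration, not verified exports:

    import { Effect } from "effect"
    import { Sharding } from "@effect/cluster"

    // `Counter` stands in for an Entity defined elsewhere with an
    // `Increment` rpc; it is invented for this sketch.
    declare const Counter: any

    const program = Effect.gen(function* () {
      const sharding = yield* Sharding.Sharding
      const counterClient = yield* sharding.makeClient(Counter)
      // Address a specific entity instance by id; the wrapped client
      // merges the EntityAddress into the call context automatically.
      yield* counterClient("counter-1").Increment({ amount: 1 })
    })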
+ // --- Entities ---
+ const context = yield* Effect.context();
+ const reaper = yield* _entityReaper.EntityReaper;
+ const registerEntity = Effect.fnUntraced(function* (entity, build, options) {
+ if (entityManagers.has(entity.type)) return;
+ const scope = yield* Scope.make();
+ const manager = yield* EntityManager.make(entity, build, {
+ ...options,
+ storage,
+ runnerAddress: Option.getOrThrow(config.runnerAddress),
+ sharding
+ }).pipe(Effect.provide(context.pipe(Context.add(_entityReaper.EntityReaper, reaper), Context.add(Scope.Scope, scope), Context.add(Snowflake.Generator, snowflakeGen))));
+ entityManagers.set(entity.type, {
+ entity,
+ scope,
+ manager
+ });
+ yield* Scope.addFinalizer(scope, Effect.sync(() => entityManagers.delete(entity.type)));
+ yield* PubSub.publish(events, (0, _ShardingRegistrationEvent.EntityRegistered)({
+ entity
+ }));
+ });
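Registration is idempotent per `entity.type`, and each manager owns a dedicated `Scope`, so a single entity type can be torn down without disturbing the rest. A hedged sketch of the call shape; `Counter`, `buildCounterHandlers`, and the tag access are invented placeholders:

    import { Effect } from "effect"
    import { Sharding } from "@effect/cluster"

    // Placeholders: an Entity definition and the effect that builds
    // its request handlers, both defined elsewhere in a real app.
    declare const Counter: any
    declare const buildCounterHandlers: any

    const register = Effect.gen(function* () {
      const sharding = yield* Sharding.Sharding
      // Idempotent per entity type; extra options are forwarded
      // to the EntityManager.
      yield* sharding.registerEntity(Counter, buildCounterHandlers)
    })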
+ yield* Scope.addFinalizerExit(shardingScope, exit => Effect.forEach(entityManagers.values(), state => Effect.catchAllCause(Scope.close(state.scope, exit), cause => Effect.annotateLogs(Effect.logError("Error closing entity manager", cause), {
+ entity: state.entity.type
+ })), {
+ concurrency: "unbounded",
+ discard: true
+ }));
+ // --- Finalization ---
+ if (Option.isSome(config.runnerAddress)) {
+ const selfAddress = config.runnerAddress.value;
+ // Unregister runner from shard manager when scope is closed
+ yield* Scope.addFinalizer(shardingScope, Effect.gen(function* () {
+ yield* Effect.logDebug("Unregistering runner from shard manager", selfAddress);
+ yield* shardManager.unregister(selfAddress).pipe(Effect.catchAllCause(cause => Effect.logError("Error calling unregister with shard manager", cause)));
+ yield* clearSelfShards;
+ }));
+ }
+ yield* Scope.addFinalizer(shardingScope, Effect.withFiberRuntime(fiber => {
+ MutableRef.set(isShutdown, true);
+ _interruptors.internalInterruptors.add(fiber.id());
+ return Effect.void;
+ }));
+ const sharding = Sharding.of({
+ getRegistrationEvents,
+ getShardId,
+ isShutdown: Effect.sync(() => MutableRef.get(isShutdown)),
+ registerEntity,
+ registerSingleton,
+ makeClient,
+ send: sendLocal,
+ notify: message => notifyLocal(message, false)
+ });
+ return sharding;
+ });
  /**
   * @since 1.0.0
   * @category layers
   */
- const live = exports.live = internal.live;
- /**
- * Notify the shard manager that shards can now be assigned to this pod.
- *
- * @since 1.0.0
- * @category utils
- */
- const register = exports.register = internal.register;
- /**
- * Notify the shard manager that shards must be unassigned from this pod.
- *
- * @since 1.0.0
- * @category utils
- */
- const unregister = exports.unregister = internal.unregister;
- /**
- * Same as `register`, but will automatically call `unregister` when the `Scope` is terminated.
- *
- * @since 1.0.0
- * @category utils
- */
- const registerScoped = exports.registerScoped = internal.registerScoped;
- /**
- * Start a computation that is guaranteed to run only on a single pod.
- * Each pod should call `registerSingleton` but only a single pod will actually run it at any given time.
- *
- * @since 1.0.0
- * @category utils
- */
- const registerSingleton = exports.registerSingleton = internal.registerSingleton;
- /**
- * Register a new entity type, allowing pods to send messages to entities of this type.
- *
- * @since 1.0.0
- * @category utils
- */
- const registerEntity = exports.registerEntity = internal.registerEntity;
- /**
- * Register a new topic type, allowing pods to broadcast messages to subscribers.
- *
- * @since 1.0.0
- * @category utils
- */
- const registerTopic = exports.registerTopic = internal.registerTopic;
- /**
- * Get an object that allows sending messages to a given entity type.
- * You can provide a custom send timeout to override the one globally defined.
- *
- * @since 1.0.0
- * @category utils
- */
- const messenger = exports.messenger = internal.messenger;
- /**
- * Get an object that allows broadcasting messages to a given topic type.
- * You can provide a custom send timeout to override the one globally defined.
- *
- * @since 1.0.0
- * @category utils
- */
- const broadcaster = exports.broadcaster = internal.broadcaster;
- /**
- * Get the list of pods currently registered to the Shard Manager
- *
- * @since 1.0.0
- * @category utils
- */
- const getPods = exports.getPods = internal.getPods;
- /**
- * Sends a raw message to the local entity manager without performing retries.
- * Those are up to the caller.
- *
- * @since 1.0.0
- * @category utils
- */
- const sendMessageToLocalEntityManagerWithoutRetries = exports.sendMessageToLocalEntityManagerWithoutRetries = internal.sendMessageToLocalEntityManagerWithoutRetries;
- /**
- * Gets the list of shardIds assigned to the current Pod
- *
- * @since 1.0.0
- * @category utils
- */
- const getAssignedShardIds = exports.getAssignedShardIds = internal.getAssignedShardIds;
+ const layer = exports.layer = /*#__PURE__*/Layer.scoped(Sharding, make).pipe( /*#__PURE__*/Layer.provide([Snowflake.layerGenerator, _entityReaper.EntityReaper.Default]));
+ // Utilities
+ const ClientAddressTag = /*#__PURE__*/Context.GenericTag("@effect/cluster/Sharding/ClientAddress");
+ const currentClientAddress = /*#__PURE__*/Symbol.for(ClientAddressTag.key);
  //# sourceMappingURL=Sharding.js.map
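Taken together, the new `layer` export replaces the removed `live`: it builds the Sharding service in a scope and pre-provides the snowflake generator and entity reaper it depends on. A hedged sketch of wiring it into a program; `RunnerLayers` is an invented placeholder for the config, transport, and storage layers (e.g. from the SocketRunner or HttpRunner modules) that `Sharding.layer` still requires:

    import { Effect, Layer } from "effect"
    import { Sharding } from "@effect/cluster"

    // Placeholder for the runner transport/config/storage layers.
    declare const RunnerLayers: any

    const main = Effect.gen(function* () {
      const sharding = yield* Sharding.Sharding
      // `isShutdown` is an Effect in the new API, not a plain boolean.
      const shutdown = yield* sharding.isShutdown
      yield* Effect.log(`sharding ready, isShutdown=${shutdown}`)
    })

    const runnable = main.pipe(
      Effect.provide(Sharding.layer.pipe(Layer.provide(RunnerLayers)))
    )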