@effect/cluster 0.28.4 → 0.29.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (626)
  1. package/ClusterError/package.json +6 -0
  2. package/ClusterMetrics/package.json +6 -0
  3. package/ClusterSchema/package.json +6 -0
  4. package/DeliverAt/package.json +6 -0
  5. package/Entity/package.json +6 -0
  6. package/EntityAddress/package.json +6 -0
  7. package/EntityId/package.json +6 -0
  8. package/EntityType/package.json +6 -0
  9. package/Envelope/package.json +6 -0
  10. package/HttpCommon/package.json +6 -0
  11. package/HttpRunner/package.json +6 -0
  12. package/HttpShardManager/package.json +6 -0
  13. package/MachineId/package.json +6 -0
  14. package/MessageStorage/package.json +6 -0
  15. package/README.md +2 -2
  16. package/Reply/package.json +6 -0
  17. package/Runner/package.json +6 -0
  18. package/RunnerAddress/package.json +6 -0
  19. package/RunnerHealth/package.json +6 -0
  20. package/RunnerServer/package.json +6 -0
  21. package/Runners/package.json +6 -0
  22. package/ShardStorage/package.json +6 -0
  23. package/Singleton/package.json +6 -0
  24. package/SingletonAddress/package.json +6 -0
  25. package/Snowflake/package.json +6 -0
  26. package/SocketRunner/package.json +6 -0
  27. package/SocketShardManager/package.json +6 -0
  28. package/SqlMessageStorage/package.json +6 -0
  29. package/SqlShardStorage/package.json +6 -0
  30. package/SynchronizedClock/package.json +6 -0
  31. package/dist/cjs/ClusterError.js +180 -0
  32. package/dist/cjs/ClusterError.js.map +1 -0
  33. package/dist/cjs/ClusterMetrics.js +63 -0
  34. package/dist/cjs/ClusterMetrics.js.map +1 -0
  35. package/dist/cjs/{Pods.js → ClusterSchema.js} +10 -22
  36. package/dist/cjs/ClusterSchema.js.map +1 -0
  37. package/dist/cjs/DeliverAt.js +30 -0
  38. package/dist/cjs/DeliverAt.js.map +1 -0
  39. package/dist/cjs/Entity.js +187 -0
  40. package/dist/cjs/Entity.js.map +1 -0
  41. package/dist/cjs/EntityAddress.js +54 -0
  42. package/dist/cjs/EntityAddress.js.map +1 -0
  43. package/dist/cjs/{AtLeastOnce.js → EntityId.js} +6 -7
  44. package/dist/cjs/EntityId.js.map +1 -0
  45. package/dist/cjs/{ShardManagerClient.js → EntityType.js} +5 -16
  46. package/dist/cjs/EntityType.js.map +1 -0
  47. package/dist/cjs/Envelope.js +168 -0
  48. package/dist/cjs/Envelope.js.map +1 -0
  49. package/dist/cjs/HttpCommon.js +49 -0
  50. package/dist/cjs/HttpCommon.js.map +1 -0
  51. package/dist/cjs/HttpRunner.js +108 -0
  52. package/dist/cjs/HttpRunner.js.map +1 -0
  53. package/dist/cjs/HttpShardManager.js +140 -0
  54. package/dist/cjs/HttpShardManager.js.map +1 -0
  55. package/dist/cjs/{AtLeastOnceStorage.js → MachineId.js} +11 -9
  56. package/dist/cjs/MachineId.js.map +1 -0
  57. package/dist/cjs/Message.js +99 -18
  58. package/dist/cjs/Message.js.map +1 -1
  59. package/dist/cjs/MessageStorage.js +356 -0
  60. package/dist/cjs/MessageStorage.js.map +1 -0
  61. package/dist/cjs/Reply.js +200 -0
  62. package/dist/cjs/Reply.js.map +1 -0
  63. package/dist/cjs/Runner.js +79 -0
  64. package/dist/cjs/Runner.js.map +1 -0
  65. package/dist/cjs/RunnerAddress.js +63 -0
  66. package/dist/cjs/RunnerAddress.js.map +1 -0
  67. package/dist/cjs/RunnerHealth.js +68 -0
  68. package/dist/cjs/RunnerHealth.js.map +1 -0
  69. package/dist/cjs/RunnerServer.js +125 -0
  70. package/dist/cjs/RunnerServer.js.map +1 -0
  71. package/dist/cjs/Runners.js +344 -0
  72. package/dist/cjs/Runners.js.map +1 -0
  73. package/dist/cjs/ShardId.js +7 -46
  74. package/dist/cjs/ShardId.js.map +1 -1
  75. package/dist/cjs/ShardManager.js +493 -8
  76. package/dist/cjs/ShardManager.js.map +1 -1
  77. package/dist/cjs/ShardStorage.js +139 -0
  78. package/dist/cjs/ShardStorage.js.map +1 -0
  79. package/dist/cjs/Sharding.js +731 -91
  80. package/dist/cjs/Sharding.js.map +1 -1
  81. package/dist/cjs/ShardingConfig.js +85 -18
  82. package/dist/cjs/ShardingConfig.js.map +1 -1
  83. package/dist/cjs/ShardingRegistrationEvent.js +26 -32
  84. package/dist/cjs/ShardingRegistrationEvent.js.map +1 -1
  85. package/dist/cjs/{ManagerConfig.js → Singleton.js} +11 -20
  86. package/dist/cjs/Singleton.js.map +1 -0
  87. package/dist/cjs/SingletonAddress.js +50 -0
  88. package/dist/cjs/SingletonAddress.js.map +1 -0
  89. package/dist/cjs/Snowflake.js +133 -0
  90. package/dist/cjs/Snowflake.js.map +1 -0
  91. package/dist/cjs/SocketRunner.js +40 -0
  92. package/dist/cjs/SocketRunner.js.map +1 -0
  93. package/dist/cjs/SocketShardManager.js +33 -0
  94. package/dist/cjs/SocketShardManager.js.map +1 -0
  95. package/dist/cjs/SqlMessageStorage.js +668 -0
  96. package/dist/cjs/SqlMessageStorage.js.map +1 -0
  97. package/dist/cjs/SqlShardStorage.js +228 -0
  98. package/dist/cjs/SqlShardStorage.js.map +1 -0
  99. package/dist/cjs/SynchronizedClock.js +66 -0
  100. package/dist/cjs/SynchronizedClock.js.map +1 -0
  101. package/dist/cjs/index.js +57 -45
  102. package/dist/cjs/internal/entityManager.js +311 -143
  103. package/dist/cjs/internal/entityManager.js.map +1 -1
  104. package/dist/cjs/internal/entityReaper.js +47 -0
  105. package/dist/cjs/internal/entityReaper.js.map +1 -0
  106. package/dist/cjs/internal/hash.js +20 -0
  107. package/dist/cjs/internal/hash.js.map +1 -0
  108. package/dist/cjs/internal/interruptors.js +9 -0
  109. package/dist/cjs/internal/interruptors.js.map +1 -0
  110. package/dist/cjs/internal/resourceMap.js +88 -0
  111. package/dist/cjs/internal/resourceMap.js.map +1 -0
  112. package/dist/cjs/internal/resourceRef.js +92 -0
  113. package/dist/cjs/internal/resourceRef.js.map +1 -0
  114. package/dist/cjs/internal/shardManager.js +219 -235
  115. package/dist/cjs/internal/shardManager.js.map +1 -1
  116. package/dist/dts/ClusterError.d.ts +169 -0
  117. package/dist/dts/ClusterError.d.ts.map +1 -0
  118. package/dist/dts/ClusterMetrics.d.ts +50 -0
  119. package/dist/dts/ClusterMetrics.d.ts.map +1 -0
  120. package/dist/dts/ClusterSchema.d.ts +13 -0
  121. package/dist/dts/ClusterSchema.d.ts.map +1 -0
  122. package/dist/dts/DeliverAt.d.ts +27 -0
  123. package/dist/dts/DeliverAt.d.ts.map +1 -0
  124. package/dist/dts/Entity.d.ts +180 -0
  125. package/dist/dts/Entity.d.ts.map +1 -0
  126. package/dist/dts/EntityAddress.d.ts +55 -0
  127. package/dist/dts/EntityAddress.d.ts.map +1 -0
  128. package/dist/dts/EntityId.d.ts +15 -0
  129. package/dist/dts/EntityId.d.ts.map +1 -0
  130. package/dist/dts/EntityType.d.ts +15 -0
  131. package/dist/dts/EntityType.d.ts.map +1 -0
  132. package/dist/dts/Envelope.d.ts +252 -0
  133. package/dist/dts/Envelope.d.ts.map +1 -0
  134. package/dist/dts/HttpCommon.d.ts +25 -0
  135. package/dist/dts/HttpCommon.d.ts.map +1 -0
  136. package/dist/dts/HttpRunner.d.ts +76 -0
  137. package/dist/dts/HttpRunner.d.ts.map +1 -0
  138. package/dist/dts/HttpShardManager.d.ts +119 -0
  139. package/dist/dts/HttpShardManager.d.ts.map +1 -0
  140. package/dist/dts/MachineId.d.ts +20 -0
  141. package/dist/dts/MachineId.d.ts.map +1 -0
  142. package/dist/dts/Message.d.ts +91 -74
  143. package/dist/dts/Message.d.ts.map +1 -1
  144. package/dist/dts/MessageStorage.d.ts +336 -0
  145. package/dist/dts/MessageStorage.d.ts.map +1 -0
  146. package/dist/dts/Reply.d.ts +171 -0
  147. package/dist/dts/Reply.d.ts.map +1 -0
  148. package/dist/dts/Runner.d.ts +81 -0
  149. package/dist/dts/Runner.d.ts.map +1 -0
  150. package/dist/dts/RunnerAddress.d.ts +56 -0
  151. package/dist/dts/RunnerAddress.d.ts.map +1 -0
  152. package/dist/dts/RunnerHealth.d.ts +54 -0
  153. package/dist/dts/RunnerHealth.d.ts.map +1 -0
  154. package/dist/dts/RunnerServer.d.ts +44 -0
  155. package/dist/dts/RunnerServer.d.ts.map +1 -0
  156. package/dist/dts/Runners.d.ts +161 -0
  157. package/dist/dts/Runners.d.ts.map +1 -0
  158. package/dist/dts/ShardId.d.ts +5 -55
  159. package/dist/dts/ShardId.d.ts.map +1 -1
  160. package/dist/dts/ShardManager.d.ts +435 -23
  161. package/dist/dts/ShardManager.d.ts.map +1 -1
  162. package/dist/dts/ShardStorage.d.ts +200 -0
  163. package/dist/dts/ShardStorage.d.ts.map +1 -0
  164. package/dist/dts/Sharding.d.ts +64 -133
  165. package/dist/dts/Sharding.d.ts.map +1 -1
  166. package/dist/dts/ShardingConfig.d.ts +147 -44
  167. package/dist/dts/ShardingConfig.d.ts.map +1 -1
  168. package/dist/dts/ShardingRegistrationEvent.d.ts +38 -23
  169. package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -1
  170. package/dist/dts/Singleton.d.ts +13 -0
  171. package/dist/dts/Singleton.d.ts.map +1 -0
  172. package/dist/dts/SingletonAddress.d.ts +49 -0
  173. package/dist/dts/SingletonAddress.d.ts.map +1 -0
  174. package/dist/dts/Snowflake.d.ts +121 -0
  175. package/dist/dts/Snowflake.d.ts.map +1 -0
  176. package/dist/dts/SocketRunner.d.ts +22 -0
  177. package/dist/dts/SocketRunner.d.ts.map +1 -0
  178. package/dist/dts/SocketShardManager.d.ts +17 -0
  179. package/dist/dts/SocketShardManager.d.ts.map +1 -0
  180. package/dist/dts/SqlMessageStorage.d.ts +43 -0
  181. package/dist/dts/SqlMessageStorage.d.ts.map +1 -0
  182. package/dist/dts/SqlShardStorage.d.ts +38 -0
  183. package/dist/dts/SqlShardStorage.d.ts.map +1 -0
  184. package/dist/dts/SynchronizedClock.d.ts +19 -0
  185. package/dist/dts/SynchronizedClock.d.ts.map +1 -0
  186. package/dist/dts/index.d.ts +48 -24
  187. package/dist/dts/index.d.ts.map +1 -1
  188. package/dist/dts/internal/entityReaper.d.ts +2 -0
  189. package/dist/dts/internal/entityReaper.d.ts.map +1 -0
  190. package/dist/dts/internal/hash.d.ts +2 -0
  191. package/dist/dts/internal/hash.d.ts.map +1 -0
  192. package/dist/dts/internal/interruptors.d.ts +2 -0
  193. package/dist/dts/internal/interruptors.d.ts.map +1 -0
  194. package/dist/dts/internal/resourceMap.d.ts +22 -0
  195. package/dist/dts/internal/resourceMap.d.ts.map +1 -0
  196. package/dist/dts/internal/resourceRef.d.ts +25 -0
  197. package/dist/dts/internal/resourceRef.d.ts.map +1 -0
  198. package/dist/dts/internal/shardManager.d.ts +1 -11
  199. package/dist/dts/internal/shardManager.d.ts.map +1 -1
  200. package/dist/esm/ClusterError.js +164 -0
  201. package/dist/esm/ClusterError.js.map +1 -0
  202. package/dist/esm/ClusterMetrics.js +54 -0
  203. package/dist/esm/ClusterMetrics.js.map +1 -0
  204. package/dist/esm/ClusterSchema.js +13 -0
  205. package/dist/esm/ClusterSchema.js.map +1 -0
  206. package/dist/esm/DeliverAt.js +22 -0
  207. package/dist/esm/DeliverAt.js.map +1 -0
  208. package/dist/esm/Entity.js +173 -0
  209. package/dist/esm/Entity.js.map +1 -0
  210. package/dist/esm/EntityAddress.js +44 -0
  211. package/dist/esm/EntityAddress.js.map +1 -0
  212. package/dist/esm/EntityId.js +10 -0
  213. package/dist/esm/EntityId.js.map +1 -0
  214. package/dist/esm/EntityType.js +10 -0
  215. package/dist/esm/EntityType.js.map +1 -0
  216. package/dist/esm/Envelope.js +154 -0
  217. package/dist/esm/Envelope.js.map +1 -0
  218. package/dist/esm/HttpCommon.js +38 -0
  219. package/dist/esm/HttpCommon.js.map +1 -0
  220. package/dist/esm/HttpRunner.js +98 -0
  221. package/dist/esm/HttpRunner.js.map +1 -0
  222. package/dist/esm/HttpShardManager.js +128 -0
  223. package/dist/esm/HttpShardManager.js.map +1 -0
  224. package/dist/esm/MachineId.js +17 -0
  225. package/dist/esm/MachineId.js.map +1 -0
  226. package/dist/esm/Message.js +88 -17
  227. package/dist/esm/Message.js.map +1 -1
  228. package/dist/esm/MessageStorage.js +345 -0
  229. package/dist/esm/MessageStorage.js.map +1 -0
  230. package/dist/esm/Reply.js +184 -0
  231. package/dist/esm/Reply.js.map +1 -0
  232. package/dist/esm/Runner.js +68 -0
  233. package/dist/esm/Runner.js.map +1 -0
  234. package/dist/esm/RunnerAddress.js +52 -0
  235. package/dist/esm/RunnerAddress.js.map +1 -0
  236. package/dist/esm/RunnerHealth.js +58 -0
  237. package/dist/esm/RunnerHealth.js.map +1 -0
  238. package/dist/esm/RunnerServer.js +116 -0
  239. package/dist/esm/RunnerServer.js.map +1 -0
  240. package/dist/esm/Runners.js +332 -0
  241. package/dist/esm/Runners.js.map +1 -0
  242. package/dist/esm/ShardId.js +5 -42
  243. package/dist/esm/ShardId.js.map +1 -1
  244. package/dist/esm/ShardManager.js +486 -7
  245. package/dist/esm/ShardManager.js.map +1 -1
  246. package/dist/esm/ShardStorage.js +129 -0
  247. package/dist/esm/ShardStorage.js.map +1 -0
  248. package/dist/esm/Sharding.js +729 -90
  249. package/dist/esm/Sharding.js.map +1 -1
  250. package/dist/esm/ShardingConfig.js +80 -17
  251. package/dist/esm/ShardingConfig.js.map +1 -1
  252. package/dist/esm/ShardingRegistrationEvent.js +19 -29
  253. package/dist/esm/ShardingRegistrationEvent.js.map +1 -1
  254. package/dist/esm/Singleton.js +15 -0
  255. package/dist/esm/Singleton.js.map +1 -0
  256. package/dist/esm/SingletonAddress.js +40 -0
  257. package/dist/esm/SingletonAddress.js.map +1 -0
  258. package/dist/esm/Snowflake.js +117 -0
  259. package/dist/esm/Snowflake.js.map +1 -0
  260. package/dist/esm/SocketRunner.js +31 -0
  261. package/dist/esm/SocketRunner.js.map +1 -0
  262. package/dist/esm/SocketShardManager.js +24 -0
  263. package/dist/esm/SocketShardManager.js.map +1 -0
  264. package/dist/esm/SqlMessageStorage.js +658 -0
  265. package/dist/esm/SqlMessageStorage.js.map +1 -0
  266. package/dist/esm/SqlShardStorage.js +218 -0
  267. package/dist/esm/SqlShardStorage.js.map +1 -0
  268. package/dist/esm/SynchronizedClock.js +57 -0
  269. package/dist/esm/SynchronizedClock.js.map +1 -0
  270. package/dist/esm/index.js +48 -24
  271. package/dist/esm/index.js.map +1 -1
  272. package/dist/esm/internal/entityManager.js +311 -142
  273. package/dist/esm/internal/entityManager.js.map +1 -1
  274. package/dist/esm/internal/entityReaper.js +38 -0
  275. package/dist/esm/internal/entityReaper.js.map +1 -0
  276. package/dist/esm/internal/hash.js +12 -0
  277. package/dist/esm/internal/hash.js.map +1 -0
  278. package/dist/esm/internal/interruptors.js +3 -0
  279. package/dist/esm/internal/interruptors.js.map +1 -0
  280. package/dist/esm/internal/resourceMap.js +79 -0
  281. package/dist/esm/internal/resourceMap.js.map +1 -0
  282. package/dist/esm/internal/resourceRef.js +83 -0
  283. package/dist/esm/internal/resourceRef.js.map +1 -0
  284. package/dist/esm/internal/shardManager.js +217 -233
  285. package/dist/esm/internal/shardManager.js.map +1 -1
  286. package/package.json +212 -154
  287. package/src/ClusterError.ts +193 -0
  288. package/src/ClusterMetrics.ts +62 -0
  289. package/src/ClusterSchema.ts +13 -0
  290. package/src/DeliverAt.ts +36 -0
  291. package/src/Entity.ts +438 -0
  292. package/src/EntityAddress.ts +55 -0
  293. package/src/EntityId.ts +16 -0
  294. package/src/EntityType.ts +16 -0
  295. package/src/Envelope.ts +352 -0
  296. package/src/HttpCommon.ts +73 -0
  297. package/src/HttpRunner.ts +196 -0
  298. package/src/HttpShardManager.ts +273 -0
  299. package/src/MachineId.ts +27 -0
  300. package/src/Message.ts +143 -92
  301. package/src/MessageStorage.ts +697 -0
  302. package/src/Reply.ts +295 -0
  303. package/src/Runner.ts +84 -0
  304. package/src/RunnerAddress.ts +61 -0
  305. package/src/RunnerHealth.ts +87 -0
  306. package/src/RunnerServer.ts +156 -0
  307. package/src/Runners.ts +533 -0
  308. package/src/ShardId.ts +10 -62
  309. package/src/ShardManager.ts +780 -29
  310. package/src/ShardStorage.ts +289 -0
  311. package/src/Sharding.ts +1059 -186
  312. package/src/ShardingConfig.ts +186 -45
  313. package/src/ShardingRegistrationEvent.ts +38 -39
  314. package/src/Singleton.ts +20 -0
  315. package/src/SingletonAddress.ts +47 -0
  316. package/src/Snowflake.ts +194 -0
  317. package/src/SocketRunner.ts +59 -0
  318. package/src/SocketShardManager.ts +48 -0
  319. package/src/SqlMessageStorage.ts +833 -0
  320. package/src/SqlShardStorage.ts +292 -0
  321. package/src/SynchronizedClock.ts +82 -0
  322. package/src/index.ts +54 -24
  323. package/src/internal/entityManager.ts +464 -361
  324. package/src/internal/entityReaper.ts +53 -0
  325. package/src/internal/hash.ts +11 -0
  326. package/src/internal/interruptors.ts +4 -0
  327. package/src/internal/resourceMap.ts +89 -0
  328. package/src/internal/resourceRef.ts +88 -0
  329. package/src/internal/shardManager.ts +273 -546
  330. package/AtLeastOnce/package.json +0 -6
  331. package/AtLeastOnceStorage/package.json +0 -6
  332. package/Broadcaster/package.json +0 -6
  333. package/ManagerConfig/package.json +0 -6
  334. package/MessageState/package.json +0 -6
  335. package/Messenger/package.json +0 -6
  336. package/Pod/package.json +0 -6
  337. package/PodAddress/package.json +0 -6
  338. package/Pods/package.json +0 -6
  339. package/PodsHealth/package.json +0 -6
  340. package/PoisonPill/package.json +0 -6
  341. package/RecipientAddress/package.json +0 -6
  342. package/RecipientBehaviour/package.json +0 -6
  343. package/RecipientBehaviourContext/package.json +0 -6
  344. package/RecipientType/package.json +0 -6
  345. package/Serialization/package.json +0 -6
  346. package/SerializedEnvelope/package.json +0 -6
  347. package/SerializedMessage/package.json +0 -6
  348. package/ShardManagerClient/package.json +0 -6
  349. package/ShardingEvent/package.json +0 -6
  350. package/ShardingException/package.json +0 -6
  351. package/Storage/package.json +0 -6
  352. package/dist/cjs/AtLeastOnce.js.map +0 -1
  353. package/dist/cjs/AtLeastOnceStorage.js.map +0 -1
  354. package/dist/cjs/Broadcaster.js +0 -6
  355. package/dist/cjs/Broadcaster.js.map +0 -1
  356. package/dist/cjs/ManagerConfig.js.map +0 -1
  357. package/dist/cjs/MessageState.js +0 -55
  358. package/dist/cjs/MessageState.js.map +0 -1
  359. package/dist/cjs/Messenger.js +0 -6
  360. package/dist/cjs/Messenger.js.map +0 -1
  361. package/dist/cjs/Pod.js +0 -78
  362. package/dist/cjs/Pod.js.map +0 -1
  363. package/dist/cjs/PodAddress.js +0 -77
  364. package/dist/cjs/PodAddress.js.map +0 -1
  365. package/dist/cjs/Pods.js.map +0 -1
  366. package/dist/cjs/PodsHealth.js +0 -41
  367. package/dist/cjs/PodsHealth.js.map +0 -1
  368. package/dist/cjs/PoisonPill.js +0 -78
  369. package/dist/cjs/PoisonPill.js.map +0 -1
  370. package/dist/cjs/RecipientAddress.js +0 -79
  371. package/dist/cjs/RecipientAddress.js.map +0 -1
  372. package/dist/cjs/RecipientBehaviour.js +0 -38
  373. package/dist/cjs/RecipientBehaviour.js.map +0 -1
  374. package/dist/cjs/RecipientBehaviourContext.js +0 -64
  375. package/dist/cjs/RecipientBehaviourContext.js.map +0 -1
  376. package/dist/cjs/RecipientType.js +0 -123
  377. package/dist/cjs/RecipientType.js.map +0 -1
  378. package/dist/cjs/Serialization.js +0 -32
  379. package/dist/cjs/Serialization.js.map +0 -1
  380. package/dist/cjs/SerializedEnvelope.js +0 -87
  381. package/dist/cjs/SerializedEnvelope.js.map +0 -1
  382. package/dist/cjs/SerializedMessage.js +0 -64
  383. package/dist/cjs/SerializedMessage.js.map +0 -1
  384. package/dist/cjs/ShardManagerClient.js.map +0 -1
  385. package/dist/cjs/ShardingEvent.js +0 -72
  386. package/dist/cjs/ShardingEvent.js.map +0 -1
  387. package/dist/cjs/ShardingException.js +0 -107
  388. package/dist/cjs/ShardingException.js.map +0 -1
  389. package/dist/cjs/Storage.js +0 -40
  390. package/dist/cjs/Storage.js.map +0 -1
  391. package/dist/cjs/internal/atLeastOnce.js +0 -35
  392. package/dist/cjs/internal/atLeastOnce.js.map +0 -1
  393. package/dist/cjs/internal/atLeastOnceStorage.js +0 -163
  394. package/dist/cjs/internal/atLeastOnceStorage.js.map +0 -1
  395. package/dist/cjs/internal/entityState.js +0 -47
  396. package/dist/cjs/internal/entityState.js.map +0 -1
  397. package/dist/cjs/internal/managerConfig.js +0 -46
  398. package/dist/cjs/internal/managerConfig.js.map +0 -1
  399. package/dist/cjs/internal/message.js +0 -48
  400. package/dist/cjs/internal/message.js.map +0 -1
  401. package/dist/cjs/internal/messageState.js +0 -79
  402. package/dist/cjs/internal/messageState.js.map +0 -1
  403. package/dist/cjs/internal/podWithMetadata.js +0 -54
  404. package/dist/cjs/internal/podWithMetadata.js.map +0 -1
  405. package/dist/cjs/internal/pods.js +0 -35
  406. package/dist/cjs/internal/pods.js.map +0 -1
  407. package/dist/cjs/internal/podsHealth.js +0 -40
  408. package/dist/cjs/internal/podsHealth.js.map +0 -1
  409. package/dist/cjs/internal/recipientBehaviour.js +0 -52
  410. package/dist/cjs/internal/recipientBehaviour.js.map +0 -1
  411. package/dist/cjs/internal/recipientBehaviourContext.js +0 -36
  412. package/dist/cjs/internal/recipientBehaviourContext.js.map +0 -1
  413. package/dist/cjs/internal/serialization.js +0 -48
  414. package/dist/cjs/internal/serialization.js.map +0 -1
  415. package/dist/cjs/internal/shardManagerClient.js +0 -48
  416. package/dist/cjs/internal/shardManagerClient.js.map +0 -1
  417. package/dist/cjs/internal/shardManagerState.js +0 -44
  418. package/dist/cjs/internal/shardManagerState.js.map +0 -1
  419. package/dist/cjs/internal/sharding.js +0 -306
  420. package/dist/cjs/internal/sharding.js.map +0 -1
  421. package/dist/cjs/internal/shardingConfig.js +0 -56
  422. package/dist/cjs/internal/shardingConfig.js.map +0 -1
  423. package/dist/cjs/internal/storage.js +0 -52
  424. package/dist/cjs/internal/storage.js.map +0 -1
  425. package/dist/cjs/internal/utils.js +0 -69
  426. package/dist/cjs/internal/utils.js.map +0 -1
  427. package/dist/dts/AtLeastOnce.d.ts +0 -20
  428. package/dist/dts/AtLeastOnce.d.ts.map +0 -1
  429. package/dist/dts/AtLeastOnceStorage.d.ts +0 -75
  430. package/dist/dts/AtLeastOnceStorage.d.ts.map +0 -1
  431. package/dist/dts/Broadcaster.d.ts +0 -32
  432. package/dist/dts/Broadcaster.d.ts.map +0 -1
  433. package/dist/dts/ManagerConfig.d.ts +0 -61
  434. package/dist/dts/ManagerConfig.d.ts.map +0 -1
  435. package/dist/dts/MessageState.d.ts +0 -107
  436. package/dist/dts/MessageState.d.ts.map +0 -1
  437. package/dist/dts/Messenger.d.ts +0 -32
  438. package/dist/dts/Messenger.d.ts.map +0 -1
  439. package/dist/dts/Pod.d.ts +0 -81
  440. package/dist/dts/Pod.d.ts.map +0 -1
  441. package/dist/dts/PodAddress.d.ts +0 -80
  442. package/dist/dts/PodAddress.d.ts.map +0 -1
  443. package/dist/dts/Pods.d.ts +0 -78
  444. package/dist/dts/Pods.d.ts.map +0 -1
  445. package/dist/dts/PodsHealth.d.ts +0 -66
  446. package/dist/dts/PodsHealth.d.ts.map +0 -1
  447. package/dist/dts/PoisonPill.d.ts +0 -78
  448. package/dist/dts/PoisonPill.d.ts.map +0 -1
  449. package/dist/dts/RecipientAddress.d.ts +0 -57
  450. package/dist/dts/RecipientAddress.d.ts.map +0 -1
  451. package/dist/dts/RecipientBehaviour.d.ts +0 -72
  452. package/dist/dts/RecipientBehaviour.d.ts.map +0 -1
  453. package/dist/dts/RecipientBehaviourContext.d.ts +0 -83
  454. package/dist/dts/RecipientBehaviourContext.d.ts.map +0 -1
  455. package/dist/dts/RecipientType.d.ts +0 -93
  456. package/dist/dts/RecipientType.d.ts.map +0 -1
  457. package/dist/dts/Serialization.d.ts +0 -58
  458. package/dist/dts/Serialization.d.ts.map +0 -1
  459. package/dist/dts/SerializedEnvelope.d.ts +0 -86
  460. package/dist/dts/SerializedEnvelope.d.ts.map +0 -1
  461. package/dist/dts/SerializedMessage.d.ts +0 -66
  462. package/dist/dts/SerializedMessage.d.ts.map +0 -1
  463. package/dist/dts/ShardManagerClient.d.ts +0 -50
  464. package/dist/dts/ShardManagerClient.d.ts.map +0 -1
  465. package/dist/dts/ShardingEvent.d.ts +0 -90
  466. package/dist/dts/ShardingEvent.d.ts.map +0 -1
  467. package/dist/dts/ShardingException.d.ts +0 -125
  468. package/dist/dts/ShardingException.d.ts.map +0 -1
  469. package/dist/dts/Storage.d.ts +0 -78
  470. package/dist/dts/Storage.d.ts.map +0 -1
  471. package/dist/dts/internal/atLeastOnce.d.ts +0 -2
  472. package/dist/dts/internal/atLeastOnce.d.ts.map +0 -1
  473. package/dist/dts/internal/atLeastOnceStorage.d.ts +0 -2
  474. package/dist/dts/internal/atLeastOnceStorage.d.ts.map +0 -1
  475. package/dist/dts/internal/entityState.d.ts +0 -21
  476. package/dist/dts/internal/entityState.d.ts.map +0 -1
  477. package/dist/dts/internal/managerConfig.d.ts +0 -2
  478. package/dist/dts/internal/managerConfig.d.ts.map +0 -1
  479. package/dist/dts/internal/message.d.ts +0 -9
  480. package/dist/dts/internal/message.d.ts.map +0 -1
  481. package/dist/dts/internal/messageState.d.ts +0 -2
  482. package/dist/dts/internal/messageState.d.ts.map +0 -1
  483. package/dist/dts/internal/podWithMetadata.d.ts +0 -2
  484. package/dist/dts/internal/podWithMetadata.d.ts.map +0 -1
  485. package/dist/dts/internal/pods.d.ts +0 -2
  486. package/dist/dts/internal/pods.d.ts.map +0 -1
  487. package/dist/dts/internal/podsHealth.d.ts +0 -2
  488. package/dist/dts/internal/podsHealth.d.ts.map +0 -1
  489. package/dist/dts/internal/recipientBehaviour.d.ts +0 -2
  490. package/dist/dts/internal/recipientBehaviour.d.ts.map +0 -1
  491. package/dist/dts/internal/recipientBehaviourContext.d.ts +0 -2
  492. package/dist/dts/internal/recipientBehaviourContext.d.ts.map +0 -1
  493. package/dist/dts/internal/serialization.d.ts +0 -2
  494. package/dist/dts/internal/serialization.d.ts.map +0 -1
  495. package/dist/dts/internal/shardManagerClient.d.ts +0 -2
  496. package/dist/dts/internal/shardManagerClient.d.ts.map +0 -1
  497. package/dist/dts/internal/shardManagerState.d.ts +0 -26
  498. package/dist/dts/internal/shardManagerState.d.ts.map +0 -1
  499. package/dist/dts/internal/sharding.d.ts +0 -2
  500. package/dist/dts/internal/sharding.d.ts.map +0 -1
  501. package/dist/dts/internal/shardingConfig.d.ts +0 -2
  502. package/dist/dts/internal/shardingConfig.d.ts.map +0 -1
  503. package/dist/dts/internal/storage.d.ts +0 -2
  504. package/dist/dts/internal/storage.d.ts.map +0 -1
  505. package/dist/dts/internal/utils.d.ts +0 -2
  506. package/dist/dts/internal/utils.d.ts.map +0 -1
  507. package/dist/esm/AtLeastOnce.js +0 -12
  508. package/dist/esm/AtLeastOnce.js.map +0 -1
  509. package/dist/esm/AtLeastOnceStorage.js +0 -17
  510. package/dist/esm/AtLeastOnceStorage.js.map +0 -1
  511. package/dist/esm/Broadcaster.js +0 -2
  512. package/dist/esm/Broadcaster.js.map +0 -1
  513. package/dist/esm/ManagerConfig.js +0 -26
  514. package/dist/esm/ManagerConfig.js.map +0 -1
  515. package/dist/esm/MessageState.js +0 -47
  516. package/dist/esm/MessageState.js.map +0 -1
  517. package/dist/esm/Messenger.js +0 -2
  518. package/dist/esm/Messenger.js.map +0 -1
  519. package/dist/esm/Pod.js +0 -65
  520. package/dist/esm/Pod.js.map +0 -1
  521. package/dist/esm/PodAddress.js +0 -64
  522. package/dist/esm/PodAddress.js.map +0 -1
  523. package/dist/esm/Pods.js +0 -27
  524. package/dist/esm/Pods.js.map +0 -1
  525. package/dist/esm/PodsHealth.js +0 -33
  526. package/dist/esm/PodsHealth.js.map +0 -1
  527. package/dist/esm/PoisonPill.js +0 -65
  528. package/dist/esm/PoisonPill.js.map +0 -1
  529. package/dist/esm/RecipientAddress.js +0 -67
  530. package/dist/esm/RecipientAddress.js.map +0 -1
  531. package/dist/esm/RecipientBehaviour.js +0 -30
  532. package/dist/esm/RecipientBehaviour.js.map +0 -1
  533. package/dist/esm/RecipientBehaviourContext.js +0 -56
  534. package/dist/esm/RecipientBehaviourContext.js.map +0 -1
  535. package/dist/esm/RecipientType.js +0 -108
  536. package/dist/esm/RecipientType.js.map +0 -1
  537. package/dist/esm/Serialization.js +0 -24
  538. package/dist/esm/Serialization.js.map +0 -1
  539. package/dist/esm/SerializedEnvelope.js +0 -74
  540. package/dist/esm/SerializedEnvelope.js.map +0 -1
  541. package/dist/esm/SerializedMessage.js +0 -51
  542. package/dist/esm/SerializedMessage.js.map +0 -1
  543. package/dist/esm/ShardManagerClient.js +0 -22
  544. package/dist/esm/ShardManagerClient.js.map +0 -1
  545. package/dist/esm/ShardingEvent.js +0 -62
  546. package/dist/esm/ShardingEvent.js.map +0 -1
  547. package/dist/esm/ShardingException.js +0 -91
  548. package/dist/esm/ShardingException.js.map +0 -1
  549. package/dist/esm/Storage.js +0 -32
  550. package/dist/esm/Storage.js.map +0 -1
  551. package/dist/esm/internal/atLeastOnce.js +0 -26
  552. package/dist/esm/internal/atLeastOnce.js.map +0 -1
  553. package/dist/esm/internal/atLeastOnceStorage.js +0 -154
  554. package/dist/esm/internal/atLeastOnceStorage.js.map +0 -1
  555. package/dist/esm/internal/entityState.js +0 -35
  556. package/dist/esm/internal/entityState.js.map +0 -1
  557. package/dist/esm/internal/managerConfig.js +0 -38
  558. package/dist/esm/internal/managerConfig.js.map +0 -1
  559. package/dist/esm/internal/message.js +0 -35
  560. package/dist/esm/internal/message.js.map +0 -1
  561. package/dist/esm/internal/messageState.js +0 -66
  562. package/dist/esm/internal/messageState.js.map +0 -1
  563. package/dist/esm/internal/podWithMetadata.js +0 -41
  564. package/dist/esm/internal/podWithMetadata.js.map +0 -1
  565. package/dist/esm/internal/pods.js +0 -25
  566. package/dist/esm/internal/pods.js.map +0 -1
  567. package/dist/esm/internal/podsHealth.js +0 -30
  568. package/dist/esm/internal/podsHealth.js.map +0 -1
  569. package/dist/esm/internal/recipientBehaviour.js +0 -42
  570. package/dist/esm/internal/recipientBehaviour.js.map +0 -1
  571. package/dist/esm/internal/recipientBehaviourContext.js +0 -26
  572. package/dist/esm/internal/recipientBehaviourContext.js.map +0 -1
  573. package/dist/esm/internal/serialization.js +0 -38
  574. package/dist/esm/internal/serialization.js.map +0 -1
  575. package/dist/esm/internal/shardManagerClient.js +0 -38
  576. package/dist/esm/internal/shardManagerClient.js.map +0 -1
  577. package/dist/esm/internal/shardManagerState.js +0 -36
  578. package/dist/esm/internal/shardManagerState.js.map +0 -1
  579. package/dist/esm/internal/sharding.js +0 -288
  580. package/dist/esm/internal/sharding.js.map +0 -1
  581. package/dist/esm/internal/shardingConfig.js +0 -47
  582. package/dist/esm/internal/shardingConfig.js.map +0 -1
  583. package/dist/esm/internal/storage.js +0 -42
  584. package/dist/esm/internal/storage.js.map +0 -1
  585. package/dist/esm/internal/utils.js +0 -56
  586. package/dist/esm/internal/utils.js.map +0 -1
  587. package/src/AtLeastOnce.ts +0 -28
  588. package/src/AtLeastOnceStorage.ts +0 -96
  589. package/src/Broadcaster.ts +0 -48
  590. package/src/ManagerConfig.ts +0 -67
  591. package/src/MessageState.ts +0 -126
  592. package/src/Messenger.ts +0 -40
  593. package/src/Pod.ts +0 -95
  594. package/src/PodAddress.ts +0 -94
  595. package/src/Pods.ts +0 -100
  596. package/src/PodsHealth.ts +0 -74
  597. package/src/PoisonPill.ts +0 -105
  598. package/src/RecipientAddress.ts +0 -72
  599. package/src/RecipientBehaviour.ts +0 -108
  600. package/src/RecipientBehaviourContext.ts +0 -101
  601. package/src/RecipientType.ts +0 -134
  602. package/src/Serialization.ts +0 -72
  603. package/src/SerializedEnvelope.ts +0 -108
  604. package/src/SerializedMessage.ts +0 -82
  605. package/src/ShardManagerClient.ts +0 -57
  606. package/src/ShardingEvent.ts +0 -121
  607. package/src/ShardingException.ts +0 -151
  608. package/src/Storage.ts +0 -92
  609. package/src/internal/atLeastOnce.ts +0 -59
  610. package/src/internal/atLeastOnceStorage.ts +0 -218
  611. package/src/internal/entityState.ts +0 -64
  612. package/src/internal/managerConfig.ts +0 -84
  613. package/src/internal/message.ts +0 -63
  614. package/src/internal/messageState.ts +0 -98
  615. package/src/internal/podWithMetadata.ts +0 -72
  616. package/src/internal/pods.ts +0 -29
  617. package/src/internal/podsHealth.ts +0 -39
  618. package/src/internal/recipientBehaviour.ts +0 -133
  619. package/src/internal/recipientBehaviourContext.ts +0 -70
  620. package/src/internal/serialization.ts +0 -63
  621. package/src/internal/shardManagerClient.ts +0 -49
  622. package/src/internal/shardManagerState.ts +0 -80
  623. package/src/internal/sharding.ts +0 -789
  624. package/src/internal/shardingConfig.ts +0 -97
  625. package/src/internal/storage.ts +0 -60
  626. package/src/internal/utils.ts +0 -54
package/src/Sharding.ts CHANGED
@@ -1,214 +1,1087 @@
1
1
  /**
2
2
  * @since 1.0.0
3
3
  */
4
- import type * as Effect from "effect/Effect"
5
- import type * as HashSet from "effect/HashSet"
6
- import type * as Scope from "effect/Scope"
7
- import type * as Stream from "effect/Stream"
8
- import type { Broadcaster } from "./Broadcaster.js"
9
- import * as internal from "./internal/sharding.js"
10
- import type * as Message from "./Message.js"
11
- import type * as MessageState from "./MessageState.js"
12
- import type { Messenger } from "./Messenger.js"
13
- import type * as PodAddress from "./PodAddress.js"
14
- import type * as RecipientAddress from "./RecipientAddress.js"
15
- import type * as RecipientBehaviour from "./RecipientBehaviour.js"
16
- import type * as RecipientBehaviourContext from "./RecipientBehaviourContext.js"
17
- import type * as RecipentType from "./RecipientType.js"
18
- import type * as SerializedEnvelope from "./SerializedEnvelope.js"
19
- import type * as SerializedMessage from "./SerializedMessage.js"
20
- import type * as ShardId from "./ShardId.js"
21
- import type * as ShardingException from "./ShardingException.js"
22
- import type * as ShardingRegistrationEvent from "./ShardingRegistrationEvent.js"
4
+ import type * as Rpc from "@effect/rpc/Rpc"
5
+ import * as RpcClient from "@effect/rpc/RpcClient"
6
+ import { type FromServer, RequestId } from "@effect/rpc/RpcMessage"
7
+ import * as Arr from "effect/Array"
8
+ import * as Cause from "effect/Cause"
9
+ import * as Context from "effect/Context"
10
+ import type { DurationInput } from "effect/Duration"
11
+ import * as Effect from "effect/Effect"
12
+ import * as Equal from "effect/Equal"
13
+ import * as Fiber from "effect/Fiber"
14
+ import * as FiberHandle from "effect/FiberHandle"
15
+ import * as FiberMap from "effect/FiberMap"
16
+ import * as FiberRef from "effect/FiberRef"
17
+ import { constant } from "effect/Function"
18
+ import * as HashMap from "effect/HashMap"
19
+ import * as Iterable from "effect/Iterable"
20
+ import * as Layer from "effect/Layer"
21
+ import * as MutableHashMap from "effect/MutableHashMap"
22
+ import * as MutableRef from "effect/MutableRef"
23
+ import * as Option from "effect/Option"
24
+ import * as Predicate from "effect/Predicate"
25
+ import * as PubSub from "effect/PubSub"
26
+ import * as Schedule from "effect/Schedule"
27
+ import * as Scope from "effect/Scope"
28
+ import * as Stream from "effect/Stream"
29
+ import type { AlreadyProcessingMessage, MailboxFull, PersistenceError } from "./ClusterError.js"
30
+ import { EntityNotManagedByRunner, RunnerUnavailable } from "./ClusterError.js"
31
+ import { Persisted } from "./ClusterSchema.js"
32
+ import type { CurrentAddress, Entity, HandlersFrom } from "./Entity.js"
33
+ import { EntityAddress } from "./EntityAddress.js"
34
+ import { EntityId } from "./EntityId.js"
35
+ import type { EntityType } from "./EntityType.js"
36
+ import * as Envelope from "./Envelope.js"
37
+ import * as EntityManager from "./internal/entityManager.js"
38
+ import { EntityReaper } from "./internal/entityReaper.js"
39
+ import { hashString } from "./internal/hash.js"
40
+ import { internalInterruptors } from "./internal/interruptors.js"
41
+ import { ResourceMap } from "./internal/resourceMap.js"
42
+ import * as Message from "./Message.js"
43
+ import * as MessageStorage from "./MessageStorage.js"
44
+ import * as Reply from "./Reply.js"
45
+ import type { RunnerAddress } from "./RunnerAddress.js"
46
+ import { Runners } from "./Runners.js"
47
+ import { ShardId } from "./ShardId.js"
48
+ import { ShardingConfig } from "./ShardingConfig.js"
49
+ import { EntityRegistered, type ShardingRegistrationEvent, SingletonRegistered } from "./ShardingRegistrationEvent.js"
50
+ import { ShardManagerClient } from "./ShardManager.js"
51
+ import { ShardStorage } from "./ShardStorage.js"
52
+ import { SingletonAddress } from "./SingletonAddress.js"
53
+ import * as Snowflake from "./Snowflake.js"
23
54
 
24
55
  /**
25
56
  * @since 1.0.0
26
- * @category symbols
57
+ * @category models
27
58
  */
28
- export const ShardingTypeId: unique symbol = internal.ShardingTypeId
59
+ export class Sharding extends Context.Tag("@effect/cluster/Sharding")<Sharding, {
60
+ /**
61
+ * Returns a stream of events that occur when the runner registers entities or
62
+ * singletons.
63
+ */
64
+ readonly getRegistrationEvents: Stream.Stream<ShardingRegistrationEvent>
29
65
 
30
- /**
31
- * @since 1.0.0
32
- * @category symbols
33
- */
34
- export type ShardingTypeId = typeof ShardingTypeId
66
+ /**
67
+ * Returns the `ShardId` of the shard to which the entity at the specified
68
+ * `address` is assigned.
69
+ */
70
+ readonly getShardId: (entityId: EntityId) => ShardId
35
71
 
36
- /**
37
- * @since 1.0.0
38
- * @category models
39
- */
40
- export interface Sharding {
41
- readonly [ShardingTypeId]: ShardingTypeId
42
- readonly register: Effect.Effect<void>
43
- readonly unregister: Effect.Effect<void>
44
- readonly messenger: <Msg extends Message.Message.Any>(
45
- entityType: RecipentType.EntityType<Msg>
46
- ) => Messenger<Msg>
47
- readonly broadcaster: <Msg extends Message.Message.Any>(
48
- topicType: RecipentType.TopicType<Msg>
49
- ) => Broadcaster<Msg>
50
- readonly isEntityOnLocalShards: (
51
- recipientAddress: RecipientAddress.RecipientAddress
52
- ) => Effect.Effect<boolean>
53
- readonly isShuttingDown: Effect.Effect<boolean>
54
-
55
- readonly registerScoped: Effect.Effect<void, never, Scope.Scope>
56
- readonly registerEntity: <Msg extends Message.Message.Any>(
57
- entityType: RecipentType.EntityType<Msg>
58
- ) => <R>(
59
- behaviour: RecipientBehaviour.RecipientBehaviour<Msg, R>,
60
- options?: RecipientBehaviour.EntityBehaviourOptions
61
- ) => Effect.Effect<void, never, Exclude<R, RecipientBehaviourContext.RecipientBehaviourContext>>
62
- readonly registerTopic: <Msg extends Message.Message.Any>(
63
- topicType: RecipentType.TopicType<Msg>
64
- ) => <R>(
65
- behaviour: RecipientBehaviour.RecipientBehaviour<Msg, R>,
66
- options?: RecipientBehaviour.EntityBehaviourOptions
67
- ) => Effect.Effect<void, never, Exclude<R, RecipientBehaviourContext.RecipientBehaviourContext>>
68
- readonly getShardingRegistrationEvents: Stream.Stream<ShardingRegistrationEvent.ShardingRegistrationEvent>
69
- readonly registerSingleton: <R>(name: string, run: Effect.Effect<void, never, R>) => Effect.Effect<void, never, R>
70
- readonly assign: (shards: HashSet.HashSet<ShardId.ShardId>) => Effect.Effect<void>
71
- readonly unassign: (shards: HashSet.HashSet<ShardId.ShardId>) => Effect.Effect<void>
72
- readonly sendMessageToLocalEntityManagerWithoutRetries: (
73
- message: SerializedEnvelope.SerializedEnvelope
72
+ /**
73
+ * Returns `true` if sharding is shutting down, `false` otherwise.
74
+ */
75
+ readonly isShutdown: Effect.Effect<boolean>
76
+
77
+ /**
78
+ * Constructs a `RpcClient` which can be used to send messages to the
79
+ * specified `Entity`.
80
+ */
81
+ readonly makeClient: <Rpcs extends Rpc.Any>(
82
+ entity: Entity<Rpcs>
74
83
  ) => Effect.Effect<
75
- MessageState.MessageState<SerializedMessage.SerializedMessage>,
76
- ShardingException.ShardingException
84
+ (entityId: string) => RpcClient.RpcClient<Rpcs, MailboxFull | AlreadyProcessingMessage | PersistenceError>
77
85
  >
78
- readonly getPods: Effect.Effect<HashSet.HashSet<PodAddress.PodAddress>>
79
- readonly getAssignedShardIds: Effect.Effect<HashSet.HashSet<ShardId.ShardId>>
80
- /** @internal */
81
- readonly refreshAssignments: Effect.Effect<void, never, Scope.Scope>
82
- /** @internal */
83
- readonly getShardId: (recipientAddress: RecipientAddress.RecipientAddress) => ShardId.ShardId
86
+
87
+ /**
88
+ * Registers a new entity with the runner.
89
+ */
90
+ readonly registerEntity: <Rpcs extends Rpc.Any, Handlers extends HandlersFrom<Rpcs>, RX>(
91
+ entity: Entity<Rpcs>,
92
+ handlers: Effect.Effect<Handlers, never, RX>,
93
+ options?: {
94
+ readonly maxIdleTime?: DurationInput | undefined
95
+ readonly concurrency?: number | "unbounded" | undefined
96
+ readonly mailboxCapacity?: number | "unbounded" | undefined
97
+ }
98
+ ) => Effect.Effect<void, never, Rpc.Context<Rpcs> | Rpc.Middleware<Rpcs> | Exclude<RX, Scope.Scope | CurrentAddress>>
99
+
100
+ /**
101
+ * Registers a new singleton with the runner.
102
+ */
103
+ readonly registerSingleton: <E, R>(
104
+ name: string,
105
+ run: Effect.Effect<void, E, R>
106
+ ) => Effect.Effect<void, never, Exclude<R, Scope.Scope>>
107
+
108
+ /**
109
+ * Sends a message to the specified entity.
110
+ */
111
+ readonly send: (message: Message.Incoming<any>) => Effect.Effect<
112
+ void,
113
+ EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage
114
+ >
115
+
116
+ /**
117
+ * Notify sharding that a message has been persisted to storage.
118
+ */
119
+ readonly notify: (message: Message.Incoming<any>) => Effect.Effect<
120
+ void,
121
+ EntityNotManagedByRunner
122
+ >
123
+ }>() {}
124
+
125
+ // -----------------------------------------------------------------------------
126
+ // Implementation
127
+ // -----------------------------------------------------------------------------
128
+
129
+ interface EntityManagerState {
130
+ readonly entity: Entity<any>
131
+ readonly scope: Scope.CloseableScope
132
+ readonly manager: EntityManager.EntityManager
84
133
  }
85
134
 
86
- /**
87
- * @since 1.0.0
88
- * @category context
89
- */
90
- export const Tag = internal.shardingTag
135
+ const make = Effect.gen(function*() {
136
+ const config = yield* ShardingConfig
91
137
 
92
- /**
93
- * @since 1.0.0
94
- * @category layers
95
- */
96
- export const live = internal.live
138
+ const runners = yield* Runners
139
+ const shardManager = yield* ShardManagerClient
140
+ const snowflakeGen = yield* Snowflake.Generator
141
+ const shardingScope = yield* Effect.scope
142
+ const isShutdown = MutableRef.make(false)
97
143
 
98
- /**
99
- * Notify the shard manager that shards can now be assigned to this pod.
100
- *
101
- * @since 1.0.0
102
- * @category utils
103
- */
104
- export const register: Effect.Effect<void, never, Sharding> = internal.register
144
+ const storage = yield* MessageStorage.MessageStorage
145
+ const storageEnabled = storage !== MessageStorage.noop
146
+ const shardStorage = yield* ShardStorage
105
147
 
106
- /**
107
- * Notify the shard manager that shards must be unassigned from this pod.
108
- *
109
- * @since 1.0.0
110
- * @category utils
111
- */
112
- export const unregister: Effect.Effect<void, never, Sharding> = internal.unregister
148
+ const entityManagers = new Map<EntityType, EntityManagerState>()
113
149
 
114
- /**
115
- * Same as `register`, but will automatically call `unregister` when the `Scope` is terminated.
116
- *
117
- * @since 1.0.0
118
- * @category utils
119
- */
120
- export const registerScoped: Effect.Effect<void, never, Scope.Scope | Sharding> = internal.registerScoped
150
+ const shardAssignments = MutableHashMap.empty<ShardId, RunnerAddress>()
151
+ const selfShards = new Set<ShardId>()
121
152
 
122
- /**
123
- * Start a computation that is guaranteed to run only on a single pod.
124
- * Each pod should call `registerSingleton` but only a single pod will actually run it at any given time.
125
- *
126
- * @since 1.0.0
127
- * @category utils
128
- */
129
- export const registerSingleton: <R>(
130
- name: string,
131
- run: Effect.Effect<void, never, R>
132
- ) => Effect.Effect<void, never, Sharding | R> = internal.registerSingleton
153
+ // the active shards are the ones that we have acquired the lock for
154
+ const acquiredShards = new Set<ShardId>()
155
+ const activeShardsLatch = yield* Effect.makeLatch(false)
133
156
 
134
- /**
135
- * Register a new entity type, allowing pods to send messages to entities of this type.
136
- *
137
- * @since 1.0.0
138
- * @category utils
139
- */
140
- export const registerEntity: <Msg extends Message.Message.Any>(
141
- entityType: RecipentType.EntityType<Msg>
142
- ) => <R>(
143
- behavior: RecipientBehaviour.RecipientBehaviour<Msg, R>,
144
- options?: RecipientBehaviour.EntityBehaviourOptions | undefined
145
- ) => Effect.Effect<void, never, Sharding | Exclude<R, RecipientBehaviourContext.RecipientBehaviourContext>> =
146
- internal.registerEntity
157
+ const events = yield* PubSub.unbounded<ShardingRegistrationEvent>()
158
+ const getRegistrationEvents: Stream.Stream<ShardingRegistrationEvent> = Stream.fromPubSub(events)
147
159
 
148
- /**
149
- * Register a new topic type, allowing pods to broadcast messages to subscribers.
150
- *
151
- * @since 1.0.0
152
- * @category utils
153
- */
154
- export const registerTopic: <Msg extends Message.Message.Any>(
155
- topicType: RecipentType.TopicType<Msg>
156
- ) => <R>(
157
- behavior: RecipientBehaviour.RecipientBehaviour<Msg, R>,
158
- options?: RecipientBehaviour.EntityBehaviourOptions | undefined
159
- ) => Effect.Effect<void, never, Sharding | Exclude<R, RecipientBehaviourContext.RecipientBehaviourContext>> =
160
- internal.registerTopic
160
+ const isLocalRunner = (address: RunnerAddress) =>
161
+ Option.isSome(config.runnerAddress) && Equal.equals(address, config.runnerAddress.value)
161
162
 
162
- /**
163
- * Get an object that allows sending messages to a given entity type.
164
- * You can provide a custom send timeout to override the one globally defined.
165
- *
166
- * @since 1.0.0
167
- * @category utils
168
- */
169
- export const messenger: <Msg extends Message.Message.Any>(
170
- entityType: RecipentType.EntityType<Msg>
171
- ) => Effect.Effect<Messenger<Msg>, never, Sharding> = internal.messenger
163
+ function getShardId(entityId: EntityId): ShardId {
164
+ return ShardId.make((Math.abs(hashString(entityId) % config.numberOfShards)) + 1)
165
+ }
172
166
 
173
- /**
174
- * Get an object that allows broadcasting messages to a given topic type.
175
- * You can provide a custom send timeout to override the one globally defined.
176
- *
177
- * @since 1.0.0
178
- * @category utils
179
- */
180
- export const broadcaster: <Msg extends Message.Message.Any>(
181
- topicType: RecipentType.TopicType<Msg>
182
- ) => Effect.Effect<Broadcaster<Msg>, never, Sharding> = internal.broadcaster
167
+ function isEntityOnLocalShards(address: EntityAddress): boolean {
168
+ return acquiredShards.has(address.shardId)
169
+ }
183
170
 
184
- /**
185
- * Get the list of pods currently registered to the Shard Manager
186
- *
187
- * @since 1.0.0
188
- * @category utils
189
- */
190
- export const getPods: Effect.Effect<HashSet.HashSet<PodAddress.PodAddress>, never, Sharding> = internal.getPods
171
+ // --- Shard acquisition ---
191
172
 
192
- /**
193
- * Sends a raw message to the local entity manager without performing reties.
194
- * Those are up to the caller.
195
- *
196
- * @since 1.0.0
197
- * @category utils
198
- */
199
- export const sendMessageToLocalEntityManagerWithoutRetries: (
200
- message: SerializedEnvelope.SerializedEnvelope
201
- ) => Effect.Effect<
202
- MessageState.MessageState<SerializedMessage.SerializedMessage>,
203
- ShardingException.ShardingException,
204
- Sharding
205
- > = internal.sendMessageToLocalEntityManagerWithoutRetries
173
+ if (Option.isSome(config.runnerAddress)) {
174
+ const selfAddress = config.runnerAddress.value
175
+ yield* Scope.addFinalizerExit(shardingScope, () => {
176
+ // the locks expire over time, so if this fails we ignore it
177
+ return Effect.ignore(shardStorage.releaseAll(selfAddress))
178
+ })
179
+
180
+ const releasingShards = new Set<ShardId>()
181
+ yield* Effect.gen(function*() {
182
+ while (true) {
183
+ yield* activeShardsLatch.await
184
+
185
+ // if a shard is no longer assigned to this runner, we release it
186
+ for (const shardId of acquiredShards) {
187
+ if (selfShards.has(shardId)) continue
188
+ acquiredShards.delete(shardId)
189
+ releasingShards.add(shardId)
190
+ }
191
+ // if a shard has been assigned to this runner, we acquire it
192
+ const unacquiredShards = new Set<ShardId>()
193
+ for (const shardId of selfShards) {
194
+ if (acquiredShards.has(shardId) || releasingShards.has(shardId)) continue
195
+ unacquiredShards.add(shardId)
196
+ }
197
+
198
+ if (releasingShards.size > 0) {
199
+ yield* Effect.forkIn(syncSingletons, shardingScope)
200
+ yield* releaseShards
201
+ }
202
+
203
+ if (unacquiredShards.size === 0) {
204
+ yield* activeShardsLatch.close
205
+ continue
206
+ }
207
+
208
+ const acquired = yield* shardStorage.acquire(selfAddress, unacquiredShards)
209
+ for (const shardId of acquired) {
210
+ acquiredShards.add(shardId)
211
+ }
212
+ if (acquired.length > 0) {
213
+ yield* storageReadLatch.open
214
+ yield* Effect.forkIn(syncSingletons, shardingScope)
215
+ }
216
+ yield* Effect.sleep(1000)
217
+ }
218
+ }).pipe(
219
+ Effect.catchAllCause((cause) => Effect.logWarning("Could not acquire/release shards", cause)),
220
+ Effect.repeat(Schedule.spaced(config.entityMessagePollInterval)),
221
+ Effect.annotateLogs({
222
+ package: "@effect/cluster",
223
+ module: "Sharding",
224
+ fiber: "Shard acquisition loop",
225
+ runner: selfAddress
226
+ }),
227
+ Effect.interruptible,
228
+ Effect.forkIn(shardingScope)
229
+ )
230
+
231
+ // refresh the shard locks every minute
232
+ yield* Effect.suspend(() =>
233
+ shardStorage.refresh(selfAddress, [
234
+ ...acquiredShards,
235
+ ...releasingShards
236
+ ])
237
+ ).pipe(
238
+ Effect.flatMap((acquired) => {
239
+ for (const shardId of acquiredShards) {
240
+ if (!acquired.includes(shardId)) {
241
+ acquiredShards.delete(shardId)
242
+ releasingShards.add(shardId)
243
+ }
244
+ }
245
+ return releasingShards.size > 0 ?
246
+ Effect.andThen(
247
+ Effect.forkIn(syncSingletons, shardingScope),
248
+ releaseShards
249
+ ) :
250
+ Effect.void
251
+ }),
252
+ Effect.retry({
253
+ times: 5,
254
+ schedule: Schedule.spaced(250)
255
+ }),
256
+ Effect.catchAllCause((cause) =>
257
+ Effect.logError("Could not refresh shard locks", cause).pipe(
258
+ Effect.andThen(clearSelfShards)
259
+ )
260
+ ),
261
+ Effect.delay("1 minute"),
262
+ Effect.forever,
263
+ Effect.interruptible,
264
+ Effect.forkIn(shardingScope)
265
+ )
266
+
267
+ const releaseShardsLock = Effect.unsafeMakeSemaphore(1).withPermits(1)
268
+ const releaseShards = releaseShardsLock(
269
+ Effect.suspend(() =>
270
+ Effect.forEach(
271
+ releasingShards,
272
+ (shardId) =>
273
+ Effect.forEach(
274
+ entityManagers.values(),
275
+ (state) => state.manager.interruptShard(shardId),
276
+ { concurrency: "unbounded", discard: true }
277
+ ).pipe(
278
+ Effect.andThen(shardStorage.release(selfAddress, shardId)),
279
+ Effect.annotateLogs({
280
+ runner: selfAddress
281
+ }),
282
+ Effect.andThen(() => {
283
+ releasingShards.delete(shardId)
284
+ })
285
+ ),
286
+ { concurrency: "unbounded", discard: true }
287
+ )
288
+ )
289
+ )
290
+ }
291
+
292
+ const clearSelfShards = Effect.suspend(() => {
293
+ selfShards.clear()
294
+ return activeShardsLatch.open
295
+ })
296
+
297
+ // --- Singletons ---
298
+
299
+ const singletons = new Map<ShardId, MutableHashMap.MutableHashMap<SingletonAddress, Effect.Effect<void>>>()
300
+ const singletonFibers = yield* FiberMap.make<SingletonAddress>()
301
+ const withSingletonLock = Effect.unsafeMakeSemaphore(1).withPermits(1)
302
+
303
+ const registerSingleton: Sharding["Type"]["registerSingleton"] = Effect.fnUntraced(
304
+ function*(name, run) {
305
+ const address = new SingletonAddress({
306
+ shardId: getShardId(EntityId.make(name)),
307
+ name
308
+ })
309
+
310
+ let map = singletons.get(address.shardId)
311
+ if (!map) {
312
+ map = MutableHashMap.empty()
313
+ singletons.set(address.shardId, map)
314
+ }
315
+ if (MutableHashMap.has(map, address)) {
316
+ return yield* Effect.dieMessage(`Singleton '${name}' is already registered`)
317
+ }
318
+
319
+ const context = yield* Effect.context<never>()
320
+ const wrappedRun = run.pipe(
321
+ Effect.locally(FiberRef.currentLogAnnotations, HashMap.empty()),
322
+ Effect.andThen(Effect.never),
323
+ Effect.scoped,
324
+ Effect.provide(context),
325
+ Effect.orDie,
326
+ Effect.interruptible
327
+ ) as Effect.Effect<never>
328
+ MutableHashMap.set(map, address, wrappedRun)
329
+
330
+ yield* PubSub.publish(events, SingletonRegistered({ address }))
331
+
332
+ // start if we are on the right shard
333
+ if (acquiredShards.has(address.shardId)) {
334
+ yield* Effect.logDebug("Starting singleton", address)
335
+ yield* FiberMap.run(singletonFibers, address, wrappedRun)
336
+ }
337
+ },
338
+ withSingletonLock
339
+ )
340
+
341
+ const syncSingletons = withSingletonLock(Effect.gen(function*() {
342
+ for (const [shardId, map] of singletons) {
343
+ for (const [address, run] of map) {
344
+ const running = FiberMap.unsafeHas(singletonFibers, address)
345
+ const shouldBeRunning = acquiredShards.has(shardId)
346
+ if (running && !shouldBeRunning) {
347
+ yield* Effect.logDebug("Stopping singleton", address)
348
+ internalInterruptors.add(yield* Effect.fiberId)
349
+ yield* FiberMap.remove(singletonFibers, address)
350
+ } else if (!running && shouldBeRunning) {
351
+ yield* Effect.logDebug("Starting singleton", address)
352
+ yield* FiberMap.run(singletonFibers, address, run)
353
+ }
354
+ }
355
+ }
356
+ }))
357
+
358
+ // --- Storage inbox ---
359
+
360
+ const storageReadLatch = yield* Effect.makeLatch(true)
361
+ const openStorageReadLatch = constant(storageReadLatch.open)
362
+
363
+ const storageReadLock = Effect.unsafeMakeSemaphore(1)
364
+ const withStorageReadLock = storageReadLock.withPermits(1)
365
+
366
+ if (storageEnabled && Option.isSome(config.runnerAddress)) {
367
+ const selfAddress = config.runnerAddress.value
368
+
369
+ yield* Effect.gen(function*() {
370
+ yield* Effect.logDebug("Starting")
371
+ yield* Effect.addFinalizer(() => Effect.logDebug("Shutting down"))
372
+
373
+ // keep track of the last sent request ids to avoid duplicates
374
+ // we only keep the last 30 sets to avoid memory leaks
375
+ const sentRequestIds = new Set<Snowflake.Snowflake>()
376
+ const sentRequestIdSets = new Set<Set<Snowflake.Snowflake>>()
377
+
378
+ while (true) {
379
+ // wait for the next poll interval, or if we get notified of a change
380
+ yield* storageReadLatch.await
381
+
382
+ // if we get notified of a change, ensure we start a read immediately
383
+ // next iteration
384
+ storageReadLatch.unsafeClose()
385
+
386
+ // the lock is used to ensure resuming entities have a garantee that no
387
+ // more items are added to the unprocessed set while the semaphore is
388
+ // acquired.
389
+ yield* storageReadLock.take(1)
390
+
391
+ const messages = yield* storage.unprocessedMessages(acquiredShards)
392
+ const currentSentRequestIds = new Set<Snowflake.Snowflake>()
393
+ sentRequestIdSets.add(currentSentRequestIds)
394
+
395
+ const send = Effect.catchAllCause(
396
+ Effect.suspend(() => {
397
+ const message = messages[index]
398
+ if (message._tag === "IncomingRequest") {
399
+ if (sentRequestIds.has(message.envelope.requestId)) {
400
+ return Effect.void
401
+ }
402
+ sentRequestIds.add(message.envelope.requestId)
403
+ currentSentRequestIds.add(message.envelope.requestId)
404
+ }
405
+ const address = message.envelope.address
406
+ const state = entityManagers.get(address.entityType)
407
+ if (!state || !acquiredShards.has(address.shardId)) {
408
+ return Effect.void
409
+ }
410
+
411
+ const isProcessing = state.manager.isProcessingFor(message)
412
+
413
+ // If the message might affect a currently processing request, we
414
+ // send it to the entity manager to be processed.
415
+ if (message._tag === "IncomingEnvelope" && isProcessing) {
416
+ return state.manager.send(message)
417
+ } else if (isProcessing) {
418
+ return Effect.void
419
+ }
420
+
421
+ // If the entity was resuming in another fiber, we add the message
422
+ // id to the unprocessed set.
423
+ const resumptionState = MutableHashMap.get(entityResumptionState, address)
424
+ if (Option.isSome(resumptionState)) {
425
+ resumptionState.value.unprocessed.add(message.envelope.requestId)
426
+ if (message.envelope._tag === "Interrupt") {
427
+ resumptionState.value.interrupts.set(message.envelope.requestId, message as Message.IncomingEnvelope)
428
+ }
429
+ return Effect.void
430
+ }
431
+ return state.manager.send(message)
432
+ }),
433
+ (cause) => {
434
+ const message = messages[index]
435
+ const error = Cause.failureOption(cause)
436
+ // if we get a defect, then update storage
437
+ if (Option.isNone(error)) {
438
+ return storage.saveReply(Reply.ReplyWithContext.fromDefect({
439
+ id: snowflakeGen.unsafeNext(),
440
+ requestId: message.envelope.requestId,
441
+ defect: Cause.squash(cause)
442
+ }))
443
+ }
444
+ if (error.value._tag === "MailboxFull") {
445
+ // MailboxFull can only happen for requests, so this cast is safe
446
+ return resumeEntityFromStorage(message as Message.IncomingRequest<any>)
447
+ }
448
+ return Effect.void
449
+ }
450
+ )
451
+
452
+ let index = 0
453
+ yield* Effect.whileLoop({
454
+ while: () => index < messages.length,
455
+ step: () => index++,
456
+ body: constant(send)
457
+ })
458
+
459
+ // let the resuming entities check if they are done
460
+ yield* storageReadLock.release(1)
461
+
462
+ while (sentRequestIdSets.size > 30) {
463
+ const oldest = Iterable.unsafeHead(sentRequestIdSets)
464
+ sentRequestIdSets.delete(oldest)
465
+ for (const id of oldest) {
466
+ sentRequestIds.delete(id)
467
+ }
468
+ }
469
+ }
470
+ }).pipe(
471
+ Effect.scoped,
472
+ Effect.ensuring(storageReadLock.releaseAll),
473
+ Effect.catchAllCause((cause) => Effect.logWarning("Could not read messages from storage", cause)),
474
+ Effect.repeat(Schedule.spaced(config.entityMessagePollInterval)),
475
+ Effect.annotateLogs({
476
+ package: "@effect/cluster",
477
+ module: "Sharding",
478
+ fiber: "Storage read loop",
479
+ runner: selfAddress
480
+ }),
481
+ Effect.interruptible,
482
+ Effect.forkIn(shardingScope)
483
+ )
484
+
485
+ // open the storage latch every poll interval
486
+ yield* storageReadLatch.open.pipe(
487
+ Effect.delay(config.entityMessagePollInterval),
488
+ Effect.forever,
489
+ Effect.interruptible,
490
+ Effect.forkIn(shardingScope)
491
+ )
492
+
493
+ // Resume unprocessed messages for entities that reached a full mailbox.
494
+ const entityResumptionState = MutableHashMap.empty<EntityAddress, {
495
+ unprocessed: Set<Snowflake.Snowflake>
496
+ interrupts: Map<Snowflake.Snowflake, Message.IncomingEnvelope>
497
+ }>()
498
+ const resumeEntityFromStorage = (lastReceivedMessage: Message.IncomingRequest<any>) => {
499
+ const address = lastReceivedMessage.envelope.address
500
+ const resumptionState = MutableHashMap.get(entityResumptionState, address)
501
+ if (Option.isSome(resumptionState)) {
502
+ resumptionState.value.unprocessed.add(lastReceivedMessage.envelope.requestId)
503
+ return Effect.void
504
+ }
505
+ MutableHashMap.set(entityResumptionState, address, {
506
+ unprocessed: new Set([lastReceivedMessage.envelope.requestId]),
507
+ interrupts: new Map()
508
+ })
509
+ return resumeEntityFromStorageImpl(address)
510
+ }
511
+ const resumeEntityFromStorageImpl = Effect.fnUntraced(
512
+ function*(address: EntityAddress) {
513
+ const state = entityManagers.get(address.entityType)
514
+ if (!state) {
515
+ MutableHashMap.remove(entityResumptionState, address)
516
+ return
517
+ }
518
+
519
+ const resumptionState = Option.getOrThrow(MutableHashMap.get(entityResumptionState, address))
520
+ let done = false
521
+
522
+ while (!done) {
523
+ // if the shard is no longer assigned to this runner, we stop
524
+ if (!acquiredShards.has(address.shardId)) {
525
+ return
526
+ }
527
+
528
+ // take a batch of unprocessed message ids
529
+ const messageIds = Arr.empty<Snowflake.Snowflake>()
530
+ for (const id of resumptionState.unprocessed) {
531
+ if (messageIds.length === 1024) break
532
+ messageIds.push(id)
533
+ }
534
+
535
+ const messages = yield* storage.unprocessedMessagesById(messageIds)
536
+
537
+ // this should not happen, but we handle it just in case
538
+ if (messages.length === 0) {
539
+ yield* Effect.sleep(config.entityMessagePollInterval)
540
+ continue
541
+ }
542
+
543
+ let index = 0
544
+
545
+ const sendWithRetry: Effect.Effect<
546
+ void,
547
+ EntityNotManagedByRunner
548
+ > = Effect.catchTags(
549
+ Effect.suspend(() => {
550
+ if (!acquiredShards.has(address.shardId)) {
551
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
552
+ }
553
+
554
+ const message = messages[index]
555
+ // check if this is a request that was interrupted
556
+ const interrupt = message._tag === "IncomingRequest" &&
557
+ resumptionState.interrupts.get(message.envelope.requestId)
558
+ return interrupt ?
559
+ Effect.flatMap(state.manager.send(message), () => {
560
+ resumptionState.interrupts.delete(message.envelope.requestId)
561
+ return state.manager.send(interrupt)
562
+ }) :
563
+ state.manager.send(message)
564
+ }),
565
+ {
566
+ MailboxFull: () => Effect.delay(sendWithRetry, config.sendRetryInterval),
567
+ AlreadyProcessingMessage: () => Effect.void
568
+ }
569
+ )
570
+
571
+ yield* Effect.whileLoop({
572
+ while: () => index < messages.length,
573
+ body: constant(sendWithRetry),
574
+ step: () => index++
575
+ })
576
+
577
+ for (const id of messageIds) {
578
+ resumptionState.unprocessed.delete(id)
579
+ }
580
+ if (resumptionState.unprocessed.size > 0) continue
581
+
582
+ // if we have caught up to the main storage loop, we let it take over
583
+ yield* withStorageReadLock(Effect.sync(() => {
584
+ if (resumptionState.unprocessed.size === 0) {
585
+ MutableHashMap.remove(entityResumptionState, address)
586
+ done = true
587
+ }
588
+ }))
589
+ }
590
+ },
591
+ Effect.retry({
592
+ while: (e) => e._tag === "PersistenceError",
593
+ schedule: Schedule.spaced(config.entityMessagePollInterval)
594
+ }),
595
+ Effect.catchAllCause((cause) => Effect.logError("Could not resume unprocessed messages", cause)),
596
+ (effect, address) =>
597
+ Effect.annotateLogs(effect, {
598
+ package: "@effect/cluster",
599
+ module: "Sharding",
600
+ fiber: "Resuming unprocessed messages",
601
+ runner: selfAddress,
602
+ entity: address
603
+ }),
604
+ (effect, address) =>
605
+ Effect.ensuring(
606
+ effect,
607
+ Effect.sync(() => MutableHashMap.remove(entityResumptionState, address))
608
+ ),
609
+ Effect.interruptible,
610
+ Effect.forkIn(shardingScope)
611
+ )
612
+ }
613
+
614
+ // --- Sending messages ---
615
+
616
+ const sendLocal = (
617
+ message: Message.Outgoing<any> | Message.Incoming<any>
618
+ ): Effect.Effect<
619
+ void,
620
+ EntityNotManagedByRunner | MailboxFull | AlreadyProcessingMessage
621
+ > =>
622
+ Effect.suspend(() => {
623
+ const address = message.envelope.address
624
+ if (!isEntityOnLocalShards(address)) {
625
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
626
+ }
627
+ const state = entityManagers.get(address.entityType)
628
+ if (!state) {
629
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
630
+ }
631
+
632
+ return message._tag === "IncomingRequest" || message._tag === "IncomingEnvelope" ?
633
+ state.manager.send(message) :
634
+ runners.sendLocal({
635
+ message,
636
+ send: state.manager.sendLocal,
637
+ simulateRemoteSerialization: config.simulateRemoteSerialization
638
+ })
639
+ })
640
+
641
+ const notifyLocal = (message: Message.Outgoing<any> | Message.Incoming<any>, discard: boolean) =>
642
+ Effect.suspend(() => {
643
+ const address = message.envelope.address
644
+ if (!isEntityOnLocalShards(address)) {
645
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
646
+ }
647
+
648
+ const notify = storageEnabled
649
+ ? openStorageReadLatch
650
+ : () => Effect.dieMessage("Sharding.notifyLocal: storage is disabled")
651
+
652
+ return message._tag === "IncomingRequest" || message._tag === "IncomingEnvelope"
653
+ ? notify()
654
+ : runners.notifyLocal({ message, notify, discard })
655
+ })
656
+
657
+ const isTransientError = Predicate.or(RunnerUnavailable.is, EntityNotManagedByRunner.is)
658
+ function sendOutgoing(
659
+ message: Message.Outgoing<any>,
660
+ discard: boolean,
661
+ retries?: number
662
+ ): Effect.Effect<void, MailboxFull | AlreadyProcessingMessage | PersistenceError> {
663
+ return Effect.catchIf(
664
+ Effect.suspend(() => {
665
+ const address = message.envelope.address
666
+ const maybeRunner = MutableHashMap.get(shardAssignments, address.shardId)
667
+ const isPersisted = storageEnabled && Context.get(message.rpc.annotations, Persisted)
668
+ const runnerIsLocal = Option.isSome(maybeRunner) && isLocalRunner(maybeRunner.value)
669
+ if (isPersisted) {
670
+ return runnerIsLocal
671
+ ? notifyLocal(message, discard)
672
+ : runners.notify({ address: maybeRunner, message, discard })
673
+ } else if (Option.isNone(maybeRunner)) {
674
+ return Effect.fail(new EntityNotManagedByRunner({ address }))
675
+ }
676
+ return runnerIsLocal
677
+ ? sendLocal(message)
678
+ : runners.send({ address: maybeRunner.value, message })
679
+ }),
680
+ isTransientError,
681
+ (error) => {
682
+ if (retries === 0) {
683
+ return Effect.die(error)
684
+ }
685
+ return Effect.delay(sendOutgoing(message, discard, retries && retries - 1), config.sendRetryInterval)
686
+ }
687
+ )
688
+ }
689
+
690
+ // --- Shard Manager sync ---
691
+
692
+ const shardManagerTimeoutFiber = yield* FiberHandle.make().pipe(
693
+ Scope.extend(shardingScope)
694
+ )
695
+ const startShardManagerTimeout = FiberHandle.run(
696
+ shardManagerTimeoutFiber,
697
+ Effect.flatMap(Effect.sleep(config.shardManagerUnavailableTimeout), () => {
698
+ MutableHashMap.clear(shardAssignments)
699
+ return clearSelfShards
700
+ }),
701
+ { onlyIfMissing: true }
702
+ )
703
+ const stopShardManagerTimeout = FiberHandle.clear(shardManagerTimeoutFiber)
704
+
705
+ // Every time the link to the shard manager is lost, we re-register the runner
706
+ // and re-subscribe to sharding events
707
+ yield* Effect.gen(function*() {
708
+ yield* Effect.logDebug("Registering with shard manager")
709
+ if (Option.isSome(config.runnerAddress)) {
710
+ const machineId = yield* shardManager.register(config.runnerAddress.value)
711
+ yield* snowflakeGen.setMachineId(machineId)
712
+ }
713
+
714
+ yield* stopShardManagerTimeout
715
+
716
+ yield* Effect.logDebug("Subscribing to sharding events")
717
+ const mailbox = yield* shardManager.shardingEvents
718
+ const startedLatch = yield* Effect.makeLatch(false)
719
+
720
+ const eventsFiber = yield* Effect.gen(function*() {
721
+ while (true) {
722
+ const [events] = yield* mailbox.takeAll
723
+ for (const event of events) {
724
+ yield* Effect.logDebug("Received sharding event", event)
725
+
726
+ switch (event._tag) {
727
+ case "StreamStarted": {
728
+ yield* startedLatch.open
729
+ break
730
+ }
731
+ case "ShardsAssigned": {
732
+ for (const shard of event.shards) {
733
+ MutableHashMap.set(shardAssignments, shard, event.address)
734
+ }
735
+ if (!MutableRef.get(isShutdown) && isLocalRunner(event.address)) {
736
+ for (const shardId of event.shards) {
737
+ if (selfShards.has(shardId)) continue
738
+ selfShards.add(shardId)
739
+ }
740
+ yield* activeShardsLatch.open
741
+ }
742
+ break
743
+ }
744
+ case "ShardsUnassigned": {
745
+ for (const shard of event.shards) {
746
+ MutableHashMap.remove(shardAssignments, shard)
747
+ }
748
+ if (isLocalRunner(event.address)) {
749
+ for (const shard of event.shards) {
750
+ selfShards.delete(shard)
751
+ }
752
+ yield* activeShardsLatch.open
753
+ }
754
+ break
755
+ }
756
+ }
757
+ }
758
+ }
759
+ }).pipe(Effect.forkScoped)
760
+
761
+ // Wait for the stream to be established
762
+ yield* startedLatch.await
763
+
764
+ // perform a full sync every config.refreshAssignmentsInterval
765
+ const syncFiber = yield* syncAssignments.pipe(
766
+ Effect.andThen(Effect.sleep(config.refreshAssignmentsInterval)),
767
+ Effect.forever,
768
+ Effect.forkScoped
769
+ )
770
+
771
+ yield* Fiber.joinAll([eventsFiber, syncFiber])
772
+ }).pipe(
773
+ Effect.scoped,
774
+ Effect.catchAllCause((cause) => Effect.logDebug(cause)),
775
+ Effect.zipRight(startShardManagerTimeout),
776
+ Effect.repeat(
777
+ Schedule.exponential(1000).pipe(
778
+ Schedule.union(Schedule.spaced(10_000))
779
+ )
780
+ ),
781
+ Effect.annotateLogs({
782
+ package: "@effect/cluster",
783
+ module: "Sharding",
784
+ fiber: "ShardManager sync",
785
+ runner: config.runnerAddress
786
+ }),
787
+ Effect.interruptible,
788
+ Effect.forkIn(shardingScope)
789
+ )
790
+
791
+ const syncAssignments = Effect.gen(function*() {
792
+ const assignments = yield* shardManager.getAssignments
793
+ yield* Effect.logDebug("Received shard assignments", assignments)
794
+
795
+ for (const [shardId, runner] of assignments) {
796
+ if (Option.isNone(runner)) {
797
+ MutableHashMap.remove(shardAssignments, shardId)
798
+ selfShards.delete(shardId)
799
+ continue
800
+ }
801
+
802
+ MutableHashMap.set(shardAssignments, shardId, runner.value)
803
+
804
+ if (!isLocalRunner(runner.value)) {
805
+ selfShards.delete(shardId)
806
+ continue
807
+ }
808
+ if (MutableRef.get(isShutdown) || selfShards.has(shardId)) {
809
+ continue
810
+ }
811
+ selfShards.add(shardId)
812
+ }
813
+
814
+ yield* activeShardsLatch.open
815
+ })
816
+
817
+ // --- Clients ---
818
+
819
+ type ClientRequestEntry = {
820
+ readonly rpc: Rpc.AnyWithProps
821
+ readonly context: Context.Context<never>
822
+ lastChunkId?: Snowflake.Snowflake
823
+ }
824
+ const clientRequests = new Map<Snowflake.Snowflake, ClientRequestEntry>()
825
+
826
+ const clients: ResourceMap<
827
+ Entity<any>,
828
+ (entityId: string) => RpcClient.RpcClient<any, MailboxFull | AlreadyProcessingMessage>,
829
+ never
830
+ > = yield* ResourceMap.make(Effect.fnUntraced(function*(entity: Entity<any>) {
831
+ const client = yield* RpcClient.makeNoSerialization(entity.protocol, {
832
+ supportsAck: true,
833
+ generateRequestId: () => RequestId(snowflakeGen.unsafeNext()),
834
+ onFromClient(options): Effect.Effect<void, MailboxFull | AlreadyProcessingMessage | PersistenceError> {
835
+ const address = Context.unsafeGet(options.context, ClientAddressTag)
836
+ switch (options.message._tag) {
837
+ case "Request": {
838
+ const fiber = Option.getOrThrow(Fiber.getCurrentFiber())
839
+ const id = Snowflake.Snowflake(options.message.id)
840
+ const rpc = entity.protocol.requests.get(options.message.tag)!
841
+ let respond: (reply: Reply.Reply<any>) => Effect.Effect<void>
842
+ if (!options.discard) {
843
+ const entry: ClientRequestEntry = {
844
+ rpc: rpc as any,
845
+ context: fiber.currentContext
846
+ }
847
+ clientRequests.set(id, entry)
848
+ respond = makeClientRespond(entry, client.write)
849
+ } else {
850
+ respond = clientRespondDiscard
851
+ }
852
+ return sendOutgoing(
853
+ new Message.OutgoingRequest({
854
+ envelope: Envelope.makeRequest({
855
+ requestId: id,
856
+ address,
857
+ tag: options.message.tag,
858
+ payload: options.message.payload,
859
+ headers: options.message.headers,
860
+ traceId: options.message.traceId,
861
+ spanId: options.message.spanId,
862
+ sampled: options.message.sampled
863
+ }),
864
+ lastReceivedReply: Option.none(),
865
+ rpc,
866
+ context: fiber.currentContext as Context.Context<any>,
867
+ respond
868
+ }),
869
+ options.discard
870
+ )
871
+ }
872
+ case "Ack": {
873
+ const requestId = Snowflake.Snowflake(options.message.requestId)
874
+ const entry = clientRequests.get(requestId)
875
+ if (!entry) return Effect.void
876
+ return sendOutgoing(
877
+ new Message.OutgoingEnvelope({
878
+ envelope: new Envelope.AckChunk({
879
+ id: snowflakeGen.unsafeNext(),
880
+ address,
881
+ requestId,
882
+ replyId: entry.lastChunkId!
883
+ }),
884
+ rpc: entry.rpc
885
+ }),
886
+ false
887
+ )
888
+ }
889
+ case "Interrupt": {
890
+ const requestId = Snowflake.Snowflake(options.message.requestId)
891
+ const entry = clientRequests.get(requestId)
892
+ if (!entry) return Effect.void
893
+ clientRequests.delete(requestId)
894
+ // for durable messages, we ignore interrupts on shutdown or as a
895
+ // result of a shard being reassigned
896
+ const isTransientInterrupt = MutableRef.get(isShutdown) ||
897
+ options.message.interruptors.some((id) => internalInterruptors.has(id))
898
+ if (isTransientInterrupt && storageEnabled && Context.get(entry.rpc.annotations, Persisted)) {
899
+ return Effect.void
900
+ }
901
+ return Effect.ignore(sendOutgoing(
902
+ new Message.OutgoingEnvelope({
903
+ envelope: new Envelope.Interrupt({
904
+ id: snowflakeGen.unsafeNext(),
905
+ address,
906
+ requestId
907
+ }),
908
+ rpc: entry.rpc
909
+ }),
910
+ false,
911
+ 3
912
+ ))
913
+ }
914
+ }
915
+ return Effect.void
916
+ }
917
+ })
918
+
919
+ const wrappedClient: any = {}
920
+ for (const method of Object.keys(client.client)) {
921
+ wrappedClient[method] = function(this: any, payload: any, options?: {
922
+ readonly context?: Context.Context<never>
923
+ }) {
924
+ return (client as any).client[method](payload, {
925
+ ...options,
926
+ context: options?.context
927
+ ? Context.merge(options.context, this[currentClientAddress])
928
+ : this[currentClientAddress]
929
+ })
930
+ }
931
+ }
932
+
933
+ yield* Scope.addFinalizer(
934
+ yield* Effect.scope,
935
+ Effect.withFiberRuntime((fiber) => {
936
+ internalInterruptors.add(fiber.id())
937
+ return Effect.void
938
+ })
939
+ )
940
+
941
+ return (entityId: string) => {
942
+ const id = EntityId.make(entityId)
943
+ return {
944
+ ...wrappedClient,
945
+ [currentClientAddress]: ClientAddressTag.context(EntityAddress.make({
946
+ shardId: getShardId(id),
947
+ entityId: id,
948
+ entityType: entity.type
949
+ }))
950
+ }
951
+ }
952
+ }))
953
+
954
+ const makeClient = <Rpcs extends Rpc.Any>(entity: Entity<Rpcs>): Effect.Effect<
955
+ (entityId: string) => RpcClient.RpcClient<Rpcs, MailboxFull | AlreadyProcessingMessage>
956
+ > => clients.get(entity)
957
+
958
+ const clientRespondDiscard = (_reply: Reply.Reply<any>) => Effect.void
959
+
960
+ const makeClientRespond = (
961
+ entry: ClientRequestEntry,
962
+ write: (reply: FromServer<any>) => Effect.Effect<void>
963
+ ) =>
964
+ (reply: Reply.Reply<any>) => {
965
+ switch (reply._tag) {
966
+ case "Chunk": {
967
+ entry.lastChunkId = reply.id
968
+ return write({
969
+ _tag: "Chunk",
970
+ clientId: 0,
971
+ requestId: RequestId(reply.requestId),
972
+ values: reply.values
973
+ })
974
+ }
975
+ case "WithExit": {
976
+ clientRequests.delete(reply.requestId)
977
+ return write({
978
+ _tag: "Exit",
979
+ clientId: 0,
980
+ requestId: RequestId(reply.requestId),
981
+ exit: reply.exit
982
+ })
983
+ }
984
+ }
985
+ }
986
+
987
+ // --- Entities ---
988
+
989
+ const context = yield* Effect.context<ShardingConfig>()
990
+ const reaper = yield* EntityReaper
991
+ const registerEntity: Sharding["Type"]["registerEntity"] = Effect.fnUntraced(
992
+ function*(entity, build, options) {
993
+ if (entityManagers.has(entity.type)) return
994
+ const scope = yield* Scope.make()
995
+ const manager = yield* EntityManager.make(entity, build, {
996
+ ...options,
997
+ storage,
998
+ runnerAddress: Option.getOrThrow(config.runnerAddress),
999
+ sharding
1000
+ }).pipe(
1001
+ Effect.provide(context.pipe(
1002
+ Context.add(EntityReaper, reaper),
1003
+ Context.add(Scope.Scope, scope),
1004
+ Context.add(Snowflake.Generator, snowflakeGen)
1005
+ ))
1006
+ ) as Effect.Effect<EntityManager.EntityManager>
1007
+ entityManagers.set(entity.type, {
1008
+ entity,
1009
+ scope,
1010
+ manager
1011
+ })
1012
+
1013
+ yield* Scope.addFinalizer(scope, Effect.sync(() => entityManagers.delete(entity.type)))
1014
+ yield* PubSub.publish(events, EntityRegistered({ entity }))
1015
+ }
1016
+ )
1017
+
1018
+ yield* Scope.addFinalizerExit(
1019
+ shardingScope,
1020
+ (exit) =>
1021
+ Effect.forEach(
1022
+ entityManagers.values(),
1023
+ (state) =>
1024
+ Effect.catchAllCause(Scope.close(state.scope, exit), (cause) =>
1025
+ Effect.annotateLogs(Effect.logError("Error closing entity manager", cause), {
1026
+ entity: state.entity.type
1027
+ })),
1028
+ { concurrency: "unbounded", discard: true }
1029
+ )
1030
+ )
1031
+
1032
+ // --- Finalization ---
1033
+
1034
+ if (Option.isSome(config.runnerAddress)) {
1035
+ const selfAddress = config.runnerAddress.value
1036
+ // Unregister runner from shard manager when scope is closed
1037
+ yield* Scope.addFinalizer(
1038
+ shardingScope,
1039
+ Effect.gen(function*() {
1040
+ yield* Effect.logDebug("Unregistering runner from shard manager", selfAddress)
1041
+ yield* shardManager.unregister(selfAddress).pipe(
1042
+ Effect.catchAllCause((cause) => Effect.logError("Error calling unregister with shard manager", cause))
1043
+ )
1044
+ yield* clearSelfShards
1045
+ })
1046
+ )
1047
+ }
1048
+
1049
+ yield* Scope.addFinalizer(
1050
+ shardingScope,
1051
+ Effect.withFiberRuntime((fiber) => {
1052
+ MutableRef.set(isShutdown, true)
1053
+ internalInterruptors.add(fiber.id())
1054
+ return Effect.void
1055
+ })
1056
+ )
1057
+
1058
+ const sharding = Sharding.of({
1059
+ getRegistrationEvents,
1060
+ getShardId,
1061
+ isShutdown: Effect.sync(() => MutableRef.get(isShutdown)),
1062
+ registerEntity,
1063
+ registerSingleton,
1064
+ makeClient,
1065
+ send: sendLocal,
1066
+ notify: (message) => notifyLocal(message, false)
1067
+ })
1068
+
1069
+ return sharding
1070
+ })
206
1071
 
207
1072
  /**
208
- * Gets the list of shardIds assigned to the current Pod
209
- *
210
1073
  * @since 1.0.0
211
- * @category utils
1074
+ * @category layers
212
1075
  */
213
- export const getAssignedShardIds: Effect.Effect<HashSet.HashSet<ShardId.ShardId>, never, Sharding> =
214
- internal.getAssignedShardIds
1076
+ export const layer: Layer.Layer<
1077
+ Sharding,
1078
+ never,
1079
+ ShardingConfig | Runners | ShardManagerClient | MessageStorage.MessageStorage | ShardStorage
1080
+ > = Layer.scoped(Sharding, make).pipe(
1081
+ Layer.provide([Snowflake.layerGenerator, EntityReaper.Default])
1082
+ )
1083
+
1084
+ // Utilities
1085
+
1086
+ const ClientAddressTag = Context.GenericTag<EntityAddress>("@effect/cluster/Sharding/ClientAddress")
1087
+ const currentClientAddress = Symbol.for(ClientAddressTag.key)
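
Note on the storage read loop added above: it combines a latch (opened by the poll timer or by `notifyLocal`), a one-permit semaphore that resuming entities use to wait for an in-flight storage read to finish, and a bounded window of recently sent request ids for de-duplication. The following stand-alone sketch (hypothetical names, plain `effect` APIs only, not part of this package) shows the latch-plus-semaphore shape in isolation:

```ts
import { Effect } from "effect"

const program = Effect.gen(function*() {
  // latch starts open so the first read happens immediately
  const readLatch = yield* Effect.makeLatch(true)
  // one permit: other fibers can wait until the current read has finished
  const readLock = Effect.unsafeMakeSemaphore(1)

  // hypothetical stand-in for `storage.unprocessedMessages`
  const readBatch = Effect.succeed<ReadonlyArray<string>>([])

  const readLoop = Effect.gen(function*() {
    while (true) {
      // wait for the next poll tick or an explicit notification
      yield* readLatch.await
      // close the latch before reading, so a notification that arrives while
      // we are reading re-opens it and triggers another pass
      readLatch.unsafeClose()
      yield* readLock.take(1)
      const batch = yield* readBatch
      yield* Effect.logDebug(`read ${batch.length} messages`)
      yield* readLock.release(1)
    }
  })

  yield* Effect.fork(readLoop)

  // emulate the poll timer: open the latch on a fixed interval
  yield* readLatch.open.pipe(Effect.delay("5 seconds"), Effect.forever)
})
```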