@effect/cluster 0.28.4 → 0.29.1

This diff shows the changes between two publicly released versions of the package, as published to the supported public registries. It is provided for informational purposes only.
Files changed (626)
  1. package/ClusterError/package.json +6 -0
  2. package/ClusterMetrics/package.json +6 -0
  3. package/ClusterSchema/package.json +6 -0
  4. package/DeliverAt/package.json +6 -0
  5. package/Entity/package.json +6 -0
  6. package/EntityAddress/package.json +6 -0
  7. package/EntityId/package.json +6 -0
  8. package/EntityType/package.json +6 -0
  9. package/Envelope/package.json +6 -0
  10. package/HttpCommon/package.json +6 -0
  11. package/HttpRunner/package.json +6 -0
  12. package/HttpShardManager/package.json +6 -0
  13. package/MachineId/package.json +6 -0
  14. package/MessageStorage/package.json +6 -0
  15. package/README.md +2 -2
  16. package/Reply/package.json +6 -0
  17. package/Runner/package.json +6 -0
  18. package/RunnerAddress/package.json +6 -0
  19. package/RunnerHealth/package.json +6 -0
  20. package/RunnerServer/package.json +6 -0
  21. package/Runners/package.json +6 -0
  22. package/ShardStorage/package.json +6 -0
  23. package/Singleton/package.json +6 -0
  24. package/SingletonAddress/package.json +6 -0
  25. package/Snowflake/package.json +6 -0
  26. package/SocketRunner/package.json +6 -0
  27. package/SocketShardManager/package.json +6 -0
  28. package/SqlMessageStorage/package.json +6 -0
  29. package/SqlShardStorage/package.json +6 -0
  30. package/SynchronizedClock/package.json +6 -0
  31. package/dist/cjs/ClusterError.js +180 -0
  32. package/dist/cjs/ClusterError.js.map +1 -0
  33. package/dist/cjs/ClusterMetrics.js +63 -0
  34. package/dist/cjs/ClusterMetrics.js.map +1 -0
  35. package/dist/cjs/{Pods.js → ClusterSchema.js} +10 -22
  36. package/dist/cjs/ClusterSchema.js.map +1 -0
  37. package/dist/cjs/DeliverAt.js +30 -0
  38. package/dist/cjs/DeliverAt.js.map +1 -0
  39. package/dist/cjs/Entity.js +187 -0
  40. package/dist/cjs/Entity.js.map +1 -0
  41. package/dist/cjs/EntityAddress.js +54 -0
  42. package/dist/cjs/EntityAddress.js.map +1 -0
  43. package/dist/cjs/{AtLeastOnce.js → EntityId.js} +6 -7
  44. package/dist/cjs/EntityId.js.map +1 -0
  45. package/dist/cjs/{ShardManagerClient.js → EntityType.js} +5 -16
  46. package/dist/cjs/EntityType.js.map +1 -0
  47. package/dist/cjs/Envelope.js +168 -0
  48. package/dist/cjs/Envelope.js.map +1 -0
  49. package/dist/cjs/HttpCommon.js +49 -0
  50. package/dist/cjs/HttpCommon.js.map +1 -0
  51. package/dist/cjs/HttpRunner.js +108 -0
  52. package/dist/cjs/HttpRunner.js.map +1 -0
  53. package/dist/cjs/HttpShardManager.js +140 -0
  54. package/dist/cjs/HttpShardManager.js.map +1 -0
  55. package/dist/cjs/{AtLeastOnceStorage.js → MachineId.js} +11 -9
  56. package/dist/cjs/MachineId.js.map +1 -0
  57. package/dist/cjs/Message.js +99 -18
  58. package/dist/cjs/Message.js.map +1 -1
  59. package/dist/cjs/MessageStorage.js +356 -0
  60. package/dist/cjs/MessageStorage.js.map +1 -0
  61. package/dist/cjs/Reply.js +200 -0
  62. package/dist/cjs/Reply.js.map +1 -0
  63. package/dist/cjs/Runner.js +79 -0
  64. package/dist/cjs/Runner.js.map +1 -0
  65. package/dist/cjs/RunnerAddress.js +63 -0
  66. package/dist/cjs/RunnerAddress.js.map +1 -0
  67. package/dist/cjs/RunnerHealth.js +68 -0
  68. package/dist/cjs/RunnerHealth.js.map +1 -0
  69. package/dist/cjs/RunnerServer.js +125 -0
  70. package/dist/cjs/RunnerServer.js.map +1 -0
  71. package/dist/cjs/Runners.js +344 -0
  72. package/dist/cjs/Runners.js.map +1 -0
  73. package/dist/cjs/ShardId.js +7 -46
  74. package/dist/cjs/ShardId.js.map +1 -1
  75. package/dist/cjs/ShardManager.js +493 -8
  76. package/dist/cjs/ShardManager.js.map +1 -1
  77. package/dist/cjs/ShardStorage.js +139 -0
  78. package/dist/cjs/ShardStorage.js.map +1 -0
  79. package/dist/cjs/Sharding.js +731 -91
  80. package/dist/cjs/Sharding.js.map +1 -1
  81. package/dist/cjs/ShardingConfig.js +85 -18
  82. package/dist/cjs/ShardingConfig.js.map +1 -1
  83. package/dist/cjs/ShardingRegistrationEvent.js +26 -32
  84. package/dist/cjs/ShardingRegistrationEvent.js.map +1 -1
  85. package/dist/cjs/{ManagerConfig.js → Singleton.js} +11 -20
  86. package/dist/cjs/Singleton.js.map +1 -0
  87. package/dist/cjs/SingletonAddress.js +50 -0
  88. package/dist/cjs/SingletonAddress.js.map +1 -0
  89. package/dist/cjs/Snowflake.js +133 -0
  90. package/dist/cjs/Snowflake.js.map +1 -0
  91. package/dist/cjs/SocketRunner.js +40 -0
  92. package/dist/cjs/SocketRunner.js.map +1 -0
  93. package/dist/cjs/SocketShardManager.js +33 -0
  94. package/dist/cjs/SocketShardManager.js.map +1 -0
  95. package/dist/cjs/SqlMessageStorage.js +668 -0
  96. package/dist/cjs/SqlMessageStorage.js.map +1 -0
  97. package/dist/cjs/SqlShardStorage.js +228 -0
  98. package/dist/cjs/SqlShardStorage.js.map +1 -0
  99. package/dist/cjs/SynchronizedClock.js +66 -0
  100. package/dist/cjs/SynchronizedClock.js.map +1 -0
  101. package/dist/cjs/index.js +57 -45
  102. package/dist/cjs/internal/entityManager.js +311 -143
  103. package/dist/cjs/internal/entityManager.js.map +1 -1
  104. package/dist/cjs/internal/entityReaper.js +47 -0
  105. package/dist/cjs/internal/entityReaper.js.map +1 -0
  106. package/dist/cjs/internal/hash.js +20 -0
  107. package/dist/cjs/internal/hash.js.map +1 -0
  108. package/dist/cjs/internal/interruptors.js +9 -0
  109. package/dist/cjs/internal/interruptors.js.map +1 -0
  110. package/dist/cjs/internal/resourceMap.js +88 -0
  111. package/dist/cjs/internal/resourceMap.js.map +1 -0
  112. package/dist/cjs/internal/resourceRef.js +92 -0
  113. package/dist/cjs/internal/resourceRef.js.map +1 -0
  114. package/dist/cjs/internal/shardManager.js +219 -235
  115. package/dist/cjs/internal/shardManager.js.map +1 -1
  116. package/dist/dts/ClusterError.d.ts +169 -0
  117. package/dist/dts/ClusterError.d.ts.map +1 -0
  118. package/dist/dts/ClusterMetrics.d.ts +50 -0
  119. package/dist/dts/ClusterMetrics.d.ts.map +1 -0
  120. package/dist/dts/ClusterSchema.d.ts +13 -0
  121. package/dist/dts/ClusterSchema.d.ts.map +1 -0
  122. package/dist/dts/DeliverAt.d.ts +27 -0
  123. package/dist/dts/DeliverAt.d.ts.map +1 -0
  124. package/dist/dts/Entity.d.ts +180 -0
  125. package/dist/dts/Entity.d.ts.map +1 -0
  126. package/dist/dts/EntityAddress.d.ts +55 -0
  127. package/dist/dts/EntityAddress.d.ts.map +1 -0
  128. package/dist/dts/EntityId.d.ts +15 -0
  129. package/dist/dts/EntityId.d.ts.map +1 -0
  130. package/dist/dts/EntityType.d.ts +15 -0
  131. package/dist/dts/EntityType.d.ts.map +1 -0
  132. package/dist/dts/Envelope.d.ts +252 -0
  133. package/dist/dts/Envelope.d.ts.map +1 -0
  134. package/dist/dts/HttpCommon.d.ts +25 -0
  135. package/dist/dts/HttpCommon.d.ts.map +1 -0
  136. package/dist/dts/HttpRunner.d.ts +76 -0
  137. package/dist/dts/HttpRunner.d.ts.map +1 -0
  138. package/dist/dts/HttpShardManager.d.ts +119 -0
  139. package/dist/dts/HttpShardManager.d.ts.map +1 -0
  140. package/dist/dts/MachineId.d.ts +20 -0
  141. package/dist/dts/MachineId.d.ts.map +1 -0
  142. package/dist/dts/Message.d.ts +91 -74
  143. package/dist/dts/Message.d.ts.map +1 -1
  144. package/dist/dts/MessageStorage.d.ts +336 -0
  145. package/dist/dts/MessageStorage.d.ts.map +1 -0
  146. package/dist/dts/Reply.d.ts +171 -0
  147. package/dist/dts/Reply.d.ts.map +1 -0
  148. package/dist/dts/Runner.d.ts +81 -0
  149. package/dist/dts/Runner.d.ts.map +1 -0
  150. package/dist/dts/RunnerAddress.d.ts +56 -0
  151. package/dist/dts/RunnerAddress.d.ts.map +1 -0
  152. package/dist/dts/RunnerHealth.d.ts +54 -0
  153. package/dist/dts/RunnerHealth.d.ts.map +1 -0
  154. package/dist/dts/RunnerServer.d.ts +44 -0
  155. package/dist/dts/RunnerServer.d.ts.map +1 -0
  156. package/dist/dts/Runners.d.ts +161 -0
  157. package/dist/dts/Runners.d.ts.map +1 -0
  158. package/dist/dts/ShardId.d.ts +5 -55
  159. package/dist/dts/ShardId.d.ts.map +1 -1
  160. package/dist/dts/ShardManager.d.ts +435 -23
  161. package/dist/dts/ShardManager.d.ts.map +1 -1
  162. package/dist/dts/ShardStorage.d.ts +200 -0
  163. package/dist/dts/ShardStorage.d.ts.map +1 -0
  164. package/dist/dts/Sharding.d.ts +64 -133
  165. package/dist/dts/Sharding.d.ts.map +1 -1
  166. package/dist/dts/ShardingConfig.d.ts +147 -44
  167. package/dist/dts/ShardingConfig.d.ts.map +1 -1
  168. package/dist/dts/ShardingRegistrationEvent.d.ts +38 -23
  169. package/dist/dts/ShardingRegistrationEvent.d.ts.map +1 -1
  170. package/dist/dts/Singleton.d.ts +13 -0
  171. package/dist/dts/Singleton.d.ts.map +1 -0
  172. package/dist/dts/SingletonAddress.d.ts +49 -0
  173. package/dist/dts/SingletonAddress.d.ts.map +1 -0
  174. package/dist/dts/Snowflake.d.ts +121 -0
  175. package/dist/dts/Snowflake.d.ts.map +1 -0
  176. package/dist/dts/SocketRunner.d.ts +22 -0
  177. package/dist/dts/SocketRunner.d.ts.map +1 -0
  178. package/dist/dts/SocketShardManager.d.ts +17 -0
  179. package/dist/dts/SocketShardManager.d.ts.map +1 -0
  180. package/dist/dts/SqlMessageStorage.d.ts +43 -0
  181. package/dist/dts/SqlMessageStorage.d.ts.map +1 -0
  182. package/dist/dts/SqlShardStorage.d.ts +38 -0
  183. package/dist/dts/SqlShardStorage.d.ts.map +1 -0
  184. package/dist/dts/SynchronizedClock.d.ts +19 -0
  185. package/dist/dts/SynchronizedClock.d.ts.map +1 -0
  186. package/dist/dts/index.d.ts +48 -24
  187. package/dist/dts/index.d.ts.map +1 -1
  188. package/dist/dts/internal/entityReaper.d.ts +2 -0
  189. package/dist/dts/internal/entityReaper.d.ts.map +1 -0
  190. package/dist/dts/internal/hash.d.ts +2 -0
  191. package/dist/dts/internal/hash.d.ts.map +1 -0
  192. package/dist/dts/internal/interruptors.d.ts +2 -0
  193. package/dist/dts/internal/interruptors.d.ts.map +1 -0
  194. package/dist/dts/internal/resourceMap.d.ts +22 -0
  195. package/dist/dts/internal/resourceMap.d.ts.map +1 -0
  196. package/dist/dts/internal/resourceRef.d.ts +25 -0
  197. package/dist/dts/internal/resourceRef.d.ts.map +1 -0
  198. package/dist/dts/internal/shardManager.d.ts +1 -11
  199. package/dist/dts/internal/shardManager.d.ts.map +1 -1
  200. package/dist/esm/ClusterError.js +164 -0
  201. package/dist/esm/ClusterError.js.map +1 -0
  202. package/dist/esm/ClusterMetrics.js +54 -0
  203. package/dist/esm/ClusterMetrics.js.map +1 -0
  204. package/dist/esm/ClusterSchema.js +13 -0
  205. package/dist/esm/ClusterSchema.js.map +1 -0
  206. package/dist/esm/DeliverAt.js +22 -0
  207. package/dist/esm/DeliverAt.js.map +1 -0
  208. package/dist/esm/Entity.js +173 -0
  209. package/dist/esm/Entity.js.map +1 -0
  210. package/dist/esm/EntityAddress.js +44 -0
  211. package/dist/esm/EntityAddress.js.map +1 -0
  212. package/dist/esm/EntityId.js +10 -0
  213. package/dist/esm/EntityId.js.map +1 -0
  214. package/dist/esm/EntityType.js +10 -0
  215. package/dist/esm/EntityType.js.map +1 -0
  216. package/dist/esm/Envelope.js +154 -0
  217. package/dist/esm/Envelope.js.map +1 -0
  218. package/dist/esm/HttpCommon.js +38 -0
  219. package/dist/esm/HttpCommon.js.map +1 -0
  220. package/dist/esm/HttpRunner.js +98 -0
  221. package/dist/esm/HttpRunner.js.map +1 -0
  222. package/dist/esm/HttpShardManager.js +128 -0
  223. package/dist/esm/HttpShardManager.js.map +1 -0
  224. package/dist/esm/MachineId.js +17 -0
  225. package/dist/esm/MachineId.js.map +1 -0
  226. package/dist/esm/Message.js +88 -17
  227. package/dist/esm/Message.js.map +1 -1
  228. package/dist/esm/MessageStorage.js +345 -0
  229. package/dist/esm/MessageStorage.js.map +1 -0
  230. package/dist/esm/Reply.js +184 -0
  231. package/dist/esm/Reply.js.map +1 -0
  232. package/dist/esm/Runner.js +68 -0
  233. package/dist/esm/Runner.js.map +1 -0
  234. package/dist/esm/RunnerAddress.js +52 -0
  235. package/dist/esm/RunnerAddress.js.map +1 -0
  236. package/dist/esm/RunnerHealth.js +58 -0
  237. package/dist/esm/RunnerHealth.js.map +1 -0
  238. package/dist/esm/RunnerServer.js +116 -0
  239. package/dist/esm/RunnerServer.js.map +1 -0
  240. package/dist/esm/Runners.js +332 -0
  241. package/dist/esm/Runners.js.map +1 -0
  242. package/dist/esm/ShardId.js +5 -42
  243. package/dist/esm/ShardId.js.map +1 -1
  244. package/dist/esm/ShardManager.js +486 -7
  245. package/dist/esm/ShardManager.js.map +1 -1
  246. package/dist/esm/ShardStorage.js +129 -0
  247. package/dist/esm/ShardStorage.js.map +1 -0
  248. package/dist/esm/Sharding.js +729 -90
  249. package/dist/esm/Sharding.js.map +1 -1
  250. package/dist/esm/ShardingConfig.js +80 -17
  251. package/dist/esm/ShardingConfig.js.map +1 -1
  252. package/dist/esm/ShardingRegistrationEvent.js +19 -29
  253. package/dist/esm/ShardingRegistrationEvent.js.map +1 -1
  254. package/dist/esm/Singleton.js +15 -0
  255. package/dist/esm/Singleton.js.map +1 -0
  256. package/dist/esm/SingletonAddress.js +40 -0
  257. package/dist/esm/SingletonAddress.js.map +1 -0
  258. package/dist/esm/Snowflake.js +117 -0
  259. package/dist/esm/Snowflake.js.map +1 -0
  260. package/dist/esm/SocketRunner.js +31 -0
  261. package/dist/esm/SocketRunner.js.map +1 -0
  262. package/dist/esm/SocketShardManager.js +24 -0
  263. package/dist/esm/SocketShardManager.js.map +1 -0
  264. package/dist/esm/SqlMessageStorage.js +658 -0
  265. package/dist/esm/SqlMessageStorage.js.map +1 -0
  266. package/dist/esm/SqlShardStorage.js +218 -0
  267. package/dist/esm/SqlShardStorage.js.map +1 -0
  268. package/dist/esm/SynchronizedClock.js +57 -0
  269. package/dist/esm/SynchronizedClock.js.map +1 -0
  270. package/dist/esm/index.js +48 -24
  271. package/dist/esm/index.js.map +1 -1
  272. package/dist/esm/internal/entityManager.js +311 -142
  273. package/dist/esm/internal/entityManager.js.map +1 -1
  274. package/dist/esm/internal/entityReaper.js +38 -0
  275. package/dist/esm/internal/entityReaper.js.map +1 -0
  276. package/dist/esm/internal/hash.js +12 -0
  277. package/dist/esm/internal/hash.js.map +1 -0
  278. package/dist/esm/internal/interruptors.js +3 -0
  279. package/dist/esm/internal/interruptors.js.map +1 -0
  280. package/dist/esm/internal/resourceMap.js +79 -0
  281. package/dist/esm/internal/resourceMap.js.map +1 -0
  282. package/dist/esm/internal/resourceRef.js +83 -0
  283. package/dist/esm/internal/resourceRef.js.map +1 -0
  284. package/dist/esm/internal/shardManager.js +217 -233
  285. package/dist/esm/internal/shardManager.js.map +1 -1
  286. package/package.json +212 -154
  287. package/src/ClusterError.ts +193 -0
  288. package/src/ClusterMetrics.ts +62 -0
  289. package/src/ClusterSchema.ts +13 -0
  290. package/src/DeliverAt.ts +36 -0
  291. package/src/Entity.ts +438 -0
  292. package/src/EntityAddress.ts +55 -0
  293. package/src/EntityId.ts +16 -0
  294. package/src/EntityType.ts +16 -0
  295. package/src/Envelope.ts +352 -0
  296. package/src/HttpCommon.ts +73 -0
  297. package/src/HttpRunner.ts +196 -0
  298. package/src/HttpShardManager.ts +273 -0
  299. package/src/MachineId.ts +27 -0
  300. package/src/Message.ts +143 -92
  301. package/src/MessageStorage.ts +697 -0
  302. package/src/Reply.ts +295 -0
  303. package/src/Runner.ts +84 -0
  304. package/src/RunnerAddress.ts +61 -0
  305. package/src/RunnerHealth.ts +87 -0
  306. package/src/RunnerServer.ts +156 -0
  307. package/src/Runners.ts +533 -0
  308. package/src/ShardId.ts +10 -62
  309. package/src/ShardManager.ts +780 -29
  310. package/src/ShardStorage.ts +289 -0
  311. package/src/Sharding.ts +1059 -186
  312. package/src/ShardingConfig.ts +186 -45
  313. package/src/ShardingRegistrationEvent.ts +38 -39
  314. package/src/Singleton.ts +20 -0
  315. package/src/SingletonAddress.ts +47 -0
  316. package/src/Snowflake.ts +194 -0
  317. package/src/SocketRunner.ts +59 -0
  318. package/src/SocketShardManager.ts +48 -0
  319. package/src/SqlMessageStorage.ts +833 -0
  320. package/src/SqlShardStorage.ts +292 -0
  321. package/src/SynchronizedClock.ts +82 -0
  322. package/src/index.ts +54 -24
  323. package/src/internal/entityManager.ts +464 -361
  324. package/src/internal/entityReaper.ts +53 -0
  325. package/src/internal/hash.ts +11 -0
  326. package/src/internal/interruptors.ts +4 -0
  327. package/src/internal/resourceMap.ts +89 -0
  328. package/src/internal/resourceRef.ts +88 -0
  329. package/src/internal/shardManager.ts +273 -546
  330. package/AtLeastOnce/package.json +0 -6
  331. package/AtLeastOnceStorage/package.json +0 -6
  332. package/Broadcaster/package.json +0 -6
  333. package/ManagerConfig/package.json +0 -6
  334. package/MessageState/package.json +0 -6
  335. package/Messenger/package.json +0 -6
  336. package/Pod/package.json +0 -6
  337. package/PodAddress/package.json +0 -6
  338. package/Pods/package.json +0 -6
  339. package/PodsHealth/package.json +0 -6
  340. package/PoisonPill/package.json +0 -6
  341. package/RecipientAddress/package.json +0 -6
  342. package/RecipientBehaviour/package.json +0 -6
  343. package/RecipientBehaviourContext/package.json +0 -6
  344. package/RecipientType/package.json +0 -6
  345. package/Serialization/package.json +0 -6
  346. package/SerializedEnvelope/package.json +0 -6
  347. package/SerializedMessage/package.json +0 -6
  348. package/ShardManagerClient/package.json +0 -6
  349. package/ShardingEvent/package.json +0 -6
  350. package/ShardingException/package.json +0 -6
  351. package/Storage/package.json +0 -6
  352. package/dist/cjs/AtLeastOnce.js.map +0 -1
  353. package/dist/cjs/AtLeastOnceStorage.js.map +0 -1
  354. package/dist/cjs/Broadcaster.js +0 -6
  355. package/dist/cjs/Broadcaster.js.map +0 -1
  356. package/dist/cjs/ManagerConfig.js.map +0 -1
  357. package/dist/cjs/MessageState.js +0 -55
  358. package/dist/cjs/MessageState.js.map +0 -1
  359. package/dist/cjs/Messenger.js +0 -6
  360. package/dist/cjs/Messenger.js.map +0 -1
  361. package/dist/cjs/Pod.js +0 -78
  362. package/dist/cjs/Pod.js.map +0 -1
  363. package/dist/cjs/PodAddress.js +0 -77
  364. package/dist/cjs/PodAddress.js.map +0 -1
  365. package/dist/cjs/Pods.js.map +0 -1
  366. package/dist/cjs/PodsHealth.js +0 -41
  367. package/dist/cjs/PodsHealth.js.map +0 -1
  368. package/dist/cjs/PoisonPill.js +0 -78
  369. package/dist/cjs/PoisonPill.js.map +0 -1
  370. package/dist/cjs/RecipientAddress.js +0 -79
  371. package/dist/cjs/RecipientAddress.js.map +0 -1
  372. package/dist/cjs/RecipientBehaviour.js +0 -38
  373. package/dist/cjs/RecipientBehaviour.js.map +0 -1
  374. package/dist/cjs/RecipientBehaviourContext.js +0 -64
  375. package/dist/cjs/RecipientBehaviourContext.js.map +0 -1
  376. package/dist/cjs/RecipientType.js +0 -123
  377. package/dist/cjs/RecipientType.js.map +0 -1
  378. package/dist/cjs/Serialization.js +0 -32
  379. package/dist/cjs/Serialization.js.map +0 -1
  380. package/dist/cjs/SerializedEnvelope.js +0 -87
  381. package/dist/cjs/SerializedEnvelope.js.map +0 -1
  382. package/dist/cjs/SerializedMessage.js +0 -64
  383. package/dist/cjs/SerializedMessage.js.map +0 -1
  384. package/dist/cjs/ShardManagerClient.js.map +0 -1
  385. package/dist/cjs/ShardingEvent.js +0 -72
  386. package/dist/cjs/ShardingEvent.js.map +0 -1
  387. package/dist/cjs/ShardingException.js +0 -107
  388. package/dist/cjs/ShardingException.js.map +0 -1
  389. package/dist/cjs/Storage.js +0 -40
  390. package/dist/cjs/Storage.js.map +0 -1
  391. package/dist/cjs/internal/atLeastOnce.js +0 -35
  392. package/dist/cjs/internal/atLeastOnce.js.map +0 -1
  393. package/dist/cjs/internal/atLeastOnceStorage.js +0 -163
  394. package/dist/cjs/internal/atLeastOnceStorage.js.map +0 -1
  395. package/dist/cjs/internal/entityState.js +0 -47
  396. package/dist/cjs/internal/entityState.js.map +0 -1
  397. package/dist/cjs/internal/managerConfig.js +0 -46
  398. package/dist/cjs/internal/managerConfig.js.map +0 -1
  399. package/dist/cjs/internal/message.js +0 -48
  400. package/dist/cjs/internal/message.js.map +0 -1
  401. package/dist/cjs/internal/messageState.js +0 -79
  402. package/dist/cjs/internal/messageState.js.map +0 -1
  403. package/dist/cjs/internal/podWithMetadata.js +0 -54
  404. package/dist/cjs/internal/podWithMetadata.js.map +0 -1
  405. package/dist/cjs/internal/pods.js +0 -35
  406. package/dist/cjs/internal/pods.js.map +0 -1
  407. package/dist/cjs/internal/podsHealth.js +0 -40
  408. package/dist/cjs/internal/podsHealth.js.map +0 -1
  409. package/dist/cjs/internal/recipientBehaviour.js +0 -52
  410. package/dist/cjs/internal/recipientBehaviour.js.map +0 -1
  411. package/dist/cjs/internal/recipientBehaviourContext.js +0 -36
  412. package/dist/cjs/internal/recipientBehaviourContext.js.map +0 -1
  413. package/dist/cjs/internal/serialization.js +0 -48
  414. package/dist/cjs/internal/serialization.js.map +0 -1
  415. package/dist/cjs/internal/shardManagerClient.js +0 -48
  416. package/dist/cjs/internal/shardManagerClient.js.map +0 -1
  417. package/dist/cjs/internal/shardManagerState.js +0 -44
  418. package/dist/cjs/internal/shardManagerState.js.map +0 -1
  419. package/dist/cjs/internal/sharding.js +0 -306
  420. package/dist/cjs/internal/sharding.js.map +0 -1
  421. package/dist/cjs/internal/shardingConfig.js +0 -56
  422. package/dist/cjs/internal/shardingConfig.js.map +0 -1
  423. package/dist/cjs/internal/storage.js +0 -52
  424. package/dist/cjs/internal/storage.js.map +0 -1
  425. package/dist/cjs/internal/utils.js +0 -69
  426. package/dist/cjs/internal/utils.js.map +0 -1
  427. package/dist/dts/AtLeastOnce.d.ts +0 -20
  428. package/dist/dts/AtLeastOnce.d.ts.map +0 -1
  429. package/dist/dts/AtLeastOnceStorage.d.ts +0 -75
  430. package/dist/dts/AtLeastOnceStorage.d.ts.map +0 -1
  431. package/dist/dts/Broadcaster.d.ts +0 -32
  432. package/dist/dts/Broadcaster.d.ts.map +0 -1
  433. package/dist/dts/ManagerConfig.d.ts +0 -61
  434. package/dist/dts/ManagerConfig.d.ts.map +0 -1
  435. package/dist/dts/MessageState.d.ts +0 -107
  436. package/dist/dts/MessageState.d.ts.map +0 -1
  437. package/dist/dts/Messenger.d.ts +0 -32
  438. package/dist/dts/Messenger.d.ts.map +0 -1
  439. package/dist/dts/Pod.d.ts +0 -81
  440. package/dist/dts/Pod.d.ts.map +0 -1
  441. package/dist/dts/PodAddress.d.ts +0 -80
  442. package/dist/dts/PodAddress.d.ts.map +0 -1
  443. package/dist/dts/Pods.d.ts +0 -78
  444. package/dist/dts/Pods.d.ts.map +0 -1
  445. package/dist/dts/PodsHealth.d.ts +0 -66
  446. package/dist/dts/PodsHealth.d.ts.map +0 -1
  447. package/dist/dts/PoisonPill.d.ts +0 -78
  448. package/dist/dts/PoisonPill.d.ts.map +0 -1
  449. package/dist/dts/RecipientAddress.d.ts +0 -57
  450. package/dist/dts/RecipientAddress.d.ts.map +0 -1
  451. package/dist/dts/RecipientBehaviour.d.ts +0 -72
  452. package/dist/dts/RecipientBehaviour.d.ts.map +0 -1
  453. package/dist/dts/RecipientBehaviourContext.d.ts +0 -83
  454. package/dist/dts/RecipientBehaviourContext.d.ts.map +0 -1
  455. package/dist/dts/RecipientType.d.ts +0 -93
  456. package/dist/dts/RecipientType.d.ts.map +0 -1
  457. package/dist/dts/Serialization.d.ts +0 -58
  458. package/dist/dts/Serialization.d.ts.map +0 -1
  459. package/dist/dts/SerializedEnvelope.d.ts +0 -86
  460. package/dist/dts/SerializedEnvelope.d.ts.map +0 -1
  461. package/dist/dts/SerializedMessage.d.ts +0 -66
  462. package/dist/dts/SerializedMessage.d.ts.map +0 -1
  463. package/dist/dts/ShardManagerClient.d.ts +0 -50
  464. package/dist/dts/ShardManagerClient.d.ts.map +0 -1
  465. package/dist/dts/ShardingEvent.d.ts +0 -90
  466. package/dist/dts/ShardingEvent.d.ts.map +0 -1
  467. package/dist/dts/ShardingException.d.ts +0 -125
  468. package/dist/dts/ShardingException.d.ts.map +0 -1
  469. package/dist/dts/Storage.d.ts +0 -78
  470. package/dist/dts/Storage.d.ts.map +0 -1
  471. package/dist/dts/internal/atLeastOnce.d.ts +0 -2
  472. package/dist/dts/internal/atLeastOnce.d.ts.map +0 -1
  473. package/dist/dts/internal/atLeastOnceStorage.d.ts +0 -2
  474. package/dist/dts/internal/atLeastOnceStorage.d.ts.map +0 -1
  475. package/dist/dts/internal/entityState.d.ts +0 -21
  476. package/dist/dts/internal/entityState.d.ts.map +0 -1
  477. package/dist/dts/internal/managerConfig.d.ts +0 -2
  478. package/dist/dts/internal/managerConfig.d.ts.map +0 -1
  479. package/dist/dts/internal/message.d.ts +0 -9
  480. package/dist/dts/internal/message.d.ts.map +0 -1
  481. package/dist/dts/internal/messageState.d.ts +0 -2
  482. package/dist/dts/internal/messageState.d.ts.map +0 -1
  483. package/dist/dts/internal/podWithMetadata.d.ts +0 -2
  484. package/dist/dts/internal/podWithMetadata.d.ts.map +0 -1
  485. package/dist/dts/internal/pods.d.ts +0 -2
  486. package/dist/dts/internal/pods.d.ts.map +0 -1
  487. package/dist/dts/internal/podsHealth.d.ts +0 -2
  488. package/dist/dts/internal/podsHealth.d.ts.map +0 -1
  489. package/dist/dts/internal/recipientBehaviour.d.ts +0 -2
  490. package/dist/dts/internal/recipientBehaviour.d.ts.map +0 -1
  491. package/dist/dts/internal/recipientBehaviourContext.d.ts +0 -2
  492. package/dist/dts/internal/recipientBehaviourContext.d.ts.map +0 -1
  493. package/dist/dts/internal/serialization.d.ts +0 -2
  494. package/dist/dts/internal/serialization.d.ts.map +0 -1
  495. package/dist/dts/internal/shardManagerClient.d.ts +0 -2
  496. package/dist/dts/internal/shardManagerClient.d.ts.map +0 -1
  497. package/dist/dts/internal/shardManagerState.d.ts +0 -26
  498. package/dist/dts/internal/shardManagerState.d.ts.map +0 -1
  499. package/dist/dts/internal/sharding.d.ts +0 -2
  500. package/dist/dts/internal/sharding.d.ts.map +0 -1
  501. package/dist/dts/internal/shardingConfig.d.ts +0 -2
  502. package/dist/dts/internal/shardingConfig.d.ts.map +0 -1
  503. package/dist/dts/internal/storage.d.ts +0 -2
  504. package/dist/dts/internal/storage.d.ts.map +0 -1
  505. package/dist/dts/internal/utils.d.ts +0 -2
  506. package/dist/dts/internal/utils.d.ts.map +0 -1
  507. package/dist/esm/AtLeastOnce.js +0 -12
  508. package/dist/esm/AtLeastOnce.js.map +0 -1
  509. package/dist/esm/AtLeastOnceStorage.js +0 -17
  510. package/dist/esm/AtLeastOnceStorage.js.map +0 -1
  511. package/dist/esm/Broadcaster.js +0 -2
  512. package/dist/esm/Broadcaster.js.map +0 -1
  513. package/dist/esm/ManagerConfig.js +0 -26
  514. package/dist/esm/ManagerConfig.js.map +0 -1
  515. package/dist/esm/MessageState.js +0 -47
  516. package/dist/esm/MessageState.js.map +0 -1
  517. package/dist/esm/Messenger.js +0 -2
  518. package/dist/esm/Messenger.js.map +0 -1
  519. package/dist/esm/Pod.js +0 -65
  520. package/dist/esm/Pod.js.map +0 -1
  521. package/dist/esm/PodAddress.js +0 -64
  522. package/dist/esm/PodAddress.js.map +0 -1
  523. package/dist/esm/Pods.js +0 -27
  524. package/dist/esm/Pods.js.map +0 -1
  525. package/dist/esm/PodsHealth.js +0 -33
  526. package/dist/esm/PodsHealth.js.map +0 -1
  527. package/dist/esm/PoisonPill.js +0 -65
  528. package/dist/esm/PoisonPill.js.map +0 -1
  529. package/dist/esm/RecipientAddress.js +0 -67
  530. package/dist/esm/RecipientAddress.js.map +0 -1
  531. package/dist/esm/RecipientBehaviour.js +0 -30
  532. package/dist/esm/RecipientBehaviour.js.map +0 -1
  533. package/dist/esm/RecipientBehaviourContext.js +0 -56
  534. package/dist/esm/RecipientBehaviourContext.js.map +0 -1
  535. package/dist/esm/RecipientType.js +0 -108
  536. package/dist/esm/RecipientType.js.map +0 -1
  537. package/dist/esm/Serialization.js +0 -24
  538. package/dist/esm/Serialization.js.map +0 -1
  539. package/dist/esm/SerializedEnvelope.js +0 -74
  540. package/dist/esm/SerializedEnvelope.js.map +0 -1
  541. package/dist/esm/SerializedMessage.js +0 -51
  542. package/dist/esm/SerializedMessage.js.map +0 -1
  543. package/dist/esm/ShardManagerClient.js +0 -22
  544. package/dist/esm/ShardManagerClient.js.map +0 -1
  545. package/dist/esm/ShardingEvent.js +0 -62
  546. package/dist/esm/ShardingEvent.js.map +0 -1
  547. package/dist/esm/ShardingException.js +0 -91
  548. package/dist/esm/ShardingException.js.map +0 -1
  549. package/dist/esm/Storage.js +0 -32
  550. package/dist/esm/Storage.js.map +0 -1
  551. package/dist/esm/internal/atLeastOnce.js +0 -26
  552. package/dist/esm/internal/atLeastOnce.js.map +0 -1
  553. package/dist/esm/internal/atLeastOnceStorage.js +0 -154
  554. package/dist/esm/internal/atLeastOnceStorage.js.map +0 -1
  555. package/dist/esm/internal/entityState.js +0 -35
  556. package/dist/esm/internal/entityState.js.map +0 -1
  557. package/dist/esm/internal/managerConfig.js +0 -38
  558. package/dist/esm/internal/managerConfig.js.map +0 -1
  559. package/dist/esm/internal/message.js +0 -35
  560. package/dist/esm/internal/message.js.map +0 -1
  561. package/dist/esm/internal/messageState.js +0 -66
  562. package/dist/esm/internal/messageState.js.map +0 -1
  563. package/dist/esm/internal/podWithMetadata.js +0 -41
  564. package/dist/esm/internal/podWithMetadata.js.map +0 -1
  565. package/dist/esm/internal/pods.js +0 -25
  566. package/dist/esm/internal/pods.js.map +0 -1
  567. package/dist/esm/internal/podsHealth.js +0 -30
  568. package/dist/esm/internal/podsHealth.js.map +0 -1
  569. package/dist/esm/internal/recipientBehaviour.js +0 -42
  570. package/dist/esm/internal/recipientBehaviour.js.map +0 -1
  571. package/dist/esm/internal/recipientBehaviourContext.js +0 -26
  572. package/dist/esm/internal/recipientBehaviourContext.js.map +0 -1
  573. package/dist/esm/internal/serialization.js +0 -38
  574. package/dist/esm/internal/serialization.js.map +0 -1
  575. package/dist/esm/internal/shardManagerClient.js +0 -38
  576. package/dist/esm/internal/shardManagerClient.js.map +0 -1
  577. package/dist/esm/internal/shardManagerState.js +0 -36
  578. package/dist/esm/internal/shardManagerState.js.map +0 -1
  579. package/dist/esm/internal/sharding.js +0 -288
  580. package/dist/esm/internal/sharding.js.map +0 -1
  581. package/dist/esm/internal/shardingConfig.js +0 -47
  582. package/dist/esm/internal/shardingConfig.js.map +0 -1
  583. package/dist/esm/internal/storage.js +0 -42
  584. package/dist/esm/internal/storage.js.map +0 -1
  585. package/dist/esm/internal/utils.js +0 -56
  586. package/dist/esm/internal/utils.js.map +0 -1
  587. package/src/AtLeastOnce.ts +0 -28
  588. package/src/AtLeastOnceStorage.ts +0 -96
  589. package/src/Broadcaster.ts +0 -48
  590. package/src/ManagerConfig.ts +0 -67
  591. package/src/MessageState.ts +0 -126
  592. package/src/Messenger.ts +0 -40
  593. package/src/Pod.ts +0 -95
  594. package/src/PodAddress.ts +0 -94
  595. package/src/Pods.ts +0 -100
  596. package/src/PodsHealth.ts +0 -74
  597. package/src/PoisonPill.ts +0 -105
  598. package/src/RecipientAddress.ts +0 -72
  599. package/src/RecipientBehaviour.ts +0 -108
  600. package/src/RecipientBehaviourContext.ts +0 -101
  601. package/src/RecipientType.ts +0 -134
  602. package/src/Serialization.ts +0 -72
  603. package/src/SerializedEnvelope.ts +0 -108
  604. package/src/SerializedMessage.ts +0 -82
  605. package/src/ShardManagerClient.ts +0 -57
  606. package/src/ShardingEvent.ts +0 -121
  607. package/src/ShardingException.ts +0 -151
  608. package/src/Storage.ts +0 -92
  609. package/src/internal/atLeastOnce.ts +0 -59
  610. package/src/internal/atLeastOnceStorage.ts +0 -218
  611. package/src/internal/entityState.ts +0 -64
  612. package/src/internal/managerConfig.ts +0 -84
  613. package/src/internal/message.ts +0 -63
  614. package/src/internal/messageState.ts +0 -98
  615. package/src/internal/podWithMetadata.ts +0 -72
  616. package/src/internal/pods.ts +0 -29
  617. package/src/internal/podsHealth.ts +0 -39
  618. package/src/internal/recipientBehaviour.ts +0 -133
  619. package/src/internal/recipientBehaviourContext.ts +0 -70
  620. package/src/internal/serialization.ts +0 -63
  621. package/src/internal/shardManagerClient.ts +0 -49
  622. package/src/internal/shardManagerState.ts +0 -80
  623. package/src/internal/sharding.ts +0 -789
  624. package/src/internal/shardingConfig.ts +0 -97
  625. package/src/internal/storage.ts +0 -60
  626. package/src/internal/utils.ts +0 -54
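The list above reflects the 0.29 reorganization: the Pod-family modules (Pod, PodAddress, Pods, PodsHealth, ...) are removed, and Runner-family modules (Runner, RunnerAddress, Runners, RunnerHealth) plus new storage modules (MessageStorage, ShardStorage, Snowflake, ...) take their place, each with its own subpath package.json. As a rough sketch of what this means for imports (the subpath names come from the package.json entries above; the named exports shown are only those visible in the diff below, everything else is an assumption):

// 0.28.x subpaths, removed in 0.29.x:
// import * as Pods from "@effect/cluster/Pods"
// import * as PodsHealth from "@effect/cluster/PodsHealth"

// 0.29.x replacements, per the package.json entries added above
// (the RunnerHealth and ShardStorage named exports appear in the diff below):
import * as Runners from "@effect/cluster/Runners"
import { RunnerHealth } from "@effect/cluster/RunnerHealth"
import { ShardStorage } from "@effect/cluster/ShardStorage"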
@@ -1,591 +1,318 @@
1
- /**
2
- * @since 1.0.0
3
- */
4
- import * as Chunk from "effect/Chunk"
1
+ import * as Arr from "effect/Array"
5
2
  import * as Clock from "effect/Clock"
6
- import { GenericTag } from "effect/Context"
7
3
  import * as Effect from "effect/Effect"
8
- import { equals } from "effect/Equal"
9
- import { pipe } from "effect/Function"
10
- import * as HashMap from "effect/HashMap"
11
- import * as HashSet from "effect/HashSet"
12
- import * as Layer from "effect/Layer"
13
- import * as List from "effect/List"
4
+ import { constFalse } from "effect/Function"
5
+ import * as MutableHashMap from "effect/MutableHashMap"
6
+ import * as MutableHashSet from "effect/MutableHashSet"
14
7
  import * as Option from "effect/Option"
15
- import * as PubSub from "effect/PubSub"
16
- import * as Schedule from "effect/Schedule"
17
- import type * as Scope from "effect/Scope"
18
- import * as Stream from "effect/Stream"
19
- import * as RefSynchronized from "effect/SynchronizedRef"
20
- import * as ManagerConfig from "../ManagerConfig.js"
21
- import type * as Pod from "../Pod.js"
22
- import type * as PodAddress from "../PodAddress.js"
23
- import * as Pods from "../Pods.js"
24
- import * as PodsHealth from "../PodsHealth.js"
25
- import * as ShardId from "../ShardId.js"
26
- import * as ShardingEvent from "../ShardingEvent.js"
27
- import * as ShardingException from "../ShardingException.js"
28
- import type * as ShardManager from "../ShardManager.js"
29
- import * as Storage from "../Storage.js"
30
- import * as PodWithMetadata from "./podWithMetadata.js"
31
- import * as ShardManagerState from "./shardManagerState.js"
32
- import { groupBy, minByOption } from "./utils.js"
8
+ import * as Order from "effect/Order"
9
+ import type { Runner } from "../Runner.js"
10
+ import type { RunnerAddress } from "../RunnerAddress.js"
11
+ import { RunnerHealth } from "../RunnerHealth.js"
12
+ import { ShardId } from "../ShardId.js"
13
+ import { ShardStorage } from "../ShardStorage.js"
33
14
 
34
15
  /** @internal */
35
- const ShardManagerSymbolKey = "@effect/cluster/ShardManager"
16
+ export class State {
17
+ static fromStorage = Effect.fnUntraced(function*(numberOfShards: number) {
18
+ const storage = yield* ShardStorage
19
+ const runnerHealth = yield* RunnerHealth
20
+
21
+ // Fetch registered runners and shard assignments from cluster storage
22
+ const storedRunners = yield* storage.getRunners
23
+ const storedAssignments = yield* storage.getAssignments
24
+
25
+ // Determine which runners are still alive
26
+ const deadRunners = Arr.empty<Runner>()
27
+ const aliveRunners = MutableHashMap.empty<RunnerAddress, Runner>()
28
+ yield* Effect.forEach(storedRunners, ([address, runner]) =>
29
+ Effect.map(runnerHealth.isAlive(address), (isAlive) => {
30
+ if (isAlive) {
31
+ MutableHashMap.set(aliveRunners, address, runner)
32
+ } else {
33
+ deadRunners.push(runner)
34
+ }
35
+ }), { concurrency: "unbounded", discard: true })
36
+ if (deadRunners.length > 0) {
37
+ yield* Effect.logWarning("Ignoring runners that are no longer considered alive:", deadRunners)
38
+ }
36
39
 
37
- /** @internal */
38
- export const ShardManagerTypeId: ShardManager.ShardManagerTypeId = Symbol.for(
39
- ShardManagerSymbolKey
40
- ) as ShardManager.ShardManagerTypeId
40
+ // Determine which shards remain unassigned to a runner
41
+ const assignedShards = new Map<ShardId, RunnerAddress>()
42
+ const invalidAssignments = Arr.empty<[ShardId, RunnerAddress]>()
43
+ for (const [shard, address] of storedAssignments) {
44
+ if (Option.isSome(address) && MutableHashMap.has(aliveRunners, address.value)) {
45
+ assignedShards.set(shard, address.value)
46
+ } else if (Option.isSome(address)) {
47
+ invalidAssignments.push([shard, address.value])
48
+ }
49
+ }
50
+ if (invalidAssignments.length > 0) {
51
+ yield* Effect.logWarning(
52
+ "Ignoring shard assignments for runners that are no longer considered alive: ",
53
+ invalidAssignments
54
+ )
55
+ }
41
56
 
42
- /** @internal */
43
- export const shardManagerTag = GenericTag<ShardManager.ShardManager>(ShardManagerSymbolKey)
57
+ // Construct the initial state
58
+ const now = yield* Clock.currentTimeMillis
59
+ const runnerState = MutableHashMap.empty<RunnerAddress, RunnerWithMetadata>()
60
+ for (const [address, runner] of aliveRunners) {
61
+ MutableHashMap.set(runnerState, address, RunnerWithMetadata({ runner, registeredAt: now }))
62
+ }
44
63
 
45
- /** @internal */
46
- function make(
47
- layerScope: Scope.Scope,
48
- stateRef: RefSynchronized.SynchronizedRef<ShardManagerState.ShardManagerState>,
49
- rebalanceSemaphore: Effect.Semaphore,
50
- eventsHub: PubSub.PubSub<ShardingEvent.ShardingEvent>,
51
- healthApi: PodsHealth.PodsHealth,
52
- podApi: Pods.Pods,
53
- stateRepository: Storage.Storage,
54
- config: ManagerConfig.ManagerConfig
55
- ): ShardManager.ShardManager {
56
- const getAssignments: Effect.Effect<HashMap.HashMap<ShardId.ShardId, Option.Option<PodAddress.PodAddress>>> = pipe(
57
- RefSynchronized.get(stateRef),
58
- Effect.map((_) => _.shards)
59
- )
60
-
61
- const getShardingEvents = Stream.fromPubSub(eventsHub)
62
-
63
- function register(pod: Pod.Pod) {
64
- return pipe(
65
- Effect.logDebug("Registering " + (pod.address) + "@" + pod.version),
66
- Effect.zipRight(
67
- RefSynchronized.updateAndGetEffect(stateRef, (state) =>
68
- pipe(
69
- Effect.flatMap(Effect.clock, (_) => _.currentTimeMillis),
70
- Effect.map((cdt) =>
71
- ShardManagerState.make(
72
- HashMap.set(state.pods, pod.address, PodWithMetadata.make(pod, cdt)),
73
- state.shards
74
- )
75
- )
76
- ))
77
- ),
78
- Effect.zipLeft(PubSub.publish(eventsHub, ShardingEvent.PodRegistered(pod.address))),
79
- Effect.flatMap((state) => Effect.when(rebalance(false), () => HashSet.size(state.unassignedShards) > 0)),
80
- Effect.zipRight(Effect.forkIn(layerScope)(persistPods)),
81
- Effect.asVoid
82
- )
83
- }
64
+ const shardState = new Map<ShardId, Option.Option<RunnerAddress>>()
65
+ for (let n = 1; n <= numberOfShards; n++) {
66
+ const shardId = ShardId.make(n)
67
+ shardState.set(shardId, Option.fromNullable(assignedShards.get(shardId)))
68
+ }
84
69
 
85
- function stateHasPod(podAddress: PodAddress.PodAddress) {
86
- return pipe(
87
- RefSynchronized.get(stateRef),
88
- Effect.map((_) => HashMap.has(_.pods, podAddress))
89
- )
90
- }
70
+ return new State(runnerState, shardState)
71
+ })
91
72
 
92
- function notifyUnhealthyPod(podAddress: PodAddress.PodAddress) {
93
- return pipe(
94
- Effect.whenEffect(
95
- pipe(
96
- PubSub.publish(eventsHub, ShardingEvent.PodHealthChecked(podAddress)),
97
- Effect.zipRight(
98
- Effect.unlessEffect(
99
- Effect.zipRight(
100
- Effect.logWarning(`${podAddress} is not alive, unregistering`),
101
- unregister(podAddress)
102
- ),
103
- healthApi.isAlive(podAddress)
104
- )
105
- )
106
- ),
107
- stateHasPod(podAddress)
108
- ),
109
- Effect.asVoid
110
- )
111
- }
73
+ constructor(
74
+ readonly runners: MutableHashMap.MutableHashMap<RunnerAddress, RunnerWithMetadata>,
75
+ readonly shards: Map<ShardId, Option.Option<RunnerAddress>>
76
+ ) {}
112
77
 
113
- const checkAllPodsHealth = pipe(
114
- RefSynchronized.get(stateRef),
115
- Effect.map((_) => HashMap.keySet(_.pods)),
116
- Effect.flatMap((_) => (Effect.forEach(_, notifyUnhealthyPod, { concurrency: 4, discard: true })))
117
- )
118
-
119
- function unregister(podAddress: PodAddress.PodAddress) {
120
- const eff = pipe(
121
- Effect.Do,
122
- Effect.zipLeft(Effect.logDebug(`Unregistering ${podAddress}`)),
123
- Effect.bind("unassignments", (_) =>
124
- pipe(
125
- stateRef,
126
- RefSynchronized.modify((state) => [
127
- pipe(
128
- state.shards,
129
- HashMap.filter((pod) => equals(pod)(Option.some(podAddress))),
130
- HashMap.keySet
131
- ),
132
- {
133
- ...state,
134
- pods: HashMap.remove(state.pods, podAddress),
135
- shards: HashMap.map(state.shards, (_) => equals(_)(Option.some(podAddress)) ? Option.none() : _)
136
- }
137
- ])
138
- )),
139
- Effect.tap((_) => PubSub.publish(eventsHub, ShardingEvent.PodUnregistered(podAddress))),
140
- Effect.tap((_) =>
141
- Effect.when(
142
- PubSub.publish(eventsHub, ShardingEvent.ShardsUnassigned(podAddress, _.unassignments)),
143
- () => HashSet.size(_.unassignments) > 0
144
- )
145
- ),
146
- Effect.zipLeft(Effect.forkIn(layerScope)(persistPods)),
147
- Effect.zipLeft(Effect.forkIn(layerScope)(rebalance(true)))
148
- )
149
- return Effect.asVoid(Effect.whenEffect(eff, stateHasPod(podAddress)))
78
+ get maxVersion(): Option.Option<number> {
79
+ if (MutableHashMap.size(this.runners) === 0) return Option.none()
80
+ let version: number | undefined = undefined
81
+ for (const [, meta] of this.runners) {
82
+ if (version === undefined || meta.runner.version > version) {
83
+ version = meta.runner.version
84
+ }
85
+ }
86
+ return Option.some(version!)
150
87
  }
151
88
 
152
- function withRetry<A, E>(zio: Effect.Effect<A, E>): Effect.Effect<void> {
153
- return pipe(
154
- zio,
155
- Effect.retry(
156
- pipe(
157
- Schedule.spaced(config.persistRetryInterval),
158
- Schedule.andThen(Schedule.recurs(config.persistRetryCount))
159
- )
160
- ),
161
- Effect.ignore
89
+ allRunnersHaveVersion(version: Option.Option<number>): boolean {
90
+ return version.pipe(
91
+ Option.map((max) => Arr.every(this.runnerVersions, (version) => version === max)),
92
+ Option.getOrElse(constFalse)
162
93
  )
163
94
  }
164
95
 
165
- const persistAssignments = withRetry(
166
- pipe(
167
- RefSynchronized.get(stateRef),
168
- Effect.flatMap((state) => stateRepository.saveAssignments(state.shards))
169
- )
170
- )
96
+ get shardsPerRunner(): MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>> {
97
+ const shards = MutableHashMap.empty<RunnerAddress, Set<ShardId>>()
171
98
 
172
- const persistPods = withRetry(
173
- pipe(
174
- RefSynchronized.get(stateRef),
175
- Effect.flatMap((state) => stateRepository.savePods(HashMap.map(state.pods, (v) => v.pod)))
176
- )
177
- )
178
-
179
- function updateShardsState(
180
- shards: HashSet.HashSet<ShardId.ShardId>,
181
- pod: Option.Option<PodAddress.PodAddress>
182
- ) {
183
- return RefSynchronized.updateEffect(stateRef, (state) => {
184
- if (Option.isSome(pod) && !HashMap.has(state.pods, pod.value)) {
185
- return Effect.fail(new ShardingException.PodNoLongerRegisteredException({ podAddress: pod.value }))
186
- }
187
- return Effect.succeed({
188
- ...state,
189
- shards: pipe(
190
- state.shards,
191
- HashMap.map((assignment, shard) => HashSet.has(shards, shard) ? pod : assignment)
192
- )
193
- })
99
+ if (MutableHashMap.isEmpty(this.runners)) return shards
100
+ MutableHashMap.forEach(this.runners, (_, address) => {
101
+ MutableHashMap.set(shards, address, new Set())
194
102
  })
195
- }
196
103
 
197
- function rebalance(rebalanceImmediately: boolean): Effect.Effect<void> {
198
- const algo = Effect.gen(function*() {
199
- const state = yield* RefSynchronized.get(stateRef)
104
+ for (const [shard, address] of this.shards) {
105
+ if (Option.isNone(address)) continue
106
+ const shardIds = Option.getOrUndefined(MutableHashMap.get(shards, address.value))!
107
+ shardIds.add(shard)
108
+ }
200
109
 
201
- const [assignments, unassignments] = rebalanceImmediately || HashSet.size(state.unassignedShards) > 0
202
- ? decideAssignmentsForUnassignedShards(state)
203
- : decideAssignmentsForUnbalancedShards(state, config.rebalanceRate)
110
+ return shards
111
+ }
204
112
 
205
- const areChanges = HashMap.size(assignments) > 0 || HashMap.size(unassignments) > 0
113
+ get averageShardsPerRunner(): number {
114
+ const runnerCount = MutableHashMap.size(this.runners)
115
+ return runnerCount > 0 ? this.shards.size / runnerCount : 0
116
+ }
206
117
 
207
- if (areChanges) {
208
- yield* Effect.logDebug(
209
- "Rebalance (rebalanceImmidiately=" + JSON.stringify(rebalanceImmediately) + ")"
210
- )
118
+ get unassignedShards(): Array<ShardId> {
119
+ const shardIds: Array<ShardId> = []
120
+ for (const [shard, address] of this.shards) {
121
+ if (Option.isNone(address)) {
122
+ shardIds.push(shard)
211
123
  }
124
+ }
125
+ return shardIds
126
+ }
212
127
 
213
- const failedPingedPods = yield* pipe(
214
- HashSet.union(HashMap.keySet(assignments), HashMap.keySet(unassignments)),
215
- Effect.forEach(
216
- (pod) =>
217
- pipe(
218
- podApi.ping(pod),
219
- Effect.timeout(config.pingTimeout),
220
- Effect.match({
221
- onFailure: () => Chunk.fromIterable([pod]),
222
- onSuccess: () => Chunk.empty<PodAddress.PodAddress>()
223
- })
224
- ),
225
- { concurrency: "inherit" }
226
- ),
227
- Effect.map(Chunk.fromIterable),
228
- Effect.map((_) => Chunk.flatten(_)),
229
- Effect.map(HashSet.fromIterable)
230
- )
128
+ private get runnerVersions(): Array<number> {
129
+ const runnerVersions: Array<number> = []
130
+ for (const [, meta] of this.runners) {
131
+ runnerVersions.push(meta.runner.version)
132
+ }
133
+ return runnerVersions
134
+ }
135
+ }
231
136
 
232
- const shardsToRemove = pipe(
233
- List.fromIterable(assignments),
234
- List.appendAll(List.fromIterable(unassignments)),
235
- List.filter(([pod, __]) => HashSet.has(failedPingedPods, pod)),
236
- List.map(([_, shards]) => List.fromIterable(shards)),
237
- List.flatMap((_) => _), // TODO: List is missing flatMap
238
- HashSet.fromIterable
239
- )
137
+ /** @internal */
138
+ export interface RunnerWithMetadata {
139
+ readonly runner: Runner
140
+ readonly registeredAt: number
141
+ }
142
+ /** @internal */
143
+ export const RunnerWithMetadata = (runner: RunnerWithMetadata): RunnerWithMetadata => runner
240
144
 
241
- const readyAssignments = pipe(
242
- assignments,
243
- HashMap.map(HashSet.difference(shardsToRemove)),
244
- HashMap.filter((__) => HashSet.size(__) > 0)
245
- )
145
+ /** @internal */
146
+ export function decideAssignmentsForUnassignedShards(state: State): readonly [
147
+ assignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
148
+ unassignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
149
+ changes: MutableHashSet.MutableHashSet<RunnerAddress>
150
+ ] {
151
+ return pickNewRunners(state.unassignedShards, state, true, 1)
152
+ }
246
153
 
247
- const readyUnassignments = pipe(
248
- unassignments,
249
- HashMap.map(HashSet.difference(shardsToRemove)),
250
- HashMap.filter((__) => HashSet.size(__) > 0)
251
- )
154
+ const allocationOrder: Order.Order<[ShardId, number, number]> = Order.combine(
155
+ Order.mapInput(Order.number, ([, shards]) => shards),
156
+ Order.mapInput(Order.number, ([, , registeredAt]) => registeredAt)
157
+ )
252
158
 
253
- const [failedUnassignedPods, failedUnassignedShards] = yield* pipe(
254
- Effect.forEach(readyUnassignments, ([pod, shards]) =>
255
- pipe(
256
- podApi.unassignShards(pod, shards),
257
- Effect.zipRight(updateShardsState(shards, Option.none())),
258
- Effect.matchEffect({
259
- onFailure: () => Effect.succeed([HashSet.fromIterable([pod]), shards] as const),
260
- onSuccess: () =>
261
- pipe(
262
- PubSub.publish(eventsHub, ShardingEvent.ShardsUnassigned(pod, shards)),
263
- Effect.as(
264
- [
265
- HashSet.empty<PodAddress.PodAddress>(),
266
- HashSet.empty<ShardId.ShardId>()
267
- ] as const
268
- )
269
- )
270
- })
271
- ), { concurrency: "inherit" }),
272
- Effect.map(Chunk.fromIterable),
273
- Effect.map((_) => Chunk.unzip(_)),
274
- Effect.map(
275
- ([pods, shards]) => [Chunk.map(pods, Chunk.fromIterable), Chunk.map(shards, Chunk.fromIterable)] as const
276
- ),
277
- Effect.map(
278
- ([pods, shards]) =>
279
- [
280
- HashSet.fromIterable(Chunk.flatten(pods)),
281
- HashSet.fromIterable(Chunk.flatten(shards))
282
- ] as const
283
- )
284
- )
159
+ /** @internal */
160
+ export function decideAssignmentsForUnbalancedShards(state: State, rate: number): readonly [
161
+ assignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
162
+ unassignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
163
+ changes: MutableHashSet.MutableHashSet<RunnerAddress>
164
+ ] {
165
+ const shardsPerRunner = state.shardsPerRunner
166
+ const maxVersion = state.maxVersion
167
+ const extraShardsToAllocate = Arr.empty<[ShardId, shardsInverse: number, registeredAt: number]>()
168
+
169
+ if (state.allRunnersHaveVersion(maxVersion)) {
170
+ const averageShardsPerRunner = state.averageShardsPerRunner
171
+ MutableHashMap.forEach(shardsPerRunner, (shards) => {
172
+ // Count how many extra shards there are compared to the average
173
+ const extraShards = Math.max(0, shards.size - averageShardsPerRunner)
174
+ for (const shard of takeRandom(shards, extraShards)) {
175
+ const maybeAddress = state.shards.get(shard) ?? Option.none()
176
+ if (Option.isNone(maybeAddress)) {
177
+ extraShardsToAllocate.push([shard, Number.MIN_SAFE_INTEGER, Number.MIN_SAFE_INTEGER])
178
+ continue
179
+ }
180
+ const address = maybeAddress.value
181
+ extraShardsToAllocate.push([
182
+ shard,
183
+ Option.match(MutableHashMap.get(shardsPerRunner, address), {
184
+ onNone: () => Number.MIN_SAFE_INTEGER,
185
+ onSome: (shards) => -shards.size
186
+ }),
187
+ Option.match(MutableHashMap.get(state.runners, address), {
188
+ onNone: () => Number.MIN_SAFE_INTEGER,
189
+ onSome: (meta) => meta.registeredAt
190
+ })
191
+ ])
192
+ }
193
+ })
194
+ }
285
195
 
286
- // remove assignments of shards that couldn't be unassigned, as well as faulty pods.
287
- const filteredAssignments = pipe(
288
- HashMap.removeMany(readyAssignments, failedUnassignedPods),
289
- HashMap.map((shards, __) => HashSet.difference(shards, failedUnassignedShards))
290
- )
196
+ const sortedShardsToRebalance = extraShardsToAllocate.sort(allocationOrder).map(([shard]) => shard)
291
197
 
292
- // then do the assignments
293
- const failedAssignedPods = yield* pipe(
294
- Effect.forEach(filteredAssignments, ([pod, shards]) =>
295
- pipe(
296
- podApi.assignShards(pod, shards),
297
- Effect.zipRight(updateShardsState(shards, Option.some(pod))),
298
- Effect.matchEffect({
299
- onFailure: () => Effect.succeed(Chunk.fromIterable([pod])),
300
- onSuccess: () =>
301
- pipe(
302
- PubSub.publish(eventsHub, ShardingEvent.ShardsAssigned(pod, shards)),
303
- Effect.as(Chunk.empty())
304
- )
305
- })
306
- ), { concurrency: "inherit" }),
307
- Effect.map(Chunk.fromIterable),
308
- Effect.map((_) => Chunk.flatten(_)),
309
- Effect.map(HashSet.fromIterable)
310
- )
198
+ return pickNewRunners(sortedShardsToRebalance, state, false, rate, shardsPerRunner, maxVersion)
199
+ }
311
200
 
312
- const failedPods = HashSet.union(
313
- HashSet.union(failedPingedPods, failedUnassignedPods),
314
- failedAssignedPods
315
- )
201
+ function pickNewRunners(
202
+ shardsToRebalance: ReadonlyArray<ShardId>,
203
+ state: State,
204
+ immediate: boolean,
205
+ rate: number,
206
+ shardsPerRunner = state.shardsPerRunner,
207
+ maybeMaxVersion = state.maxVersion
208
+ ): readonly [
209
+ assignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
210
+ unassignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<ShardId>>,
211
+ changes: MutableHashSet.MutableHashSet<RunnerAddress>
212
+ ] {
213
+ const addressAssignments = MutableHashMap.empty<RunnerAddress, Set<ShardId>>()
214
+ const unassignments = MutableHashMap.empty<RunnerAddress, Set<ShardId>>()
215
+ const changes = MutableHashSet.empty<RunnerAddress>()
316
216
 
317
- // check if failing pods are still up
318
- yield* Effect.forkIn(layerScope)(Effect.forEach(failedPods, (_) => notifyUnhealthyPod(_), { discard: true }))
319
-
320
- if (HashSet.size(failedPods) > 0) {
321
- yield* Effect.logDebug(
322
- "Failed to rebalance pods: " +
323
- failedPods +
324
- " failed pinged: " + failedPingedPods +
325
- " failed assigned: " + failedAssignedPods +
326
- " failed unassigned: " + failedUnassignedPods
327
- )
217
+ if (Option.isNone(maybeMaxVersion)) {
218
+ return [addressAssignments, unassignments, changes]
219
+ }
220
+ const maxVersion = maybeMaxVersion.value
221
+
222
+ for (const shardId of shardsToRebalance) {
223
+ // Find the runner with the fewest assigned shards
224
+ let candidate: RunnerAddress | undefined
225
+ let candidateShards: Set<ShardId> | undefined
226
+
227
+ for (const [address, shards] of shardsPerRunner) {
228
+ // Keep only runners with the maximum version
229
+ const maybeRunnerMeta = MutableHashMap.get(state.runners, address)
230
+ if (Option.isNone(maybeRunnerMeta)) continue
231
+ const runnerMeta = maybeRunnerMeta.value
232
+ if (runnerMeta.runner.version !== maxVersion) continue
233
+
234
+ // Do not assign to a runner that has unassignments in the same rebalance
235
+ if (MutableHashMap.has(unassignments, address)) continue
236
+
237
+ // Do not assign too many shards to each runner unless rebalancing must
238
+ // occur immediately
239
+ if (!immediate) {
240
+ const assignmentCount = Option.getOrUndefined(MutableHashMap.get(addressAssignments, address))?.size ?? 0
241
+ if (assignmentCount >= state.shards.size * rate) continue
328
242
  }
329
243
 
330
- // retry rebalancing later if there was any failure
331
- if (HashSet.size(failedPods) > 0 && rebalanceImmediately) {
332
- yield* pipe(
333
- Effect.sleep(config.rebalanceRetryInterval),
334
- Effect.zipRight(rebalance(rebalanceImmediately)),
335
- Effect.forkIn(layerScope)
336
- )
244
+ if (candidate === undefined || shards.size < candidateShards!.size) {
245
+ candidate = address
246
+ candidateShards = shards
337
247
  }
248
+ }
249
+ if (!candidate || !candidateShards) break
338
250
 
339
- // persist state changes to Redis
340
- if (areChanges) {
341
- yield* Effect.forkIn(layerScope)(persistAssignments)
342
- }
343
- })
251
+ // If the old runner is the same as the new runner, do nothing
252
+ const oldRunner = Option.getOrUndefined(state.shards.get(shardId) ?? Option.none())
253
+ if (oldRunner && oldRunner.toString() === candidate.toString()) {
254
+ continue
255
+ }
256
+ const oldShards = oldRunner && Option.getOrUndefined(MutableHashMap.get(shardsPerRunner, oldRunner))
257
+
258
+ // If the new runner has one less, as many, or more shards than the
259
+ // old runner, do not change anything
260
+ if (oldShards && candidateShards.size + 1 >= oldShards.size) continue
261
+
262
+ // Otherwise create a new assignment
263
+ MutableHashMap.modifyAt(
264
+ addressAssignments,
265
+ candidate,
266
+ Option.match({
267
+ onNone: () => Option.some(new Set([shardId])),
268
+ onSome: (shards) => {
269
+ shards.add(shardId)
270
+ return Option.some(shards)
271
+ }
272
+ })
273
+ )
274
+ if (oldRunner) {
275
+ MutableHashMap.modifyAt(
276
+ unassignments,
277
+ oldRunner,
278
+ Option.match({
279
+ onNone: () => Option.some(new Set([shardId])),
280
+ onSome: (shards) => {
281
+ shards.add(shardId)
282
+ return Option.some(shards)
283
+ }
284
+ })
285
+ )
286
+ }
344
287
 
345
- return rebalanceSemaphore.withPermits(1)(algo)
346
- }
288
+ // Move the shard to the new runner
289
+ candidateShards.add(shardId)
290
+ if (oldShards) {
291
+ oldShards.delete(shardId)
292
+ }
347
293
 
348
- return {
349
- getAssignments,
350
- getShardingEvents,
351
- register,
352
- unregister,
353
- persistPods,
354
- rebalance,
355
- notifyUnhealthyPod,
356
- checkAllPodsHealth
294
+ // Track changes
295
+ MutableHashSet.add(changes, candidate)
296
+ if (oldRunner) MutableHashSet.add(changes, oldRunner)
357
297
  }
358
- }
359
298
 
360
- /** @internal */
361
- export function decideAssignmentsForUnassignedShards(state: ShardManagerState.ShardManagerState) {
362
- return pickNewPods(List.fromIterable(state.unassignedShards), state, true, 1)
299
+ return [addressAssignments, unassignments, changes]
363
300
  }

- /** @internal */
- export function decideAssignmentsForUnbalancedShards(
-   state: ShardManagerState.ShardManagerState,
-   rebalanceRate: number
- ) {
-   // don't do regular rebalance in the middle of a rolling update
-   const extraShardsToAllocate = state.allPodsHaveMaxVersion
-     ? pipe(
-       state.shardsPerPod,
-       HashMap.flatMap((shards, _) => {
-         // count how many extra shards compared to the average
-         const extraShards = Math.max(HashSet.size(shards) - state.averageShardsPerPod.value, 0)
-         return pipe(
-           HashMap.empty(),
-           HashMap.set(_, HashSet.fromIterable(List.take(List.fromIterable(shards), extraShards)))
-         )
-       }),
-       HashSet.fromIterable,
-       HashSet.map((_) => _[1]),
-       HashSet.flatMap((_) => _)
-     )
-     : HashSet.empty()
-
-   /*
-     TODO: port sortBy
-
-     val sortedShardsToRebalance = extraShardsToAllocate.toList.sortBy { shard =>
-       // handle unassigned shards first, then shards on the pods with most shards, then shards on old pods
-       state.shards.get(shard).flatten.fold((Int.MinValue, OffsetDateTime.MIN)) { pod =>
-         (
-           state.shardsPerPod.get(pod).fold(Int.MinValue)(-_.size),
-           state.pods.get(pod).fold(OffsetDateTime.MIN)(_.registered)
-         )
-       }
-     }
-   * */
-   const sortedShardsToRebalance = List.fromIterable(extraShardsToAllocate)
-   return pickNewPods(sortedShardsToRebalance, state, false, rebalanceRate)
+ function takeRandom<A>(self: Iterable<A>, n: number): ReadonlyArray<A> {
+   const array = Array.from(self)
+   let currentIndex = array.length
+   while (currentIndex != 0) {
+     const randomIndex = Math.floor(Math.random() * currentIndex)
+     currentIndex = currentIndex - 1
+     swap(array, currentIndex, randomIndex)
+   }
+   return n < array.length ? array.slice(0, n) : array
  }
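
The added `takeRandom` shuffles the whole input with Fisher–Yates and then slices, so every element is equally likely to land in the returned prefix; when `n` is at least the input length, the entire shuffled array comes back. A small hypothetical usage (the runner names are made up):

// pick two distinct runners uniformly at random from the candidates
const candidates = ["runner-1", "runner-2", "runner-3", "runner-4"]
const picked = takeRandom(candidates, 2)
console.log(picked) // e.g. ["runner-3", "runner-1"]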

- function pickNewPods(
-   shardsToRebalance: List.List<ShardId.ShardId>,
-   state: ShardManagerState.ShardManagerState,
-   rebalanceImmediately: boolean,
-   rebalanceRate: number
- ): readonly [
-   assignments: HashMap.HashMap<PodAddress.PodAddress, HashSet.HashSet<ShardId.ShardId>>,
-   unassignments: HashMap.HashMap<PodAddress.PodAddress, HashSet.HashSet<ShardId.ShardId>>
- ] {
-   const [_, assignments] = pipe(
-     List.reduce(
-       shardsToRebalance,
-       [
-         state.shardsPerPod,
-         List.empty<readonly [ShardId.ShardId, PodAddress.PodAddress]>()
-       ] as const,
-       ([shardsPerPod, assignments], shard) => {
-         const unassignedPods = pipe(
-           assignments,
-           List.flatMap(([shard, _]) =>
-             pipe(
-               HashMap.get(state.shards, shard),
-               Option.flatten,
-               Option.toArray,
-               List.fromIterable
-             )
-           )
-         )
-
-         // find pod with least amount of shards
-         return pipe(
-           // keep only pods with the max version
-           HashMap.filter(shardsPerPod, (_, pod) => {
-             const maxVersion = state.maxVersion
-             if (Option.isNone(maxVersion)) return true
-             return pipe(
-               HashMap.get(state.pods, pod),
-               Option.map(PodWithMetadata.extractVersion),
-               Option.map((_) => PodWithMetadata.compareVersion(_, maxVersion.value) === 0),
-               Option.getOrElse(() => false)
-             )
-           }),
-           // don't assign too many shards to the same pods, unless we need rebalance immediately
-           HashMap.filter((_, pod) => {
-             if (rebalanceImmediately) return true
-             return (
-               pipe(
-                 assignments,
-                 List.filter(([_, p]) => equals(p)(pod)),
-                 List.size
-               ) <
-                 HashMap.size(state.shards) * rebalanceRate
-             )
-           }),
-           // don't assign to a pod that was unassigned in the same rebalance
-           HashMap.filter(
-             (_, pod) => !Option.isSome(List.findFirst(unassignedPods, equals(pod)))
-           ),
-           minByOption(([_, pods]) => HashSet.size(pods)),
-           Option.match({
-             onNone: () => [shardsPerPod, assignments] as const,
-             onSome: ([pod, shards]) => {
-               const oldPod = Option.flatten(HashMap.get(state.shards, shard))
-               // if old pod is same as new pod, don't change anything
-               if (equals(oldPod)(pod)) {
-                 return [shardsPerPod, assignments] as const
-                 // if the new pod has more, as much, or only 1 less shard than the old pod, don't change anything
-               } else if (
-                 Option.match(HashMap.get(shardsPerPod, pod), { onNone: () => 0, onSome: HashSet.size }) + 1 >=
-                   Option.match(
-                     oldPod,
-                     {
-                       onNone: () => Number.MAX_SAFE_INTEGER,
-                       onSome: (_) =>
-                         Option.match(HashMap.get(shardsPerPod, _), { onNone: () => 0, onSome: HashSet.size })
-                     }
-                   )
-               ) {
-                 return [shardsPerPod, assignments] as const
-
-                 // otherwise, create a new assignment
-               } else {
-                 const unassigned = Option.match(
-                   oldPod,
-                   {
-                     onNone: () => shardsPerPod,
-                     onSome: (oldPod) => HashMap.modify(shardsPerPod, oldPod, HashSet.remove(shard))
-                   }
-                 )
-                 return [
-                   HashMap.modify(unassigned, pod, (_) => HashSet.add(shards, shard)),
-                   List.prepend(assignments, [shard, pod] as const)
-                 ] as const
-               }
-             }
-           })
-         )
-       }
-     )
-   )
-
-   const unassignments = List.flatMap(assignments, ([shard, _]) =>
-     pipe(
-       Option.flatten(HashMap.get(state.shards, shard)),
-       Option.map((_) => [shard, _] as const),
-       Option.match({ onNone: List.empty, onSome: List.of })
-     ))
-
-   const assignmentsPerPod = pipe(
-     assignments,
-     groupBy(([_, pod]) => pod),
-     HashMap.map(HashSet.map(([shardId, _]) => shardId))
-   )
-   const unassignmentsPerPod = pipe(
-     unassignments,
-     groupBy(([_, pod]) => pod),
-     HashMap.map(HashSet.map(([shardId, _]) => shardId))
-   )
-   return [assignmentsPerPod, unassignmentsPerPod] as const
+ function swap<A>(array: Array<A>, i: number, j: number): ReadonlyArray<A> {
+   const tmp = array[i]
+   array[i] = array[j]
+   array[j] = tmp
+   return array
  }
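
`swap` exchanges the two slots in place and returns the same array reference; the shuffle loop in `takeRandom` depends on that in-place behavior. A quick check:

const xs = [1, 2, 3]
swap(xs, 0, 2)
console.log(xs) // [3, 2, 1]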
-
- /**
-  * @since 1.0.0
-  * @category layers
-  */
- export const live = Effect.gen(function*() {
-   const config = yield* ManagerConfig.ManagerConfig
-   const stateRepository = yield* Storage.Storage
-   const healthApi = yield* PodsHealth.PodsHealth
-   const podsApi = yield* Pods.Pods
-   const layerScope = yield* Effect.scope
-
-   const pods = yield* stateRepository.getPods
-   const assignments = yield* stateRepository.getAssignments
-
-   const filteredPods = yield* pipe(
-     Effect.filter(pods, ([podAddress]) => healthApi.isAlive(podAddress), { concurrency: "inherit" }),
-     Effect.map(HashMap.fromIterable)
-   )
-   const filteredAssignments = HashMap.filter(
-     assignments,
-     (pod) => Option.isSome(pod) && HashMap.has(filteredPods, pod.value)
-   )
-   const cdt = yield* Clock.currentTimeMillis
-   const initialState = ShardManagerState.make(
-     HashMap.map(filteredPods, (pod) => PodWithMetadata.make(pod, cdt)),
-     HashMap.union(
-       filteredAssignments,
-       pipe(
-         Chunk.range(1, config.numberOfShards),
-         Chunk.map((n) => [ShardId.make(n), Option.none()] as const),
-         HashMap.fromIterable
-       )
-     )
-   )
-   const state = yield* RefSynchronized.make(initialState)
-   const rebalanceSemaphore = yield* Effect.makeSemaphore(1)
-   const eventsHub = yield* PubSub.unbounded<ShardingEvent.ShardingEvent>()
-   const shardManager = make(
-     layerScope,
-     state,
-     rebalanceSemaphore,
-     eventsHub,
-     healthApi,
-     podsApi,
-     stateRepository,
-     config
-   )
-   yield* Effect.forkIn(layerScope)(shardManager.persistPods)
-   // rebalance immediately if there are unassigned shards
-   yield* shardManager.rebalance(HashSet.size(initialState.unassignedShards) > 0)
-   // start a regular rebalance at the given interval
-   yield* pipe(
-     shardManager.rebalance(false),
-     Effect.repeat(Schedule.spaced(config.rebalanceInterval)),
-     Effect.forkIn(layerScope)
-   )
-   // log info events
-   yield* pipe(
-     shardManager.getShardingEvents,
-     Stream.mapEffect((_) => Effect.logDebug(JSON.stringify(_))),
-     Stream.runDrain,
-     Effect.forkIn(layerScope)
-   )
-   yield* Effect.logDebug("Shard Manager loaded")
-   return shardManager
- }).pipe(Layer.scoped(shardManagerTag))
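
The removed `live` layer carried the ShardManager's start-up sequence: read persisted pods and assignments, discard entries that fail the health check, rebalance immediately if any shards are unassigned, then repeat the rebalance on `config.rebalanceInterval` while streaming sharding events to the debug log. A minimal sketch of just that scheduling pattern, assuming a hypothetical `rebalance` effect and a hardcoded interval in place of the real configuration:

import { Effect, Schedule } from "effect"

// stand-in for ShardManager.rebalance (hypothetical)
const rebalance = (immediate: boolean) =>
  Effect.log(`rebalance (immediate=${immediate})`)

const startup = Effect.gen(function*() {
  // rebalance once at startup
  yield* rebalance(true)
  // then keep rebalancing in the background at a fixed interval;
  // the forked fiber is tied to the enclosing scope, playing the role
  // of Effect.forkIn(layerScope) in the removed code
  yield* Effect.forkScoped(
    rebalance(false).pipe(Effect.repeat(Schedule.spaced("30 seconds")))
  )
})

// Effect.runPromise(Effect.scoped(startup))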