cuboid 0.0.3 → 0.1.1

Files changed (495)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +5 -0
  3. data/Gemfile +0 -2
  4. data/README.md +14 -13
  5. data/cuboid.gemspec +0 -4
  6. data/lib/cuboid/application.rb +10 -10
  7. data/lib/cuboid/option_groups/agent.rb +54 -0
  8. data/lib/cuboid/option_groups/paths.rb +13 -4
  9. data/lib/cuboid/options.rb +1 -1
  10. data/lib/cuboid/processes/{dispatchers.rb → agents.rb} +40 -26
  11. data/lib/cuboid/processes/executables/agent.rb +5 -0
  12. data/lib/cuboid/processes/helpers/agents.rb +23 -0
  13. data/lib/cuboid/processes/helpers/instances.rb +4 -4
  14. data/lib/cuboid/processes/helpers.rb +1 -1
  15. data/lib/cuboid/processes/instances.rb +22 -10
  16. data/lib/cuboid/processes/schedulers.rb +16 -3
  17. data/lib/cuboid/processes.rb +2 -2
  18. data/lib/cuboid/report.rb +1 -0
  19. data/lib/cuboid/rest/server/instance_helpers.rb +13 -13
  20. data/lib/cuboid/rest/server/routes/dispatcher.rb +11 -11
  21. data/lib/cuboid/rest/server/routes/grid.rb +8 -8
  22. data/lib/cuboid/rest/server/routes/instances.rb +1 -1
  23. data/lib/cuboid/rest/server.rb +5 -5
  24. data/lib/cuboid/rpc/client/{dispatcher.rb → agent.rb} +4 -4
  25. data/lib/cuboid/rpc/client/instance.rb +2 -2
  26. data/lib/cuboid/rpc/client.rb +1 -1
  27. data/lib/cuboid/rpc/server/agent/node.rb +247 -0
  28. data/lib/cuboid/rpc/server/{dispatcher → agent}/service.rb +13 -13
  29. data/lib/cuboid/rpc/server/{dispatcher.rb → agent.rb} +62 -32
  30. data/lib/cuboid/rpc/server/application_wrapper.rb +6 -4
  31. data/lib/cuboid/rpc/server/instance.rb +4 -4
  32. data/lib/cuboid/rpc/server/scheduler.rb +13 -12
  33. data/lib/cuboid.rb +0 -3
  34. data/lib/version +1 -1
  35. data/spec/cuboid/option_groups/dispatcher_spec.rb +2 -2
  36. data/spec/cuboid/option_groups/paths_spec.rb +6 -3
  37. data/spec/cuboid/rest/server_spec.rb +46 -46
  38. data/spec/cuboid/rpc/client/dispatcher_spec.rb +2 -2
  39. data/spec/cuboid/rpc/server/dispatcher/node_spec.rb +65 -65
  40. data/spec/cuboid/rpc/server/dispatcher/service_spec.rb +16 -16
  41. data/spec/cuboid/rpc/server/dispatcher_spec.rb +187 -72
  42. data/spec/cuboid/rpc/server/scheduler_spec.rb +8 -8
  43. data/spec/support/fixtures/executables/node.rb +3 -3
  44. data/spec/support/fixtures/mock_app/test_service.rb +8 -8
  45. data/spec/support/fixtures/mock_app.rb +1 -1
  46. data/spec/support/fixtures/services/echo.rb +6 -6
  47. data/spec/support/helpers/resets.rb +1 -1
  48. data/spec/support/lib/web_server_client.rb +2 -2
  49. data/spec/support/lib/web_server_dispatcher.rb +1 -1
  50. data/spec/support/logs/Agent - 2486896-44236.log +6 -0
  51. data/spec/support/logs/Agent - 2487229-16390.log +6 -0
  52. data/spec/support/logs/Agent - 2487520-2511.log +6 -0
  53. data/spec/support/logs/Agent - 2487522-24008.log +6 -0
  54. data/spec/support/logs/Agent - 2487526-3383.log +6 -0
  55. data/spec/support/logs/Agent - 2487528-23713.log +10 -0
  56. data/spec/support/logs/Agent - 2487530-42875.log +10 -0
  57. data/spec/support/logs/Agent - 2487533-14182.log +10 -0
  58. data/spec/support/logs/Agent - 2487535-32486.log +10 -0
  59. data/spec/support/logs/Agent - 2487537-30578.log +10 -0
  60. data/spec/support/logs/Agent - 2487539-65402.log +10 -0
  61. data/spec/support/logs/Agent - 2493974-23066.log +6 -0
  62. data/spec/support/logs/Agent - 2501716-11729.log +6 -0
  63. data/spec/support/logs/Agent - 2501724-48638.log +6 -0
  64. data/spec/support/logs/Agent - 2501961-60077.log +6 -0
  65. data/spec/support/logs/Agent - 2501976-10941.log +10 -0
  66. data/spec/support/logs/Agent - 2502050-45312.log +10 -0
  67. data/spec/support/logs/Agent - 2502131-45940.log +10 -0
  68. data/spec/support/logs/Agent - 2502139-59848.log +10 -0
  69. data/spec/support/logs/Agent - 2502262-46629.log +10 -0
  70. data/spec/support/logs/Agent - 2502298-28395.log +10 -0
  71. data/spec/support/logs/Scheduler - 2486608-59709.log +3 -0
  72. data/spec/support/logs/Scheduler - 2486612-44110.log +27 -0
  73. data/spec/support/logs/Scheduler - 2486723-50393.log +3 -0
  74. data/spec/support/logs/Scheduler - 2486727-21620.log +27 -0
  75. data/spec/support/logs/Scheduler - 2486877-37845.log +3 -0
  76. data/spec/support/logs/Scheduler - 2486881-3624.log +1 -0
  77. data/spec/support/logs/Scheduler - 2486911-24752.log +3 -0
  78. data/spec/support/logs/Scheduler - 2486919-48535.log +27 -0
  79. data/spec/support/logs/Scheduler - 2486985-8897.log +1 -0
  80. data/spec/support/logs/Scheduler - 2487211-7516.log +3 -0
  81. data/spec/support/logs/Scheduler - 2487215-2831.log +1 -0
  82. data/spec/support/logs/Scheduler - 2487246-7826.log +3 -0
  83. data/spec/support/logs/Scheduler - 2487256-35669.log +6 -0
  84. data/spec/support/logs/Scheduler - 2487272-11542.log +4 -0
  85. data/spec/support/logs/Scheduler - 2487278-9621.log +1 -0
  86. data/spec/support/logs/Scheduler - 2487291-24094.log +3 -0
  87. data/spec/support/logs/Scheduler - 2487299-60095.log +6 -0
  88. data/spec/support/logs/Scheduler - 2487368-7706.log +3 -0
  89. data/spec/support/logs/Scheduler - 2487378-9859.log +6 -0
  90. data/spec/support/logs/Scheduler - 2487396-17812.log +3 -0
  91. data/spec/support/logs/Scheduler - 2487407-25543.log +6 -0
  92. data/spec/support/logs/Scheduler - 2487451-44767.log +4 -0
  93. data/spec/support/logs/Scheduler - 2487506-1422.log +6 -0
  94. data/spec/support/logs/Scheduler - 2487541-38068.log +1 -0
  95. data/spec/support/logs/Scheduler - 2487544-21866.log +1 -0
  96. data/spec/support/logs/Scheduler - 2487548-15245.log +1 -0
  97. data/spec/support/logs/Scheduler - 2487551-34905.log +1 -0
  98. data/spec/support/logs/Scheduler - 2487554-22142.log +1 -0
  99. data/spec/support/logs/Scheduler - 2487562-35113.log +1 -0
  100. data/spec/support/logs/Scheduler - 2487565-55125.log +3 -0
  101. data/spec/support/logs/Scheduler - 2487569-48845.log +6 -0
  102. data/spec/support/logs/Scheduler - 2487576-57192.log +4 -0
  103. data/spec/support/logs/Scheduler - 2487583-17991.log +1 -0
  104. data/spec/support/logs/Scheduler - 2487586-30014.log +1 -0
  105. data/spec/support/logs/Scheduler - 2487591-6472.log +1 -0
  106. data/spec/support/logs/Scheduler - 2487594-2195.log +1 -0
  107. data/spec/support/logs/Scheduler - 2487598-55808.log +3 -0
  108. data/spec/support/logs/Scheduler - 2487605-7400.log +1 -0
  109. data/spec/support/logs/Scheduler - 2487607-4337.log +1 -0
  110. data/spec/support/logs/Scheduler - 2487610-25835.log +1 -0
  111. data/spec/support/logs/Scheduler - 2493623-45209.log +3 -0
  112. data/spec/support/logs/Scheduler - 2493714-59407.log +1 -0
  113. data/spec/support/logs/Scheduler - 2494470-61696.log +3 -0
  114. data/spec/support/logs/Scheduler - 2494723-2810.log +6 -0
  115. data/spec/support/logs/Scheduler - 2495458-22112.log +4 -0
  116. data/spec/support/logs/Scheduler - 2496034-4076.log +1 -0
  117. data/spec/support/logs/Scheduler - 2496119-62253.log +3 -0
  118. data/spec/support/logs/Scheduler - 2496210-50380.log +6 -0
  119. data/spec/support/logs/Scheduler - 2497536-24922.log +3 -0
  120. data/spec/support/logs/Scheduler - 2497786-13515.log +6 -0
  121. data/spec/support/logs/Scheduler - 2498774-16911.log +3 -0
  122. data/spec/support/logs/Scheduler - 2498961-4742.log +6 -0
  123. data/spec/support/logs/Scheduler - 2500340-16045.log +4 -0
  124. data/spec/support/logs/Scheduler - 2500980-26158.log +6 -0
  125. data/spec/support/logs/Scheduler - 2502381-26435.log +1 -0
  126. data/spec/support/logs/Scheduler - 2502463-62965.log +1 -0
  127. data/spec/support/logs/Scheduler - 2502547-53434.log +1 -0
  128. data/spec/support/logs/Scheduler - 2502628-43720.log +1 -0
  129. data/spec/support/logs/Scheduler - 2502643-58379.log +1 -0
  130. data/spec/support/logs/Scheduler - 2502873-64893.log +1 -0
  131. data/spec/support/logs/Scheduler - 2502954-43885.log +3 -0
  132. data/spec/support/logs/Scheduler - 2503039-52147.log +6 -0
  133. data/spec/support/logs/Scheduler - 2503768-28831.log +4 -0
  134. data/spec/support/logs/Scheduler - 2504259-24533.log +1 -0
  135. data/spec/support/logs/Scheduler - 2504343-56967.log +1 -0
  136. data/spec/support/logs/Scheduler - 2504502-25085.log +1 -0
  137. data/spec/support/logs/Scheduler - 2504587-30789.log +1 -0
  138. data/spec/support/logs/Scheduler - 2504608-56601.log +3 -0
  139. data/spec/support/logs/Scheduler - 2504760-36374.log +1 -0
  140. data/spec/support/logs/Scheduler - 2504841-49675.log +1 -0
  141. data/spec/support/logs/Scheduler - 2504923-15781.log +1 -0
  142. data/spec/support/reports/18be00bff4371738c7c7013b284b415b.crf +0 -0
  143. data/spec/support/reports/1ca39d410c2cf1f652eb8c320d6682bd.crf +0 -0
  144. data/spec/support/reports/266a09d73152ce2f3d2951f1dab133f3.crf +0 -0
  145. data/spec/support/reports/2929bee9c126b2695dc569b693fef574.crf +0 -0
  146. data/spec/support/reports/2b9ce956f7060163d7a0b78603dc05ca.crf +0 -0
  147. data/spec/support/reports/46bcffd844008e71c7d90a76baf8597d.crf +0 -0
  148. data/spec/support/reports/620287cfdc373595385cf2471e1d4523.crf +0 -0
  149. data/spec/support/reports/75fecdd5e006942292e02e6a223e7279.crf +0 -0
  150. data/spec/support/reports/81cca53163bbab5ccf4d5f0401d5adcd.crf +0 -0
  151. data/spec/support/reports/8c204ee129fe1bd6c5964d29ae5d03ae.crf +0 -0
  152. data/spec/support/reports/aadbf2c1544b0e11174853fb4883a38c.crf +0 -0
  153. data/spec/support/reports/c360a2833f3e635e69036916010edeac.crf +0 -0
  154. data/spec/support/reports/e96abea937f1ed3f89bc2ec5397522db.crf +0 -0
  155. data/spec/support/reports/f3d542036e17f6a66e11bfacb2fb3366.crf +0 -0
  156. data/spec/support/snapshots/Cuboid 2022-02-01 13_10_28 +0200 a06d9bd7db81b1b4fb077ceadcc3895f.csf +0 -0
  157. data/spec/support/snapshots/Cuboid 2022-02-01 13_10_42 +0200 36c68859faf144eed9ff9c01ae754217.csf +0 -0
  158. data/spec/support/snapshots/Cuboid 2022-02-01 13_24_42 +0200 5a26112a913330ee8763b2982a4d42df.csf +0 -0
  159. data/spec/support/snapshots/Cuboid 2022-02-01 13_24_56 +0200 190856b98ac7099eb553ed3abcfbcb87.csf +0 -0
  160. metadata +306 -768
  161. data/lib/cuboid/option_groups/dispatcher.rb +0 -38
  162. data/lib/cuboid/processes/executables/dispatcher.rb +0 -5
  163. data/lib/cuboid/processes/helpers/dispatchers.rb +0 -23
  164. data/lib/cuboid/rpc/server/dispatcher/node.rb +0 -247
  165. data/logs/error-3207383.log +0 -106
  166. data/logs/error-3207482.log +0 -106
  167. data/logs/error-3208344.log +0 -109
  168. data/logs/error-3208460.log +0 -106
  169. data/logs/error-903730.log +0 -105
  170. data/logs/error-903846.log +0 -105
  171. data/logs/error-904783.log +0 -108
  172. data/logs/error-905101.log +0 -105
  173. data/spec/support/logs/Dispatcher - 3204062-27546.log +0 -6
  174. data/spec/support/logs/Dispatcher - 3207166-30195.log +0 -6
  175. data/spec/support/logs/Dispatcher - 3207418-16491.log +0 -6
  176. data/spec/support/logs/Dispatcher - 3207420-23797.log +0 -6
  177. data/spec/support/logs/Dispatcher - 3207424-64333.log +0 -6
  178. data/spec/support/logs/Dispatcher - 3207427-50621.log +0 -10
  179. data/spec/support/logs/Dispatcher - 3207429-15351.log +0 -10
  180. data/spec/support/logs/Dispatcher - 3207432-3685.log +0 -10
  181. data/spec/support/logs/Dispatcher - 3207436-43126.log +0 -10
  182. data/spec/support/logs/Dispatcher - 3207438-58131.log +0 -10
  183. data/spec/support/logs/Dispatcher - 3207440-32187.log +0 -10
  184. data/spec/support/logs/Dispatcher - 3207654-42085.log +0 -6
  185. data/spec/support/logs/Dispatcher - 3207769-16303.log +0 -6
  186. data/spec/support/logs/Dispatcher - 3207771-31196.log +0 -6
  187. data/spec/support/logs/Dispatcher - 3207773-53419.log +0 -6
  188. data/spec/support/logs/Dispatcher - 3207775-17015.log +0 -6
  189. data/spec/support/logs/Dispatcher - 3207787-56572.log +0 -6
  190. data/spec/support/logs/Dispatcher - 3207799-41227.log +0 -6
  191. data/spec/support/logs/Dispatcher - 3207815-49397.log +0 -6
  192. data/spec/support/logs/Dispatcher - 3207817-13826.log +0 -6
  193. data/spec/support/logs/Dispatcher - 3207819-46821.log +0 -6
  194. data/spec/support/logs/Dispatcher - 3207821-37991.log +0 -6
  195. data/spec/support/logs/Dispatcher - 3207825-52955.log +0 -6
  196. data/spec/support/logs/Dispatcher - 3207829-12122.log +0 -6
  197. data/spec/support/logs/Dispatcher - 3207831-58485.log +0 -16
  198. data/spec/support/logs/Dispatcher - 3207833-47083.log +0 -14
  199. data/spec/support/logs/Dispatcher - 3207837-53679.log +0 -10
  200. data/spec/support/logs/Dispatcher - 3207847-12037.log +0 -16
  201. data/spec/support/logs/Dispatcher - 3207852-64296.log +0 -14
  202. data/spec/support/logs/Dispatcher - 3207858-56473.log +0 -10
  203. data/spec/support/logs/Dispatcher - 3207864-26736.log +0 -6
  204. data/spec/support/logs/Dispatcher - 3207866-24113.log +0 -6
  205. data/spec/support/logs/Dispatcher - 3207870-6896.log +0 -6
  206. data/spec/support/logs/Dispatcher - 3207873-16434.log +0 -6
  207. data/spec/support/logs/Dispatcher - 3207885-31058.log +0 -6
  208. data/spec/support/logs/Dispatcher - 3207891-19927.log +0 -6
  209. data/spec/support/logs/Dispatcher - 3207897-41533.log +0 -6
  210. data/spec/support/logs/Dispatcher - 3207903-26815.log +0 -6
  211. data/spec/support/logs/Dispatcher - 3207909-25294.log +0 -6
  212. data/spec/support/logs/Dispatcher - 3207929-51610.log +0 -6
  213. data/spec/support/logs/Dispatcher - 3207990-8943.log +0 -16
  214. data/spec/support/logs/Dispatcher - 3208000-30657.log +0 -14
  215. data/spec/support/logs/Dispatcher - 3208010-54017.log +0 -10
  216. data/spec/support/logs/Dispatcher - 3208041-58792.log +0 -16
  217. data/spec/support/logs/Dispatcher - 3208047-50811.log +0 -14
  218. data/spec/support/logs/Dispatcher - 3208051-52018.log +0 -10
  219. data/spec/support/logs/Dispatcher - 3208067-46852.log +0 -6
  220. data/spec/support/logs/Dispatcher - 3208075-56209.log +0 -6
  221. data/spec/support/logs/Dispatcher - 3208088-4783.log +0 -6
  222. data/spec/support/logs/Dispatcher - 3208100-47518.log +0 -6
  223. data/spec/support/logs/Dispatcher - 3208115-25109.log +0 -6
  224. data/spec/support/logs/Dispatcher - 3208127-46551.log +0 -6
  225. data/spec/support/logs/Dispatcher - 3208133-2576.log +0 -6
  226. data/spec/support/logs/Dispatcher - 3208138-25988.log +0 -6
  227. data/spec/support/logs/Dispatcher - 3208299-19611.log +0 -6
  228. data/spec/support/logs/Dispatcher - 3208330-35076.log +0 -6
  229. data/spec/support/logs/Dispatcher - 3208340-32759.log +0 -6
  230. data/spec/support/logs/Dispatcher - 903393-34771.log +0 -6
  231. data/spec/support/logs/Dispatcher - 903765-19862.log +0 -6
  232. data/spec/support/logs/Dispatcher - 903767-43611.log +0 -6
  233. data/spec/support/logs/Dispatcher - 903770-34337.log +0 -6
  234. data/spec/support/logs/Dispatcher - 903774-7484.log +0 -10
  235. data/spec/support/logs/Dispatcher - 903777-5256.log +0 -10
  236. data/spec/support/logs/Dispatcher - 903780-12391.log +0 -10
  237. data/spec/support/logs/Dispatcher - 903782-54621.log +0 -10
  238. data/spec/support/logs/Dispatcher - 903786-46071.log +0 -10
  239. data/spec/support/logs/Dispatcher - 903794-48819.log +0 -10
  240. data/spec/support/logs/Dispatcher - 903906-54562.log +0 -6
  241. data/spec/support/logs/Dispatcher - 904068-37293.log +0 -6
  242. data/spec/support/logs/Dispatcher - 904070-53492.log +0 -6
  243. data/spec/support/logs/Dispatcher - 904073-27607.log +0 -6
  244. data/spec/support/logs/Dispatcher - 904075-41641.log +0 -6
  245. data/spec/support/logs/Dispatcher - 904099-53541.log +0 -6
  246. data/spec/support/logs/Dispatcher - 904112-10508.log +0 -6
  247. data/spec/support/logs/Dispatcher - 904132-5791.log +0 -6
  248. data/spec/support/logs/Dispatcher - 904141-56406.log +0 -6
  249. data/spec/support/logs/Dispatcher - 904147-21550.log +0 -6
  250. data/spec/support/logs/Dispatcher - 904149-20120.log +0 -6
  251. data/spec/support/logs/Dispatcher - 904155-33639.log +0 -6
  252. data/spec/support/logs/Dispatcher - 904161-53730.log +0 -6
  253. data/spec/support/logs/Dispatcher - 904169-49991.log +0 -16
  254. data/spec/support/logs/Dispatcher - 904172-39635.log +0 -14
  255. data/spec/support/logs/Dispatcher - 904192-9525.log +0 -10
  256. data/spec/support/logs/Dispatcher - 904206-3529.log +0 -16
  257. data/spec/support/logs/Dispatcher - 904211-16856.log +0 -14
  258. data/spec/support/logs/Dispatcher - 904216-49974.log +0 -10
  259. data/spec/support/logs/Dispatcher - 904228-16891.log +0 -6
  260. data/spec/support/logs/Dispatcher - 904231-34999.log +0 -6
  261. data/spec/support/logs/Dispatcher - 904236-50872.log +0 -6
  262. data/spec/support/logs/Dispatcher - 904238-25464.log +0 -6
  263. data/spec/support/logs/Dispatcher - 904251-43339.log +0 -6
  264. data/spec/support/logs/Dispatcher - 904256-18461.log +0 -6
  265. data/spec/support/logs/Dispatcher - 904266-59699.log +0 -6
  266. data/spec/support/logs/Dispatcher - 904279-17401.log +0 -6
  267. data/spec/support/logs/Dispatcher - 904289-48953.log +0 -6
  268. data/spec/support/logs/Dispatcher - 904309-22599.log +0 -6
  269. data/spec/support/logs/Dispatcher - 904386-44447.log +0 -16
  270. data/spec/support/logs/Dispatcher - 904409-51015.log +0 -14
  271. data/spec/support/logs/Dispatcher - 904420-34336.log +0 -10
  272. data/spec/support/logs/Dispatcher - 904455-24852.log +0 -16
  273. data/spec/support/logs/Dispatcher - 904459-54769.log +0 -14
  274. data/spec/support/logs/Dispatcher - 904464-49280.log +0 -10
  275. data/spec/support/logs/Dispatcher - 904490-41571.log +0 -6
  276. data/spec/support/logs/Dispatcher - 904495-62362.log +0 -6
  277. data/spec/support/logs/Dispatcher - 904517-14314.log +0 -6
  278. data/spec/support/logs/Dispatcher - 904529-30060.log +0 -6
  279. data/spec/support/logs/Dispatcher - 904538-61870.log +0 -6
  280. data/spec/support/logs/Dispatcher - 904553-59343.log +0 -6
  281. data/spec/support/logs/Dispatcher - 904563-59027.log +0 -6
  282. data/spec/support/logs/Dispatcher - 904576-62144.log +0 -6
  283. data/spec/support/logs/Dispatcher - 904742-2935.log +0 -6
  284. data/spec/support/logs/Dispatcher - 904771-62183.log +0 -6
  285. data/spec/support/logs/Dispatcher - 904780-13353.log +0 -6
  286. data/spec/support/logs/Instance - 3208178-11741.error.log +0 -106
  287. data/spec/support/logs/Instance - 3208181-15143.error.log +0 -106
  288. data/spec/support/logs/Instance - 3208183-7742.error.log +0 -106
  289. data/spec/support/logs/Instance - 904628-41184.error.log +0 -105
  290. data/spec/support/logs/Instance - 904631-38626.error.log +0 -105
  291. data/spec/support/logs/Instance - 904634-37879.error.log +0 -105
  292. data/spec/support/logs/Scheduler - 3203309-65225.log +0 -3
  293. data/spec/support/logs/Scheduler - 3203315-52999.log +0 -6
  294. data/spec/support/logs/Scheduler - 3204127-50400.log +0 -3
  295. data/spec/support/logs/Scheduler - 3204138-29313.log +0 -6
  296. data/spec/support/logs/Scheduler - 3204154-9476.log +0 -4
  297. data/spec/support/logs/Scheduler - 3204163-52855.log +0 -1
  298. data/spec/support/logs/Scheduler - 3204175-31574.log +0 -3
  299. data/spec/support/logs/Scheduler - 3204194-7097.log +0 -6
  300. data/spec/support/logs/Scheduler - 3204251-11724.log +0 -3
  301. data/spec/support/logs/Scheduler - 3204262-10820.log +0 -4
  302. data/spec/support/logs/Scheduler - 3207131-48958.log +0 -3
  303. data/spec/support/logs/Scheduler - 3207138-8974.log +0 -6
  304. data/spec/support/logs/Scheduler - 3207187-62652.log +0 -3
  305. data/spec/support/logs/Scheduler - 3207197-19207.log +0 -6
  306. data/spec/support/logs/Scheduler - 3207218-27080.log +0 -4
  307. data/spec/support/logs/Scheduler - 3207228-4393.log +0 -1
  308. data/spec/support/logs/Scheduler - 3207240-7381.log +0 -3
  309. data/spec/support/logs/Scheduler - 3207252-53772.log +0 -6
  310. data/spec/support/logs/Scheduler - 3207306-56622.log +0 -3
  311. data/spec/support/logs/Scheduler - 3207318-9939.log +0 -6
  312. data/spec/support/logs/Scheduler - 3207342-36988.log +0 -3
  313. data/spec/support/logs/Scheduler - 3207352-31746.log +0 -6
  314. data/spec/support/logs/Scheduler - 3207383-56973.log +0 -4
  315. data/spec/support/logs/Scheduler - 3207400-19390.log +0 -6
  316. data/spec/support/logs/Scheduler - 3207442-63021.log +0 -1
  317. data/spec/support/logs/Scheduler - 3207445-42476.log +0 -1
  318. data/spec/support/logs/Scheduler - 3207450-45489.log +0 -1
  319. data/spec/support/logs/Scheduler - 3207453-18262.log +0 -1
  320. data/spec/support/logs/Scheduler - 3207458-47234.log +0 -1
  321. data/spec/support/logs/Scheduler - 3207462-5628.log +0 -1
  322. data/spec/support/logs/Scheduler - 3207464-14620.log +0 -3
  323. data/spec/support/logs/Scheduler - 3207468-4793.log +0 -6
  324. data/spec/support/logs/Scheduler - 3207482-45268.log +0 -4
  325. data/spec/support/logs/Scheduler - 3207494-44991.log +0 -1
  326. data/spec/support/logs/Scheduler - 3207498-21429.log +0 -1
  327. data/spec/support/logs/Scheduler - 3207503-54136.log +0 -1
  328. data/spec/support/logs/Scheduler - 3207507-43714.log +0 -1
  329. data/spec/support/logs/Scheduler - 3207512-38735.log +0 -3
  330. data/spec/support/logs/Scheduler - 3207516-64075.log +0 -1
  331. data/spec/support/logs/Scheduler - 3207523-26974.log +0 -1
  332. data/spec/support/logs/Scheduler - 3207527-30807.log +0 -1
  333. data/spec/support/logs/Scheduler - 3208261-26059.log +0 -16
  334. data/spec/support/logs/Scheduler - 3208278-13735.log +0 -6
  335. data/spec/support/logs/Scheduler - 3208287-55638.log +0 -6
  336. data/spec/support/logs/Scheduler - 3208303-38465.log +0 -6
  337. data/spec/support/logs/Scheduler - 3208334-43532.log +0 -3
  338. data/spec/support/logs/Scheduler - 3208344-20376.log +0 -5
  339. data/spec/support/logs/Scheduler - 3208351-38224.log +0 -1
  340. data/spec/support/logs/Scheduler - 3208355-9843.log +0 -1
  341. data/spec/support/logs/Scheduler - 3208357-43942.log +0 -1
  342. data/spec/support/logs/Scheduler - 3208360-58330.log +0 -1
  343. data/spec/support/logs/Scheduler - 3208363-23807.log +0 -1
  344. data/spec/support/logs/Scheduler - 3208366-29256.log +0 -1
  345. data/spec/support/logs/Scheduler - 3208369-25684.log +0 -1
  346. data/spec/support/logs/Scheduler - 3208372-28479.log +0 -3
  347. data/spec/support/logs/Scheduler - 3208382-34006.log +0 -3
  348. data/spec/support/logs/Scheduler - 3208396-57942.log +0 -3
  349. data/spec/support/logs/Scheduler - 3208402-34617.log +0 -3
  350. data/spec/support/logs/Scheduler - 3208406-31477.log +0 -4
  351. data/spec/support/logs/Scheduler - 3208418-25154.log +0 -1
  352. data/spec/support/logs/Scheduler - 3208423-3948.log +0 -4
  353. data/spec/support/logs/Scheduler - 3208428-21648.log +0 -1
  354. data/spec/support/logs/Scheduler - 3208434-64685.log +0 -1
  355. data/spec/support/logs/Scheduler - 3208440-58157.log +0 -16
  356. data/spec/support/logs/Scheduler - 3208460-6293.log +0 -4
  357. data/spec/support/logs/Scheduler - 3208467-29409.log +0 -1
  358. data/spec/support/logs/Scheduler - 3208470-12825.log +0 -1
  359. data/spec/support/logs/Scheduler - 3208473-52401.log +0 -1
  360. data/spec/support/logs/Scheduler - 3208476-6567.log +0 -1
  361. data/spec/support/logs/Scheduler - 3208480-28476.log +0 -3
  362. data/spec/support/logs/Scheduler - 3208488-36893.log +0 -1
  363. data/spec/support/logs/Scheduler - 3208490-11932.log +0 -1
  364. data/spec/support/logs/Scheduler - 3208493-56676.log +0 -1
  365. data/spec/support/logs/Scheduler - 3208509-46176.log +0 -1
  366. data/spec/support/logs/Scheduler - 3208513-14321.log +0 -1
  367. data/spec/support/logs/Scheduler - 3208517-10539.log +0 -1
  368. data/spec/support/logs/Scheduler - 3208521-30079.log +0 -2
  369. data/spec/support/logs/Scheduler - 903345-9616.log +0 -3
  370. data/spec/support/logs/Scheduler - 903353-58507.log +0 -6
  371. data/spec/support/logs/Scheduler - 903417-55835.log +0 -3
  372. data/spec/support/logs/Scheduler - 903427-18261.log +0 -6
  373. data/spec/support/logs/Scheduler - 903439-36633.log +0 -4
  374. data/spec/support/logs/Scheduler - 903455-41936.log +0 -1
  375. data/spec/support/logs/Scheduler - 903506-60484.log +0 -3
  376. data/spec/support/logs/Scheduler - 903519-10519.log +0 -6
  377. data/spec/support/logs/Scheduler - 903593-8109.log +0 -3
  378. data/spec/support/logs/Scheduler - 903614-61308.log +0 -6
  379. data/spec/support/logs/Scheduler - 903667-39623.log +0 -3
  380. data/spec/support/logs/Scheduler - 903683-35117.log +0 -6
  381. data/spec/support/logs/Scheduler - 903730-34262.log +0 -4
  382. data/spec/support/logs/Scheduler - 903747-57287.log +0 -6
  383. data/spec/support/logs/Scheduler - 903798-40499.log +0 -1
  384. data/spec/support/logs/Scheduler - 903801-5479.log +0 -1
  385. data/spec/support/logs/Scheduler - 903806-11293.log +0 -1
  386. data/spec/support/logs/Scheduler - 903811-52201.log +0 -1
  387. data/spec/support/logs/Scheduler - 903813-54636.log +0 -1
  388. data/spec/support/logs/Scheduler - 903827-5581.log +0 -1
  389. data/spec/support/logs/Scheduler - 903830-48439.log +0 -3
  390. data/spec/support/logs/Scheduler - 903835-17198.log +0 -6
  391. data/spec/support/logs/Scheduler - 903846-28718.log +0 -4
  392. data/spec/support/logs/Scheduler - 903855-45172.log +0 -1
  393. data/spec/support/logs/Scheduler - 903864-11909.log +0 -1
  394. data/spec/support/logs/Scheduler - 903869-1794.log +0 -1
  395. data/spec/support/logs/Scheduler - 903873-59405.log +0 -1
  396. data/spec/support/logs/Scheduler - 903880-3155.log +0 -3
  397. data/spec/support/logs/Scheduler - 903887-52240.log +0 -1
  398. data/spec/support/logs/Scheduler - 903889-27541.log +0 -1
  399. data/spec/support/logs/Scheduler - 903895-16003.log +0 -1
  400. data/spec/support/logs/Scheduler - 904706-61946.log +0 -16
  401. data/spec/support/logs/Scheduler - 904725-2441.log +0 -6
  402. data/spec/support/logs/Scheduler - 904736-12992.log +0 -6
  403. data/spec/support/logs/Scheduler - 904744-61626.log +0 -6
  404. data/spec/support/logs/Scheduler - 904774-45665.log +0 -3
  405. data/spec/support/logs/Scheduler - 904783-51443.log +0 -5
  406. data/spec/support/logs/Scheduler - 904791-45170.log +0 -1
  407. data/spec/support/logs/Scheduler - 904793-58901.log +0 -1
  408. data/spec/support/logs/Scheduler - 904801-2336.log +0 -1
  409. data/spec/support/logs/Scheduler - 904803-10954.log +0 -1
  410. data/spec/support/logs/Scheduler - 904806-25343.log +0 -1
  411. data/spec/support/logs/Scheduler - 904810-23633.log +0 -1
  412. data/spec/support/logs/Scheduler - 904814-27547.log +0 -1
  413. data/spec/support/logs/Scheduler - 904819-53508.log +0 -3
  414. data/spec/support/logs/Scheduler - 904826-41103.log +0 -3
  415. data/spec/support/logs/Scheduler - 904835-20113.log +0 -3
  416. data/spec/support/logs/Scheduler - 904866-61722.log +0 -3
  417. data/spec/support/logs/Scheduler - 904878-18373.log +0 -4
  418. data/spec/support/logs/Scheduler - 904999-46113.log +0 -1
  419. data/spec/support/logs/Scheduler - 905011-23507.log +0 -4
  420. data/spec/support/logs/Scheduler - 905017-8299.log +0 -1
  421. data/spec/support/logs/Scheduler - 905028-51728.log +0 -1
  422. data/spec/support/logs/Scheduler - 905031-16092.log +0 -16
  423. data/spec/support/logs/Scheduler - 905101-65244.log +0 -4
  424. data/spec/support/logs/Scheduler - 905224-20698.log +0 -1
  425. data/spec/support/logs/Scheduler - 905234-53973.log +0 -1
  426. data/spec/support/logs/Scheduler - 905241-48042.log +0 -1
  427. data/spec/support/logs/Scheduler - 905334-30796.log +0 -1
  428. data/spec/support/logs/Scheduler - 905337-14399.log +0 -3
  429. data/spec/support/logs/Scheduler - 905350-31560.log +0 -1
  430. data/spec/support/logs/Scheduler - 905353-63541.log +0 -1
  431. data/spec/support/logs/Scheduler - 905359-22685.log +0 -1
  432. data/spec/support/logs/Scheduler - 905362-31483.log +0 -1
  433. data/spec/support/logs/Scheduler - 905365-28301.log +0 -1
  434. data/spec/support/logs/Scheduler - 905369-51335.log +0 -1
  435. data/spec/support/logs/Scheduler - 905373-43552.log +0 -2
  436. data/spec/support/logs/error-3206970.log +0 -801
  437. data/spec/support/logs/error-903103.log +0 -797
  438. data/spec/support/logs/output_spec_3206970.log +0 -390
  439. data/spec/support/logs/output_spec_903103.log +0 -390
  440. data/spec/support/reports/010223f6102d7d7ef50d7061f5a7c120.crf +0 -0
  441. data/spec/support/reports/0efcc0441bc58e27299737fc0e8bdb4a.crf +0 -0
  442. data/spec/support/reports/1444c78cae8a70d9a8f64a5b84579248.crf +0 -0
  443. data/spec/support/reports/16d1cdae64bbcf0b2083bc1870940fe8.crf +0 -0
  444. data/spec/support/reports/22cd328dd5adf2e5830db3eaf3e6d2fa.crf +0 -0
  445. data/spec/support/reports/26f5f97b84015ea1b3ebfaa04e6863f2.crf +0 -0
  446. data/spec/support/reports/40d544db82e9be4328c8bb16f2abbe96.crf +0 -0
  447. data/spec/support/reports/432466f64ec7b3a9e3a51e6c0c0f2b68.crf +0 -0
  448. data/spec/support/reports/493d52465fdd74388e1c1769e738fe0a.crf +0 -0
  449. data/spec/support/reports/49593f3c7dcff9466cebd4a3f71df16e.crf +0 -0
  450. data/spec/support/reports/4a875ccf578b82ea8af0661d09a0c9a2.crf +0 -0
  451. data/spec/support/reports/4fe265d0f318a758fff8f9c117d85c70.crf +0 -0
  452. data/spec/support/reports/5405af2d7e6bdba82761b3deb0316abb.crf +0 -0
  453. data/spec/support/reports/5801342bc5e7553ca065d9fc76b80a31.crf +0 -0
  454. data/spec/support/reports/5ca817d21692604d643a1ec4b9d698f8.crf +0 -0
  455. data/spec/support/reports/60a48dd87c16aaba48705cfa1102e6f0.crf +0 -0
  456. data/spec/support/reports/61b93d5be434e58e8286bc24375df5a6.crf +0 -0
  457. data/spec/support/reports/62ae63c7a653ccc450a042c83be6272e.crf +0 -0
  458. data/spec/support/reports/6dfba35f84478f2f8740989650e5c9b2.crf +0 -0
  459. data/spec/support/reports/72d8ff7e33ea0a4fa3208de46060ecb4.crf +0 -0
  460. data/spec/support/reports/7e73587e8be563c70f2864bb9982d4ac.crf +0 -0
  461. data/spec/support/reports/8a20cd9e7ea1f083c463f85990e48b9d.crf +0 -0
  462. data/spec/support/reports/8e22c8f69d18bfdc387ac2e2c73fd994.crf +0 -0
  463. data/spec/support/reports/99de7d4c926e154d9df18bbb66044360.crf +0 -0
  464. data/spec/support/reports/a8cda3f125d6de78da7e601e17ae02e0.crf +0 -0
  465. data/spec/support/reports/aa6b3e0cabbfa8f622cc3faa5e70d82d.crf +0 -0
  466. data/spec/support/reports/ad7582cad690ca1f6ec5529766dacecd.crf +0 -0
  467. data/spec/support/reports/af253c3c9e54c7efc1eb19a1ba0bc45b.crf +0 -0
  468. data/spec/support/reports/b57af832ae733e1a4182138f8373029d.crf +0 -0
  469. data/spec/support/reports/c266644ae90cff19058101b06c2410bd.crf +0 -0
  470. data/spec/support/reports/c684686518f8bb5af1fc05632b2ee3a1.crf +0 -0
  471. data/spec/support/reports/d0de163911157b30b56076653a01bd04.crf +0 -0
  472. data/spec/support/reports/d11cb8c19f0ef398e393e461d48fab49.crf +0 -0
  473. data/spec/support/reports/d29486b6155119827e12d512f38cf1a5.crf +0 -0
  474. data/spec/support/reports/d6348fa0f269cef7861d8a55ccb817b8.crf +0 -0
  475. data/spec/support/reports/d68cddd22874664f66ea296768de93cb.crf +0 -0
  476. data/spec/support/reports/d73172a30f03f6e4f73e77a379876368.crf +0 -0
  477. data/spec/support/reports/e0113960b4015876416519d1e36c6174.crf +0 -0
  478. data/spec/support/reports/e684ad3b2061330bf8016b0cda4c8aeb.crf +0 -0
  479. data/spec/support/reports/e6bec3c23e6367f309a43b6faec6c1af.crf +0 -0
  480. data/spec/support/reports/eadbebf5e6e8a2b325cdc82a4a667d1a.crf +0 -0
  481. data/spec/support/reports/fe4ca4a133464c018e8405dd73064f04.crf +0 -0
  482. data/spec/support/snapshots/Cuboid 2021-11-28 11_34_27 +0200 7757c257352bfa7abdfc764fa978115c.csf +0 -0
  483. data/spec/support/snapshots/Cuboid 2021-11-28 11_34_41 +0200 30367c49c18c17b84f6cdbfad6fe8209.csf +0 -0
  484. data/spec/support/snapshots/Cuboid 2021-11-28 11_35_24 +0200 0faa83c7ec023eca9e68e959b2b6a991.csf +0 -0
  485. data/spec/support/snapshots/Cuboid 2021-11-28 11_35_38 +0200 e61c3dae449133e330c24f9d1d34bc17.csf +0 -0
  486. data/spec/support/snapshots/Cuboid 2021-11-28 11_45_36 +0200 ef5b013868ce241f47ebef4f0ee96d23.csf +0 -0
  487. data/spec/support/snapshots/Cuboid 2021-11-28 11_45_42 +0200 ae63b2e851a211039d4dfa999bfc1f79.csf +0 -0
  488. data/spec/support/snapshots/Cuboid 2021-11-28 11_45_45 +0200 59a5d8a5ef5de0937e0d8a697d3a06cb.csf +0 -0
  489. data/spec/support/snapshots/Cuboid 2021-12-26 08_10_45 +0200 7534324302d1127f33460417057c0d99.csf +0 -0
  490. data/spec/support/snapshots/Cuboid 2021-12-26 08_10_59 +0200 2e45425f623e46a876531b65ff3319d4.csf +0 -0
  491. data/spec/support/snapshots/Cuboid 2021-12-26 08_14_43 +0200 d570989be752d5e9f930379a7f861028.csf +0 -0
  492. data/spec/support/snapshots/Cuboid 2021-12-26 08_14_57 +0200 37fe4c6328f04448257e962065d49d05.csf +0 -0
  493. data/spec/support/snapshots/Cuboid 2021-12-26 08_25_10 +0200 728fc33e7947c9dc606d69d7b9202dbc.csf +0 -0
  494. data/spec/support/snapshots/Cuboid 2021-12-26 08_25_15 +0200 cde4edd9a05a4183ff301d157654cb30.csf +0 -0
  495. data/spec/support/snapshots/Cuboid 2021-12-26 08_25_17 +0200 e47c2b6d6354bca5f07fd2903aefd262.csf +0 -0
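The headline change in this release is the rename of the Dispatcher to the Agent across the whole RPC API: `#dispatch` becomes `#spawn`, the `neighbour:` grid option becomes `peer:`, and a new `strategy:` option (`:horizontal` or `:vertical`) controls grid load-balancing. A minimal before/after sketch of the renamed calls, based on the spec diffs below (`dispatcher_spawn`/`agent_spawn` are spec-support helpers, used here purely for illustration):

    # 0.0.3
    #   dispatcher = dispatcher_spawn( application: "#{fixtures_path}/mock_app.rb" )
    #   dispatcher_spawn( neighbour: dispatcher.url )   # join the grid
    #   info = dispatcher.dispatch( owner: 'rspec' )    # obtain an Instance

    # 0.1.1
    agent = agent_spawn( application: "#{fixtures_path}/mock_app.rb" )
    agent_spawn( peer: agent.url )                      # `neighbour:` is now `peer:`
    info  = agent.spawn( owner: 'rspec' )               # `#dispatch` is now `#spawn`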
data/spec/cuboid/rpc/server/dispatcher_spec.rb
@@ -1,16 +1,16 @@
  require 'spec_helper'
  require 'fileutils'

- require "#{Cuboid::Options.paths.lib}/rpc/server/dispatcher"
+ require "#{Cuboid::Options.paths.lib}/rpc/server/agent"

- describe Cuboid::RPC::Server::Dispatcher do
+ describe Cuboid::RPC::Server::Agent do
  before( :each ) do
  Cuboid::Options.system.max_slots = slots
  end

  let(:instance_info_keys) { %w(token application pid url owner birthdate helpers now age) }
  let(:slots) { 3 }
- let(:subject) { dispatcher_spawn( application: "#{fixtures_path}/mock_app.rb" ) }
+ let(:subject) { agent_spawn( application: "#{fixtures_path}/mock_app.rb" ) }

  describe '#alive?' do
  it 'returns true' do
@@ -19,38 +19,71 @@ describe Cuboid::RPC::Server::Dispatcher do
  end

  describe '#preferred' do
- context 'when the dispatcher is a grid member' do
- it 'returns the URL of least burdened Dispatcher' do
- dispatcher_spawn( neighbour: subject.url ).dispatch( load_balance: false )
- dispatcher_spawn( neighbour: subject.url ).dispatch( load_balance: false )
+ context 'when the agent is a grid member' do
+ context 'and strategy is' do
+ context :horizontal do
+ it 'returns the URL of least burdened Agent' do
+ agent_spawn( peer: subject.url ).spawn( load_balance: false )
+ agent_spawn( peer: subject.url ).spawn( load_balance: false )
+
+ expect(subject.preferred( :horizontal )).to eq(subject.url)
+ end
+ end

- expect(subject.preferred).to eq(subject.url)
+ context :vertical do
+ it 'returns the URL of most burdened Agent' do
+ agent_spawn( peer: subject.url ).spawn( load_balance: false )
+ d = agent_spawn( peer: subject.url )
+ d.spawn( load_balance: false )
+ d.spawn( load_balance: false )
+
+ expect(subject.preferred( :vertical )).to eq(d.url)
+ end
+ end
+
+ context 'default' do
+ it 'returns the URL of least burdened Agent' do
+ agent_spawn( peer: subject.url ).spawn( load_balance: false )
+ agent_spawn( peer: subject.url ).spawn( load_balance: false )
+
+ expect(subject.preferred).to eq(subject.url)
+ end
+ end
+
+ context 'other' do
+ it 'returns :error_unknown_strategy' do
+ agent_spawn( peer: subject.url ).spawn( load_balance: false )
+ agent_spawn( peer: subject.url ).spawn( load_balance: false )
+
+ expect(subject.preferred( :blah )).to eq('error_unknown_strategy')
+ end
+ end
  end

- context 'and all Dispatchers are at max utilization' do
+ context 'and all Agents are at max utilization' do
  before :each do
- subject.dispatch( load_balance: false )
+ subject.spawn( load_balance: false )
  end

  let(:slots) { 1 }

  it 'returns nil' do
- dispatcher_spawn( neighbour: subject.url ).dispatch( load_balance: false )
- dispatcher_spawn( neighbour: subject.url ).dispatch( load_balance: false )
+ agent_spawn( peer: subject.url ).spawn( load_balance: false )
+ agent_spawn( peer: subject.url ).spawn( load_balance: false )

  expect(subject.preferred).to be_nil
  end
  end
  end

- context 'when the dispatcher is not a grid member' do
- it 'returns the URL of the Dispatcher' do
+ context 'when the agent is not a grid member' do
+ it 'returns the URL of the Agent' do
  expect(subject.preferred).to eq(subject.url)
  end

  context 'and it is at max utilization' do
  before :each do
- subject.dispatch( load_balance: false )
+ subject.spawn( load_balance: false )
  end

  let(:slots) { 1 }
@@ -68,20 +101,20 @@ describe Cuboid::RPC::Server::Dispatcher do
  end
  end

- describe '#dispatch' do
+ describe '#spawn' do
  it 'does not leak Instances' do
  slots.times do
- subject.dispatch
+ subject.spawn
  end

  expect(subject.instances.size).to eq(slots)
  end

- it 'sets OptionGroups::Dispatcher#url' do
- info = subject.dispatch
+ it 'sets OptionGroups::Agent#url' do
+ info = subject.spawn
  instance = instance_connect( info['url'], info['token'] )

- expect(instance.dispatcher_url).to eq subject.url
+ expect(instance.agent_url).to eq subject.url
  end

  context "when #{Cuboid::OptionGroups::RPC}#server_external_address has been set" do
@@ -92,13 +125,13 @@ describe Cuboid::RPC::Server::Dispatcher do
  let(:address) { '127.0.0.2' }

  it 'advertises that address' do
- expect(subject.dispatch['url']).to start_with "#{address}:"
+ expect(subject.spawn['url']).to start_with "#{address}:"
  end
  end

  context 'when not a Grid member' do
  it 'returns Instance info' do
- info = subject.dispatch( owner: 'rspec' )
+ info = subject.spawn( owner: 'rspec' )

  %w(token application pid url owner birthdate helpers).each do |k|
  expect(info[k]).to be_truthy
@@ -110,19 +143,19 @@ describe Cuboid::RPC::Server::Dispatcher do

  it 'assigns an optional owner' do
  owner = 'blah'
- expect(subject.dispatch( owner: owner )['owner']).to eq(owner)
+ expect(subject.spawn( owner: owner )['owner']).to eq(owner)
  end

  context 'when the there are no available slots' do
  let(:slots) { 5 }
  before :each do
  slots.times do
- subject.dispatch
+ subject.spawn
  end
  end

  it 'returns nil' do
- expect(subject.dispatch).to be nil
+ expect(subject.spawn).to be nil
  end

  context 'and slots are freed' do
@@ -140,12 +173,12 @@ describe Cuboid::RPC::Server::Dispatcher do

  instances = []
  free.times do
- instances << subject.dispatch
+ instances << subject.spawn
  end
  instances.compact!

  expect(instances.size).to eq free
- expect(subject.dispatch).to be nil
+ expect(subject.spawn).to be nil
  end
  end
  end
@@ -154,68 +187,150 @@ describe Cuboid::RPC::Server::Dispatcher do
  context 'when a Grid member' do
  let(:slots) { 4 }

- it 'returns Instance info from the least burdened Dispatcher' do
- d1 = dispatcher_spawn(
- address: '127.0.0.1',
- application: "#{fixtures_path}/mock_app.rb"
- )
+ context 'and strategy is' do
+ context :horizontal do
+ it 'provides Instances from the least burdened Agent' do
+ d1 = agent_spawn(
+ address: '127.0.0.1',
+ application: "#{fixtures_path}/mock_app.rb"
+ )
+
+ 3.times do
+ d1.spawn( load_balance: false )
+ end
+
+ d2 = agent_spawn(
+ address: '127.0.0.2',
+ peer: d1.url,
+ application: "#{fixtures_path}/mock_app.rb"
+ )

- 3.times do
- d1.dispatch( load_balance: false )
+ 2.times do
+ d2.spawn( load_balance: false )
+ end
+
+ d3 = agent_spawn(
+ address: '127.0.0.3',
+ peer: d1.url,
+ application: "#{fixtures_path}/mock_app.rb"
+ )
+ d3.spawn( load_balance: false )
+ preferred = d3.url.split( ':' ).first
+
+ expect(d3.spawn(strategy: :horizontal )['url'].split( ':' ).first).to eq(preferred)
+ expect(%W{127.0.0.3 127.0.0.2}).to include d1.spawn['url'].split( ':' ).first
+ expect(d2.spawn(strategy: :horizontal )['url'].split( ':' ).first).to eq(preferred)
+ expect(%W{127.0.0.1 127.0.0.3}).to include d3.spawn(strategy: :horizontal )['url'].split( ':' ).first
+ expect(%W{127.0.0.2 127.0.0.3}).to include d3.spawn(strategy: :horizontal )['url'].split( ':' ).first
+ expect(%W{127.0.0.2 127.0.0.3}).to include d1.spawn(strategy: :horizontal )['url'].split( ':' ).first
+ end
  end

- d2 = dispatcher_spawn(
- address: '127.0.0.2',
- neighbour: d1.url,
- application: "#{fixtures_path}/mock_app.rb"
- )
+ context :vertical do
+ it 'provides Instances from the most burdened Agent' do
+ d1 = agent_spawn(
+ address: '127.0.0.1',
+ application: "#{fixtures_path}/mock_app.rb"
+ )
+
+ 3.times do
+ d1.spawn( load_balance: false )
+ end
+
+ d2 = agent_spawn(
+ address: '127.0.0.2',
+ peer: d1.url,
+ application: "#{fixtures_path}/mock_app.rb"
+ )
+
+ 2.times do
+ d2.spawn( load_balance: false )
+ end
+
+ d3 = agent_spawn(
+ address: '127.0.0.3',
+ peer: d1.url,
+ application: "#{fixtures_path}/mock_app.rb"
+ )
+ d3.spawn( load_balance: false )

- 2.times do
- d2.dispatch( load_balance: false )
+ preferred = d1.url.split( ':' ).first
+ expect(d3.spawn( strategy: :vertical )['url'].split( ':' ).first).to eq(preferred)
+ end
  end

- d3 = dispatcher_spawn(
- address: '127.0.0.3',
- neighbour: d1.url,
- application: "#{fixtures_path}/mock_app.rb"
- )
- d3.dispatch( load_balance: false )
- preferred = d3.url.split( ':' ).first
-
- expect(d3.dispatch['url'].split( ':' ).first).to eq(preferred)
- expect(%W{127.0.0.3 127.0.0.2}).to include d1.dispatch['url'].split( ':' ).first
- expect(d2.dispatch['url'].split( ':' ).first).to eq(preferred)
- expect(%W{127.0.0.1 127.0.0.3}).to include d3.dispatch['url'].split( ':' ).first
- expect(%W{127.0.0.2 127.0.0.3}).to include d3.dispatch['url'].split( ':' ).first
- expect(%W{127.0.0.2 127.0.0.3}).to include d1.dispatch['url'].split( ':' ).first
+ context 'default' do
+ it 'provides Instances from the least burdened Agent' do
+ d1 = agent_spawn(
+ address: '127.0.0.1',
+ application: "#{fixtures_path}/mock_app.rb"
+ )
+
+ 3.times do
+ d1.spawn( load_balance: false )
+ end
+
+ d2 = agent_spawn(
+ address: '127.0.0.2',
+ peer: d1.url,
+ application: "#{fixtures_path}/mock_app.rb"
+ )
+
+ 2.times do
+ d2.spawn( load_balance: false )
+ end
+
+ d3 = agent_spawn(
+ address: '127.0.0.3',
+ peer: d1.url,
+ application: "#{fixtures_path}/mock_app.rb"
+ )
+ d3.spawn( load_balance: false )
+ preferred = d3.url.split( ':' ).first
+
+ expect(d3.spawn['url'].split( ':' ).first).to eq(preferred)
+ expect(%W{127.0.0.3 127.0.0.2}).to include d1.spawn['url'].split( ':' ).first
+ expect(d2.spawn['url'].split( ':' ).first).to eq(preferred)
+ expect(%W{127.0.0.1 127.0.0.3}).to include d3.spawn['url'].split( ':' ).first
+ expect(%W{127.0.0.2 127.0.0.3}).to include d3.spawn['url'].split( ':' ).first
+ expect(%W{127.0.0.2 127.0.0.3}).to include d1.spawn['url'].split( ':' ).first
+ end
+ end
+
+ context 'other' do
+ it 'returns :error_unknown_strategy' do
+ expect(agent_spawn( peer: subject.url ).
+ spawn( strategy: 'blah' )).to eq('error_unknown_strategy')
+ end
+ end
  end

  context 'when the load-balance option is set to false' do
- it 'returns an Instance from the requested Dispatcher' do
- d1 = dispatcher_spawn(
+ it 'returns an Instance from the requested Agent' do
+ d1 = agent_spawn(
  address: '127.0.0.1',
  application: "#{fixtures_path}/mock_app.rb"
  )

- d1.dispatch( load_balance: false )
+ d1.spawn( load_balance: false )

- d2 = dispatcher_spawn(
+ d2 = agent_spawn(
  address: '127.0.0.2',
- neighbour: d1.url,
+ peer: d1.url,
  application: "#{fixtures_path}/mock_app.rb"
  )
- d2.dispatch( load_balance: false )
+ d2.spawn( load_balance: false )

- d3 = dispatcher_spawn(
+ d3 = agent_spawn(
  address: '127.0.0.3',
- neighbour: d1.url,
+ peer: d1.url,
  application: "#{fixtures_path}/mock_app.rb"
  )
  2.times do
- d3.dispatch( load_balance: false )
+ d3.spawn( load_balance: false )
  end

- expect(d3.dispatch( load_balance: false )['url'].
+ expect(d3.spawn( load_balance: false )['url'].
  split( ':' ).first).to eq('127.0.0.3')
  end
  end
@@ -224,7 +339,7 @@ describe Cuboid::RPC::Server::Dispatcher do

  describe '#instance' do
  it 'returns proc info by PID' do
- instance = subject.dispatch( owner: 'rspec' )
+ instance = subject.spawn( owner: 'rspec' )
  info = subject.instance( instance['pid'] )
  instance_info_keys.each do |k|
  expect(info[k]).to be_truthy
@@ -234,7 +349,7 @@ describe Cuboid::RPC::Server::Dispatcher do

  describe '#instances' do
  it 'returns proc info by PID for all instances' do
- slots.times { subject.dispatch( owner: 'rspec' ) }
+ slots.times { subject.spawn( owner: 'rspec' ) }

  subject.instances.each do |instance|
  instance_info_keys.each do |k|
@@ -246,7 +361,7 @@ describe Cuboid::RPC::Server::Dispatcher do

  describe '#running_instances' do
  it 'returns proc info for running instances' do
- slots.times { subject.dispatch }
+ slots.times { subject.spawn }

  expect(subject.running_instances.size).to eq(slots)
  end
@@ -254,7 +369,7 @@ describe Cuboid::RPC::Server::Dispatcher do

  describe '#finished_instances' do
  it 'returns proc info for finished instances' do
- 3.times { Cuboid::Processes::Manager.kill subject.dispatch['pid'] }
+ 3.times { Cuboid::Processes::Manager.kill subject.spawn['pid'] }

  expect(subject.finished_instances.size).to eq(3)
  end
@@ -263,7 +378,7 @@ describe Cuboid::RPC::Server::Dispatcher do
  describe '#utilization' do
  it 'returns a float signifying the amount of workload' do
  3.times do
- subject.dispatch
+ subject.spawn
  end

  expect(subject.utilization).to eq(3 / Float(slots))
@@ -272,7 +387,7 @@ describe Cuboid::RPC::Server::Dispatcher do

  describe '#statistics' do
  it 'returns general statistics' do
- subject.dispatch
+ subject.spawn
  instances = subject.instances
  Cuboid::Processes::Manager.kill( instances.first['pid'] )

@@ -291,7 +406,7 @@ describe Cuboid::RPC::Server::Dispatcher do

  context 'when there are snapshots' do
  it 'lists them' do
- info = subject.dispatch
+ info = subject.spawn
  instance = Cuboid::RPC::Client::Instance.new(
  info['url'], info['token']
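To summarize the load-balancing behaviour exercised above (return values as asserted by the spec):

    agent.preferred( :horizontal )      # URL of the least burdened Agent in the grid
    agent.preferred( :vertical )        # URL of the most burdened Agent in the grid
    agent.preferred                     # no argument behaves like :horizontal (least burdened)
    agent.preferred( :blah )            # unknown strategy => 'error_unknown_strategy'
    agent.spawn( strategy: :vertical )  # Instance info from the most burdened Agent, or nil when no slots are free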
data/spec/cuboid/rpc/server/scheduler_spec.rb
@@ -52,20 +52,20 @@ describe Cuboid::RPC::Server::Scheduler do
  end
  end

- context 'when a Dispatcher has been set' do
- subject { Cuboid::Processes::Schedulers.spawn dispatcher: dispatcher.url }
- let(:dispatcher) do
- Cuboid::Processes::Dispatchers.spawn( application: "#{fixtures_path}/mock_app.rb" )
+ context 'when a Agent has been set' do
+ subject { Cuboid::Processes::Schedulers.spawn agent: agent.url }
+ let(:agent) do
+ Cuboid::Processes::Agents.spawn( application: "#{fixtures_path}/mock_app.rb" )
  end

  it 'gets Instances from it' do
- expect(dispatcher.finished_instances).to be_empty
+ expect(agent.finished_instances).to be_empty

  subject.push( options )
  sleep 0.1 while subject.completed.empty?
  sleep 2

- expect(dispatcher.finished_instances).to be_any
+ expect(agent.finished_instances).to be_any
  end

  it 'sets OptionGroups::Scheduler#url' do
@@ -80,7 +80,7 @@ describe Cuboid::RPC::Server::Scheduler do
  it 'does not consume the queue' do
  subject

- Cuboid::Processes::Dispatchers.killall
+ Cuboid::Processes::Agents.killall
  sleep 3

  expect(subject.size).to be 0
@@ -89,7 +89,7 @@ describe Cuboid::RPC::Server::Scheduler do
  sleep 5

  expect(subject.size).to be 1
- expect(subject.errors.join("\n")).to include "Failed to contact Dispatcher at: #{dispatcher.url}"
+ expect(subject.errors.join("\n")).to include "Failed to contact Agent at: #{agent.url}"
  end
  end
  end
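The Scheduler accordingly takes an `agent:` option in place of `dispatcher:`; a minimal sketch of wiring the two together, using the process helpers the way the spec above does (`options` stands in for whatever payload you push):

    # Spawn an Agent serving the application, then point a Scheduler at it;
    # queued jobs will run on Instances provided by that Agent.
    agent     = Cuboid::Processes::Agents.spawn( application: "#{fixtures_path}/mock_app.rb" )
    scheduler = Cuboid::Processes::Schedulers.spawn( agent: agent.url )
    scheduler.push( options )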
data/spec/support/fixtures/executables/node.rb
@@ -1,14 +1,14 @@
  require Options.paths.lib + 'ui/output'
- require Options.paths.lib + 'rpc/server/dispatcher'
+ require Options.paths.lib + 'rpc/server/agent'
  require Options.paths.lib + 'processes/manager'

- class Node < Cuboid::RPC::Server::Dispatcher::Node
+ class Node < Cuboid::RPC::Server::Agent::Node

  def initialize
  @options = Options.instance

  methods.each do |m|
- next if method( m ).owner != Cuboid::RPC::Server::Dispatcher::Node
+ next if method( m ).owner != Cuboid::RPC::Server::Agent::Node
  self.class.send :private, m
  self.class.send :public, m
  end
data/spec/support/fixtures/mock_app/test_service.rb
@@ -1,20 +1,20 @@
- require 'cuboid/rpc/server/dispatcher'
+ require 'cuboid/rpc/server/agent'

- class TestService < Cuboid::RPC::Server::Dispatcher::Service
+ class TestService < Cuboid::RPC::Server::Agent::Service

  private :instances
  public :instances

- def test_dispatcher
- dispatcher.class == Cuboid::RPC::Server::Dispatcher
+ def test_agent
+ agent.class == Cuboid::RPC::Server::Agent
  end

  def test_opts
- dispatcher.instance_eval{ @options } == options
+ agent.instance_eval{ @options } == options
  end

  def test_node
- node.class == Cuboid::RPC::Server::Dispatcher::Node
+ node.class == Cuboid::RPC::Server::Agent::Node
  end

  def test_map_instances( &block )
@@ -39,8 +39,8 @@ class TestService < Cuboid::RPC::Server::Dispatcher::Service
  iterator_for( instances ).class == Arachni::Reactor::Iterator
  end

- def test_connect_to_dispatcher( url, &block )
- connect_to_dispatcher( url ).alive? { |b| block.call b }
+ def test_connect_to_agent( url, &block )
+ connect_to_agent( url ).alive? { |b| block.call b }
  end

  def test_connect_to_instance( *args, &block )
data/spec/support/fixtures/mock_app.rb
@@ -21,7 +21,7 @@ class MockApp < Cuboid::Application
  # RPC, report and snapshot file.
  serialize_with Marshal

- dispatcher_service_for :test_service, TestService
+ agent_service_for :test_service, TestService

  # Execution entry point.
  def run
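Custom Agent services are now declared with `agent_service_for` instead of `dispatcher_service_for`; a minimal sketch, with `MyService` and `MyApp` as hypothetical names:

    # A service exposed over the Agent's RPC interface (hypothetical example).
    class MyService < Cuboid::RPC::Server::Agent::Service
      def echo( arg )
        arg
      end
    end

    class MyApp < Cuboid::Application
      # was: dispatcher_service_for :my_service, MyService
      agent_service_for :my_service, MyService
    end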
data/spec/support/fixtures/services/echo.rb
@@ -1,15 +1,15 @@
- class Cuboid::RPC::Server::Dispatcher
+ class Cuboid::RPC::Server::Agent
  class Service::Echo < Service

  private :instances
  public :instances

- def test_dispatcher
- dispatcher.class == Cuboid::RPC::Server::Dispatcher
+ def test_agent
+ agent.class == Cuboid::RPC::Server::Agent
  end

  def test_opts
- dispatcher.instance_eval{ @options } == options
+ agent.instance_eval{ @options } == options
  end

  def test_node
@@ -38,8 +38,8 @@ class Service::Echo < Service
  iterator_for( instances ).class == Arachni::Reactor::Iterator
  end

- def test_connect_to_dispatcher( url, &block )
- connect_to_dispatcher( url ).alive? { |b| block.call b }
+ def test_connect_to_agent( url, &block )
+ connect_to_agent( url ).alive? { |b| block.call b }
  end

  def test_connect_to_instance( *args, &block )
data/spec/support/helpers/resets.rb
@@ -40,7 +40,7 @@ end

  def processes_killall
  instance_killall
- dispatcher_killall
+ agent_killall
  scheduler_killall
  process_killall
  process_kill_reactor
data/spec/support/lib/web_server_client.rb
@@ -2,9 +2,9 @@ require 'arachni/rpc'

  # @note Needs `ENV['WEB_SERVER_DISPATCHER']` in the format of `host:port`.
  #
- # {WebServerManager}-API-compatible client for the {WebServerDispatcher}.
+ # {WebServerManager}-API-compatible client for the {WebServerAgent}.
  #
- # Delegates test webserver creation to the machine running {WebServerDispatcher},
+ # Delegates test webserver creation to the machine running {WebServerAgent},
  # for hosts that lack support for fast servers (like Windows, which can't run
  # Thin, Puma etc.).
  #
data/spec/support/lib/web_server_dispatcher.rb
@@ -9,7 +9,7 @@ require 'arachni/rpc'
  # Exposes the {WebServerManager} over RPC.
  #
  # @author Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
- class WebServerDispatcher
+ class WebServerAgent

  def initialize( options = {} )
  host, port = ENV['WEB_SERVER_DISPATCHER'].split( ':' )
New Agent log files under data/spec/support/logs/ (additions only):
@@ -0,0 +1,6 @@
+ [Tue Feb 1 13:18:07 2022] [status] Starting the RPC Server...
+ [Tue Feb 1 13:18:07 2022] [status] Initializing grid node...
+ [Tue Feb 1 13:18:07 2022] [status] Node ready.
+ [Tue Feb 1 13:18:07 2022] [info] Updated peers:
+ [Tue Feb 1 13:18:07 2022] [info] <empty>
+ [Tue Feb 1 13:18:07 2022] [status] Ready
@@ -0,0 +1,6 @@
+ [Tue Feb 1 13:21:29 2022] [status] Starting the RPC Server...
+ [Tue Feb 1 13:21:29 2022] [status] Initializing grid node...
+ [Tue Feb 1 13:21:29 2022] [status] Node ready.
+ [Tue Feb 1 13:21:29 2022] [info] Updated peers:
+ [Tue Feb 1 13:21:29 2022] [info] <empty>
+ [Tue Feb 1 13:21:29 2022] [status] Ready
@@ -0,0 +1,6 @@
+ [Tue Feb 1 13:23:37 2022] [status] Starting the RPC Server...
+ [Tue Feb 1 13:23:37 2022] [status] Initializing grid node...
+ [Tue Feb 1 13:23:37 2022] [status] Node ready.
+ [Tue Feb 1 13:23:37 2022] [info] Updated peers:
+ [Tue Feb 1 13:23:37 2022] [info] <empty>
+ [Tue Feb 1 13:23:37 2022] [status] Ready
@@ -0,0 +1,6 @@
+ [Tue Feb 1 13:23:38 2022] [status] Starting the RPC Server...
+ [Tue Feb 1 13:23:38 2022] [status] Initializing grid node...
+ [Tue Feb 1 13:23:38 2022] [status] Node ready.
+ [Tue Feb 1 13:23:38 2022] [info] Updated peers:
+ [Tue Feb 1 13:23:38 2022] [info] <empty>
+ [Tue Feb 1 13:23:38 2022] [status] Ready
@@ -0,0 +1,6 @@
+ [Tue Feb 1 13:23:41 2022] [status] Starting the RPC Server...
+ [Tue Feb 1 13:23:41 2022] [status] Initializing grid node...
+ [Tue Feb 1 13:23:41 2022] [status] Node ready.
+ [Tue Feb 1 13:23:41 2022] [info] Updated peers:
+ [Tue Feb 1 13:23:41 2022] [info] <empty>
+ [Tue Feb 1 13:23:41 2022] [status] Ready
@@ -0,0 +1,10 @@
+ [Tue Feb 1 13:23:42 2022] [status] Starting the RPC Server...
+ [Tue Feb 1 13:23:42 2022] [status] Initializing grid node...
+ [Tue Feb 1 13:23:42 2022] [status] Node ready.
+ [Tue Feb 1 13:23:42 2022] [info] Updated peers:
+ [Tue Feb 1 13:23:42 2022] [info] <empty>
+ [Tue Feb 1 13:23:42 2022] [status] Ready
+ [Tue Feb 1 13:23:43 2022] [status] Adding peer: 127.0.0.1:42875
+ [Tue Feb 1 13:23:43 2022] [info] Updated peers:
+ [Tue Feb 1 13:23:43 2022] [info] ---- 127.0.0.1:42875
+ [Tue Feb 1 13:23:43 2022] [status] Advertising: 127.0.0.1:42875