cuboid 0.1.2 → 0.1.5

Files changed (435)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +8 -0
  3. data/README.md +121 -4
  4. data/Rakefile +17 -0
  5. data/cuboid.gemspec +1 -1
  6. data/lib/cuboid/application.rb +1 -1
  7. data/lib/cuboid/option_groups/agent.rb +1 -1
  8. data/lib/cuboid/processes/agents.rb +1 -1
  9. data/lib/cuboid/processes/executables/agent.rb +1 -1
  10. data/lib/cuboid/processes/executables/scheduler.rb +1 -1
  11. data/lib/cuboid/processes/instances.rb +1 -1
  12. data/lib/cuboid/processes/manager.rb +2 -2
  13. data/lib/cuboid/processes/schedulers.rb +1 -1
  14. data/lib/cuboid/rpc/client/agent.rb +2 -2
  15. data/lib/cuboid/rpc/client/base.rb +2 -2
  16. data/lib/cuboid/rpc/client/instance/service.rb +1 -1
  17. data/lib/cuboid/rpc/client/instance.rb +4 -4
  18. data/lib/cuboid/rpc/server/agent/node.rb +2 -2
  19. data/lib/cuboid/rpc/server/agent/service.rb +5 -5
  20. data/lib/cuboid/rpc/server/agent.rb +18 -12
  21. data/lib/cuboid/rpc/server/base.rb +2 -2
  22. data/lib/cuboid/rpc/server/instance.rb +2 -2
  23. data/lib/cuboid/rpc/server/scheduler.rb +1 -1
  24. data/lib/version +1 -1
  25. data/spec/cuboid/rest/server_spec.rb +5 -5
  26. data/spec/cuboid/rpc/client/base_spec.rb +4 -4
  27. data/spec/cuboid/rpc/client/instance_spec.rb +1 -1
  28. data/spec/cuboid/rpc/server/{dispatcher → agent}/node_spec.rb +2 -2
  29. data/spec/cuboid/rpc/server/{dispatcher → agent}/service_spec.rb +0 -0
  30. data/spec/cuboid/rpc/server/{dispatcher_spec.rb → agent_spec.rb} +34 -26
  31. data/spec/cuboid/rpc/server/base_spec.rb +2 -2
  32. data/spec/cuboid/rpc/server/instance_spec.rb +3 -3
  33. data/spec/cuboid/rpc/server/scheduler_spec.rb +3 -3
  34. data/spec/support/fixtures/executables/node.rb +3 -3
  35. data/spec/support/fixtures/mock_app/test_service.rb +1 -1
  36. data/spec/support/fixtures/services/echo.rb +1 -1
  37. data/spec/support/lib/web_server_client.rb +4 -4
  38. data/spec/support/lib/web_server_dispatcher.rb +1 -1
  39. metadata +78 -870
  40. data/spec/support/logs/Agent - 2486896-44236.log +0 -6
  41. data/spec/support/logs/Agent - 2487229-16390.log +0 -6
  42. data/spec/support/logs/Agent - 2487520-2511.log +0 -6
  43. data/spec/support/logs/Agent - 2487522-24008.log +0 -6
  44. data/spec/support/logs/Agent - 2487526-3383.log +0 -6
  45. data/spec/support/logs/Agent - 2487528-23713.log +0 -10
  46. data/spec/support/logs/Agent - 2487530-42875.log +0 -10
  47. data/spec/support/logs/Agent - 2487533-14182.log +0 -10
  48. data/spec/support/logs/Agent - 2487535-32486.log +0 -10
  49. data/spec/support/logs/Agent - 2487537-30578.log +0 -10
  50. data/spec/support/logs/Agent - 2487539-65402.log +0 -10
  51. data/spec/support/logs/Agent - 2493974-23066.log +0 -6
  52. data/spec/support/logs/Agent - 2501716-11729.log +0 -6
  53. data/spec/support/logs/Agent - 2501724-48638.log +0 -6
  54. data/spec/support/logs/Agent - 2501961-60077.log +0 -6
  55. data/spec/support/logs/Agent - 2501976-10941.log +0 -10
  56. data/spec/support/logs/Agent - 2502050-45312.log +0 -10
  57. data/spec/support/logs/Agent - 2502131-45940.log +0 -10
  58. data/spec/support/logs/Agent - 2502139-59848.log +0 -10
  59. data/spec/support/logs/Agent - 2502262-46629.log +0 -10
  60. data/spec/support/logs/Agent - 2502298-28395.log +0 -10
  61. data/spec/support/logs/Agent - 2512541-7108.log +0 -6
  62. data/spec/support/logs/Agent - 2515458-5532.log +0 -6
  63. data/spec/support/logs/Agent - 2515540-20379.log +0 -6
  64. data/spec/support/logs/Agent - 2515550-19661.log +0 -6
  65. data/spec/support/logs/Agent - 2515626-34192.log +0 -6
  66. data/spec/support/logs/Agent - 2515879-19884.log +0 -6
  67. data/spec/support/logs/Agent - 2516130-27514.log +0 -6
  68. data/spec/support/logs/Agent - 2516382-45886.log +0 -6
  69. data/spec/support/logs/Agent - 2516390-47909.log +0 -6
  70. data/spec/support/logs/Agent - 2516465-47774.log +0 -6
  71. data/spec/support/logs/Agent - 2516546-13955.log +0 -6
  72. data/spec/support/logs/Agent - 2516561-39041.log +0 -6
  73. data/spec/support/logs/Agent - 2516710-46722.log +0 -6
  74. data/spec/support/logs/Agent - 2516795-56481.log +0 -16
  75. data/spec/support/logs/Agent - 2516798-22121.log +0 -14
  76. data/spec/support/logs/Agent - 2516881-21755.log +0 -10
  77. data/spec/support/logs/Agent - 2517053-64944.log +0 -16
  78. data/spec/support/logs/Agent - 2517056-63364.log +0 -14
  79. data/spec/support/logs/Agent - 2517066-34797.log +0 -10
  80. data/spec/support/logs/Agent - 2518042-30794.log +0 -16
  81. data/spec/support/logs/Agent - 2518050-6895.log +0 -14
  82. data/spec/support/logs/Agent - 2518212-36288.log +0 -10
  83. data/spec/support/logs/Agent - 2518395-10976.log +0 -16
  84. data/spec/support/logs/Agent - 2518472-35419.log +0 -14
  85. data/spec/support/logs/Agent - 2518559-43119.log +0 -10
  86. data/spec/support/logs/Agent - 2518647-48415.log +0 -16
  87. data/spec/support/logs/Agent - 2518652-65099.log +0 -14
  88. data/spec/support/logs/Agent - 2518658-1449.log +0 -10
  89. data/spec/support/logs/Agent - 2518910-62083.log +0 -6
  90. data/spec/support/logs/Agent - 2518919-58035.log +0 -6
  91. data/spec/support/logs/Agent - 2519076-57825.log +0 -6
  92. data/spec/support/logs/Agent - 2519156-8413.log +0 -6
  93. data/spec/support/logs/Agent - 2519406-64200.log +0 -6
  94. data/spec/support/logs/Agent - 2519570-20841.log +0 -6
  95. data/spec/support/logs/Agent - 2519659-30136.log +0 -6
  96. data/spec/support/logs/Agent - 2519822-9736.log +0 -6
  97. data/spec/support/logs/Agent - 2520004-59217.log +0 -6
  98. data/spec/support/logs/Agent - 2520357-37723.log +0 -6
  99. data/spec/support/logs/Agent - 2521294-5229.log +0 -16
  100. data/spec/support/logs/Agent - 2521407-31515.log +0 -14
  101. data/spec/support/logs/Agent - 2521415-7501.log +0 -10
  102. data/spec/support/logs/Agent - 2521452-22168.log +0 -16
  103. data/spec/support/logs/Agent - 2521463-24492.log +0 -14
  104. data/spec/support/logs/Agent - 2521482-50808.log +0 -10
  105. data/spec/support/logs/Agent - 2521503-8180.log +0 -16
  106. data/spec/support/logs/Agent - 2521562-3183.log +0 -14
  107. data/spec/support/logs/Agent - 2521639-45199.log +0 -10
  108. data/spec/support/logs/Agent - 2521891-30967.log +0 -10
  109. data/spec/support/logs/Agent - 2521897-64837.log +0 -10
  110. data/spec/support/logs/Agent - 2521916-23526.log +0 -16
  111. data/spec/support/logs/Agent - 2521936-26313.log +0 -14
  112. data/spec/support/logs/Agent - 2521948-18199.log +0 -10
  113. data/spec/support/logs/Agent - 2522026-18512.log +0 -6
  114. data/spec/support/logs/Agent - 2522093-46502.log +0 -6
  115. data/spec/support/logs/Agent - 2522195-30981.log +0 -6
  116. data/spec/support/logs/Agent - 2522295-55509.log +0 -6
  117. data/spec/support/logs/Agent - 2522347-32811.log +0 -6
  118. data/spec/support/logs/Agent - 2523391-7522.log +0 -6
  119. data/spec/support/logs/Agent - 2523626-61560.log +0 -6
  120. data/spec/support/logs/Agent - 2523913-25677.log +0 -6
  121. data/spec/support/logs/Agent - 2539887-21281.log +0 -6
  122. data/spec/support/logs/Agent - 2540261-61895.log +0 -6
  123. data/spec/support/logs/Agent - 2540430-8261.log +0 -6
  124. data/spec/support/logs/Agent - 361913-24577.log +0 -6
  125. data/spec/support/logs/Agent - 362152-33398.log +0 -6
  126. data/spec/support/logs/Agent - 362154-44243.log +0 -6
  127. data/spec/support/logs/Agent - 362157-6034.log +0 -6
  128. data/spec/support/logs/Agent - 362160-38548.log +0 -10
  129. data/spec/support/logs/Agent - 362162-59176.log +0 -10
  130. data/spec/support/logs/Agent - 362165-23738.log +0 -10
  131. data/spec/support/logs/Agent - 362167-57542.log +0 -10
  132. data/spec/support/logs/Agent - 362172-22798.log +0 -10
  133. data/spec/support/logs/Agent - 362174-46609.log +0 -10
  134. data/spec/support/logs/Agent - 362398-10913.log +0 -6
  135. data/spec/support/logs/Agent - 362524-29482.log +0 -6
  136. data/spec/support/logs/Agent - 362527-40674.log +0 -6
  137. data/spec/support/logs/Agent - 362530-32963.log +0 -6
  138. data/spec/support/logs/Agent - 362532-22236.log +0 -6
  139. data/spec/support/logs/Agent - 362543-4429.log +0 -6
  140. data/spec/support/logs/Agent - 362557-7510.log +0 -6
  141. data/spec/support/logs/Agent - 362574-36915.log +0 -6
  142. data/spec/support/logs/Agent - 362576-65372.log +0 -6
  143. data/spec/support/logs/Agent - 362578-29183.log +0 -6
  144. data/spec/support/logs/Agent - 362580-43546.log +0 -6
  145. data/spec/support/logs/Agent - 362582-22254.log +0 -6
  146. data/spec/support/logs/Agent - 362588-40009.log +0 -6
  147. data/spec/support/logs/Agent - 362591-60596.log +0 -16
  148. data/spec/support/logs/Agent - 362595-3584.log +0 -14
  149. data/spec/support/logs/Agent - 362599-39016.log +0 -10
  150. data/spec/support/logs/Agent - 362606-58506.log +0 -16
  151. data/spec/support/logs/Agent - 362609-55768.log +0 -14
  152. data/spec/support/logs/Agent - 362613-37168.log +0 -10
  153. data/spec/support/logs/Agent - 362629-16593.log +0 -16
  154. data/spec/support/logs/Agent - 362632-32264.log +0 -14
  155. data/spec/support/logs/Agent - 362643-59807.log +0 -10
  156. data/spec/support/logs/Agent - 362649-40552.log +0 -16
  157. data/spec/support/logs/Agent - 362655-31144.log +0 -14
  158. data/spec/support/logs/Agent - 362660-12968.log +0 -10
  159. data/spec/support/logs/Agent - 362666-6806.log +0 -16
  160. data/spec/support/logs/Agent - 362673-39178.log +0 -14
  161. data/spec/support/logs/Agent - 362678-36132.log +0 -10
  162. data/spec/support/logs/Agent - 362685-62240.log +0 -6
  163. data/spec/support/logs/Agent - 362687-4068.log +0 -6
  164. data/spec/support/logs/Agent - 362691-48465.log +0 -6
  165. data/spec/support/logs/Agent - 362693-40295.log +0 -6
  166. data/spec/support/logs/Agent - 362705-53855.log +0 -6
  167. data/spec/support/logs/Agent - 362712-14029.log +0 -6
  168. data/spec/support/logs/Agent - 362717-3069.log +0 -6
  169. data/spec/support/logs/Agent - 362721-38670.log +0 -6
  170. data/spec/support/logs/Agent - 362725-7644.log +0 -6
  171. data/spec/support/logs/Agent - 362748-18302.log +0 -6
  172. data/spec/support/logs/Agent - 362828-61766.log +0 -16
  173. data/spec/support/logs/Agent - 362838-44693.log +0 -14
  174. data/spec/support/logs/Agent - 362847-31875.log +0 -10
  175. data/spec/support/logs/Agent - 362875-46401.log +0 -16
  176. data/spec/support/logs/Agent - 362887-52041.log +0 -14
  177. data/spec/support/logs/Agent - 362897-7426.log +0 -10
  178. data/spec/support/logs/Agent - 362908-35228.log +0 -16
  179. data/spec/support/logs/Agent - 362919-33764.log +0 -14
  180. data/spec/support/logs/Agent - 362928-1323.log +0 -10
  181. data/spec/support/logs/Agent - 362960-27883.log +0 -10
  182. data/spec/support/logs/Agent - 362964-26072.log +0 -10
  183. data/spec/support/logs/Agent - 362966-42927.log +0 -16
  184. data/spec/support/logs/Agent - 362972-56025.log +0 -14
  185. data/spec/support/logs/Agent - 362977-39574.log +0 -10
  186. data/spec/support/logs/Agent - 362992-23510.log +0 -6
  187. data/spec/support/logs/Agent - 362996-63445.log +0 -6
  188. data/spec/support/logs/Agent - 363017-59359.log +0 -6
  189. data/spec/support/logs/Agent - 363034-15682.log +0 -6
  190. data/spec/support/logs/Agent - 363050-6605.log +0 -6
  191. data/spec/support/logs/Agent - 363065-59445.log +0 -6
  192. data/spec/support/logs/Agent - 363070-6619.log +0 -6
  193. data/spec/support/logs/Agent - 363077-18731.log +0 -6
  194. data/spec/support/logs/Agent - 363250-16962.log +0 -6
  195. data/spec/support/logs/Agent - 363265-25598.log +0 -6
  196. data/spec/support/logs/Agent - 363279-50603.log +0 -6
  197. data/spec/support/logs/Instance - 2525557-35695.error.log +0 -105
  198. data/spec/support/logs/Instance - 2525638-6350.error.log +0 -105
  199. data/spec/support/logs/Instance - 2525719-30351.error.log +0 -105
  200. data/spec/support/logs/Instance - 363119-21886.error.log +0 -105
  201. data/spec/support/logs/Instance - 363121-31774.error.log +0 -105
  202. data/spec/support/logs/Instance - 363127-52671.error.log +0 -105
  203. data/spec/support/logs/Scheduler - 2486608-59709.log +0 -3
  204. data/spec/support/logs/Scheduler - 2486612-44110.log +0 -27
  205. data/spec/support/logs/Scheduler - 2486723-50393.log +0 -3
  206. data/spec/support/logs/Scheduler - 2486727-21620.log +0 -27
  207. data/spec/support/logs/Scheduler - 2486877-37845.log +0 -3
  208. data/spec/support/logs/Scheduler - 2486881-3624.log +0 -1
  209. data/spec/support/logs/Scheduler - 2486911-24752.log +0 -3
  210. data/spec/support/logs/Scheduler - 2486919-48535.log +0 -27
  211. data/spec/support/logs/Scheduler - 2486985-8897.log +0 -1
  212. data/spec/support/logs/Scheduler - 2487211-7516.log +0 -3
  213. data/spec/support/logs/Scheduler - 2487215-2831.log +0 -1
  214. data/spec/support/logs/Scheduler - 2487246-7826.log +0 -3
  215. data/spec/support/logs/Scheduler - 2487256-35669.log +0 -6
  216. data/spec/support/logs/Scheduler - 2487272-11542.log +0 -4
  217. data/spec/support/logs/Scheduler - 2487278-9621.log +0 -1
  218. data/spec/support/logs/Scheduler - 2487291-24094.log +0 -3
  219. data/spec/support/logs/Scheduler - 2487299-60095.log +0 -6
  220. data/spec/support/logs/Scheduler - 2487368-7706.log +0 -3
  221. data/spec/support/logs/Scheduler - 2487378-9859.log +0 -6
  222. data/spec/support/logs/Scheduler - 2487396-17812.log +0 -3
  223. data/spec/support/logs/Scheduler - 2487407-25543.log +0 -6
  224. data/spec/support/logs/Scheduler - 2487451-44767.log +0 -4
  225. data/spec/support/logs/Scheduler - 2487506-1422.log +0 -6
  226. data/spec/support/logs/Scheduler - 2487541-38068.log +0 -1
  227. data/spec/support/logs/Scheduler - 2487544-21866.log +0 -1
  228. data/spec/support/logs/Scheduler - 2487548-15245.log +0 -1
  229. data/spec/support/logs/Scheduler - 2487551-34905.log +0 -1
  230. data/spec/support/logs/Scheduler - 2487554-22142.log +0 -1
  231. data/spec/support/logs/Scheduler - 2487562-35113.log +0 -1
  232. data/spec/support/logs/Scheduler - 2487565-55125.log +0 -3
  233. data/spec/support/logs/Scheduler - 2487569-48845.log +0 -6
  234. data/spec/support/logs/Scheduler - 2487576-57192.log +0 -4
  235. data/spec/support/logs/Scheduler - 2487583-17991.log +0 -1
  236. data/spec/support/logs/Scheduler - 2487586-30014.log +0 -1
  237. data/spec/support/logs/Scheduler - 2487591-6472.log +0 -1
  238. data/spec/support/logs/Scheduler - 2487594-2195.log +0 -1
  239. data/spec/support/logs/Scheduler - 2487598-55808.log +0 -3
  240. data/spec/support/logs/Scheduler - 2487605-7400.log +0 -1
  241. data/spec/support/logs/Scheduler - 2487607-4337.log +0 -1
  242. data/spec/support/logs/Scheduler - 2487610-25835.log +0 -1
  243. data/spec/support/logs/Scheduler - 2493623-45209.log +0 -3
  244. data/spec/support/logs/Scheduler - 2493714-59407.log +0 -1
  245. data/spec/support/logs/Scheduler - 2494470-61696.log +0 -3
  246. data/spec/support/logs/Scheduler - 2494723-2810.log +0 -6
  247. data/spec/support/logs/Scheduler - 2495458-22112.log +0 -4
  248. data/spec/support/logs/Scheduler - 2496034-4076.log +0 -1
  249. data/spec/support/logs/Scheduler - 2496119-62253.log +0 -3
  250. data/spec/support/logs/Scheduler - 2496210-50380.log +0 -6
  251. data/spec/support/logs/Scheduler - 2497536-24922.log +0 -3
  252. data/spec/support/logs/Scheduler - 2497786-13515.log +0 -6
  253. data/spec/support/logs/Scheduler - 2498774-16911.log +0 -3
  254. data/spec/support/logs/Scheduler - 2498961-4742.log +0 -6
  255. data/spec/support/logs/Scheduler - 2500340-16045.log +0 -4
  256. data/spec/support/logs/Scheduler - 2500980-26158.log +0 -6
  257. data/spec/support/logs/Scheduler - 2502381-26435.log +0 -1
  258. data/spec/support/logs/Scheduler - 2502463-62965.log +0 -1
  259. data/spec/support/logs/Scheduler - 2502547-53434.log +0 -1
  260. data/spec/support/logs/Scheduler - 2502628-43720.log +0 -1
  261. data/spec/support/logs/Scheduler - 2502643-58379.log +0 -1
  262. data/spec/support/logs/Scheduler - 2502873-64893.log +0 -1
  263. data/spec/support/logs/Scheduler - 2502954-43885.log +0 -3
  264. data/spec/support/logs/Scheduler - 2503039-52147.log +0 -6
  265. data/spec/support/logs/Scheduler - 2503768-28831.log +0 -4
  266. data/spec/support/logs/Scheduler - 2504259-24533.log +0 -1
  267. data/spec/support/logs/Scheduler - 2504343-56967.log +0 -1
  268. data/spec/support/logs/Scheduler - 2504502-25085.log +0 -1
  269. data/spec/support/logs/Scheduler - 2504587-30789.log +0 -1
  270. data/spec/support/logs/Scheduler - 2504608-56601.log +0 -3
  271. data/spec/support/logs/Scheduler - 2504760-36374.log +0 -1
  272. data/spec/support/logs/Scheduler - 2504841-49675.log +0 -1
  273. data/spec/support/logs/Scheduler - 2504923-15781.log +0 -1
  274. data/spec/support/logs/Scheduler - 2532871-63847.log +0 -16
  275. data/spec/support/logs/Scheduler - 2538978-63705.log +0 -6
  276. data/spec/support/logs/Scheduler - 2539461-10579.log +0 -6
  277. data/spec/support/logs/Scheduler - 2539981-44320.log +0 -6
  278. data/spec/support/logs/Scheduler - 2540265-37672.log +0 -3
  279. data/spec/support/logs/Scheduler - 2540434-9490.log +0 -5
  280. data/spec/support/logs/Scheduler - 2540840-9957.log +0 -1
  281. data/spec/support/logs/Scheduler - 2540921-2437.log +0 -1
  282. data/spec/support/logs/Scheduler - 2540937-17100.log +0 -1
  283. data/spec/support/logs/Scheduler - 2541007-52812.log +0 -1
  284. data/spec/support/logs/Scheduler - 2541091-56468.log +0 -1
  285. data/spec/support/logs/Scheduler - 2541109-54138.log +0 -1
  286. data/spec/support/logs/Scheduler - 2541188-17918.log +0 -1
  287. data/spec/support/logs/Scheduler - 2541267-2563.log +0 -3
  288. data/spec/support/logs/Scheduler - 2541430-40675.log +0 -3
  289. data/spec/support/logs/Scheduler - 2542341-38074.log +0 -3
  290. data/spec/support/logs/Scheduler - 2542519-33069.log +0 -3
  291. data/spec/support/logs/Scheduler - 2542610-41819.log +0 -4
  292. data/spec/support/logs/Scheduler - 2543168-3708.log +0 -1
  293. data/spec/support/logs/Scheduler - 2543332-46957.log +0 -4
  294. data/spec/support/logs/Scheduler - 2543334-53034.log +0 -1
  295. data/spec/support/logs/Scheduler - 2543419-31038.log +0 -1
  296. data/spec/support/logs/Scheduler - 2543666-10481.log +0 -16
  297. data/spec/support/logs/Scheduler - 2544245-30154.log +0 -4
  298. data/spec/support/logs/Scheduler - 2544487-8113.log +0 -1
  299. data/spec/support/logs/Scheduler - 2544490-62030.log +0 -1
  300. data/spec/support/logs/Scheduler - 2544494-64830.log +0 -1
  301. data/spec/support/logs/Scheduler - 2544585-3507.log +0 -1
  302. data/spec/support/logs/Scheduler - 2544668-24185.log +0 -3
  303. data/spec/support/logs/Scheduler - 2544762-17601.log +0 -1
  304. data/spec/support/logs/Scheduler - 2544790-32379.log +0 -1
  305. data/spec/support/logs/Scheduler - 2544853-35518.log +0 -1
  306. data/spec/support/logs/Scheduler - 2544939-52525.log +0 -1
  307. data/spec/support/logs/Scheduler - 2545016-12352.log +0 -1
  308. data/spec/support/logs/Scheduler - 2545096-63001.log +0 -1
  309. data/spec/support/logs/Scheduler - 2545178-63647.log +0 -2
  310. data/spec/support/logs/Scheduler - 361893-37255.log +0 -3
  311. data/spec/support/logs/Scheduler - 361897-36090.log +0 -1
  312. data/spec/support/logs/Scheduler - 361928-44834.log +0 -3
  313. data/spec/support/logs/Scheduler - 361937-43013.log +0 -6
  314. data/spec/support/logs/Scheduler - 361956-8533.log +0 -4
  315. data/spec/support/logs/Scheduler - 361968-31856.log +0 -1
  316. data/spec/support/logs/Scheduler - 361988-37894.log +0 -3
  317. data/spec/support/logs/Scheduler - 361997-31525.log +0 -6
  318. data/spec/support/logs/Scheduler - 362037-51907.log +0 -3
  319. data/spec/support/logs/Scheduler - 362047-32758.log +0 -6
  320. data/spec/support/logs/Scheduler - 362072-11620.log +0 -3
  321. data/spec/support/logs/Scheduler - 362080-16797.log +0 -6
  322. data/spec/support/logs/Scheduler - 362124-56230.log +0 -4
  323. data/spec/support/logs/Scheduler - 362137-4746.log +0 -6
  324. data/spec/support/logs/Scheduler - 362176-32691.log +0 -1
  325. data/spec/support/logs/Scheduler - 362179-34759.log +0 -1
  326. data/spec/support/logs/Scheduler - 362183-18758.log +0 -1
  327. data/spec/support/logs/Scheduler - 362187-63438.log +0 -1
  328. data/spec/support/logs/Scheduler - 362189-35075.log +0 -1
  329. data/spec/support/logs/Scheduler - 362191-34351.log +0 -1
  330. data/spec/support/logs/Scheduler - 362195-51436.log +0 -3
  331. data/spec/support/logs/Scheduler - 362199-46884.log +0 -6
  332. data/spec/support/logs/Scheduler - 362214-12351.log +0 -4
  333. data/spec/support/logs/Scheduler - 362230-12723.log +0 -1
  334. data/spec/support/logs/Scheduler - 362233-16133.log +0 -1
  335. data/spec/support/logs/Scheduler - 362238-51103.log +0 -1
  336. data/spec/support/logs/Scheduler - 362241-20441.log +0 -1
  337. data/spec/support/logs/Scheduler - 362245-40692.log +0 -3
  338. data/spec/support/logs/Scheduler - 362251-40074.log +0 -1
  339. data/spec/support/logs/Scheduler - 362253-48087.log +0 -1
  340. data/spec/support/logs/Scheduler - 362256-27112.log +0 -1
  341. data/spec/support/logs/Scheduler - 363208-60869.log +0 -16
  342. data/spec/support/logs/Scheduler - 363231-35883.log +0 -6
  343. data/spec/support/logs/Scheduler - 363240-11734.log +0 -6
  344. data/spec/support/logs/Scheduler - 363252-28394.log +0 -6
  345. data/spec/support/logs/Scheduler - 363267-7527.log +0 -3
  346. data/spec/support/logs/Scheduler - 363282-48049.log +0 -5
  347. data/spec/support/logs/Scheduler - 363298-26965.log +0 -1
  348. data/spec/support/logs/Scheduler - 363300-14973.log +0 -1
  349. data/spec/support/logs/Scheduler - 363302-58216.log +0 -1
  350. data/spec/support/logs/Scheduler - 363304-11942.log +0 -1
  351. data/spec/support/logs/Scheduler - 363306-39654.log +0 -1
  352. data/spec/support/logs/Scheduler - 363308-57479.log +0 -1
  353. data/spec/support/logs/Scheduler - 363310-32176.log +0 -1
  354. data/spec/support/logs/Scheduler - 363314-13492.log +0 -3
  355. data/spec/support/logs/Scheduler - 363322-38112.log +0 -3
  356. data/spec/support/logs/Scheduler - 363342-26080.log +0 -3
  357. data/spec/support/logs/Scheduler - 363349-51783.log +0 -3
  358. data/spec/support/logs/Scheduler - 363355-44132.log +0 -4
  359. data/spec/support/logs/Scheduler - 363367-12160.log +0 -1
  360. data/spec/support/logs/Scheduler - 363372-28520.log +0 -4
  361. data/spec/support/logs/Scheduler - 363376-58951.log +0 -1
  362. data/spec/support/logs/Scheduler - 363379-28506.log +0 -1
  363. data/spec/support/logs/Scheduler - 363384-63536.log +0 -16
  364. data/spec/support/logs/Scheduler - 363402-32695.log +0 -4
  365. data/spec/support/logs/Scheduler - 363412-41984.log +0 -1
  366. data/spec/support/logs/Scheduler - 363414-47366.log +0 -1
  367. data/spec/support/logs/Scheduler - 363416-44368.log +0 -1
  368. data/spec/support/logs/Scheduler - 363427-29292.log +0 -1
  369. data/spec/support/logs/Scheduler - 363431-44605.log +0 -3
  370. data/spec/support/logs/Scheduler - 363439-19754.log +0 -1
  371. data/spec/support/logs/Scheduler - 363441-5762.log +0 -1
  372. data/spec/support/logs/Scheduler - 363443-4000.log +0 -1
  373. data/spec/support/logs/Scheduler - 363445-3688.log +0 -1
  374. data/spec/support/logs/Scheduler - 363448-50268.log +0 -1
  375. data/spec/support/logs/Scheduler - 363451-23104.log +0 -1
  376. data/spec/support/logs/Scheduler - 363454-12897.log +0 -2
  377. data/spec/support/logs/error-2487640.log +0 -797
  378. data/spec/support/logs/error-360075.log +0 -797
  379. data/spec/support/logs/output_spec_2487640.log +0 -390
  380. data/spec/support/logs/output_spec_360075.log +0 -390
  381. data/spec/support/reports/052f88c73e2d0ecf331dbf5fce0cfe3c.crf +0 -0
  382. data/spec/support/reports/06eed3dec98f5cd5fc717190a004d7d6.crf +0 -0
  383. data/spec/support/reports/0902bd88bbd4df462910007a3b5018fc.crf +0 -0
  384. data/spec/support/reports/10b9864e81e3204d1ef4940ccb88b0ed.crf +0 -0
  385. data/spec/support/reports/16b575097e68156416a84a6854d3719c.crf +0 -0
  386. data/spec/support/reports/18be00bff4371738c7c7013b284b415b.crf +0 -0
  387. data/spec/support/reports/1ca39d410c2cf1f652eb8c320d6682bd.crf +0 -0
  388. data/spec/support/reports/266a09d73152ce2f3d2951f1dab133f3.crf +0 -0
  389. data/spec/support/reports/2929bee9c126b2695dc569b693fef574.crf +0 -0
  390. data/spec/support/reports/2abd611201d06f0428f1b060b32d4839.crf +0 -0
  391. data/spec/support/reports/2b9ce956f7060163d7a0b78603dc05ca.crf +0 -0
  392. data/spec/support/reports/3254163f76b8696fa1677d18eaefce1d.crf +0 -0
  393. data/spec/support/reports/3a9e6a4fa91c901550ff295cd21d69c9.crf +0 -0
  394. data/spec/support/reports/3de18cf956e18664f441f94ee05e4118.crf +0 -0
  395. data/spec/support/reports/46bcffd844008e71c7d90a76baf8597d.crf +0 -0
  396. data/spec/support/reports/482cf0d0c54cda32bedf59b2ea995c65.crf +0 -0
  397. data/spec/support/reports/502508e3a29966200350196c9859881c.crf +0 -0
  398. data/spec/support/reports/5f0d832d46338d69b4eab6ff91ba4c91.crf +0 -0
  399. data/spec/support/reports/620287cfdc373595385cf2471e1d4523.crf +0 -0
  400. data/spec/support/reports/65d71de3b0e8c9fe894f4ba36cb4cb7c.crf +0 -0
  401. data/spec/support/reports/6754d1f86dc5544abeca7f06314ebc56.crf +0 -0
  402. data/spec/support/reports/75fecdd5e006942292e02e6a223e7279.crf +0 -0
  403. data/spec/support/reports/77523cc87357aa220c33d5604f41b5a1.crf +0 -0
  404. data/spec/support/reports/7e137bd5f71e0f0fbb63b93b87dae160.crf +0 -0
  405. data/spec/support/reports/806a17c3a40cc22eafd77c97b64a2ed5.crf +0 -0
  406. data/spec/support/reports/81cca53163bbab5ccf4d5f0401d5adcd.crf +0 -0
  407. data/spec/support/reports/8745ae8fdeb76289765582e06a614940.crf +0 -0
  408. data/spec/support/reports/8c204ee129fe1bd6c5964d29ae5d03ae.crf +0 -0
  409. data/spec/support/reports/937b7b516a99c23b2a641bb7bf695bfa.crf +0 -0
  410. data/spec/support/reports/a6eb89ffb1c33e33e421c1ee393118a9.crf +0 -0
  411. data/spec/support/reports/aadbf2c1544b0e11174853fb4883a38c.crf +0 -0
  412. data/spec/support/reports/c360a2833f3e635e69036916010edeac.crf +0 -0
  413. data/spec/support/reports/cf0bfb4662a7cda85db324936aae93d3.crf +0 -0
  414. data/spec/support/reports/cfff9c06897e28b1d58977909738c78f.crf +0 -0
  415. data/spec/support/reports/e2d5321452359193a758f7d16879ddb3.crf +0 -0
  416. data/spec/support/reports/e58c9dfbf9edeca9adcf32af5940f16b.crf +0 -0
  417. data/spec/support/reports/e96abea937f1ed3f89bc2ec5397522db.crf +0 -0
  418. data/spec/support/reports/ec0bbd4424061d305963551a29d20e1a.crf +0 -0
  419. data/spec/support/reports/f16e679098a4411d4f1c123abcd6af90.crf +0 -0
  420. data/spec/support/reports/f2c155087c431851979048ceb8c46ec9.crf +0 -0
  421. data/spec/support/reports/f3d542036e17f6a66e11bfacb2fb3366.crf +0 -0
  422. data/spec/support/reports/f72c7feef5f9c7e29f423c3ebe345ef3.crf +0 -0
  423. data/spec/support/reports/fbb2e1c6ada2d4b8229488f4c4b3b418.crf +0 -0
  424. data/spec/support/snapshots/Cuboid 2022-02-01 13_10_28 +0200 a06d9bd7db81b1b4fb077ceadcc3895f.csf +0 -0
  425. data/spec/support/snapshots/Cuboid 2022-02-01 13_10_42 +0200 36c68859faf144eed9ff9c01ae754217.csf +0 -0
  426. data/spec/support/snapshots/Cuboid 2022-02-01 13_24_42 +0200 5a26112a913330ee8763b2982a4d42df.csf +0 -0
  427. data/spec/support/snapshots/Cuboid 2022-02-01 13_24_56 +0200 190856b98ac7099eb553ed3abcfbcb87.csf +0 -0
  428. data/spec/support/snapshots/Cuboid 2022-02-01 13_35_35 +0200 a9c5229b459119840c9428a3d7c54eb1.csf +0 -0
  429. data/spec/support/snapshots/Cuboid 2022-02-01 13_35_41 +0200 d93660f71110dad136cea0758b29611e.csf +0 -0
  430. data/spec/support/snapshots/Cuboid 2022-02-01 13_35_44 +0200 57083dafa382f1d21cc33c9740861c88.csf +0 -0
  431. data/spec/support/snapshots/Cuboid 2022-02-13 07_55_57 +0200 6ff9bf8c7b70ad85fa3a42f44bcc5fe7.csf +0 -0
  432. data/spec/support/snapshots/Cuboid 2022-02-13 07_56_12 +0200 2852d4e0ae04e5365301703913f7b763.csf +0 -0
  433. data/spec/support/snapshots/Cuboid 2022-02-13 08_06_41 +0200 54da34dab4b059de90cdd33d400ccac7.csf +0 -0
  434. data/spec/support/snapshots/Cuboid 2022-02-13 08_06_46 +0200 61596bdce9318446373d1b736ec7c9ce.csf +0 -0
  435. data/spec/support/snapshots/Cuboid 2022-02-13 08_06_48 +0200 19030be87532ece27c1d4791a8d3229c.csf +0 -0
data/spec/cuboid/rpc/server/{dispatcher → agent}/node_spec.rb

@@ -11,11 +11,11 @@ describe Cuboid::RPC::Server::Agent::Node do
         c = Cuboid::RPC::Client::Base.new(
             "#{Cuboid::Options.rpc.server_address}:#{port}"
         )
-        c = Arachni::RPC::Proxy.new( c, 'node' )
+        c = Toq::Proxy.new( c, 'node' )
 
         begin
             c.alive?
-        rescue Arachni::RPC::Exceptions::ConnectionError
+        rescue Toq::Exceptions::ConnectionError
             sleep 0.1
             retry
         end
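This first spec hunk captures the headline change of the release: the `arachni-rpc` and `arachni-reactor` dependencies have been replaced by their renamed successors `toq` and `raktr`, so `Arachni::RPC::Proxy` becomes `Toq::Proxy` and the connection-error class moves to `Toq::Exceptions::ConnectionError`. A minimal client-side sketch of the new connect-and-poll pattern, assuming a node is already listening (the URL is a placeholder):

    require 'cuboid'

    url = '127.0.0.1:7331' # Hypothetical node address.

    # Wrap the low-level RPC client in a proxy for the 'node' handler,
    # exactly as the updated spec does.
    client = Cuboid::RPC::Client::Base.new( url )
    node   = Toq::Proxy.new( client, 'node' )

    begin
        node.alive?
    rescue Toq::Exceptions::ConnectionError
        sleep 0.1 # Node not accepting connections yet; poll until it is.
        retry
    end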
data/spec/cuboid/rpc/server/{dispatcher_spec.rb → agent_spec.rb}

@@ -23,8 +23,8 @@ describe Cuboid::RPC::Server::Agent do
     context 'and strategy is' do
         context :horizontal do
             it 'returns the URL of least burdened Agent' do
-                agent_spawn( peer: subject.url ).spawn( load_balance: false )
-                agent_spawn( peer: subject.url ).spawn( load_balance: false )
+                agent_spawn( peer: subject.url ).spawn( strategy: :direct )
+                agent_spawn( peer: subject.url ).spawn( strategy: :direct )
 
                 expect(subject.preferred( :horizontal )).to eq(subject.url)
             end
@@ -32,19 +32,23 @@ describe Cuboid::RPC::Server::Agent do
 
         context :vertical do
             it 'returns the URL of most burdened Agent' do
-                agent_spawn( peer: subject.url ).spawn( load_balance: false )
+                agent_spawn( peer: subject.url ).spawn( strategy: :direct )
                 d = agent_spawn( peer: subject.url )
-                d.spawn( load_balance: false )
-                d.spawn( load_balance: false )
+                d.spawn( strategy: :direct )
+                d.spawn( strategy: :direct )
 
                 expect(subject.preferred( :vertical )).to eq(d.url)
             end
         end
 
+        context :direct do
+            it 'returns the URL of this Agent'
+        end
+
         context 'default' do
             it 'returns the URL of least burdened Agent' do
-                agent_spawn( peer: subject.url ).spawn( load_balance: false )
-                agent_spawn( peer: subject.url ).spawn( load_balance: false )
+                agent_spawn( peer: subject.url ).spawn( strategy: :direct )
+                agent_spawn( peer: subject.url ).spawn( strategy: :direct )
 
                 expect(subject.preferred).to eq(subject.url)
             end
@@ -52,8 +56,8 @@ describe Cuboid::RPC::Server::Agent do
 
         context 'other' do
             it 'returns :error_unknown_strategy' do
-                agent_spawn( peer: subject.url ).spawn( load_balance: false )
-                agent_spawn( peer: subject.url ).spawn( load_balance: false )
+                agent_spawn( peer: subject.url ).spawn( strategy: :direct )
+                agent_spawn( peer: subject.url ).spawn( strategy: :direct )
 
                 expect(subject.preferred( :blah )).to eq('error_unknown_strategy')
             end
@@ -62,14 +66,14 @@ describe Cuboid::RPC::Server::Agent do
 
     context 'and all Agents are at max utilization' do
         before :each do
-            subject.spawn( load_balance: false )
+            subject.spawn( strategy: :direct )
         end
 
         let(:slots) { 1 }
 
         it 'returns nil' do
-            agent_spawn( peer: subject.url ).spawn( load_balance: false )
-            agent_spawn( peer: subject.url ).spawn( load_balance: false )
+            agent_spawn( peer: subject.url ).spawn( strategy: :direct )
+            agent_spawn( peer: subject.url ).spawn( strategy: :direct )
 
             expect(subject.preferred).to be_nil
         end
@@ -83,7 +87,7 @@ describe Cuboid::RPC::Server::Agent do
 
     context 'and it is at max utilization' do
         before :each do
-            subject.spawn( load_balance: false )
+            subject.spawn( strategy: :direct )
         end
 
         let(:slots) { 1 }
@@ -188,6 +192,10 @@ describe Cuboid::RPC::Server::Agent do
     let(:slots) { 4 }
 
     context 'and strategy is' do
+        context :direct do
+            it 'provides Instances from this Agent'
+        end
+
         context :horizontal do
             it 'provides Instances from the least burdened Agent' do
                 d1 = agent_spawn(
@@ -196,7 +204,7 @@ describe Cuboid::RPC::Server::Agent do
                 )
 
                 3.times do
-                    d1.spawn( load_balance: false )
+                    d1.spawn( strategy: :direct )
                 end
 
                 d2 = agent_spawn(
@@ -206,7 +214,7 @@ describe Cuboid::RPC::Server::Agent do
                 )
 
                 2.times do
-                    d2.spawn( load_balance: false )
+                    d2.spawn( strategy: :direct )
                 end
 
                 d3 = agent_spawn(
@@ -214,7 +222,7 @@ describe Cuboid::RPC::Server::Agent do
                     peer: d1.url,
                     application: "#{fixtures_path}/mock_app.rb"
                 )
-                d3.spawn( load_balance: false )
+                d3.spawn( strategy: :direct )
                 preferred = d3.url.split( ':' ).first
 
                 expect(d3.spawn(strategy: :horizontal )['url'].split( ':' ).first).to eq(preferred)
@@ -234,7 +242,7 @@ describe Cuboid::RPC::Server::Agent do
                 )
 
                 3.times do
-                    d1.spawn( load_balance: false )
+                    d1.spawn( strategy: :direct )
                 end
 
                 d2 = agent_spawn(
@@ -244,7 +252,7 @@ describe Cuboid::RPC::Server::Agent do
                 )
 
                 2.times do
-                    d2.spawn( load_balance: false )
+                    d2.spawn( strategy: :direct )
                 end
 
                 d3 = agent_spawn(
@@ -252,7 +260,7 @@ describe Cuboid::RPC::Server::Agent do
                     peer: d1.url,
                     application: "#{fixtures_path}/mock_app.rb"
                 )
-                d3.spawn( load_balance: false )
+                d3.spawn( strategy: :direct )
 
                 preferred = d1.url.split( ':' ).first
                 expect(d3.spawn( strategy: :vertical )['url'].split( ':' ).first).to eq(preferred)
@@ -267,7 +275,7 @@ describe Cuboid::RPC::Server::Agent do
                 )
 
                 3.times do
-                    d1.spawn( load_balance: false )
+                    d1.spawn( strategy: :direct )
                 end
 
                 d2 = agent_spawn(
@@ -277,7 +285,7 @@ describe Cuboid::RPC::Server::Agent do
                 )
 
                 2.times do
-                    d2.spawn( load_balance: false )
+                    d2.spawn( strategy: :direct )
                 end
 
                 d3 = agent_spawn(
@@ -285,7 +293,7 @@ describe Cuboid::RPC::Server::Agent do
                     peer: d1.url,
                     application: "#{fixtures_path}/mock_app.rb"
                 )
-                d3.spawn( load_balance: false )
+                d3.spawn( strategy: :direct )
                 preferred = d3.url.split( ':' ).first
 
                 expect(d3.spawn['url'].split( ':' ).first).to eq(preferred)
@@ -312,14 +320,14 @@ describe Cuboid::RPC::Server::Agent do
                 application: "#{fixtures_path}/mock_app.rb"
             )
 
-            d1.spawn( load_balance: false )
+            d1.spawn( strategy: :direct )
 
             d2 = agent_spawn(
                 address: '127.0.0.2',
                 peer: d1.url,
                 application: "#{fixtures_path}/mock_app.rb"
             )
-            d2.spawn( load_balance: false )
+            d2.spawn( strategy: :direct )
 
             d3 = agent_spawn(
                 address: '127.0.0.3',
@@ -327,10 +335,10 @@ describe Cuboid::RPC::Server::Agent do
                 application: "#{fixtures_path}/mock_app.rb"
             )
             2.times do
-                d3.spawn( load_balance: false )
+                d3.spawn( strategy: :direct )
             end
 
-            expect(d3.spawn( load_balance: false )['url'].
+            expect(d3.spawn( strategy: :direct )['url'].
                 split( ':' ).first).to eq('127.0.0.3')
         end
     end
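Taken together, the `agent_spec.rb` hunks replace the old boolean `spawn( load_balance: false )` option with an explicit `strategy:` keyword and add a `:direct` strategy (use this very Agent) alongside the existing `:horizontal` (least burdened) and `:vertical` (most burdened) ones. A rough sketch of the resulting API, where `agent` stands for any connected Agent client as in the specs above:

    # :direct     -- spawn on this Agent, bypassing load balancing
    # :horizontal -- route to the least burdened Agent in the grid
    # :vertical   -- route to the most burdened Agent in the grid
    info = agent.spawn( strategy: :direct )
    info['url'] # URL of the newly spawned Instance.

    # #preferred reports which Agent a given strategy would pick;
    # unknown strategies yield 'error_unknown_strategy'.
    agent.preferred( :horizontal ) # => URL of the least burdened Agent
    agent.preferred( :vertical )   # => URL of the most burdened Agent
    agent.preferred( :blah )       # => 'error_unknown_strategy'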
data/spec/cuboid/rpc/server/base_spec.rb

@@ -3,7 +3,7 @@ require "#{Cuboid::Options.paths.lib}/rpc/server/base"
 
 describe Cuboid::RPC::Server::Base do
     before( :each ) do
-        Arachni::Reactor.global.run_in_thread
+        Raktr.global.run_in_thread
     end
 
     let(:subject) { Cuboid::RPC::Server::Base.new(
@@ -11,7 +11,7 @@ describe Cuboid::RPC::Server::Base do
     ) }
     let(:port) { available_port }
 
-    it 'supports UNIX sockets', if: Arachni::Reactor.supports_unix_sockets? do
+    it 'supports UNIX sockets', if: Raktr.supports_unix_sockets? do
         server = Cuboid::RPC::Server::Base.new(
             socket: "#{Dir.tmpdir}/cuboid-base-#{Cuboid::Utilities.generate_token}"
         )
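The reactor capability check moves with the rename: `Arachni::Reactor.supports_unix_sockets?` becomes `Raktr.supports_unix_sockets?`. A short sketch of binding an RPC server to a UNIX socket instead of a TCP port, following the spec above:

    require 'cuboid'

    if Raktr.supports_unix_sockets?
        # Bind the server to a filesystem socket rather than host:port.
        server = Cuboid::RPC::Server::Base.new(
            socket: "#{Dir.tmpdir}/cuboid-base-#{Cuboid::Utilities.generate_token}"
        )
    end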
data/spec/cuboid/rpc/server/instance_spec.rb

@@ -5,7 +5,7 @@ require "#{fixtures_path}/mock_app"
 describe 'Cuboid::RPC::Server::Instance' do
     let(:subject) { instance_spawn( application: "#{fixtures_path}/mock_app.rb" ) }
 
-    it 'supports UNIX sockets', if: Arachni::Reactor.supports_unix_sockets? do
+    it 'supports UNIX sockets', if: Raktr.supports_unix_sockets? do
         socket = "#{Dir.tmpdir}/cuboid-instance-#{Cuboid::Utilities.generate_token}"
         subject = instance_spawn(
             socket: socket,
@@ -239,7 +239,7 @@ describe 'Cuboid::RPC::Server::Instance' do
         it 'raises ArgumentError' do
             expect do
                 subject.run invalid: :stuff
-            end.to raise_error Arachni::RPC::Exceptions::RemoteException
+            end.to raise_error Toq::Exceptions::RemoteException
         end
     end
 
@@ -300,7 +300,7 @@ describe 'Cuboid::RPC::Server::Instance' do
         expect(subject.shutdown).to be_truthy
         sleep 4
 
-        expect { subject.alive? }.to raise_error Arachni::RPC::Exceptions::ConnectionError
+        expect { subject.alive? }.to raise_error Toq::Exceptions::ConnectionError
     end
 end
 
data/spec/cuboid/rpc/server/scheduler_spec.rb

@@ -293,7 +293,7 @@ describe Cuboid::RPC::Server::Scheduler do
            process_kill( subject.running.values.first['pid'] )
            sleep 0.1 while subject.failed.empty?
 
-           expect(subject.failed[id]['error']).to eq 'Arachni::RPC::Exceptions::ConnectionError'
+           expect(subject.failed[id]['error']).to eq 'Toq::Exceptions::ConnectionError'
            expect(subject.failed[id]['description']).to include 'Connection closed'
        end
    end
@@ -339,7 +339,7 @@ describe Cuboid::RPC::Server::Scheduler do
        it 'raises ArgumentError' do
            expect do
                subject.push invalid: :test
-           end.to raise_error Arachni::RPC::Exceptions::RemoteException
+           end.to raise_error Toq::Exceptions::RemoteException
        end
    end
 
@@ -394,7 +394,7 @@ describe Cuboid::RPC::Server::Scheduler do
 
            expect do
                subject.alive?
-           end.to raise_error Arachni::RPC::Exceptions::ConnectionError
+           end.to raise_error Toq::Exceptions::ConnectionError
        end
    end
end
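The Instance and Scheduler specs above assert the renamed exception classes: server-side failures now surface as `Toq::Exceptions::RemoteException` and dead connections as `Toq::Exceptions::ConnectionError` (the Scheduler also records the class name as a string in its `failed` hash). A sketch of rescuing both on the client side, with `instance` assumed to be a connected Instance proxy:

    begin
        instance.run( invalid: :stuff )
    rescue Toq::Exceptions::RemoteException => e
        # The call reached the server but failed there (e.g. ArgumentError).
        warn "Remote error: #{e}"
    rescue Toq::Exceptions::ConnectionError
        # The process is gone or no longer accepting connections.
        warn 'Instance unreachable.'
    end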
data/spec/support/fixtures/executables/node.rb

@@ -30,7 +30,7 @@ class Node < Cuboid::RPC::Server::Agent::Node
     end
 
     def shutdown
-        Arachni::Reactor.global.delay 1 do
+        Raktr.global.delay 1 do
             Process.kill( 'KILL', Process.pid )
         end
     end
@@ -41,10 +41,10 @@ class Node < Cuboid::RPC::Server::Agent::Node
 
     def self.connect_to_peer( url )
         c = Cuboid::RPC::Client::Base.new( url )
-        Arachni::RPC::Proxy.new( c, 'node' )
+        Toq::Proxy.new( c, 'node' )
     end
 end
 
-Arachni::Reactor.global.run do
+Raktr.global.run do
     Node.new
 end
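The executable fixture shows the reactor rename end to end: `Arachni::Reactor.global` becomes `Raktr.global`, keeping the `run`, `run_in_thread`, `running?` and `delay` calls used throughout these diffs. A stripped-down sketch of the same pattern, assuming the gem loads via `require 'raktr'` (the require path is not shown in this diff):

    require 'raktr'

    # Start the global reactor in a background thread if it isn't running,
    # mirroring the guard in web_server_client.rb below.
    Raktr.global.run_in_thread if !Raktr.global.running?

    # Schedule a one-shot timer, as the fixture's #shutdown does.
    Raktr.global.delay 1 do
        puts 'one second later'
    end

    sleep 2 # Keep the main thread alive long enough for the timer to fire.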
data/spec/support/fixtures/mock_app/test_service.rb

@@ -36,7 +36,7 @@ class TestService < Cuboid::RPC::Server::Agent::Service
     end
 
     def test_iterator_for
-        iterator_for( instances ).class == Arachni::Reactor::Iterator
+        iterator_for( instances ).class == Raktr::Iterator
     end
 
     def test_connect_to_agent( url, &block )
data/spec/support/fixtures/services/echo.rb

@@ -35,7 +35,7 @@ class Service::Echo < Service
     end
 
     def test_iterator_for
-        iterator_for( instances ).class == Arachni::Reactor::Iterator
+        iterator_for( instances ).class == Raktr::Iterator
     end
 
     def test_connect_to_agent( url, &block )
data/spec/support/lib/web_server_client.rb

@@ -1,4 +1,4 @@
-require 'arachni/rpc'
+require 'toq'
 
 # @note Needs `ENV['WEB_SERVER_DISPATCHER']` in the format of `host:port`.
 #
@@ -9,15 +9,15 @@ require 'arachni/rpc'
 # Thin, Puma etc.).
 #
 # @author Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
-class WebServerClient < Arachni::RPC::Proxy
+class WebServerClient < Toq::Proxy
     include Singleton
 
     def initialize( options = {} )
         @host, port = ENV['WEB_SERVER_DISPATCHER'].split( ':' )
 
-        Arachni::Reactor.global.run_in_thread if !Arachni::Reactor.global.running?
+        Raktr.global.run_in_thread if !Raktr.global.running?
 
-        client = Arachni::RPC::Client.new( host: @host, port: port )
+        client = Toq::Client.new( host: @host, port: port )
         super client, 'server'
     end
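`WebServerClient` doubles as a template for a from-scratch `toq` client: construct a `Toq::Client` with host and port, then bind it to a remote handler through `Toq::Proxy`. A self-contained sketch under the same assumptions, with a placeholder endpoint and a server exposing a handler registered as 'server':

    require 'toq'

    Raktr.global.run_in_thread if !Raktr.global.running?

    client = Toq::Client.new( host: 'localhost', port: 7332 ) # Hypothetical endpoint.

    # Method calls on the proxy are dispatched to the remote 'server' handler.
    server = Toq::Proxy.new( client, 'server' )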
data/spec/support/lib/web_server_dispatcher.rb

@@ -2,7 +2,7 @@ require_relative '../../../lib/cuboid/processes/manager'
 require_relative '../../../lib/cuboid/processes/helpers'
 require_relative '../../support/helpers/paths'
 require_relative 'web_server_manager'
-require 'arachni/rpc'
+require 'toq'
 
 # @note Needs `ENV['WEB_SERVER_DISPATCHER']` in the format of `host:port`.
 #