cuboid 0.1.3 → 0.1.6

(Site banner from rubygems.org: "Sign up to get free protection for your applications and to get access to all the features.")
Files changed (597):
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +5 -2
  3. data/README.md +121 -4
  4. data/Rakefile +17 -0
  5. data/cuboid.gemspec +1 -1
  6. data/lib/cuboid/application.rb +1 -1
  7. data/lib/cuboid/processes/agents.rb +1 -1
  8. data/lib/cuboid/processes/executables/agent.rb +1 -1
  9. data/lib/cuboid/processes/executables/scheduler.rb +1 -1
  10. data/lib/cuboid/processes/instances.rb +1 -1
  11. data/lib/cuboid/processes/manager.rb +2 -2
  12. data/lib/cuboid/processes/schedulers.rb +1 -1
  13. data/lib/cuboid/rpc/client/agent.rb +2 -2
  14. data/lib/cuboid/rpc/client/base.rb +2 -2
  15. data/lib/cuboid/rpc/client/instance/service.rb +1 -1
  16. data/lib/cuboid/rpc/client/instance.rb +4 -4
  17. data/lib/cuboid/rpc/server/agent/node.rb +2 -2
  18. data/lib/cuboid/rpc/server/agent/service.rb +5 -5
  19. data/lib/cuboid/rpc/server/agent.rb +3 -3
  20. data/lib/cuboid/rpc/server/base.rb +2 -2
  21. data/lib/cuboid/rpc/server/instance.rb +3 -3
  22. data/lib/cuboid/rpc/server/scheduler.rb +1 -1
  23. data/lib/version +1 -1
  24. data/spec/cuboid/rest/server_spec.rb +5 -5
  25. data/spec/cuboid/rpc/client/base_spec.rb +4 -4
  26. data/spec/cuboid/rpc/client/instance_spec.rb +1 -1
  27. data/spec/cuboid/rpc/server/agent/node_spec.rb +2 -2
  28. data/spec/cuboid/rpc/server/base_spec.rb +2 -2
  29. data/spec/cuboid/rpc/server/instance_spec.rb +6 -4
  30. data/spec/cuboid/rpc/server/scheduler_spec.rb +3 -3
  31. data/spec/support/fixtures/executables/node.rb +3 -3
  32. data/spec/support/fixtures/mock_app/test_service.rb +1 -1
  33. data/spec/support/fixtures/services/echo.rb +1 -1
  34. data/spec/support/lib/web_server_client.rb +4 -4
  35. data/spec/support/lib/web_server_dispatcher.rb +1 -1
  36. metadata +75 -1197
  37. data/spec/support/logs/Agent - 1797289-46860.log +0 -6
  38. data/spec/support/logs/Agent - 1797295-27499.log +0 -11
  39. data/spec/support/logs/Agent - 1797297-3144.log +0 -11
  40. data/spec/support/logs/Agent - 1797379-31898.log +0 -6
  41. data/spec/support/logs/Agent - 1797386-37553.log +0 -11
  42. data/spec/support/logs/Agent - 1797390-46182.log +0 -11
  43. data/spec/support/logs/Agent - 1797487-7120.log +0 -11
  44. data/spec/support/logs/Agent - 1797492-19266.log +0 -11
  45. data/spec/support/logs/Agent - 1797535-15905.log +0 -6
  46. data/spec/support/logs/Agent - 1797540-32892.log +0 -16
  47. data/spec/support/logs/Agent - 1797542-59822.log +0 -14
  48. data/spec/support/logs/Agent - 1797547-57893.log +0 -10
  49. data/spec/support/logs/Agent - 1797558-26026.log +0 -11
  50. data/spec/support/logs/Agent - 1797560-6022.log +0 -11
  51. data/spec/support/logs/Agent - 1797592-36797.log +0 -6
  52. data/spec/support/logs/Agent - 1797598-8455.log +0 -16
  53. data/spec/support/logs/Agent - 1797600-56813.log +0 -14
  54. data/spec/support/logs/Agent - 1797604-47328.log +0 -10
  55. data/spec/support/logs/Agent - 1797611-25674.log +0 -16
  56. data/spec/support/logs/Agent - 1797613-63564.log +0 -14
  57. data/spec/support/logs/Agent - 1797622-56082.log +0 -10
  58. data/spec/support/logs/Agent - 1797632-63113.log +0 -16
  59. data/spec/support/logs/Agent - 1797640-55358.log +0 -14
  60. data/spec/support/logs/Agent - 1797644-19227.log +0 -10
  61. data/spec/support/logs/Agent - 1797649-4273.log +0 -16
  62. data/spec/support/logs/Agent - 1797651-45800.log +0 -14
  63. data/spec/support/logs/Agent - 1797655-44436.log +0 -10
  64. data/spec/support/logs/Agent - 1797663-8070.log +0 -16
  65. data/spec/support/logs/Agent - 1797667-56769.log +0 -14
  66. data/spec/support/logs/Agent - 1797671-53005.log +0 -10
  67. data/spec/support/logs/Agent - 1797678-6376.log +0 -6
  68. data/spec/support/logs/Agent - 1797680-53067.log +0 -6
  69. data/spec/support/logs/Agent - 1797684-1440.log +0 -6
  70. data/spec/support/logs/Agent - 1797688-2251.log +0 -6
  71. data/spec/support/logs/Agent - 1797698-41295.log +0 -6
  72. data/spec/support/logs/Agent - 1797702-50250.log +0 -6
  73. data/spec/support/logs/Agent - 1797707-29900.log +0 -6
  74. data/spec/support/logs/Agent - 1797718-49346.log +0 -6
  75. data/spec/support/logs/Agent - 1797726-15376.log +0 -6
  76. data/spec/support/logs/Agent - 1797745-40056.log +0 -6
  77. data/spec/support/logs/Agent - 1797804-62957.log +0 -17
  78. data/spec/support/logs/Agent - 1797821-21500.log +0 -15
  79. data/spec/support/logs/Agent - 1797832-1983.log +0 -11
  80. data/spec/support/logs/Agent - 1797939-12833.log +0 -17
  81. data/spec/support/logs/Agent - 1797952-60103.log +0 -15
  82. data/spec/support/logs/Agent - 1797960-13475.log +0 -11
  83. data/spec/support/logs/Agent - 1798019-61496.log +0 -17
  84. data/spec/support/logs/Agent - 1798033-46806.log +0 -15
  85. data/spec/support/logs/Agent - 1798041-8155.log +0 -11
  86. data/spec/support/logs/Agent - 1798099-42581.log +0 -17
  87. data/spec/support/logs/Agent - 1798113-6252.log +0 -15
  88. data/spec/support/logs/Agent - 1798121-58237.log +0 -11
  89. data/spec/support/logs/Agent - 1798182-15586.log +0 -16
  90. data/spec/support/logs/Agent - 1798197-41430.log +0 -14
  91. data/spec/support/logs/Agent - 1798205-23172.log +0 -10
  92. data/spec/support/logs/Agent - 1798248-46908.log +0 -6
  93. data/spec/support/logs/Agent - 1798253-25994.log +0 -16
  94. data/spec/support/logs/Agent - 1798255-19341.log +0 -14
  95. data/spec/support/logs/Agent - 1798261-4788.log +0 -10
  96. data/spec/support/logs/Agent - 1798270-2955.log +0 -16
  97. data/spec/support/logs/Agent - 1798277-35029.log +0 -14
  98. data/spec/support/logs/Agent - 1798283-51766.log +0 -10
  99. data/spec/support/logs/Agent - 1798294-27029.log +0 -16
  100. data/spec/support/logs/Agent - 1798296-19420.log +0 -14
  101. data/spec/support/logs/Agent - 1798301-49988.log +0 -10
  102. data/spec/support/logs/Agent - 1798306-62835.log +0 -16
  103. data/spec/support/logs/Agent - 1798308-5619.log +0 -14
  104. data/spec/support/logs/Agent - 1798317-13936.log +0 -10
  105. data/spec/support/logs/Agent - 1798325-24989.log +0 -16
  106. data/spec/support/logs/Agent - 1798329-59694.log +0 -14
  107. data/spec/support/logs/Agent - 1798333-7693.log +0 -10
  108. data/spec/support/logs/Agent - 1798339-9797.log +0 -6
  109. data/spec/support/logs/Agent - 1798341-47566.log +0 -6
  110. data/spec/support/logs/Agent - 1798345-57649.log +0 -6
  111. data/spec/support/logs/Agent - 1798349-1601.log +0 -6
  112. data/spec/support/logs/Agent - 1798365-33727.log +0 -6
  113. data/spec/support/logs/Agent - 1798369-47048.log +0 -6
  114. data/spec/support/logs/Agent - 1798373-39436.log +0 -6
  115. data/spec/support/logs/Agent - 1798380-22295.log +0 -6
  116. data/spec/support/logs/Agent - 1798384-54448.log +0 -6
  117. data/spec/support/logs/Agent - 1798401-25598.log +0 -6
  118. data/spec/support/logs/Agent - 1798464-60577.log +0 -16
  119. data/spec/support/logs/Agent - 1798476-32013.log +0 -14
  120. data/spec/support/logs/Agent - 1798488-17640.log +0 -10
  121. data/spec/support/logs/Agent - 1798516-32546.log +0 -16
  122. data/spec/support/logs/Agent - 1798526-32438.log +0 -14
  123. data/spec/support/logs/Agent - 1798533-62298.log +0 -10
  124. data/spec/support/logs/Agent - 1798543-21098.log +0 -16
  125. data/spec/support/logs/Agent - 1798555-25891.log +0 -14
  126. data/spec/support/logs/Agent - 1798562-34080.log +0 -10
  127. data/spec/support/logs/Agent - 1798606-52034.log +0 -10
  128. data/spec/support/logs/Agent - 1798608-43174.log +0 -10
  129. data/spec/support/logs/Agent - 1798610-14773.log +0 -16
  130. data/spec/support/logs/Agent - 1798614-18350.log +0 -14
  131. data/spec/support/logs/Agent - 1798621-4674.log +0 -10
  132. data/spec/support/logs/Agent - 1798645-52114.log +0 -6
  133. data/spec/support/logs/Agent - 1798650-42695.log +0 -6
  134. data/spec/support/logs/Agent - 1798661-51655.log +0 -7
  135. data/spec/support/logs/Agent - 1798680-43699.log +0 -6
  136. data/spec/support/logs/Agent - 1798687-63896.log +0 -16
  137. data/spec/support/logs/Agent - 1798691-57598.log +0 -14
  138. data/spec/support/logs/Agent - 1798701-14752.log +0 -10
  139. data/spec/support/logs/Agent - 1798708-25763.log +0 -16
  140. data/spec/support/logs/Agent - 1798710-32797.log +0 -14
  141. data/spec/support/logs/Agent - 1798714-28009.log +0 -10
  142. data/spec/support/logs/Agent - 1798724-41047.log +0 -16
  143. data/spec/support/logs/Agent - 1798726-55249.log +0 -14
  144. data/spec/support/logs/Agent - 1798735-6516.log +0 -10
  145. data/spec/support/logs/Agent - 1798743-54492.log +0 -16
  146. data/spec/support/logs/Agent - 1798745-3549.log +0 -14
  147. data/spec/support/logs/Agent - 1798749-63028.log +0 -10
  148. data/spec/support/logs/Agent - 1798759-46764.log +0 -16
  149. data/spec/support/logs/Agent - 1798763-20439.log +0 -14
  150. data/spec/support/logs/Agent - 1798767-24602.log +0 -10
  151. data/spec/support/logs/Agent - 1798773-21120.log +0 -6
  152. data/spec/support/logs/Agent - 1798775-30653.log +0 -7
  153. data/spec/support/logs/Agent - 1798794-13970.log +0 -6
  154. data/spec/support/logs/Agent - 1798799-61184.log +0 -16
  155. data/spec/support/logs/Agent - 1798801-17972.log +0 -14
  156. data/spec/support/logs/Agent - 1798805-20634.log +0 -10
  157. data/spec/support/logs/Agent - 1798812-2372.log +0 -16
  158. data/spec/support/logs/Agent - 1798814-56878.log +0 -14
  159. data/spec/support/logs/Agent - 1798824-62001.log +0 -10
  160. data/spec/support/logs/Agent - 1798835-39880.log +0 -16
  161. data/spec/support/logs/Agent - 1798837-61826.log +0 -14
  162. data/spec/support/logs/Agent - 1798841-16507.log +0 -10
  163. data/spec/support/logs/Agent - 1798846-48234.log +0 -16
  164. data/spec/support/logs/Agent - 1798848-5064.log +0 -14
  165. data/spec/support/logs/Agent - 1798853-31760.log +0 -10
  166. data/spec/support/logs/Agent - 1798866-50670.log +0 -16
  167. data/spec/support/logs/Agent - 1798870-45487.log +0 -14
  168. data/spec/support/logs/Agent - 1798874-52084.log +0 -10
  169. data/spec/support/logs/Agent - 1798880-36517.log +0 -6
  170. data/spec/support/logs/Agent - 1798882-40177.log +0 -6
  171. data/spec/support/logs/Agent - 1798886-7030.log +0 -6
  172. data/spec/support/logs/Agent - 1798895-7798.log +0 -6
  173. data/spec/support/logs/Agent - 1798906-43816.log +0 -6
  174. data/spec/support/logs/Agent - 1798910-18160.log +0 -6
  175. data/spec/support/logs/Agent - 1798916-29506.log +0 -6
  176. data/spec/support/logs/Agent - 1798920-21682.log +0 -6
  177. data/spec/support/logs/Agent - 1798926-45303.log +0 -6
  178. data/spec/support/logs/Agent - 1798950-17451.log +0 -6
  179. data/spec/support/logs/Agent - 1799005-32280.log +0 -16
  180. data/spec/support/logs/Agent - 1799017-9877.log +0 -14
  181. data/spec/support/logs/Agent - 1799024-65347.log +0 -10
  182. data/spec/support/logs/Agent - 1799051-25939.log +0 -16
  183. data/spec/support/logs/Agent - 1799061-59062.log +0 -14
  184. data/spec/support/logs/Agent - 1799074-49397.log +0 -10
  185. data/spec/support/logs/Agent - 1799083-27497.log +0 -16
  186. data/spec/support/logs/Agent - 1799096-40902.log +0 -14
  187. data/spec/support/logs/Agent - 1799103-20369.log +0 -10
  188. data/spec/support/logs/Agent - 1799135-60557.log +0 -10
  189. data/spec/support/logs/Agent - 1799137-3725.log +0 -10
  190. data/spec/support/logs/Agent - 1799144-38486.log +0 -16
  191. data/spec/support/logs/Agent - 1799150-30915.log +0 -14
  192. data/spec/support/logs/Agent - 1799154-18474.log +0 -10
  193. data/spec/support/logs/Agent - 1799170-26867.log +0 -6
  194. data/spec/support/logs/Agent - 1799174-16635.log +0 -6
  195. data/spec/support/logs/Agent - 1799184-8862.log +0 -6
  196. data/spec/support/logs/Agent - 1799197-45365.log +0 -6
  197. data/spec/support/logs/Agent - 1799214-21005.log +0 -6
  198. data/spec/support/logs/Agent - 1799225-8303.log +0 -6
  199. data/spec/support/logs/Agent - 1799229-20233.log +0 -6
  200. data/spec/support/logs/Agent - 1799234-31440.log +0 -6
  201. data/spec/support/logs/Agent - 2486896-44236.log +0 -6
  202. data/spec/support/logs/Agent - 2487229-16390.log +0 -6
  203. data/spec/support/logs/Agent - 2487520-2511.log +0 -6
  204. data/spec/support/logs/Agent - 2487522-24008.log +0 -6
  205. data/spec/support/logs/Agent - 2487526-3383.log +0 -6
  206. data/spec/support/logs/Agent - 2487528-23713.log +0 -10
  207. data/spec/support/logs/Agent - 2487530-42875.log +0 -10
  208. data/spec/support/logs/Agent - 2487533-14182.log +0 -10
  209. data/spec/support/logs/Agent - 2487535-32486.log +0 -10
  210. data/spec/support/logs/Agent - 2487537-30578.log +0 -10
  211. data/spec/support/logs/Agent - 2487539-65402.log +0 -10
  212. data/spec/support/logs/Agent - 2493974-23066.log +0 -6
  213. data/spec/support/logs/Agent - 2501716-11729.log +0 -6
  214. data/spec/support/logs/Agent - 2501724-48638.log +0 -6
  215. data/spec/support/logs/Agent - 2501961-60077.log +0 -6
  216. data/spec/support/logs/Agent - 2501976-10941.log +0 -10
  217. data/spec/support/logs/Agent - 2502050-45312.log +0 -10
  218. data/spec/support/logs/Agent - 2502131-45940.log +0 -10
  219. data/spec/support/logs/Agent - 2502139-59848.log +0 -10
  220. data/spec/support/logs/Agent - 2502262-46629.log +0 -10
  221. data/spec/support/logs/Agent - 2502298-28395.log +0 -10
  222. data/spec/support/logs/Agent - 2512541-7108.log +0 -6
  223. data/spec/support/logs/Agent - 2515458-5532.log +0 -6
  224. data/spec/support/logs/Agent - 2515540-20379.log +0 -6
  225. data/spec/support/logs/Agent - 2515550-19661.log +0 -6
  226. data/spec/support/logs/Agent - 2515626-34192.log +0 -6
  227. data/spec/support/logs/Agent - 2515879-19884.log +0 -6
  228. data/spec/support/logs/Agent - 2516130-27514.log +0 -6
  229. data/spec/support/logs/Agent - 2516382-45886.log +0 -6
  230. data/spec/support/logs/Agent - 2516390-47909.log +0 -6
  231. data/spec/support/logs/Agent - 2516465-47774.log +0 -6
  232. data/spec/support/logs/Agent - 2516546-13955.log +0 -6
  233. data/spec/support/logs/Agent - 2516561-39041.log +0 -6
  234. data/spec/support/logs/Agent - 2516710-46722.log +0 -6
  235. data/spec/support/logs/Agent - 2516795-56481.log +0 -16
  236. data/spec/support/logs/Agent - 2516798-22121.log +0 -14
  237. data/spec/support/logs/Agent - 2516881-21755.log +0 -10
  238. data/spec/support/logs/Agent - 2517053-64944.log +0 -16
  239. data/spec/support/logs/Agent - 2517056-63364.log +0 -14
  240. data/spec/support/logs/Agent - 2517066-34797.log +0 -10
  241. data/spec/support/logs/Agent - 2518042-30794.log +0 -16
  242. data/spec/support/logs/Agent - 2518050-6895.log +0 -14
  243. data/spec/support/logs/Agent - 2518212-36288.log +0 -10
  244. data/spec/support/logs/Agent - 2518395-10976.log +0 -16
  245. data/spec/support/logs/Agent - 2518472-35419.log +0 -14
  246. data/spec/support/logs/Agent - 2518559-43119.log +0 -10
  247. data/spec/support/logs/Agent - 2518647-48415.log +0 -16
  248. data/spec/support/logs/Agent - 2518652-65099.log +0 -14
  249. data/spec/support/logs/Agent - 2518658-1449.log +0 -10
  250. data/spec/support/logs/Agent - 2518910-62083.log +0 -6
  251. data/spec/support/logs/Agent - 2518919-58035.log +0 -6
  252. data/spec/support/logs/Agent - 2519076-57825.log +0 -6
  253. data/spec/support/logs/Agent - 2519156-8413.log +0 -6
  254. data/spec/support/logs/Agent - 2519406-64200.log +0 -6
  255. data/spec/support/logs/Agent - 2519570-20841.log +0 -6
  256. data/spec/support/logs/Agent - 2519659-30136.log +0 -6
  257. data/spec/support/logs/Agent - 2519822-9736.log +0 -6
  258. data/spec/support/logs/Agent - 2520004-59217.log +0 -6
  259. data/spec/support/logs/Agent - 2520357-37723.log +0 -6
  260. data/spec/support/logs/Agent - 2521294-5229.log +0 -16
  261. data/spec/support/logs/Agent - 2521407-31515.log +0 -14
  262. data/spec/support/logs/Agent - 2521415-7501.log +0 -10
  263. data/spec/support/logs/Agent - 2521452-22168.log +0 -16
  264. data/spec/support/logs/Agent - 2521463-24492.log +0 -14
  265. data/spec/support/logs/Agent - 2521482-50808.log +0 -10
  266. data/spec/support/logs/Agent - 2521503-8180.log +0 -16
  267. data/spec/support/logs/Agent - 2521562-3183.log +0 -14
  268. data/spec/support/logs/Agent - 2521639-45199.log +0 -10
  269. data/spec/support/logs/Agent - 2521891-30967.log +0 -10
  270. data/spec/support/logs/Agent - 2521897-64837.log +0 -10
  271. data/spec/support/logs/Agent - 2521916-23526.log +0 -16
  272. data/spec/support/logs/Agent - 2521936-26313.log +0 -14
  273. data/spec/support/logs/Agent - 2521948-18199.log +0 -10
  274. data/spec/support/logs/Agent - 2522026-18512.log +0 -6
  275. data/spec/support/logs/Agent - 2522093-46502.log +0 -6
  276. data/spec/support/logs/Agent - 2522195-30981.log +0 -6
  277. data/spec/support/logs/Agent - 2522295-55509.log +0 -6
  278. data/spec/support/logs/Agent - 2522347-32811.log +0 -6
  279. data/spec/support/logs/Agent - 2523391-7522.log +0 -6
  280. data/spec/support/logs/Agent - 2523626-61560.log +0 -6
  281. data/spec/support/logs/Agent - 2523913-25677.log +0 -6
  282. data/spec/support/logs/Agent - 2539887-21281.log +0 -6
  283. data/spec/support/logs/Agent - 2540261-61895.log +0 -6
  284. data/spec/support/logs/Agent - 2540430-8261.log +0 -6
  285. data/spec/support/logs/Agent - 361913-24577.log +0 -6
  286. data/spec/support/logs/Agent - 362152-33398.log +0 -6
  287. data/spec/support/logs/Agent - 362154-44243.log +0 -6
  288. data/spec/support/logs/Agent - 362157-6034.log +0 -6
  289. data/spec/support/logs/Agent - 362160-38548.log +0 -10
  290. data/spec/support/logs/Agent - 362162-59176.log +0 -10
  291. data/spec/support/logs/Agent - 362165-23738.log +0 -10
  292. data/spec/support/logs/Agent - 362167-57542.log +0 -10
  293. data/spec/support/logs/Agent - 362172-22798.log +0 -10
  294. data/spec/support/logs/Agent - 362174-46609.log +0 -10
  295. data/spec/support/logs/Agent - 362398-10913.log +0 -6
  296. data/spec/support/logs/Agent - 362524-29482.log +0 -6
  297. data/spec/support/logs/Agent - 362527-40674.log +0 -6
  298. data/spec/support/logs/Agent - 362530-32963.log +0 -6
  299. data/spec/support/logs/Agent - 362532-22236.log +0 -6
  300. data/spec/support/logs/Agent - 362543-4429.log +0 -6
  301. data/spec/support/logs/Agent - 362557-7510.log +0 -6
  302. data/spec/support/logs/Agent - 362574-36915.log +0 -6
  303. data/spec/support/logs/Agent - 362576-65372.log +0 -6
  304. data/spec/support/logs/Agent - 362578-29183.log +0 -6
  305. data/spec/support/logs/Agent - 362580-43546.log +0 -6
  306. data/spec/support/logs/Agent - 362582-22254.log +0 -6
  307. data/spec/support/logs/Agent - 362588-40009.log +0 -6
  308. data/spec/support/logs/Agent - 362591-60596.log +0 -16
  309. data/spec/support/logs/Agent - 362595-3584.log +0 -14
  310. data/spec/support/logs/Agent - 362599-39016.log +0 -10
  311. data/spec/support/logs/Agent - 362606-58506.log +0 -16
  312. data/spec/support/logs/Agent - 362609-55768.log +0 -14
  313. data/spec/support/logs/Agent - 362613-37168.log +0 -10
  314. data/spec/support/logs/Agent - 362629-16593.log +0 -16
  315. data/spec/support/logs/Agent - 362632-32264.log +0 -14
  316. data/spec/support/logs/Agent - 362643-59807.log +0 -10
  317. data/spec/support/logs/Agent - 362649-40552.log +0 -16
  318. data/spec/support/logs/Agent - 362655-31144.log +0 -14
  319. data/spec/support/logs/Agent - 362660-12968.log +0 -10
  320. data/spec/support/logs/Agent - 362666-6806.log +0 -16
  321. data/spec/support/logs/Agent - 362673-39178.log +0 -14
  322. data/spec/support/logs/Agent - 362678-36132.log +0 -10
  323. data/spec/support/logs/Agent - 362685-62240.log +0 -6
  324. data/spec/support/logs/Agent - 362687-4068.log +0 -6
  325. data/spec/support/logs/Agent - 362691-48465.log +0 -6
  326. data/spec/support/logs/Agent - 362693-40295.log +0 -6
  327. data/spec/support/logs/Agent - 362705-53855.log +0 -6
  328. data/spec/support/logs/Agent - 362712-14029.log +0 -6
  329. data/spec/support/logs/Agent - 362717-3069.log +0 -6
  330. data/spec/support/logs/Agent - 362721-38670.log +0 -6
  331. data/spec/support/logs/Agent - 362725-7644.log +0 -6
  332. data/spec/support/logs/Agent - 362748-18302.log +0 -6
  333. data/spec/support/logs/Agent - 362828-61766.log +0 -16
  334. data/spec/support/logs/Agent - 362838-44693.log +0 -14
  335. data/spec/support/logs/Agent - 362847-31875.log +0 -10
  336. data/spec/support/logs/Agent - 362875-46401.log +0 -16
  337. data/spec/support/logs/Agent - 362887-52041.log +0 -14
  338. data/spec/support/logs/Agent - 362897-7426.log +0 -10
  339. data/spec/support/logs/Agent - 362908-35228.log +0 -16
  340. data/spec/support/logs/Agent - 362919-33764.log +0 -14
  341. data/spec/support/logs/Agent - 362928-1323.log +0 -10
  342. data/spec/support/logs/Agent - 362960-27883.log +0 -10
  343. data/spec/support/logs/Agent - 362964-26072.log +0 -10
  344. data/spec/support/logs/Agent - 362966-42927.log +0 -16
  345. data/spec/support/logs/Agent - 362972-56025.log +0 -14
  346. data/spec/support/logs/Agent - 362977-39574.log +0 -10
  347. data/spec/support/logs/Agent - 362992-23510.log +0 -6
  348. data/spec/support/logs/Agent - 362996-63445.log +0 -6
  349. data/spec/support/logs/Agent - 363017-59359.log +0 -6
  350. data/spec/support/logs/Agent - 363034-15682.log +0 -6
  351. data/spec/support/logs/Agent - 363050-6605.log +0 -6
  352. data/spec/support/logs/Agent - 363065-59445.log +0 -6
  353. data/spec/support/logs/Agent - 363070-6619.log +0 -6
  354. data/spec/support/logs/Agent - 363077-18731.log +0 -6
  355. data/spec/support/logs/Agent - 363250-16962.log +0 -6
  356. data/spec/support/logs/Agent - 363265-25598.log +0 -6
  357. data/spec/support/logs/Agent - 363279-50603.log +0 -6
  358. data/spec/support/logs/Instance - 2525557-35695.error.log +0 -105
  359. data/spec/support/logs/Instance - 2525638-6350.error.log +0 -105
  360. data/spec/support/logs/Instance - 2525719-30351.error.log +0 -105
  361. data/spec/support/logs/Instance - 363119-21886.error.log +0 -105
  362. data/spec/support/logs/Instance - 363121-31774.error.log +0 -105
  363. data/spec/support/logs/Instance - 363127-52671.error.log +0 -105
  364. data/spec/support/logs/Scheduler - 2486608-59709.log +0 -3
  365. data/spec/support/logs/Scheduler - 2486612-44110.log +0 -27
  366. data/spec/support/logs/Scheduler - 2486723-50393.log +0 -3
  367. data/spec/support/logs/Scheduler - 2486727-21620.log +0 -27
  368. data/spec/support/logs/Scheduler - 2486877-37845.log +0 -3
  369. data/spec/support/logs/Scheduler - 2486881-3624.log +0 -1
  370. data/spec/support/logs/Scheduler - 2486911-24752.log +0 -3
  371. data/spec/support/logs/Scheduler - 2486919-48535.log +0 -27
  372. data/spec/support/logs/Scheduler - 2486985-8897.log +0 -1
  373. data/spec/support/logs/Scheduler - 2487211-7516.log +0 -3
  374. data/spec/support/logs/Scheduler - 2487215-2831.log +0 -1
  375. data/spec/support/logs/Scheduler - 2487246-7826.log +0 -3
  376. data/spec/support/logs/Scheduler - 2487256-35669.log +0 -6
  377. data/spec/support/logs/Scheduler - 2487272-11542.log +0 -4
  378. data/spec/support/logs/Scheduler - 2487278-9621.log +0 -1
  379. data/spec/support/logs/Scheduler - 2487291-24094.log +0 -3
  380. data/spec/support/logs/Scheduler - 2487299-60095.log +0 -6
  381. data/spec/support/logs/Scheduler - 2487368-7706.log +0 -3
  382. data/spec/support/logs/Scheduler - 2487378-9859.log +0 -6
  383. data/spec/support/logs/Scheduler - 2487396-17812.log +0 -3
  384. data/spec/support/logs/Scheduler - 2487407-25543.log +0 -6
  385. data/spec/support/logs/Scheduler - 2487451-44767.log +0 -4
  386. data/spec/support/logs/Scheduler - 2487506-1422.log +0 -6
  387. data/spec/support/logs/Scheduler - 2487541-38068.log +0 -1
  388. data/spec/support/logs/Scheduler - 2487544-21866.log +0 -1
  389. data/spec/support/logs/Scheduler - 2487548-15245.log +0 -1
  390. data/spec/support/logs/Scheduler - 2487551-34905.log +0 -1
  391. data/spec/support/logs/Scheduler - 2487554-22142.log +0 -1
  392. data/spec/support/logs/Scheduler - 2487562-35113.log +0 -1
  393. data/spec/support/logs/Scheduler - 2487565-55125.log +0 -3
  394. data/spec/support/logs/Scheduler - 2487569-48845.log +0 -6
  395. data/spec/support/logs/Scheduler - 2487576-57192.log +0 -4
  396. data/spec/support/logs/Scheduler - 2487583-17991.log +0 -1
  397. data/spec/support/logs/Scheduler - 2487586-30014.log +0 -1
  398. data/spec/support/logs/Scheduler - 2487591-6472.log +0 -1
  399. data/spec/support/logs/Scheduler - 2487594-2195.log +0 -1
  400. data/spec/support/logs/Scheduler - 2487598-55808.log +0 -3
  401. data/spec/support/logs/Scheduler - 2487605-7400.log +0 -1
  402. data/spec/support/logs/Scheduler - 2487607-4337.log +0 -1
  403. data/spec/support/logs/Scheduler - 2487610-25835.log +0 -1
  404. data/spec/support/logs/Scheduler - 2493623-45209.log +0 -3
  405. data/spec/support/logs/Scheduler - 2493714-59407.log +0 -1
  406. data/spec/support/logs/Scheduler - 2494470-61696.log +0 -3
  407. data/spec/support/logs/Scheduler - 2494723-2810.log +0 -6
  408. data/spec/support/logs/Scheduler - 2495458-22112.log +0 -4
  409. data/spec/support/logs/Scheduler - 2496034-4076.log +0 -1
  410. data/spec/support/logs/Scheduler - 2496119-62253.log +0 -3
  411. data/spec/support/logs/Scheduler - 2496210-50380.log +0 -6
  412. data/spec/support/logs/Scheduler - 2497536-24922.log +0 -3
  413. data/spec/support/logs/Scheduler - 2497786-13515.log +0 -6
  414. data/spec/support/logs/Scheduler - 2498774-16911.log +0 -3
  415. data/spec/support/logs/Scheduler - 2498961-4742.log +0 -6
  416. data/spec/support/logs/Scheduler - 2500340-16045.log +0 -4
  417. data/spec/support/logs/Scheduler - 2500980-26158.log +0 -6
  418. data/spec/support/logs/Scheduler - 2502381-26435.log +0 -1
  419. data/spec/support/logs/Scheduler - 2502463-62965.log +0 -1
  420. data/spec/support/logs/Scheduler - 2502547-53434.log +0 -1
  421. data/spec/support/logs/Scheduler - 2502628-43720.log +0 -1
  422. data/spec/support/logs/Scheduler - 2502643-58379.log +0 -1
  423. data/spec/support/logs/Scheduler - 2502873-64893.log +0 -1
  424. data/spec/support/logs/Scheduler - 2502954-43885.log +0 -3
  425. data/spec/support/logs/Scheduler - 2503039-52147.log +0 -6
  426. data/spec/support/logs/Scheduler - 2503768-28831.log +0 -4
  427. data/spec/support/logs/Scheduler - 2504259-24533.log +0 -1
  428. data/spec/support/logs/Scheduler - 2504343-56967.log +0 -1
  429. data/spec/support/logs/Scheduler - 2504502-25085.log +0 -1
  430. data/spec/support/logs/Scheduler - 2504587-30789.log +0 -1
  431. data/spec/support/logs/Scheduler - 2504608-56601.log +0 -3
  432. data/spec/support/logs/Scheduler - 2504760-36374.log +0 -1
  433. data/spec/support/logs/Scheduler - 2504841-49675.log +0 -1
  434. data/spec/support/logs/Scheduler - 2504923-15781.log +0 -1
  435. data/spec/support/logs/Scheduler - 2532871-63847.log +0 -16
  436. data/spec/support/logs/Scheduler - 2538978-63705.log +0 -6
  437. data/spec/support/logs/Scheduler - 2539461-10579.log +0 -6
  438. data/spec/support/logs/Scheduler - 2539981-44320.log +0 -6
  439. data/spec/support/logs/Scheduler - 2540265-37672.log +0 -3
  440. data/spec/support/logs/Scheduler - 2540434-9490.log +0 -5
  441. data/spec/support/logs/Scheduler - 2540840-9957.log +0 -1
  442. data/spec/support/logs/Scheduler - 2540921-2437.log +0 -1
  443. data/spec/support/logs/Scheduler - 2540937-17100.log +0 -1
  444. data/spec/support/logs/Scheduler - 2541007-52812.log +0 -1
  445. data/spec/support/logs/Scheduler - 2541091-56468.log +0 -1
  446. data/spec/support/logs/Scheduler - 2541109-54138.log +0 -1
  447. data/spec/support/logs/Scheduler - 2541188-17918.log +0 -1
  448. data/spec/support/logs/Scheduler - 2541267-2563.log +0 -3
  449. data/spec/support/logs/Scheduler - 2541430-40675.log +0 -3
  450. data/spec/support/logs/Scheduler - 2542341-38074.log +0 -3
  451. data/spec/support/logs/Scheduler - 2542519-33069.log +0 -3
  452. data/spec/support/logs/Scheduler - 2542610-41819.log +0 -4
  453. data/spec/support/logs/Scheduler - 2543168-3708.log +0 -1
  454. data/spec/support/logs/Scheduler - 2543332-46957.log +0 -4
  455. data/spec/support/logs/Scheduler - 2543334-53034.log +0 -1
  456. data/spec/support/logs/Scheduler - 2543419-31038.log +0 -1
  457. data/spec/support/logs/Scheduler - 2543666-10481.log +0 -16
  458. data/spec/support/logs/Scheduler - 2544245-30154.log +0 -4
  459. data/spec/support/logs/Scheduler - 2544487-8113.log +0 -1
  460. data/spec/support/logs/Scheduler - 2544490-62030.log +0 -1
  461. data/spec/support/logs/Scheduler - 2544494-64830.log +0 -1
  462. data/spec/support/logs/Scheduler - 2544585-3507.log +0 -1
  463. data/spec/support/logs/Scheduler - 2544668-24185.log +0 -3
  464. data/spec/support/logs/Scheduler - 2544762-17601.log +0 -1
  465. data/spec/support/logs/Scheduler - 2544790-32379.log +0 -1
  466. data/spec/support/logs/Scheduler - 2544853-35518.log +0 -1
  467. data/spec/support/logs/Scheduler - 2544939-52525.log +0 -1
  468. data/spec/support/logs/Scheduler - 2545016-12352.log +0 -1
  469. data/spec/support/logs/Scheduler - 2545096-63001.log +0 -1
  470. data/spec/support/logs/Scheduler - 2545178-63647.log +0 -2
  471. data/spec/support/logs/Scheduler - 361893-37255.log +0 -3
  472. data/spec/support/logs/Scheduler - 361897-36090.log +0 -1
  473. data/spec/support/logs/Scheduler - 361928-44834.log +0 -3
  474. data/spec/support/logs/Scheduler - 361937-43013.log +0 -6
  475. data/spec/support/logs/Scheduler - 361956-8533.log +0 -4
  476. data/spec/support/logs/Scheduler - 361968-31856.log +0 -1
  477. data/spec/support/logs/Scheduler - 361988-37894.log +0 -3
  478. data/spec/support/logs/Scheduler - 361997-31525.log +0 -6
  479. data/spec/support/logs/Scheduler - 362037-51907.log +0 -3
  480. data/spec/support/logs/Scheduler - 362047-32758.log +0 -6
  481. data/spec/support/logs/Scheduler - 362072-11620.log +0 -3
  482. data/spec/support/logs/Scheduler - 362080-16797.log +0 -6
  483. data/spec/support/logs/Scheduler - 362124-56230.log +0 -4
  484. data/spec/support/logs/Scheduler - 362137-4746.log +0 -6
  485. data/spec/support/logs/Scheduler - 362176-32691.log +0 -1
  486. data/spec/support/logs/Scheduler - 362179-34759.log +0 -1
  487. data/spec/support/logs/Scheduler - 362183-18758.log +0 -1
  488. data/spec/support/logs/Scheduler - 362187-63438.log +0 -1
  489. data/spec/support/logs/Scheduler - 362189-35075.log +0 -1
  490. data/spec/support/logs/Scheduler - 362191-34351.log +0 -1
  491. data/spec/support/logs/Scheduler - 362195-51436.log +0 -3
  492. data/spec/support/logs/Scheduler - 362199-46884.log +0 -6
  493. data/spec/support/logs/Scheduler - 362214-12351.log +0 -4
  494. data/spec/support/logs/Scheduler - 362230-12723.log +0 -1
  495. data/spec/support/logs/Scheduler - 362233-16133.log +0 -1
  496. data/spec/support/logs/Scheduler - 362238-51103.log +0 -1
  497. data/spec/support/logs/Scheduler - 362241-20441.log +0 -1
  498. data/spec/support/logs/Scheduler - 362245-40692.log +0 -3
  499. data/spec/support/logs/Scheduler - 362251-40074.log +0 -1
  500. data/spec/support/logs/Scheduler - 362253-48087.log +0 -1
  501. data/spec/support/logs/Scheduler - 362256-27112.log +0 -1
  502. data/spec/support/logs/Scheduler - 363208-60869.log +0 -16
  503. data/spec/support/logs/Scheduler - 363231-35883.log +0 -6
  504. data/spec/support/logs/Scheduler - 363240-11734.log +0 -6
  505. data/spec/support/logs/Scheduler - 363252-28394.log +0 -6
  506. data/spec/support/logs/Scheduler - 363267-7527.log +0 -3
  507. data/spec/support/logs/Scheduler - 363282-48049.log +0 -5
  508. data/spec/support/logs/Scheduler - 363298-26965.log +0 -1
  509. data/spec/support/logs/Scheduler - 363300-14973.log +0 -1
  510. data/spec/support/logs/Scheduler - 363302-58216.log +0 -1
  511. data/spec/support/logs/Scheduler - 363304-11942.log +0 -1
  512. data/spec/support/logs/Scheduler - 363306-39654.log +0 -1
  513. data/spec/support/logs/Scheduler - 363308-57479.log +0 -1
  514. data/spec/support/logs/Scheduler - 363310-32176.log +0 -1
  515. data/spec/support/logs/Scheduler - 363314-13492.log +0 -3
  516. data/spec/support/logs/Scheduler - 363322-38112.log +0 -3
  517. data/spec/support/logs/Scheduler - 363342-26080.log +0 -3
  518. data/spec/support/logs/Scheduler - 363349-51783.log +0 -3
  519. data/spec/support/logs/Scheduler - 363355-44132.log +0 -4
  520. data/spec/support/logs/Scheduler - 363367-12160.log +0 -1
  521. data/spec/support/logs/Scheduler - 363372-28520.log +0 -4
  522. data/spec/support/logs/Scheduler - 363376-58951.log +0 -1
  523. data/spec/support/logs/Scheduler - 363379-28506.log +0 -1
  524. data/spec/support/logs/Scheduler - 363384-63536.log +0 -16
  525. data/spec/support/logs/Scheduler - 363402-32695.log +0 -4
  526. data/spec/support/logs/Scheduler - 363412-41984.log +0 -1
  527. data/spec/support/logs/Scheduler - 363414-47366.log +0 -1
  528. data/spec/support/logs/Scheduler - 363416-44368.log +0 -1
  529. data/spec/support/logs/Scheduler - 363427-29292.log +0 -1
  530. data/spec/support/logs/Scheduler - 363431-44605.log +0 -3
  531. data/spec/support/logs/Scheduler - 363439-19754.log +0 -1
  532. data/spec/support/logs/Scheduler - 363441-5762.log +0 -1
  533. data/spec/support/logs/Scheduler - 363443-4000.log +0 -1
  534. data/spec/support/logs/Scheduler - 363445-3688.log +0 -1
  535. data/spec/support/logs/Scheduler - 363448-50268.log +0 -1
  536. data/spec/support/logs/Scheduler - 363451-23104.log +0 -1
  537. data/spec/support/logs/Scheduler - 363454-12897.log +0 -2
  538. data/spec/support/logs/error-2487640.log +0 -797
  539. data/spec/support/logs/error-360075.log +0 -797
  540. data/spec/support/logs/output_spec_2487640.log +0 -390
  541. data/spec/support/logs/output_spec_360075.log +0 -390
  542. data/spec/support/reports/052f88c73e2d0ecf331dbf5fce0cfe3c.crf +0 -0
  543. data/spec/support/reports/06eed3dec98f5cd5fc717190a004d7d6.crf +0 -0
  544. data/spec/support/reports/0902bd88bbd4df462910007a3b5018fc.crf +0 -0
  545. data/spec/support/reports/10b9864e81e3204d1ef4940ccb88b0ed.crf +0 -0
  546. data/spec/support/reports/16b575097e68156416a84a6854d3719c.crf +0 -0
  547. data/spec/support/reports/18be00bff4371738c7c7013b284b415b.crf +0 -0
  548. data/spec/support/reports/1ca39d410c2cf1f652eb8c320d6682bd.crf +0 -0
  549. data/spec/support/reports/266a09d73152ce2f3d2951f1dab133f3.crf +0 -0
  550. data/spec/support/reports/2929bee9c126b2695dc569b693fef574.crf +0 -0
  551. data/spec/support/reports/2abd611201d06f0428f1b060b32d4839.crf +0 -0
  552. data/spec/support/reports/2b9ce956f7060163d7a0b78603dc05ca.crf +0 -0
  553. data/spec/support/reports/3254163f76b8696fa1677d18eaefce1d.crf +0 -0
  554. data/spec/support/reports/3a9e6a4fa91c901550ff295cd21d69c9.crf +0 -0
  555. data/spec/support/reports/3de18cf956e18664f441f94ee05e4118.crf +0 -0
  556. data/spec/support/reports/46bcffd844008e71c7d90a76baf8597d.crf +0 -0
  557. data/spec/support/reports/482cf0d0c54cda32bedf59b2ea995c65.crf +0 -0
  558. data/spec/support/reports/502508e3a29966200350196c9859881c.crf +0 -0
  559. data/spec/support/reports/5f0d832d46338d69b4eab6ff91ba4c91.crf +0 -0
  560. data/spec/support/reports/620287cfdc373595385cf2471e1d4523.crf +0 -0
  561. data/spec/support/reports/65d71de3b0e8c9fe894f4ba36cb4cb7c.crf +0 -0
  562. data/spec/support/reports/6754d1f86dc5544abeca7f06314ebc56.crf +0 -0
  563. data/spec/support/reports/75fecdd5e006942292e02e6a223e7279.crf +0 -0
  564. data/spec/support/reports/77523cc87357aa220c33d5604f41b5a1.crf +0 -0
  565. data/spec/support/reports/7e137bd5f71e0f0fbb63b93b87dae160.crf +0 -0
  566. data/spec/support/reports/806a17c3a40cc22eafd77c97b64a2ed5.crf +0 -0
  567. data/spec/support/reports/81cca53163bbab5ccf4d5f0401d5adcd.crf +0 -0
  568. data/spec/support/reports/8745ae8fdeb76289765582e06a614940.crf +0 -0
  569. data/spec/support/reports/8c204ee129fe1bd6c5964d29ae5d03ae.crf +0 -0
  570. data/spec/support/reports/937b7b516a99c23b2a641bb7bf695bfa.crf +0 -0
  571. data/spec/support/reports/a6eb89ffb1c33e33e421c1ee393118a9.crf +0 -0
  572. data/spec/support/reports/aadbf2c1544b0e11174853fb4883a38c.crf +0 -0
  573. data/spec/support/reports/c360a2833f3e635e69036916010edeac.crf +0 -0
  574. data/spec/support/reports/cf0bfb4662a7cda85db324936aae93d3.crf +0 -0
  575. data/spec/support/reports/cfff9c06897e28b1d58977909738c78f.crf +0 -0
  576. data/spec/support/reports/e2d5321452359193a758f7d16879ddb3.crf +0 -0
  577. data/spec/support/reports/e58c9dfbf9edeca9adcf32af5940f16b.crf +0 -0
  578. data/spec/support/reports/e96abea937f1ed3f89bc2ec5397522db.crf +0 -0
  579. data/spec/support/reports/ec0bbd4424061d305963551a29d20e1a.crf +0 -0
  580. data/spec/support/reports/f16e679098a4411d4f1c123abcd6af90.crf +0 -0
  581. data/spec/support/reports/f2c155087c431851979048ceb8c46ec9.crf +0 -0
  582. data/spec/support/reports/f3d542036e17f6a66e11bfacb2fb3366.crf +0 -0
  583. data/spec/support/reports/f72c7feef5f9c7e29f423c3ebe345ef3.crf +0 -0
  584. data/spec/support/reports/fbb2e1c6ada2d4b8229488f4c4b3b418.crf +0 -0
  585. data/spec/support/snapshots/Cuboid 2022-02-01 13_10_28 +0200 a06d9bd7db81b1b4fb077ceadcc3895f.csf +0 -0
  586. data/spec/support/snapshots/Cuboid 2022-02-01 13_10_42 +0200 36c68859faf144eed9ff9c01ae754217.csf +0 -0
  587. data/spec/support/snapshots/Cuboid 2022-02-01 13_24_42 +0200 5a26112a913330ee8763b2982a4d42df.csf +0 -0
  588. data/spec/support/snapshots/Cuboid 2022-02-01 13_24_56 +0200 190856b98ac7099eb553ed3abcfbcb87.csf +0 -0
  589. data/spec/support/snapshots/Cuboid 2022-02-01 13_35_35 +0200 a9c5229b459119840c9428a3d7c54eb1.csf +0 -0
  590. data/spec/support/snapshots/Cuboid 2022-02-01 13_35_41 +0200 d93660f71110dad136cea0758b29611e.csf +0 -0
  591. data/spec/support/snapshots/Cuboid 2022-02-01 13_35_44 +0200 57083dafa382f1d21cc33c9740861c88.csf +0 -0
  592. data/spec/support/snapshots/Cuboid 2022-02-13 07_55_57 +0200 6ff9bf8c7b70ad85fa3a42f44bcc5fe7.csf +0 -0
  593. data/spec/support/snapshots/Cuboid 2022-02-13 07_56_12 +0200 2852d4e0ae04e5365301703913f7b763.csf +0 -0
  594. data/spec/support/snapshots/Cuboid 2022-02-13 08_06_41 +0200 54da34dab4b059de90cdd33d400ccac7.csf +0 -0
  595. data/spec/support/snapshots/Cuboid 2022-02-13 08_06_46 +0200 61596bdce9318446373d1b736ec7c9ce.csf +0 -0
  596. data/spec/support/snapshots/Cuboid 2022-02-13 08_06_48 +0200 19030be87532ece27c1d4791a8d3229c.csf +0 -0
  597. data/spec/support/snapshots/Cuboid 2022-02-20 03_01_57 +0200 b03ce79d889c60483385e77cda635831.csf +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: d9533dcacaf536c3e740993765c95acabe9ef309f47d0ee9abb17057ba20248b
4
- data.tar.gz: 0557ca182b8e7589dca448e75062bb87444716b6d36a7fc257d678efa7090fd3
3
+ metadata.gz: 99e1706f1f0771fae638ae08cf525077625255e3cda4b3a7ff09e92c192692c4
4
+ data.tar.gz: 77f486938d14f0f579ccc1d4f3f20d0265c9d3590095912c667d28494f03013b
5
5
  SHA512:
6
- metadata.gz: 1ceff653d44e2821fc770c6913fdb9de57922bbfd809f20a043746c2c0f2ddb0103f1e77156bdb103047fd6770bf2968ad9183db5ad6bf623a73965848082be1
7
- data.tar.gz: 96d5ef822e102d7200068b192ef9f4817d0dba02e4d01a8f83c6aaafa7d2d997947d170bee8a344d61f7657ad29d54e0d7b30042c874e5f53dade035a20f15c2
6
+ metadata.gz: 5bc2b6bcb5d51deeccb44292aebacca47ded75cafeff7085bb9c645a108a58524b6c0a89bd11f521ec58a7e40cf3565d1d4a3c8dd5e9b8fe880a2226c2575209
7
+ data.tar.gz: 031e775efa26dfc56d51823e8b00687a7727a72c22c78ab76b126b8fb2b7dc94ae79ea595a23eba3b2c1a121b08237adb542d4ad1783f38d9ebe4933eb7bb2ea
data/CHANGELOG.md CHANGED
@@ -1,7 +1,10 @@
1
+ # 0.1.5
2
+
3
+ * Fixed relative path for application source path detection.
4
+
1
5
  # 0.1.3
2
6
 
3
- * `Agent`
4
- * `#preferred`, `#spawn`: Replaced `load_balance` override with `direct` `strategy`.
7
+ * Replaced `Arachni::RPC` and `Arachni::Reactor` with `Toq` and `Raktr`.
5
8
 
6
9
  # 0.1.0
7
10
 
data/README.md CHANGED
@@ -30,7 +30,7 @@ It offers:
30
30
  * Developer freedom.
31
31
  * Apart from keeping _Data_ and _State_ separate not many other rules to follow.
32
32
  * Only if interested in _suspensions_ and can also be left to the last minute
33
- if necessary -- in cases of Ractor enforced isolation for example.
33
+ if necessary -- in cases of `Ractor` enforced isolation for example.
34
34
 
35
35
  ## Entities
36
36
 
@@ -144,7 +144,7 @@ _**Application**_ itself.
144
144
 
145
145
  A simple RPC is employed, specs for 3rd party implementations can be found at:
146
146
 
147
- https://github.com/Arachni/arachni-rpc/wiki
147
+ https://github.com/toq/arachni-rpc/wiki
148
148
 
149
149
  Each _**Application**_ can extend upon this and expose an API via its _**Instance**_'s
150
150
  RPC interface.
@@ -162,13 +162,130 @@ service's interface.
162
162
 
163
163
  ## Examples
164
164
 
165
- See `examples/`.
166
-
167
165
  ### MyApp
168
166
 
169
167
  Tutorial application going over different APIs and **Cuboid** _**Application**_
170
168
  options and specification.
171
169
 
170
+ See `examples/my_app`.
171
+
172
+ ### Parallel code on same host
173
+
174
+ To run code in parallel on the same machine utilising multiple cores, with each
175
+ instance isolated to its own process, you can use something like the following:
176
+
177
+ ```ruby
178
+ require 'cuboid'
179
+
180
+ class Sleeper < Cuboid::Application
181
+
182
+ def run
183
+ sleep options['time']
184
+ end
185
+
186
+ end
187
+
188
+ return if $0 != __FILE__
189
+
190
+ sleepers = []
191
+ sleepers << Sleeper.spawn( :instance )
192
+ sleepers << Sleeper.spawn( :instance )
193
+ sleepers << Sleeper.spawn( :instance )
194
+
195
+ sleepers.each do |sleeper|
196
+ sleeper.run( time: 5 )
197
+ end
198
+
199
+ sleep 0.1 while sleepers.map(&:busy?).include?( true )
200
+ ```
201
+
202
+ time bundle exec ruby same_host.rb
203
+ [...]
204
+ real 0m6,506s
205
+ user 0m0,423s
206
+ sys 0m0,063s
207
+
208
+ ### Parallel code on different hosts
209
+
210
+ In this example we'll be using `Agents` to spawn instances from 3 different hosts.
211
+
212
+ #### Host 1
213
+
214
+ ```ruby
215
+ require 'cuboid'
216
+
217
+ class Sleeper < Cuboid::Application
218
+
219
+ def run
220
+ sleep options['time']
221
+ end
222
+
223
+ end
224
+
225
+ return if $0 != __FILE__
226
+
227
+ Sleeper.spawn( :agent, port: 7331 )
228
+ ```
229
+
230
+ bundle exec ruby multiple_hosts_1.rb
231
+
232
+ #### Host 2
233
+
234
+ ```ruby
235
+ require 'cuboid'
236
+
237
+ class Sleeper < Cuboid::Application
238
+
239
+ def run
240
+ sleep options['time']
241
+ end
242
+
243
+ end
244
+
245
+ return if $0 != __FILE__
246
+
247
+ Sleeper.spawn( :agent, port: 7332, peer: 'host1:7331' )
248
+ ```
249
+
250
+ bundle exec ruby multiple_hosts_2.rb
251
+
252
+ #### Host 3
253
+
254
+ ```ruby
255
+ require 'cuboid'
256
+
257
+ class Sleeper < Cuboid::Application
258
+
259
+ def run
260
+ sleep options['time']
261
+ end
262
+
263
+ end
264
+
265
+ return if $0 != __FILE__
266
+
267
+ grid_agent = Sleeper.spawn( :agent, port: 7333, peer: 'host1:7331' )
268
+
269
+ sleepers = []
270
+ 3.times do
271
+ connection_info = grid_agent.spawn
272
+ sleepers << Sleeper.connect( connection_info )
273
+ end
274
+
275
+ sleepers.each do |sleeper|
276
+ sleeper.run( time: 5 )
277
+ end
278
+
279
+ sleep 0.1 while sleepers.map(&:busy?).include?( true )
280
+ ```
281
+
282
+ time bundle exec ruby multiple_hosts_3.rb
283
+ real 0m7,318s
284
+ user 0m0,426s
285
+ sys 0m0,091s
286
+
287
+
288
+ _You can replace `host1` with `localhost` and run all examples on the same terminal._
172
289
 
173
290
  ## License
174
291
 
data/Rakefile CHANGED
@@ -55,3 +55,20 @@ task :clean do
55
55
  files.each { |file| puts " * #{file}" }
56
56
  FileUtils.rm files
57
57
  end
58
+
59
+ desc 'Build the gem.'
60
+ task build: [ :clean ] do
61
+ sh "gem build cuboid.gemspec"
62
+ end
63
+
64
+ desc 'Build and install the gem.'
65
+ task install: [ :build ] do
66
+ sh "gem install cuboid-#{Cuboid::VERSION}.gem"
67
+ end
68
+
69
+ desc 'Push a new version to Rubygems'
70
+ task publish: [ :build ] do
71
+ sh "git tag -a v#{Cuboid::VERSION} -m 'Version #{Cuboid::VERSION}'"
72
+ sh "gem push cuboid-#{Cuboid::VERSION}.gem"
73
+ end
74
+ task release: [ :publish ]
data/cuboid.gemspec CHANGED
@@ -53,7 +53,7 @@ Gem::Specification.new do |s|
53
53
  s.add_dependency 'sinatra-contrib', '2.1.0'
54
54
 
55
55
  # RPC client/server implementation.
56
- s.add_dependency 'arachni-rpc', '~> 0.2.1.4'
56
+ s.add_dependency 'toq', '~> 0.0.1'
57
57
 
58
58
  s.add_dependency 'vmstat', '2.3.0'
59
59
  s.add_dependency 'sys-proctable', '1.1.5'
@@ -174,7 +174,7 @@ class Application
174
174
  last_const = last_const.const_get( const_name.to_sym )
175
175
  end
176
176
 
177
- last_const.const_source_location( app.to_sym ).first
177
+ File.expand_path last_const.const_source_location( app.to_sym ).first
178
178
  end
179
179
 
180
180
  def spawn( type, options = {}, &block )
@@ -25,7 +25,7 @@ class Agents
25
25
  #
26
26
  # @return [RPC::Client::Agent]
27
27
  def connect( url, options = nil )
28
- Arachni::Reactor.global.run_in_thread if !Arachni::Reactor.global.running?
28
+ Raktr.global.run_in_thread if !Raktr.global.running?
29
29
 
30
30
  fresh = false
31
31
  if options
@@ -1,5 +1,5 @@
1
1
  require Options.paths.lib + 'rpc/server/agent'
2
2
 
3
- Arachni::Reactor.global.run do
3
+ Raktr.global.run do
4
4
  RPC::Server::Agent.new
5
5
  end
@@ -1,5 +1,5 @@
1
1
  require Options.paths.lib + 'rpc/server/scheduler'
2
2
 
3
- Arachni::Reactor.global.run do
3
+ Raktr.global.run do
4
4
  RPC::Server::Scheduler.new
5
5
  end
@@ -28,7 +28,7 @@ class Instances
28
28
  # @return [RPC::Client::Instance]
29
29
  #
30
30
  def connect( url, token = nil )
31
- Arachni::Reactor.global.run_in_thread if !Arachni::Reactor.global.running?
31
+ Raktr.global.run_in_thread if !Raktr.global.running?
32
32
 
33
33
  token ||= @list[url]
34
34
  @list[url] ||= token
@@ -1,5 +1,5 @@
1
1
  require 'singleton'
2
- require 'arachni/reactor'
2
+ require 'raktr'
3
3
 
4
4
  module Cuboid
5
5
  module Processes
@@ -156,7 +156,7 @@ class Manager
156
156
 
157
157
  # Stops the Reactor.
158
158
  def kill_reactor
159
- Arachni::Reactor.stop
159
+ Raktr.stop
160
160
  rescue
161
161
  nil
162
162
  end
@@ -23,7 +23,7 @@ class Schedulers
23
23
  #
24
24
  # @return [RPC::Client::Scheduler]
25
25
  def connect( url, options = nil )
26
- Arachni::Reactor.global.run_in_thread if !Arachni::Reactor.global.running?
26
+ Raktr.global.run_in_thread if !Raktr.global.running?
27
27
 
28
28
  fresh = false
29
29
  if options
@@ -16,14 +16,14 @@ class Agent
16
16
 
17
17
  def initialize( url, options = nil )
18
18
  @client = Base.new( url, nil, options )
19
- @node = Arachni::RPC::Proxy.new( @client, 'node' )
19
+ @node = Toq::Proxy.new( @client, 'node' )
20
20
 
21
21
  Cuboid::Application.application.agent_services.keys.each do |name|
22
22
  self.class.send( :attr_reader, name.to_sym )
23
23
 
24
24
  instance_variable_set(
25
25
  "@#{name}".to_sym,
26
- Arachni::RPC::Proxy.new( @client, name )
26
+ Toq::Proxy.new( @client, name )
27
27
  )
28
28
  end
29
29
  end
@@ -1,4 +1,4 @@
1
- require 'arachni/rpc'
1
+ require 'toq'
2
2
  require_relative '../serializer'
3
3
 
4
4
  module Cuboid
@@ -6,7 +6,7 @@ module RPC
6
6
  class Client
7
7
 
8
8
  # @author Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
9
- class Base < Arachni::RPC::Client
9
+ class Base < Toq::Client
10
10
  attr_reader :url
11
11
 
12
12
  # @param [String] url
@@ -5,7 +5,7 @@ class Client
5
5
  class Instance
6
6
 
7
7
  # @author Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
8
- class Proxy < Arachni::RPC::Proxy
8
+ class Proxy < Toq::Proxy
9
9
 
10
10
  def initialize( client )
11
11
  super client, 'instance'
@@ -24,10 +24,10 @@ class Instance
24
24
  )
25
25
 
26
26
  client = new( url, token, options )
27
- Arachni::Reactor.global.delay( 0.1 ) do |task|
27
+ Raktr.global.delay( 0.1 ) do |task|
28
28
  client.alive? do |r|
29
29
  if r.rpc_exception?
30
- Arachni::Reactor.global.delay( 0.1, &task )
30
+ Raktr.global.delay( 0.1, &task )
31
31
  next
32
32
  end
33
33
 
@@ -45,7 +45,7 @@ class Instance
45
45
  @client = Base.new( url, token, options )
46
46
 
47
47
  @instance = Proxy.new( @client )
48
- @options = Arachni::RPC::Proxy.new( @client, 'options' )
48
+ @options = Toq::Proxy.new( @client, 'options' )
49
49
 
50
50
  # map Agent handlers
51
51
  Cuboid::Application.application.instance_services.keys.each do |name|
@@ -53,7 +53,7 @@ class Instance
53
53
 
54
54
  instance_variable_set(
55
55
  "@#{name}".to_sym,
56
- Arachni::RPC::Proxy.new( @client, name )
56
+ Toq::Proxy.new( @client, name )
57
57
  )
58
58
  end
59
59
  end
@@ -65,7 +65,7 @@ class Server::Agent::Node
65
65
 
66
66
  log_updated_peers
67
67
 
68
- Arachni::Reactor.global.at_interval( @options.agent.ping_interval ) do
68
+ Raktr.global.at_interval( @options.agent.ping_interval ) do
69
69
  ping
70
70
  check_for_comebacks
71
71
  end
@@ -152,7 +152,7 @@ class Server::Agent::Node
152
152
  block.call( @nodes_info_cache )
153
153
  end
154
154
 
155
- Arachni::Reactor.global.create_iterator( peers ).map( each, after )
155
+ Raktr.global.create_iterator( peers ).map( each, after )
156
156
  else
157
157
  block.call( @nodes_info_cache )
158
158
  end
@@ -45,7 +45,7 @@ class Server::Agent::Service
45
45
  # Performs an asynchronous map operation over all running instances.
46
46
  #
47
47
  # @param [Proc] each
48
- # Block to be passed {Client::Instance} and `Arachni::Reactor::Iterator`.
48
+ # Block to be passed {Client::Instance} and `Raktr::Iterator`.
49
49
  # @param [Proc] after
50
50
  # Block to be passed the Array of results.
51
51
  def map_instances( each, after )
@@ -58,7 +58,7 @@ class Server::Agent::Service
58
58
  # Performs an asynchronous iteration over all running instances.
59
59
  #
60
60
  # @param [Proc] block
61
- # Block to be passed {Client::Instance} and `Arachni::Reactor::Iterator`.
61
+ # Block to be passed {Client::Instance} and `Raktr::Iterator`.
62
62
  def each_instance( &block )
63
63
  wrap = proc do |instance, iterator|
64
64
  block.call( connect_to_instance( instance ), iterator )
@@ -88,15 +88,15 @@ class Server::Agent::Service
88
88
  #
89
89
  # @param [Block] block
90
90
  def run_asap( &block )
91
- Arachni::Reactor.global.next_tick( &block )
91
+ Raktr.global.next_tick( &block )
92
92
  end
93
93
 
94
94
  # @param [Array] list
95
95
  #
96
- # @return [Arachni::Reactor::Iterator]
96
+ # @return [Raktr::Iterator]
97
97
  # Iterator for the provided array.
98
98
  def iterator_for( list, max_concurrency = 10 )
99
- Arachni::Reactor.global.create_iterator( list, max_concurrency )
99
+ Raktr.global.create_iterator( list, max_concurrency )
100
100
  end
101
101
 
102
102
  # @return [Array<Hash>]
@@ -137,7 +137,7 @@ class Agent
137
137
  block.call nodes.sort_by { |_, score| adjust_score_by_strategy.call score }[0][0]
138
138
  end
139
139
 
140
- Arachni::Reactor.global.create_iterator( @node.peers ).map( each, after )
140
+ Raktr.global.create_iterator( @node.peers ).map( each, after )
141
141
  end
142
142
 
143
143
  # Spawns an {Instance}.
@@ -294,7 +294,7 @@ class Agent
294
294
 
295
295
  # Starts the agent's server
296
296
  def run
297
- Arachni::Reactor.global.on_error do |_, e|
297
+ Raktr.global.on_error do |_, e|
298
298
  print_error "Reactor: #{e}"
299
299
 
300
300
  e.backtrace.each do |l|
@@ -314,7 +314,7 @@ class Agent
314
314
  def shutdown
315
315
  Thread.new do
316
316
  print_status 'Shutting down...'
317
- Arachni::Reactor.global.stop
317
+ Raktr.global.stop
318
318
  end
319
319
  end
320
320
 
@@ -1,5 +1,5 @@
1
1
  require 'ostruct'
2
- require 'arachni/rpc'
2
+ require 'toq'
3
3
  require_relative '../serializer'
4
4
 
5
5
  module Cuboid
@@ -10,7 +10,7 @@ class Server
10
10
  #
11
11
  # @private
12
12
  # @author Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
13
- class Base < Arachni::RPC::Server
13
+ class Base < Toq::Server
14
14
 
15
15
  # @param [Hash] options
16
16
  # @option options [Integer] :host
@@ -74,7 +74,7 @@ class Instance
74
74
  trap( signal ){ shutdown if !@options.datastore.do_not_trap }
75
75
  end
76
76
 
77
- Arachni::Reactor.global.run do
77
+ Raktr.global.run do
78
78
  _run
79
79
  end
80
80
  end
@@ -123,7 +123,7 @@ class Instance
123
123
  #
124
124
  # @see #report
125
125
  def abort_and_generate_report
126
- @application.clean_up
126
+ @application.abort!
127
127
  generate_report
128
128
  end
129
129
 
@@ -294,7 +294,7 @@ class Instance
294
294
 
295
295
  # Starts RPC service.
296
296
  def _run
297
- Arachni::Reactor.global.on_error do |_, e|
297
+ Raktr.global.on_error do |_, e|
298
298
  print_error "Reactor: #{e}"
299
299
 
300
300
  e.backtrace.each do |l|
@@ -399,7 +399,7 @@ class Scheduler
399
399
  end
400
400
 
401
401
  def reactor
402
- Arachni::Reactor.global
402
+ Raktr.global
403
403
  end
404
404
 
405
405
  def trap_interrupts( &block )
data/lib/version CHANGED
@@ -1 +1 @@
1
- 0.1.3
1
+ 0.1.6
@@ -204,7 +204,7 @@ describe Cuboid::Rest::Server do
204
204
  post url, invalid: 'blah'
205
205
 
206
206
  expect(response_code).to eq 500
207
- expect(response_data['error']).to eq 'Arachni::RPC::Exceptions::RemoteException'
207
+ expect(response_data['error']).to eq 'Toq::Exceptions::RemoteException'
208
208
  expect(response_data).to include 'backtrace'
209
209
  end
210
210
 
@@ -658,7 +658,7 @@ describe Cuboid::Rest::Server do
658
658
  it 'returns 500' do
659
659
  put url, 'localhost:383838'
660
660
  expect(response_code).to eq 500
661
- expect(response_data['error']).to eq 'Arachni::RPC::Exceptions::ConnectionError'
661
+ expect(response_data['error']).to eq 'Toq::Exceptions::ConnectionError'
662
662
  end
663
663
  end
664
664
  end
@@ -821,7 +821,7 @@ describe Cuboid::Rest::Server do
821
821
  post url, invalid: 'blah'
822
822
 
823
823
  expect(response_code).to eq 500
824
- expect(response_data['error']).to eq 'Arachni::RPC::Exceptions::RemoteException'
824
+ expect(response_data['error']).to eq 'Toq::Exceptions::RemoteException'
825
825
  expect(response_data).to include 'backtrace'
826
826
  end
827
827
  end
@@ -876,7 +876,7 @@ describe Cuboid::Rest::Server do
876
876
  put url, 'localhost:393939'
877
877
 
878
878
  expect(response_code).to eq 500
879
- expect(response_data['error']).to eq 'Arachni::RPC::Exceptions::ConnectionError'
879
+ expect(response_data['error']).to eq 'Toq::Exceptions::ConnectionError'
880
880
  expect(response_data['description']).to include 'Connection closed'
881
881
  end
882
882
  end
@@ -992,7 +992,7 @@ describe Cuboid::Rest::Server do
992
992
 
993
993
  get url
994
994
  expect(response_data.size).to be 1
995
- expect(response_data[@id]['error']).to eq 'Arachni::RPC::Exceptions::ConnectionError'
995
+ expect(response_data[@id]['error']).to eq 'Toq::Exceptions::ConnectionError'
996
996
  expect(response_data[@id]['description']).to include 'Connection closed [Connection refused - connect(2) for'
997
997
  end
998
998
  end
@@ -18,7 +18,7 @@ class Server
18
18
  end
19
19
 
20
20
  def start
21
- Arachni::Reactor.global.run_in_thread if !Arachni::Reactor.global.running?
21
+ Raktr.global.run_in_thread if !Raktr.global.running?
22
22
  @server.start
23
23
  sleep( 0.1 ) while !@server.ready?
24
24
  end
@@ -86,7 +86,7 @@ describe Cuboid::RPC::Client::Base do
86
86
  begin
87
87
  client = described_class.new( server.url, nil, client_ssl_options )
88
88
  client.call( "foo.bar" )
89
- rescue Arachni::RPC::Exceptions::ConnectionError
89
+ rescue Toq::Exceptions::ConnectionError
90
90
  raised = true
91
91
  end
92
92
 
@@ -102,7 +102,7 @@ describe Cuboid::RPC::Client::Base do
102
102
  begin
103
103
  client = described_class.new( server.url, nil, empty_options )
104
104
  client.call( "foo.bar" )
105
- rescue Arachni::RPC::Exceptions::ConnectionError
105
+ rescue Toq::Exceptions::ConnectionError
106
106
  raised = true
107
107
  end
108
108
 
@@ -137,7 +137,7 @@ describe Cuboid::RPC::Client::Base do
137
137
  begin
138
138
  client = described_class.new( server.url, nil, empty_options )
139
139
  client.call( "foo.bar" )
140
- rescue Arachni::RPC::Exceptions::InvalidToken
140
+ rescue Toq::Exceptions::InvalidToken
141
141
  raised = true
142
142
  end
143
143
 
@@ -16,7 +16,7 @@ describe Cuboid::RPC::Client::Instance do
16
16
  it 'should fail to connect' do
17
17
  expect do
18
18
  described_class.new( subject.url, 'blah' ).alive?
19
- end.to raise_error Arachni::RPC::Exceptions::InvalidToken
19
+ end.to raise_error Toq::Exceptions::InvalidToken
20
20
  end
21
21
  end
22
22
  end
@@ -11,11 +11,11 @@ describe Cuboid::RPC::Server::Agent::Node do
11
11
  c = Cuboid::RPC::Client::Base.new(
12
12
  "#{Cuboid::Options.rpc.server_address}:#{port}"
13
13
  )
14
- c = Arachni::RPC::Proxy.new( c, 'node' )
14
+ c = Toq::Proxy.new( c, 'node' )
15
15
 
16
16
  begin
17
17
  c.alive?
18
- rescue Arachni::RPC::Exceptions::ConnectionError
18
+ rescue Toq::Exceptions::ConnectionError
19
19
  sleep 0.1
20
20
  retry
21
21
  end
@@ -3,7 +3,7 @@ require "#{Cuboid::Options.paths.lib}/rpc/server/base"
3
3
 
4
4
  describe Cuboid::RPC::Server::Base do
5
5
  before( :each ) do
6
- Arachni::Reactor.global.run_in_thread
6
+ Raktr.global.run_in_thread
7
7
  end
8
8
 
9
9
  let(:subject) { Cuboid::RPC::Server::Base.new(
@@ -11,7 +11,7 @@ describe Cuboid::RPC::Server::Base do
11
11
  ) }
12
12
  let(:port) { available_port }
13
13
 
14
- it 'supports UNIX sockets', if: Arachni::Reactor.supports_unix_sockets? do
14
+ it 'supports UNIX sockets', if: Raktr.supports_unix_sockets? do
15
15
  server = Cuboid::RPC::Server::Base.new(
16
16
  socket: "#{Dir.tmpdir}/cuboid-base-#{Cuboid::Utilities.generate_token}"
17
17
  )
@@ -5,7 +5,7 @@ require "#{fixtures_path}/mock_app"
5
5
  describe 'Cuboid::RPC::Server::Instance' do
6
6
  let(:subject) { instance_spawn( application: "#{fixtures_path}/mock_app.rb" ) }
7
7
 
8
- it 'supports UNIX sockets', if: Arachni::Reactor.supports_unix_sockets? do
8
+ it 'supports UNIX sockets', if: Raktr.supports_unix_sockets? do
9
9
  socket = "#{Dir.tmpdir}/cuboid-instance-#{Cuboid::Utilities.generate_token}"
10
10
  subject = instance_spawn(
11
11
  socket: socket,
@@ -203,7 +203,9 @@ describe 'Cuboid::RPC::Server::Instance' do
203
203
  end
204
204
 
205
205
  describe '#abort_and_generate_report' do
206
- it "cleans-up and returns the report as #{Cuboid::Report}" do
206
+ it "cleans-up and returns the report as #{Cuboid::Report}", focus: true do
207
+ subject.run
208
+
207
209
  expect(subject.abort_and_generate_report).to be_kind_of Cuboid::Report
208
210
  end
209
211
  end
@@ -239,7 +241,7 @@ describe 'Cuboid::RPC::Server::Instance' do
239
241
  it 'raises ArgumentError' do
240
242
  expect do
241
243
  subject.run invalid: :stuff
242
- end.to raise_error Arachni::RPC::Exceptions::RemoteException
244
+ end.to raise_error Toq::Exceptions::RemoteException
243
245
  end
244
246
  end
245
247
 
@@ -300,7 +302,7 @@ describe 'Cuboid::RPC::Server::Instance' do
300
302
  expect(subject.shutdown).to be_truthy
301
303
  sleep 4
302
304
 
303
- expect { subject.alive? }.to raise_error Arachni::RPC::Exceptions::ConnectionError
305
+ expect { subject.alive? }.to raise_error Toq::Exceptions::ConnectionError
304
306
  end
305
307
  end
306
308