cuboid 0.1.2 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/README.md +121 -4
- data/Rakefile +17 -0
- data/cuboid.gemspec +1 -1
- data/lib/cuboid/application.rb +1 -1
- data/lib/cuboid/option_groups/agent.rb +1 -1
- data/lib/cuboid/processes/agents.rb +1 -1
- data/lib/cuboid/processes/executables/agent.rb +1 -1
- data/lib/cuboid/processes/executables/scheduler.rb +1 -1
- data/lib/cuboid/processes/instances.rb +1 -1
- data/lib/cuboid/processes/manager.rb +2 -2
- data/lib/cuboid/processes/schedulers.rb +1 -1
- data/lib/cuboid/rpc/client/agent.rb +2 -2
- data/lib/cuboid/rpc/client/base.rb +2 -2
- data/lib/cuboid/rpc/client/instance/service.rb +1 -1
- data/lib/cuboid/rpc/client/instance.rb +4 -4
- data/lib/cuboid/rpc/server/agent/node.rb +2 -2
- data/lib/cuboid/rpc/server/agent/service.rb +5 -5
- data/lib/cuboid/rpc/server/agent.rb +18 -12
- data/lib/cuboid/rpc/server/base.rb +2 -2
- data/lib/cuboid/rpc/server/instance.rb +2 -2
- data/lib/cuboid/rpc/server/scheduler.rb +1 -1
- data/lib/version +1 -1
- data/spec/cuboid/rest/server_spec.rb +5 -5
- data/spec/cuboid/rpc/client/base_spec.rb +4 -4
- data/spec/cuboid/rpc/client/instance_spec.rb +1 -1
- data/spec/cuboid/rpc/server/{dispatcher → agent}/node_spec.rb +2 -2
- data/spec/cuboid/rpc/server/{dispatcher → agent}/service_spec.rb +0 -0
- data/spec/cuboid/rpc/server/{dispatcher_spec.rb → agent_spec.rb} +34 -26
- data/spec/cuboid/rpc/server/base_spec.rb +2 -2
- data/spec/cuboid/rpc/server/instance_spec.rb +3 -3
- data/spec/cuboid/rpc/server/scheduler_spec.rb +3 -3
- data/spec/support/fixtures/executables/node.rb +3 -3
- data/spec/support/fixtures/mock_app/test_service.rb +1 -1
- data/spec/support/fixtures/services/echo.rb +1 -1
- data/spec/support/lib/web_server_client.rb +4 -4
- data/spec/support/lib/web_server_dispatcher.rb +1 -1
- metadata +78 -870
- data/spec/support/logs/Agent - 2486896-44236.log +0 -6
- data/spec/support/logs/Agent - 2487229-16390.log +0 -6
- data/spec/support/logs/Agent - 2487520-2511.log +0 -6
- data/spec/support/logs/Agent - 2487522-24008.log +0 -6
- data/spec/support/logs/Agent - 2487526-3383.log +0 -6
- data/spec/support/logs/Agent - 2487528-23713.log +0 -10
- data/spec/support/logs/Agent - 2487530-42875.log +0 -10
- data/spec/support/logs/Agent - 2487533-14182.log +0 -10
- data/spec/support/logs/Agent - 2487535-32486.log +0 -10
- data/spec/support/logs/Agent - 2487537-30578.log +0 -10
- data/spec/support/logs/Agent - 2487539-65402.log +0 -10
- data/spec/support/logs/Agent - 2493974-23066.log +0 -6
- data/spec/support/logs/Agent - 2501716-11729.log +0 -6
- data/spec/support/logs/Agent - 2501724-48638.log +0 -6
- data/spec/support/logs/Agent - 2501961-60077.log +0 -6
- data/spec/support/logs/Agent - 2501976-10941.log +0 -10
- data/spec/support/logs/Agent - 2502050-45312.log +0 -10
- data/spec/support/logs/Agent - 2502131-45940.log +0 -10
- data/spec/support/logs/Agent - 2502139-59848.log +0 -10
- data/spec/support/logs/Agent - 2502262-46629.log +0 -10
- data/spec/support/logs/Agent - 2502298-28395.log +0 -10
- data/spec/support/logs/Agent - 2512541-7108.log +0 -6
- data/spec/support/logs/Agent - 2515458-5532.log +0 -6
- data/spec/support/logs/Agent - 2515540-20379.log +0 -6
- data/spec/support/logs/Agent - 2515550-19661.log +0 -6
- data/spec/support/logs/Agent - 2515626-34192.log +0 -6
- data/spec/support/logs/Agent - 2515879-19884.log +0 -6
- data/spec/support/logs/Agent - 2516130-27514.log +0 -6
- data/spec/support/logs/Agent - 2516382-45886.log +0 -6
- data/spec/support/logs/Agent - 2516390-47909.log +0 -6
- data/spec/support/logs/Agent - 2516465-47774.log +0 -6
- data/spec/support/logs/Agent - 2516546-13955.log +0 -6
- data/spec/support/logs/Agent - 2516561-39041.log +0 -6
- data/spec/support/logs/Agent - 2516710-46722.log +0 -6
- data/spec/support/logs/Agent - 2516795-56481.log +0 -16
- data/spec/support/logs/Agent - 2516798-22121.log +0 -14
- data/spec/support/logs/Agent - 2516881-21755.log +0 -10
- data/spec/support/logs/Agent - 2517053-64944.log +0 -16
- data/spec/support/logs/Agent - 2517056-63364.log +0 -14
- data/spec/support/logs/Agent - 2517066-34797.log +0 -10
- data/spec/support/logs/Agent - 2518042-30794.log +0 -16
- data/spec/support/logs/Agent - 2518050-6895.log +0 -14
- data/spec/support/logs/Agent - 2518212-36288.log +0 -10
- data/spec/support/logs/Agent - 2518395-10976.log +0 -16
- data/spec/support/logs/Agent - 2518472-35419.log +0 -14
- data/spec/support/logs/Agent - 2518559-43119.log +0 -10
- data/spec/support/logs/Agent - 2518647-48415.log +0 -16
- data/spec/support/logs/Agent - 2518652-65099.log +0 -14
- data/spec/support/logs/Agent - 2518658-1449.log +0 -10
- data/spec/support/logs/Agent - 2518910-62083.log +0 -6
- data/spec/support/logs/Agent - 2518919-58035.log +0 -6
- data/spec/support/logs/Agent - 2519076-57825.log +0 -6
- data/spec/support/logs/Agent - 2519156-8413.log +0 -6
- data/spec/support/logs/Agent - 2519406-64200.log +0 -6
- data/spec/support/logs/Agent - 2519570-20841.log +0 -6
- data/spec/support/logs/Agent - 2519659-30136.log +0 -6
- data/spec/support/logs/Agent - 2519822-9736.log +0 -6
- data/spec/support/logs/Agent - 2520004-59217.log +0 -6
- data/spec/support/logs/Agent - 2520357-37723.log +0 -6
- data/spec/support/logs/Agent - 2521294-5229.log +0 -16
- data/spec/support/logs/Agent - 2521407-31515.log +0 -14
- data/spec/support/logs/Agent - 2521415-7501.log +0 -10
- data/spec/support/logs/Agent - 2521452-22168.log +0 -16
- data/spec/support/logs/Agent - 2521463-24492.log +0 -14
- data/spec/support/logs/Agent - 2521482-50808.log +0 -10
- data/spec/support/logs/Agent - 2521503-8180.log +0 -16
- data/spec/support/logs/Agent - 2521562-3183.log +0 -14
- data/spec/support/logs/Agent - 2521639-45199.log +0 -10
- data/spec/support/logs/Agent - 2521891-30967.log +0 -10
- data/spec/support/logs/Agent - 2521897-64837.log +0 -10
- data/spec/support/logs/Agent - 2521916-23526.log +0 -16
- data/spec/support/logs/Agent - 2521936-26313.log +0 -14
- data/spec/support/logs/Agent - 2521948-18199.log +0 -10
- data/spec/support/logs/Agent - 2522026-18512.log +0 -6
- data/spec/support/logs/Agent - 2522093-46502.log +0 -6
- data/spec/support/logs/Agent - 2522195-30981.log +0 -6
- data/spec/support/logs/Agent - 2522295-55509.log +0 -6
- data/spec/support/logs/Agent - 2522347-32811.log +0 -6
- data/spec/support/logs/Agent - 2523391-7522.log +0 -6
- data/spec/support/logs/Agent - 2523626-61560.log +0 -6
- data/spec/support/logs/Agent - 2523913-25677.log +0 -6
- data/spec/support/logs/Agent - 2539887-21281.log +0 -6
- data/spec/support/logs/Agent - 2540261-61895.log +0 -6
- data/spec/support/logs/Agent - 2540430-8261.log +0 -6
- data/spec/support/logs/Agent - 361913-24577.log +0 -6
- data/spec/support/logs/Agent - 362152-33398.log +0 -6
- data/spec/support/logs/Agent - 362154-44243.log +0 -6
- data/spec/support/logs/Agent - 362157-6034.log +0 -6
- data/spec/support/logs/Agent - 362160-38548.log +0 -10
- data/spec/support/logs/Agent - 362162-59176.log +0 -10
- data/spec/support/logs/Agent - 362165-23738.log +0 -10
- data/spec/support/logs/Agent - 362167-57542.log +0 -10
- data/spec/support/logs/Agent - 362172-22798.log +0 -10
- data/spec/support/logs/Agent - 362174-46609.log +0 -10
- data/spec/support/logs/Agent - 362398-10913.log +0 -6
- data/spec/support/logs/Agent - 362524-29482.log +0 -6
- data/spec/support/logs/Agent - 362527-40674.log +0 -6
- data/spec/support/logs/Agent - 362530-32963.log +0 -6
- data/spec/support/logs/Agent - 362532-22236.log +0 -6
- data/spec/support/logs/Agent - 362543-4429.log +0 -6
- data/spec/support/logs/Agent - 362557-7510.log +0 -6
- data/spec/support/logs/Agent - 362574-36915.log +0 -6
- data/spec/support/logs/Agent - 362576-65372.log +0 -6
- data/spec/support/logs/Agent - 362578-29183.log +0 -6
- data/spec/support/logs/Agent - 362580-43546.log +0 -6
- data/spec/support/logs/Agent - 362582-22254.log +0 -6
- data/spec/support/logs/Agent - 362588-40009.log +0 -6
- data/spec/support/logs/Agent - 362591-60596.log +0 -16
- data/spec/support/logs/Agent - 362595-3584.log +0 -14
- data/spec/support/logs/Agent - 362599-39016.log +0 -10
- data/spec/support/logs/Agent - 362606-58506.log +0 -16
- data/spec/support/logs/Agent - 362609-55768.log +0 -14
- data/spec/support/logs/Agent - 362613-37168.log +0 -10
- data/spec/support/logs/Agent - 362629-16593.log +0 -16
- data/spec/support/logs/Agent - 362632-32264.log +0 -14
- data/spec/support/logs/Agent - 362643-59807.log +0 -10
- data/spec/support/logs/Agent - 362649-40552.log +0 -16
- data/spec/support/logs/Agent - 362655-31144.log +0 -14
- data/spec/support/logs/Agent - 362660-12968.log +0 -10
- data/spec/support/logs/Agent - 362666-6806.log +0 -16
- data/spec/support/logs/Agent - 362673-39178.log +0 -14
- data/spec/support/logs/Agent - 362678-36132.log +0 -10
- data/spec/support/logs/Agent - 362685-62240.log +0 -6
- data/spec/support/logs/Agent - 362687-4068.log +0 -6
- data/spec/support/logs/Agent - 362691-48465.log +0 -6
- data/spec/support/logs/Agent - 362693-40295.log +0 -6
- data/spec/support/logs/Agent - 362705-53855.log +0 -6
- data/spec/support/logs/Agent - 362712-14029.log +0 -6
- data/spec/support/logs/Agent - 362717-3069.log +0 -6
- data/spec/support/logs/Agent - 362721-38670.log +0 -6
- data/spec/support/logs/Agent - 362725-7644.log +0 -6
- data/spec/support/logs/Agent - 362748-18302.log +0 -6
- data/spec/support/logs/Agent - 362828-61766.log +0 -16
- data/spec/support/logs/Agent - 362838-44693.log +0 -14
- data/spec/support/logs/Agent - 362847-31875.log +0 -10
- data/spec/support/logs/Agent - 362875-46401.log +0 -16
- data/spec/support/logs/Agent - 362887-52041.log +0 -14
- data/spec/support/logs/Agent - 362897-7426.log +0 -10
- data/spec/support/logs/Agent - 362908-35228.log +0 -16
- data/spec/support/logs/Agent - 362919-33764.log +0 -14
- data/spec/support/logs/Agent - 362928-1323.log +0 -10
- data/spec/support/logs/Agent - 362960-27883.log +0 -10
- data/spec/support/logs/Agent - 362964-26072.log +0 -10
- data/spec/support/logs/Agent - 362966-42927.log +0 -16
- data/spec/support/logs/Agent - 362972-56025.log +0 -14
- data/spec/support/logs/Agent - 362977-39574.log +0 -10
- data/spec/support/logs/Agent - 362992-23510.log +0 -6
- data/spec/support/logs/Agent - 362996-63445.log +0 -6
- data/spec/support/logs/Agent - 363017-59359.log +0 -6
- data/spec/support/logs/Agent - 363034-15682.log +0 -6
- data/spec/support/logs/Agent - 363050-6605.log +0 -6
- data/spec/support/logs/Agent - 363065-59445.log +0 -6
- data/spec/support/logs/Agent - 363070-6619.log +0 -6
- data/spec/support/logs/Agent - 363077-18731.log +0 -6
- data/spec/support/logs/Agent - 363250-16962.log +0 -6
- data/spec/support/logs/Agent - 363265-25598.log +0 -6
- data/spec/support/logs/Agent - 363279-50603.log +0 -6
- data/spec/support/logs/Instance - 2525557-35695.error.log +0 -105
- data/spec/support/logs/Instance - 2525638-6350.error.log +0 -105
- data/spec/support/logs/Instance - 2525719-30351.error.log +0 -105
- data/spec/support/logs/Instance - 363119-21886.error.log +0 -105
- data/spec/support/logs/Instance - 363121-31774.error.log +0 -105
- data/spec/support/logs/Instance - 363127-52671.error.log +0 -105
- data/spec/support/logs/Scheduler - 2486608-59709.log +0 -3
- data/spec/support/logs/Scheduler - 2486612-44110.log +0 -27
- data/spec/support/logs/Scheduler - 2486723-50393.log +0 -3
- data/spec/support/logs/Scheduler - 2486727-21620.log +0 -27
- data/spec/support/logs/Scheduler - 2486877-37845.log +0 -3
- data/spec/support/logs/Scheduler - 2486881-3624.log +0 -1
- data/spec/support/logs/Scheduler - 2486911-24752.log +0 -3
- data/spec/support/logs/Scheduler - 2486919-48535.log +0 -27
- data/spec/support/logs/Scheduler - 2486985-8897.log +0 -1
- data/spec/support/logs/Scheduler - 2487211-7516.log +0 -3
- data/spec/support/logs/Scheduler - 2487215-2831.log +0 -1
- data/spec/support/logs/Scheduler - 2487246-7826.log +0 -3
- data/spec/support/logs/Scheduler - 2487256-35669.log +0 -6
- data/spec/support/logs/Scheduler - 2487272-11542.log +0 -4
- data/spec/support/logs/Scheduler - 2487278-9621.log +0 -1
- data/spec/support/logs/Scheduler - 2487291-24094.log +0 -3
- data/spec/support/logs/Scheduler - 2487299-60095.log +0 -6
- data/spec/support/logs/Scheduler - 2487368-7706.log +0 -3
- data/spec/support/logs/Scheduler - 2487378-9859.log +0 -6
- data/spec/support/logs/Scheduler - 2487396-17812.log +0 -3
- data/spec/support/logs/Scheduler - 2487407-25543.log +0 -6
- data/spec/support/logs/Scheduler - 2487451-44767.log +0 -4
- data/spec/support/logs/Scheduler - 2487506-1422.log +0 -6
- data/spec/support/logs/Scheduler - 2487541-38068.log +0 -1
- data/spec/support/logs/Scheduler - 2487544-21866.log +0 -1
- data/spec/support/logs/Scheduler - 2487548-15245.log +0 -1
- data/spec/support/logs/Scheduler - 2487551-34905.log +0 -1
- data/spec/support/logs/Scheduler - 2487554-22142.log +0 -1
- data/spec/support/logs/Scheduler - 2487562-35113.log +0 -1
- data/spec/support/logs/Scheduler - 2487565-55125.log +0 -3
- data/spec/support/logs/Scheduler - 2487569-48845.log +0 -6
- data/spec/support/logs/Scheduler - 2487576-57192.log +0 -4
- data/spec/support/logs/Scheduler - 2487583-17991.log +0 -1
- data/spec/support/logs/Scheduler - 2487586-30014.log +0 -1
- data/spec/support/logs/Scheduler - 2487591-6472.log +0 -1
- data/spec/support/logs/Scheduler - 2487594-2195.log +0 -1
- data/spec/support/logs/Scheduler - 2487598-55808.log +0 -3
- data/spec/support/logs/Scheduler - 2487605-7400.log +0 -1
- data/spec/support/logs/Scheduler - 2487607-4337.log +0 -1
- data/spec/support/logs/Scheduler - 2487610-25835.log +0 -1
- data/spec/support/logs/Scheduler - 2493623-45209.log +0 -3
- data/spec/support/logs/Scheduler - 2493714-59407.log +0 -1
- data/spec/support/logs/Scheduler - 2494470-61696.log +0 -3
- data/spec/support/logs/Scheduler - 2494723-2810.log +0 -6
- data/spec/support/logs/Scheduler - 2495458-22112.log +0 -4
- data/spec/support/logs/Scheduler - 2496034-4076.log +0 -1
- data/spec/support/logs/Scheduler - 2496119-62253.log +0 -3
- data/spec/support/logs/Scheduler - 2496210-50380.log +0 -6
- data/spec/support/logs/Scheduler - 2497536-24922.log +0 -3
- data/spec/support/logs/Scheduler - 2497786-13515.log +0 -6
- data/spec/support/logs/Scheduler - 2498774-16911.log +0 -3
- data/spec/support/logs/Scheduler - 2498961-4742.log +0 -6
- data/spec/support/logs/Scheduler - 2500340-16045.log +0 -4
- data/spec/support/logs/Scheduler - 2500980-26158.log +0 -6
- data/spec/support/logs/Scheduler - 2502381-26435.log +0 -1
- data/spec/support/logs/Scheduler - 2502463-62965.log +0 -1
- data/spec/support/logs/Scheduler - 2502547-53434.log +0 -1
- data/spec/support/logs/Scheduler - 2502628-43720.log +0 -1
- data/spec/support/logs/Scheduler - 2502643-58379.log +0 -1
- data/spec/support/logs/Scheduler - 2502873-64893.log +0 -1
- data/spec/support/logs/Scheduler - 2502954-43885.log +0 -3
- data/spec/support/logs/Scheduler - 2503039-52147.log +0 -6
- data/spec/support/logs/Scheduler - 2503768-28831.log +0 -4
- data/spec/support/logs/Scheduler - 2504259-24533.log +0 -1
- data/spec/support/logs/Scheduler - 2504343-56967.log +0 -1
- data/spec/support/logs/Scheduler - 2504502-25085.log +0 -1
- data/spec/support/logs/Scheduler - 2504587-30789.log +0 -1
- data/spec/support/logs/Scheduler - 2504608-56601.log +0 -3
- data/spec/support/logs/Scheduler - 2504760-36374.log +0 -1
- data/spec/support/logs/Scheduler - 2504841-49675.log +0 -1
- data/spec/support/logs/Scheduler - 2504923-15781.log +0 -1
- data/spec/support/logs/Scheduler - 2532871-63847.log +0 -16
- data/spec/support/logs/Scheduler - 2538978-63705.log +0 -6
- data/spec/support/logs/Scheduler - 2539461-10579.log +0 -6
- data/spec/support/logs/Scheduler - 2539981-44320.log +0 -6
- data/spec/support/logs/Scheduler - 2540265-37672.log +0 -3
- data/spec/support/logs/Scheduler - 2540434-9490.log +0 -5
- data/spec/support/logs/Scheduler - 2540840-9957.log +0 -1
- data/spec/support/logs/Scheduler - 2540921-2437.log +0 -1
- data/spec/support/logs/Scheduler - 2540937-17100.log +0 -1
- data/spec/support/logs/Scheduler - 2541007-52812.log +0 -1
- data/spec/support/logs/Scheduler - 2541091-56468.log +0 -1
- data/spec/support/logs/Scheduler - 2541109-54138.log +0 -1
- data/spec/support/logs/Scheduler - 2541188-17918.log +0 -1
- data/spec/support/logs/Scheduler - 2541267-2563.log +0 -3
- data/spec/support/logs/Scheduler - 2541430-40675.log +0 -3
- data/spec/support/logs/Scheduler - 2542341-38074.log +0 -3
- data/spec/support/logs/Scheduler - 2542519-33069.log +0 -3
- data/spec/support/logs/Scheduler - 2542610-41819.log +0 -4
- data/spec/support/logs/Scheduler - 2543168-3708.log +0 -1
- data/spec/support/logs/Scheduler - 2543332-46957.log +0 -4
- data/spec/support/logs/Scheduler - 2543334-53034.log +0 -1
- data/spec/support/logs/Scheduler - 2543419-31038.log +0 -1
- data/spec/support/logs/Scheduler - 2543666-10481.log +0 -16
- data/spec/support/logs/Scheduler - 2544245-30154.log +0 -4
- data/spec/support/logs/Scheduler - 2544487-8113.log +0 -1
- data/spec/support/logs/Scheduler - 2544490-62030.log +0 -1
- data/spec/support/logs/Scheduler - 2544494-64830.log +0 -1
- data/spec/support/logs/Scheduler - 2544585-3507.log +0 -1
- data/spec/support/logs/Scheduler - 2544668-24185.log +0 -3
- data/spec/support/logs/Scheduler - 2544762-17601.log +0 -1
- data/spec/support/logs/Scheduler - 2544790-32379.log +0 -1
- data/spec/support/logs/Scheduler - 2544853-35518.log +0 -1
- data/spec/support/logs/Scheduler - 2544939-52525.log +0 -1
- data/spec/support/logs/Scheduler - 2545016-12352.log +0 -1
- data/spec/support/logs/Scheduler - 2545096-63001.log +0 -1
- data/spec/support/logs/Scheduler - 2545178-63647.log +0 -2
- data/spec/support/logs/Scheduler - 361893-37255.log +0 -3
- data/spec/support/logs/Scheduler - 361897-36090.log +0 -1
- data/spec/support/logs/Scheduler - 361928-44834.log +0 -3
- data/spec/support/logs/Scheduler - 361937-43013.log +0 -6
- data/spec/support/logs/Scheduler - 361956-8533.log +0 -4
- data/spec/support/logs/Scheduler - 361968-31856.log +0 -1
- data/spec/support/logs/Scheduler - 361988-37894.log +0 -3
- data/spec/support/logs/Scheduler - 361997-31525.log +0 -6
- data/spec/support/logs/Scheduler - 362037-51907.log +0 -3
- data/spec/support/logs/Scheduler - 362047-32758.log +0 -6
- data/spec/support/logs/Scheduler - 362072-11620.log +0 -3
- data/spec/support/logs/Scheduler - 362080-16797.log +0 -6
- data/spec/support/logs/Scheduler - 362124-56230.log +0 -4
- data/spec/support/logs/Scheduler - 362137-4746.log +0 -6
- data/spec/support/logs/Scheduler - 362176-32691.log +0 -1
- data/spec/support/logs/Scheduler - 362179-34759.log +0 -1
- data/spec/support/logs/Scheduler - 362183-18758.log +0 -1
- data/spec/support/logs/Scheduler - 362187-63438.log +0 -1
- data/spec/support/logs/Scheduler - 362189-35075.log +0 -1
- data/spec/support/logs/Scheduler - 362191-34351.log +0 -1
- data/spec/support/logs/Scheduler - 362195-51436.log +0 -3
- data/spec/support/logs/Scheduler - 362199-46884.log +0 -6
- data/spec/support/logs/Scheduler - 362214-12351.log +0 -4
- data/spec/support/logs/Scheduler - 362230-12723.log +0 -1
- data/spec/support/logs/Scheduler - 362233-16133.log +0 -1
- data/spec/support/logs/Scheduler - 362238-51103.log +0 -1
- data/spec/support/logs/Scheduler - 362241-20441.log +0 -1
- data/spec/support/logs/Scheduler - 362245-40692.log +0 -3
- data/spec/support/logs/Scheduler - 362251-40074.log +0 -1
- data/spec/support/logs/Scheduler - 362253-48087.log +0 -1
- data/spec/support/logs/Scheduler - 362256-27112.log +0 -1
- data/spec/support/logs/Scheduler - 363208-60869.log +0 -16
- data/spec/support/logs/Scheduler - 363231-35883.log +0 -6
- data/spec/support/logs/Scheduler - 363240-11734.log +0 -6
- data/spec/support/logs/Scheduler - 363252-28394.log +0 -6
- data/spec/support/logs/Scheduler - 363267-7527.log +0 -3
- data/spec/support/logs/Scheduler - 363282-48049.log +0 -5
- data/spec/support/logs/Scheduler - 363298-26965.log +0 -1
- data/spec/support/logs/Scheduler - 363300-14973.log +0 -1
- data/spec/support/logs/Scheduler - 363302-58216.log +0 -1
- data/spec/support/logs/Scheduler - 363304-11942.log +0 -1
- data/spec/support/logs/Scheduler - 363306-39654.log +0 -1
- data/spec/support/logs/Scheduler - 363308-57479.log +0 -1
- data/spec/support/logs/Scheduler - 363310-32176.log +0 -1
- data/spec/support/logs/Scheduler - 363314-13492.log +0 -3
- data/spec/support/logs/Scheduler - 363322-38112.log +0 -3
- data/spec/support/logs/Scheduler - 363342-26080.log +0 -3
- data/spec/support/logs/Scheduler - 363349-51783.log +0 -3
- data/spec/support/logs/Scheduler - 363355-44132.log +0 -4
- data/spec/support/logs/Scheduler - 363367-12160.log +0 -1
- data/spec/support/logs/Scheduler - 363372-28520.log +0 -4
- data/spec/support/logs/Scheduler - 363376-58951.log +0 -1
- data/spec/support/logs/Scheduler - 363379-28506.log +0 -1
- data/spec/support/logs/Scheduler - 363384-63536.log +0 -16
- data/spec/support/logs/Scheduler - 363402-32695.log +0 -4
- data/spec/support/logs/Scheduler - 363412-41984.log +0 -1
- data/spec/support/logs/Scheduler - 363414-47366.log +0 -1
- data/spec/support/logs/Scheduler - 363416-44368.log +0 -1
- data/spec/support/logs/Scheduler - 363427-29292.log +0 -1
- data/spec/support/logs/Scheduler - 363431-44605.log +0 -3
- data/spec/support/logs/Scheduler - 363439-19754.log +0 -1
- data/spec/support/logs/Scheduler - 363441-5762.log +0 -1
- data/spec/support/logs/Scheduler - 363443-4000.log +0 -1
- data/spec/support/logs/Scheduler - 363445-3688.log +0 -1
- data/spec/support/logs/Scheduler - 363448-50268.log +0 -1
- data/spec/support/logs/Scheduler - 363451-23104.log +0 -1
- data/spec/support/logs/Scheduler - 363454-12897.log +0 -2
- data/spec/support/logs/error-2487640.log +0 -797
- data/spec/support/logs/error-360075.log +0 -797
- data/spec/support/logs/output_spec_2487640.log +0 -390
- data/spec/support/logs/output_spec_360075.log +0 -390
- data/spec/support/reports/052f88c73e2d0ecf331dbf5fce0cfe3c.crf +0 -0
- data/spec/support/reports/06eed3dec98f5cd5fc717190a004d7d6.crf +0 -0
- data/spec/support/reports/0902bd88bbd4df462910007a3b5018fc.crf +0 -0
- data/spec/support/reports/10b9864e81e3204d1ef4940ccb88b0ed.crf +0 -0
- data/spec/support/reports/16b575097e68156416a84a6854d3719c.crf +0 -0
- data/spec/support/reports/18be00bff4371738c7c7013b284b415b.crf +0 -0
- data/spec/support/reports/1ca39d410c2cf1f652eb8c320d6682bd.crf +0 -0
- data/spec/support/reports/266a09d73152ce2f3d2951f1dab133f3.crf +0 -0
- data/spec/support/reports/2929bee9c126b2695dc569b693fef574.crf +0 -0
- data/spec/support/reports/2abd611201d06f0428f1b060b32d4839.crf +0 -0
- data/spec/support/reports/2b9ce956f7060163d7a0b78603dc05ca.crf +0 -0
- data/spec/support/reports/3254163f76b8696fa1677d18eaefce1d.crf +0 -0
- data/spec/support/reports/3a9e6a4fa91c901550ff295cd21d69c9.crf +0 -0
- data/spec/support/reports/3de18cf956e18664f441f94ee05e4118.crf +0 -0
- data/spec/support/reports/46bcffd844008e71c7d90a76baf8597d.crf +0 -0
- data/spec/support/reports/482cf0d0c54cda32bedf59b2ea995c65.crf +0 -0
- data/spec/support/reports/502508e3a29966200350196c9859881c.crf +0 -0
- data/spec/support/reports/5f0d832d46338d69b4eab6ff91ba4c91.crf +0 -0
- data/spec/support/reports/620287cfdc373595385cf2471e1d4523.crf +0 -0
- data/spec/support/reports/65d71de3b0e8c9fe894f4ba36cb4cb7c.crf +0 -0
- data/spec/support/reports/6754d1f86dc5544abeca7f06314ebc56.crf +0 -0
- data/spec/support/reports/75fecdd5e006942292e02e6a223e7279.crf +0 -0
- data/spec/support/reports/77523cc87357aa220c33d5604f41b5a1.crf +0 -0
- data/spec/support/reports/7e137bd5f71e0f0fbb63b93b87dae160.crf +0 -0
- data/spec/support/reports/806a17c3a40cc22eafd77c97b64a2ed5.crf +0 -0
- data/spec/support/reports/81cca53163bbab5ccf4d5f0401d5adcd.crf +0 -0
- data/spec/support/reports/8745ae8fdeb76289765582e06a614940.crf +0 -0
- data/spec/support/reports/8c204ee129fe1bd6c5964d29ae5d03ae.crf +0 -0
- data/spec/support/reports/937b7b516a99c23b2a641bb7bf695bfa.crf +0 -0
- data/spec/support/reports/a6eb89ffb1c33e33e421c1ee393118a9.crf +0 -0
- data/spec/support/reports/aadbf2c1544b0e11174853fb4883a38c.crf +0 -0
- data/spec/support/reports/c360a2833f3e635e69036916010edeac.crf +0 -0
- data/spec/support/reports/cf0bfb4662a7cda85db324936aae93d3.crf +0 -0
- data/spec/support/reports/cfff9c06897e28b1d58977909738c78f.crf +0 -0
- data/spec/support/reports/e2d5321452359193a758f7d16879ddb3.crf +0 -0
- data/spec/support/reports/e58c9dfbf9edeca9adcf32af5940f16b.crf +0 -0
- data/spec/support/reports/e96abea937f1ed3f89bc2ec5397522db.crf +0 -0
- data/spec/support/reports/ec0bbd4424061d305963551a29d20e1a.crf +0 -0
- data/spec/support/reports/f16e679098a4411d4f1c123abcd6af90.crf +0 -0
- data/spec/support/reports/f2c155087c431851979048ceb8c46ec9.crf +0 -0
- data/spec/support/reports/f3d542036e17f6a66e11bfacb2fb3366.crf +0 -0
- data/spec/support/reports/f72c7feef5f9c7e29f423c3ebe345ef3.crf +0 -0
- data/spec/support/reports/fbb2e1c6ada2d4b8229488f4c4b3b418.crf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-01 13_10_28 +0200 a06d9bd7db81b1b4fb077ceadcc3895f.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-01 13_10_42 +0200 36c68859faf144eed9ff9c01ae754217.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-01 13_24_42 +0200 5a26112a913330ee8763b2982a4d42df.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-01 13_24_56 +0200 190856b98ac7099eb553ed3abcfbcb87.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-01 13_35_35 +0200 a9c5229b459119840c9428a3d7c54eb1.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-01 13_35_41 +0200 d93660f71110dad136cea0758b29611e.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-01 13_35_44 +0200 57083dafa382f1d21cc33c9740861c88.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-13 07_55_57 +0200 6ff9bf8c7b70ad85fa3a42f44bcc5fe7.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-13 07_56_12 +0200 2852d4e0ae04e5365301703913f7b763.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-13 08_06_41 +0200 54da34dab4b059de90cdd33d400ccac7.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-13 08_06_46 +0200 61596bdce9318446373d1b736ec7c9ce.csf +0 -0
- data/spec/support/snapshots/Cuboid 2022-02-13 08_06_48 +0200 19030be87532ece27c1d4791a8d3229c.csf +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5b9121b12ad9684342c07f1f103a7ceccaf7a5fc4726aff0f869c004eb0609c6
+  data.tar.gz: 8bc40ef564aa932c948acb929e9ebd41e97fe3f684326db9a13095fc321a9049
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 803d5ae299c9af535d49172cb9dcf1c4d91567d40a0a6e981223f3fb7de7a16869449d30d9f2f81de01f35ae2e6ee9c1e40d7f517c9781ae3c29a82c9f070c2c
+  data.tar.gz: fdb7bb6c4fa8c9bb97338d78f6e53671789aa1c1624653bab2794754cbcade7c6a915a1c5a39bbbaab983a182a4d220d82ee36cfe8037271ed2eacf514f8e420
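The new digests can be checked against a locally fetched copy of the 0.1.5 gem; a `.gem` file is a plain tar archive whose `metadata.gz` and `data.tar.gz` members are exactly what `checksums.yaml` covers. The snippet below is an illustrative sketch only, assuming the gem has already been unpacked (for example with `tar -xf cuboid-0.1.5.gem`) into the working directory:

```ruby
require 'digest'

# Hypothetical verification of the published SHA256 digests listed above.
expected = {
    'metadata.gz' => '5b9121b12ad9684342c07f1f103a7ceccaf7a5fc4726aff0f869c004eb0609c6',
    'data.tar.gz' => '8bc40ef564aa932c948acb929e9ebd41e97fe3f684326db9a13095fc321a9049'
}

expected.each do |file, sha256|
    actual = Digest::SHA256.file( file ).hexdigest
    puts "#{file}: #{actual == sha256 ? 'OK' : 'MISMATCH'}"
end
```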
data/CHANGELOG.md
CHANGED
data/README.md
CHANGED
@@ -30,7 +30,7 @@ It offers:
 * Developer freedom.
 * Apart from keeping _Data_ and _State_ separate not many other rules to follow.
 * Only if interested in _suspensions_ and can also be left to the last minute
-  if necessary -- in cases of Ractor enforced isolation
+  if necessary -- in cases of `Ractor` enforced isolation for example.

 ## Entities

@@ -144,7 +144,7 @@ _**Application**_ itself.

 A simple RPC is employed, specs for 3rd party implementations can be found at:

-https://github.com/
+https://github.com/toq/arachni-rpc/wiki

 Each _**Application**_ can extend upon this and expose an API via its _**Instance**_'s
 RPC interface.
@@ -162,13 +162,130 @@ service's interface.

 ## Examples

-See `examples/`.
-
 ### MyApp

 Tutorial application going over different APIs and **Cuboid** _**Application**_
 options and specification.

+See `examples/my_app`.
+
+### Parallel code on same host
+
+To run code in parallel on the same machine utilising multiple cores, with each
+instance isolated to its own process, you can use something like the following:
+
+```ruby
+require 'cuboid'
+
+class Sleeper < Cuboid::Application
+
+    def run
+        sleep options['time']
+    end
+
+end
+
+return if $0 != __FILE__
+
+sleepers = []
+sleepers << Sleeper.spawn( :instance )
+sleepers << Sleeper.spawn( :instance )
+sleepers << Sleeper.spawn( :instance )
+
+sleepers.each do |sleeper|
+    sleeper.run( time: 5 )
+end
+
+sleep 0.1 while sleepers.map(&:busy?).include?( true )
+```
+
+    time bundle exec ruby same_host.rb
+    [...]
+    real    0m6,506s
+    user    0m0,423s
+    sys     0m0,063s
+
+### Parallel code on different hosts
+
+In this example we'll be using `Agents` to spawn instances from 3 different hosts.
+
+#### Host 1
+
+```ruby
+require 'cuboid'
+
+class Sleeper < Cuboid::Application
+
+    def run
+        sleep options['time']
+    end
+
+end
+
+return if $0 != __FILE__
+
+Sleeper.spawn( :agent, port: 7331 )
+```
+
+    bundle exec ruby multiple_hosts_1.rb
+
+#### Host 2
+
+```ruby
+require 'cuboid'
+
+class Sleeper < Cuboid::Application
+
+    def run
+        sleep options['time']
+    end
+
+end
+
+return if $0 != __FILE__
+
+Sleeper.spawn( :agent, port: 7332, peer: 'host1:7331' )
+```
+
+    bundle exec ruby multiple_hosts_2.rb
+
+#### Host 3
+
+```ruby
+require 'cuboid'
+
+class Sleeper < Cuboid::Application
+
+    def run
+        sleep options['time']
+    end
+
+end
+
+return if $0 != __FILE__
+
+grid_agent = Sleeper.spawn( :agent, port: 7333, peer: 'host1:7331' )
+
+sleepers = []
+3.times do
+    connection_info = grid_agent.spawn
+    sleepers << Sleeper.connect( connection_info )
+end
+
+sleepers.each do |sleeper|
+    sleeper.run( time: 5 )
+end
+
+sleep 0.1 while sleepers.map(&:busy?).include?( true )
+```
+
+    time bundle exec ruby multiple_hosts_3.rb
+    real    0m7,318s
+    user    0m0,426s
+    sys     0m0,091s
+
+
+_You can replace `host1` with `localhost` and run all examples on the same terminal._

 ## License

data/Rakefile
CHANGED
@@ -55,3 +55,20 @@ task :clean do
     files.each { |file| puts " * #{file}" }
     FileUtils.rm files
 end
+
+desc 'Build the gem.'
+task build: [ :clean ] do
+    sh "gem build cuboid.gemspec"
+end
+
+desc 'Build and install the gem.'
+task install: [ :build ] do
+    sh "gem install cuboid-#{Cuboid::VERSION}.gem"
+end
+
+desc 'Push a new version to Rubygems'
+task publish: [ :build ] do
+    sh "git tag -a v#{Cuboid::VERSION} -m 'Version #{Cuboid::VERSION}'"
+    sh "gem push cuboid-#{Cuboid::VERSION}.gem"
+end
+
+task release: [ :publish ]
data/cuboid.gemspec
CHANGED
@@ -53,7 +53,7 @@ Gem::Specification.new do |s|
     s.add_dependency 'sinatra-contrib', '2.1.0'

     # RPC client/server implementation.
-    s.add_dependency '
+    s.add_dependency 'toq', '~> 0.0.1'

     s.add_dependency 'vmstat', '2.3.0'
     s.add_dependency 'sys-proctable', '1.1.5'
data/lib/cuboid/application.rb
CHANGED
@@ -174,7 +174,7 @@ class Application
            last_const = last_const.const_get( const_name.to_sym )
        end

-        last_const.const_source_location( app.to_sym ).first
+        File.expand_path last_const.const_source_location( app.to_sym ).first
    end

    def spawn( type, options = {}, &block )
@@ -5,7 +5,7 @@ module Cuboid::OptionGroups
 # @author Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
 class Agent < Cuboid::OptionGroup

-    STRATEGIES = Set.new([:horizontal, :vertical])
+    STRATEGIES = Set.new([:horizontal, :vertical, :direct])

     # @return [String]
     #   URL of a {RPC::Server::Agent}.
@@ -28,7 +28,7 @@ class Instances
    # @return [RPC::Client::Instance]
    #
    def connect( url, token = nil )
-
+        Raktr.global.run_in_thread if !Raktr.global.running?

        token ||= @list[url]
        @list[url] ||= token
@@ -1,5 +1,5 @@
 require 'singleton'
-require '
+require 'raktr'

 module Cuboid
 module Processes
@@ -156,7 +156,7 @@ class Manager

    # Stops the Reactor.
    def kill_reactor
-
+        Raktr.stop
    rescue
        nil
    end
@@ -23,7 +23,7 @@ class Schedulers
    #
    # @return [RPC::Client::Scheduler]
    def connect( url, options = nil )
-
+        Raktr.global.run_in_thread if !Raktr.global.running?

        fresh = false
        if options
@@ -16,14 +16,14 @@ class Agent

    def initialize( url, options = nil )
        @client = Base.new( url, nil, options )
-        @node =
+        @node = Toq::Proxy.new( @client, 'node' )

        Cuboid::Application.application.agent_services.keys.each do |name|
            self.class.send( :attr_reader, name.to_sym )

            instance_variable_set(
                "@#{name}".to_sym,
-
+                Toq::Proxy.new( @client, name )
            )
        end
    end
@@ -1,4 +1,4 @@
-require '
+require 'toq'
 require_relative '../serializer'

 module Cuboid
@@ -6,7 +6,7 @@ module RPC
 class Client

 # @author Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
-class Base <
+class Base < Toq::Client
    attr_reader :url

    # @param [String] url
@@ -24,10 +24,10 @@ class Instance
        )

        client = new( url, token, options )
-
+        Raktr.global.delay( 0.1 ) do |task|
            client.alive? do |r|
                if r.rpc_exception?
-
+                    Raktr.global.delay( 0.1, &task )
                    next
                end

@@ -45,7 +45,7 @@ class Instance
        @client = Base.new( url, token, options )

        @instance = Proxy.new( @client )
-        @options =
+        @options = Toq::Proxy.new( @client, 'options' )

        # map Agent handlers
        Cuboid::Application.application.instance_services.keys.each do |name|
@@ -53,7 +53,7 @@ class Instance

            instance_variable_set(
                "@#{name}".to_sym,
-
+                Toq::Proxy.new( @client, name )
            )
        end
    end
@@ -65,7 +65,7 @@ class Server::Agent::Node

        log_updated_peers

-
+        Raktr.global.at_interval( @options.agent.ping_interval ) do
            ping
            check_for_comebacks
        end
@@ -152,7 +152,7 @@ class Server::Agent::Node
                block.call( @nodes_info_cache )
            end

-
+            Raktr.global.create_iterator( peers ).map( each, after )
        else
            block.call( @nodes_info_cache )
        end
@@ -45,7 +45,7 @@ class Server::Agent::Service
    # Performs an asynchronous map operation over all running instances.
    #
    # @param [Proc] each
-    #   Block to be passed {Client::Instance} and `
+    #   Block to be passed {Client::Instance} and `Raktr::Iterator`.
    # @param [Proc] after
    #   Block to be passed the Array of results.
    def map_instances( each, after )
@@ -58,7 +58,7 @@ class Server::Agent::Service
    # Performs an asynchronous iteration over all running instances.
    #
    # @param [Proc] block
-    #   Block to be passed {Client::Instance} and `
+    #   Block to be passed {Client::Instance} and `Raktr::Iterator`.
    def each_instance( &block )
        wrap = proc do |instance, iterator|
            block.call( connect_to_instance( instance ), iterator )
@@ -88,15 +88,15 @@ class Server::Agent::Service
    #
    # @param [Block] block
    def run_asap( &block )
-
+        Raktr.global.next_tick( &block )
    end

    # @param [Array] list
    #
-    # @return [
+    # @return [Raktr::Iterator]
    #   Iterator for the provided array.
    def iterator_for( list, max_concurrency = 10 )
-
+        Raktr.global.create_iterator( list, max_concurrency )
    end

    # @return [Array<Hash>]
@@ -82,6 +82,7 @@ class Agent
    # @param [Symbol] strategy
    #   `:horizontal` -- Pick the Agent with the least amount of workload.
    #   `:vertical` -- Pick the Agent with the most amount of workload.
+    #   `:direct` -- Bypass the grid and get an Instance directly from this agent.
    #
    # @return [String, nil]
    #   Depending on strategy and availability:
@@ -97,7 +98,7 @@ class Agent
            raise ArgumentError, "Unknown strategy: #{strategy}"
        end

-        if !@node.grid_member?
+        if strategy == :direct || !@node.grid_member?
            block.call( self.utilization == 1 ? nil : @url )
            return
        end
@@ -136,10 +137,10 @@ class Agent
            block.call nodes.sort_by { |_, score| adjust_score_by_strategy.call score }[0][0]
        end

-
+        Raktr.global.create_iterator( @node.peers ).map( each, after )
    end

-    #
+    # Spawns an {Instance}.
    #
    # @param [String] options
    # @option [String] strategy
@@ -147,22 +148,24 @@ class Agent
    #   An owner to assign to the {Instance}.
    # @option [Hash] helpers
    #   Hash of helper data to be added to the instance info.
-    # @option [Boolean] load_balance
-    #   Use the Grid (when available) or this one directly?
    #
    # @return [Hash, nil]
    #   Depending on availability:
    #
    #   * `Hash`: Connection and proc info.
-    #   * `nil`: Max utilization
+    #   * `nil`: Max utilization or currently spawning, wait and retry.
    def spawn( options = {}, &block )
+        if @spawning
+            block.call nil
+            return
+        end
+
        options = options.my_symbolize_keys
        strategy = options.delete(:strategy)
        owner = options[:owner]
        helpers = options[:helpers] || {}
-        load_balance = options[:load_balance].nil? ? true : options[:load_balance]

-        if
+        if strategy != 'direct' && @node.grid_member?
            preferred *[strategy].compact do |url|
                if !url
                    block.call
@@ -175,8 +178,8 @@ class Agent
                end

                connect_to_peer( url ).spawn( options.merge(
-                    helpers:
-
+                    helpers:  helpers.merge( via: @url ),
+                    strategy: :direct
                ),
                &block
                )
@@ -189,6 +192,7 @@ class Agent
            return
        end

+        @spawning = true
        spawn_instance do |info|
            info['owner'] = owner
            info['helpers'] = helpers
@@ -196,6 +200,8 @@ class Agent
            @instances << info

            block.call info
+
+            @spawning = false
        end
    end

@@ -288,7 +294,7 @@ class Agent

    # Starts the agent's server
    def run
-
+        Raktr.global.on_error do |_, e|
            print_error "Reactor: #{e}"

            e.backtrace.each do |l|
@@ -308,7 +314,7 @@ class Agent
    def shutdown
        Thread.new do
            print_status 'Shutting down...'
-
+            Raktr.global.stop
        end
    end

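These server-side Agent changes pair with the `:direct` entry added to `STRATEGIES` above: `spawn` now answers `nil` while another spawn is already in flight (the new `@spawning` flag), and the `:direct` strategy skips grid load-balancing and provisions an Instance from the contacted Agent itself. A rough client-side sketch of what that enables follows; the class and constructor appear in the hunks of this diff and the README examples, but the exact call style is an assumption rather than code shipped in the package.

```ruby
require 'cuboid'

# Hypothetical sketch: request an Instance from one specific Agent, bypassing
# grid load-balancing, retrying while the Agent answers nil (max utilization
# or a spawn already in progress).
agent = Cuboid::RPC::Client::Agent.new( 'host1:7331' )

info = nil
loop do
    info = agent.spawn( strategy: :direct, owner: 'example' )
    break if info
    sleep 0.5
end

sleeper = Sleeper.connect( info )   # Sleeper as defined in the README examples
sleeper.run( time: 5 )
```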
@@ -1,5 +1,5 @@
 require 'ostruct'
-require '
+require 'toq'
 require_relative '../serializer'

 module Cuboid
@@ -10,7 +10,7 @@ class Server
 #
 # @private
 # @author Tasos "Zapotek" Laskos <tasos.laskos@gmail.com>
-class Base <
+class Base < Toq::Server

    # @param [Hash] options
    # @option options [Integer] :host
@@ -74,7 +74,7 @@ class Instance
            trap( signal ){ shutdown if !@options.datastore.do_not_trap }
        end

-
+        Raktr.global.run do
            _run
        end
    end
@@ -294,7 +294,7 @@ class Instance

    # Starts RPC service.
    def _run
-
+        Raktr.global.on_error do |_, e|
            print_error "Reactor: #{e}"

            e.backtrace.each do |l|
data/lib/version
CHANGED
@@ -1 +1 @@
-0.1.2
+0.1.5
@@ -204,7 +204,7 @@ describe Cuboid::Rest::Server do
            post url, invalid: 'blah'

            expect(response_code).to eq 500
-            expect(response_data['error']).to eq '
+            expect(response_data['error']).to eq 'Toq::Exceptions::RemoteException'
            expect(response_data).to include 'backtrace'
        end

@@ -658,7 +658,7 @@ describe Cuboid::Rest::Server do
            it 'returns 500' do
                put url, 'localhost:383838'
                expect(response_code).to eq 500
-                expect(response_data['error']).to eq '
+                expect(response_data['error']).to eq 'Toq::Exceptions::ConnectionError'
            end
        end
    end
@@ -821,7 +821,7 @@ describe Cuboid::Rest::Server do
            post url, invalid: 'blah'

            expect(response_code).to eq 500
-            expect(response_data['error']).to eq '
+            expect(response_data['error']).to eq 'Toq::Exceptions::RemoteException'
            expect(response_data).to include 'backtrace'
        end
    end
@@ -876,7 +876,7 @@ describe Cuboid::Rest::Server do
            put url, 'localhost:393939'

            expect(response_code).to eq 500
-            expect(response_data['error']).to eq '
+            expect(response_data['error']).to eq 'Toq::Exceptions::ConnectionError'
            expect(response_data['description']).to include 'Connection closed'
        end
    end
@@ -992,7 +992,7 @@ describe Cuboid::Rest::Server do

            get url
            expect(response_data.size).to be 1
-            expect(response_data[@id]['error']).to eq '
+            expect(response_data[@id]['error']).to eq 'Toq::Exceptions::ConnectionError'
            expect(response_data[@id]['description']).to include 'Connection closed [Connection refused - connect(2) for'
        end
    end
@@ -18,7 +18,7 @@ class Server
    end

    def start
-
+        Raktr.global.run_in_thread if !Raktr.global.running?
        @server.start
        sleep( 0.1 ) while !@server.ready?
    end
@@ -86,7 +86,7 @@ describe Cuboid::RPC::Client::Base do
        begin
            client = described_class.new( server.url, nil, client_ssl_options )
            client.call( "foo.bar" )
-        rescue
+        rescue Toq::Exceptions::ConnectionError
            raised = true
        end

@@ -102,7 +102,7 @@ describe Cuboid::RPC::Client::Base do
        begin
            client = described_class.new( server.url, nil, empty_options )
            client.call( "foo.bar" )
-        rescue
+        rescue Toq::Exceptions::ConnectionError
            raised = true
        end

@@ -137,7 +137,7 @@ describe Cuboid::RPC::Client::Base do
        begin
            client = described_class.new( server.url, nil, empty_options )
            client.call( "foo.bar" )
-        rescue
+        rescue Toq::Exceptions::InvalidToken
            raised = true
        end

@@ -16,7 +16,7 @@ describe Cuboid::RPC::Client::Instance do
        it 'should fail to connect' do
            expect do
                described_class.new( subject.url, 'blah' ).alive?
-            end.to raise_error
+            end.to raise_error Toq::Exceptions::InvalidToken
        end
    end
 end
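The spec updates above show the user-visible side of the RPC dependency swap: failures now surface as `Toq::Exceptions::*` classes (`ConnectionError`, `RemoteException`, `InvalidToken`) instead of the previous library's equivalents. Client code that rescued the old classes would presumably need the same rename; a minimal sketch, with the URL and token as placeholders:

```ruby
require 'cuboid'

# Hypothetical sketch of handling the Toq exception classes exercised by the specs.
begin
    instance = Cuboid::RPC::Client::Instance.new( 'localhost:7331', 'wrong-token' )
    instance.alive?
rescue Toq::Exceptions::InvalidToken
    warn 'Authentication token was rejected.'
rescue Toq::Exceptions::ConnectionError => e
    warn "Could not reach the Instance: #{e}"
end
```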