gorgon 0.2.0 → 0.3.0

@@ -0,0 +1,74 @@
+ require 'gorgon/gem_service'
+
+ describe GemService do
+ let(:configuration){ {:connection => {:host => "host"}, :originator_log_file => "file.log"}}
+ let(:protocol) { stub("OriginatorProtocol", :connect => nil,
+ :receive_payloads => nil, :disconnect => nil,
+ :send_message_to_listeners => nil)}
+ let(:logger){ stub("Originator Logger", :log => nil, :log_message => nil)}
+
+ before do
+ $stdout.stub!(:write)
+ GemService.any_instance.stub(:load_configuration_from_file).and_return configuration
+ EM.stub!(:run).and_yield
+ EM.stub!(:add_periodic_timer).and_yield
+ OriginatorLogger.stub!(:new).and_return logger
+ OriginatorProtocol.stub!(:new).and_return(protocol)
+ @service = GemService.new
+ @command = "install"
+ end
+
+ describe "#run" do
+ it "runs EventMachine loop and connect using configuration[:connection]" do
+ EM.should_receive(:run)
+ protocol.should_receive(:connect).once.ordered.with({:host => "host"}, anything)
+ @service.run @command
+ end
+
+ it "calls Protocol#send_message_to_listeners with version number" do
+ protocol.should_receive(:send_message_to_listeners).with(:gem_command, :gem_command => @command)
+ @service.run @command
+ end
+
+ it "adds a periodic timer that checks if there is any listener running command" do
+ EM.should_receive(:add_periodic_timer).with(GemService::TIMEOUT)
+ @service.run @command
+ end
+
+ context "when it receives a running_command message" do
+ before do
+ payload = {:type => "running_command", :hostname => "host"}
+ protocol.stub!(:receive_payloads).and_yield Yajl::Encoder.encode(payload)
+ end
+
+ it "writes to console" do
+ $stdout.should_receive(:write).with(/host/)
+ @service.run @command
+ end
+
+ it "won't disconnect as long as there is a host running_command" do
+ protocol.should_not_receive(:disconnect)
+ @service.run @command
+ end
+ end
+
+ context "when it receives a command_completed message" do
+ before do
+ running_command_payload = {:type => "running_command", :hostname => "host"}
+ complete_payload = {:type => "command_completed", :hostname => "host"}
+ protocol.stub!(:receive_payloads).and_yield(Yajl::Encoder.encode(running_command_payload))
+ .and_yield(Yajl::Encoder.encode(complete_payload))
+ end
+
+ it "writes to console" do
+ $stdout.should_receive(:write).at_least(:twice).with(/host/)
+ @service.run @command
+ end
+
+ it "disconnect since there is no host running command anymore" do
+ protocol.should_receive(:disconnect)
+ @service.run @command
+ end
+ end
+ end
+ end
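
The new gem_service_spec added in 0.3.0 describes a GemService that broadcasts a gem command to every listener over the originator protocol and only disconnects once no host reports itself as still running the command. For orientation, here is a rough sketch of a #run shape that would satisfy these expectations; it is not gorgon's source: the class and message names come from the spec, while the host bookkeeping, the TIMEOUT value, the connect options, the configuration filename, and the require paths are assumptions.

```ruby
require 'eventmachine'
require 'yajl'
# Require paths below are guesses at gorgon's layout.
require 'gorgon/configuration'
require 'gorgon/originator_protocol'
require 'gorgon/originator_logger'

class GemService
  include Configuration # assumed source of #load_configuration_from_file

  TIMEOUT = 3 # seconds between "any listener still running?" checks (assumed value)

  def initialize
    @configuration = load_configuration_from_file("gorgon.json") # filename assumed
    @logger = OriginatorLogger.new(@configuration[:originator_log_file])
    @protocol = OriginatorProtocol.new(@logger)
    @hosts = []
  end

  def run(command)
    EM.run do
      @protocol.connect(@configuration[:connection], :on_closed => proc { EM.stop })
      @protocol.send_message_to_listeners(:gem_command, :gem_command => command)

      @protocol.receive_payloads do |payload|
        handle_reply(Yajl::Parser.new(:symbolize_keys => true).parse(payload))
      end

      # Disconnect once no listener reports itself as still running the command.
      EM.add_periodic_timer(TIMEOUT) { disconnect if @hosts.empty? }
    end
  end

  private

  def handle_reply(payload)
    case payload[:type]
    when "running_command"   then @hosts << payload[:hostname]
    when "command_completed" then @hosts.delete(payload[:hostname])
    end
    $stdout.write("#{payload[:type]}: #{payload[:hostname]}\n")
  end

  def disconnect
    @protocol.disconnect
    EM.stop
  end
end
```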
@@ -123,13 +123,6 @@ describe JobState do
  end.should raise_error
  end

- it "raises if job was cancelled" do
- @job_state.cancel
- lambda do
- @job_state.file_finished payload
- end.should raise_error
- end
-
  it "tells to the proper HostState object that a file finished in that host" do
  HostState.stub!(:new).and_return host_state
  @job_state.file_started({:hostname => "hostname",
@@ -136,21 +136,45 @@ describe Listener do

  context "ping message pending on queue" do
  let(:ping_payload) {{
- :payload => Yajl::Encoder.encode({:type => "ping", :reply_exchange_name => "name"}) }}
+ :payload => Yajl::Encoder.encode({:type => "ping", :reply_exchange_name => "name",
+ :body => {}}) }}

  before do
  queue.stub!(:pop => ping_payload)
- end
+ listener.stub(:configuration).and_return({:worker_slots => 3})
+ end

  it "publishes ping_response message with Gorgon's version" do
  listener.should_not_receive(:run_job)
  bunny.should_receive(:exchange).with("name", anything).and_return(exchange)
  response = {:type => "ping_response", :hostname => Socket.gethostname,
- :version => Gorgon::VERSION}
+ :version => Gorgon::VERSION, :worker_slots => 3}
  exchange.should_receive(:publish).with(Yajl::Encoder.encode(response))
  listener.poll
  end
  end
+
+ context "gem_command message pending on queue" do
+ let(:command) { "install" }
+
+ let(:payload) {
+ {:type => "gem_command", :reply_exchange_name => "name",
+ :body => {:command => command}}
+ }
+
+ let(:gem_command_handler) { stub("GemCommandHandler", :handle => nil) }
+ let(:configuration) { {:worker_slots => 3} }
+ before do
+ queue.stub!(:pop => {:payload => Yajl::Encoder.encode(payload)})
+ listener.stub(:configuration).and_return(configuration)
+ end
+
+ it "calls GemCommandHandler#handle and pass payload" do
+ GemCommandHandler.should_receive(:new).with(bunny).and_return gem_command_handler
+ gem_command_handler.should_receive(:handle).with payload, configuration
+ listener.poll
+ end
+ end
  end

  describe "#run_job" do
@@ -194,23 +218,22 @@ describe Listener do
  end

  it "sends message to originator with output and errors from syncer" do
- reply = {:type => :crash, :hostname => "hostname", :stdout => "some output", :stderr => "some errors"}
- exchange.should_receive(:publish).with(Yajl::Encoder.encode(reply))
- @listener.run_job(reply)
+ @listener.should_receive(:send_crash_message).with exchange, "some output", "some errors"
+ @listener.run_job(payload)
  end
  end

- context "Worker Manager crahes" do
+ context "Worker Manager crashes" do
  before do
- process_status.should_receive(:exitstatus).and_return 1
+ process_status.should_receive(:exitstatus).and_return 2, 2
  end

- it "sends message to originator with output and errors from worker manager" do
- stdout.should_receive(:read).and_return "some output"
- stderr.should_receive(:read).and_return "some errors"
- reply = {:type => :crash, :hostname => "hostname", :stdout => "some output", :stderr => "some errors"}
- exchange.should_receive(:publish).with(Yajl::Encoder.encode(reply))
- @listener.run_job(reply)
+ it "report_crash with pid, exitstatus, stdout and stderr outputs" do
+ @listener.should_receive(:report_crash).with(exchange,
+ :out_file => WorkerManager::STDOUT_FILE,
+ :err_file => WorkerManager::STDERR_FILE,
+ :footer_text => Listener::ERROR_FOOTER_TEXT)
+ @listener.run_job(payload)
  end
  end

@@ -228,11 +251,6 @@ describe Listener do
  callback_handler.should_receive(:after_sync).once
  @listener.run_job(payload)
  end
-
- it "uses Bundler#with_clean_env so the workers load new gems that could have been installed in after_sync" do
- Bundler.should_receive(:with_clean_env).and_yield
- @listener.run_job(payload)
- end
  end

  private
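
In 0.3.0 the Listener no longer builds and publishes the crash payload inline; the examples above now expect a report_crash helper that receives the worker manager's output files plus a footer, and a send_crash_message(exchange, output, errors). Here is a sketch of how the two could fit together; the file handling and the footer value are assumptions, and only the message shape is taken from the removed expectations.

```ruby
require 'socket'
require 'yajl'

class Listener
  ERROR_FOOTER_TEXT = "\nSee the worker manager's output files for details" # placeholder text

  # Read the redirected output files and forward their contents as a crash message.
  def report_crash(exchange, options)
    output = File.read(options[:out_file])
    errors = File.read(options[:err_file]) + options[:footer_text]
    send_crash_message(exchange, output, errors)
  end

  def send_crash_message(exchange, output, errors)
    reply = {:type => :crash, :hostname => Socket.gethostname,
             :stdout => output, :stderr => errors}
    exchange.publish(Yajl::Encoder.encode(reply))
  end
end
```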
@@ -4,7 +4,7 @@ describe OriginatorProtocol do
  let(:connection) { stub("Connection", :disconnect => nil, :on_closed => nil)}
  let(:queue) { stub("Queue", :bind => nil, :subscribe => nil, :name => "queue", :purge => nil,
  :delete => nil) }
- let(:exchange) { stub("Exchange", :publish => nil, :name => "exchange") }
+ let(:exchange) { stub("Exchange", :publish => nil, :name => "exchange", :delete => nil) }
  let(:channel) { stub("Channel", :queue => queue, :direct => exchange, :fanout => exchange,
  :default_exchange => exchange) }
  let(:logger){ stub("Logger", :log => nil)}
@@ -39,13 +39,13 @@ describe OriginatorProtocol do

  it "opens a reply and exchange queue" do
  UUIDTools::UUID.stub!(:timestamp_create).and_return 1
- channel.should_receive(:queue).once.with("1")
+ channel.should_receive(:queue).once.with("reply_queue_1")
  @originator_p.connect @conn_information
  end

  it "opens a reply exchange and binds reply queue to it" do
  UUIDTools::UUID.stub!(:timestamp_create).and_return 1
- channel.should_receive(:direct).with("1")
+ channel.should_receive(:direct).with("reply_exchange_1")
  queue.should_receive(:bind).with(exchange)
  @originator_p.connect @conn_information
  end
@@ -82,16 +82,18 @@ describe OriginatorProtocol do
  end
  end

- describe "#ping_listeners" do
+ describe "#send_message_to_listeners" do
  before do
  @originator_p.connect @conn_information
  end

- it "adds reply_exchange_name to ping_messages and fanouts it using 'gorgon.jobs' exchange" do
- expected_msg = {:type => "ping", :reply_exchange_name => "exchange"}
+ it "adds type and reply_exchange_name to message and fanouts it using 'gorgon.jobs' exchange" do
+ expected_msg = {:type => :msg_type, :reply_exchange_name => "exchange",
+ :body => {:data => 'data'}}
+ Yajl::Encoder.should_receive(:encode).with(expected_msg).and_return :msg
  channel.should_receive(:fanout).once.ordered.with("gorgon.jobs")
- exchange.should_receive(:publish).once.ordered.with(Yajl::Encoder.encode(expected_msg))
- @originator_p.ping_listeners
+ exchange.should_receive(:publish).once.ordered.with(:msg)
+ @originator_p.send_message_to_listeners :msg_type, :data => 'data'
  end
  end

@@ -136,9 +138,10 @@ describe OriginatorProtocol do
  @originator_p.connect @conn_information
  end

- it "deletes reply and file queue" do
+ it "deletes reply_exchange and reply and file queues" do
  @originator_p.publish_files []
  queue.should_receive(:delete).twice
+ exchange.should_receive(:delete)
  @originator_p.disconnect
  end

@@ -5,7 +5,7 @@ describe "PingService" do
  let(:configuration){ {:connection => {:host => "host"}, :originator_log_file => "file.log"}}
  let(:protocol) { stub("OriginatorProtocol", :connect => nil, :ping => nil,
  :receive_payloads => nil, :disconnect => nil,
- :ping_listeners => nil)}
+ :send_message_to_listeners => nil)}
  let(:logger){ stub("Originator Logger", :log => nil, :log_message => nil)}

  before do
@@ -16,10 +16,10 @@ describe "PingService" do
  OriginatorLogger.stub!(:new).and_return logger
  end

- it "connnects and calls OriginatorProtocol#ping_listeners" do
+ it "connects and calls OriginatorProtocol#send_message_to_listeners" do
  OriginatorProtocol.should_receive(:new).once.ordered.and_return(protocol)
  protocol.should_receive(:connect).once.ordered.with({:host => "host"}, anything)
- protocol.should_receive(:ping_listeners).once.ordered
+ protocol.should_receive(:send_message_to_listeners).once.ordered
  PingService.new.ping_listeners
  end

@@ -0,0 +1,53 @@
+ require 'gorgon/pipe_forker'
+
+ describe PipeForker do
+ let(:io_pipe) { stub("IO object", :close => nil)}
+ let(:pipe) {stub("Pipe", :write => io_pipe)}
+
+ let(:container_class) do
+ Class.new do
+ extend(PipeForker)
+ end
+ end
+
+ before do
+ IO.stub!(:pipe).and_return([io_pipe, io_pipe])
+ STDIN.stub!(:reopen)
+ container_class.stub!(:fork).and_yield.and_return(1)
+ container_class.stub!(:exit)
+ end
+
+ describe ".pipe_fork" do
+ it "creates a new pipe" do
+ IO.should_receive(:pipe).once.and_return ([io_pipe,io_pipe])
+ container_class.pipe_fork { }
+ end
+
+ it "forks once" do
+ container_class.should_receive(:fork).and_yield
+ container_class.pipe_fork { }
+ end
+
+ it "closes both side of pipe inside child and read in parent" do
+ io_pipe.should_receive(:close).exactly(3).times
+ container_class.pipe_fork { }
+ end
+
+ it "reopens stdin with a pipe" do
+ STDIN.should_receive(:reopen).with(io_pipe)
+ container_class.pipe_fork { }
+ end
+
+ it "yields" do
+ has_yielded = false
+ container_class.pipe_fork { has_yielded = true }
+ has_yielded.should be_true
+ end
+
+ it "returns pid of new process and a pipe" do
+ pid, stdin = container_class.pipe_fork { }
+ pid.should be 1
+ stdin.should == io_pipe
+ end
+ end
+ end
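
This new spec drives a PipeForker.pipe_fork helper: fork a child whose STDIN is replaced by the read end of a fresh pipe, close the ends nobody needs (three closes in total across child and parent), and return the child's pid together with the write end. Below is a sketch consistent with those expectations; the close/reopen order is inferred from the examples, so gorgon's real method may differ.

```ruby
module PipeForker
  def pipe_fork
    read, write = IO.pipe

    pid = fork do
      write.close          # the child only reads
      STDIN.reopen(read)   # whatever the parent writes becomes the child's stdin
      read.close
      yield
      exit
    end

    read.close             # the parent keeps only the write end
    [pid, write]
  end
end
```

A caller such as the listener can then fork a worker manager and stream the serialized job definition down the returned write end, which the child sees on its STDIN.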
@@ -7,17 +7,46 @@ describe WorkerManager do
  let(:bunny) { stub("Bunny", :start => nil, :exchange => exchange,
  :queue => queue, :stop => nil) }
  before do
- Bunny.stub(:new).and_return(bunny)
- STDIN.should_receive(:read).and_return '{"source_tree_path":"path/to/source",
- "sync_exclude":["log"]}'
+ STDIN.stub!(:read).and_return "{}"
+ STDOUT.stub!(:reopen)
+ STDERR.stub!(:reopen)
+ STDOUT.stub!(:sync)
+ STDERR.stub!(:sync)
+ Bunny.stub!(:new).and_return(bunny)
+ Configuration.stub!(:load_configuration_from_file).and_return({})
+ EventMachine.stub!(:run).and_yield
  end

  describe ".build" do
  it "should load_configuration_from_file" do
- Configuration.stub!(:load_configuration_from_file).and_return({})
- Configuration.should_receive(:load_configuration_from_file).with("file.json")
+ STDIN.should_receive(:read).and_return '{"source_tree_path":"path/to/source",
+ "sync_exclude":["log"]}'
+
+ Configuration.should_receive(:load_configuration_from_file).with("file.json").and_return({})

  WorkerManager.build "file.json"
  end
+
+ it "redirect output to a file since writing to a pipe may block when pipe is full" do
+ File.should_receive(:open).with(WorkerManager::STDOUT_FILE, 'w').and_return(:file1)
+ STDOUT.should_receive(:reopen).with(:file1)
+ File.should_receive(:open).with(WorkerManager::STDERR_FILE, 'w').and_return(:file2)
+ STDERR.should_receive(:reopen).with(:file2)
+ WorkerManager.build ""
+ end
+
+ it "use STDOUT#sync to flush output immediately so if an exception happens, we can grab the last\
+ few lines of output and send it to originator. Order matters" do
+ STDOUT.should_receive(:reopen).once.ordered
+ STDOUT.should_receive(:sync=).with(true).once.ordered
+ WorkerManager.build ""
+ end
+
+ it "use STDERR#sync to flush output immediately so if an exception happens, we can grab the last\
+ few lines of output and send it to originator. Order matters" do
+ STDERR.should_receive(:reopen).once.ordered
+ STDERR.should_receive(:sync=).with(true).once.ordered
+ WorkerManager.build ""
+ end
  end
  end
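
The reworked WorkerManager spec expects .build to reopen STDOUT/STDERR onto files, since a full pipe back to the listener would block, and to set sync = true right after each reopen so that a crash still leaves the last few lines on disk for report_crash. Here is a minimal sketch of that stream handling, assuming placeholder file paths and an assumed constructor shape; it is not gorgon's source.

```ruby
require 'yajl'
require 'gorgon/configuration' # require path assumed

class WorkerManager
  STDOUT_FILE = '/tmp/gorgon-worker-manager.out' # placeholder path
  STDERR_FILE = '/tmp/gorgon-worker-manager.err' # placeholder path

  def self.build(listener_config_filename)
    config = Configuration.load_configuration_from_file(listener_config_filename)

    # Redirect to files first, then flush eagerly so the tail survives a crash.
    STDOUT.reopen(File.open(STDOUT_FILE, 'w'))
    STDOUT.sync = true
    STDERR.reopen(File.open(STDERR_FILE, 'w'))
    STDERR.sync = true

    job_definition = Yajl::Parser.new(:symbolize_keys => true).parse(STDIN.read)
    new(config, job_definition) # constructor arity is assumed
  end
end
```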
@@ -18,6 +18,9 @@ describe Worker do
  let(:fake_amqp) { fake_amqp = FakeAmqp.new file_queue, reply_exchange }
  let(:test_runner) { double("Test Runner") }
  let(:callback_handler) { stub("Callback Handler", :before_start => nil, :after_complete => nil) }
+ let(:job_definition) {stub("JobDefinition", :callbacks => ["/path/to/callback"],
+ :file_queue_name => "queue",
+ :reply_exchange_name => "exchange")}

  let(:params) {
  {
@@ -26,11 +29,61 @@ describe Worker do
  :reply_exchange_name => "exchange",
  :worker_id => WORKER_ID,
  :test_runner => test_runner,
- :callback_handler => callback_handler
+ :callback_handler => callback_handler,
+ :log_file => "path/to/log_file"
  }
  }

+ describe ".build" do
+ let(:config) { {:connection => "", :log_file => "path/to/log_file"} }
+ before do
+ stub_streams
+ AmqpService.stub!(:new).and_return fake_amqp
+ CallbackHandler.stub!(:new).and_return callback_handler
+ Worker.stub!(:new)
+ end
+
+ it "redirects output to a file since writing to a pipe may block when pipe is full" do
+ File.should_receive(:open).with(Worker.output_file(1, :out), 'w').and_return(:file1)
+ STDOUT.should_receive(:reopen).with(:file1)
+ File.should_receive(:open).with(Worker.output_file(1, :err), 'w').and_return(:file2)
+ STDERR.should_receive(:reopen).with(:file2)
+ Worker.build 1, config
+ end
+
+ it "use STDOUT#sync to flush output immediately so if an exception happens, we can grab the last\
+ few lines of output and send it to originator. Order matters" do
+ STDOUT.should_receive(:reopen).once.ordered
+ STDOUT.should_receive(:sync=).with(true).once.ordered
+ Worker.build 1, config
+ end
+
+ it "use STDERR#sync to flush output immediately so if an exception happens, we can grab the last\
+ few lines of output and send it to originator. Order matters" do
+ STDERR.should_receive(:reopen).once.ordered
+ STDERR.should_receive(:sync=).with(true).once.ordered
+ Worker.build 1, config
+ end
+
+ it "creates a JobDefinition using a payload written to stdin" do
+ STDIN.should_receive(:read).and_return '{ "key": "value" }'
+ JobDefinition.should_receive(:new).with({:key => "value"}).and_return job_definition
+ Worker.build 1, config
+ end
+
+ it "creates a new worker" do
+ JobDefinition.stub!(:new).and_return job_definition
+ stub_const("WorkUnit", test_runner)
+ Worker.should_receive(:new).with(params)
+ Worker.build 1, config
+ end
+ end
+
  describe '#work' do
+ before do
+ Worker.any_instance.stub(:initialize_logger)
+ end
+
  it 'should do nothing if the file queue is empty' do
  file_queue.should_receive(:pop).and_return(nil)

@@ -111,4 +164,13 @@ describe Worker do

  end

+ private
+
+ def stub_streams
+ STDIN.stub!(:read).and_return "{}"
+ STDOUT.stub!(:reopen)
+ STDERR.stub!(:reopen)
+ STDOUT.stub!(:sync)
+ STDERR.stub!(:sync)
+ end
  end