gorgon 0.0.1
- data/.gitignore +8 -0
- data/Gemfile +4 -0
- data/Gemfile.lock +52 -0
- data/README.md +53 -0
- data/Rakefile +1 -0
- data/bin/gorgon +41 -0
- data/gorgon.gemspec +33 -0
- data/lib/gorgon.rb +6 -0
- data/lib/gorgon/amqp_service.rb +39 -0
- data/lib/gorgon/callback_handler.rb +21 -0
- data/lib/gorgon/configuration.rb +9 -0
- data/lib/gorgon/failures_printer.rb +37 -0
- data/lib/gorgon/g_logger.rb +22 -0
- data/lib/gorgon/host_state.rb +31 -0
- data/lib/gorgon/job.rb +26 -0
- data/lib/gorgon/job_definition.rb +24 -0
- data/lib/gorgon/job_state.rb +119 -0
- data/lib/gorgon/listener.rb +147 -0
- data/lib/gorgon/originator.rb +120 -0
- data/lib/gorgon/originator_logger.rb +36 -0
- data/lib/gorgon/originator_protocol.rb +65 -0
- data/lib/gorgon/pipe_manager.rb +55 -0
- data/lib/gorgon/progress_bar_view.rb +121 -0
- data/lib/gorgon/source_tree_syncer.rb +37 -0
- data/lib/gorgon/testunit_runner.rb +50 -0
- data/lib/gorgon/version.rb +3 -0
- data/lib/gorgon/worker.rb +103 -0
- data/lib/gorgon/worker_manager.rb +148 -0
- data/lib/gorgon/worker_watcher.rb +22 -0
- data/spec/callback_handler_spec.rb +77 -0
- data/spec/failures_printer_spec.rb +66 -0
- data/spec/host_state_spec.rb +65 -0
- data/spec/job_definition_spec.rb +20 -0
- data/spec/job_state_spec.rb +231 -0
- data/spec/listener_spec.rb +194 -0
- data/spec/originator_logger_spec.rb +40 -0
- data/spec/originator_protocol_spec.rb +134 -0
- data/spec/originator_spec.rb +134 -0
- data/spec/progress_bar_view_spec.rb +98 -0
- data/spec/source_tree_syncer_spec.rb +65 -0
- data/spec/worker_manager_spec.rb +23 -0
- data/spec/worker_spec.rb +114 -0
- metadata +270 -0
data/spec/failures_printer_spec.rb
@@ -0,0 +1,66 @@
require 'gorgon/failures_printer'

describe FailuresPrinter do
  let(:job_state) { stub("Job State", :add_observer => nil,
                         :is_job_complete? => true, :is_job_cancelled? => false,
                         :each_failed_test => nil,
                         :each_running_file => nil) }
  let(:fd) { stub("File descriptor", :write => nil) }

  subject do
    FailuresPrinter.new(job_state)
  end

  it { should respond_to :update }

  describe "#initialize" do
    it "adds itself to the observers of job_state" do
      job_state.should_receive(:add_observer)
      FailuresPrinter.new job_state
    end
  end

  describe "#update" do
    before do
      @printer = FailuresPrinter.new job_state
    end

    context "job is not completed nor cancelled" do
      it "doesn't output anything" do
        job_state.stub!(:is_job_complete? => false)
        File.should_not_receive(:open)
        @printer.update({})
      end
    end

    context "job is completed" do
      it "outputs failed tests returned by job_state#each_failed_test" do
        job_state.stub!(:each_failed_test).and_yield({:filename => "file1.rb"}).and_yield({:filename => "file2.rb"})
        File.should_receive(:open).with(FailuresPrinter::OUTPUT_FILE, 'w+').and_yield fd
        fd.should_receive(:write).with(Yajl::Encoder.encode(["file1.rb", "file2.rb"]))
        @printer.update({})
      end
    end

    context "job is cancelled" do
      before do
        job_state.stub!(:is_job_complete?).and_return(false)
        job_state.stub!(:is_job_cancelled?).and_return(true)
      end

      it "outputs failed tests returned by job_state#each_failed_test" do
        job_state.stub!(:each_failed_test).and_yield({:filename => "file1.rb"}).and_yield({:filename => "file2.rb"})
        File.should_receive(:open).with(FailuresPrinter::OUTPUT_FILE, 'w+').and_yield fd
        fd.should_receive(:write).once.with(Yajl::Encoder.encode(["file1.rb", "file2.rb"]))
        @printer.update({})
      end

      it "outputs still-running files returned by job_state#each_running_file" do
        job_state.stub!(:each_running_file).and_yield("host1", "file1.rb").and_yield("host2", "file2.rb")
        File.stub!(:open).and_yield fd
        fd.should_receive(:write).once.with(Yajl::Encoder.encode(["file1.rb", "file2.rb"]))
        @printer.update({})
      end
    end
  end
end
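The implementation under test ships in data/lib/gorgon/failures_printer.rb (+37 lines), which is not included in this excerpt. A minimal sketch consistent with the spec follows; the OUTPUT_FILE value, the Observable-style wiring, and the single JSON write are assumptions, not the gem's actual code.

require 'yajl'

# Hypothetical sketch of FailuresPrinter; see the caveats above.
class FailuresPrinter
  OUTPUT_FILE = "gorgon-failed-files.json"   # constant name comes from the spec; value is assumed

  def initialize(job_state)
    @job_state = job_state
    @job_state.add_observer(self)            # registers for #update notifications
  end

  # Invoked by JobState through the observer protocol.
  def update(_payload)
    return unless @job_state.is_job_complete? || @job_state.is_job_cancelled?

    files = []
    @job_state.each_failed_test { |test| files << test[:filename] }
    @job_state.each_running_file { |_host, file| files << file }

    File.open(OUTPUT_FILE, 'w+') { |fd| fd.write Yajl::Encoder.encode(files) }
  end
end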
data/spec/host_state_spec.rb
@@ -0,0 +1,65 @@
require 'gorgon/host_state'

describe HostState do
  it { should respond_to(:file_started).with(2).arguments }
  it { should respond_to(:file_finished).with(2).arguments }
  it { should respond_to(:each_running_file).with(0).argument }
  it { should respond_to(:total_running_workers).with(0).argument }

  before do
    @host_state = HostState.new
  end

  describe "#total_running_workers" do
    it "returns 0 if there are no workers running files" do
      @host_state.total_running_workers.should == 0
    end

    it "returns 1 if #file_started was called but #file_finished has not been called for that worker id" do
      @host_state.file_started "worker1", "path/to/file.rb"
      @host_state.total_running_workers.should == 1
    end

    it "returns 0 if #file_started and #file_finished were called for the same worker id" do
      @host_state.file_started "worker1", "path/to/file.rb"
      @host_state.file_finished "worker1", "path/to/file.rb"
      @host_state.total_running_workers.should == 0
    end

    it "returns 1 if #file_started and #file_finished were called for different worker ids (worker1 still running)" do
      @host_state.file_started "worker1", "path/to/file.rb"
      @host_state.file_started "worker2", "path/to/file2.rb"
      @host_state.file_finished "worker2", "path/to/file2.rb"
      @host_state.total_running_workers.should == 1
    end
  end

  describe "#each_running_file" do
    before do
      @host_state.file_started "worker1", "path/to/file1.rb"
      @host_state.file_started "worker2", "path/to/file2.rb"
    end

    context "when #file_finished has not been called" do
      it "yields each currently running file" do
        files = []
        @host_state.each_running_file do |file|
          files << file
        end
        files.should == ["path/to/file1.rb", "path/to/file2.rb"]
      end
    end

    context "when #file_finished has been called for one of the workers" do
      it "yields each currently running file" do
        @host_state.file_finished "worker2", "path/to/file2.rb"

        files = []
        @host_state.each_running_file do |file|
          files << file
        end
        files.should == ["path/to/file1.rb"]
      end
    end
  end
end
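HostState itself lives in data/lib/gorgon/host_state.rb (+31 lines), not shown here. A minimal sketch that would satisfy the behaviour above might look like this; the hash-based bookkeeping is an assumption:

# Hypothetical sketch of HostState: tracks which file each worker on a host is running.
class HostState
  def initialize
    @running_files = {}   # worker_id => filename
  end

  def file_started(worker_id, filename)
    @running_files[worker_id] = filename
  end

  def file_finished(worker_id, _filename)
    @running_files.delete(worker_id)
  end

  def each_running_file
    @running_files.each_value { |file| yield file }
  end

  def total_running_workers
    @running_files.size
  end
end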
data/spec/job_definition_spec.rb
@@ -0,0 +1,20 @@
require 'gorgon/job_definition'
require 'yajl'

describe JobDefinition do
  before(:all) do
    @json_parser = Yajl::Parser.new(:symbolize_keys => true)
  end

  describe "#to_json" do
    it "should serialize itself to json" do
      expected_hash = {:file_queue_name => "string 1", :reply_exchange_name => "string 2",
                       :source_tree_path => "string 3", :sync_exclude => "string 4", :callbacks => {}}

      jd = JobDefinition.new(expected_hash)

      @json_parser.parse(jd.to_json).should == expected_hash
    end
  end
end
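The class being serialized is data/lib/gorgon/job_definition.rb (+24 lines), not part of this excerpt. A sketch that would pass the round-trip test above, assuming Yajl is used for encoding and the attribute list is exactly the one in expected_hash:

require 'yajl'

# Hypothetical sketch of JobDefinition; attribute list inferred from the spec's expected_hash.
class JobDefinition
  ATTRIBUTES = [:file_queue_name, :reply_exchange_name,
                :source_tree_path, :sync_exclude, :callbacks]

  attr_reader(*ATTRIBUTES)

  def initialize(opts = {})
    ATTRIBUTES.each { |attr| instance_variable_set("@#{attr}", opts[attr]) }
  end

  def to_json
    hash = ATTRIBUTES.each_with_object({}) { |attr, h| h[attr] = send(attr) }
    Yajl::Encoder.encode(hash)
  end
end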
data/spec/job_state_spec.rb
@@ -0,0 +1,231 @@
require 'gorgon/job_state'

describe JobState do
  let(:payload) {
    {:hostname => "host-name", :worker_id => "worker1", :filename => "path/file.rb",
     :type => "pass", :failures => []}
  }

  let(:host_state) { stub("Host State", :file_started => nil, :file_finished => nil) }

  subject { JobState.new 5 }
  it { should respond_to :failed_files_count }
  it { should respond_to :finished_files_count }
  it { should respond_to(:file_started).with(1).argument }
  it { should respond_to(:file_finished).with(1).argument }
  it { should respond_to :cancel }
  it { should respond_to :each_failed_test }
  it { should respond_to :each_running_file }
  it { should respond_to :total_running_hosts }
  it { should respond_to :total_running_workers }
  it { should respond_to :is_job_complete? }
  it { should respond_to :is_job_cancelled? }

  before do
    @job_state = JobState.new 5
  end

  describe "#initialize" do
    it "sets total files for job" do
      @job_state.total_files.should be 5
    end

    it "sets remaining_files_count" do
      @job_state.remaining_files_count.should be 5
    end

    it "sets failed_files_count to 0" do
      @job_state.failed_files_count.should be 0
    end

    it "sets state to starting" do
      @job_state.state.should be :starting
    end
  end

  describe "#finished_files_count" do
    it "returns total_files - remaining_files_count" do
      @job_state.finished_files_count.should be 0
    end
  end

  describe "#file_started" do
    it "changes state to running after the first start_file_message is received" do
      @job_state.file_started({})
      @job_state.state.should be :running
    end

    it "creates a new HostState object if this is the first file started by 'hostname'" do
      HostState.should_receive(:new).and_return host_state
      @job_state.file_started(payload)
    end

    it "doesn't create a new HostState object if this is not the first file started by 'hostname'" do
      HostState.stub!(:new).and_return host_state
      @job_state.file_started(payload)
      HostState.should_not_receive(:new)
      @job_state.file_started(payload)
    end

    it "calls #file_started on the HostState object representing 'hostname'" do
      HostState.stub!(:new).and_return host_state
      host_state.should_receive(:file_started).with("worker_id", "file_name")
      @job_state.file_started({:hostname => "hostname",
                               :worker_id => "worker_id",
                               :filename => "file_name"})
    end

    it "notifies observers" do
      @job_state.should_receive :notify_observers
      @job_state.should_receive :changed
      @job_state.file_started({})
    end
  end

  describe "#file_finished" do
    before do
      HostState.stub!(:new).and_return host_state
      @job_state.file_started payload
    end

    it "decreases remaining_files_count" do
      lambda do
        @job_state.file_finished payload
      end.should(change(@job_state, :remaining_files_count).by(-1))

      @job_state.total_files.should be 5
    end

    it "doesn't change failed_files_count if the test result type is pass" do
      lambda do
        @job_state.file_finished payload
      end.should_not change(@job_state, :failed_files_count)
      @job_state.failed_files_count.should be 0
    end

    it "increments failed_files_count if the type is fail" do
      lambda do
        @job_state.file_finished payload.merge({:type => "fail", :failures => ["Failure messages"]})
      end.should change(@job_state, :failed_files_count).by(1)
    end

    it "notifies observers" do
      @job_state.should_receive :notify_observers
      @job_state.should_receive :changed
      @job_state.file_finished payload
    end

    it "raises if the job is already complete" do
      finish_job
      lambda do
        @job_state.file_finished payload
      end.should raise_error
    end

    it "raises if the job was cancelled" do
      @job_state.cancel
      lambda do
        @job_state.file_finished payload
      end.should raise_error
    end

    it "tells the proper HostState object that a file finished on that host" do
      HostState.stub!(:new).and_return host_state
      @job_state.file_started({:hostname => "hostname",
                               :worker_id => "worker_id",
                               :filename => "file_name"})
      host_state.should_receive(:file_finished).with("worker_id", "file_name")
      @job_state.file_finished({:hostname => "hostname",
                                :worker_id => "worker_id",
                                :filename => "file_name"})
    end
  end

  describe "#is_job_complete?" do
    it "returns false if remaining_files_count != 0" do
      @job_state.is_job_complete?.should be_false
    end

    it "returns true if remaining_files_count == 0" do
      finish_job
      @job_state.is_job_complete?.should be_true
    end
  end

  describe "#cancel and #is_job_cancelled?" do
    it "cancels the job" do
      @job_state.is_job_cancelled?.should be_false
      @job_state.cancel
      @job_state.is_job_cancelled?.should be_true
    end

    it "notifies observers when cancelling" do
      @job_state.should_receive :changed
      @job_state.should_receive :notify_observers
      @job_state.cancel
    end
  end

  describe "#each_failed_test" do
    before do
      @job_state.file_started payload
    end

    it "returns failed tests info" do
      @job_state.file_finished payload.merge({:type => "fail", :failures => ["Failure messages"]})
      @job_state.each_failed_test do |test|
        test[:failures].should == ["Failure messages"]
      end
    end
  end

  describe "#each_running_file" do
    before do
      @job_state.file_started payload
      @job_state.file_started payload.merge({:hostname => "host2",
                                             :filename => "path/file2.rb",
                                             :worker_id => "worker2"})
    end

    it "returns each running file" do
      hosts_files = {}
      @job_state.each_running_file do |hostname, filename|
        hosts_files[hostname] = filename
      end
      hosts_files.size.should == 2
      hosts_files["host-name"].should == "path/file.rb"
      hosts_files["host2"].should == "path/file2.rb"
    end
  end

  describe "#total_running_hosts" do
    it "returns the total number of hosts that have workers running files" do
      @job_state.file_started payload
      @job_state.file_started payload.merge({:worker_id => "worker2"})
      @job_state.file_started payload.merge({:hostname => "host2",
                                             :filename => "path/file2.rb",
                                             :worker_id => "worker1"})
      @job_state.total_running_hosts.should == 2
    end
  end

  describe "#total_running_workers" do
    it "returns the total number of workers running across all hosts" do
      @job_state.file_started payload
      @job_state.file_started payload.merge({:worker_id => "worker2"})
      @job_state.file_started payload.merge({:hostname => "host2",
                                             :filename => "path/file2.rb",
                                             :worker_id => "worker1"})
      @job_state.total_running_workers.should == 3
    end
  end

  private

  def finish_job
    5.times do
      @job_state.file_started payload
      @job_state.file_finished payload
    end
  end
end
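JobState is implemented in data/lib/gorgon/job_state.rb (+119 lines), not included here. The sketch below shows bookkeeping that would satisfy the spec; the use of Ruby's Observable module, the raise message, and the exact set of states (:starting, :running, :cancelled) are assumptions.

require 'observer'
require 'gorgon/host_state'

# Hypothetical sketch of JobState; see the caveats above.
class JobState
  include Observable

  attr_reader :total_files, :remaining_files_count, :failed_files_count, :state

  def initialize(total_files)
    @total_files = total_files
    @remaining_files_count = total_files
    @failed_files_count = 0
    @state = :starting
    @hosts = {}          # hostname => HostState
    @failed_tests = []
  end

  def finished_files_count
    total_files - remaining_files_count
  end

  def file_started(payload)
    @state = :running
    if (hostname = payload[:hostname])
      host_state = (@hosts[hostname] ||= HostState.new)
      host_state.file_started payload[:worker_id], payload[:filename]
    end
    changed
    notify_observers payload
  end

  def file_finished(payload)
    raise "job is already complete or was cancelled" if is_job_complete? || is_job_cancelled?
    @remaining_files_count -= 1
    if payload[:type] == "fail"
      @failed_files_count += 1
      @failed_tests << payload
    end
    @hosts[payload[:hostname]].file_finished payload[:worker_id], payload[:filename]
    changed
    notify_observers payload
  end

  def cancel
    @state = :cancelled
    changed
    notify_observers
  end

  def is_job_complete?
    remaining_files_count == 0
  end

  def is_job_cancelled?
    @state == :cancelled
  end

  def each_failed_test
    @failed_tests.each { |test| yield test }
  end

  def each_running_file
    @hosts.each do |hostname, host_state|
      host_state.each_running_file { |file| yield hostname, file }
    end
  end

  def total_running_hosts
    @hosts.size
  end

  def total_running_workers
    @hosts.values.inject(0) { |sum, host_state| sum + host_state.total_running_workers }
  end
end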
data/spec/listener_spec.rb
@@ -0,0 +1,194 @@
require 'gorgon/listener'

describe Listener do
  let(:connection_information) { double }
  let(:queue) { stub("Bunny Queue", :bind => nil) }
  let(:exchange) { stub("Bunny Exchange") }
  let(:bunny) { stub("Bunny", :start => nil, :queue => queue, :exchange => exchange) }

  before do
    Bunny.stub(:new).and_return(bunny)
    Listener.any_instance.stub(:configuration => {})
    Listener.any_instance.stub(:connection_information => connection_information)
    @stub_logger = stub :info => true, :datetime_format= => ""
    Logger.stub(:new).and_return(@stub_logger)
  end

  describe "initialization" do

    before do
      Listener.any_instance.stub(:connect => nil, :initialize_personal_job_queue => nil)
    end

    it "connects" do
      Listener.any_instance.should_receive(:connect)
      Listener.new
    end

    it "initializes the personal job queue" do
      Listener.any_instance.should_receive(:initialize_personal_job_queue)
      Listener.new
    end
  end

  describe "logging to a file" do
    context "passing a log file path in the configuration" do
      before do
        Listener.any_instance.stub(:configuration).and_return({:log_file => 'listener.log'})
      end

      it "should use 'log_file' from the configuration as the log file" do
        Logger.should_receive(:new).with('listener.log')
        Listener.new
      end

      it "should log to 'log_file'" do
        @stub_logger.should_receive(:info).with("Listener initialized")

        Listener.new
      end
    end

    context "passing a literal '-' as the path in the configuration" do
      before do
        Listener.any_instance.stub(:configuration).and_return({:log_file => "-"})
      end

      it "logs to stdout" do
        Logger.should_receive(:new).with($stdout)
        Listener.new
      end
    end

    context "without specifying a log file path" do
      it "should not log" do
        Logger.should_not_receive(:new)
        @stub_logger.should_not_receive(:info)

        Listener.new
      end
    end
  end

  context "initialized" do
    let(:listener) { Listener.new }

    describe "#connect" do
      it "connects" do
        Bunny.should_receive(:new).with(connection_information).and_return(bunny)
        bunny.should_receive(:start)

        listener.connect
      end
    end

    describe "#initialize_personal_job_queue" do
      it "creates the job queue" do
        bunny.should_receive(:queue).with("", :exclusive => true)
        listener.initialize_personal_job_queue
      end

      it "binds the exchange to the queue" do
        bunny.should_receive(:exchange).with("gorgon.jobs", :type => :fanout).and_return(exchange)
        queue.should_receive(:bind).with(exchange)
        listener.initialize_personal_job_queue
      end
    end

    describe "#poll" do

      let(:empty_queue) { {:payload => :queue_empty} }
      let(:job_payload) { {:payload => "Job"} }
      before do
        listener.stub(:run_job)
      end

      context "empty queue" do
        before do
          queue.stub(:pop => empty_queue)
        end

        it "checks the job queue" do
          queue.should_receive(:pop).and_return(empty_queue)
          listener.poll
        end

        it "returns false" do
          listener.poll.should be_false
        end
      end

      context "job pending on queue" do
        before do
          queue.stub(:pop => job_payload)
        end

        it "starts a new job when there is a job payload" do
          queue.should_receive(:pop).and_return(job_payload)
          listener.should_receive(:run_job).with(job_payload[:payload])
          listener.poll
        end

        it "returns true" do
          listener.poll.should be_true
        end
      end
    end

    describe "#run_job" do
      let(:payload) {{
        :source_tree_path => "path/to/source",
        :sync_exclude => ["log"], :callbacks => {:a_callback => "path/to/callback"}
      }}

      let(:syncer) { stub("SourceTreeSyncer", :sync => nil, :exclude= => nil,
                          :remove_temp_dir => nil, :sys_command => "rsync ...") }

      let(:io) { stub("IO object", :write => nil, :close => nil) }
      let(:process_status) { stub("Process Status", :exitstatus => 0) }
      let(:callback_handler) { stub("Callback Handler", :after_sync => nil) }

      before do
        @listener = Listener.new
        @json_payload = Yajl::Encoder.encode(payload)
        stub_classes
      end

      it "copies the source tree" do
        SourceTreeSyncer.should_receive(:new).once.with("path/to/source").and_return syncer
        syncer.should_receive(:exclude=).with(["log"])
        syncer.should_receive(:sync)
        @listener.run_job(@json_payload)
      end

      it "removes the temp source directory when complete" do
        syncer.should_receive(:remove_temp_dir)
        @listener.run_job(@json_payload)
      end

      it "creates a CallbackHandler object using the callbacks passed in the payload" do
        CallbackHandler.should_receive(:new).once.with({:a_callback => "path/to/callback"}).and_return(callback_handler)
        @listener.run_job(@json_payload)
      end

      it "calls the after_sync callback" do
        callback_handler.should_receive(:after_sync).once
        @listener.run_job(@json_payload)
      end

      it "uses Bundler#with_clean_env so the workers load new gems that could have been installed in after_sync" do
        Bundler.should_receive(:with_clean_env).and_yield
        @listener.run_job(@json_payload)
      end
    end

    private

    def stub_classes
      SourceTreeSyncer.stub!(:new).and_return syncer
      CallbackHandler.stub!(:new).and_return callback_handler
      Open4.stub!(:popen4).and_return([1, io])
      Process.stub!(:waitpid2).and_return([0, process_status])
    end
  end
end
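Listener itself is data/lib/gorgon/listener.rb (+147 lines), the largest file in this list and not shown in the excerpt. A rough sketch of the polling loop the spec exercises follows; it assumes the synchronous Bunny 0.x API (queue#pop returning a hash with :payload), and the configuration and connection_information readers are stand-ins for config loading that the spec stubs out. The body of run_job is heavily abbreviated and should not be read as the gem's actual worker-spawning code.

require 'bunny'
require 'bundler'
require 'logger'
require 'yajl'
require 'gorgon/source_tree_syncer'
require 'gorgon/callback_handler'

# Hypothetical sketch of Listener; see the caveats above.
class Listener
  def initialize
    @logger = initialize_logger
    connect
    initialize_personal_job_queue
    log "Listener initialized"
  end

  def connect
    @bunny = Bunny.new(connection_information)
    @bunny.start
  end

  def initialize_personal_job_queue
    @queue = @bunny.queue("", :exclusive => true)
    exchange = @bunny.exchange("gorgon.jobs", :type => :fanout)
    @queue.bind(exchange)
  end

  # Returns true when a job was picked up, false when the queue was empty.
  def poll
    message = @queue.pop
    return false if message[:payload] == :queue_empty
    run_job(message[:payload])
    true
  end

  def run_job(json_payload)
    payload = Yajl::Parser.new(:symbolize_keys => true).parse(json_payload)

    syncer = SourceTreeSyncer.new(payload[:source_tree_path])
    syncer.exclude = payload[:sync_exclude]
    syncer.sync

    callback_handler = CallbackHandler.new(payload[:callbacks])
    callback_handler.after_sync

    Bundler.with_clean_env do
      # fork/exec the worker manager here (elided in this sketch)
    end

    syncer.remove_temp_dir
  end

  private

  # In the real gem these come from a configuration file; the spec stubs them out.
  def configuration
    @configuration ||= {}
  end

  def connection_information
    configuration[:connection_information]
  end

  def initialize_logger
    case configuration[:log_file]
    when nil then nil
    when "-" then Logger.new($stdout)
    else Logger.new(configuration[:log_file])
    end
  end

  def log(message)
    @logger.info(message) if @logger
  end
end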