gorgon 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +8 -0
- data/Gemfile +4 -0
- data/Gemfile.lock +52 -0
- data/README.md +53 -0
- data/Rakefile +1 -0
- data/bin/gorgon +41 -0
- data/gorgon.gemspec +33 -0
- data/lib/gorgon.rb +6 -0
- data/lib/gorgon/amqp_service.rb +39 -0
- data/lib/gorgon/callback_handler.rb +21 -0
- data/lib/gorgon/configuration.rb +9 -0
- data/lib/gorgon/failures_printer.rb +37 -0
- data/lib/gorgon/g_logger.rb +22 -0
- data/lib/gorgon/host_state.rb +31 -0
- data/lib/gorgon/job.rb +26 -0
- data/lib/gorgon/job_definition.rb +24 -0
- data/lib/gorgon/job_state.rb +119 -0
- data/lib/gorgon/listener.rb +147 -0
- data/lib/gorgon/originator.rb +120 -0
- data/lib/gorgon/originator_logger.rb +36 -0
- data/lib/gorgon/originator_protocol.rb +65 -0
- data/lib/gorgon/pipe_manager.rb +55 -0
- data/lib/gorgon/progress_bar_view.rb +121 -0
- data/lib/gorgon/source_tree_syncer.rb +37 -0
- data/lib/gorgon/testunit_runner.rb +50 -0
- data/lib/gorgon/version.rb +3 -0
- data/lib/gorgon/worker.rb +103 -0
- data/lib/gorgon/worker_manager.rb +148 -0
- data/lib/gorgon/worker_watcher.rb +22 -0
- data/spec/callback_handler_spec.rb +77 -0
- data/spec/failures_printer_spec.rb +66 -0
- data/spec/host_state_spec.rb +65 -0
- data/spec/job_definition_spec.rb +20 -0
- data/spec/job_state_spec.rb +231 -0
- data/spec/listener_spec.rb +194 -0
- data/spec/originator_logger_spec.rb +40 -0
- data/spec/originator_protocol_spec.rb +134 -0
- data/spec/originator_spec.rb +134 -0
- data/spec/progress_bar_view_spec.rb +98 -0
- data/spec/source_tree_syncer_spec.rb +65 -0
- data/spec/worker_manager_spec.rb +23 -0
- data/spec/worker_spec.rb +114 -0
- metadata +270 -0
data/lib/gorgon/listener.rb
@@ -0,0 +1,147 @@
+require "gorgon/job_definition"
+require "gorgon/configuration"
+require 'gorgon/source_tree_syncer'
+require "gorgon/g_logger"
+require "gorgon/callback_handler"
+
+require "yajl"
+require "bunny"
+require "awesome_print"
+require "open4"
+require "tmpdir"
+require "socket"
+
+class Listener
+  include Configuration
+  include GLogger
+
+  def initialize
+    @listener_config_filename = Dir.pwd + "/gorgon_listener.json"
+    initialize_logger configuration[:log_file]
+
+    log "Listener initialized"
+    connect
+    initialize_personal_job_queue
+  end
+
+  def listen
+    log "Waiting for jobs..."
+    while true
+      sleep 2 unless poll
+    end
+  end
+
+  def connect
+    @bunny = Bunny.new(connection_information)
+    @bunny.start
+  end
+
+  def initialize_personal_job_queue
+    @job_queue = @bunny.queue("", :exclusive => true)
+    exchange = @bunny.exchange("gorgon.jobs", :type => :fanout)
+    @job_queue.bind(exchange)
+  end
+
+  def poll
+    message = @job_queue.pop
+    return false if message[:payload] == :queue_empty
+
+    run_job(message[:payload])
+
+    log "Waiting for more jobs..."
+    return true
+  end
+
+  def run_job(json_payload)
+    log "Job received: #{json_payload}"
+    payload = Yajl::Parser.new(:symbolize_keys => true).parse(json_payload)
+    @job_definition = JobDefinition.new(payload)
+    @reply_exchange = @bunny.exchange(@job_definition.reply_exchange_name)
+
+    @callback_handler = CallbackHandler.new(@job_definition.callbacks)
+    copy_source_tree(@job_definition.source_tree_path, @job_definition.sync_exclude)
+
+    if !run_after_sync
+      clean_up
+      return
+    end
+
+    Bundler.with_clean_env do
+      fork_worker_manager
+    end
+
+    clean_up
+  end
+
+  private
+
+  def run_after_sync
+    log "Running after_sync callback"
+    begin
+      @callback_handler.after_sync
+    rescue Exception => e
+      log_error "Exception raised when running after_sync callback_handler. Please, check your script in #{@job_definition.callbacks[:after_sync]}:"
+      log_error e.message
+      log_error "\n" + e.backtrace.join("\n")
+
+      reply = {:type => :crash,
+               :hostname => Socket.gethostname,
+               :message => "after_sync callback failed. Please, check your script in #{@job_definition.callbacks[:after_sync]}. Message: #{e.message}",
+               :backtrace => e.backtrace.join("\n")
+      }
+      @reply_exchange.publish(Yajl::Encoder.encode(reply))
+      return false
+    end
+    true
+  end
+
+  def copy_source_tree source_tree_path, exclude
+    log "Downloading source tree to temp directory..."
+    @syncer = SourceTreeSyncer.new source_tree_path
+    @syncer.exclude = exclude
+    if @syncer.sync
+      log "Command '#{@syncer.sys_command}' completed successfully."
+    else
+      #TODO handle error:
+      # - Discard job
+      # - Let the originator know about the error
+      # - Wait for the next job
+      log_error "Command '#{@syncer.sys_command}' failed!"
+    end
+  end
+
+  def clean_up
+    @syncer.remove_temp_dir
+  end
+
+  def fork_worker_manager
+    log "Forking Worker Manager"
+    ENV["GORGON_CONFIG_PATH"] = @listener_config_filename
+    pid, stdin, stdout, stderr = Open4::popen4 "bundle exec gorgon manage_workers"
+    stdin.write(@job_definition.to_json)
+    stdin.close
+
+    ignore, status = Process.waitpid2 pid
+    log "Worker Manager #{pid} finished"
+
+    if status.exitstatus != 0
+      log_error "Worker Manager #{pid} crashed with exit status #{status.exitstatus}!"
+      error_msg = stderr.read
+      log_error "ERROR MSG: #{error_msg}"
+
+      reply = {:type => :crash,
+               :hostname => Socket.gethostname,
+               :stdout => stdout.read,
+               :stderr => error_msg}
+      @reply_exchange.publish(Yajl::Encoder.encode(reply))
+    end
+  end
+
+  def connection_information
+    configuration[:connection]
+  end
+
+  def configuration
+    @configuration ||= load_configuration_from_file("gorgon_listener.json")
+  end
+end
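The listener resolves all of its settings from gorgon_listener.json in the working directory: configuration[:connection] is handed straight to Bunny.new and configuration[:log_file] to initialize_logger. A minimal sketch of the hash configuration has to return, written as Ruby for illustration (on disk the file is JSON); the keys come from the code above, the values are placeholders only:

# Sketch only -- keys mirror the lookups in Listener; values are made up.
{
  :connection => { :host => "localhost" },    # AMQP broker settings, passed verbatim to Bunny.new
  :log_file   => "/tmp/gorgon_listener.log"   # where GLogger writes
}
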
data/lib/gorgon/originator.rb
@@ -0,0 +1,120 @@
+require 'gorgon/originator_protocol'
+require 'gorgon/configuration'
+require 'gorgon/job_state'
+require 'gorgon/progress_bar_view'
+require 'gorgon/originator_logger'
+require 'gorgon/failures_printer'
+
+require 'awesome_print'
+
+class Originator
+  include Configuration
+
+  def initialize
+    @configuration = nil
+  end
+
+  def originate
+    begin
+      Signal.trap("INT") { ctrl_c }
+      Signal.trap("TERM") { ctrl_c }
+
+      publish
+      @logger.log "Originator finished successfully"
+    rescue Exception
+      puts "Unhandled exception in originator:"
+      puts $!.message
+      puts $!.backtrace.join("\n")
+      puts "----------------------------------"
+      puts "Now attempting to cancel the job."
+      @logger.log_error "Unhandled Exception!"
+      cancel_job
+    end
+  end
+
+  def ctrl_c
+    puts "\nCtrl-C received! Just wait a moment while I clean up..."
+    cancel_job
+  end
+
+  def cancel_job
+    @protocol.cancel_job
+    @job_state.cancel
+
+    @protocol.disconnect
+  end
+
+  def publish
+    @logger = OriginatorLogger.new configuration[:originator_log_file]
+    @protocol = OriginatorProtocol.new @logger
+
+    EventMachine.run do
+      @logger.log "Connecting..."
+      @protocol.connect connection_information, :on_closed => method(:on_disconnect)
+
+      @logger.log "Publishing files..."
+      @protocol.publish_files files
+      create_job_state_and_observers
+
+      @logger.log "Publishing Job..."
+      @protocol.publish_job job_definition
+      @logger.log "Job Published"
+
+      @protocol.receive_payloads do |payload|
+        handle_reply(payload)
+      end
+    end
+  end
+
+  def cleanup_if_job_complete
+    if @job_state.is_job_complete?
+      @logger.log "Job is done"
+      @protocol.disconnect
+    end
+  end
+
+  def handle_reply(payload)
+    payload = Yajl::Parser.new(:symbolize_keys => true).parse(payload)
+
+    # at some point this will probably need to be fancy polymorphic type based responses, or at least a nice switch statement
+    if payload[:action] == "finish"
+      @job_state.file_finished payload
+    elsif payload[:action] == "start"
+      @job_state.file_started payload
+    end
+    @logger.log_message payload
+    # Uncomment this to see each message received by originator
+    # ap payload
+
+    cleanup_if_job_complete
+  end
+
+  def create_job_state_and_observers
+    @job_state = JobState.new files.count
+    @progress_bar_view = ProgressBarView.new @job_state
+    @progress_bar_view.show
+    failures_printer = FailuresPrinter.new @job_state
+  end
+
+  def on_disconnect
+    EventMachine.stop
+  end
+
+  def connection_information
+    configuration[:connection]
+  end
+
+  def files
+    @files ||= configuration[:files].reduce([]) do |memo, obj|
+      memo.concat(Dir[obj])
+    end.uniq
+  end
+
+  def job_definition
+    JobDefinition.new(@configuration[:job])
+  end
+
+  def configuration
+    @configuration ||= load_configuration_from_file("gorgon.json")
+  end
+end
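The originator is configured the same way through gorgon.json: configuration[:connection], configuration[:originator_log_file], configuration[:files] (an array of globs expanded with Dir[]) and configuration[:job], which is passed to JobDefinition.new. A sketch of that hash with keys taken from the code above; the values, and the exact sub-keys of :job, are assumptions (JobDefinition lives in job_definition.rb, not reproduced here):

# Sketch only -- :job sub-keys are guesses based on what Listener reads
# from the JobDefinition (source_tree_path, sync_exclude, callbacks).
{
  :connection          => { :host => "localhost" },
  :originator_log_file => "/tmp/gorgon_originator.log",
  :files               => ["spec/**/*_spec.rb"],        # globs, expanded with Dir[]
  :job                 => { :source_tree_path => ".",
                            :sync_exclude     => [".git"],
                            :callbacks        => {} }
}
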
data/lib/gorgon/originator_logger.rb
@@ -0,0 +1,36 @@
+require 'gorgon/g_logger'
+
+class OriginatorLogger
+  include GLogger
+
+  def initialize log_file
+    initialize_logger log_file
+  end
+
+  def log_message(payload)
+    if payload[:action] == "start"
+      log("Started running '#{payload[:filename]}' at '#{payload[:hostname]}'")
+    elsif payload[:action] == "finish"
+      print_finish(payload)
+    else # to be removed
+      ap payload
+    end
+  end
+
+  private
+
+  def print_finish(payload)
+    msg = "Finished running '#{payload[:filename]}' at '#{payload[:hostname]}'"
+    msg << failure_message(payload[:failures]) if payload[:type] == "fail"
+    log msg
+  end
+
+  def failure_message(failures)
+    msg = []
+    failures.each do |failure|
+      msg << failure
+    end
+    msg << ''
+    msg.join("\n")
+  end
+end
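The payloads log_message receives are the same reply hashes Originator#handle_reply parses. An illustrative call (the path, file and host names are made up):

logger = OriginatorLogger.new "/tmp/gorgon_originator.log"   # any writable log path
logger.log_message(:action   => "start",
                   :filename => "spec/listener_spec.rb",
                   :hostname => "build-box-1")
# logs: Started running 'spec/listener_spec.rb' at 'build-box-1'
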
data/lib/gorgon/originator_protocol.rb
@@ -0,0 +1,65 @@
+require 'gorgon/job_definition'
+
+require 'amqp'
+require 'uuidtools'
+
+class OriginatorProtocol
+  def initialize logger
+    @logger = logger
+  end
+
+  def connect connection_information, options={}
+    @connection = AMQP.connect(connection_information)
+    @channel = AMQP::Channel.new(@connection)
+    @connection.on_closed { options[:on_closed].call } if options[:on_closed]
+    open_queues
+  end
+
+  def publish_files files
+    files.each do |file|
+      @channel.default_exchange.publish(file, :routing_key => @file_queue.name)
+    end
+  end
+
+  def publish_job job_definition
+    job_definition.file_queue_name = @file_queue.name
+    job_definition.reply_exchange_name = @reply_exchange.name
+
+    @channel.fanout("gorgon.jobs").publish(job_definition.to_json)
+  end
+
+  def receive_payloads
+    @reply_queue.subscribe do |payload|
+      yield payload
+    end
+  end
+
+  def cancel_job
+    @file_queue.purge
+    @channel.fanout("gorgon.worker_managers").publish(cancel_message)
+    @logger.log "Cancel Message sent"
+  end
+
+  def disconnect
+    cleanup_queues
+    @connection.disconnect
+  end
+
+  private
+
+  def open_queues
+    @reply_queue = @channel.queue(UUIDTools::UUID.timestamp_create.to_s)
+    @reply_exchange = @channel.direct(UUIDTools::UUID.timestamp_create.to_s)
+    @reply_queue.bind(@reply_exchange)
+    @file_queue = @channel.queue(UUIDTools::UUID.timestamp_create.to_s)
+  end
+
+  def cleanup_queues
+    @reply_queue.delete
+    @file_queue.delete
+  end
+
+  def cancel_message
+    Yajl::Encoder.encode({:action => "cancel_job"})
+  end
+end
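OriginatorProtocol sits on the EventMachine-based amqp gem, so it only does useful work inside a running reactor. A condensed sketch of the call sequence, mirroring Originator#publish above; the broker host is a placeholder and job_definition stands in for a real JobDefinition instance:

require 'gorgon/originator_protocol'
require 'gorgon/originator_logger'

logger = OriginatorLogger.new "/tmp/gorgon_originator.log"

EventMachine.run do
  protocol = OriginatorProtocol.new logger
  protocol.connect({ :host => "localhost" }, :on_closed => proc { EventMachine.stop })
  protocol.publish_files ["spec/a_spec.rb", "spec/b_spec.rb"]   # one message per file
  protocol.publish_job job_definition                           # job_definition: a JobDefinition (stand-in here)
  protocol.receive_payloads { |payload| puts payload }          # raw JSON replies from the listeners
end
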
data/lib/gorgon/pipe_manager.rb
@@ -0,0 +1,55 @@
+module PipeManager
+  private
+
+  def pipe_fork_worker
+    pid = fork do
+      bind_to_fifos
+      worker = Worker.build(@config)
+      worker.work
+      exit
+    end
+
+    fifo_in, fifo_out, fifo_err = wait_for_fifos pid
+
+    pipe_in = File.open(fifo_in, "w")
+    pipe_out = File.open(fifo_out)
+    pipe_err = File.open(fifo_err)
+
+    return pid, pipe_in, pipe_out, pipe_err
+  end
+
+  def pipe_file pid, stream
+    "#{pid}_#{stream}.pipe"
+  end
+
+  def bind_to_fifos
+    fifo_in = pipe_file $$, "in"
+    fifo_out = pipe_file $$, "out"
+    fifo_err = pipe_file $$, "err"
+
+    system("mkfifo '#{fifo_in}'")
+    system("mkfifo '#{fifo_out}'")
+    system("mkfifo '#{fifo_err}'")
+
+    @@old_in = $stdin
+    $stdin = File.open(fifo_in)
+
+    @@old_out = $stdout
+    $stdout = File.open(fifo_out, "w")
+
+    @@old_err = $stderr
+    $stderr = File.open(fifo_err, "w")
+  end
+
+  def wait_for_fifos pid
+    fifo_in = pipe_file pid, "in"
+    fifo_out = pipe_file pid, "out"
+    fifo_err = pipe_file pid, "err"
+
+    while !File.exist?(fifo_in) || !File.exist?(fifo_out) || !File.exist?(fifo_err) do
+      sleep 0.01
+    end
+
+    return fifo_in, fifo_out, fifo_err
+  end
+end
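PipeManager is mixed into the worker manager (worker_manager.rb in the file list). pipe_fork_worker forks a Worker, rebinds the child's standard streams to freshly created named pipes (<pid>_in.pipe, <pid>_out.pipe, <pid>_err.pipe) and returns the child pid together with the parent-side handles. A bare illustration of that contract; what actually travels over the pipes is up to Worker (worker.rb), not shown here:

# Inside a class that includes PipeManager (the worker manager is the real consumer):
pid, pipe_in, pipe_out, pipe_err = pipe_fork_worker
# write to the worker via pipe_in; read its output from pipe_out / pipe_err
pipe_in.close
Process.waitpid pid
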
data/lib/gorgon/progress_bar_view.rb
@@ -0,0 +1,121 @@
+require 'ruby-progressbar'
+require 'colorize'
+
+MAX_LENGTH = 200
+LOADING_MSG = "Loading environment and workers..."
+RUNNING_MSG = "Running files:"
+LEGEND_MSG = "Legend:\nF - failure files count\nH - number of hosts that have run files\nW - number of workers running files"
+
+FILENAME_COLOR = :light_cyan
+HOST_COLOR = :light_blue
+
+class ProgressBarView
+  def initialize job_state
+    @job_state = job_state
+    @job_state.add_observer(self)
+  end
+
+  def show
+    print LOADING_MSG
+  end
+
+  def update payload={}
+    create_progress_bar_if_started_job_running
+
+    return if @progress_bar.nil? || @finished
+
+    failed_files_count = @job_state.failed_files_count
+
+    @progress_bar.title="F: #{failed_files_count} H: #{@job_state.total_running_hosts} W: #{@job_state.total_running_workers}"
+    if failed_files_count > 0
+      @progress_bar.format(format(bar: :red, title: :default))
+    end
+
+    @progress_bar.progress = @job_state.finished_files_count
+
+    if @job_state.is_job_complete? || @job_state.is_job_cancelled?
+      @finished = true
+      print_summary
+    end
+  end
+
+  def create_progress_bar_if_started_job_running
+    if @progress_bar.nil? && @job_state.state == :running
+      print "\r#{' ' * (LOADING_MSG.length)}\r"
+      puts LEGEND_MSG
+      @progress_bar = ProgressBar.create(:total => @job_state.total_files,
+                                         :length => [terminal_size[0], MAX_LENGTH].min,
+                                         :format => format(bar: :green, title: :white));
+    end
+  end
+
+  private
+  def format colors
+    # TODO: decide what bar to use
+    # bar = "%b>%i".colorize(colors[:bar])
+    bar = "%w>%i".colorize(colors[:bar])
+    title = "%t".colorize(colors[:title])
+
+    "%e [#{bar}] %c/%C | #{title}"
+  end
+
+  def terminal_size
+    `stty size`.split.map { |x| x.to_i }.reverse
+  end
+
+  def print_summary
+    print_failed_tests
+    print_running_files
+    #TODO: print other stats: time, total file, total failures, etc
+  end
+
+  def print_failed_tests
+    @job_state.each_failed_test do |test|
+      puts "\n" + ('*' * 80).magenta #light_red
+      puts("File '#{test[:filename].colorize(FILENAME_COLOR)}' failed/crashed at " \
+           + "'#{test[:hostname].colorize(HOST_COLOR)}'\n")
+      msg = build_fail_message test[:failures]
+      puts "#{msg}\n"
+    end
+  end
+
+  def build_fail_message failures
+    result = []
+    failures.each do |failure|
+      if failure.is_a?(Hash)
+        result << build_fail_message_from_hash(failure)
+      else
+        result << build_fail_message_from_string(failure)
+      end
+    end
+
+    result.join("\n")
+  end
+
+  def print_running_files
+    title = "Unfinished files".yellow
+    puts "\n#{title} - The following files were still running:" if @job_state.total_running_workers > 0
+
+    @job_state.each_running_file do |hostname, filename|
+      filename_str = filename.dup.colorize(FILENAME_COLOR)
+      hostname_str = hostname.dup.colorize(HOST_COLOR)
+      puts "\t#{filename_str} at '#{hostname_str}'"
+    end
+  end
+
+  def build_fail_message_from_string failure
+    result = failure.gsub(/^Error:/, "Error:".yellow)
+    result.gsub!(/^Failure:/, "Failure:".red)
+    result
+  end
+
+  def build_fail_message_from_hash failure
+    result = "#{'Test name'.yellow}: #{failure[:test_name]}"
+    result << "\n#{'Message:'.yellow} \n#{failure[:message]}" if failure[:message]
+    if failure[:location]
+      result << "\n#{'In:'.yellow} \n\t"
+      result << failure[:location].join("\n\t")
+    end
+    result
+  end
+end
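ProgressBarView is a plain observer: it registers itself on the JobState (add_observer) and redraws in update whenever the state notifies it. Wiring it up is exactly what Originator#create_job_state_and_observers does above; repeated here only to show the shape of the collaboration (the file count is arbitrary):

require 'gorgon/job_state'
require 'gorgon/progress_bar_view'

job_state = JobState.new 120                  # total number of files in the job
progress  = ProgressBarView.new job_state     # registers itself as an observer of job_state
progress.show                                 # prints LOADING_MSG until the job starts running
# every reply the originator handles updates job_state, which notifies its
# observers and drives ProgressBarView#update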