hive-runner 2.0.0

@@ -0,0 +1,32 @@
+ module Hive
+   class DiagnosticRunner
+     attr_accessor :diagnostics, :options
+
+     def initialize(options, diagnostics_config, platform)
+       @options = options
+       @platform = platform
+       @diagnostics = self.initialize_diagnostics(diagnostics_config[@platform]) if diagnostics_config.has_key?(@platform)
+     end
+
+     def initialize_diagnostics(diagnostics_config)
+       if diagnostics_config
+         @diagnostics = diagnostics_config.collect do |component, config|
+           Hive.logger.info("Initializing #{component.capitalize} component for #{@platform.capitalize} diagnostic")
+           require "hive/diagnostic/#{@platform}/#{component}"
+           Object.const_get('Hive').const_get('Diagnostic').const_get(@platform.capitalize).const_get(component.capitalize).new(config, @options)
+         end
+       else
+         Hive.logger.info("No diagnostic specified for #{@platform}")
+       end
+     end
+
+     def run
+       results = @diagnostics.collect do |diagnostic|
+         diagnostic.run
+       end
+
+       failures = results.select { |r| r.failed? }
+       failures.count == 0
+     end
+   end
+ end
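
A minimal sketch of how the runner is driven, assuming a hypothetical 'shell' platform with a 'memory' diagnostic component; the config keys, threshold value and the resulting Hive::Diagnostic::Shell::Memory class are illustrative, not part of this release:

  # Worker options and the diagnostics section of the config (illustrative).
  options = {}
  diagnostics_config = {
    'shell' => { 'memory' => { 'threshold' => 1024 } }
  }

  # Requires hive/diagnostic/shell/memory and instantiates
  # Hive::Diagnostic::Shell::Memory.new(config, options) for each component.
  runner = Hive::DiagnosticRunner.new(options, diagnostics_config, 'shell')

  # True only if no diagnostic result reports a failure.
  runner.run
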
@@ -0,0 +1,121 @@
+ require 'timeout'
+
+ module Hive
+   class ExecutionScript
+     def initialize(config)
+       @path = config[:file_system].executed_script_path
+       @log_path = config[:file_system].logs_path
+       @log = config[:log]
+       @keep_running = config[:keep_running]
+       @log.debug "Creating execution script with path=#{@path}"
+       @env = {
+         'HIVE_SCHEDULER' => Hive.config.network.scheduler,
+         'HIVE_WORKING_DIRECTORY' => config[:file_system].testbed_path
+       }
+       @env_unset = [
+         'BUNDLE_GEMFILE',
+         'BUNDLE_BIN_PATH',
+         'GEM_PATH',
+         'RUBYOPT',
+         'rvm_'
+       ]
+       # Environment variables that should not be made visible in the execution
+       # script uploaded with the results
+       @env_secure = {
+         'HIVE_CERT' => Hive.config.network.cert
+       }
+       @script_lines = []
+     end
+
+     def prepend_bash_cmd(shell_command)
+       @log.debug "bash.rb - Prepending bash command to #{@path} script: " + shell_command
+       @script_lines = ([] << shell_command << @script_lines).flatten
+     end
+
+     def append_bash_cmd(shell_command)
+       @log.debug "bash.rb - Appending bash command to #{@path} script: " + shell_command
+       @script_lines << shell_command
+     end
+
+     def set_env(var, value)
+       @env[var] = value
+
+       # TODO What if the element appears multiple times?
+       if (i = @env_unset.index(var))
+         @env_unset.delete_at(i)
+       end
+
+       ## The above 'if' block could be replaced with ...
+       #@env_unset.delete(var)
+     end
+
+     def unset_env(var)
+       @env.delete(var)
+       @env_unset << var
+     end
+
+     def helper_path
+       scripts_dir = File.expand_path(File.dirname(__FILE__) + "../../../scripts/")
+       File.join(scripts_dir, 'hive-script-helper.sh')
+     end
+
+     def run
+       @log.info 'bash.rb - Writing script out to file'
+       File.open(@path, 'w') do |f|
+         f.write("#!/bin/bash --login\n")
+         f.write(". #{helper_path}\n")
+         f.write("# Set environment\n")
+         @env.each do |key, value|
+           # An escaped ' in a single quoted string in bash looks like '"'"'
+           if value.kind_of?(Array)
+             f.write("export #{key}=(" + value.collect { |v| "'#{v.to_s.gsub("'", '\'"\'"\'')}'" }.join(' ') + ")\n" )
+           else
+             f.write("export #{key}='#{value.to_s.gsub("'", '\'"\'"\'')}'\n")
+           end
+         end
+         @env_unset.each do |var|
+           f.write("unset #{var}\n")
+         end
+         f.write("cd $HIVE_WORKING_DIRECTORY")
+         f.write("\n# Test execution\n")
+         f.write(@script_lines.join("\n"))
+       end
+       File.chmod(0700, @path)
+
+       pid = Process.spawn @env_secure, "#{@path}", pgroup: true, in: '/dev/null', out: "#{@log_path}/stdout.log", err: "#{@log_path}/stderr.log"
+       @pgid = Process.getpgid(pid)
+
+       exit_value = nil
+       running = true
+       while running
+         begin
+           Timeout.timeout(30) do
+             Process.wait pid
+             exit_value = $?.exitstatus
+             running = false
+           end
+         rescue Timeout::Error
+           Process.kill(-9, @pgid) if ! ( @keep_running.nil? || @keep_running.call )
+           # Do something. Eg, upload log files.
+         end
+       end
+
+       # Kill off anything that is still running
+       terminate
+
+       # Return exit value of the script
+       exit_value
+     end
+
+     def terminate
+       if @pgid
+         begin
+           Process.kill(-9, @pgid)
+         rescue => e
+           @log.warn e
+         end
+         @pgid = nil
+       end
+     end
+   end
+ end
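
A usage sketch, assuming Hive.config has already been loaded and that fs and log are the Hive::FileSystem and Hive::Log instances a worker would normally provide; the shell commands are illustrative:

  script = Hive::ExecutionScript.new(
    file_system: fs,
    log: log,
    keep_running: nil   # nil: never kill on the 30s poll; a callable returning false triggers a kill
  )
  script.set_env('TEST_SERVER_PORT', 8000)
  script.append_bash_cmd('bundle install')
  script.append_bash_cmd('bundle exec rspec')

  # Writes the script, spawns it in its own process group and waits for it,
  # returning the script's exit status.
  exit_status = script.run
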
@@ -0,0 +1,103 @@
+ require "fileutils"
+
+ module Hive
+   class FileSystem
+     def initialize(job_id, home_directory, log)
+       @job_id = job_id
+       @home_directory = home_directory
+       @log = log
+       @log.debug "Creating job paths with id=#{@job_id} and home=#{@home_directory}"
+       make_directory(home_path)
+       make_directory(results_path)
+       make_directory(logs_path)
+       make_directory(testbed_path)
+     end
+
+     def home_path
+       @home_path ||= "#{@home_directory}/#{@job_id.to_s}"
+     end
+
+     def results_path
+       @results_path ||= "#{home_path}/results"
+     end
+
+     def logs_path
+       @logs_path ||= "#{home_path}/logs"
+     end
+
+     def testbed_path
+       @testbed_path ||= "#{home_path}/test_code"
+     end
+
+     def executed_script_path
+       @bash_script_path ||= "#{testbed_path}/executed_script.sh"
+     end
+
+     # Copy useful stuff into the results directory
+     def finalise_results_directory
+       copy_file(executed_script_path, "#{results_path}/executed_script.sh")
+     end
+
+     def fetch_build(build_url, destination_path)
+       if !fetch_build_with_curl(build_url, destination_path)
+         @log.info( "Initial build fetch failed -- trying again shortly")
+         sleep 5
+         if !fetch_build_with_curl(build_url, destination_path)
+           raise "Build could not be downloaded"
+         end
+       end
+     end
+
+     def fetch_build_with_curl(build_url, destination_path)
+       cert_path = Hive.config.network['cert']
+       cabundle_path = Hive.config.network['cafile']
+       base_url = Hive.config.network['scheduler']
+       apk_url = base_url + '/' + build_url
+       curl_line = "curl -L -m 60 #{apk_url} --cert #{cert_path} --cacert #{cabundle_path} --retry 3 -o #{destination_path}"
+
+       @log.info("Fetching build from hive-scheduler: #{curl_line}")
+       @log.debug("CURL line: #{curl_line}")
+       response = `#{curl_line}`
+       if $? != 0
+         @log.info("Curl error #{$?}: #{response.to_s}")
+         # Return false so that the caller can retry the download
+         false
+       else
+         @log.info("Curl seems happy, checking integrity of downloaded file")
+         check_build_integrity( destination_path )
+       end
+     end
+
+     def check_build_integrity( destination_path )
+       output = `file #{destination_path}`
+       if output =~ /zip/
+         result = `zip -T #{destination_path}`
+         @log.info(result)
+         $? == 0
+       else
+         true
+       end
+     end
+
+     private
+
+     def copy_file(src, dest)
+       begin
+         FileUtils.cp(src, dest)
+         @log.debug("Copied file #{src} -> #{dest}")
+       rescue => e
+         @log.error(e.message)
+       end
+     end
+
+     def make_directory(directory)
+       begin
+         FileUtils.rm_r(directory) if File.directory?(directory)
+         FileUtils.mkdir_p(directory)
+         @log.debug("Created directory: #{directory}")
+       rescue => e
+         @log.fatal(e.message)
+       end
+     end
+   end
+ end
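
A short sketch of the job workspace lifecycle; the job id, home directory and build path are illustrative, and Hive.config.network must be populated for the curl fetch:

  fs = Hive::FileSystem.new(123, '/var/hive/workspaces', log)   # log is a Hive::Log

  # Downloads via the scheduler URL, retrying once after 5s; zip files are
  # integrity-checked with `zip -T`.
  fs.fetch_build('builds/123/bundle.zip', "#{fs.testbed_path}/bundle.zip")

  # After the job, keep a copy of the generated script with the results.
  fs.finalise_results_directory
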
data/lib/hive/log.rb ADDED
@@ -0,0 +1,60 @@
+ require 'mono_logger'
+
+ module Hive
+   # Hive logging
+   # Allow logging to be written to multiple locations.
+   class Log
+     # Create the logger:
+     #
+     #   # No log files will be written
+     #   log = Hive::Log.new()
+     #   # Write log files to standard out and a log file
+     #   log = Hive::Log.new( [
+     #     {
+     #       stream: 'Filename.log',
+     #       level: 'DEBUG'
+     #     },
+     #     {
+     #       stream: STDOUT,
+     #       level: 'INFO'
+     #     },
+     #   ] )
+     def initialize(args = [])
+       @loggers = {}
+       args.each do |l|
+         add_logger(l[:stream], l[:level])
+       end
+     end
+
+     # Add a new log location:
+     #
+     #   # INFO level log to 'Filename.log'
+     #   log.add_logger( 'Filename.log', 'INFO' )
+     #   # DEBUG level log to standard output
+     #   log.add_logger( STDOUT, 'DEBUG' )
+     def add_logger(stream, level)
+       log = MonoLogger.new(stream)
+       log.formatter = proc do |severity, datetime, _progname, msg|
+         "#{severity[0, 1]} #{datetime.strftime('%Y-%m-%d %H:%M:%S')}: #{msg}\n"
+       end
+       log.level = MonoLogger.const_get(level)
+       @loggers[stream] = log
+     end
+
+     # Stop a log stream:
+     #
+     #   # Stop the log to standard output
+     #   log.stop_logger( STDOUT )
+     #   # Stop the log to 'Filename.log'
+     #   log.stop_logger( 'Filename.log' )
+     def stop_logger(stream)
+       @loggers.delete(stream)
+     end
+
+     MonoLogger::Severity.constants.each do |level|
+       define_method(level.downcase) do |*args|
+         @loggers.each { |_s, l| l.send(level.downcase, *args) }
+       end
+     end
+   end
+ end
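
Building on the examples in the comments above, a small sketch of the fan-out behaviour (the file name is illustrative):

  log = Hive::Log.new([{ stream: STDOUT, level: 'INFO' }])
  log.add_logger('hive.log', 'DEBUG')

  # Severity methods are generated from MonoLogger::Severity, so the usual
  # logger calls go to every registered stream that accepts the level.
  log.info('Worker started')    # STDOUT and hive.log
  log.debug('Verbose detail')   # hive.log only; STDOUT is at INFO

  log.stop_logger(STDOUT)
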
@@ -0,0 +1,79 @@
+ module Hive
+   class PortAllocator
+     class NoPortsAvailable < StandardError
+     end
+
+     # Create a port allocator
+     #
+     # For ports in the range 4000-5000
+     #   Hive::PortAllocator.new(minimum: 4000, maximum: 5000)
+     #
+     # For ports 6000, 6050 and 7433
+     #   Hive::PortAllocator.new(ports: [6000, 6050, 7433])
+     #
+     def initialize(config)
+       @allocated_ports = []
+       if config.has_key?(:minimum) and config.has_key?(:maximum) and config[:minimum] > 0 and config[:minimum] <= config[:maximum]
+         @free_ports = Array(config[:minimum]..config[:maximum])
+       elsif config.has_key?(:ports) and config[:ports].is_a? Array
+         config[:ports].each do |p|
+           raise ArgumentError if ! p.is_a? Integer or p <= 0
+         end
+         @free_ports = config[:ports]
+       else
+         raise ArgumentError
+       end
+     end
+
+     # Allocate a single port in the range
+     def allocate_port
+       if p = @free_ports.pop
+         @allocated_ports << p
+         p
+       else
+         raise NoPortsAvailable
+       end
+     end
+
+     # Release a single port in the range
+     def release_port(p)
+       @free_ports << p if @allocated_ports.delete(p)
+     end
+
+     # Create a new Hive::PortAllocator instance with a number of ports from
+     # the range
+     def allocate_port_range(n)
+       if n <= @free_ports.length
+         ps = @free_ports.take(n)
+         @free_ports = @free_ports.drop(n)
+         @allocated_ports.concat(ps)
+         PortAllocator.new(ports: ps)
+       else
+         raise NoPortsAvailable
+       end
+     end
+
+     # Release ports that were previously allocated to another
+     # Hive::PortAllocator
+     #
+     # Note: this will fail silently if 'range' contains ports that are not
+     # allocated in the current instance
+     def release_port_range(range)
+       if range.ports - @allocated_ports == []
+         @free_ports.concat(range.ports)
+         @allocated_ports = @allocated_ports - range.ports
+       end
+     end
+
+     # Release all ports
+     def release_all_ports
+       @free_ports.concat(@allocated_ports)
+       @allocated_ports = []
+     end
+
+     # Full list of all ports, either free or allocated
+     def ports
+       [@free_ports, @allocated_ports].flatten
+     end
+   end
+ end
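
A usage sketch of the pool (the port numbers are illustrative):

  allocator = Hive::PortAllocator.new(minimum: 4000, maximum: 4010)

  port = allocator.allocate_port     # taken from the free list
  allocator.release_port(port)       # returned to the pool

  # Carve out a sub-allocator for a single device, then hand it back.
  device_ports = allocator.allocate_port_range(3)
  allocator.release_port_range(device_ports)

  # An exhausted pool raises rather than returning nil.
  begin
    Hive::PortAllocator.new(ports: []).allocate_port
  rescue Hive::PortAllocator::NoPortsAvailable
    # handle the shortage, e.g. skip the device
  end
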
@@ -0,0 +1,120 @@
+ require 'hive'
+ require 'hive/port_allocator'
+
+ module Hive
+   # Central register of devices and workers in the hive
+   class Register
+     attr_reader :controllers
+
+     def initialize
+       @controllers = []
+       @devices = {}
+       @max_devices = 5 # TODO Add to configuration file
+       if Hive.config.ports?
+         @port_allocator = Hive::PortAllocator.new(minimum: Hive.config.ports.minimum, maximum: Hive.config.ports.maximum)
+       else
+         @port_allocator = Hive::PortAllocator.new(ports: [])
+       end
+     end
+
+     def devices
+       list = []
+       @devices.each do |controller, device_list|
+         list.concat(device_list)
+       end
+       list
+     end
+
+     def worker_pids
+       self.devices.collect{ |d| d.worker_pid }.compact
+     end
+
+     def instantiate_controllers(controller_details = Hive.config.controllers)
+       controller_details.each do |type, opts|
+         Hive.logger.info("Adding controller for '#{type}'")
+         require "hive/controller/#{type}"
+         controller = Object.const_get('Hive').const_get('Controller').const_get(type.capitalize).new(opts.to_hash)
+         @controllers << controller
+       end
+       check_controllers
+       @controllers
+     end
+
+     def run
+       loop do
+         Hive.poll
+         housekeeping
+         check_controllers
+         sleep Hive.config.timings.controller_loop_interval
+       end
+     end
+
+     def check_controllers
+       Hive.logger.debug("Devices before update: #{@devices.inspect}")
+       new_device_list = {}
+       @controllers.each do |c|
+         begin
+           new_device_list[c.class] = []
+           @devices[c.class] = [] if ! @devices.has_key?(c.class)
+           Hive.logger.info("Checking controller #{c.class}")
+           c.detect.each do |device|
+             Hive.logger.debug("Found #{device.inspect}")
+             i = @devices[c.class].find_index(device)
+             if i
+               @devices[c.class][i].status = device.status
+               new_device_list[c.class] << @devices[c.class][i]
+             else
+               device.port_allocator = @port_allocator.allocate_port_range(c.port_range_size)
+               new_device_list[c.class] << device
+             end
+           end
+           Hive.logger.debug("new_device_list: #{new_device_list.inspect}")
+
+           # Remove any devices that have not been rediscovered
+           (@devices[c.class] - new_device_list[c.class]).each do |d|
+             @port_allocator.release_port_range(d.port_allocator)
+             d.stop
+             @devices[c.class].delete(d)
+           end
+
+           # Add any new devices
+           (new_device_list[c.class] - @devices[c.class]).each do |d|
+             @devices[c.class] << d
+           end
+           # Check that all known devices have running workers
+           @devices[c.class].each do |d|
+             if d.claimed?
+               d.stop if d.running?
+             else
+               d.start if ! d.running?
+             end
+           end
+         rescue Hive::Controller::DeviceDetectionFailed
+           Hive.logger.warn("Failed to detect devices for #{c.class}")
+         end
+       end
+       Hive.logger.debug("Devices after update: #{@devices.inspect}")
+     end
+
+     def housekeeping
+       clear_workspaces
+     end
+
+     def clear_workspaces
+       candidates = Dir.glob("#{Hive.config.logging.home}/*")
+                       .select{ |f|
+                         File.directory?(f) \
+                           && File.exists?("#{f}/job_info") \
+                           && File.read("#{f}/job_info").chomp.to_s =~ /completed/
+                       }.sort_by{ |f|
+                         File.mtime(f)
+                       }.reverse
+       if candidates && candidates.length > Hive.config.logging.homes_to_keep
+         candidates[Hive.config.logging.homes_to_keep..-1].each do |dir|
+           Hive.logger.info("Found (and deleting) #{dir}")
+           FileUtils.rm_rf(dir)
+         end
+       end
+     end
+   end
+ end
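
A sketch of how a daemon might drive the register; the surrounding daemon and configuration are not part of this diff, so the call order here is an assumption:

  # Hive.config (controllers, ports, timings, logging) must be loaded first.
  register = Hive::Register.new
  register.instantiate_controllers    # defaults to Hive.config.controllers

  register.devices                    # flat list of devices across all controllers
  register.worker_pids                # PIDs of the running device workers

  register.run                        # poll/housekeeping/detect loop; never returns
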
@@ -0,0 +1,20 @@
+
+ module Hive
+   class Results
+     attr_reader :timestamp
+     def initialize( state, message, data = {})
+       @state = state
+       @message = message
+       @data = data
+       @timestamp = Time.now
+     end
+
+     def failed?
+       @state == 'fail'
+     end
+
+     def passed?
+       @state == 'pass'
+     end
+   end
+ end
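
For illustration, the result object each diagnostic is expected to return (the message and data are made up):

  result = Hive::Results.new('fail', 'Battery below threshold', level: 11)
  result.failed?    # => true
  result.passed?    # => false
  result.timestamp  # Time the result was created
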
@@ -0,0 +1,16 @@
+ require 'hive/worker'
+
+ module Hive
+   class Worker
+     # The Shell worker
+     class Shell < Worker
+       def initialize(options = {})
+         @devicedb_register = false
+         super
+       end
+
+       def pre_script(job, file_system, script)
+       end
+     end
+   end
+ end
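
The shell worker shows the minimal surface a platform worker needs. A hypothetical worker for another platform would follow the same shape; the class name and environment variable are illustrative, and the base Hive::Worker is defined elsewhere in this gem:

  require 'hive/worker'

  module Hive
    class Worker
      class Dummy < Worker
        def initialize(options = {})
          @devicedb_register = false   # skip DeviceDB registration, as the shell worker does
          super
        end

        # Hook called before the execution script runs; adjust the script here.
        def pre_script(job, file_system, script)
          script.set_env('DUMMY_DEVICE', 'true')
        end
      end
    end
  end
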