facter-statistax 0.0.9 → 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/acceptance/Gemfile +2 -1
- data/acceptance/log_performance/configuration.rb +65 -0
- data/acceptance/log_performance/custom_exceptions.rb +5 -0
- data/acceptance/log_performance/execute_beaker_tests/beaker.rb +77 -0
- data/acceptance/log_performance/execute_beaker_tests/command_logger.rb +24 -0
- data/acceptance/log_performance/execute_beaker_tests/ns_pooler.rb +60 -0
- data/acceptance/log_performance/execute_beaker_tests/output_parser.rb +40 -0
- data/acceptance/log_performance/execute_beaker_tests/run_command.rb +30 -0
- data/acceptance/log_performance/execute_beaker_tests/test_all_platforms.rb +99 -0
- data/acceptance/log_performance/execute_beaker_tests/vm_pooler.rb +7 -0
- data/acceptance/log_performance/get_supported_platforms.rb +1904 -0
- data/acceptance/log_performance/performance_times_logging/facter_performance_logs_parser.rb +65 -0
- data/acceptance/log_performance/performance_times_logging/google_sheets.rb +133 -0
- data/acceptance/log_performance/performance_times_logging/log_performance_times.rb +20 -0
- data/acceptance/log_performance/performance_times_logging/table_logging_utils.rb +55 -0
- data/acceptance/log_performance/performance_times_logging/write_facter_times_to_logger.rb +109 -0
- data/acceptance/log_performance/rspec/file_folder_utils_rspec.rb +12 -0
- data/acceptance/log_performance/utils.rb +41 -0
- data/acceptance/run/run_statistax.rb +45 -29
- data/lib/version.rb +1 -1
- data/scripts/benchmark_script.rb +9 -7
- metadata +21 -5
- data/acceptance/build_facter_ng_gem.sh +0 -1
@@ -0,0 +1,65 @@
|
|
1
|
+
require_relative '../utils'
|
2
|
+
|
3
|
+
# Parses facter-statistax JSON benchmark logs for a platform and accumulates
# per-fact average run times, keyed by fact name and facter flavour.
class FacterPerformanceLogsParser
  # statistax_logs_folder: root directory containing one sub-folder per platform.
  # log_files_per_platform: expected number of JSON logs per platform
  #   (one per facter flavour — cpp and gem).
  def initialize(statistax_logs_folder, log_files_per_platform)
    @log_dir_path = statistax_logs_folder
    @log_files_per_platform = log_files_per_platform
    @performance_times = {}
  end

  # Adds the given platform's times to the accumulated hash and returns it.
  # The platform is skipped (with a warning) when the number of JSON logs found
  # differs from the expected count, or when any log fails to parse.
  # Returns {platform => {fact_name => {'cpp'|'gem' => time}}}.
  def extract_performance_times_hash(platform)
    json_file_paths = FileFolderUtils.get_sub_file_paths_by_type(File.join(@log_dir_path, platform), 'json')
    if json_file_paths.length != @log_files_per_platform
      puts "Something went wrong with logs for platform #{platform}. Skipping it!"
    else
      results = get_times_for_platform(platform, json_file_paths)
      @performance_times[platform] = results unless results.empty?
    end
    @performance_times
  end

  private

  # Parses every JSON log for the platform. Returns {} if any log is
  # malformed, so partially-parsed platforms are never reported.
  def get_times_for_platform(platform, json_log_paths)
    platform_times = {}
    json_log_paths.each do |json_path|
      puts "Parsing log folder #{json_path}"
      content, facter_type = parse_performance_log(JSONReader.json_file_to_hash(json_path))
      if content.empty?
        puts "For platform #{platform}, failed to parse log #{json_path}!"
        puts "Skipping all logs for platform #{platform}!"
        return {}
      end
      platform_times[facter_type] = content
    end
    normalize_hash_structure(platform_times)
  end

  # Returns [{fact_name => average_time}, 'gem'|'cpp'],
  # or [{}, ''] when the log is malformed.
  def parse_performance_log(data)
    results = {}
    data_hash = data.is_a?(Array) ? data[0] : nil # the performance data is stored inside a list

    # Guard against empty or malformed logs. Previously an empty list made
    # data_hash nil and the hash lookups below raised NoMethodError instead
    # of skipping the log.
    if data_hash.nil? || data_hash['facter_gem?'].nil? || data_hash['facts'].nil?
      return [{}, '']
    end

    facter_type = data_hash['facter_gem?'] == 'true' ? 'gem' : 'cpp'
    data_hash['facts'].each do |fact|
      results[fact['name']] = fact['average']
    end
    [results, facter_type]
  end

  def normalize_hash_structure(platform_times)
    # hash is extracted as {cpp/gem => {fact_name => time}}
    # and is converted to {fact_name => {cpp/gem => time}}
    normalized_times = {}
    platform_times.each do |facter_type, fact|
      fact.each do |fact_name, time|
        normalized_times[fact_name] ||= {}
        normalized_times[fact_name][facter_type] = time
      end
    end
    normalized_times
  end
end
|
@@ -0,0 +1,133 @@
|
|
1
|
+
require 'google/apis/sheets_v4'
|
2
|
+
require 'googleauth'
|
3
|
+
require_relative '../utils'
|
4
|
+
|
5
|
+
# Thin wrapper over the Google Sheets v4 API for the performance spreadsheet:
# page (sheet) creation, row reads/appends, cell merging and conditional
# formatting. Range arguments are RangeInTable objects already converted to
# A1 notation via A1Notation (see table_logging_utils).
class GoogleSheets
  APPLICATION_NAME = 'Google Sheets API'.freeze
  # env var with location of the token file needed to authenticate access to spreadsheet
  AUTHENTICATION_FILE_ENV_VAR = 'GOOGLE_APPLICATION_CREDENTIALS'.freeze

  # Builds an authenticated SheetsService for the given spreadsheet.
  # Raises (TypeError from File.open) if AUTHENTICATION_FILE_ENV_VAR is unset.
  def initialize(spreadsheet_id)
    @spreadsheet_id = spreadsheet_id
    @service = Google::Apis::SheetsV4::SheetsService.new
    @service.client_options.application_name = APPLICATION_NAME
    @service.authorization = Google::Auth::ServiceAccountCredentials.make_creds(
      json_key_io: File.open(ENV[AUTHENTICATION_FILE_ENV_VAR]),
      scope: Google::Apis::SheetsV4::AUTH_SPREADSHEETS
    )
  end

  # Returns {page_title => sheet_id} for every page in the spreadsheet.
  def name_and_path_of_pages
    pages = {}
    spreadsheet = @service.get_spreadsheet(@spreadsheet_id)
    spreadsheet.sheets.each do |sheet|
      pages[sheet.properties.title] = sheet.properties.sheet_id
    end
    pages
  end

  # Adds a new empty page (sheet) titled page_name to the spreadsheet.
  def create_page(page_name)
    add_sheet_request = Google::Apis::SheetsV4::AddSheetRequest.new
    add_sheet_request.properties = Google::Apis::SheetsV4::SheetProperties.new
    add_sheet_request.properties.title = page_name
    batch_update_spreadsheet([add_sheet: add_sheet_request], "Created page: #{page_name}.")
  end

  # Reads the cell values of range_in_table (a RangeInTable in numeric
  # coordinates) from the given page. Returns [[]] when the range is empty.
  def get_rows_from_page(page_name, range_in_table)
    normalized_range = A1Notation.convert_to_a1_notation(range_in_table)
    response = @service.get_spreadsheet_values(@spreadsheet_id, get_sheet_range_string(page_name, normalized_range))
    response.values.nil? ? [[]] : response.values
  end

  # Appends rows_list (array of row arrays) after the given range on the page.
  # USER_ENTERED makes Sheets interpret values as if typed (numbers, formulas).
  def write_to_page(rows_list, page_name, range)
    value_range = Google::Apis::SheetsV4::ValueRange.new(values: rows_list)
    result = @service.append_spreadsheet_value(@spreadsheet_id,
                                               get_sheet_range_string(page_name, A1Notation.convert_to_a1_notation(range)),
                                               value_range,
                                               value_input_option: 'USER_ENTERED')
    changed_cells = result.updates.updated_cells
    puts "#{changed_cells.nil? ? 0 : changed_cells} cells appended."
  end

  # Writes a single row and then merges its empty cells into the following
  # non-empty cell (used for fact-name title rows spanning several columns).
  def add_row_with_merged_cells(row, page_name, page_id, range)
    write_to_page([row], page_name, range)
    merge_columns(page_id, row, range)
  end

  # Adds a conditional-formatting rule: cells in `range` matching `rule`
  # (a BooleanCondition hash, see ConditionRule) get `color` as background.
  def format_range_by_condition(color, page_id, rule, range, success_message)
    conditional_format_request = Google::Apis::SheetsV4::AddConditionalFormatRuleRequest.new

    conditional_format_rule = Google::Apis::SheetsV4::ConditionalFormatRule.new
    conditional_format_rule.ranges = [create_grid_range(range, page_id)]

    # The rule's values entry is wrapped into the API's ConditionValue type.
    rule[:values] = [Google::Apis::SheetsV4::ConditionValue.new(user_entered_value: rule[:values])]
    boolean_rule = Google::Apis::SheetsV4::BooleanCondition.new(rule)
    format = Google::Apis::SheetsV4::CellFormat.new(background_color: Google::Apis::SheetsV4::Color.new(color.code))
    conditional_format_rule.boolean_rule = Google::Apis::SheetsV4::BooleanRule.new(condition: boolean_rule, format: format)

    conditional_format_request.index = 0
    conditional_format_request.rule = conditional_format_rule

    batch_update_spreadsheet([add_conditional_format_rule: conditional_format_request], success_message)
  end

  private

  # Sends a batchUpdate with the given request hashes; API errors are
  # reported to stdout rather than raised (best-effort logging).
  def batch_update_spreadsheet(requests_hashes_list, success_message)
    return if requests_hashes_list.empty?

    batch_update_spreadsheet_request = Google::Apis::SheetsV4::BatchUpdateSpreadsheetRequest.new
    batch_update_spreadsheet_request.requests = requests_hashes_list

    begin
      @service.batch_update_spreadsheet(@spreadsheet_id, batch_update_spreadsheet_request)
      puts success_message
    rescue Google::Apis::ClientError => error
      puts error.body
    end
  end

  # Formats an A1-converted RangeInTable as "Page!A1:B2".
  def get_sheet_range_string(page_name, range)
    "#{page_name}!"\
    "#{range.start_column}#{range.start_row}:"\
    "#{range.end_column}#{range.end_row}"
  end

  # on a row, merges every empty cell with the next non empty cell
  # NOTE(review): a non-empty cell with no preceding empty cells produces a
  # one-cell merge request; harmless, but worth confirming it's intended.
  def merge_columns(sheet_id, row_data, start_range)
    merge_requests = []
    merge_range = start_range.clone
    merge_range.end_column = merge_range.start_column
    row_data.each do |value|
      if value.empty?
        merge_range.end_column += 1
      else
        merge_requests.append(merge_cells: create_merge_request(merge_range, sheet_id))
        # advance both bounds past the merged span; mutation order matters here
        merge_range.start_column = merge_range.end_column += 1
      end
    end
    batch_update_spreadsheet(merge_requests, 'Merged cells.') unless merge_requests.empty?
  end

  def create_merge_request(merge_range, sheet_id)
    # for a merge request, you need to extend the right side column and row index. Eg: to merge cells at A1:B1 you need to give a range of A1:C2
    range = create_grid_range(merge_range, sheet_id)
    range.end_column_index += 1
    range.end_row_index += 1

    merge_cells_request = Google::Apis::SheetsV4::MergeCellsRequest.new
    merge_cells_request.merge_type = 'MERGE_ROWS'
    merge_cells_request.range = range
    merge_cells_request
  end

  # Converts a RangeInTable (numeric coordinates) into the API's GridRange.
  def create_grid_range(range_in_table, sheet_id)
    range = Google::Apis::SheetsV4::GridRange.new
    range.sheet_id = sheet_id
    range.start_column_index = range_in_table.start_column
    range.start_row_index = range_in_table.start_row
    range.end_column_index = range_in_table.end_column
    range.end_row_index = range_in_table.end_row
    range
  end
end
|
@@ -0,0 +1,20 @@
|
|
1
|
+
require_relative 'facter_performance_logs_parser'
|
2
|
+
require_relative 'write_facter_times_to_logger'
|
3
|
+
require_relative 'google_sheets'
|
4
|
+
require_relative '../configuration'
|
5
|
+
|
6
|
+
# Orchestrates the pipeline: parse statistax logs for a platform, then push
# the times into the Google spreadsheet.
class LogPerformanceTimes
  # Each platform produces one log per facter flavour (cpp and gem).
  LOG_FILES_PER_PLATFORM = 2
  # Columns written for every fact. Frozen: constants holding collections
  # should be immutable (matches the frozen constants elsewhere in the gem).
  FACT_COLUMNS = ['cpp', 'gem', 'gem increase %'].freeze

  # statistax_logs_folder: directory with the per-platform log sub-folders.
  def initialize(statistax_logs_folder)
    @log_parser = FacterPerformanceLogsParser.new(statistax_logs_folder, LOG_FILES_PER_PLATFORM)
    @log_writer = WriteFacterTimesToLogger.new(GoogleSheets.new(Configuration::SPREADSHEET_ID), FACT_COLUMNS)
  end

  # Parses the platform's logs and writes the extracted times to the sheet.
  def populate_logs_for(platform_name)
    performance_times = @log_parser.extract_performance_times_hash(platform_name)
    @log_writer.write_to_logs(performance_times)
  end
end
|
@@ -0,0 +1,55 @@
|
|
1
|
+
# A rectangular region of a sheet in zero-based numeric coordinates
# (columns and rows). A1Notation converts instances to A1 form.
class RangeInTable
  attr_accessor :start_column, :start_row, :end_column, :end_row

  def initialize(start_column, start_row, end_column, end_row)
    @start_column, @start_row, @end_column, @end_row =
      start_column, start_row, end_column, end_row
  end
end
|
11
|
+
|
12
|
+
# Builders for Sheets conditional-formatting boolean conditions.
module ConditionRule
  GREATER_THAN = {type: 'CUSTOM_FORMULA', values: '=GT(%<compared_with>s, %<compared>s)'}.freeze

  # Condition matching cells whose value exceeds `value`, anchored at the
  # first cell of `start_position` (Sheets applies the formula per cell).
  def self.greater_than(value, start_position)
    formula = format(GREATER_THAN[:values],
                     compared: value,
                     compared_with: A1Notation.convert_start_position(start_position))
    {type: GREATER_THAN[:type], values: formula}
  end
end
|
21
|
+
|
22
|
+
# RGB colour value object; components default to 0.
# NOTE(review): presumably components are in the API's 0..1 float colour
# space — confirm against callers.
class Color
  attr_accessor :red, :green, :blue

  def initialize(red = 0, green = 0, blue = 0)
    @red, @green, @blue = red, green, blue
  end

  # Hash form accepted by Google::Apis::SheetsV4::Color.
  def code
    {red: @red, blue: @blue, green: @green}
  end
end
|
35
|
+
|
36
|
+
# Converts zero-based RangeInTable coordinates to A1 notation.
# In A1 notation row values start from 1, so we'll add a 1 to row values.
module A1Notation
  # Covers columns A..ZZ (702 columns); frozen like the gem's other constants.
  COLUMN_LETTERS = ('A'..'ZZ').to_a.freeze

  # First cell of the range as an A1 string, e.g. column 1, row 2 => "B3".
  def self.convert_start_position(range)
    "#{COLUMN_LETTERS[range.start_column]}#{range.start_row + 1}"
  end

  # Returns a RangeInTable whose columns are A1 letters and rows are 1-based.
  # An open-ended column (nil or '') stays '' so ranges like "A1:1" span
  # the whole row. (`or` replaced with `||` — `or` binds looser than `=` and
  # is reserved for control flow in idiomatic Ruby.)
  def self.convert_to_a1_notation(range)
    end_column = if range.end_column.nil? || range.end_column == ''
                   ''
                 else
                   COLUMN_LETTERS[range.end_column]
                 end
    RangeInTable.new(COLUMN_LETTERS[range.start_column],
                     range.start_row + 1,
                     end_column,
                     range.end_row + 1)
  end
end
|
@@ -0,0 +1,109 @@
|
|
1
|
+
require_relative 'table_logging_utils'
|
2
|
+
|
3
|
+
# Writes parsed facter performance times into a spreadsheet-style logger
# (GoogleSheets), one page per platform: a fact-name title row, a facter-type
# row, then one timestamped data row per run.
class WriteFacterTimesToLogger
  # Cell range the "gem time increased over 100%" highlight rule covers.
  RULE_RANGE = RangeInTable.new(1, 2, 100, 1000)

  # logger: duck-typed writer (GoogleSheets) responding to
  #   name_and_path_of_pages, create_page, get_rows_from_page, write_to_page,
  #   add_row_with_merged_cells and format_range_by_condition.
  # facter_columns: per-fact column titles, e.g. ['cpp', 'gem', 'gem increase %'].
  def initialize(logger, facter_columns)
    @log_writer = logger
    @columns_each_fact = facter_columns
  end

  # Writes all platforms in times_to_log ({platform => {fact => {type => time}}}).
  # No-op for an empty hash.
  def write_to_logs(times_to_log)
    return if times_to_log.empty?

    @performance_times = times_to_log
    create_platform_page
    page_names = @log_writer.name_and_path_of_pages # done to get pages that are newly created
    @performance_times.each_key do |platform|
      puts "\nWriting results for platform #{platform}\n"
      facts_order_in_table, page_is_new = create_title_rows(platform, page_names[platform])
      write_performance_times(facts_order_in_table, platform)
      add_conditional_formatting(page_is_new, page_names, platform, RULE_RANGE)
    end
  end

  private

  # Creates a page for the first platform in @performance_times if missing.
  def create_platform_page
    logged_platforms = @log_writer.name_and_path_of_pages.keys
    platform_name = @performance_times.keys[0]
    if logged_platforms.include?(platform_name)
      puts 'Platform page already created.'
    else
      @log_writer.create_page(platform_name)
    end
  end

  # Appends any fact names not yet present in the page's title row.
  # Returns [ordered fact names, whether the page was empty].
  def create_title_rows(platform, page_location)
    # fact names are stored on the first table row
    stored_facts = @log_writer.get_rows_from_page(platform, RangeInTable.new(0, 0, nil, 0))[0]

    new_facts = @performance_times[platform].keys - stored_facts
    # fact names occupy @columns_each_fact.size cells, so just the last one has the fact name, the rest are empty
    new_facts_row_with_spaces = new_facts.flat_map { |fact_name| [''] * (@columns_each_fact.size - 1) << fact_name }

    if new_facts.empty?
      puts 'No new fact names to add.'
    else
      # write new fact names from the second column (the first one is reserved for the date) if the page is empty,
      # or after the last fact name
      new_facts_append_range = RangeInTable.new(stored_facts.size + 1, 0, nil, 0)

      puts 'Adding fact names.'
      @log_writer.add_row_with_merged_cells(new_facts_row_with_spaces, platform, page_location, new_facts_append_range)
      puts 'Adding facter types and gem time increase.'
      create_facter_type_row(new_facts_row_with_spaces.size, new_facts_append_range.start_column, platform, stored_facts.empty?)
    end
    [get_new_facts_order(new_facts_row_with_spaces, stored_facts), stored_facts.empty?]
  end

  # Full column order of facts in the table (stored first, then new),
  # with the padding cells stripped out.
  def get_new_facts_order(new_facts_row_with_spaces, stored_facts)
    stored_facts_order = stored_facts + new_facts_row_with_spaces
    stored_facts_order.delete('')
    stored_facts_order
  end

  # Writes the facter-type sub-header (cpp/gem/increase) under the new facts,
  # prefixing a 'Date' title when the page is brand new.
  def create_facter_type_row(number_of_facts_to_add, write_from_column, platform, add_date_title)
    facter_types_row = @columns_each_fact * (number_of_facts_to_add / @columns_each_fact.size)
    if add_date_title
      facter_types_row = ['Date'].concat(facter_types_row)
      facter_types_range = RangeInTable.new(0, 1, nil, 1)
    else
      facter_types_range = RangeInTable.new(write_from_column, 1, nil, 1)
    end
    @log_writer.write_to_page([facter_types_row], platform, facter_types_range)
  end

  # Appends one timestamped row with the platform's times, in table order.
  def write_performance_times(facts_order_list, platform)
    # Time.now replaces DateTime.now: DateTime is legacy and needed
    # `require 'date'`, which no file in this gem performed.
    row = [Time.now.strftime("%d/%m/%Y %H:%M")] # adding timestamp
    facts_order_list.each do |fact|
      if @performance_times[platform][fact].nil?
        row.concat([''] * @columns_each_fact.size) # skip values for missing fact
      else
        populate_data_row(fact, platform, row)
      end
    end
    puts 'Appending performance times.'
    # range is for the first cell where data should be added on the sheet. If that cell is not empty, the new values will be
    # appended under it, where possible.
    @log_writer.write_to_page([row], platform, RangeInTable.new(0, 2, nil, 2))
  end

  # Appends cpp time, gem time and the gem increase percentage for one fact.
  # NOTE(review): assumes cpp_fact is a non-zero float — a 0.0 cpp time
  # yields Infinity/NaN in the percentage. Confirm against log producers.
  def populate_data_row(fact, platform, row)
    cpp_fact = @performance_times[platform][fact][@columns_each_fact[0]]
    gem_fact = @performance_times[platform][fact][@columns_each_fact[1]]
    gem_percentage_increase = (gem_fact - cpp_fact) / cpp_fact * 100

    row << cpp_fact
    row << gem_fact
    row << format('%<time_difference>.2f', time_difference: gem_percentage_increase)
  end

  # On freshly created pages only, highlights gem increases above 100%.
  def add_conditional_formatting(page_is_new, page_names, platform, rule_range)
    return unless page_is_new

    success_message = 'Added rule to highlight gem run time increased over 100%!'
    rule = ConditionRule.greater_than(100, rule_range)
    @log_writer.format_range_by_condition(Color.new(1), page_names[platform], rule, rule_range, success_message)
  end
end
|
@@ -0,0 +1,12 @@
|
|
1
|
+
require 'loggers/google_sheets'
|
2
|
+
# NOTE(review): the original spec was not valid Ruby — it used typographic
# quotes (“…”) around the context description and omitted `do` after
# `describe FileFolderUtils`, so the file could never load. Rewritten as a
# standard RSpec example group (the wrapping class served no purpose and
# would break the describe DSL lookup).
describe FileFolderUtils do
  context 'Testing the file_exists method' do
    it 'should call File.file?(file_path)' do
      # Stub the Sheets API namespace so GoogleSheets can be constructed
      # without credentials. TODO(review): the example has no expectation yet.
      SheetsV4 = double('')
      google_parser = GoogleSheets.new('spreadsheet_is')
    end
  end
end
|
@@ -0,0 +1,41 @@
|
|
1
|
+
require 'benchmark'
require 'fileutils'
require 'json'
|
3
|
+
|
4
|
+
# Reads a JSON document from disk into plain Ruby objects.
class JSONReader
  # Returns the parsed content of the file at file_path (Hash or Array).
  # Raises Errno::ENOENT for a missing file, JSON::ParserError for bad JSON.
  def self.json_file_to_hash(file_path)
    raw_content = File.read(file_path)
    JSON.parse(raw_content)
  end
end
|
9
|
+
|
10
|
+
# Small file/folder helpers used by the performance-logging scripts.
class FileFolderUtils
  # Names of the directory's children, excluding dot entries ('.', '..' and
  # hidden files). Returns [] (with a warning) when the directory is missing.
  # (Redundant begin/end removed — a def body takes rescue directly.)
  def self.get_children_names(parent_folder_path)
    # all children whose name doesn't start with '.'
    Dir.entries(parent_folder_path).reject { |entry| entry.start_with?('.') }
  rescue Errno::ENOENT
    puts "No #{parent_folder_path} folder found!"
    []
  end

  # True when file_path names a regular file.
  def self.file_exists(file_path)
    File.file?(file_path)
  end

  # Paths of all files with the given extension anywhere under parent_folder.
  def self.get_sub_file_paths_by_type(parent_folder, file_extension)
    Dir["#{parent_folder}/**/*.#{file_extension}"].select { |f| File.file? f }
  end

  # Creates path and any missing parents. mkdir_p is already a no-op for an
  # existing path, so the previous File.exist? pre-check was redundant (and racy).
  def self.create_directories(path)
    FileUtils.mkdir_p(path)
  end
end
|
33
|
+
|
34
|
+
# Wall-clock timing helper for long-running steps.
module TimedMethods
  # Runs the given block and returns its elapsed real time in minutes,
  # rounded to two decimals.
  # Fixes: Benchmark was used without `require 'benchmark'` anywhere in this
  # file (NameError on first call); the odd `yield block` — which passed the
  # proc to itself as an argument — is replaced by a plain call.
  def self.get_run_time(&block)
    time = Benchmark.measure { block.call }
    (time.real / 60).round(2)
  end
end
|
@@ -1,49 +1,65 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
3
|
require 'pathname'
|
4
|
+
require_relative '../log_performance/performance_times_logging/log_performance_times'
|
5
|
+
|
6
|
+
# Normalizes a beaker agent's platform into the name used for its log folder
# and spreadsheet page: AIX gets a dotted version plus '-power' suffix, FIPS
# hosts use their template name, everything else keeps its platform string.
def correct_platform_name(agent)
  return agent.to_s.split('-')[0].sub(/aix(\d)(\d)/, 'aix-\1.\2') + '-power' if agent['platform'].include?('aix')
  return agent['template'] if agent.has_key?('template') && agent['template'].include?('fips')

  agent['platform']
end
|
15
|
+
|
16
|
+
# Copies the local config.json onto the agent and runs statistax there.
# is_gem is interpolated into the command line and selects the facter
# flavour being benchmarked (truthy string => facter-ng gem).
def run_statistax(agent, home_dir, is_gem)
  config_path = File.join(Pathname.new(File.expand_path('..', __dir__)), 'config.json')
  create_remote_file(agent, "#{home_dir}/config.json", ::File.read(config_path))

  if agent['platform'] =~ /windows/
    # cygwin shells don't have the puppet bin directories on PATH by default
    puppetbin_path = '"/cygdrive/c/Program Files (x86)/Puppet Labs/Puppet/puppet/bin":"/cygdrive/c/Program Files/Puppet Labs/Puppet/puppet/bin"'
    on agent, %( echo 'export PATH=$PATH:#{puppetbin_path}' > /etc/bash.bashrc )
    on agent, "statistax.bat config.json #{is_gem}"
  else
    on agent, "statistax #{home_dir}/config.json #{is_gem}"
  end
end
|
27
|
+
|
28
|
+
# Copies the statistax output log from the agent into host_dir/<cpp|gem>,
# creating the local directory first.
def save_output(agent, home_dir, host_dir, is_gem)
  flavour = is_gem ? 'gem' : 'cpp'
  out_dir = File.join(host_dir, flavour)
  FileUtils.mkdir_p(out_dir)
  scp_from agent, "#{home_dir}/log/output.json", out_dir
end
|
34
|
+
|
4
35
|
test_name 'Run facter statistax' do
  # Local directory that collects the per-platform logs pulled from agents.
  log_dir = File.join(File.expand_path('..', __dir__), "log_dir")

  agents.each do |agent|
    is_gem = false # first pass benchmarks the native (cpp) facter
    home_dir = on(agent, 'pwd').stdout.chop
    platform_name = correct_platform_name(agent)
    host_dir = File.join(log_dir, "#{platform_name}")

    step 'Run facter statistax for Cfacter' do
      run_statistax(agent, home_dir, is_gem)
    end

    step 'Save output' do
      save_output(agent, home_dir, host_dir, is_gem)
    end

    step 'Run facter statistax for facter-ng' do
      # NOTE(review): string 'true' here vs boolean false above — both are
      # only interpolated into the command line / folder choice, so it works,
      # but the types are inconsistent.
      is_gem = 'true'
      run_statistax(agent, home_dir, is_gem)
    end

    step 'Save output to files' do
      save_output(agent, home_dir, host_dir, is_gem)
    end

    step 'Copy results to Google spreadsheet' do
      LogPerformanceTimes.new(log_dir).populate_logs_for(platform_name)
    end
  end
end
|