xcknife 0.1.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,6 @@
1
module XCKnife
  # Root of the xcknife exception hierarchy; rescue this to catch any
  # error raised by the gem.
  class XCKnifeError < StandardError; end

  # Raised when a json-stream input cannot be interpreted.
  class StreamParsingError < XCKnifeError; end
end
@@ -0,0 +1,24 @@
1
module XCKnife
  # Shared helpers for walking parsed xctool json-stream events.
  module JsonStreamParserHelper
    extend self

    # Iterates over events, calling block once for each test_target/test event
    # on events (a parsed json_stream iterable).
    #
    # Tracks the current test target from "begin-ocunit" events and yields
    # every "end-test" event together with that target.
    #
    # @param events [Enumerable] parsed json-stream events (OpenStruct-like)
    # @yieldparam current_target [String] target the test event belongs to
    # @yieldparam result [Object] the normalized "end-test" event
    # @raise [XCKnife::StreamParsingError] if a test event appears before any
    #   "begin-ocunit" event established a target
    def each_test_event(events, &block)
      current_target = nil
      events.each do |result|
        current_target = result.targetName if result.event == "begin-ocunit"
        # `&&` instead of `and`: identical truth table here, but avoids the
        # low-precedence `and` pitfall in boolean expressions.
        if result.test && result.event == "end-test"
          raise XCKnife::StreamParsingError, "No test target defined" if current_target.nil?
          block.call(current_target, normalize_result(result))
        end
      end
    end

    # Coerces totalDuration to a Float when it was serialized as a String.
    # Mutates and returns the given result.
    def normalize_result(result)
      if result.totalDuration.is_a?(String)
        result.totalDuration = result.totalDuration.to_f
      end
      result
    end
  end
end
@@ -0,0 +1,138 @@
1
+ require 'optparse'
2
+
3
module XCKnife
  # Command-line entry point for xcknife. Parses CLI options/arguments,
  # runs StreamParser over the given json-stream files, and prints (or
  # writes to a file) the computed shard assignment as JSON.
  class Runner
    include XCToolCmdHelper

    # Entry point used by the `xcknife` executable: builds a runner from ARGV.
    def self.invoke
      new(ARGV).run
    end

    attr_reader :parser

    def initialize(args)
      @abbreviated_output = false
      @partitions = []
      @partition_names = []
      @worker_count = nil
      @historical_timings_file = nil
      @current_tests_file = nil
      @output_file_name = nil
      parse_arguments(args)
    end

    # Computes the shards and emits the result; exits with status 1 on any
    # XCKnifeError, reporting the message on stderr.
    def run
      stream_parser = XCKnife::StreamParser.new(@worker_count, @partitions)
      result = stream_parser.compute_shards_for_file(@historical_timings_file, @current_tests_file)
      data = @abbreviated_output ? gen_abbreviated_output(result) : gen_full_output(result)
      write_output(data)
    rescue XCKnife::XCKnifeError => e
      warn "Error: #{e}"
      exit 1
    end
    private
    # Abbreviated output: only the xctool `-only` argument lists, one entry
    # per partition set.
    def gen_abbreviated_output(result)
      result.test_maps.map { |partition_set| xctool_only_arguments_for_a_partition_set(partition_set) }
    end

    # Full output: metadata (worker count, totals, extrapolation stats)
    # plus per-partition-set shard details.
    def gen_full_output(result)
      {
        metadata: {
          worker_count: @worker_count,
          partition_set_count: result.test_maps.size,
          total_time_in_ms: result.total_test_time,
        }.merge(result.stats.to_h),
        partition_set_data: partition_sets_data(result)
      }
    end

    # Builds one hash per partition set; shard_number is global across all
    # partition sets, hence the counter outside the inner map.
    def partition_sets_data(result)
      shard_number = 0
      result.test_maps.each_with_index.map do |partition_set, partition_set_i|
        partition_data = partition_set.each_with_index.map do |partition, partition_j|
          shard_number += 1
          partition_data(result, shard_number, partition, partition_set_i, partition_j)
        end

        {
          partition_set: @partition_names[partition_set_i],
          size: partition_set.size,
          imbalance_ratio: result.test_time_imbalances.partition_set[partition_set_i],
          partitions: partition_data
        }
      end
    end

    # Details for a single shard: its number, xctool CLI arguments, and its
    # time-imbalance ratio within the partition set.
    def partition_data(result, shard_number, partition, partition_set_i, partition_j)
      {
        shard_number: shard_number,
        cli_arguments: xctool_only_arguments(partition),
        partition_imbalance_ratio: result.test_time_imbalances.partitions[partition_set_i][partition_j]
      }
    end

    # Pretty-prints JSON to stdout, or to @output_file_name when given.
    def write_output(data)
      json = JSON.pretty_generate(data)
      return puts json if @output_file_name.nil?
      File.open(@output_file_name, "w") { |f| f.puts(json) }
      puts "Wrote file to: #{@output_file_name}"
    end

    # Validates option/positional arguments; exits with usage on error.
    # Positionals: worker-count, historical timings file, [current tests file].
    def parse_arguments(args)
      positional_arguments = parse_options(args)
      if positional_arguments.size < required_arguments.size
        warn_and_exit("You must specify *all* required arguments: #{required_arguments.join(", ")}")
      end
      if @partitions.empty?
        warn_and_exit("At least one target partition set must be provided with -p flag")
      end
      worker_count, @historical_timings_file, @current_tests_file = positional_arguments
      # Integer() raises on malformed input rather than silently returning 0.
      @worker_count = Integer(worker_count)
    end

    # Runs OptionParser over args; returns the remaining positional arguments.
    def parse_options(args)
      build_parser
      begin
        parser.parse(args)
      rescue OptionParser::ParseError => error
        warn_and_exit(error)
      end
    end

    def build_parser
      @parser = OptionParser.new do |opts|
        opts.banner += " #{arguments_banner}"
        # -p may be repeated; each occurrence adds one partition set.
        opts.on("-p", "--partition TARGETS",
          "Comma separated list of targets. Can be used multiple times.") do |v|
          @partition_names << v
          @partitions << v.split(",")
        end
        opts.on("-o", "--output FILENAME", "Output file. Defaults to STDOUT") { |v| @output_file_name = v }
        opts.on("-a", "--abbrev", "Results are abbreviated") { |v| @abbreviated_output = v }

        opts.on_tail("-h", "--help", "Show this message") do
          puts opts
          exit
        end
      end
    end

    def required_arguments
      %w[worker-count historical-timings-json-stream-file]
    end

    def optional_arguments
      %w[current-tests-json-stream-file]
    end

    # Usage banner suffix: required args plus bracketed optional args.
    def arguments_banner
      optional_args = optional_arguments.map { |a| "[#{a}]" }
      (required_arguments + optional_args).join(" ")
    end

    # Prints the message followed by usage help, then exits with status 1.
    def warn_and_exit(msg)
      warn "#{msg.to_s.capitalize} \n\n#{parser}"
      exit 1
    end
  end
end
@@ -0,0 +1,222 @@
1
+ require 'xcknife/json_stream_parser_helper'
2
+ require 'json'
3
+ require 'set'
4
+ require 'ostruct'
5
+ require 'forwardable'
6
+
7
module XCKnife
  # Parses the output of xctool's json-stream reporter and computes a
  # balanced assignment of test classes to worker machines (shards), based
  # on historical test durations and, optionally, the current test list.
  class StreamParser
    include JsonStreamParserHelper

    attr_reader :number_of_shards, :test_partitions, :stats, :relevant_partitions

    # @param number_of_shards [Integer] total number of machines available
    # @param test_partitions [Array<Array<String>>] test target names; each
    #   inner list is one partition set
    def initialize(number_of_shards, test_partitions)
      @number_of_shards = number_of_shards
      @test_partitions = test_partitions.map(&:to_set)
      @relevant_partitions = test_partitions.flatten.to_set
      @stats = ResultStats.new
      ResultStats.members.each { |k| @stats[k] = 0 }
    end

    PartitionWithMachines = Struct.new :test_time_map, :number_of_shards, :partition_time, :max_shard_count
    MachineAssignment = Struct.new :test_time_map, :total_time
    ResultStats = Struct.new :historical_total_tests, :current_total_tests, :class_extrapolations, :target_extrapolations

    # Read-only view over the computed shards plus bookkeeping statistics.
    class PartitionResult
      TimeImbalances = Struct.new :partition_set, :partitions
      attr_reader :stats, :test_maps, :test_times, :total_test_time, :test_time_imbalances
      extend Forwardable
      delegate ResultStats.members => :@stats

      def initialize(stats, partition_sets)
        @stats = stats
        @partition_sets = partition_sets
        @test_maps = partition_sets_map(&:test_time_map)
        @test_times = partition_sets_map(&:total_time)
        @total_test_time = test_times.flatten.inject(:+)
        @test_time_imbalances = compute_test_time_imbalances
      end

      private

      # Yields the imbalance ratios of the partition sets, and the internal
      # imbalance ratio of the respective partitions.
      def compute_test_time_imbalances
        times = test_times
        average_partition_size = times.map { |l| l.inject(:+).to_f / l.size }
        ideal_partition_set_avg = average_partition_size.inject(:+) / @partition_sets.size
        partition_set_imbalance = average_partition_size.map { |avg| avg / ideal_partition_set_avg }

        internal_partition_imbalance = times.map do |partition_times|
          internal_total = partition_times.inject(:+)
          partition_times.map do |partition_time|
            (partition_time * partition_times.size).to_f / internal_total
          end
        end
        TimeImbalances.new partition_set_imbalance, internal_partition_imbalance
      end

      def partition_sets_map(&block)
        @partition_sets.map { |assignment_list| assignment_list.map(&block) }
      end
    end

    # Parses the output of a xctool json-stream reporter and computes the shards based on that
    # see: https://github.com/facebook/xctool#included-reporters
    #
    # @param historical_filename [String] path of the (usually historical) test time performance
    # @param current_test_filename [String, nil] path of the current test names and targets
    def compute_shards_for_file(historical_filename, current_test_filename = nil)
      compute_shards_for_events(parse_json_stream_file(historical_filename), parse_json_stream_file(current_test_filename))
    end

    def compute_shards_for_events(historical_events, current_events = nil)
      compute_shards_for_partitions(test_time_for_partitions(historical_events, current_events))
    end

    def compute_shards_for_partitions(test_time_for_partitions)
      PartitionResult.new(@stats, split_machines_proportionally(test_time_for_partitions).map do |partition|
        compute_single_shards(partition.number_of_shards, partition.test_time_map)
      end)
    end

    # Aggregates historical durations (milliseconds, per target and class),
    # restricted to the relevant partitions; extrapolates durations for
    # classes/targets only present in current_events.
    def test_time_for_partitions(historical_events, current_events = nil)
      analyzer = EventsAnalyzer.for(current_events, relevant_partitions)
      @stats[:current_total_tests] = analyzer.total_tests
      times_for_target_class = Hash.new { |h, current_target| h[current_target] = Hash.new(0) }
      each_test_event(historical_events) do |target_name, result|
        next unless relevant_partitions.include?(target_name)
        inc_stat :historical_total_tests
        next unless analyzer.has_test_class?(target_name, result.className)
        times_for_target_class[target_name][result.className] += (result.totalDuration * 1000).ceil
      end

      extrapolate_times_for_current_events(analyzer, times_for_target_class) if current_events
      hash_partitions(times_for_target_class)
    end

    # Distributes the available shards across partitions proportionally to
    # each partition's total test time; every partition gets at least one
    # shard, and never more shards than it has test classes.
    def split_machines_proportionally(partitions)
      total = 0
      partitions.each do |test_time_map|
        each_duration(test_time_map) { |duration_in_milliseconds| total += duration_in_milliseconds }
      end

      used_shards = 0
      assignable_shards = number_of_shards - partitions.size
      partition_with_machines_list = partitions.map do |test_time_map|
        partition_time = 0
        # `|| 1` guards the empty-map case where inject(:+) returns nil.
        max_shard_count = test_time_map.values.map(&:size).inject(:+) || 1
        each_duration(test_time_map) { |duration_in_milliseconds| partition_time += duration_in_milliseconds }
        n = [1 + (assignable_shards * partition_time.to_f / total).floor, max_shard_count].min
        used_shards += n
        PartitionWithMachines.new(test_time_map, n, partition_time, max_shard_count)
      end

      # Hand leftover shards to the partitions with the most test time that
      # can still absorb them.
      fifo_with_machines_who_can_use_more_shards = partition_with_machines_list.select { |x| x.number_of_shards < x.max_shard_count }.sort_by(&:partition_time)
      while (number_of_shards - used_shards) > 0
        if fifo_with_machines_who_can_use_more_shards.empty?
          raise XCKnife::XCKnifeError, "There are #{number_of_shards - used_shards} extra machines"
        end
        machine = fifo_with_machines_who_can_use_more_shards.pop
        machine.number_of_shards += 1
        used_shards += 1
        if machine.number_of_shards < machine.max_shard_count
          fifo_with_machines_who_can_use_more_shards.unshift(machine)
        end
      end
      partition_with_machines_list
    end

    # Computes a 2-approximation to the optimal partition_time, which is an
    # instance of the Open shop scheduling problem (which is NP-hard)
    # see: https://en.wikipedia.org/wiki/Open-shop_scheduling
    def compute_single_shards(number_of_shards, test_time_map)
      raise XCKnife::XCKnifeError, "There are not enough workers provided" if number_of_shards <= 0
      raise XCKnife::XCKnifeError, "Cannot shard an empty partition_time" if test_time_map.empty?
      assignments = Array.new(number_of_shards) { MachineAssignment.new(Hash.new { |k, v| k[v] = [] }, 0) }

      list_of_test_target_class_times = []
      test_time_map.each do |test_target, class_times|
        class_times.each do |class_name, duration_in_milliseconds|
          list_of_test_target_class_times << [test_target, class_name, duration_in_milliseconds]
        end
      end

      # Greedy longest-processing-time scheduling: largest class first, each
      # onto the currently least-loaded machine.
      list_of_test_target_class_times.sort_by! { |_test_target, _class_name, duration_in_milliseconds| -duration_in_milliseconds }
      list_of_test_target_class_times.each do |test_target, class_name, duration_in_milliseconds|
        assignment = assignments.min_by(&:total_time)
        assignment.test_time_map[test_target] << class_name
        assignment.total_time += duration_in_milliseconds
      end
      raise XCKnife::XCKnifeError, "Too many shards" if assignments.any? { |a| a.test_time_map.empty? }
      assignments
    end

    # Lazily parses a json-stream file into OpenStruct events.
    # Returns nil for a nil filename and [] when the file does not exist.
    def parse_json_stream_file(filename)
      return nil if filename.nil?
      # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
      return [] unless File.exist?(filename)
      lines = IO.readlines(filename)
      lines.lazy.map { |line| OpenStruct.new(JSON.load(line)) }
    end

    private

    def inc_stat(name)
      @stats[name] += 1
    end

    # Yields every class duration in a target => { class => duration } map.
    def each_duration(test_time_map, &block)
      test_time_map.each do |test_target, class_times|
        class_times.each do |class_name, duration_in_milliseconds|
          yield(duration_in_milliseconds)
        end
      end
    end

    # Fills in durations for classes/targets that only exist in the current
    # events: per-target median for new classes of a known target, and the
    # global median split across classes for brand-new targets.
    def extrapolate_times_for_current_events(analyzer, times_for_target_class)
      median_map = {}
      times_for_target_class.each do |test_target, class_times|
        median_map[test_target] = median(class_times.values)
      end

      all_times_for_all_classes = times_for_target_class.values.flat_map(&:values)
      median_of_targets = median(all_times_for_all_classes)
      analyzer.target_class_map.each do |test_target, class_set|
        if times_for_target_class.has_key?(test_target)
          class_set.each do |clazz|
            unless times_for_target_class[test_target].has_key?(clazz)
              inc_stat :class_extrapolations
              times_for_target_class[test_target][clazz] = median_map[test_target]
            end
          end
        else
          inc_stat :target_extrapolations
          class_set.each do |clazz|
            inc_stat :class_extrapolations
            times_for_target_class[test_target][clazz] = extrapolated_duration(median_of_targets, class_set)
          end
        end
      end
    end

    DEFAULT_EXTRAPOLATED_DURATION = 1000
    # Per-class duration guess for a target with no historical data at all.
    def extrapolated_duration(median_of_targets, class_set)
      return DEFAULT_EXTRAPOLATED_DURATION if median_of_targets.nil?
      median_of_targets / class_set.size
    end

    # Upper median; returns nil for an empty array.
    def median(array)
      array.sort[array.size / 2]
    end

    # Groups aggregated times by partition set; raises when a partition set
    # would end up with no tests at all.
    def hash_partitions(times)
      ret = Array.new(test_partitions.size) { {} }
      times.each do |test_target, times_map|
        test_partitions.each_with_index do |partition, i|
          ret[i][test_target] = times_map if partition.include?(test_target)
        end
      end
      ret.each_with_index do |partition, index|
        if partition.empty?
          raise XCKnife::XCKnifeError, "The following partition has no tests: #{test_partitions[index].to_a.inspect}"
        end
      end
    end
  end
end
@@ -0,0 +1,14 @@
1
+ require 'pp'
2
module XCKnife
  # Helpers that turn a computed partition into xctool command-line flags.
  module XCToolCmdHelper
    # Builds the `-only Target:ClassA,ClassB` argument pairs for a single
    # partition (a map of target name => list of test class names).
    def xctool_only_arguments(single_partition)
      single_partition.flat_map do |target, class_list|
        only_value = "#{target}:#{class_list.sort.join(',')}"
        ['-only', only_value]
      end
    end

    # Maps every partition in a partition set to its xctool arguments.
    def xctool_only_arguments_for_a_partition_set(partition_set)
      partition_set.map { |single_partition| xctool_only_arguments(single_partition) }
    end
  end
end
@@ -0,0 +1,4 @@
1
+ $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
2
+ require 'xcknife'
3
+ require 'set'
4
+ require 'pp'
@@ -0,0 +1,344 @@
1
+ require 'spec_helper'
2
+
3
# Specs for XCKnife::StreamParser: aggregation of historical timings,
# extrapolation from current events, proportional machine splitting, and
# single-partition shard computation.
describe XCKnife::StreamParser do
  context 'test_time_for_partitions' do
    subject { XCKnife::StreamParser.new(2, [["TestTarget1"], ["TestTarget2"]]) }

    it 'decide how many shards each partition set needs' do
      stream = [xctool_target_event("TestTarget1"),
                xctool_test_event("ClassTest1", "test1"),
                xctool_target_event("TestTarget2"),
                xctool_test_event("ClassTest2", "test1")
      ]
      result = subject.test_time_for_partitions(stream)
      # Durations are reported in seconds (1.0 default) and stored in ms.
      expect(result).to eq([{ "TestTarget1" => { "ClassTest1" => 1000 } },
                            { "TestTarget2" => { "ClassTest2" => 1000 } }])
    end

    it 'aggretates the times at the class level' do
      stream_parser = XCKnife::StreamParser.new(2, [["TestTarget1"]])
      stream = [xctool_target_event("TestTarget1"),
                xctool_test_event("ClassTest1", "test1", 1.0),
                xctool_test_event("ClassTest1", "test2", 2.0)
      ]
      result = stream_parser.test_time_for_partitions(stream)
      expect(result).to eq([{ "TestTarget1" => { "ClassTest1" => 3000 } }])
    end

    it 'works with multiple partitions' do
      stream_parser = XCKnife::StreamParser.new(2, [["TestTarget1"], ["TestTarget2"], ["TestTarget3"]])

      stream = [xctool_target_event("TestTarget1"),
                xctool_test_event("Class1", "test1"),
                xctool_target_event("TestTarget2"),
                xctool_test_event("Class2", "test1"),
                xctool_target_event("TestTarget3"),
                xctool_test_event("Class3", "test1"),
      ]
      result = stream_parser.test_time_for_partitions(stream)
      expect(result).to eq([{ "TestTarget1" => { "Class1" => 1000 } },
                            { "TestTarget2" => { "Class2" => 1000 } },
                            { "TestTarget3" => { "Class3" => 1000 } }])
    end

    it 'allows the same target to be listed on multiple partitions' do
      stream_parser = XCKnife::StreamParser.new(2, [["TestTarget1"], ["TestTarget2", "TestTarget1"]])
      stream = [xctool_target_event("TestTarget1"),
                xctool_test_event("ClassTest1", "test1"),
                xctool_target_event("TestTarget2"),
                xctool_test_event("ClassTest2", "test1"),
      ]
      result = stream_parser.test_time_for_partitions(stream)
      # TestTarget1's times appear in both partition sets that list it.
      expect(result).to eq([{ "TestTarget1" => { "ClassTest1" => 1000 } },
                            { "TestTarget2" => { "ClassTest2" => 1000 },
                              "TestTarget1" => { "ClassTest1" => 1000 }
                            }])
    end

    it 'raises error when an empty partition is specified' do
      stream_parser = XCKnife::StreamParser.new(1, [["TestTarget1"]])
      expect { stream_parser.test_time_for_partitions([]) }.to raise_error(XCKnife::XCKnifeError, 'The following partition has no tests: ["TestTarget1"]')
    end
  end

  context 'provided historical events' do
    subject { XCKnife::StreamParser.new(2, [["TestTarget1", "TestTarget2", "TestTarget3", "NewTestTarget1"]]) }

    it 'ignores test targets not present on current events' do
      historical_events = [xctool_target_event("TestTarget1"),
                           xctool_test_event("ClassTest1", "test1"),
                           xctool_test_event("ClassTest1", "test2"),
                           xctool_target_event("TestTarget2"),
                           xctool_test_event("ClassTest2", "test1"),
                           xctool_test_event("ClassTest2", "test2")
      ]
      current_events = [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest1", "test1")
      ]
      result = subject.test_time_for_partitions(historical_events, current_events)
      expect(result).to eq([{ "TestTarget1" => { "ClassTest1" => 2000 } }])
      expect(subject.stats.to_h).to eq({historical_total_tests: 4, current_total_tests: 1, class_extrapolations: 0, target_extrapolations: 0})
    end


    it 'ignores test classes not present on current events' do
      historical_events = [xctool_target_event("TestTarget1"),
                           xctool_test_event("ClassTest1", "test1"),
                           xctool_test_event("ClassTest1", "test2"),
                           xctool_test_event("ClassTest2", "test1"),
                           xctool_test_event("ClassTest2", "test2")
      ]
      current_events = [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest1", "test1")
      ]
      result = subject.test_time_for_partitions(historical_events, current_events)
      expect(result).to eq([{ "TestTarget1" => { "ClassTest1" => 2000 } }])
      expect(subject.stats.to_h).to eq({historical_total_tests: 4, current_total_tests: 1, class_extrapolations: 0, target_extrapolations: 0})
    end

    it 'extrapolates for new test targets' do
      historical_events = [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest1", "test1")
      ]
      current_events = [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest1", "test1"),
        xctool_target_event("NewTestTargetButNotRelevant"),
        xctool_test_event("ClassTest10", "test1")
      ]
      result = subject.test_time_for_partitions(historical_events, current_events)
      # The new target is not in any partition set, so nothing is extrapolated.
      expect(result.to_set).to eq([{
                                    "TestTarget1" => { "ClassTest1" => 1000 }
                                  }
      ].to_set)
      expect(subject.stats.to_h).to eq({historical_total_tests: 1, current_total_tests: 1, class_extrapolations: 0, target_extrapolations: 0})
    end

    it 'extrapolates for new test classes' do
      historical_events = [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest1", "test1", 1.0),
        xctool_test_event("ClassTest2", "test2", 5.0),
        xctool_test_event("ClassTest3", "test3", 10000.0)
      ]
      current_events = historical_events + [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest2", "test2"),
        xctool_test_event("ClassTestNew", "test1")
      ]
      result = subject.test_time_for_partitions(historical_events, current_events)
      # Upper median of [1000, 5000, 10000000] per-class times.
      median = 5000
      expect(result).to eq([{
                             "TestTarget1" =>
                               {
                                 "ClassTest1" => 1000,
                                 "ClassTest2" => 5000,
                                 "ClassTest3" => 10000000,
                                 "ClassTestNew" => median
                               },
                           }])
      expect(subject.stats.to_h).to eq({historical_total_tests: 3, current_total_tests: 5, class_extrapolations: 1, target_extrapolations: 0})
    end

    # NOTE(review): this example's body duplicates 'extrapolates for new test
    # classes' — presumably it was meant to exercise an irrelevant target;
    # worth confirming against the intended scenario.
    it "ignores test classes that don't belong to relevant targets" do
      historical_events = [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest1", "test1", 1.0),
        xctool_test_event("ClassTest2", "test2", 5.0),
        xctool_test_event("ClassTest3", "test3", 10000.0)
      ]
      current_events = historical_events + [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest2", "test2"),
        xctool_test_event("ClassTestNew", "test1")
      ]
      result = subject.test_time_for_partitions(historical_events, current_events)
      median = 5000
      expect(result).to eq([{
                             "TestTarget1" =>
                               {
                                 "ClassTest1" => 1000,
                                 "ClassTest2" => 5000,
                                 "ClassTest3" => 10000000,
                                 "ClassTestNew" => median
                               },
                           }])
      expect(subject.stats.to_h).to eq({historical_total_tests: 3, current_total_tests: 5, class_extrapolations: 1, target_extrapolations: 0})
    end
  end

  context 'provided an empty set of applicable historical events' do
    subject { XCKnife::StreamParser.new(2, [["TestTarget1", "TestTarget2", "TestTarget3", "NewTestTarget1"]]) }

    let(:empty_historical_events) { [] }
    let(:default_extrapolated_duration) { XCKnife::StreamParser::DEFAULT_EXTRAPOLATED_DURATION }

    it 'extrapolates the test target duration and classes get extrapolated' do
      current_events = [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest1", "test1")
      ]
      result = subject.test_time_for_partitions(empty_historical_events, current_events)
      expect(result).to eq([{ "TestTarget1" => { "ClassTest1" => default_extrapolated_duration } }])
    end

    it 'extrapolates the test target to different classes' do
      # Historical data exists only for a target absent from current events.
      effectively_empty_historical_events = [
        xctool_target_event("TestTarget2"),
        xctool_test_event("IgnoredClass", "ignoredTest"),
      ]
      current_events = [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest1", "test1"),
        xctool_test_event("ClassTest2", "test2")
      ]
      result = subject.test_time_for_partitions(effectively_empty_historical_events, current_events)
      duration = default_extrapolated_duration
      expect(result).to eq([{ "TestTarget1" => { "ClassTest1" => duration, "ClassTest2" => duration } }])
    end

    it "can handle multiple test targets and test classes" do
      current_events = [
        xctool_target_event("TestTarget1"),
        xctool_test_event("ClassTest11", "test1"),
        xctool_test_event("ClassTest12", "test1"),
        xctool_test_event("ClassTest13", "test1"),
        xctool_target_event("TestTarget2"),
        xctool_test_event("ClassTest21", "test1"),
        xctool_test_event("ClassTest22", "test1"),
        xctool_test_event("ClassTest23", "test1"),
        xctool_target_event("TestTarget3"),
        xctool_test_event("ClassTest31", "test1"),
        xctool_test_event("ClassTest32", "test1"),
        xctool_test_event("ClassTest33", "test1"),
      ]
      result = subject.test_time_for_partitions(empty_historical_events, current_events)
      duration = default_extrapolated_duration
      expect(result).to eq(
        [
          {
            "TestTarget1" => { "ClassTest11" => duration, "ClassTest12" => duration, "ClassTest13" => duration },
            "TestTarget2" => { "ClassTest21" => duration, "ClassTest22" => duration, "ClassTest23" => duration },
            "TestTarget3" => { "ClassTest31" => duration, "ClassTest32" => duration, "ClassTest33" => duration } }]
      )
    end
  end

  it "can split_machines_proportionally" do
    stream_parser = XCKnife::StreamParser.new(5, [["TargetOnPartition1"], ["TargetOnPartition2"]])
    result = stream_parser.split_machines_proportionally([
      { "TargetOnPartition1" => { "TestClass1" => 500, "TestClass2" => 500 } },
      { "TargetOnPartition2" => { "TestClass3" => 1000, "TestClass4" => 1000, "TestClass5" => 1000, "TestClass6" => 1000 } }])
    expect(result.map(&:number_of_shards)).to eq([1, 4])
  end

  it "can split_machines_proportionally even when in the presence of large imbalances" do
    stream_parser = XCKnife::StreamParser.new(5, [["TargetOnPartition1"], ["TargetOnPartition2"], ["TargetOnPartition3"]])
    result = stream_parser.split_machines_proportionally([{ "TargetOnPartition1" => { "TestClass1" => 1 } },
                                                          { "TargetOnPartition2" => { "TestClass2" => 1} },
                                                          { "TargetOnPartition3" => { "TestClass3" => 1000, "TestClass4" => 1000, "TestClass5" => 1000} }])
    expect(result.map(&:number_of_shards)).to eq([1, 1, 3])
  end


  it "should never let partition_sets have less than 1 machine alocated to them" do
    stream_parser = XCKnife::StreamParser.new(3, [["TestTarget1"], ["TestTarget2"]])
    result = stream_parser.split_machines_proportionally([{ "TargetOnPartition1" => { "TestClass1" => 1 } },
                                                          { "TargetOnPartition2" => { "TestClass2" => 2000, "TestClass3" => 2000 } }])
    expect(result.map(&:number_of_shards)).to eq([1, 2])
  end


  # NOTE(review): context name duplicates the earlier 'test_time_for_partitions'
  # block, though these examples exercise compute_single_shards.
  context 'test_time_for_partitions' do
    it "partitions the test classes accross the number of machines" do
      stream_parser = XCKnife::StreamParser.new(2, [["Test Target"]])
      partition = { "Test Target" => { "Class1" => 1000, "Class2" => 1000, "Class3" => 2000 } }
      shards = stream_parser.compute_single_shards(2, partition).map(&:test_time_map)
      expect(shards.size).to eq 2
      first_shard, second_shard = shards.sort_by { |map| map.values.flatten.size }
      expect(first_shard.keys).to eq(["Test Target"])
      expect(first_shard.values).to eq([["Class3"]])

      expect(second_shard.keys).to eq(["Test Target"])
      expect(second_shard.values.map(&:to_set)).to eq([["Class1", "Class2"].to_set])
    end

    it "partitions the test, across targets" do
      stream_parser = XCKnife::StreamParser.new(2, [["Test Target1", "Test Target2", "Test Target3"]])
      partition = { "Test Target1" => { "Class1" => 1000 },
                    "Test Target2" => { "Class2" => 1000 },
                    "Test Target3" => { "Class3" => 2000 } }
      shards = stream_parser.compute_single_shards(2, partition).map(&:test_time_map)
      expect(shards.size).to eq 2
      first_shard, second_shard = shards.sort_by { |map| map.values.flatten.size }
      expect(first_shard.keys).to eq(["Test Target3"])
      expect(first_shard.values).to eq([["Class3"]])

      expect(second_shard.keys.to_set).to eq(["Test Target1", "Test Target2"].to_set)
      expect(second_shard.values.to_set).to eq([["Class1"], ["Class2"]].to_set)
    end

    it "raises an error if there are too many shards" do
      too_many_machines = 2
      stream_parser = XCKnife::StreamParser.new(too_many_machines, [["Test Target1"]])
      partition = { "Test Target1" => { "Class1" => 1000 } }
      expect { stream_parser.compute_single_shards(too_many_machines, partition) }.
        to raise_error(XCKnife::XCKnifeError, "Too many shards")
    end
  end

  it "can compute test for all partitions" do
    stream_parser = XCKnife::StreamParser.new(3, [["TargetOnPartition1"], ["TargetOnPartition2"]])
    result = stream_parser.compute_shards_for_partitions([{ "TargetOnPartition1" => { "TestClass1" => 1000 } },
                                                          { "TargetOnPartition2" => { "TestClass2" => 4000, "TestClass3" => 4000 } }])
    expect(result.test_maps).to eq([[{ "TargetOnPartition1" => ["TestClass1"] }],
                                    [{ "TargetOnPartition2" => ["TestClass2"] },
                                     { "TargetOnPartition2" => ["TestClass3"] }]])
    expect(result.test_times).to eq [[1000], [4000, 4000]]
    expect(result.total_test_time).to eq 9000
    expect(result.test_time_imbalances.to_h).to eq({
      partition_set: [0.4, 1.6],
      partitions: [[1.0], [1.0, 1.0]]
    })
  end

  it "can compute for only one partition set" do
    stream_parser = XCKnife::StreamParser.new(1, [["TargetOnPartition1"]])
    historical_events = [xctool_target_event("TargetOnPartition1"),
                         xctool_test_event("ClassTest1", "test1"),
    ]
    result = stream_parser.compute_shards_for_events(historical_events)
    expect(result.test_maps).to eq([[{ "TargetOnPartition1" => ["ClassTest1"] }]])
    expect(result.test_times).to eq [[1000]]
    expect(result.total_test_time).to eq 1000
    expect(result.stats.to_h).to eq({historical_total_tests: 1, current_total_tests: 0, class_extrapolations: 0, target_extrapolations: 0})
    expect(result.test_time_imbalances.to_h).to eq({
      partition_set: [1.0],
      partitions: [[1.0]]
    })
  end

  # Builds a fake xctool "end-test" event; duration is in seconds.
  def xctool_test_event(class_name, method_name, duration = 1.0)
    OpenStruct.new({ result: "success",
                     exceptions: [],
                     test: "-[#{class_name} #{method_name}]",
                     className: class_name,
                     event: "end-test",
                     methodName: method_name,
                     succeeded: true,
                     output: "",
                     totalDuration: duration,
                     timestamp: 0
                   })
  end

  # Builds a fake xctool "begin-ocunit" event announcing a test target.
  def xctool_target_event(target_name)
    OpenStruct.new({ result: "success",
                     event: "begin-ocunit",
                     targetName: target_name
                   })
  end
end