minitest-heat 0.0.5 → 0.0.9
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/.rubocop.yml +23 -0
- data/Gemfile +5 -3
- data/Gemfile.lock +30 -1
- data/README.md +12 -3
- data/Rakefile +8 -6
- data/lib/minitest/heat/backtrace.rb +19 -74
- data/lib/minitest/heat/hit.rb +79 -0
- data/lib/minitest/heat/issue.rb +49 -34
- data/lib/minitest/heat/line.rb +74 -0
- data/lib/minitest/heat/location.rb +20 -14
- data/lib/minitest/heat/map.rb +7 -34
- data/lib/minitest/heat/output/backtrace.rb +32 -32
- data/lib/minitest/heat/output/issue.rb +144 -0
- data/lib/minitest/heat/output/map.rb +59 -3
- data/lib/minitest/heat/output/marker.rb +50 -0
- data/lib/minitest/heat/output/results.rb +44 -22
- data/lib/minitest/heat/output/source_code.rb +2 -2
- data/lib/minitest/heat/output/token.rb +15 -13
- data/lib/minitest/heat/output.rb +23 -120
- data/lib/minitest/heat/results.rb +19 -75
- data/lib/minitest/heat/timer.rb +81 -0
- data/lib/minitest/heat/version.rb +3 -1
- data/lib/minitest/heat.rb +3 -0
- data/lib/minitest/heat_plugin.rb +5 -5
- data/lib/minitest/heat_reporter.rb +50 -26
- data/minitest-heat.gemspec +4 -2
- metadata +64 -4
- data/lib/minitest/heat/output/location.rb +0 -20
data/lib/minitest/heat/output/results.rb
CHANGED
@@ -6,19 +6,25 @@ module Minitest
     class Results
       extend Forwardable
 
-      attr_accessor :results
+      attr_accessor :results, :timer
 
-      def_delegators :@results, :errors, :brokens, :failures, :
+      def_delegators :@results, :issues, :errors, :brokens, :failures, :skips, :painfuls, :slows, :problems?
 
-      def initialize(results)
+      def initialize(results, timer)
         @results = results
+        @timer = timer
         @tokens = []
       end
 
       def tokens
+        # Only show the issue type counts if there are issues
         @tokens << [*issue_counts_tokens] if issue_counts_tokens&.any?
-
-        @tokens << [
+
+        @tokens << [
+          timing_token, spacer_token,
+          test_count_token, tests_performance_token, join_token,
+          assertions_count_token, assertions_performance_token
+        ]
 
         @tokens
       end
@@ -33,16 +39,23 @@ module Minitest
       end
 
       def issue_counts_tokens
-        return unless
+        return unless issues.any?
 
-        counts = [
+        counts = [
+          error_count_token,
+          broken_count_token,
+          failure_count_token,
+          skip_count_token,
+          painful_count_token,
+          slow_count_token
+        ].compact
 
         # # Create an array of separator tokens one less than the total number of issue count tokens
-
+        spacer_tokens = Array.new(counts.size, spacer_token)
 
         counts_with_separators = counts
-
-
+          .zip(spacer_tokens) # Add separators between the counts
+          .flatten(1) # Flatten the zipped separators, but no more
 
         counts_with_separators.pop # Remove the final trailing zipped separator that's not needed
 
@@ -66,29 +79,34 @@ module Minitest
         issue_count_token(style, skips, name: 'Skip')
       end
 
+      def painful_count_token
+        style = problems? || skips.any? ? :muted : :painful
+        issue_count_token(style, painfuls, name: 'Painfully Slow')
+      end
+
       def slow_count_token
-        style = problems? ? :muted : :slow
+        style = problems? || skips.any? ? :muted : :slow
         issue_count_token(style, slows, name: 'Slow')
       end
 
-      def
-        [:
+      def test_count_token
+        [:default, "#{pluralize(timer.test_count, 'test')}"]
       end
 
       def tests_performance_token
-        [:default, "
+        [:default, " (#{timer.tests_per_second}/s)"]
       end
 
-      def
-        [:default, "
+      def assertions_count_token
+        [:default, "#{pluralize(timer.assertion_count, 'assertion')}"]
       end
 
-      def
-        [:
+      def assertions_performance_token
+        [:default, " (#{timer.assertions_per_second}/s)"]
       end
 
-      def
-        [:
+      def timing_token
+        [:bold, "#{timer.total_time.round(2)}s"]
       end
 
       def issue_count_token(type, collection, name: type.capitalize)
@@ -97,8 +115,12 @@ module Minitest
         [type, pluralize(collection.size, name)]
       end
 
-      def
-        [:
+      def spacer_token
+        Output::TOKENS[:spacer]
+      end
+
+      def join_token
+        [:default, ' with ']
       end
     end
   end
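For context, the tokens built by Output::Results above are nested arrays of `[style, text]` pairs rather than plain strings. Below is a minimal, self-contained sketch (not the gem's code; the counts, timings, and the simplified `pluralize` helper are illustrative assumptions) of how such pairs compose into the one-line summary:

```ruby
# Sketch only: composing [style, text] token pairs the way Output::Results#tokens does.
def pluralize(count, singular)
  count == 1 ? "#{count} #{singular}" : "#{count} #{singular}s"
end

spacer = [:muted, ' · ']
tokens = [
  [:bold, '1.25s'], spacer,
  [:default, pluralize(20, 'test')], [:default, ' (16.0/s)'],
  [:default, ' with '],
  [:default, pluralize(60, 'assertion')], [:default, ' (48.0/s)']
]

# A styled printer would map each style symbol to ANSI codes; here we just join the text parts.
puts tokens.map { |_style, text| text }.join
# => "1.25s · 20 tests (16.0/s) with 60 assertions (48.0/s)"
```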
data/lib/minitest/heat/output/source_code.rb
CHANGED
@@ -101,9 +101,9 @@ module Minitest
       # @return [Array<Symbol>] the Token styles for the line number and line of code
       def styles_for(line_of_code)
         if line_of_code == source.line && highlight_key_line?
-          [
+          %i[default default]
         else
-          [
+          %i[muted muted]
         end
       end
 
data/lib/minitest/heat/output/token.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Minitest
   module Heat
     # Friendly API for printing nicely-formatted output to the console
@@ -6,18 +8,18 @@ module Minitest
       class InvalidStyle < ArgumentError; end
 
       STYLES = {
-        success:
-        slow:
-        painful:
-        error:
-        broken:
-        failure:
-        skipped:
+        success: %i[default green],
+        slow: %i[default green],
+        painful: %i[bold green],
+        error: %i[bold red],
+        broken: %i[bold red],
+        failure: %i[default red],
+        skipped: %i[default yellow],
         warning_light: %i[light yellow],
-        italicized:
-        bold:
-        default:
-        muted:
+        italicized: %i[italic gray],
+        bold: %i[bold default],
+        default: %i[default default],
+        muted: %i[light gray]
       }.freeze
 
       attr_accessor :style_key, :content
@@ -38,14 +40,14 @@ module Minitest
       end
 
       def eql?(other)
-        style_key == other.style_key &&
+        style_key == other.style_key && content == other.content
       end
       alias :== eql?
 
       private
 
       ESC_SEQUENCE = "\e["
-      END_SEQUENCE =
+      END_SEQUENCE = 'm'
 
       WEIGHTS = {
         default: 0,
data/lib/minitest/heat/output.rb
CHANGED
@@ -2,8 +2,8 @@
 
 require_relative 'output/backtrace'
 require_relative 'output/issue'
-require_relative 'output/location'
 require_relative 'output/map'
+require_relative 'output/marker'
 require_relative 'output/results'
 require_relative 'output/source_code'
 require_relative 'output/token'
@@ -12,33 +12,16 @@ module Minitest
   module Heat
     # Friendly API for printing nicely-formatted output to the console
     class Output
-
-
-
-
-
-
-
-
-
-
-        ],
-        failure: [
-          [ %i[failure label], %i[muted spacer], %i[default test_name], %i[muted spacer], %i[muted test_class] ],
-          [ %i[italicized summary] ],
-          [ %i[muted short_location], ],
-          [ %i[default source_summary], ],
-        ],
-        skipped: [
-          [ %i[skipped label], %i[muted spacer], %i[default test_name], %i[muted spacer], %i[muted test_class] ],
-          [ %i[italicized summary] ],
-          [], # New Line
-        ],
-        slow: [
-          [ %i[slow label], %i[muted spacer], %i[default test_name], %i[muted spacer], %i[default test_class] ],
-          [ %i[bold slowness], %i[muted spacer], %i[default location], ],
-          [], # New Line
-        ]
+      SYMBOLS = {
+        middot: '·',
+        arrow: '➜',
+        lead: '|',
+      }.freeze
+
+      TOKENS = {
+        spacer: [:muted, " #{SYMBOLS[:middot]} "],
+        muted_arrow: [:muted, " #{SYMBOLS[:arrow]} "],
+        muted_lead: [:muted, "#{SYMBOLS[:lead]} "],
       }
 
       attr_reader :stream
@@ -59,106 +42,22 @@ module Minitest
       end
       alias newline puts
 
-      # TOOD: Convert to output class
-      # - This should likely live in the output/issue class
-      # - Add a 'fail_fast' option that shows the issue as soon as the failure occurs
-      def marker(value)
-        case value
-        when 'E' then text(:error, value)
-        when 'B' then text(:failure, value)
-        when 'F' then text(:failure, value)
-        when 'S' then text(:skipped, value)
-        else text(:success, value)
-        end
-      end
-
-      # TOOD: Convert to output class
-      # - This should likely live in the output/issue class
-      # - There may be justification for creating different "strategies" for the various types
       def issue_details(issue)
-
-
-        formatter.each do |lines|
-          lines.each do |tokens|
-            style, content_method = *tokens
-
-            if issue.respond_to?(content_method)
-              # If it's an available method on issue, use that to get the content
-              content = issue.send(content_method)
-              text(style, content)
-            else
-              # Otherwise, fall back to output and pass issue to *it*
-              send(content_method, issue)
-            end
-          end
-          newline
-        end
+        print_tokens Minitest::Heat::Output::Issue.new(issue).tokens
       end
 
-
-
-        map.files.each do |file|
-          pathname = Pathname(file[0])
-
-          path = pathname.dirname.to_s
-          filename = pathname.basename.to_s
-
-          values = map.hits[pathname.to_s]
-
-
-          text(:error, 'E' * values[:error].size) if values[:error]&.any?
-          text(:broken, 'B' * values[:broken].size) if values[:broken]&.any?
-          text(:failure, 'F' * values[:failure].size) if values[:failure]&.any?
-
-          unless values[:error]&.any? || values[:broken]&.any? || values[:failure]&.any?
-            text(:skipped, 'S' * values[:skipped].size) if values[:skipped]&.any?
-            text(:painful, '—' * values[:painful].size) if values[:painful]&.any?
-            text(:slow, '–' * values[:slow].size) if values[:slow]&.any?
-          end
-
-          text(:muted, ' ') if map.hits.any?
-
-          text(:muted, "#{path.delete_prefix(Dir.pwd)}/")
-          text(:default, filename)
-
-          text(:muted, ':')
-
-          all_line_numbers = values.fetch(:error, []) + values.fetch(:failure, [])
-          all_line_numbers += values.fetch(:skipped, [])
-
-          line_numbers = all_line_numbers.compact.uniq.sort
-          line_numbers.each { |line_number| text(:muted, "#{line_number} ") }
-          newline
-        end
-        newline
+      def marker(issue_type)
+        print_token Minitest::Heat::Output::Marker.new(issue_type).token
       end
 
-
-      def test_name_summary(issue)
-        text(:default, "#{issue.test_class} > #{issue.test_name}")
-      end
-
-      def compact_summary(results)
-        results_tokens = ::Minitest::Heat::Output::Results.new(results).tokens
-
-        newline
-        print_tokens(results_tokens)
+      def compact_summary(results, timer)
         newline
+        print_tokens ::Minitest::Heat::Output::Results.new(results, timer).tokens
       end
 
-      def
-
-
-        backtrace_tokens = ::Minitest::Heat::Output::Backtrace.new(location).tokens
-        print_tokens(backtrace_tokens)
-      end
-
-      def source_summary(issue)
-        filename = issue.location.project_file
-        line_number = issue.location.project_failure_line
-
-        source_code_tokens = ::Minitest::Heat::Output::SourceCode.new(filename, line_number).tokens
-        print_tokens(source_code_tokens)
+      def heat_map(map)
+        newline
+        print_tokens ::Minitest::Heat::Output::Map.new(map).tokens
       end
 
      private
@@ -176,6 +75,10 @@ module Minitest
         style_enabled? ? :styled : :unstyled
       end
 
+      def print_token(token)
+        print Token.new(*token).to_s(token_format)
+      end
+
       def print_tokens(lines_of_tokens)
         lines_of_tokens.each do |tokens|
           tokens.each do |token|
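After this refactor, Output delegates all formatting to the Output::Issue, Output::Marker, Output::Results, and Output::Map classes and only loops over their token arrays. A simplified sketch of that printing loop; the `format_token` helper here is a stand-in assumption for `Token#to_s(token_format)` from the diff above:

```ruby
# Sketch only: lines of [style, text] pairs printed one line at a time.
def format_token(style, text)
  style == :muted ? "\e[2m#{text}\e[0m" : text # assumption: dim for :muted, plain otherwise
end

def print_tokens(lines_of_tokens)
  lines_of_tokens.each do |tokens|
    tokens.each { |style, text| print format_token(style, text) }
    puts
  end
end

print_tokens([
  [[:default, 'Failure'], [:muted, ' · '], [:default, 'test_example']],
  [[:muted, 'test/example_test.rb:42']]
])
```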
data/lib/minitest/heat/results.rb
CHANGED
@@ -2,108 +2,52 @@
 
 module Minitest
   module Heat
+    # A collection of test failures
     class Results
-
-      attr_reader :test_count,
-                  :assertion_count,
-                  :success_count,
-                  :issues,
-                  :start_time,
-                  :stop_time
+      attr_reader :issues, :heat_map
 
       def initialize
-        @
-        @
-        @success_count = 0
-        @issues = {
-          error: [],
-          broken: [],
-          failure: [],
-          skipped: [],
-          slow: []
-        }
-        @start_time = nil
-        @stop_time = nil
-      end
-
-      def start_timer!
-        @start_time = Minitest.clock_time
+        @issues = []
+        @heat_map = Heat::Map.new
       end
 
-      def
-        @
-
-
-      def total_time
-        delta = @stop_time - @start_time
-
-        # Don't return 0
-        delta.zero? ? 0.1 : delta
-      end
-
-      def tests_per_second
-        (assertion_count / total_time).round(2)
-      end
-
-      def assertions_per_second
-        (assertion_count / total_time).round(2)
+      def record(issue)
+        @issues.push(issue)
+        @heat_map.add(*issue.to_hit) if issue.hit?
       end
 
       def problems?
-        errors? || brokens? || failures?
+        errors.any? || brokens.any? || failures.any?
       end
 
       def errors
-
+        @errors ||= select_issues(:error)
       end
 
       def brokens
-
+        @brokens ||= select_issues(:broken)
       end
 
       def failures
-
+        @failures ||= select_issues(:failure)
       end
 
       def skips
-
+        @skips ||= select_issues(:skipped)
      end
 
-      def
-
-          .fetch(:slow) { [] }
-          .sort { |issue| issue.time }
-          .reverse
-          .take(3)
-      end
-
-      def errors?
-        errors.any?
+      def painfuls
+        @painfuls ||= select_issues(:painful).sort_by(&:time).reverse
       end
 
-      def
-
-      end
-
-      def failures?
-        failures.any?
-      end
-
-      def skips?
-        skips.any?
-      end
-
-      def slows?
-        slows.any?
+      def slows
+        @slows ||= select_issues(:slow).sort_by(&:time).reverse
       end
 
-
-        @test_count += 1
-        @assertion_count += issue.result.assertions
-        @success_count += 1 if issue.result.passed?
+      private
 
-
-
+      def select_issues(issue_type)
+        issues.select { |issue| issue.type == issue_type }
       end
     end
   end
data/lib/minitest/heat/timer.rb
ADDED
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module Minitest
+  module Heat
+    # Provides a timer to keep track of the full test suite duration and provide convenient methods
+    # for calculating tests/second and assertions/second
+    class Timer
+      attr_reader :test_count, :assertion_count, :start_time, :stop_time
+
+      # Creates an instance of a timer to be used for the duration of a test suite run
+      #
+      # @return [self]
+      def initialize
+        @test_count = 0
+        @assertion_count = 0
+
+        @start_time = nil
+        @stop_time = nil
+      end
+
+      # Records the start time for the full test suite using `Minitest.clock_time`
+      #
+      # @return [Float] the Minitest.clock_time
+      def start!
+        @start_time = Minitest.clock_time
+      end
+
+      # Records the stop time for the full test suite using `Minitest.clock_time`
+      #
+      # @return [Float] the Minitest.clock_time
+      def stop!
+        @stop_time = Minitest.clock_time
+      end
+
+      # Calculates the total time take for the full test suite to run while ensuring it never
+      # returns a zero that would be problematic as a denomitor in calculating average times
+      #
+      # @return [Float] the clocktime duration of the test suite run in seconds
+      def total_time
+        # Don't return 0. The time can end up being 0 for a new or realy fast test suite, and
+        # dividing by 0 doesn't go well when determining average time, so this ensures it uses a
+        # close-enough-but-not-zero value.
+        delta.zero? ? 0.01 : delta
+      end
+
+      # Records the test and assertion counts for a given test outcome
+      # @param count [Integer] the number of assertions from the test
+      #
+      # @return [void]
+      def increment_counts(count)
+        @test_count += 1
+        @assertion_count += count
+      end
+
+      # Provides a nice rounded answer for about how many tests were completed per second
+      #
+      # @return [Float] the average number of tests completed per second
+      def tests_per_second
+        (test_count / total_time).round(2)
+      end
+
+      # Provides a nice rounded answer for about how many assertions were completed per second
+      #
+      # @return [Float] the average number of assertions completed per second
+      def assertions_per_second
+        (assertion_count / total_time).round(2)
+      end
+
+      private
+
+      # The total time the test suite was running.
+      #
+      # @return [Float] the time in seconds elapsed between starting the timer and stopping it
+      def delta
+        return 0 unless start_time && stop_time
+
+        stop_time - start_time
+      end
+    end
+  end
+end
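Timer extracts the counting and timing work that Results previously handled. As a rough usage sketch of the same API shape, here is a standalone class that substitutes `Process.clock_gettime(Process::CLOCK_MONOTONIC)` for `Minitest.clock_time` so it runs without Minitest loaded; the counts are made up:

```ruby
# Standalone sketch of the Timer API shape; not the gem's class.
class SketchTimer
  attr_reader :test_count, :assertion_count

  def initialize
    @test_count = 0
    @assertion_count = 0
    @start_time = nil
    @stop_time = nil
  end

  def start!
    @start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  end

  def stop!
    @stop_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  end

  # Called once per test with that test's assertion count
  def increment_counts(count)
    @test_count += 1
    @assertion_count += count
  end

  def total_time
    delta = @start_time && @stop_time ? @stop_time - @start_time : 0
    delta.zero? ? 0.01 : delta # avoid dividing by zero on very fast runs
  end

  def tests_per_second
    (test_count / total_time).round(2)
  end

  def assertions_per_second
    (assertion_count / total_time).round(2)
  end
end

timer = SketchTimer.new
timer.start!
3.times { timer.increment_counts(5) } # pretend three tests ran with five assertions each
timer.stop!
puts "#{timer.tests_per_second} tests/s, #{timer.assertions_per_second} assertions/s"
```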
data/lib/minitest/heat.rb
CHANGED
@@ -1,12 +1,15 @@
 # frozen_string_literal: true
 
 require_relative 'heat/backtrace'
+require_relative 'heat/hit'
 require_relative 'heat/issue'
+require_relative 'heat/line'
 require_relative 'heat/location'
 require_relative 'heat/map'
 require_relative 'heat/output'
 require_relative 'heat/results'
 require_relative 'heat/source'
+require_relative 'heat/timer'
 require_relative 'heat/version'
 
 module Minitest
data/lib/minitest/heat_plugin.rb
CHANGED
@@ -3,12 +3,12 @@
 require_relative 'heat_reporter'
 
 module Minitest
-  def self.plugin_heat_options(opts,
-    opts.on '--show-fast',
+  def self.plugin_heat_options(opts, _options)
+    opts.on '--show-fast', 'Show failures as they happen instead of waiting for the entire suite.' do
       # Heat.show_fast!
     end
 
-    # TODO options.
+    # TODO: options.
     # 1. Fail Fast
     # 2. Don't worry about skips.
     # 3. Skip coverage.
@@ -18,9 +18,9 @@ module Minitest
     io = options[:io]
 
     # Clean out the existing reporters.
-
+    reporter.reporters = []
 
     # Use Reviewer as the sole reporter.
-
+    reporter << HeatReporter.new(io, options)
   end
 end