minitest-heat 0.0.6 → 0.0.10

@@ -2,8 +2,8 @@

 module Minitest
   module Heat
-    # Friendly API for printing nicely-formatted output to the console
     class Output
+      # Friendly API for printing consistent markers for the various issue types
       class Marker
        SYMBOLS = {
          success: '·',

@@ -12,7 +12,8 @@ module Minitest
          broken: 'B',
          error: 'E',
          skipped: 'S',
-          failure: 'F'
+          failure: 'F',
+          reporter: '✖'
        }.freeze

        STYLES = {

@@ -22,7 +23,8 @@ module Minitest
          broken: :error,
          error: :error,
          skipped: :skipped,
-          failure: :failure
+          failure: :failure,
+          reporter: :error
        }.freeze

        attr_accessor :issue_type

@@ -3,22 +3,29 @@
 module Minitest
   module Heat
     class Output
+      # Generates the output tokens to display the results summary
       class Results
        extend Forwardable

-        attr_accessor :results
+        attr_accessor :results, :timer

-        def_delegators :@results, :errors, :brokens, :failures, :skips, :painfuls, :slows, :problems?, :slows?
+        def_delegators :@results, :issues, :errors, :brokens, :failures, :skips, :painfuls, :slows, :problems?

-        def initialize(results)
+        def initialize(results, timer)
          @results = results
+          @timer = timer
          @tokens = []
        end

        def tokens
+          # Only show the issue type counts if there are issues
          @tokens << [*issue_counts_tokens] if issue_counts_tokens&.any?
-          @tokens << [assertions_performance_token, tests_performance_token, timing_token]
-          @tokens << [assertions_count_token, test_count_token]
+
+          @tokens << [
+            timing_token, spacer_token,
+            test_count_token, tests_performance_token, join_token,
+            assertions_count_token, assertions_performance_token
+          ]

          @tokens
        end

@@ -33,7 +40,7 @@ module Minitest
        end

        def issue_counts_tokens
-          return unless problems? || slows?
+          return unless issues.any?

          counts = [
            error_count_token,

@@ -45,10 +52,10 @@ module Minitest
          ].compact

          # # Create an array of separator tokens one less than the total number of issue count tokens
-          separator_tokens = Array.new(counts.size, separator_token)
+          spacer_tokens = Array.new(counts.size, spacer_token)

          counts_with_separators = counts
-            .zip(separator_tokens) # Add separators between the counts
+            .zip(spacer_tokens) # Add separators between the counts
            .flatten(1) # Flatten the zipped separators, but no more

          counts_with_separators.pop # Remove the final trailing zipped separator that's not needed

@@ -74,33 +81,33 @@ module Minitest
        end

        def painful_count_token
-          style = problems? ? :muted : :painful
+          style = problems? || skips.any? ? :muted : :painful
          issue_count_token(style, painfuls, name: 'Painfully Slow')
        end

        def slow_count_token
-          style = problems? ? :muted : :slow
+          style = problems? || skips.any? ? :muted : :slow
          issue_count_token(style, slows, name: 'Slow')
        end

-        def assertions_performance_token
-          [:bold, "#{results.assertions_per_second} assertions/s"]
+        def test_count_token
+          [:default, pluralize(timer.test_count, 'test').to_s]
        end

        def tests_performance_token
-          [:default, " and #{results.tests_per_second} tests/s"]
+          [:default, " (#{timer.tests_per_second}/s)"]
        end

-        def timing_token
-          [:default, " in #{results.total_time.round(2)}s"]
+        def assertions_count_token
+          [:default, pluralize(timer.assertion_count, 'assertion').to_s]
        end

-        def assertions_count_token
-          [:muted, pluralize(results.assertion_count, 'Assertion')]
+        def assertions_performance_token
+          [:default, " (#{timer.assertions_per_second}/s)"]
        end

-        def test_count_token
-          [:muted, " across #{pluralize(results.test_count, 'Test')}"]
+        def timing_token
+          [:bold, "#{timer.total_time.round(2)}s"]
        end

        def issue_count_token(type, collection, name: type.capitalize)

@@ -109,8 +116,12 @@ module Minitest
          [type, pluralize(collection.size, name)]
        end

-        def separator_token
-          [:muted, ' · ']
+        def spacer_token
+          Output::TOKENS[:spacer]
+        end
+
+        def join_token
+          [:default, ' with ']
        end
      end
    end
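
With the reordered tokens above, the compact summary now leads with timing and reads roughly like this (illustrative values, not actual gem output):

    1.27s · 23 tests (18.11/s) with 72 assertions (56.69/s)

When there are issues, a counts line built from issue_counts_tokens (for example, something like "1 Error · 2 Failures") is printed above it, with each count separated by the same shared spacer token.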

@@ -3,7 +3,7 @@
 module Minitest
   module Heat
     class Output
-      # Builds the collection of tokens representing a specific set of source code lines
+      # Generates the tokens representing a specific set of source code lines
       class SourceCode
        DEFAULT_LINE_COUNT = 3
        DEFAULT_INDENTATION_SPACES = 2

@@ -2,8 +2,9 @@

 module Minitest
   module Heat
-    # Friendly API for printing nicely-formatted output to the console
     class Output
+      # Provides a convenient interface for creating console-friendly output while ensuring
+      # consistency in the applied styles.
       class Token
        class InvalidStyle < ArgumentError; end


@@ -12,6 +12,18 @@ module Minitest
   module Heat
     # Friendly API for printing nicely-formatted output to the console
     class Output
+      SYMBOLS = {
+        middot: '·',
+        arrow: '➜',
+        lead: '|'
+      }.freeze
+
+      TOKENS = {
+        spacer: [:muted, " #{SYMBOLS[:middot]} "],
+        muted_arrow: [:muted, " #{SYMBOLS[:arrow]} "],
+        muted_lead: [:muted, "#{SYMBOLS[:lead]} "]
+      }.freeze
+
       attr_reader :stream

       def initialize(stream = $stdout)
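
For reference, the shared tokens introduced here are plain [style, text] pairs, so they can be dropped straight into any token stream. A quick sketch of what the constants above evaluate to:

    require 'minitest/heat'

    Minitest::Heat::Output::TOKENS[:spacer]      # => [:muted, " · "]
    Minitest::Heat::Output::TOKENS[:muted_arrow] # => [:muted, " ➜ "]
    Minitest::Heat::Output::TOKENS[:muted_lead]  # => [:muted, "| "]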

@@ -30,26 +42,82 @@ module Minitest
      end
      alias newline puts

+      def issues_list(results)
+        # A couple of blank lines to create some breathing room
+        newline
+        newline
+
+        # Issues start with the least critical and go up to the most critical so that the most
+        # pressing issues are displayed at the bottom of the report in order to reduce scrolling.
+        # This way, as you fix issues, the list gets shorter, and eventually the least critical
+        # issues will be displayed without scrolling once more problematic issues are resolved.
+        %i[slows painfuls skips failures brokens errors].each do |issue_category|
+          next unless show?(issue_category, results)
+
+          results.send(issue_category).each { |issue| issue_details(issue) }
+        end
+      rescue => e
+        message = "Sorry, but Minitest Heat couldn't display the details of any failures."
+        exception_guidance(message, e)
+      end
+
      def issue_details(issue)
        print_tokens Minitest::Heat::Output::Issue.new(issue).tokens
+      rescue => e
+        message = "Sorry, but Minitest Heat couldn't display output for a failure."
+        exception_guidance(message, e)
      end

      def marker(issue_type)
        print_token Minitest::Heat::Output::Marker.new(issue_type).token
      end

-      def compact_summary(results)
+      def compact_summary(results, timer)
        newline
-        print_tokens ::Minitest::Heat::Output::Results.new(results).tokens
+        print_tokens ::Minitest::Heat::Output::Results.new(results, timer).tokens
+      rescue => e
+        message = "Sorry, but Minitest Heat couldn't display the summary."
+        exception_guidance(message, e)
      end

      def heat_map(map)
        newline
        print_tokens ::Minitest::Heat::Output::Map.new(map).tokens
+        newline
+      rescue => e
+        message = "Sorry, but Minitest Heat couldn't display the heat map."
+        exception_guidance(message, e)
+      end
+
+      def exception_guidance(message, exception)
+        newline
+        puts "#{message} Disabling Minitest Heat can get you back on track until the problem can be fixed."
+        puts "Please use the following exception details to submit an issue at https://github.com/garrettdimon/minitest-heat/issues"
+        puts "#{exception.message}:"
+        exception.backtrace.each do |line|
+          puts "  #{line}"
+        end
+        newline
      end

      private

+      def no_problems?(results)
+        !results.problems?
+      end
+
+      def no_problems_or_skips?(results)
+        !results.problems? && results.skips.none?
+      end
+
+      def show?(issue_category, results)
+        case issue_category
+        when :skips then no_problems?(results)
+        when :painfuls, :slows then no_problems_or_skips?(results)
+        else true
+        end
+      end
+
      def style_enabled?
        stream.tty?
      end
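
The new show? predicate decides which issue categories are rendered at all: slow and painfully slow details are suppressed whenever there are problems or skips, and skip details are suppressed whenever there are problems. A minimal sketch of that behavior using hypothetical Struct doubles in place of real results objects (not from the gem's own tests; show? is private, hence send):

    require 'minitest/heat'

    FakeResults = Struct.new(:problems, :skips) do
      def problems?
        problems
      end
    end

    output  = Minitest::Heat::Output.new
    clean   = FakeResults.new(false, [])
    skipped = FakeResults.new(false, [:skip])
    failing = FakeResults.new(true, [])

    output.send(:show?, :slows, clean)    # => true  (no problems, no skips)
    output.send(:show?, :slows, skipped)  # => false (skips hide slow/painful details)
    output.send(:show?, :skips, failing)  # => false (problems hide skip details)
    output.send(:show?, :errors, failing) # => true  (errors are always shown)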

@@ -4,119 +4,65 @@ module Minitest
   module Heat
     # A collection of test failures
     class Results
-      attr_reader :test_count,
-                  :assertion_count,
-                  :success_count,
-                  :issues,
-                  :start_time,
-                  :stop_time
+      attr_reader :issues, :heat_map

      def initialize
-        @test_count = 0
-        @assertion_count = 0
-        @success_count = 0
-        @issues = {
-          error: [],
-          broken: [],
-          failure: [],
-          skipped: [],
-          painful: [],
-          slow: []
-        }
-        @start_time = nil
-        @stop_time = nil
+        @issues = []
+        @heat_map = Heat::Map.new
      end

-      def start_timer!
-        @start_time = Minitest.clock_time
-      end
-
-      def stop_timer!
-        @stop_time = Minitest.clock_time
-      end
-
-      def total_time
-        delta = @stop_time - @start_time
+      # Logs an issue to the results for later reporting
+      # @param issue [Issue] the issue generated from a given test result
+      #
+      # @return [type] [description]
+      def record(issue)
+        # Record everything—even if it's a success
+        @issues.push(issue)

-        # Don't return 0
-        delta.zero? ? 0.1 : delta
+        # If it's not a genuine problem, we're done here, otherwise update the heat map
+        update_heat_map(issue) if issue.hit?
      end

-      def tests_per_second
-        (assertion_count / total_time).round(2)
-      end
+      def update_heat_map(issue)
+        # Get the elements we need to generate a heat map entry
+        pathname = issue.location.project_file.to_s
+        line_number = issue.location.project_failure_line.to_i

-      def assertions_per_second
-        (assertion_count / total_time).round(2)
+        @heat_map.add(pathname, line_number, issue.type)
      end

      def problems?
-        errors? || brokens? || failures? || skips?
+        errors.any? || brokens.any? || failures.any?
      end

      def errors
-        issues.fetch(:error) { [] }
+        @errors ||= select_issues(:error)
      end

      def brokens
-        issues.fetch(:broken) { [] }
+        @brokens ||= select_issues(:broken)
      end

      def failures
-        issues.fetch(:failure) { [] }
+        @failures ||= select_issues(:failure)
      end

      def skips
-        issues.fetch(:skipped) { [] }
+        @skips ||= select_issues(:skipped)
      end

      def painfuls
-        issues
-          .fetch(:painful) { [] }
-          .sort_by(&:time)
-          .reverse
-          .take(5)
+        @painfuls ||= select_issues(:painful).sort_by(&:execution_time).reverse
      end

      def slows
-        issues
-          .fetch(:slow) { [] }
-          .sort_by(&:time)
-          .reverse
-          .take(5)
-      end
-
-      def errors?
-        errors.any?
-      end
-
-      def brokens?
-        brokens.any?
+        @slows ||= select_issues(:slow).sort_by(&:execution_time).reverse
      end

-      def failures?
-        failures.any?
-      end
-
-      def skips?
-        skips.any?
-      end
-
-      def painfuls?
-        painfuls.any?
-      end
-
-      def slows?
-        slows.any?
-      end
-
-      def record(issue)
-        @test_count += 1
-        @assertion_count += issue.result.assertions
-        @success_count += 1 if issue.result.passed?
+      private

-        @issues[issue.type] ||= []
-        @issues[issue.type] << issue
+      def select_issues(issue_type)
+        issues.select { |issue| issue.type == issue_type }
      end
    end
  end
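
The reworked Results drops the per-type hash and the embedded timing in favor of a flat issue list plus a built-in heat map. A rough usage sketch, assuming issue quacks like Heat::Issue (responds to #type, #hit?, #location, and #execution_time as used above):

    results = Minitest::Heat::Results.new
    results.record(issue)   # every issue is stored; hits also update the heat map

    results.problems?       # => true when any errors, brokens, or failures exist
    results.errors          # => memoized subset of issues where type == :error
    results.slows           # => :slow issues sorted by execution_time, slowest first
    results.heat_map        # => the Heat::Map populated from each hit's file and line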

@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module Minitest
+  module Heat
+    # Provides a timer to keep track of the full test suite duration and provide convenient methods
+    # for calculating tests/second and assertions/second
+    class Timer
+      attr_reader :test_count, :assertion_count, :start_time, :stop_time
+
+      # Creates an instance of a timer to be used for the duration of a test suite run
+      #
+      # @return [self]
+      def initialize
+        @test_count = 0
+        @assertion_count = 0
+
+        @start_time = nil
+        @stop_time = nil
+      end
+
+      # Records the start time for the full test suite using `Minitest.clock_time`
+      #
+      # @return [Float] the Minitest.clock_time
+      def start!
+        @start_time = Minitest.clock_time
+      end
+
+      # Records the stop time for the full test suite using `Minitest.clock_time`
+      #
+      # @return [Float] the Minitest.clock_time
+      def stop!
+        @stop_time = Minitest.clock_time
+      end
+
+      # Calculates the total time taken for the full test suite to run while ensuring it never
+      # returns a zero that would be problematic as a denominator in calculating average times
+      #
+      # @return [Float] the clocktime duration of the test suite run in seconds
+      def total_time
+        # Don't return 0. The time can end up being 0 for a new or really fast test suite, and
+        # dividing by 0 doesn't go well when determining average time, so this ensures it uses a
+        # close-enough-but-not-zero value.
+        delta.zero? ? 0.01 : delta
+      end
+
+      # Records the test and assertion counts for a given test outcome
+      # @param count [Integer] the number of assertions from the test
+      #
+      # @return [void]
+      def increment_counts(count)
+        @test_count += 1
+        @assertion_count += count
+      end
+
+      # Provides a nice rounded answer for about how many tests were completed per second
+      #
+      # @return [Float] the average number of tests completed per second
+      def tests_per_second
+        (test_count / total_time).round(2)
+      end
+
+      # Provides a nice rounded answer for about how many assertions were completed per second
+      #
+      # @return [Float] the average number of assertions completed per second
+      def assertions_per_second
+        (assertion_count / total_time).round(2)
+      end
+
+      private
+
+      # The total time the test suite was running.
+      #
+      # @return [Float] the time in seconds elapsed between starting the timer and stopping it
+      def delta
+        return 0 unless start_time && stop_time
+
+        stop_time - start_time
+      end
+    end
+  end
+end
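
The new Timer consolidates the counting and timing that Results previously handled. A short sketch of how it is driven, mirroring the reporter calls shown further down:

    timer = Minitest::Heat::Timer.new

    timer.start!
    timer.increment_counts(3)    # a test that ran three assertions
    timer.increment_counts(1)    # a test that ran one assertion
    timer.stop!

    timer.total_time             # => elapsed clock time in seconds, never zero
    timer.tests_per_second       # => (2 / total_time).round(2)
    timer.assertions_per_second  # => (4 / total_time).round(2)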

@@ -2,6 +2,6 @@

 module Minitest
   module Heat
-    VERSION = '0.0.6'
+    VERSION = '0.0.10'
   end
 end

data/lib/minitest/heat.rb CHANGED

@@ -3,16 +3,17 @@
 require_relative 'heat/backtrace'
 require_relative 'heat/hit'
 require_relative 'heat/issue'
-require_relative 'heat/line'
 require_relative 'heat/location'
 require_relative 'heat/map'
 require_relative 'heat/output'
 require_relative 'heat/results'
 require_relative 'heat/source'
+require_relative 'heat/timer'
 require_relative 'heat/version'

 module Minitest
-  # Custom minitest reporter just for Reviewer. Focuses on printing directly actionable guidance.
+  # Custom Minitest reporter focused on generating output designed around efficiently identifying
+  # issues and potential solutions
   # - Colorize the Output
   # - What files had the most errors?
   # - Show the most impacted areas first.

@@ -2,25 +2,17 @@

 require_relative 'heat_reporter'

-module Minitest
-  def self.plugin_heat_options(opts, _options)
-    opts.on '--show-fast', 'Show failures as they happen instead of waiting for the entire suite.' do
-      # Heat.show_fast!
-    end
-
-    # TODO: options.
-    # 1. Fail Fast
-    # 2. Don't worry about skips.
-    # 3. Skip coverage.
-  end
-
+module Minitest # rubocop:disable Style/Documentation
   def self.plugin_heat_init(options)
-    io = options[:io]
+    io = options.fetch(:io, $stdout)

-    # Clean out the existing reporters.
-    reporter.reporters = []
+    self.reporter.reporters.reject! do |reporter|
+      # Minitest Heat acts as a unified Progress *and* Summary reporter. Using other reporters of
+      # those types in conjunction with it creates some overly-verbose output
+      reporter.is_a?(ProgressReporter) || reporter.is_a?(SummaryReporter)
+    end

-    # Use Reviewer as the sole reporter.
-    reporter << HeatReporter.new(io, options)
+    # Hook up Reviewer
+    self.reporter.reporters << HeatReporter.new(io, options)
   end
 end

@@ -31,20 +31,22 @@ module Minitest
   class HeatReporter < AbstractReporter
     attr_reader :output,
                 :options,
-                :results,
-                :map
+                :timer,
+                :results

     def initialize(io = $stdout, options = {})
+      super()
+
       @options = options

+      @timer = Heat::Timer.new
       @results = Heat::Results.new
-      @map = Heat::Map.new
       @output = Heat::Output.new(io)
     end

     # Starts reporting on the run.
     def start
-      results.start_timer!
+      timer.start!

       # A couple of blank lines to create some breathing room
       output.newline

@@ -56,43 +58,47 @@ module Minitest
     def prerecord(klass, name); end

     # Records the data from a result.
+    #
     # Minitest::Result source:
     # https://github.com/seattlerb/minitest/blob/f4f57afaeb3a11bd0b86ab0757704cb78db96cf4/lib/minitest.rb#L504
     def record(result)
-      issue = Heat::Issue.new(result)
+      # Convert a Minitest Result into an "issue" to more consistently expose the data needed to
+      # adjust the failure output to the type of failure
+      issue = Heat::Issue.from_result(result)
+
+      # Note the number of assertions for the performance summary
+      timer.increment_counts(issue.assertions)

+      # Record the issue to show details later
       results.record(issue)
-      map.add(*issue.to_hit) if issue.hit?

+      # Show the marker
       output.marker(issue.type)
+    rescue => e
+      output.newline
+      puts "Sorry, but Minitest Heat encountered an exception recording an issue. Disabling Minitest Heat will get you back on track."
+      puts "Please use the following exception details to submit an issue at https://github.com/garrettdimon/minitest-heat/issues"
+      puts "#{e.message}:"
+      e.backtrace.each do |line|
+        puts "  #{line}"
+      end
+      output.newline
     end

     # Outputs the summary of the run.
     def report
-      results.stop_timer!
-
-      # A couple of blank lines to create some breathing room
-      output.newline
-      output.newline
+      timer.stop!

-      # Issues start with the least critical and go up to the most critical so that the most
-      # pressing issues are displayed at the bottom of the report in order to reduce scrolling.
-      # This way, as you fix issues, the list gets shorter, and eventually the least critical
-      # issues will be displayed without scrolling once more problematic issues are resolved.
-      %i[slows painfuls skips failures brokens errors].each do |issue_category|
-        results.send(issue_category).each { |issue| output.issue_details(issue) }
-      end
+      # The list of individual issues and their associated details
+      output.issues_list(results)

       # Display a short summary of the total issue counts for each category as well as performance
       # details for the test suite as a whole
-      output.compact_summary(results)
+      output.compact_summary(results, timer)

       # If there were issues, shows a short heat map summary of which files and lines were the most
       # common sources of issues
-      output.heat_map(map)
-
-      # A blank line to create some breathing room
-      output.newline
+      output.heat_map(results)
     end

     # Did this run pass?
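
Taken together, the reporter lifecycle after this release looks roughly like the following (illustrative sketch, not gem code; minitest_results stands in for whatever results Minitest feeds the reporter):

    reporter = Minitest::HeatReporter.new($stdout, {})

    reporter.start                                              # timer.start! plus breathing room
    minitest_results.each { |result| reporter.record(result) }  # record the issue, print a marker
    reporter.report                                             # timer.stop!, issues list, compact summary, heat map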