benchmark-ips 2.3.0 → 2.10.0

data/lib/benchmark/ips/noop_suite.rb ADDED
@@ -0,0 +1,25 @@
+module Benchmark
+  module IPS
+    class NoopSuite
+      def start_warming
+      end
+
+      def start_running
+      end
+
+      def footer
+      end
+
+      def warming(a, b)
+      end
+
+      def warmup_stats(a, b)
+      end
+
+      def add_report(a, b)
+      end
+
+      alias_method :running, :warming
+    end
+  end
+end
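Note on NoopSuite: it is a null object — every hook a suite receives during a run (start_warming, warming, warmup_stats, add_report, and so on) is an empty method, so it can stand in for the normal stdout reporting when output should be suppressed. A hedged usage sketch (quiet mode is how this release appears to route output suppression; treat the exact wiring as an assumption):

  require 'benchmark/ips'

  # Suppress warmup/progress output; the report object is still returned.
  report = Benchmark.ips(:quiet => true) do |x|
    x.report("concat") { "a" + "b" }
  end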
data/lib/benchmark/ips/report.rb CHANGED
@@ -3,7 +3,7 @@
 module Benchmark
   module IPS
 
-    # Report contains benchamrking entries.
+    # Report contains benchmarking entries.
     # Perform operations like add new entry, run comparison between entries.
     class Report
 
@@ -13,15 +13,13 @@ module Benchmark
         # @param [#to_s] label Label of entry.
         # @param [Integer] us Measured time in microsecond.
         # @param [Integer] iters Iterations.
-        # @param [Float] ips Iterations per second.
-        # @param [Float] ips_sd Standard deviation of iterations per second.
+        # @param [Object] stats Statistics.
        # @param [Integer] cycles Number of Cycles.
-        def initialize(label, us, iters, ips, ips_sd, cycles)
+        def initialize(label, us, iters, stats, cycles)
          @label = label
          @microseconds = us
          @iterations = iters
-          @ips = ips
-          @ips_sd = ips_sd
+          @stats = stats
          @measurement_cycle = cycles
          @show_total_time = false
        end
@@ -38,13 +36,25 @@ module Benchmark
        # @return [Integer] number of iterations.
        attr_reader :iterations
 
-        # Iterations per second.
+        # Statistical summary of samples.
+        # @return [Object] statisical summary.
+        attr_reader :stats
+
+        # LEGACY: Iterations per second.
        # @return [Float] number of iterations per second.
-        attr_reader :ips
+        def ips
+          @stats.central_tendency
+        end
 
-        # Standard deviation of iteration per second.
+        # LEGACY: Standard deviation of iteration per second.
        # @return [Float] standard deviation of iteration per second.
-        attr_reader :ips_sd
+        def ips_sd
+          @stats.error
+        end
+
+        def samples
+          @stats.samples
+        end
 
        # Number of Cycles.
        # @return [Integer] number of cycles.
@@ -65,8 +75,8 @@ module Benchmark
 
        # Return entry's standard deviation of iteration per second in percentage.
        # @return [Float] +@ips_sd+ in percentage.
-        def stddev_percentage
-          100.0 * (@ips_sd.to_f / @ips.to_f)
+        def error_percentage
+          @stats.error_percentage
        end
 
        alias_method :runtime, :seconds
@@ -78,7 +88,7 @@ module Benchmark
        def body
          case Benchmark::IPS.options[:format]
          when :human
-            left = "%s (±%4.1f%%) i/s" % [Helpers.scale(ips), stddev_percentage]
+            left = "%s (±%4.1f%%) i/s" % [Helpers.scale(@stats.central_tendency), @stats.error_percentage]
            iters = Helpers.scale(@iterations)
 
            if @show_total_time
@@ -87,7 +97,7 @@ module Benchmark
              left.ljust(20) + (" - %s" % iters)
            end
          else
-            left = "%10.1f (±%.1f%%) i/s" % [ips, stddev_percentage]
+            left = "%10.1f (±%.1f%%) i/s" % [@stats.central_tendency, @stats.error_percentage]
 
            if @show_total_time
              left.ljust(20) + (" - %10d in %10.6fs" % [@iterations, runtime])
@@ -117,8 +127,8 @@ module Benchmark
 
      # class Report
 
-      # Entry to represent each benchamarked code in Report.
-      # @return [Array<Entry>] Entries in Report.
+      # Entry to represent each benchmarked code in Report.
+      # @return [Array<Report::Entry>] Entries in Report.
      attr_reader :entries
 
      # Instantiate the Report.
@@ -131,13 +141,14 @@ module Benchmark
      # @param label [String] Entry label.
      # @param microseconds [Integer] Measured time in microsecond.
      # @param iters [Integer] Iterations.
-      # @param ips [Float] Average Iterations per second.
-      # @param ips_sd [Float] Standard deviation of iterations per second.
+      # @param stats [Object] Statistical results.
      # @param measurement_cycle [Integer] Number of cycles.
-      # @return [Entry] Last added entry.
-      def add_entry label, microseconds, iters, ips, ips_sd, measurement_cycle
-        @entries << Entry.new(label, microseconds, iters, ips, ips_sd, measurement_cycle)
-        @entries.last
+      # @return [Report::Entry] Last added entry.
+      def add_entry label, microseconds, iters, stats, measurement_cycle
+        entry = Entry.new(label, microseconds, iters, stats, measurement_cycle)
+        @entries.delete_if { |e| e.label == label }
+        @entries << entry
+        entry
      end
 
      # Entries data in array for generate json.
@@ -145,20 +156,28 @@ module Benchmark
      #   name: Entry#label
      #   ips: Entry#ips
      #   stddev: Entry#ips_sd
-      # @return [Array] Array of entries
+      #   microseconds: Entry#microseconds
+      #   iterations: Entry#iterations
+      #   cycles: Entry#measurement_cycles
+      # @return [Array<Hash<Symbol,String|Float|Integer>] Array of hashes
      def data
        @data ||= @entries.collect do |entry|
          {
            :name => entry.label,
-            :ips => entry.ips,
-            :stddev => entry.ips_sd
+            :central_tendency => entry.stats.central_tendency,
+            :ips => entry.stats.central_tendency, # for backwards compatibility
+            :error => entry.stats.error,
+            :stddev => entry.stats.error, # for backwards compatibility
+            :microseconds => entry.microseconds,
+            :iterations => entry.iterations,
+            :cycles => entry.measurement_cycle,
          }
        end
      end
 
      # Run comparison of entries.
-      def run_comparison
-        Benchmark.compare(*@entries)
+      def run_comparison(order)
+        Benchmark.compare(*@entries, order: order)
      end
 
      # Generate json from Report#data to given path.
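With these changes an Entry wraps a stats object; ips and ips_sd remain as legacy readers over stats.central_tendency and stats.error, and Report#data emits both the new and the old keys. A short sketch of consuming the richer hash (key names are taken from the diff above; the benchmark body is only illustrative):

  require 'benchmark/ips'

  report = Benchmark.ips do |x|
    x.report("sort") { (1..100).to_a.shuffle.sort }
  end

  report.data.each do |row|
    # :ips and :stddev are kept for 2.3.x consumers; the new keys carry the same values.
    puts "#{row[:name]}: #{row[:central_tendency].round(1)} i/s " \
         "(± #{row[:error].round(1)}), #{row[:iterations]} iterations"
  end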
data/lib/benchmark/ips/share.rb ADDED
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'net/http'
+require 'net/https'
+require 'json'
+
+module Benchmark
+  module IPS
+    class Share
+      DEFAULT_URL = "https://ips.fastruby.io"
+      def initialize(report, job)
+        @report = report
+        @job = job
+      end
+
+      def share
+        base = (ENV['SHARE_URL'] || DEFAULT_URL)
+        url = URI(File.join(base, "reports"))
+
+        req = Net::HTTP::Post.new(url)
+
+        data = {
+          "entries" => @report.data,
+          "options" => {
+            "compare" => @job.compare?
+          }
+        }
+
+        req.body = JSON.generate(data)
+
+        http = Net::HTTP.new(url.hostname, url.port)
+        if url.scheme == "https"
+          http.use_ssl = true
+          http.ssl_version = :TLSv1_2
+        end
+
+        res = http.start do |h|
+          h.request req
+        end
+
+        if Net::HTTPOK === res
+          data = JSON.parse res.body
+          puts "Shared at: #{File.join(base, data["id"])}"
+        else
+          puts "Error sharing report"
+        end
+      end
+    end
+  end
+end
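Share posts Report#data as JSON to a reporting endpoint and prints the URL returned on success. It stays opt-in: the ips.rb changes further down only require and invoke it when SHARE or SHARE_URL is set, e.g. SHARE=1 ruby my_benchmark.rb. A hedged sketch of driving it from inside a script (the endpoint below is hypothetical):

  require 'benchmark/ips'

  ENV['SHARE_URL'] ||= 'https://ips.example.test'  # hypothetical self-hosted endpoint

  Benchmark.ips do |x|
    x.report("join") { %w[a b c].join("-") }
  end
  # After the run the entries are POSTed to "<SHARE_URL>/reports" and
  # "Shared at: ..." is printed if the server answers 200 OK.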
data/lib/benchmark/ips/stats/bootstrap.rb ADDED
@@ -0,0 +1,58 @@
+module Benchmark
+  module IPS
+    module Stats
+
+      class Bootstrap
+        include StatsMetric
+        attr_reader :data, :error, :samples
+
+        def initialize(samples, confidence)
+          dependencies
+          @iterations = 10_000
+          @confidence = (confidence / 100.0).to_s
+          @samples = samples
+          @data = Kalibera::Data.new({[0] => samples}, [1, samples.size])
+          interval = @data.bootstrap_confidence_interval(@iterations, @confidence)
+          @median = interval.median
+          @error = interval.error
+        end
+
+        # Average stat value
+        # @return [Float] central_tendency
+        def central_tendency
+          @median
+        end
+
+        # Determines how much slower this stat is than the baseline stat
+        # if this average is lower than the faster baseline, higher average is better (e.g. ips) (calculate accordingly)
+        # @param baseline [SD|Bootstrap] faster baseline
+        # @returns [Array<Float, nil>] the slowdown and the error (not calculated for standard deviation)
+        def slowdown(baseline)
+          low, slowdown, high = baseline.data.bootstrap_quotient(@data, @iterations, @confidence)
+          error = Timing.mean([slowdown - low, high - slowdown])
+          [slowdown, error]
+        end
+
+        def speedup(baseline)
+          baseline.slowdown(self)
+        end
+
+        def footer
+          "with #{(@confidence.to_f * 100).round(1)}% confidence"
+        end
+
+        def dependencies
+          require 'kalibera'
+        rescue LoadError
+          puts
+          puts "Can't load the kalibera gem - this is required to use the :bootstrap stats options."
+          puts "It's optional, so we don't formally depend on it and it isn't installed along with benchmark-ips."
+          puts "You probably want to do something like 'gem install kalibera' to fix this."
+          abort
+        end
+
+      end
+
+    end
+  end
+end
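Bootstrap wraps the optional kalibera gem to compute a bootstrap confidence interval instead of the plain mean/stddev: central_tendency is the interval's median and error the interval's error term. It is selected through the job's :stats option (this mirrors the gem's documented configuration; gem install kalibera is required first):

  require 'benchmark/ips'

  Benchmark.ips do |x|
    # Use bootstrap statistics at a 95% confidence level instead of the default :sd
    x.config(:stats => :bootstrap, :confidence => 95)

    x.report("reverse") { "benchmark-ips".reverse }
    x.compare!
  end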
data/lib/benchmark/ips/stats/sd.rb ADDED
@@ -0,0 +1,45 @@
+module Benchmark
+  module IPS
+    module Stats
+
+      class SD
+        include StatsMetric
+        attr_reader :error, :samples
+
+        def initialize(samples)
+          @samples = samples
+          @mean = Timing.mean(samples)
+          @error = Timing.stddev(samples, @mean).round
+        end
+
+        # Average stat value
+        # @return [Float] central_tendency
+        def central_tendency
+          @mean
+        end
+
+        # Determines how much slower this stat is than the baseline stat
+        # if this average is lower than the faster baseline, higher average is better (e.g. ips) (calculate accordingly)
+        # @param baseline [SD|Bootstrap] faster baseline
+        # @returns [Array<Float, nil>] the slowdown and the error (not calculated for standard deviation)
+        def slowdown(baseline)
+          if baseline.central_tendency > central_tendency
+            [baseline.central_tendency.to_f / central_tendency, 0]
+          else
+            [central_tendency.to_f / baseline.central_tendency, 0]
+          end
+        end
+
+        def speedup(baseline)
+          baseline.slowdown(self)
+        end
+
+        def footer
+          nil
+        end
+
+      end
+
+    end
+  end
+end
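SD is the default statistic: central_tendency is the arithmetic mean of the per-cycle samples, error their standard deviation, and slowdown reduces to a plain ratio of means with no error estimate (the second element of the returned pair is always 0). A standalone sketch of that arithmetic with invented numbers:

  fast_mean = 2_000_000.0  # i/s of the faster entry
  slow_mean =   500_000.0  # i/s of the slower entry

  slowdown = fast_mean / slow_mean  # => 4.0, shown by compare! as roughly "4.00x slower"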
data/lib/benchmark/ips/stats/stats_metric.rb ADDED
@@ -0,0 +1,21 @@
+module Benchmark
+  module IPS
+    module Stats
+      module StatsMetric
+        # Return entry's standard deviation of iteration per second in percentage.
+        # @return [Float] +@ips_sd+ in percentage.
+        def error_percentage
+          100.0 * (error.to_f / central_tendency)
+        end
+
+        def overlaps?(baseline)
+          baseline_low = baseline.central_tendency - baseline.error
+          baseline_high = baseline.central_tendency + baseline.error
+          my_high = central_tendency + error
+          my_low = central_tendency - error
+          my_high > baseline_low && my_low < baseline_high
+        end
+      end
+    end
+  end
+end
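StatsMetric holds the helpers both stats classes mix in: error_percentage for display and overlaps?, which the comparison uses to note when two results fall within each other's error. A small numeric illustration (values invented):

  error_percentage = 100.0 * (50_000.0 / 1_000_000.0)  # => 5.0 for 1.00M ± 50k i/s

  # overlaps? checks whether the intervals [central_tendency - error, central_tendency + error]
  # intersect; 1.00M ± 50k and 1.04M ± 50k do, so compare! reports the difference
  # as falling within error.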
data/lib/benchmark/ips.rb CHANGED
@@ -1,7 +1,14 @@
 # encoding: utf-8
 require 'benchmark/timing'
 require 'benchmark/compare'
+require 'benchmark/ips/stats/stats_metric'
+require 'benchmark/ips/stats/sd'
+require 'benchmark/ips/stats/bootstrap'
 require 'benchmark/ips/report'
+require 'benchmark/ips/noop_suite'
+require 'benchmark/ips/job/entry'
+require 'benchmark/ips/job/stdout_report'
+require 'benchmark/ips/job/noop_report'
 require 'benchmark/ips/job'
 
 # Performance benchmarking library
@@ -11,10 +18,10 @@ module Benchmark
   module IPS
 
     # Benchmark-ips Gem version.
-    VERSION = "2.3.0"
+    VERSION = "2.10.0"
 
     # CODENAME of current version.
-    CODENAME = "Monsoon BBQ"
+    CODENAME = "Watashi Wa Genki"
 
     # Measure code in block, each code's benchmarked result will display in
     # iteration per second with standard deviation in given time.
@@ -28,47 +35,43 @@ module Benchmark
        time, warmup, quiet = args
      end
 
-      suite = nil
-
      sync, $stdout.sync = $stdout.sync, true
 
-      if defined? Benchmark::Suite and Suite.current
-        suite = Benchmark::Suite.current
-      end
-
-      quiet ||= (suite && suite.quiet?)
-
-      job = Job.new({:suite => suite,
-                     :quiet => quiet
-                    })
+      job = Job.new
 
      job_opts = {}
      job_opts[:time] = time unless time.nil?
      job_opts[:warmup] = warmup unless warmup.nil?
+      job_opts[:quiet] = quiet unless quiet.nil?
 
      job.config job_opts
 
      yield job
 
-      $stdout.puts "Calculating -------------------------------------" unless quiet
-
-      job.run_warmup
-
-      $stdout.puts "-------------------------------------------------" unless quiet
+      job.load_held_results
 
      job.run
 
+      if job.run_single? && job.all_results_have_been_run?
+        job.clear_held_results
+      else
+        job.save_held_results
+        puts '', 'Pausing here -- run Ruby again to measure the next benchmark...' if job.run_single?
+      end
+
      $stdout.sync = sync
+      job.run_comparison
+      job.generate_json
 
-      if job.compare?
-        job.run_comparison
-      end
+      report = job.full_report
 
-      if job.json?
-        job.generate_json
+      if ENV['SHARE'] || ENV['SHARE_URL']
+        require 'benchmark/ips/share'
+        share = Share.new report, job
+        share.share
      end
 
-      return job.full_report
+      report
    end
 
    # Set options for running the benchmarks.
@@ -100,4 +103,68 @@ module Benchmark
  end
 
  extend Benchmark::IPS # make ips available as module-level method
+
+  ##
+  # :singleton-method: ips
+  #
+  #   require 'benchmark/ips'
+  #
+  #   Benchmark.ips do |x|
+  #     # Configure the number of seconds used during
+  #     # the warmup phase (default 2) and calculation phase (default 5)
+  #     x.config(:time => 5, :warmup => 2)
+  #
+  #     # These parameters can also be configured this way
+  #     x.time = 5
+  #     x.warmup = 2
+  #
+  #     # Typical mode, runs the block as many times as it can
+  #     x.report("addition") { 1 + 2 }
+  #
+  #     # To reduce overhead, the number of iterations is passed in
+  #     # and the block must run the code the specific number of times.
+  #     # Used for when the workload is very small and any overhead
+  #     # introduces incorrectable errors.
+  #     x.report("addition2") do |times|
+  #       i = 0
+  #       while i < times
+  #         1 + 2
+  #         i += 1
+  #       end
+  #     end
+  #
+  #     # To reduce overhead even more, grafts the code given into
+  #     # the loop that performs the iterations internally to reduce
+  #     # overhead. Typically not needed, use the |times| form instead.
+  #     x.report("addition3", "1 + 2")
+  #
+  #     # Really long labels should be formatted correctly
+  #     x.report("addition-test-long-label") { 1 + 2 }
+  #
+  #     # Compare the iterations per second of the various reports!
+  #     x.compare!
+  #   end
+  #
+  # This will generate the following report:
+  #
+  #   Calculating -------------------------------------
+  #             addition    71.254k i/100ms
+  #            addition2    68.658k i/100ms
+  #            addition3    83.079k i/100ms
+  #   addition-test-long-label
+  #                         70.129k i/100ms
+  #   -------------------------------------------------
+  #             addition      4.955M (± 8.7%) i/s -     24.155M
+  #            addition2     24.011M (± 9.5%) i/s -    114.246M
+  #            addition3     23.958M (±10.1%) i/s -    115.064M
+  #   addition-test-long-label
+  #                          5.014M (± 9.1%) i/s -     24.545M
+  #
+  #   Comparison:
+  #            addition2: 24011974.8 i/s
+  #            addition3: 23958619.8 i/s - 1.00x slower
+  #   addition-test-long-label: 5014756.0 i/s - 4.79x slower
+  #             addition: 4955278.9 i/s - 4.85x slower
+  #
+  # See also Benchmark::IPS
 end
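The rewritten run loop above also wires in held results: load_held_results, save_held_results and the "Pausing here" message implement a hold-and-resume workflow in which each Ruby process measures one pending entry and the comparison is printed once everything has been run. A hedged sketch of driving it (x.hold! is the entry point; the file name is arbitrary):

  require 'benchmark/ips'

  Benchmark.ips do |x|
    x.report("String#+")  { "a" + "b" }
    x.report("String#<<") { "a".dup << "b" }

    # Persist results between invocations; run the script repeatedly until all
    # entries have been measured, then the comparison is shown.
    x.hold! "ips_held_results"
    x.compare!
  end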
data/lib/benchmark/timing.rb CHANGED
@@ -1,12 +1,14 @@
 module Benchmark
   # Perform caclulations on Timing results.
   module Timing
+    # Microseconds per second.
+    MICROSECONDS_PER_SECOND = 1_000_000
 
    # Calculate (arithmetic) mean of given samples.
    # @param [Array] samples Samples to calculate mean.
    # @return [Float] Mean of given samples.
    def self.mean(samples)
-      sum = samples.inject(0) { |acc, i| acc + i }
+      sum = samples.inject(:+)
      sum / samples.size
    end
 
@@ -29,21 +31,7 @@ module Benchmark
      Math.sqrt variance(samples, m)
    end
 
-    # Resample mean of given samples.
-    # @param [Integer] resample_times Resample times, defaults to 100.
-    # @return [Array] Resampled samples.
-    def self.resample_mean(samples, resample_times=100)
-      resamples = []
-
-      resample_times.times do
-        resample = samples.map { samples[rand(samples.size)] }
-        resamples << Timing.mean(resample)
-      end
-
-      resamples
-    end
-
-    # Recycle unsed objects by starting Garbage Collector.
+    # Recycle used objects by starting Garbage Collector.
    def self.clean_env
      # rbx
      if GC.respond_to? :run
@@ -52,5 +40,40 @@ module Benchmark
        GC.start
      end
    end
+
+    # Use a monotonic clock if available, otherwise use Time
+    begin
+      Process.clock_gettime Process::CLOCK_MONOTONIC, :float_microsecond
+
+      # Get an object that represents now and can be converted to microseconds
+      def self.now
+        Process.clock_gettime Process::CLOCK_MONOTONIC, :float_microsecond
+      end
+
+      # Add one second to the time represenetation
+      def self.add_second(t, s)
+        t + (s * MICROSECONDS_PER_SECOND)
+      end
+
+      # Return the number of microseconds between the 2 moments
+      def self.time_us(before, after)
+        after - before
+      end
+    rescue NameError
+      # Get an object that represents now and can be converted to microseconds
+      def self.now
+        Time.now
+      end
+
+      # Add one second to the time represenetation
+      def self.add_second(t, s)
+        t + s
+      end
+
+      # Return the number of microseconds between the 2 moments
+      def self.time_us(before, after)
+        (after.to_f - before.to_f) * MICROSECONDS_PER_SECOND
+      end
+    end
  end
 end
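Timing now prefers a monotonic clock, feature-detected once at load time: when Process.clock_gettime with CLOCK_MONOTONIC is available, now returns float microseconds and time_us is a plain subtraction; otherwise it falls back to Time objects. Timing is an internal helper, but the new surface is small enough to sketch (assuming the gem is installed so the require path resolves):

  require 'benchmark/timing'

  t0 = Benchmark::Timing.now
  10_000.times { Math.sqrt(123.456) }
  t1 = Benchmark::Timing.now

  elapsed_us = Benchmark::Timing.time_us(t0, t1)    # microseconds between the two moments
  deadline   = Benchmark::Timing.add_second(t0, 1)  # t0 plus one second, same representation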