benchmark-ips 2.3.0 → 2.11.0

@@ -5,95 +5,11 @@ module Benchmark
  # Microseconds per 100 millisecond.
  MICROSECONDS_PER_100MS = 100_000
  # Microseconds per second.
- MICROSECONDS_PER_SECOND = 1_000_000
+ MICROSECONDS_PER_SECOND = Timing::MICROSECONDS_PER_SECOND
  # The percentage of the expected runtime to allow
  # before reporting a weird runtime
  MAX_TIME_SKEW = 0.05
-
- # Entries in Benchmark Jobs.
- class Entry
- # Instantiate the Benchmark::IPS::Job::Entry.
- # @param label [#to_s] Label of Benchmarked code.
- # @param action [String, Proc] Code to be benchmarked.
- # @raise [ArgumentError] Raises when action is not String or not responding to +call+.
- def initialize(label, action)
- @label = label
-
- if action.kind_of? String
- compile action
- @action = self
- @as_action = true
- else
- unless action.respond_to? :call
- raise ArgumentError, "invalid action, must respond to #call"
- end
-
- @action = action
-
- if action.respond_to? :arity and action.arity > 0
- @call_loop = true
- else
- @call_loop = false
- end
-
- @as_action = false
- end
- end
-
- # The label of benchmarking action.
- # @return [#to_s] Label of action.
- attr_reader :label
-
- # The benchmarking action.
- # @return [String, Proc] Code to be called, could be String / Proc.
- attr_reader :action
-
- # Add padding to label's right if label's length < 20,
- # Otherwise add a new line and 20 whitespaces.
- # @return [String] Right justified label.
- def label_rjust
- label = @label.to_s
- if label.size > 20
- "#{label}\n#{' ' * 20}"
- else
- label.rjust(20)
- end
- end
-
- # Call action by given times, return if +@call_loop+ is present.
- # @param times [Integer] Times to call +@action+.
- # @return [Integer] Number of times the +@action+ has been called.
- def call_times(times)
- return @action.call(times) if @call_loop
-
- act = @action
-
- i = 0
- while i < times
- act.call
- i += 1
- end
- end
-
- # Compile code into +call_times+ method.
- # @param str [String] Code to be compiled.
- # @return [Symbol] :call_times.
- def compile(str)
- m = (class << self; self; end)
- code = <<-CODE
- def call_times(__total);
- __i = 0
- while __i < __total
- #{str};
- __i += 1
- end
- end
- CODE
- m.class_eval code
- end
- end # End of Entry
-
- # class Job
+ POW_2_30 = 1 << 30
 
  # Two-element arrays, consisting of label and block pairs.
  # @return [Array<Entry>] list of entries
@@ -103,6 +19,10 @@ module Benchmark
  # @return [Boolean] true if needs to run compare.
  attr_reader :compare
 
+ # Determining whether to hold results between Ruby invocations
+ # @return [Boolean]
+ attr_accessor :hold
+
  # Report object containing information about the run.
  # @return [Report] the report object.
  attr_reader :full_report
@@ -119,31 +39,76 @@ module Benchmark
  # @return [Integer]
  attr_accessor :time
 
+ # Warmup and calculation iterations.
+ # @return [Integer]
+ attr_accessor :iterations
+
+ # Statistics model.
+ # @return [Object]
+ attr_accessor :stats
+
+ # Confidence.
+ # @return [Integer]
+ attr_accessor :confidence
+
+ # Silence output
+ # @return [Boolean]
+ attr_reader :quiet
+
+ # Suite
+ # @return [Benchmark::IPS::NoopSuite]
+ attr_reader :suite
+
  # Instantiate the Benchmark::IPS::Job.
- # @option opts [Benchmark::Suite] (nil) :suite Specify Benchmark::Suite.
- # @option opts [Boolean] (false) :quiet Suppress the printing of information.
  def initialize opts={}
- @suite = opts[:suite] || nil
- @quiet = opts[:quiet] || false
  @list = []
- @compare = false
+ @run_single = false
  @json_path = false
+ @compare = false
+ @compare_order = :fastest
+ @held_path = nil
+ @held_results = nil
 
- @timing = {}
+ @timing = Hash.new 1 # default to 1 in case warmup isn't run
  @full_report = Report.new
 
  # Default warmup and calculation time in seconds.
  @warmup = 2
  @time = 5
+ @iterations = 1
+
+ # Default statistical model
+ @stats = :sd
+ @confidence = 95
+
+ self.quiet = false
  end
 
  # Job configuration options, set +@warmup+ and +@time+.
  # @option opts [Integer] :warmup Warmup time.
  # @option opts [Integer] :time Calculation time.
+ # @option iterations [Integer] :time Warmup and calculation iterations.
  def config opts
  @warmup = opts[:warmup] if opts[:warmup]
  @time = opts[:time] if opts[:time]
  @suite = opts[:suite] if opts[:suite]
+ @iterations = opts[:iterations] if opts[:iterations]
+ @stats = opts[:stats] if opts[:stats]
+ @confidence = opts[:confidence] if opts[:confidence]
+ self.quiet = opts[:quiet] if opts.key?(:quiet)
+ self.suite = opts[:suite]
+ end
+
+ def quiet=(val)
+ @stdout = reporter(quiet: val)
+ end
+
+ def suite=(suite)
+ @suite = suite || Benchmark::IPS::NoopSuite.new
+ end
+
+ def reporter(quiet:)
+ quiet ? NoopReport.new : StdoutReport.new
  end
 
  # Return true if job needs to be compared.
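
The hunk above threads the new options (iterations, stats, confidence, quiet, suite) through both the constructor defaults and #config. A minimal usage sketch, assuming the gem's usual Benchmark.ips block API; the report labels and blocks are placeholders:

    require 'benchmark/ips'

    Benchmark.ips do |x|
      # Each keyword below maps onto Job#config as shown in the hunk above.
      x.config(warmup: 2, time: 5, iterations: 2, stats: :bootstrap, confidence: 95)

      x.report('inject') { (1..100).inject(:+) }
      x.report('sum')    { (1..100).sum }
    end
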
@@ -152,11 +117,40 @@ module Benchmark
  @compare
  end
 
- # Set @compare to true.
- def compare!
+ # Run comparison utility.
+ def compare!(order: :fastest)
  @compare = true
+ @compare_order = order
+ end
+
+ # Return true if results are held while multiple Ruby invocations
+ # @return [Boolean] Need to hold results between multiple Ruby invocations?
+ def hold?
+ !!@held_path
+ end
+
+ # Hold after each iteration.
+ # @param held_path [String] File name to store hold file.
+ def hold!(held_path)
+ @held_path = held_path
+ @run_single = true
  end
 
+ # Save interim results. Similar to hold, but all reports are run
+ # The report label must change for each invocation.
+ # One way to achieve this is to include the version in the label.
+ # @param held_path [String] File name to store hold file.
+ def save!(held_path)
+ @held_path = held_path
+ @run_single = false
+ end
+
+ # Return true if items are to be run one at a time.
+ # For the traditional hold, this is true
+ # @return [Boolean] Run just a single item?
+ def run_single?
+ @run_single
+ end
 
  # Return true if job needs to generate json.
  # @return [Boolean] Need to generate json?
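
With compare!, hold! and save! in place, results can be accumulated across separate Ruby processes and compared at the end. A sketch of the intended workflow, assuming the usual block API; the file name and report label are placeholders:

    # Run this script once per Ruby (or per library version) being compared.
    Benchmark.ips do |x|
      # With save!, the label must differ between invocations,
      # e.g. by embedding the Ruby version.
      x.report("concat #{RUBY_VERSION}") { 'a' + 'b' }

      x.save! 'ips-held-results.json'   # keep every report between invocations
      # x.hold! 'ips-held-results.json' # alternative: one item per invocation
      x.compare!                        # order: :fastest is the default per the hunk above
    end
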
@@ -164,15 +158,15 @@ module Benchmark
  !!@json_path
  end
 
- # Set @json_path to given path, defaults to "data.json".
+ # Generate json to given path, defaults to "data.json".
  def json!(path="data.json")
  @json_path = path
  end
 
  # Registers the given label and block pair in the job list.
  # @param label [String] Label of benchmarked code.
- # @param str [String] Code to be benchamrked.
- # @param blk [Proc] Code to be benchamrked.
+ # @param str [String] Code to be benchmarked.
+ # @param blk [Proc] Code to be benchmarked.
  # @raise [ArgumentError] Raises if str and blk are both present.
  # @raise [ArgumentError] Raises if str and blk are both absent.
  def item(label="", str=nil, &blk) # :yield:
@@ -195,8 +189,7 @@ module Benchmark
  # @return [Integer] Cycles per 100ms.
  def cycles_per_100ms time_msec, iters
  cycles = ((MICROSECONDS_PER_100MS / time_msec) * iters).to_i
- cycles = 1 if cycles <= 0
- cycles
+ cycles <= 0 ? 1 : cycles
  end
 
  # Calculate the time difference of before and after in microseconds.
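
Despite the time_msec parameter name, callers pass the measured duration in microseconds, so the arithmetic works out as follows: if one warmup batch of iters = 4 calls took 250 microseconds, cycles_per_100ms returns (100_000 / 250) * 4 = 1600, i.e. roughly 1600 calls are expected to fit into each 100 ms measurement window, and the guard clamps the result to at least 1 for very slow blocks.
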
@@ -216,121 +209,189 @@ module Benchmark
  MICROSECONDS_PER_SECOND * (cycles.to_f / time_us.to_f)
  end
 
- # Run warmup.
- def run_warmup
- @list.each do |item|
- @suite.warming item.label, @warmup if @suite
+ def load_held_results
+ return unless @held_path && File.exist?(@held_path) && !File.zero?(@held_path)
+ require "json"
+ @held_results = {}
+ JSON.load(IO.read(@held_path)).each do |result|
+ @held_results[result['item']] = result
+ create_report(result['item'], result['measured_us'], result['iter'],
+ create_stats(result['samples']), result['cycles'])
+ end
+ end
+
+ def save_held_results
+ return unless @held_path
+ require "json"
+ data = full_report.entries.map { |e|
+ {
+ 'item' => e.label,
+ 'measured_us' => e.microseconds,
+ 'iter' => e.iterations,
+ 'samples' => e.samples,
+ 'cycles' => e.measurement_cycle
+ }
+ }
+ IO.write(@held_path, JSON.generate(data) << "\n")
+ end
+
+ def all_results_have_been_run?
+ @full_report.entries.size == @list.size
+ end
+
+ def clear_held_results
+ File.delete @held_path if File.exist?(@held_path)
+ end
 
- unless @quiet
- $stdout.print item.label_rjust
+ def run
+ if @warmup && @warmup != 0 then
+ @stdout.start_warming
+ @iterations.times do
+ run_warmup
  end
+ end
 
- Timing.clean_env
+ @stdout.start_running
 
- before = Time.now
- target = Time.now + @warmup
+ @iterations.times do |n|
+ run_benchmark
+ end
 
- warmup_iter = 0
+ @stdout.footer
+ end
 
- while Time.now < target
- item.call_times(1)
- warmup_iter += 1
- end
+ # Run warmup.
+ def run_warmup
+ @list.each do |item|
+ next if run_single? && @held_results && @held_results.key?(item.label)
 
- after = Time.now
+ @suite.warming item.label, @warmup
+ @stdout.warming item.label, @warmup
 
- warmup_time_us = time_us before, after
+ Timing.clean_env
 
- @timing[item] = cycles_per_100ms warmup_time_us, warmup_iter
+ # Run for up to half of the configured warmup time with an increasing
+ # number of cycles to reduce overhead and improve accuracy.
+ # This also avoids running with a constant number of cycles, which a
+ # JIT might speculate on and then have to recompile in #run_benchmark.
+ before = Timing.now
+ target = Timing.add_second before, @warmup / 2.0
 
- case Benchmark::IPS.options[:format]
- when :human
- $stdout.printf "%s i/100ms\n", Helpers.scale(@timing[item]) unless @quiet
- else
- $stdout.printf "%10d i/100ms\n", @timing[item] unless @quiet
+ cycles = 1
+ begin
+ t0 = Timing.now
+ item.call_times cycles
+ t1 = Timing.now
+ warmup_iter = cycles
+ warmup_time_us = Timing.time_us(t0, t1)
+
+ # If the number of cycles would go outside the 32-bit signed integers range
+ # then exit the loop to avoid overflows and start the 100ms warmup runs
+ break if cycles >= POW_2_30
+ cycles *= 2
+ end while Timing.now + warmup_time_us * 2 < target
+
+ cycles = cycles_per_100ms warmup_time_us, warmup_iter
+ @timing[item] = cycles
+
+ # Run for the remaining of warmup in a similar way as #run_benchmark.
+ target = Timing.add_second before, @warmup
+ while Timing.now + MICROSECONDS_PER_100MS < target
+ item.call_times cycles
  end
 
- @suite.warmup_stats warmup_time_us, @timing[item] if @suite
+ @stdout.warmup_stats warmup_time_us, @timing[item]
+ @suite.warmup_stats warmup_time_us, @timing[item]
+
+ break if run_single?
  end
  end
 
  # Run calculation.
- def run
+ def run_benchmark
  @list.each do |item|
- @suite.running item.label, @time if @suite
+ next if run_single? && @held_results && @held_results.key?(item.label)
 
- unless @quiet
- $stdout.print item.label_rjust
- end
+ @suite.running item.label, @time
+ @stdout.running item.label, @time
 
  Timing.clean_env
 
  iter = 0
 
- target = Time.now + @time
-
  measurements_us = []
 
  # Running this number of cycles should take around 100ms.
  cycles = @timing[item]
 
- while Time.now < target
- before = Time.now
+ target = Timing.add_second Timing.now, @time
+
+ begin
+ before = Timing.now
  item.call_times cycles
- after = Time.now
+ after = Timing.now
 
  # If for some reason the timing said this took no time (O_o)
  # then ignore the iteration entirely and start another.
- iter_us = time_us before, after
+ iter_us = Timing.time_us before, after
  next if iter_us <= 0.0
 
  iter += cycles
 
  measurements_us << iter_us
- end
+ end while Timing.now < target
 
- final_time = Time.now
+ final_time = before
 
- measured_us = measurements_us.inject(0) { |a,i| a + i }
+ measured_us = measurements_us.inject(:+)
 
- all_ips = measurements_us.map { |time_us|
+ samples = measurements_us.map { |time_us|
  iterations_per_sec cycles, time_us
  }
 
- avg_ips = Timing.mean(all_ips)
- sd_ips = Timing.stddev(all_ips).round
-
- rep = create_report(item, measured_us, iter, avg_ips, sd_ips, cycles)
+ rep = create_report(item.label, measured_us, iter, create_stats(samples), cycles)
 
  if (final_time - target).abs >= (@time.to_f * MAX_TIME_SKEW)
  rep.show_total_time!
  end
 
- $stdout.puts " #{rep.body}" unless @quiet
+ @stdout.add_report rep, caller(1).first
+ @suite.add_report rep, caller(1).first
 
- @suite.add_report rep, caller(1).first if @suite
+ break if run_single?
+ end
+ end
+
+ def create_stats(samples)
+ case @stats
+ when :sd
+ Stats::SD.new(samples)
+ when :bootstrap
+ Stats::Bootstrap.new(samples, @confidence)
+ else
+ raise "unknown stats #{@stats}"
  end
  end
 
  # Run comparison of entries in +@full_report+.
  def run_comparison
- @full_report.run_comparison
+ @full_report.run_comparison(@compare_order) if compare?
  end
 
  # Generate json from +@full_report+.
  def generate_json
- @full_report.generate_json @json_path
+ @full_report.generate_json @json_path if json?
  end
 
  # Create report by add entry to +@full_report+.
- # @param item [Benchmark::IPS::Job::Entry] Report item.
+ # @param label [String] Report item label.
  # @param measured_us [Integer] Measured time in microsecond.
  # @param iter [Integer] Iterations.
- # @param avg_ips [Float] Average iterations per second.
- # @param sd_ips [Float] Standard deviation iterations per second.
+ # @param samples [Array<Float>] Sampled iterations per second.
  # @param cycles [Integer] Number of Cycles.
- def create_report(item, measured_us, iter, avg_ips, sd_ips, cycles)
- @full_report.add_entry item.label, measured_us, iter, avg_ips, sd_ips, cycles
+ # @return [Report::Entry] Entry with data.
+ def create_report(label, measured_us, iter, samples, cycles)
+ @full_report.add_entry label, measured_us, iter, samples, cycles
  end
  end
  end
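
The rewritten warmup no longer runs single calls against a wall-clock deadline; it doubles the batch size until one more doubled batch would overrun half of the warmup budget (or the batch size reaches POW_2_30), then derives the per-100ms cycle count from the last timed batch. A stripped-down sketch of just that estimation step, outside the Job class and with Process.clock_gettime standing in for the gem's Timing helpers (illustrative only):

    # Estimate how many block calls fit into 100ms by geometrically growing batches.
    def estimate_cycles_per_100ms(budget_us, &work)
      clock = -> { Process.clock_gettime(Process::CLOCK_MONOTONIC, :microsecond) }
      deadline = clock.call + budget_us
      cycles = 1
      batch_us = 1
      loop do
        t0 = clock.call
        cycles.times(&work)
        batch_us = [clock.call - t0, 1].max
        # Stop doubling once another batch, run twice, would overshoot the budget,
        # or once cycles reaches 2**30 (mirroring POW_2_30 above).
        break if cycles >= (1 << 30) || clock.call + batch_us * 2 >= deadline
        cycles *= 2
      end
      [((100_000.0 / batch_us) * cycles).to_i, 1].max
    end

    estimate_cycles_per_100ms(1_000_000) { 'benchmark'.reverse } # rough calls-per-100ms estimate
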
@@ -0,0 +1,25 @@
+ module Benchmark
+ module IPS
+ class NoopSuite
+ def start_warming
+ end
+
+ def start_running
+ end
+
+ def footer
+ end
+
+ def warming(a, b)
+ end
+
+ def warmup_stats(a, b)
+ end
+
+ def add_report(a, b)
+ end
+
+ alias_method :running, :warming
+ end
+ end
+ end
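
NoopSuite becomes the default value behind Job#suite=, so the Job methods above no longer need to nil-check @suite before each callback. Any object implementing the same interface can be supplied instead; a minimal custom suite might look like this (illustrative sketch, with method names taken from NoopSuite above):

    class LoggingSuite
      def warming(label, warmup_time)
        puts "warming up #{label} for #{warmup_time}s"
      end

      def running(label, run_time)
        puts "measuring #{label} for #{run_time}s"
      end

      def warmup_stats(warmup_time_us, timing); end

      def add_report(report, location)
        puts format('%s: %.1f i/s', report.label, report.ips)
      end

      # The remaining hooks can stay empty, as in NoopSuite.
      def start_warming; end
      def start_running; end
      def footer; end
    end

    Benchmark.ips do |x|
      x.config(suite: LoggingSuite.new)
      x.report('upcase') { 'hello'.upcase }
    end
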
@@ -3,7 +3,7 @@
  module Benchmark
  module IPS
 
- # Report contains benchamrking entries.
+ # Report contains benchmarking entries.
  # Perform operations like add new entry, run comparison between entries.
  class Report
 
@@ -13,15 +13,13 @@ module Benchmark
  # @param [#to_s] label Label of entry.
  # @param [Integer] us Measured time in microsecond.
  # @param [Integer] iters Iterations.
- # @param [Float] ips Iterations per second.
- # @param [Float] ips_sd Standard deviation of iterations per second.
+ # @param [Object] stats Statistics.
  # @param [Integer] cycles Number of Cycles.
- def initialize(label, us, iters, ips, ips_sd, cycles)
+ def initialize(label, us, iters, stats, cycles)
  @label = label
  @microseconds = us
  @iterations = iters
- @ips = ips
- @ips_sd = ips_sd
+ @stats = stats
  @measurement_cycle = cycles
  @show_total_time = false
  end
@@ -38,13 +36,25 @@ module Benchmark
  # @return [Integer] number of iterations.
  attr_reader :iterations
 
- # Iterations per second.
+ # Statistical summary of samples.
+ # @return [Object] statisical summary.
+ attr_reader :stats
+
+ # LEGACY: Iterations per second.
  # @return [Float] number of iterations per second.
- attr_reader :ips
+ def ips
+ @stats.central_tendency
+ end
 
- # Standard deviation of iteration per second.
+ # LEGACY: Standard deviation of iteration per second.
  # @return [Float] standard deviation of iteration per second.
- attr_reader :ips_sd
+ def ips_sd
+ @stats.error
+ end
+
+ def samples
+ @stats.samples
+ end
 
  # Number of Cycles.
  # @return [Integer] number of cycles.
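
Entry#ips and Entry#ips_sd survive as thin wrappers over the stats object, so existing consumers keep working while new code can reach the richer summary. A small sketch, assuming Benchmark.ips returns the full report as it does in recent versions:

    report = Benchmark.ips do |x|
      x.report('reverse') { 'benchmark'.reverse }
    end

    entry = report.entries.first
    entry.ips                     # legacy accessor, delegates to...
    entry.stats.central_tendency  # ...the stats object's central tendency
    entry.stats.error             # what ips_sd now returns
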
@@ -65,8 +75,8 @@ module Benchmark
 
  # Return entry's standard deviation of iteration per second in percentage.
  # @return [Float] +@ips_sd+ in percentage.
- def stddev_percentage
- 100.0 * (@ips_sd.to_f / @ips.to_f)
+ def error_percentage
+ @stats.error_percentage
  end
 
  alias_method :runtime, :seconds
@@ -78,7 +88,7 @@ module Benchmark
  def body
  case Benchmark::IPS.options[:format]
  when :human
- left = "%s (±%4.1f%%) i/s" % [Helpers.scale(ips), stddev_percentage]
+ left = "%s (±%4.1f%%) i/s" % [Helpers.scale(@stats.central_tendency), @stats.error_percentage]
  iters = Helpers.scale(@iterations)
 
  if @show_total_time
@@ -87,7 +97,7 @@ module Benchmark
  left.ljust(20) + (" - %s" % iters)
  end
  else
- left = "%10.1f (±%.1f%%) i/s" % [ips, stddev_percentage]
+ left = "%10.1f (±%.1f%%) i/s" % [@stats.central_tendency, @stats.error_percentage]
 
  if @show_total_time
  left.ljust(20) + (" - %10d in %10.6fs" % [@iterations, runtime])
@@ -117,8 +127,8 @@ module Benchmark
 
  # class Report
 
- # Entry to represent each benchamarked code in Report.
- # @return [Array<Entry>] Entries in Report.
+ # Entry to represent each benchmarked code in Report.
+ # @return [Array<Report::Entry>] Entries in Report.
  attr_reader :entries
 
  # Instantiate the Report.
@@ -131,13 +141,14 @@ module Benchmark
  # @param label [String] Entry label.
  # @param microseconds [Integer] Measured time in microsecond.
  # @param iters [Integer] Iterations.
- # @param ips [Float] Average Iterations per second.
- # @param ips_sd [Float] Standard deviation of iterations per second.
+ # @param stats [Object] Statistical results.
  # @param measurement_cycle [Integer] Number of cycles.
- # @return [Entry] Last added entry.
- def add_entry label, microseconds, iters, ips, ips_sd, measurement_cycle
- @entries << Entry.new(label, microseconds, iters, ips, ips_sd, measurement_cycle)
- @entries.last
+ # @return [Report::Entry] Last added entry.
+ def add_entry label, microseconds, iters, stats, measurement_cycle
+ entry = Entry.new(label, microseconds, iters, stats, measurement_cycle)
+ @entries.delete_if { |e| e.label == label }
+ @entries << entry
+ entry
  end
 
  # Entries data in array for generate json.
@@ -145,28 +156,40 @@ module Benchmark
  # name: Entry#label
  # ips: Entry#ips
  # stddev: Entry#ips_sd
- # @return [Array] Array of entries
+ # microseconds: Entry#microseconds
+ # iterations: Entry#iterations
+ # cycles: Entry#measurement_cycles
+ # @return [Array<Hash<Symbol,String|Float|Integer>] Array of hashes
  def data
  @data ||= @entries.collect do |entry|
  {
  :name => entry.label,
- :ips => entry.ips,
- :stddev => entry.ips_sd
+ :central_tendency => entry.stats.central_tendency,
+ :ips => entry.stats.central_tendency, # for backwards compatibility
+ :error => entry.stats.error,
+ :stddev => entry.stats.error, # for backwards compatibility
+ :microseconds => entry.microseconds,
+ :iterations => entry.iterations,
+ :cycles => entry.measurement_cycle,
  }
  end
  end
 
  # Run comparison of entries.
- def run_comparison
- Benchmark.compare(*@entries)
+ def run_comparison(order)
+ Benchmark.compare(*@entries, order: order)
  end
 
  # Generate json from Report#data to given path.
  # @param path [String] path to generate json.
  def generate_json(path)
- File.open path, "w" do |f|
- require "json"
- f.write JSON.pretty_generate(data)
+ require "json"
+ if path.respond_to?(:write) # STDOUT
+ path.write JSON.pretty_generate(data)
+ else
+ File.open path, "w" do |f|
+ f.write JSON.pretty_generate(data)
+ end
  end
  end
  end
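
Because generate_json now checks respond_to?(:write), the json target can be an IO as well as a file path. A short sketch, assuming the usual block API:

    Benchmark.ips do |x|
      x.report('join') { [1, 2, 3].join('-') }
      x.json! 'data.json'   # written via File.open, as in the hunk above
      # x.json! $stdout     # or any IO responding to #write
    end

Each generated entry now carries :central_tendency, :error, :microseconds, :iterations and :cycles alongside the backwards-compatible :ips and :stddev keys.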