perf_check 0.2.1 → 0.2.2
This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/lib/perf_check/callbacks.rb +42 -0
- data/lib/perf_check/config.rb +126 -0
- data/lib/perf_check/logger.rb +19 -0
- data/lib/perf_check/middleware.rb +24 -0
- data/lib/perf_check/output.rb +112 -0
- metadata +6 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a4bbced7edbd4eb60e691db4241203c9d0c40fe7
+  data.tar.gz: fc643decd944fe233446917257c78aaea678f5c5
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6628a8deceb43e9bcd8a5d858d99c3e28b19cbff162bf16b83fa725752b18265e5785d8de220a2aac2b3db51e1021cd54339690f3d3a2782a8e433898e96cfec
+  data.tar.gz: 35432fe77f9de33e73064bfd046097dfde0130e48b4f2fe5e9a4ca2ef3cbbc9291c1680e0ac7f76b585cd0509a45c1b6cc193df59778ae62e19ba4294ad7e1d9
data/lib/perf_check/callbacks.rb
ADDED
@@ -0,0 +1,42 @@
+# coding: utf-8
+class PerfCheck
+  def self.when_finished(&block)
+    @when_finished_callbacks ||= []
+    @when_finished_callbacks << block
+  end
+
+  def self.when_finished_callbacks
+    @when_finished_callbacks || []
+  end
+
+  def self.before_start(&block)
+    @before_start_callbacks ||= []
+    @before_start_callbacks << block
+  end
+
+  def self.before_start_callbacks
+    (@before_start_callbacks || []) + [
+      proc {
+        logger.info("=" * 77)
+        logger.info("PERRRRF CHERRRK! Grab a ☕️ and don't touch your working tree (we automate git)")
+        logger.info("=" * 77)
+      }
+    ]
+  end
+
+
+  def trigger_before_start_callbacks
+    PerfCheck.before_start_callbacks.each{ |f| f.call(self) }
+  end
+
+  def trigger_when_finished_callbacks(data={})
+    data = data.merge(:current_branch => PerfCheck::Git.current_branch)
+    results = OpenStruct.new(data)
+    results[:ARGV] = ORIGINAL_ARGV
+    if test_cases.size == 1
+      results.current_latency = test_cases.first.this_latency
+      results.reference_latency = test_cases.first.reference_latency
+    end
+    PerfCheck.when_finished_callbacks.each{ |f| f.call(results) }
+  end
+end
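Editor's note: the hooks above are meant to be registered from a host project's config/perf_check.rb, which PerfCheck.load_config (in config.rb, below) requires when it exists. A minimal sketch of such a file, using only the APIs visible in this diff (the log message and latency report are illustrative, not part of the gem):

    # config/perf_check.rb (hypothetical host project file)
    PerfCheck.before_start do |perf_check|
      # Runs before benchmarking begins; receives the PerfCheck instance.
      perf_check.logger.info("Kicking off a perf_check run")
    end

    PerfCheck.when_finished do |results|
      # results is an OpenStruct carrying :current_branch and :ARGV; when exactly
      # one test case was run it also has :current_latency and :reference_latency.
      if results.reference_latency
        delta = results.current_latency - results.reference_latency
        puts format("%s: %+.1fms vs reference", results.current_branch, delta)
      end
    end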
data/lib/perf_check/config.rb
ADDED
@@ -0,0 +1,126 @@
+require 'optparse'
+
+class PerfCheck
+  def self.config
+    @config ||= OpenStruct.new(
+      number_of_requests: 10,
+      reference: 'master',
+      cookie: nil,
+      http_statuses: [200],
+      verify_responses: false,
+      caching: true,
+      json: false
+    )
+  end
+
+  def config
+    PerfCheck.config
+  end
+
+  alias :options :config
+
+  Options = OptionParser.new do |opts|
+    opts.banner = "Usage: perf_check [options] [route ...]"
+
+    opts.separator "\nBenchmark options:"
+    opts.on('--requests N', '-n',
+            'Use N requests in benchmark, defaults to 10') do |n|
+      config.number_of_requests = n.to_i
+    end
+
+    opts.on('--reference COMMIT', '-r',
+            'Benchmark against COMMIT instead of master') do |commit|
+      config.reference = commit
+    end
+
+    opts.on('--quick', '-q',
+            'Fire off 5 requests just on this branch, no comparison with master') do
+      config.number_of_requests = 5
+      config.reference = nil
+    end
+
+    opts.on('--no-caching', 'Do not enable fragment caching') do
+      config.caching = false
+    end
+
+    opts.on('--fail-fast', '-f', 'Bail immediately on non-200 HTTP response') do
+      config[:fail_fast?] = true
+    end
+
+    opts.on('--302-success', 'Consider HTTP 302 code a successful request') do
+      config.http_statuses.push(302)
+    end
+
+    opts.on('--302-failure', 'Consider HTTP 302 code an unsuccessful request') do
+      config.http_statuses.delete(302)
+    end
+
+    opts.separator "\nMisc"
+    opts.on('--cookie COOKIE', '-c') do |cookie|
+      config.cookie = cookie
+    end
+
+    opts.on('--json', '-j') do
+      config.json = true
+    end
+
+    opts.on('--input FILE', '-i') do |input|
+      File.readlines(input).each do |resource|
+        ARGV << resource.strip
+      end
+    end
+
+    opts.on('--verify-responses',
+            'Check whether there is a diff between the responses of this and the reference branch') do
+      config.verify_responses = true
+    end
+
+    opts.on('--brief', '-b') do
+      config.brief = true
+    end
+
+    opts.on('--diff') do
+      config.diff = true
+      config.brief = true
+      config.verify_responses = true
+      config.number_of_requests = 1
+    end
+
+    opts.separator ''
+    opts.separator <<EOF
+Usage examples:
+  Benchmark PostController#index against master
+    perf_check /user/45/posts
+    perf_check /user/45/posts -n5
+
+  Benchmark against a specific commit
+    perf_check /user/45/posts -r 0123abcdefg
+    perf_check /user/45/posts -r HEAD~2
+
+  Benchmark the changes in the working tree
+    perf_check /user/45/posts -r HEAD
+
+  Benchmark and diff the output against master
+    perf_check /user/45/posts --verify-responses
+
+  Just diff the output on your branch with master
+    perf_check /user/45/posts --diff
+
+  Diff a bunch of urls listed in a file (newline seperated)
+    perf_check --diff --input FILE
+EOF
+
+    opts.separator ''
+  end
+
+  def self.diff_options
+    @@diff_options ||=
+      ['-U3', '--ignore-matching-lines=/mini-profiler-resources/includes.js']
+  end
+
+  def self.load_config
+    if File.exists?("#{app_root}/config/perf_check.rb")
+      require "#{app_root}/config/perf_check"
+    end
+  end
+end
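Editor's note: Options is a plain OptionParser instance, so a caller can drive it directly. A sketch of the CLI-side wiring, assuming bin/perf_check does something along these lines (the executable itself is not part of this diff):

    require 'perf_check'

    PerfCheck::Options.parse!(ARGV)   # strips recognized flags, leaving routes in ARGV
    config = PerfCheck.config         # OpenStruct populated by the flag handlers above
    puts "#{config.number_of_requests} requests per route, " \
         "reference: #{config.reference.inspect}, routes: #{ARGV.inspect}"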
data/lib/perf_check/logger.rb
ADDED
@@ -0,0 +1,19 @@
+
+require 'logger'
+
+class PerfCheck
+  def self.logger
+    @logger ||= Logger.new(STDERR).tap do |logger|
+      logger.formatter = proc do |severity, datetime, progname, msg|
+        "[#{datetime}] #{sprintf('%5s', severity)} --: #{msg}\n"
+      end
+    end
+  end
+
+  def logger; self.class.logger; end
+end
+
+class Object
+  def self.logger; PerfCheck.logger; end
+  def logger; PerfCheck.logger; end
+end
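Editor's note: both PerfCheck.logger and the Object monkey patch route everything to a single STDERR logger; the custom formatter produces lines like the illustrative one in the comment below:

    PerfCheck.logger.info("starting up")
    # => [2015-01-01 12:00:00 +0000]  INFO --: starting up   (written to STDERR)

    logger.warn("works from any object thanks to the Object patch")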
data/lib/perf_check/middleware.rb
ADDED
@@ -0,0 +1,24 @@
+class PerfCheck
+  class Middleware
+    attr_reader :app
+    attr_accessor :query_count
+
+    def initialize(app)
+      @app = app
+
+      self.query_count = 0
+      ActiveSupport::Notifications.subscribe('sql.active_record') do |_|
+        self.query_count += 1
+      end
+    end
+
+    def call(env)
+      self.query_count = 0
+      status, headers, body = app.call(env)
+
+      headers['X-PerfCheck-Query-Count'] = query_count.to_s
+
+      [status, headers, body]
+    end
+  end
+end
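Editor's note: the middleware counts sql.active_record notifications per request and reports the total in an X-PerfCheck-Query-Count response header. The gem ships lib/perf_check/railtie.rb (see the metadata below), which presumably wires this in; a hand-wired sketch for a hypothetical Rails app would look like:

    # config/application.rb (hypothetical host app)
    require 'perf_check/middleware'

    module MyApp
      class Application < Rails::Application
        # Appends the query-counting middleware to the Rack middleware stack.
        config.middleware.use PerfCheck::Middleware
      end
    end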
data/lib/perf_check/output.rb
ADDED
@@ -0,0 +1,112 @@
+class PerfCheck
+
+  def print_diff_results(diff)
+    if diff.changed?
+      print(" Diff: #{diff.file}".bold.light_red)
+    else
+      print(" Diff: Output is identical!".bold.light_green)
+    end
+  end
+
+  def print_brief_results
+    test_cases.each do |test|
+      print(test.resource.ljust(40) + ': ')
+
+      codes = (test.this_profiles+test.reference_profiles).map(&:response_code).uniq
+      print("(HTTP "+codes.join(',')+") ")
+
+      printf('%.1fms', test.this_latency)
+
+      puts && next if test.reference_profiles.empty?
+
+      print(sprintf(' (%+5.1fms)', test.latency_difference).bold)
+      print_diff_results(test.response_diff) if options.verify_responses
+      puts
+    end
+  end
+
+  def print_full_results
+    puts("==== Results ====")
+    test_cases.each do |test|
+      puts(test.resource.bold)
+
+      if test.reference_profiles.empty?
+        printf("your branch: ".rjust(15)+"%.1fms\n", test.this_latency)
+        next
+      end
+
+      master_latency = sprintf('%.1fms', test.reference_latency)
+      this_latency = sprintf('%.1fms', test.this_latency)
+      difference = sprintf('%+.1fms', test.latency_difference)
+
+      if test.latency_difference < 0
+        change_factor = test.reference_latency / test.this_latency
+      else
+        change_factor = test.this_latency / test.reference_latency
+      end
+      formatted_change = sprintf('%.1fx', change_factor)
+
+      percent_change = 100*(test.latency_difference / test.reference_latency).abs
+      if percent_change < 10
+        formatted_change = "yours is about the same"
+        color = :blue
+      elsif test.latency_difference < 0
+        formatted_change = "yours is #{formatted_change} faster!"
+        color = :green
+      else
+        formatted_change = "yours is #{formatted_change} slower!!!"
+        color = :light_red
+      end
+      formatted_change = difference + " (#{formatted_change})"
+
+      puts("reference: ".rjust(15) + "#{master_latency}")
+      puts("your branch: ".rjust(15)+ "#{this_latency}")
+      puts(("change: ".rjust(15) + "#{formatted_change}").bold.send(color))
+
+      print_diff_results(test.response_diff) if options.verify_responses
+    end
+  end
+
+  def print_json_results
+    results = []
+    test_cases.each do |test|
+      results.push(
+        route: test.resource,
+        latency: test.this_latency,
+        query_count: test.this_query_count,
+        requests: []
+      )
+
+      test.this_profiles.each do |profile|
+        results[-1][:requests].push(
+          latency: profile.latency,
+          query_count: profile.query_count,
+          server_memory: profile.server_memory,
+          response_code: profile.response_code,
+          miniprofiler_url: profile.profile_url
+        )
+      end
+
+      if options.reference
+        results[-1].merge!(
+          reference_latency: test.reference_latency,
+          latency_difference: test.latency_difference,
+          speedup_factor: test.reference_latency / test.this_latency,
+          reference_query_count: test.reference_query_count,
+          reference_requests: []
+        )
+
+        test.reference_profiles.each do |profile|
+          results[-1][:reference_requests].push(
+            latency: profile.latency,
+            query_count: profile.query_count,
+            server_memory: profile.server_memory,
+            response_code: profile.response_code,
+            miniprofiler_url: profile.profile_url
+          )
+        end
+      end
+    end
+    puts JSON.pretty_generate(results)
+  end
+end
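Editor's note: for reference, print_json_results emits an array of per-route hashes. As a Ruby literal the shape is roughly the following; all values are invented for illustration, and the reference_* keys appear only when a reference branch was benchmarked:

    require 'json'

    sample = [{
      route: "/user/45/posts",
      latency: 120.3,                  # latency on your branch, in ms
      query_count: 12,
      requests: [
        { latency: 118.9, query_count: 12, server_memory: 154_000,
          response_code: 200, miniprofiler_url: "http://localhost:3000/..." }
      ],
      reference_latency: 180.1,
      latency_difference: -59.8,
      speedup_factor: 1.5,
      reference_query_count: 14,
      reference_requests: []           # same per-request hashes as :requests
    }]

    puts JSON.pretty_generate(sample)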
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: perf_check
 version: !ruby/object:Gem::Version
-  version: 0.2.1
+  version: 0.2.2
 platform: ruby
 authors:
 - rubytune
@@ -47,7 +47,12 @@ extra_rdoc_files: []
 files:
 - bin/perf_check
 - lib/perf_check.rb
+- lib/perf_check/callbacks.rb
+- lib/perf_check/config.rb
 - lib/perf_check/git.rb
+- lib/perf_check/logger.rb
+- lib/perf_check/middleware.rb
+- lib/perf_check/output.rb
 - lib/perf_check/railtie.rb
 - lib/perf_check/server.rb
 - lib/perf_check/test_case.rb