tylerkovacs-custom_benchmarks 0.4.0
Sign up to get free protection for your applications and to get access to all the features.
- data/VERSION.yml +4 -0
- data/lib/adapters/memcache-client.rb +134 -0
- data/lib/custom_benchmarks.rb +180 -0
- metadata +57 -0
data/lib/adapters/memcache-client.rb
ADDED
@@ -0,0 +1,134 @@
|
|
1
|
+
# Add this line to your ApplicationController (app/controllers/application.rb)
# to enable logging for memcache-client:
# custom_benchmark {|runtime| MemCache.cache_runtime(runtime) }

# Monkey-patch for the memcache-client gem: wraps get/get_multi/set/add/delete
# so that latency, call counts, hit/miss counts and (optionally) payload sizes
# are accumulated in class-level counters, which MemCache.cache_runtime then
# formats into a fragment for the custom_benchmarks log line.
class MemCache
  @@record_size = false   # when true, Marshal-dump values to record their sizes
  @@logger = nil          # per-operation logger (GET/SET/... lines); nil = off
  @@error_logger = nil    # destination for rescued MemCacheError details
  @@cache_latency = 0.0
  @@cache_gets = 0
  @@cache_sets = 0
  @@cache_deletes = 0
  @@cache_hits = 0
  @@cache_misses = 0
  @@get_data_size = 0
  @@set_data_size = 0

  cattr_accessor :record_size, :logger, :error_logger

  # Zero every accumulated counter. Call at the start of each request
  # (e.g. from a before_filter) when using the memcache session store.
  def self.reset_benchmarks
    @@cache_latency = 0.0
    @@cache_gets = 0
    @@cache_sets = 0
    @@cache_deletes = 0
    @@cache_hits = 0
    @@cache_misses = 0
    @@get_data_size = 0
    @@set_data_size = 0
  end

  # Snapshot of the counters, in the order cache_runtime destructures them.
  def self.get_benchmarks
    [@@cache_latency, @@cache_gets, @@get_data_size, @@cache_sets, @@set_data_size, @@cache_deletes, @@cache_hits, @@cache_misses]
  end

  # Formats the accumulated stats as a log fragment. +runtime+ is the total
  # request time, used to express the cache latency as a percentage.
  def self.cache_runtime(runtime)
    latency, gets, gsize, sets, ssize, deletes, hits, misses = get_benchmarks

    # Since we're using memcache store, the reset_benchmarks method call must
    # appear at the beginning of the request. This is necessary before
    # Rails saves the session to the store after the entry is written to
    # production.log. If you don't clear the MemCache stats at the beginning
    # of the request, then the session save from other requests pollutes the
    # cache stats for the subsequent requests. Use a before_filter in
    # application.rb:
    # before_filter { MemCache.reset_benchmarks }
    # If you aren't using memcache session store then you uncomment this line:
    # self.reset_benchmarks

    " | memcache: #{sprintf("%.5f,%d,%d,%d,%d,%d,%d,%d",latency,gets,gsize,sets,ssize,deletes,hits,misses)} (#{sprintf("%d", (latency * 100) / runtime)}%)"
  end

  # Runs the given block, turning MemCache connection failures into nil.
  # Unexpected errors are additionally reported through SystemNotifier
  # (except in development).
  def rescue_no_connection
    yield
  rescue MemCache::MemCacheError => err
    @@error_logger.info([Time.now.to_s, err.message, err.backtrace].compact.join("\n")) if @@error_logger
    if err.message != "No connection to server" && err.message !~ /^lost connection/i && ENV['RAILS_ENV'] != 'development'
      SystemNotifier.deliver_non_controller_exception_notification(err)
    end
    nil
  end

  # Instrumented GET: counts hits/misses (nil result counts as a miss),
  # accumulates latency and, optionally, the marshaled value size.
  def get_with_benchmark(key, raw=false)
    started = Time.now
    val = rescue_no_connection { get_without_benchmark(key, raw) }
    if val.nil?
      @@cache_misses += 1
    else
      @@cache_hits += 1
    end
    @@cache_latency += Time.now - started
    @@cache_gets += 1
    size = @@record_size ? (Marshal.dump(val).length rescue 0) : 0
    @@get_data_size += size if @@record_size
    @@logger.info("MEMCACHE GET #{key} SIZE #{size} TIME #{Time.now - started}") if @@logger
    val
  end
  alias_method :get_without_benchmark, :get
  alias_method :get, :get_with_benchmark
  alias [] get_with_benchmark

  # Instrumented GETMULTI: counted as a single get; per-key sizes are logged
  # under a request_id derived from the start timestamp.
  def get_multi_with_benchmark(*keys)
    started = Time.now
    vals = rescue_no_connection { get_multi_without_benchmark(keys) }
    @@cache_latency += Time.now - started
    @@cache_gets += 1
    if @@logger || @@record_size
      request_id = started.to_f.to_s.last(4)
      keys.each do |key|
        size = @@record_size ? (Marshal.dump(vals[key]).length rescue 0) : 0
        @@get_data_size += size if @@record_size
        @@logger.info("MEMCACHE GETMULTI ID #{request_id} KEY #{key} SIZE #{size} TIME #{Time.now - started}") if @@logger
      end
    end
    vals
  end
  alias_method :get_multi_without_benchmark, :get_multi
  alias_method :get_multi, :get_multi_with_benchmark

  # Instrumented SET: accumulates latency and optional payload size.
  def set_with_benchmark(key, val, expiry=0, raw=false)
    started = Time.now
    rescue_no_connection { set_without_benchmark(key, val, expiry, raw) }
    @@cache_latency += Time.now - started
    @@cache_sets += 1
    size = @@record_size ? (Marshal.dump(val).length rescue 0) : 0
    @@set_data_size += size if @@record_size
    @@logger.info("MEMCACHE SET #{key} SIZE #{size} TIME #{Time.now - started}") if @@logger
  end
  alias_method :set_without_benchmark, :set
  alias_method :set, :set_with_benchmark
  alias []= set_with_benchmark

  # Instrumented ADD: counted together with sets.
  def add_with_benchmark(key, val, expiry=0, raw=false)
    started = Time.now
    rescue_no_connection { add_without_benchmark(key, val, expiry, raw) }
    @@cache_latency += Time.now - started
    @@cache_sets += 1
    size = @@record_size ? (Marshal.dump(val).length rescue 0) : 0
    @@set_data_size += size if @@record_size
    @@logger.info("MEMCACHE ADD #{key} SIZE #{size} TIME #{Time.now - started}") if @@logger
  end
  alias_method :add_without_benchmark, :add
  alias_method :add, :add_with_benchmark

  # Instrumented DELETE.
  def delete_with_benchmark(key, expiry=0)
    started = Time.now
    rescue_no_connection { delete_without_benchmark(key, expiry) }
    @@cache_latency += Time.now - started
    @@cache_deletes += 1
    @@logger.info("MEMCACHE DELETE #{key} TIME #{Time.now - started}") if @@logger
  end
  alias_method :delete_without_benchmark, :delete
  alias_method :delete, :delete_with_benchmark
end
|
@@ -0,0 +1,180 @@
|
|
1
|
+
# Custom Benchmarks
#
# Custom Benchmarks allow you to easily log your own information to the
# rails log at the end of each request. The standard rails summary log
# line looks like this:
#
# Completed in 5ms (View: 3, DB: 2) | 200 OK [http://zvm/]
#
# With custom_benchmarks, an additional line is added to the output that
# contains as many metrics as you like for each request. e.g.,
#
# Completed in 5ms (View: 3, DB: 2) | 200 OK [http://zvm/]
# Finished WelcomeController#index in 0.08545 (11 reqs/sec) DB: 2 | PID: 30796 | Time: 1233202720 | 200 OK [http://zvm/]
#
# Typically, the log line includes the latency associated with executing
# specific parts of a request. In the example above, we have added a
# measurement of search latency. But you can use Custom Benchmarks to add
# any information to the log line. The example above also shows the ID of
# the process (PID) that served this request. The PID is useful when parsing
# information from logs that contain data from multiple processes.
#
# Simple Example: Logging the Process ID
#
# To add the PID as a custom benchmark field, simply add a custom_benchmark
# line like the following to your ApplicationController:
#
# class ApplicationController < ActionController::Base
#   custom_benchmark {|runtime| " | PID: #{$$}" }
#   ...
# end
#
# Declare your custom_benchmark with a block that expects an input parameter
# called runtime. runtime, which isn't used in this example, contains the
# overall latency of the entire request. Later, we'll show you an example
# of using runtime to calculate percentage latency below. custom_benchmark
# expects your block to return a string - which will be inserted in the
# log file immediately before the status (e.g., 200 OK [http://www.zvents.com/])
#
# Complex Example: Logging Arbitrary Latency
#
# Let's say that your application includes a search function that is powered
# by Lucene. Like SQL calls issued to a database, calls to Lucene can take
# a while so you want to log your search latency.
#
# The first step is to set up a mechanism that allows you to record your
# search latency for each request. You can do that with something like this:
#
# class MySearch
#   @@latency = 0.0
#   cattr_accessor :latency
#
#   def run_search
#     @@latency = Benchmark::measure{
#       # execute the call to Lucene here
#     }.real
#   end
#
#   def self.get_timing_summary(runtime)
#     summary = " | Search: #{sprintf("%.5f",@@latency)} (#{sprintf("%d", (@@latency * 100) / runtime)}%)"
#     @@latency = 0.0
#     summary
#   end
# end
#
# The run_search method uses Benchmark::measure to record the latency of the
# search. The get_timing_summary class method, which will be invoked by
# a custom_benchmark, returns a formatted string summarizing the search
# latency in absolute and percentage terms. It also resets the value
# of @@latency to avoid affecting subsequent queries.
#
# Finally, we just need to add a custom_benchmark statement to the
# ApplicationController:
#
# custom_benchmark {|runtime| MySearch.get_timing_summary(runtime) }

module ActionController #:nodoc:
  module CustomBenchmarking #:nodoc:
    # Hooks perform_action so every request emits the custom benchmark line.
    def self.included(base)
      base.extend(ClassMethods)

      #if ENV['RAILS_ENV'] != "test"
      base.class_eval do
        alias_method :perform_action_without_custom_benchmark, :perform_action
        alias_method :perform_action, :perform_action_with_custom_benchmark
      end
      #end
    end

    module ClassMethods
      # Registers a block to run at the end of each request. The block
      # receives the request runtime (seconds) and must return a string
      # that is appended to the benchmark log line.
      def custom_benchmark(*benchmark, &block)
        #return if ENV['RAILS_ENV'] == "test"

        return unless block_given?

        registered = read_inheritable_attribute(:custom_benchmarks) || []
        write_inheritable_attribute(:custom_benchmarks, registered << block)
      end

      # All benchmark blocks registered on this controller class (memoized).
      def custom_benchmarks
        @custom_benchmarks ||= read_inheritable_attribute(:custom_benchmarks) || []
      end
    end

    # Wraps the original perform_action, timing it and writing one summary
    # line containing the runtime, DB/render time, every registered custom
    # benchmark fragment, the epoch timestamp, status and request URI.
    def perform_action_with_custom_benchmark
      return perform_action_without_custom_benchmark unless logger

      start_time = Time.now
      perform_action_without_custom_benchmark
      runtime = Time.now - start_time

      log_message = ["Finished #{controller_class_name}\##{action_name} in #{sprintf("%.5f", runtime)} (#{(1 / runtime).floor} reqs/sec)"]
      if Object.const_defined?("ActiveRecord") && ActiveRecord::Base.connected?
        log_message << active_record_runtime
      end
      log_message << rendering_runtime(runtime) if @rendering_runtime
      self.class.custom_benchmarks.each do |measure|
        log_message << measure.call(runtime)
      end
      log_message << "| Time: #{Time.now.to_i}"
      log_message << "| #{headers["Status"]}"
      log_message << "[#{complete_request_uri rescue "unknown"}]"
      logger.info(log_message.join(' '))
    end
  end
end
|
128
|
+
|
129
|
+
module ActiveRecord
  module ConnectionAdapters # :nodoc:
    class AbstractAdapter
      # Monkey-patch: in addition to the per-call SQL runtime (@runtime),
      # keep a cumulative total (@total_runtime) so custom benchmarks can
      # report DB latency for a whole request even after intermediate resets.
      def initialize(connection, logger = nil) #:nodoc:
        @connection, @logger = connection, logger
        @runtime = 0
        @total_runtime = 0
        @last_verification = 0
      end

      # Returns the SQL time accumulated so far and zeroes the per-call
      # counter. With +reset+ true, returns the cumulative total instead
      # and zeroes both counters.
      def reset_runtime(reset=false) #:nodoc:
        if reset
          rt, @runtime, @total_runtime = @total_runtime, 0, 0
        else
          # Fixed: the original wrote `rt, @runtime = @runtime, 0, 0` —
          # the stray third right-hand value was silently discarded.
          rt, @runtime = @runtime, 0
        end

        rt
      end

      protected
        # Times the given block (the actual database call), adding the
        # elapsed seconds to @runtime and @total_runtime, and logs via
        # log_info when the logger level allows. Without a block it only
        # logs. Any failure is re-raised as ActiveRecord::StatementInvalid
        # after resetting @last_verification.
        def log(sql, name)
          if block_given?
            if @logger && @logger.level <= Logger::INFO
              result = nil
              seconds = Benchmark.realtime { result = yield }
              @runtime += seconds
              @total_runtime += seconds
              log_info(sql, name, seconds)
              result
            else
              # `result` is already a method-local here (declared by the
              # assignment in the branch above), so the block writes it.
              seconds = Benchmark.realtime { result = yield }
              @runtime += seconds
              @total_runtime += seconds
              result
            end
          else
            log_info(sql, name, 0)
            nil
          end
        rescue Exception => e
          # Log message and raise exception.
          # Set last_verification to 0, so that connection gets verified
          # upon reentering the request loop
          @last_verification = 0
          message = "#{e.class.name}: #{e.message}: #{sql}"
          log_info(message, name, 0)
          raise ActiveRecord::StatementInvalid, message
        end
    end
  end
end
|
metadata
ADDED
@@ -0,0 +1,57 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
name: tylerkovacs-custom_benchmarks
version: !ruby/object:Gem::Version
  version: 0.4.0
platform: ruby
authors:
- tylerkovacs
autorequire:
bindir: bin
cert_chain: []

date: 2009-01-28 00:00:00 -08:00
default_executable:
dependencies: []

description: Custom Benchmarks allow you to easily log your own information to the rails log at the end of each request.
email: tyler.kovacs@gmail.com
executables: []

extensions: []

extra_rdoc_files: []

files:
- VERSION.yml
- lib/custom_benchmarks.rb
- lib/adapters
- lib/adapters/memcache-client.rb
has_rdoc: true
homepage: http://github.com/tylerkovacs/custom_benchmarks
post_install_message:
rdoc_options:
- --inline-source
- --charset=UTF-8
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: "0"
  version:
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: "0"
  version:
requirements: []

rubyforge_project:
rubygems_version: 1.2.0
signing_key:
specification_version: 2
summary: custom_benchmarks
test_files: []