newrelic_rpm 3.5.7.59 → 3.5.8.64.beta
- data.tar.gz.sig +3 -2
- data/CHANGELOG +34 -3
- data/LICENSE +23 -0
- data/lib/new_relic/agent.rb +50 -3
- data/lib/new_relic/agent/agent.rb +40 -60
- data/lib/new_relic/agent/configuration/defaults.rb +9 -3
- data/lib/new_relic/agent/configuration/server_source.rb +4 -0
- data/lib/new_relic/agent/cross_app_monitor.rb +230 -0
- data/lib/new_relic/agent/cross_app_tracing.rb +274 -0
- data/lib/new_relic/agent/database.rb +28 -10
- data/lib/new_relic/agent/error_collector.rb +5 -0
- data/lib/new_relic/agent/event_listener.rb +4 -0
- data/lib/new_relic/agent/instrumentation/controller_instrumentation.rb +53 -34
- data/lib/new_relic/agent/instrumentation/metric_frame.rb +16 -3
- data/lib/new_relic/agent/instrumentation/net.rb +13 -11
- data/lib/new_relic/agent/instrumentation/resque.rb +10 -10
- data/lib/new_relic/agent/instrumentation/sinatra.rb +19 -9
- data/lib/new_relic/agent/new_relic_service.rb +63 -9
- data/lib/new_relic/agent/pipe_service.rb +8 -12
- data/lib/new_relic/agent/rules_engine.rb +72 -0
- data/lib/new_relic/agent/shim_agent.rb +0 -1
- data/lib/new_relic/agent/sql_sampler.rb +3 -2
- data/lib/new_relic/agent/stats.rb +149 -0
- data/lib/new_relic/agent/stats_engine.rb +9 -0
- data/lib/new_relic/agent/stats_engine/gc_profiler.rb +1 -24
- data/lib/new_relic/agent/stats_engine/metric_stats.rb +84 -185
- data/lib/new_relic/agent/stats_engine/stats_hash.rb +58 -0
- data/lib/new_relic/agent/stats_engine/transactions.rb +10 -2
- data/lib/new_relic/agent/transaction_info.rb +31 -6
- data/lib/new_relic/agent/transaction_sample_builder.rb +19 -8
- data/lib/new_relic/agent/transaction_sampler.rb +17 -10
- data/lib/new_relic/helper.rb +32 -0
- data/lib/new_relic/local_environment.rb +24 -32
- data/lib/new_relic/okjson.rb +599 -0
- data/lib/new_relic/transaction_sample.rb +2 -1
- data/lib/new_relic/transaction_sample/segment.rb +2 -1
- data/lib/new_relic/version.rb +1 -1
- data/newrelic.yml +27 -41
- data/test/multiverse/suites/agent_only/audit_log_test.rb +2 -4
- data/test/multiverse/suites/agent_only/config/newrelic.yml +1 -2
- data/test/multiverse/suites/agent_only/{cross_process_test.rb → cross_application_tracing_test.rb} +3 -3
- data/test/multiverse/suites/agent_only/key_transactions_test.rb +66 -0
- data/test/multiverse/suites/agent_only/marshaling_test.rb +9 -22
- data/test/multiverse/suites/agent_only/rename_rule_test.rb +57 -0
- data/test/multiverse/suites/agent_only/start_up_test.rb +1 -1
- data/test/multiverse/suites/agent_only/thread_profiling_test.rb +17 -6
- data/test/multiverse/suites/rails/error_tracing_test.rb +20 -8
- data/test/multiverse/suites/resque/instrumentation_test.rb +2 -2
- data/test/multiverse/suites/sinatra/Envfile +2 -0
- data/test/multiverse/suites/sinatra/config/newrelic.yml +1 -0
- data/test/multiverse/suites/sinatra/sinatra_metric_explosion_test.rb +5 -5
- data/test/multiverse/suites/sinatra/sinatra_test.rb +75 -4
- data/test/new_relic/agent/agent/connect_test.rb +45 -1
- data/test/new_relic/agent/agent/start_worker_thread_test.rb +0 -3
- data/test/new_relic/agent/agent_test.rb +20 -40
- data/test/new_relic/agent/agent_test_controller_test.rb +24 -19
- data/test/new_relic/agent/busy_calculator_test.rb +1 -1
- data/test/new_relic/agent/configuration/server_source_test.rb +8 -3
- data/test/new_relic/agent/cross_app_monitor_test.rb +237 -0
- data/test/new_relic/agent/database_test.rb +60 -16
- data/test/new_relic/agent/error_collector_test.rb +28 -4
- data/test/new_relic/agent/event_listener_test.rb +23 -2
- data/test/new_relic/agent/instrumentation/controller_instrumentation_test.rb +53 -0
- data/test/new_relic/agent/instrumentation/metric_frame_test.rb +95 -0
- data/test/new_relic/agent/instrumentation/net_instrumentation_test.rb +414 -59
- data/test/new_relic/agent/instrumentation/task_instrumentation_test.rb +2 -5
- data/test/new_relic/agent/method_tracer_test.rb +4 -2
- data/test/new_relic/agent/new_relic_service_test.rb +108 -6
- data/test/new_relic/agent/pipe_channel_manager_test.rb +1 -1
- data/test/new_relic/agent/pipe_service_test.rb +9 -9
- data/test/new_relic/agent/rpm_agent_test.rb +0 -11
- data/test/new_relic/agent/rules_engine_test.rb +82 -0
- data/test/new_relic/agent/shim_agent_test.rb +0 -4
- data/test/new_relic/agent/sql_sampler_test.rb +7 -0
- data/test/new_relic/agent/stats_engine/gc_profiler_test.rb +85 -0
- data/test/new_relic/agent/stats_engine/metric_stats_test.rb +110 -23
- data/test/new_relic/agent/stats_engine_test.rb +1 -46
- data/test/new_relic/agent/stats_hash_test.rb +93 -0
- data/test/new_relic/agent/stats_test.rb +197 -0
- data/test/new_relic/agent/transaction_info_test.rb +63 -11
- data/test/new_relic/agent/transaction_sample_builder_test.rb +10 -3
- data/test/new_relic/agent/transaction_sampler_test.rb +92 -80
- data/test/new_relic/agent_test.rb +35 -5
- data/test/new_relic/control_test.rb +1 -1
- data/test/new_relic/fake_collector.rb +87 -9
- data/test/new_relic/helper_test.rb +24 -0
- data/test/new_relic/metric_data_test.rb +11 -11
- data/test/new_relic/metric_spec_test.rb +1 -1
- data/test/script/ci.sh +1 -1
- data/test/test_contexts.rb +0 -1
- data/test/test_helper.rb +21 -3
- metadata +32 -16
- metadata.gz.sig +0 -0
- data/lib/new_relic/agent/cross_process_monitoring.rb +0 -187
- data/lib/new_relic/stats.rb +0 -337
- data/test/new_relic/agent/cross_process_monitoring_test.rb +0 -190
- data/test/new_relic/agent/stats_engine/metric_stats/harvest_test.rb +0 -133
- data/test/new_relic/fakes_sending_data.rb +0 -30
- data/test/new_relic/stats_test.rb +0 -421
metadata.gz.sig
CHANGED
Binary file
data/lib/new_relic/agent/cross_process_monitoring.rb
DELETED
@@ -1,187 +0,0 @@
-require 'new_relic/rack/agent_hooks'
-require 'new_relic/agent/thread'
-
-module NewRelic
-  module Agent
-    class CrossProcessMonitor
-
-      def initialize(events = nil)
-        # When we're starting up for real in the agent, we get passed the events
-        # Other spots can pull from the agent, during startup the agent doesn't exist yet!
-        events ||= Agent.instance.events
-        @trusted_ids = []
-
-        events.subscribe(:finished_configuring) do
-          finish_setup(Agent.config)
-          register_event_listeners
-        end
-      end
-
-      def finish_setup(config)
-        @cross_process_id = config[:cross_process_id]
-        @encoding_key = config[:encoding_key]
-        @encoding_bytes = get_bytes(@encoding_key) unless @encoding_key.nil?
-        @trusted_ids = config[:trusted_account_ids] || []
-      end
-
-      # Expected sequence of events:
-      # :before_call will save our cross process request id to the thread
-      # :start_transaction will get called when a transaction starts up
-      # :after_call will write our response headers/metrics and clean up the thread
-      def register_event_listeners
-        NewRelic::Agent.logger.debug("Wiring up Cross Process monitoring to events after finished configuring")
-
-        events = Agent.instance.events
-        events.subscribe(:before_call) do |env|
-          save_client_cross_process_id(env)
-        end
-
-        events.subscribe(:start_transaction) do |name|
-          set_transaction_custom_parameters
-        end
-
-        events.subscribe(:after_call) do |env, (status_code, headers, body)|
-          insert_response_header(env, headers)
-        end
-
-        events.subscribe(:notice_error) do |_, options|
-          set_error_custom_parameters(options)
-        end
-      end
-
-      # Because we aren't in the right spot when our transaction actually
-      # starts, hold client_cross_process_id we get thread local until then.
-      THREAD_ID_KEY = :newrelic_client_cross_process_id
-
-      def save_client_cross_process_id(request_headers)
-        if should_process_request(request_headers)
-          NewRelic::Agent::AgentThread.current[THREAD_ID_KEY] = decoded_id(request_headers)
-        end
-      end
-
-      def clear_client_cross_process_id
-        NewRelic::Agent::AgentThread.current[THREAD_ID_KEY] = nil
-      end
-
-      def client_cross_process_id
-        NewRelic::Agent::AgentThread.current[THREAD_ID_KEY]
-      end
-
-      def insert_response_header(request_headers, response_headers)
-        unless client_cross_process_id.nil?
-          timings = NewRelic::Agent::BrowserMonitoring.timings
-          content_length = content_length_from_request(request_headers)
-
-          set_response_headers(response_headers, timings, content_length)
-          set_metrics(client_cross_process_id, timings)
-
-          clear_client_cross_process_id
-        end
-      end
-
-      def should_process_request(request_headers)
-        return Agent.config[:'cross_process.enabled'] &&
-          @cross_process_id &&
-          trusts?(request_headers)
-      end
-
-      # Expects an ID of format "12#345", and will only accept that!
-      def trusts?(request)
-        id = decoded_id(request)
-        split_id = id.match(/(\d+)#\d+/)
-        return false if split_id.nil?
-
-        @trusted_ids.include?(split_id.captures.first.to_i)
-      end
-
-      def set_response_headers(response_headers, timings, content_length)
-        response_headers['X-NewRelic-App-Data'] = build_payload(timings, content_length)
-      end
-
-      def build_payload(timings, content_length)
-
-        # FIXME The transaction name might not be properly encoded. use a json generator
-        # For now we just handle quote characters by dropping them
-        transaction_name = timings.transaction_name.gsub(/["']/, "")
-
-        payload = %[["#{@cross_process_id}","#{transaction_name}",#{timings.queue_time_in_seconds},#{timings.app_time_in_seconds},#{content_length}] ]
-        payload = obfuscate_with_key(payload)
-      end
-
-      def set_transaction_custom_parameters
-        # We expect to get the before call to set the id (if we have it) before
-        # this, and then write our custom parameter when the transaction starts
-        NewRelic::Agent.add_custom_parameters(:client_cross_process_id => client_cross_process_id) unless client_cross_process_id.nil?
-      end
-
-      def set_error_custom_parameters(options)
-        options[:client_cross_process_id] = client_cross_process_id unless client_cross_process_id.nil?
-      end
-
-      def set_metrics(id, timings)
-        metric = NewRelic::Agent.instance.stats_engine.get_stats_no_scope("ClientApplication/#{id}/all")
-        metric.record_data_point(timings.app_time_in_seconds)
-      end
-
-      def obfuscate_with_key(text)
-        Base64.encode64(encode_with_key(text)).chomp
-      end
-
-      def decode_with_key(text)
-        encode_with_key(Base64.decode64(text))
-      end
-
-      NEWRELIC_ID_HEADER_KEYS = %w{X-NewRelic-ID HTTP_X_NEWRELIC_ID X_NEWRELIC_ID}
-      CONTENT_LENGTH_HEADER_KEYS = %w{Content-Length HTTP_CONTENT_LENGTH CONTENT_LENGTH}
-
-      def decoded_id(request)
-        encoded_id = from_headers(request, NEWRELIC_ID_HEADER_KEYS)
-        return "" if encoded_id.nil?
-
-        decode_with_key(encoded_id)
-      end
-
-      def content_length_from_request(request)
-        from_headers(request, CONTENT_LENGTH_HEADER_KEYS) || -1
-      end
-
-      private
-
-      # Ruby 1.8.6 doesn't support the bytes method on strings.
-      def get_bytes(value)
-        return [] if value.nil?
-
-        bytes = []
-        value.each_byte do |b|
-          bytes << b
-        end
-        bytes
-      end
-
-      def encode_with_key(text)
-        key_bytes = @encoding_bytes
-
-        encoded = ""
-        index = 0
-        text.each_byte do |byte|
-          encoded.concat(byte ^ key_bytes[index % key_bytes.length].to_i)
-          index += 1
-        end
-        encoded
-      end
-
-      def from_headers(request, try_keys)
-        # For lookups, upcase all our keys on both sides just to be safe
-        upcased_keys = try_keys.map { |k| k.upcase }
-        upcased_keys.each do |header|
-          found_key = request.keys.find { |k| k.upcase == header }
-          return request[found_key] unless found_key.nil?
-        end
-        nil
-      end
-    end
-  end
-end
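The obfuscation used for the X-NewRelic-ID and X-NewRelic-App-Data headers in the removed class above is a symmetric XOR against the account's shared encoding key, wrapped in Base64; because XOR is its own inverse, decode_with_key is simply encode_with_key applied to the Base64-decoded text. Below is a minimal, self-contained sketch of that round trip; the helper names (xor_with_key, obfuscate, deobfuscate) and the sample key and payload are illustrative only, not part of the agent's API.

require 'base64'

# XOR each byte of text against the shared key, cycling through the key bytes.
def xor_with_key(text, key)
  key_bytes = key.bytes
  text.bytes.each_with_index.map { |byte, i| byte ^ key_bytes[i % key_bytes.length] }.pack('C*')
end

# Analogous to obfuscate_with_key above: XOR, then Base64-encode.
def obfuscate(text, key)
  Base64.encode64(xor_with_key(text, key)).chomp
end

# Analogous to decode_with_key above: Base64-decode, then the same XOR undoes itself.
def deobfuscate(text, key)
  xor_with_key(Base64.decode64(text), key)
end

payload = '["12#345","Controller/foo",0.01,0.2,-1]'
key     = 'shared-encoding-key'

header = obfuscate(payload, key)
deobfuscate(header, key) == payload  # => true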
data/lib/new_relic/stats.rb
DELETED
@@ -1,337 +0,0 @@
-
-module NewRelic
-  module Stats
-
-    # a stat is absent if its call count equals zero
-    def absent?
-      call_count == 0
-    end
-
-    # outputs a useful human-readable time given a value in milliseconds
-    def time_str(value_ms)
-      case
-      when value_ms >= 10000
-        "%.1f s" % (value_ms / 1000.0)
-      when value_ms >= 5000
-        "%.2f s" % (value_ms / 1000.0)
-      else
-        "%.0f ms" % value_ms
-      end
-    end
-
-    # makes sure we aren't dividing by zero
-    def checked_calculation(numerator, denominator)
-      if denominator.nil? || denominator == 0
-        0.0
-      else
-        numerator.to_f / denominator
-      end
-    end
-
-    def average_call_time
-      checked_calculation(total_call_time, call_count)
-    end
-    def average_exclusive_time
-      checked_calculation(total_exclusive_time, call_count)
-    end
-
-    # merge by adding to average response time
-    # - used to compose multiple metrics e.g. dispatcher time + mongrel queue time
-    def sum_merge!(other_stats)
-      Array(other_stats).each do |other|
-        self.sum_attributes(other)
-      end
-      self
-    end
-
-    def sum_attributes(other)
-      update_totals(other)
-      stack_min_max_from(other)
-      self.call_count = [self.call_count, other.call_count].max
-      update_boundaries(other)
-    end
-
-    def stack_min_max_from(other)
-      self.min_call_time += other.min_call_time
-      self.max_call_time += other.max_call_time
-    end
-
-    def update_boundaries(other)
-      self.begin_time = other.begin_time if should_replace_begin_time?(other)
-      self.end_time = other.end_time if should_replace_end_time?(other)
-    end
-
-    def should_replace_end_time?(other)
-      end_time.to_f < other.end_time.to_f
-    end
-
-    def should_replace_begin_time?(other)
-      other.begin_time.to_f < begin_time.to_f || begin_time.to_f == 0.0
-    end
-
-    def update_totals(other)
-      self.total_call_time += other.total_call_time
-      self.total_exclusive_time += other.total_exclusive_time
-      self.sum_of_squares += other.sum_of_squares
-    end
-
-    def min_time_less?(other)
-      (other.min_call_time < min_call_time && other.call_count > 0) || call_count == 0
-    end
-
-    def expand_min_max_to(other)
-      self.min_call_time = other.min_call_time if min_time_less?(other)
-      self.max_call_time = other.max_call_time if other.max_call_time > max_call_time
-    end
-
-    def merge_attributes(other)
-      update_totals(other)
-      expand_min_max_to(other)
-      self.call_count += other.call_count
-      update_boundaries(other)
-    end
-
-    def merge!(other_stats)
-      Array(other_stats).each do |other|
-        merge_attributes(other)
-      end
-
-      self
-    end
-
-    def merge(other_stats)
-      stats = self.clone
-      stats.merge!(other_stats)
-    end
-
-
-    def is_reset?
-      call_count == 0 && total_call_time == 0.0 && total_exclusive_time == 0.0
-    end
-
-    def reset
-      self.call_count = 0
-      self.total_call_time = 0.0
-      self.total_exclusive_time = 0.0
-      self.min_call_time = 0.0
-      self.max_call_time = 0.0
-      self.sum_of_squares = 0.0
-      self.begin_time = Time.at(0)
-      self.end_time = Time.at(0)
-    end
-
-    def as_percentage_of(other_stats)
-      checked_calculation(total_call_time, other_stats.total_call_time) * 100.0
-    end
-
-    # the stat total_call_time is a percent
-    def as_percentage
-      average_call_time * 100.0
-    end
-
-    def duration
-      end_time ? (end_time - begin_time) : 0.0
-    end
-
-    def midpoint
-      begin_time + (duration / 2)
-    end
-    def calls_per_minute
-      checked_calculation(call_count, duration) * 60
-    end
-
-    def total_call_time_per_minute
-      60.0 * time_percentage
-    end
-
-    def standard_deviation
-      return 0 if call_count < 2 || self.sum_of_squares.nil?
-
-      # Convert sum of squares into standard deviation based on
-      # formula for the standard deviation for the entire population
-      x = self.sum_of_squares - (self.call_count * (self.average_value**2))
-      return 0 if x <= 0
-
-      Math.sqrt(x / self.call_count)
-    end
-
-    # returns the time spent in this component as a percentage of the total
-    # time window.
-    def time_percentage
-      checked_calculation(total_call_time, duration)
-    end
-
-    def exclusive_time_percentage
-      checked_calculation(total_exclusive_time, duration)
-    end
-
-    alias average_value average_call_time
-    alias average_response_time average_call_time
-    alias requests_per_minute calls_per_minute
-
-    def to_s
-      summary
-    end
-
-    # Summary string to facilitate testing
-    def summary
-      format = "%m/%d/%y %I:%M%p"
-      "[#{Time.at(begin_time.to_f).utc.strftime(format)} UTC, #{'%2.3fs' % duration.to_f}; #{'%2i' % call_count.to_i} calls #{'%4i' % average_call_time.to_f}s]"
-    end
-
-    # multiply the total time and rate by the given percentage
-    def multiply_by(percentage)
-      self.total_call_time = total_call_time * percentage
-      self.call_count = call_count * percentage
-      self.sum_of_squares = sum_of_squares * percentage
-
-      self
-    end
-
-    # returns s,t,f
-    def get_apdex
-      [@call_count, @total_call_time.to_i, @total_exclusive_time.to_i]
-    end
-
-    def apdex_score
-      s, t, f = get_apdex
-      (s.to_f + (t.to_f / 2)) / (s + t + f).to_f
-    end
-  end
-
-
-  class StatsBase
-    include Stats
-
-    attr_accessor :call_count
-    attr_accessor :min_call_time
-    attr_accessor :max_call_time
-    attr_accessor :total_call_time
-    attr_accessor :total_exclusive_time
-    attr_accessor :sum_of_squares
-
-    def initialize
-      reset
-    end
-
-    def freeze
-      @end_time = Time.now
-      super
-    end
-
-    def to_json(*_)
-      {
-        'call_count' => call_count.to_i,
-        'min_call_time' => min_call_time.to_f,
-        'max_call_time' => max_call_time.to_f,
-        'total_call_time' => total_call_time.to_f,
-        'total_exclusive_time' => total_exclusive_time.to_f,
-        'sum_of_squares' => sum_of_squares.to_f
-      }.to_json(*_)
-    end
-
-
-    # In this class, we explicitly don't track begin and end time here, to save space during
-    # cross process serialization via xml. Still the accessor methods must be provided for merge to work.
-    def begin_time=(t)
-    end
-
-    def end_time=(t)
-    end
-
-    def begin_time
-      0.0
-    end
-
-    def end_time
-      0.0
-    end
-  end
-
-
-  class BasicStats < StatsBase
-  end
-
-  class ApdexStats < StatsBase
-
-    def record_apdex_s
-      @call_count += 1
-    end
-
-    def record_apdex_t
-      @total_call_time += 1
-    end
-
-    def record_apdex_f
-      @total_exclusive_time += 1
-    end
-  end
-
-  # Statistics used to track the performance of traced methods
-  class MethodTraceStats < StatsBase
-
-    alias data_point_count call_count
-
-    # record a single data point into the statistical gatherer. The gatherer
-    # will aggregate all data points collected over a specified period and upload
-    # its data to the NewRelic server
-    def record_data_point(value, exclusive_time = value)
-      @call_count += 1
-      @total_call_time += value
-      @min_call_time = value if value < @min_call_time || @call_count == 1
-      @max_call_time = value if value > @max_call_time
-      @total_exclusive_time += exclusive_time
-
-      @sum_of_squares += (value * value)
-      self
-    end
-
-    alias trace_call record_data_point
-
-    # Records multiple data points as one method call - this handles
-    # all the aggregation that would be done with multiple
-    # record_data_point calls
-    def record_multiple_data_points(total_value, count = 1)
-      return record_data_point(total_value) if count == 1
-      @call_count += count
-      @total_call_time += total_value
-      avg_val = total_value / count
-      @min_call_time = avg_val if avg_val < @min_call_time || @call_count == count
-      @max_call_time = avg_val if avg_val > @max_call_time
-      @total_exclusive_time += total_value
-      @sum_of_squares += (avg_val * avg_val) * count
-      self
-    end
-
-    # increments the call_count by one
-    def increment_count(value = 1)
-      @call_count += value
-    end
-
-    # outputs a human-readable version of the MethodTraceStats object
-    def inspect
-      "#<NewRelic::MethodTraceStats #{summary} >"
-    end
-
-  end
-
-  class ScopedMethodTraceStats < MethodTraceStats
-    attr_accessor :unscoped_stats
-    def initialize(unscoped_stats)
-      super()
-      self.unscoped_stats = unscoped_stats
-    end
-    def trace_call(call_time, exclusive_time = call_time)
-      unscoped_stats.trace_call call_time, exclusive_time
-      super call_time, exclusive_time
-    end
-    # Records multiple data points as one method call - this handles
-    # all the aggregation that would be done with multiple
-    # trace_call calls
-    def record_multiple_data_points(total_value, count = 1)
-      unscoped_stats.record_multiple_data_points(total_value, count)
-      super total_value, count
-    end
-  end
-end
-
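The removed module above never stores raw samples: each metric keeps only running aggregates (call count, total time, min/max, and a sum of squares), which is enough to recover the mean, the population standard deviation, and the Apdex score while using constant memory per metric. Below is a compact, self-contained sketch of that arithmetic; the TimingStats class and its method names are illustrative only and are not the NewRelic::Agent::Stats API that replaces this file.

# Aggregate timings the way MethodTraceStats#record_data_point does, then
# derive the mean and the population standard deviation from the aggregates.
class TimingStats
  attr_reader :call_count, :total_call_time, :sum_of_squares

  def initialize
    @call_count      = 0
    @total_call_time = 0.0
    @sum_of_squares  = 0.0
  end

  def record(seconds)
    @call_count      += 1
    @total_call_time += seconds
    @sum_of_squares  += seconds * seconds
    self
  end

  def average
    return 0.0 if @call_count.zero?
    @total_call_time / @call_count
  end

  # Same formula as Stats#standard_deviation:
  #   sqrt((sum_of_squares - n * mean**2) / n)
  def standard_deviation
    return 0.0 if @call_count < 2
    spread = @sum_of_squares - @call_count * average**2
    return 0.0 if spread <= 0
    Math.sqrt(spread / @call_count)
  end
end

stats = TimingStats.new
[0.10, 0.12, 0.30].each { |t| stats.record(t) }
stats.average            # => ~0.173
stats.standard_deviation # => ~0.090

# ApdexStats reuses the same three accumulators as plain counters
# (satisfied, tolerating, failing), and the score is (s + t/2) / (s + t + f).
s, t, f = 9, 2, 1
apdex = (s + t / 2.0) / (s + t + f)  # => ~0.833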