newrelic_rpm 3.5.6.55 → 3.5.7.57.beta
- data.tar.gz.sig +0 -0
- data/CHANGELOG +9 -0
- data/lib/new_relic/agent/instrumentation/controller_instrumentation.rb +3 -5
- data/lib/new_relic/agent/instrumentation/queue_time.rb +42 -198
- data/lib/new_relic/version.rb +1 -1
- data/test/multiverse/suites/rails/queue_time_test.rb +2 -2
- data/test/multiverse/suites/sinatra/sinatra_test.rb +2 -6
- data/test/new_relic/agent/agent_test_controller_test.rb +9 -47
- data/test/new_relic/agent/instrumentation/controller_instrumentation_test.rb +16 -10
- data/test/new_relic/agent/instrumentation/queue_time_test.rb +53 -359
- metadata +10 -35
- metadata.gz.sig +0 -0
data.tar.gz.sig
CHANGED
Binary file
data/CHANGELOG
CHANGED
@@ -1,6 +1,15 @@
 
 # New Relic Ruby Agent Release Notes #
 
+## v3.5.7 ##
+
+* Resolved some issues with tracking of frontend queue time, particularly
+  when the agent is running on an app hosted on Heroku. The agent will now
+  more reliably parse the headers described in
+  https://newrelic.com/docs/features/tracking-front-end-time and will
+  automatically detect whether the times provided are in seconds,
+  milliseconds or microseconds.
+
 ## v3.5.6 ##
 
 * Use HTTPS by default
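The unit auto-detection described in this note works by first assuming the header value is in microseconds, then milliseconds, then seconds, and keeping the first interpretation that lands after 2000-01-01 UTC. A minimal standalone sketch of that idea (illustration only, not the agent's own code, though it mirrors the parse_timestamp helper added to queue_time.rb below):

    EARLIEST_ACCEPTABLE_TIMESTAMP = 946_684_800 # 2000-01-01 UTC, same cutoff the new QueueTime module uses

    def guess_time(raw_value)
      cut_off = Time.at(EARLIEST_ACCEPTABLE_TIMESTAMP)
      # Try treating the value as microseconds, then milliseconds, then seconds;
      # the first interpretation that yields a plausible (post-2000) time wins.
      [1_000_000, 1_000, 1].map { |d| Time.at(raw_value.to_f / d) }.find { |t| t > cut_off }
    end

    now = Time.now.to_f
    guess_time(now)              # value in seconds      -> ~Time.now
    guess_time(now * 1_000)      # value in milliseconds -> ~Time.now
    guess_time(now * 1_000_000)  # value in microseconds -> ~Time.now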
data/lib/new_relic/agent/instrumentation/controller_instrumentation.rb
CHANGED
@@ -438,22 +438,20 @@ module NewRelic
       end
     end
 
-    include NewRelic::Agent::Instrumentation::QueueTime
-
     # Return a Time instance representing the upstream start time.
     # now is a Time instance to fall back on if no other candidate
     # for the start time is found.
     def _detect_upstream_wait(now)
-      queue_start = nil
       if newrelic_request_headers
-        queue_start =
+        queue_start = QueueTime.parse_frontend_timestamp(newrelic_request_headers)
+        QueueTime.record_frontend_metrics(queue_start, now) if queue_start
       end
       queue_start || now
    rescue => e
      ::NewRelic::Agent.logger.error("Error detecting upstream wait time:", e)
      now
    end
-
+
    # returns the NewRelic::MethodTraceStats object associated
    # with the dispatcher time measurement
    def _dispatch_stat
data/lib/new_relic/agent/instrumentation/queue_time.rb
CHANGED
@@ -1,210 +1,54 @@
 module NewRelic
   module Agent
     module Instrumentation
+      # https://newrelic.com/docs/features/tracking-front-end-time
+      # Record queue time metrics based on any of three headers
+      # which can be set on the request.
       module QueueTime
-        unless defined?(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        def parse_server_time_from(env)
-          end_time = parse_end_time(env)
-          matches = get_matches_from_header(MAIN_HEADER, env)
-
-          record_individual_server_stats(end_time, matches)
-          record_rollup_server_stat(end_time, matches)
-        end
-
-        def parse_middleware_time_from(env)
-          end_time = parse_end_time(env)
-          matches = get_matches_from_header(MIDDLEWARE_HEADER, env)
-
-          record_individual_middleware_stats(end_time, matches)
-          oldest_time = record_rollup_middleware_stat(end_time, matches)
-          # notice this bit: we reset the end time to the earliest
-          # middleware tag so that other frontend metrics don't
-          # include this time.
-          add_end_time_header(oldest_time, env)
-          oldest_time
-        end
-
-        def parse_queue_time_from(env)
-          oldest_time = nil
-          end_time = parse_end_time(env)
-          alternate_length = check_for_alternate_queue_length(env)
-          if alternate_length
-            # skip all that fancy-dan stuff
-            NewRelic::Agent.get_stats(ALL_QUEUE_METRIC).trace_call(alternate_length)
-            oldest_time = (end_time - alternate_length) # should be a time
-          else
-            matches = get_matches_from_header(QUEUE_HEADER, env)
-            oldest_time = record_rollup_queue_stat(end_time, matches)
+        unless defined?(REQUEST_START_HEADER)
+          REQUEST_START_HEADER = 'HTTP_X_REQUEST_START'
+          QUEUE_START_HEADER = 'HTTP_X_QUEUE_START'
+          QUEUE_DURATION_HEADER = 'HTTP_X_QUEUE_TIME'
+          MIDDLEWARE_START_HEADER = 'HTTP_X_MIDDLEWARE_START'
+          ALL_QUEUE_METRIC = 'WebFrontend/QueueTime'
+          # any timestamps before this are thrown out and the parser
+          # will try again with a larger unit (2000/1/1 UTC)
+          EARLIEST_ACCEPTABLE_TIMESTAMP = 946684800
+        end
+
+        module_function
+
+        def parse_frontend_timestamp(headers)
+          candidate_headers = [ REQUEST_START_HEADER, QUEUE_START_HEADER,
+                                MIDDLEWARE_START_HEADER ]
+          candidate_headers.map do |header|
+            if headers[header]
+              parse_timestamp(timestamp_string_from_header_value(headers[header]))
+            end
+          end.compact.min
+        end
+
+        def record_frontend_metrics(start_time, now=Time.now)
+          NewRelic::Agent.instance.stats_engine.get_stats(ALL_QUEUE_METRIC) \
+            .record_data_point((now - start_time).to_f)
+        end
+
+        def timestamp_string_from_header_value(value)
+          case value
+          when /^\s*([\d+\.]+)\s*$/ then $1
+          # following regexp intentionally unanchored to handle
+          # (ie ignore) leading server names
+          when /t=([\d+\.]+)/ then $1
           end
-          # notice this bit: we reset the end time to the earliest
-          # queue tag or the start time minus the queue time so that
-          # other frontend metrics don't include this time.
-          add_end_time_header(oldest_time, env)
-          oldest_time
-        end
-
-        def check_for_alternate_queue_length(env)
-          heroku_length = check_for_heroku_queue_length(env)
-          return heroku_length if heroku_length
-          header = env[ALT_QUEUE_HEADER]
-          return nil unless header
-          (header.gsub('t=', '').to_i / 1_000_000.0)
-        end
-
-        def check_for_heroku_queue_length(env)
-          header = env[HEROKU_QUEUE_HEADER]
-          return nil unless header
-          (header.gsub(/[^0-9]/, '').to_i / 1_000.0)
-        end
-
-        def get_matches_from_header(header, env)
-          return [] if env.nil?
-          get_matches(env[header]).map do |name, time|
-            convert_to_name_time_pair(name, time.sub('.', ''))
-          end
-        end
-
-        def get_matches(string)
-          string.to_s.scan(HEADER_REGEX)
-        end
-
-        def convert_to_name_time_pair(name, time)
-          [name, convert_from_microseconds(time.to_i)]
-        end
-
-        def record_individual_stat_of_type(type, end_time, matches)
-          matches = matches.sort_by {|name, time| time }
-          matches.reverse!
-          matches.inject(end_time) {|end_time, pair|
-            name, time = pair
-            self.send(type, name, time, end_time) if name
-            time
-          }
-        end
-
-        # goes through the list of servers and records each one in
-        # reverse order, subtracting the time for each successive
-        # server from the earlier ones in the list.
-        # an example because it's complicated:
-        # start data:
-        # [['a', Time.at(1000)], ['b', Time.at(1001)]], start time: Time.at(1002)
-        # initial run: Time.at(1002), ['b', Time.at(1001)]
-        # next: Time.at(1001), ['a', Time.at(1000)]
-        # see tests for more
-        def record_individual_server_stats(end_time, matches) # (Time, [[String, Time]]) -> nil
-          record_individual_stat_of_type(:record_server_time_for, end_time, matches)
         end
 
-        def
-
-
-
-
-        def record_rollup_server_stat(end_time, matches) # (Time, [String, Time]) -> nil
-          record_rollup_stat_of_type(ALL_SERVER_METRIC, end_time, matches)
-        end
-
-        def record_rollup_middleware_stat(end_time, matches)
-          record_rollup_stat_of_type(ALL_MIDDLEWARE_METRIC, end_time, matches)
-        end
-
-        def record_rollup_queue_stat(end_time, matches)
-          record_rollup_stat_of_type(ALL_QUEUE_METRIC, end_time, matches)
-        end
-
-        def record_rollup_stat_of_type(metric, end_time, matches)
-          oldest_time = find_oldest_time(matches) || end_time
-          record_time_stat(metric, oldest_time, end_time)
-          oldest_time
-        end
-
-        # searches for the first server to touch a request
-        def find_oldest_time(matches) # [[String, Time]] -> Time
-          matches.map do |name, time|
-            time
-          end.min
-        end
-
-        # basically just assembles the metric name
-        def record_server_time_for(name, start_time, end_time) # (Maybe String, Time, Time) -> nil
-          record_time_stat(SERVER_METRIC + name, start_time, end_time) if name
-        end
-
-        def record_middleware_time_for(name, start_time, end_time)
-          record_time_stat(MIDDLEWARE_METRIC + name, start_time, end_time)
-        end
-
-        # Checks that the time is not negative, and does the actual
-        # data recording
-        def record_time_stat(name, start_time, end_time) # (String, Time, Time) -> nil
-          total_time = end_time - start_time
-          if total_time < 0
-            raise "should not provide an end time less than start time: #{end_time.strftime('%s.%N')} is less than #{start_time.strftime('%s.%N')}. total time is #{total_time}."
-          else
-            NewRelic::Agent.get_stats(name).trace_call(total_time)
-          end
-        end
-
-        def add_end_time_header(end_time, env) # (Time, Env) -> nil
-          return unless end_time
-          env[APP_HEADER] = "t=#{convert_to_microseconds(end_time)}"
-        end
-
-        def parse_end_time(env)
-          header = env[APP_HEADER]
-          return Time.now unless header
-          convert_from_microseconds(header.gsub('t=', '').to_i)
-        end
-
-        # convert a time to the value provided by the header, for convenience
-        def convert_to_microseconds(time) # Time -> Int
-          raise TypeError.new('Cannot convert a non-time into microseconds') unless time.is_a?(Time) || time.is_a?(Numeric)
-          return time if time.is_a?(Numeric)
-          (time.to_f * 1_000_000).to_i
-        end
-
-        # convert a time from the header value (time in microseconds)
-        # into a ruby time object
-        def convert_from_microseconds(int) # Int -> Time
-          raise TypeError.new('Cannot convert a non-number into a time') unless int.is_a?(Time) || int.is_a?(Numeric)
-          return int if int.is_a?(Time)
-          Time.at((int / 1_000_000.0))
+        def parse_timestamp(string)
+          cut_off = Time.at(EARLIEST_ACCEPTABLE_TIMESTAMP)
+          [1_000_000, 1_000, 1].map do |divisor|
+            Time.at(string.to_f / divisor)
+          end.find {|candidate| candidate > cut_off }
         end
       end
     end
   end
 end
-
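A rough usage sketch of the two helpers this rewrite exposes, parse_frontend_timestamp and record_frontend_metrics. The header value and the 0.25 s offset are invented for illustration; the require path matches the file shown above, although in a real application the agent is normally loaded via require 'newrelic_rpm', and record_frontend_metrics additionally needs a started agent instance:

    require 'new_relic/agent/instrumentation/queue_time'

    queue_time = NewRelic::Agent::Instrumentation::QueueTime

    # A Rack env as a frontend (nginx, the Heroku router, or a middleware)
    # might decorate it -- here the timestamp happens to be in microseconds.
    env = { 'HTTP_X_REQUEST_START' => "t=#{((Time.now.to_f - 0.25) * 1_000_000).to_i}" }

    queue_start = queue_time.parse_frontend_timestamp(env)
    puts Time.now - queue_start   # => roughly 0.25

    # Inside the agent, _detect_upstream_wait (see controller_instrumentation.rb
    # above) then calls queue_time.record_frontend_metrics(queue_start, now) to
    # record that interval under 'WebFrontend/QueueTime'.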
data/lib/new_relic/version.rb
CHANGED
data/test/multiverse/suites/rails/queue_time_test.rb
CHANGED
@@ -43,8 +43,8 @@ class QueueTimeTest < ActionDispatch::IntegrationTest
     assert extract_queue_time_from_response > 0, "Queue time was missing or zero"
   end
 
-  def get_queued(header="
-    get('/queue/queued', nil,
+  def get_queued(header="HTTP_X_REQUEST_START")
+    get('/queue/queued', nil,
         header => "t=#{(Time.now.to_i * 1_000_000) - 1_000}")
   end
 
data/test/multiverse/suites/sinatra/sinatra_test.rb
CHANGED
@@ -57,12 +57,8 @@ class SinatraTest < Test::Unit::TestCase
   end
 
   def test_queue_time_headers_are_passed_to_agent
-    get '/user/login', {}, {
-
-    assert metric_names.include?("Middleware/all")
-    assert metric_names.include?("WebFrontend/QueueTime")
-    assert metric_names.include?("WebFrontend/WebServer/all")
-    assert ::NewRelic::Agent.agent.stats_engine.get_stats("WebFrontend/WebServer/all")
+    get '/user/login', {}, { 'HTTP_X_REQUEST_START' => 't=1360973845' }
+    assert ::NewRelic::Agent.agent.stats_engine.lookup_stats('WebFrontend/QueueTime')
   end
 
   def test_shown_errors_get_caught
data/test/new_relic/agent/agent_test_controller_test.rb
CHANGED
@@ -84,7 +84,7 @@ class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase
 
   def test_new_queue_integration
     # make this test deterministic
-    Time.stubs(:now => Time.at(
+    Time.stubs(:now => Time.at(1360973845))
 
     NewRelic::Agent::AgentTestController.clear_headers
     engine.clear_stats
@@ -95,35 +95,9 @@ class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase
     check_metric_time('WebFrontend/QueueTime', 1, 0.1)
   end
 
-
-  def test_new_middleware_integration
-    # make this test deterministic
-    Time.stubs(:now => Time.at(2))
-
-    engine.clear_stats
-    start = ((Time.now - 1).to_f * 1_000_000).to_i
-    NewRelic::Agent::AgentTestController.set_some_headers 'HTTP_X_MIDDLEWARE_START'=> "t=#{start}"
-    get :index
-
-    check_metric_time('Middleware/all', 1, 0.1)
-  end
-
-  def test_new_server_time_integration
-    # make this test deterministic
-    Time.stubs(:now => Time.at(2))
-
-    NewRelic::Agent::AgentTestController.clear_headers
-    engine.clear_stats
-    start = ((Time.now - 1).to_f * 1_000_000).to_i
-    NewRelic::Agent::AgentTestController.set_some_headers 'HTTP_X_REQUEST_START'=> "t=#{start}"
-    get :index
-
-    check_metric_time('WebFrontend/WebServer/all', 1, 0.1)
-  end
-
   def test_new_frontend_work_integration
     # make this test deterministic
-    Time.stubs(:now => Time.at(
+    Time.stubs(:now => Time.at(1360973845))
 
     engine.clear_stats
     times = [Time.now - 3, Time.now - 2, Time.now - 1]
@@ -132,10 +106,7 @@ class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase
       'HTTP_X_REQUEST_START'=> "t=#{times[0]}", 'HTTP_X_QUEUE_START' => "t=#{times[1]}", 'HTTP_X_MIDDLEWARE_START' => "t=#{times[2]}"})
     get :index
 
-
-    check_metric_time('WebFrontend/WebServer/all', 1, 0.1)
-    check_metric_time('Middleware/all', 1, 0.1)
-    check_metric_time('WebFrontend/QueueTime', 1, 0.1)
+    check_metric_time('WebFrontend/QueueTime', 3, 0.1)
   end
 
   def test_render_inline
@@ -160,10 +131,7 @@ class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase
              'Apdex/new_relic/agent/agent_test/action_with_error',
              'HttpDispatcher',
              'Controller/new_relic/agent/agent_test/action_with_error',
-             'Errors/all'
-             'Middleware/all',
-             'WebFrontend/WebServer/all',
-             'WebFrontend/QueueTime']
+             'Errors/all']
 
     compare_metrics metrics, engine.metrics.reject{|m| m.index('Response')==0 || m.index('CPU')==0}
     assert_equal 1, engine.get_stats_no_scope("Controller/new_relic/agent/agent_test/action_with_error").call_count
@@ -184,10 +152,7 @@ class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase
             'Apdex/new_relic/agent/agent_test/action_with_error',
            'HttpDispatcher',
            'Controller/new_relic/agent/agent_test/action_with_error',
-           'Errors/all'
-           'WebFrontend/QueueTime',
-           'Middleware/all',
-           'WebFrontend/WebServer/all']
+           'Errors/all']
 
     compare_metrics metrics, engine.metrics.reject{|m| m.index('Response')==0 || m.index('CPU')==0}
     assert_equal 1, engine.get_stats_no_scope("Controller/new_relic/agent/agent_test/action_with_error").call_count
@@ -208,10 +173,7 @@ class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase
            'Apdex/new_relic/agent/agent_test/action_with_before_filter_error',
            'HttpDispatcher',
            'Controller/new_relic/agent/agent_test/action_with_before_filter_error',
-           'Errors/all'
-           'WebFrontend/QueueTime',
-           'Middleware/all',
-           'WebFrontend/WebServer/all']
+           'Errors/all']
 
     compare_metrics metrics, engine.metrics.reject{|m| m.index('Response')==0 || m.index('CPU')==0 || m.index('GC')==0}
     assert_equal 1, engine.get_stats_no_scope("Controller/new_relic/agent/agent_test/action_with_before_filter_error").call_count
@@ -344,7 +306,7 @@ class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase
 
   def test_queue_headers_apache
     # make this test deterministic
-    Time.stubs(:now => Time.at(
+    Time.stubs(:now => Time.at(1360973845))
 
     NewRelic::Agent::AgentTestController.clear_headers
     engine.clear_stats
@@ -363,7 +325,7 @@ class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase
   end
   def test_queue_headers_heroku
     # make this test deterministic
-    Time.stubs(:now => Time.at(
+    Time.stubs(:now => Time.at(1360973845))
 
     engine.clear_stats
     NewRelic::Agent::AgentTestController.clear_headers
@@ -383,7 +345,7 @@ class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase
 
   def test_queue_headers_heroku_queue_length
     # make this test deterministic
-    Time.stubs(:now => Time.at(
+    Time.stubs(:now => Time.at(1360973845))
 
     engine.clear_stats
     NewRelic::Agent::AgentTestController.clear_headers
data/test/new_relic/agent/instrumentation/controller_instrumentation_test.rb
CHANGED
@@ -5,30 +5,36 @@ class NewRelic::Agent::Instrumentation::ControllerInstrumentationTest < Test::Un
     include NewRelic::Agent::Instrumentation::ControllerInstrumentation
   end
 
+  def setup
+    @object = TestObject.new
+  end
+
   def test_detect_upstream_wait_basic
     start_time = Time.now
     object = TestObject.new
     # should return the start time above by default
-    object.expects(:newrelic_request_headers).returns({:request => 'headers'}).twice
-
-
+    @object.expects(:newrelic_request_headers).returns({:request => 'headers'}).twice
+    NewRelic::Agent::Instrumentation::QueueTime.expects(:parse_frontend_timestamp) \
+      .with({:request => 'headers'}).returns(start_time)
+    assert_equal(start_time, @object.send(:_detect_upstream_wait, start_time))
   end
 
   def test_detect_upstream_wait_with_upstream
     start_time = Time.now
     runs_at = start_time + 1
-    object = TestObject.new
-    object.expects(:newrelic_request_headers).returns(true).twice
-
-    assert_equal(start_time, object.send(:_detect_upstream_wait, runs_at))
+    @object = TestObject.new
+    @object.expects(:newrelic_request_headers).returns(true).twice
+    NewRelic::Agent::Instrumentation::QueueTime.expects(:parse_frontend_timestamp).returns(start_time)
+    assert_equal(start_time, @object.send(:_detect_upstream_wait, runs_at))
   end
 
   def test_detect_upstream_wait_swallows_errors
     start_time = Time.now
     object = TestObject.new
     # should return the start time above when an error is raised
-    object.expects(:newrelic_request_headers).returns({:request => 'headers'}).twice
-
-
+    @object.expects(:newrelic_request_headers).returns({:request => 'headers'}).twice
+    NewRelic::Agent::Instrumentation::QueueTime.expects(:parse_frontend_timestamp) \
+      .with({:request => 'headers'}).raises("an error")
+    assert_equal(start_time, @object.send(:_detect_upstream_wait, start_time))
   end
 end
data/test/new_relic/agent/instrumentation/queue_time_test.rb
CHANGED
@@ -1,396 +1,90 @@
 require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', '..', 'test_helper'))
 class NewRelic::Agent::Instrumentation::QueueTimeTest < Test::Unit::TestCase
-
-  include NewRelic::Agent::Instrumentation::QueueTime
+  include NewRelic::Agent::Instrumentation
 
-  def
-
+  def test_parse_frontend_timestamp_given_queue_start_header
+    header = { 'HTTP_X_QUEUE_START' => format_header_time(Time.now - 60) }
+    assert_in_delta(seconds_ago(60), QueueTime.parse_frontend_timestamp(header), 0.1)
   end
 
-  def
-
+  def test_parse_frontend_timestamp_given_request_start_header
+    header = { 'HTTP_X_REQUEST_START' => format_header_time(Time.now - 60) }
+    assert_in_delta(seconds_ago(60), QueueTime.parse_frontend_timestamp(header), 0.1)
   end
 
-  def
-
-
-    server_start = Time.at(1000)
-    Time.stubs(:now).returns(Time.at(1003)) # whee!
-    self.expects(:add_end_time_header).with(Time.at(1003), {:env => 'hash'})
-    # ordering is important here, unfortunately, the mocks don't
-    # support that kind of checking.
-    self.expects(:parse_middleware_time_from).with({:env => 'hash'}).returns(middleware_start)
-    self.expects(:parse_queue_time_from).with({:env => 'hash'}).returns(queue_start)
-    self.expects(:parse_server_time_from).with({:env => 'hash'}).returns(server_start)
-    assert_equal(server_start, parse_frontend_headers({:env => 'hash'}), "should return the oldest start time")
+  def test_parse_frontend_timestamp_given_middleware_start_header
+    header = { 'HTTP_X_MIDDLEWARE_START' => format_header_time(Time.now - 60) }
+    assert_in_delta(seconds_ago(60), QueueTime.parse_frontend_timestamp(header), 0.1)
   end
 
-  def
-
-
-
-    Time.stubs(:now).returns(Time.at(1003)) # whee!
-    self.expects(:add_end_time_header).with(Time.at(1003), {:env => 'hash'})
-    # ordering is important here, unfortunately, the mocks don't
-    # support that kind of checking.
-    self.expects(:parse_middleware_time_from).with({:env => 'hash'}).returns(middleware_start)
-    self.expects(:parse_queue_time_from).with({:env => 'hash'}).returns(queue_start)
-    self.expects(:parse_server_time_from).with({:env => 'hash'}).returns(server_start)
-    assert_equal(queue_start, parse_frontend_headers({:env => 'hash'}), "should return the oldest start time")
-  end
-
-  def test_all_combined_frontend_headers
-    env = {}
-    env[MAIN_HEADER] = "t=#{convert_to_microseconds(Time.at(1000))}"
-    env[QUEUE_HEADER] = "t=#{convert_to_microseconds(Time.at(1001))}"
-    env[MIDDLEWARE_HEADER] = "t=#{convert_to_microseconds(Time.at(1002))}"
-
-    env[APP_HEADER] = "t=#{convert_to_microseconds(Time.at(1003))}"
-
-    assert_calls_metrics('WebFrontend/WebServer/all', 'WebFrontend/QueueTime', 'Middleware/all') do
-      assert_equal(Time.at(1002), parse_middleware_time_from(env))
-      assert_equal(Time.at(1001), parse_queue_time_from(env))
-      assert_equal(Time.at(1000), parse_server_time_from(env))
-    end
-
-    check_metric_time('WebFrontend/WebServer/all', 1.0, 0.001)
-    check_metric_time('WebFrontend/QueueTime', 1.0, 0.001)
-    check_metric_time('Middleware/all', 1.0, 0.001)
-  end
-
-  def test_combined_middleware_and_queue
-    env = {}
-    env[QUEUE_HEADER] = "t=#{convert_to_microseconds(Time.at(1000))}"
-    env[MIDDLEWARE_HEADER] = "t=#{convert_to_microseconds(Time.at(1001))}"
-    create_test_start_time(env)
-
-    assert_calls_metrics('Middleware/all', 'WebFrontend/QueueTime') do
-      parse_middleware_time_from(env)
-      assert_equal(Time.at(1000), parse_queue_time_from(env))
-    end
-
-    check_metric_time('Middleware/all', 1.0, 0.001)
-    check_metric_time('WebFrontend/QueueTime', 1.0, 0.001)
-  end
-
-  def test_combined_queue_and_server
-    env = {}
-    env[MAIN_HEADER] = "t=#{convert_to_microseconds(Time.at(1000))}"
-    env[QUEUE_HEADER] = "t=#{convert_to_microseconds(Time.at(1001))}"
-    create_test_start_time(env)
-
-    assert_calls_metrics('WebFrontend/WebServer/all', 'WebFrontend/QueueTime') do
-      assert_equal(Time.at(1001), parse_queue_time_from(env))
-      parse_server_time_from(env)
-    end
-
-    check_metric_time('WebFrontend/WebServer/all', 1.0, 0.001)
-    check_metric_time('WebFrontend/QueueTime', 1.0, 0.001)
-  end
-
-  def test_combined_middleware_and_server
-    env = {}
-    env[MAIN_HEADER] = "t=#{convert_to_microseconds(Time.at(1000))}"
-    env[MIDDLEWARE_HEADER] = "t=#{convert_to_microseconds(Time.at(1001))}"
-    create_test_start_time(env)
-
-    assert_calls_metrics('WebFrontend/WebServer/all', 'Middleware/all') do
-      parse_middleware_time_from(env)
-      parse_server_time_from(env)
-    end
-
-    check_metric_time('WebFrontend/WebServer/all', 1.0, 0.001)
-    check_metric_time('Middleware/all', 1.0, 0.001)
-  end
-
-  # initial base case, a router and a static content server
-  def test_parse_server_time_from_initial
-    env = {}
-    create_test_start_time(env)
-    time1 = convert_to_microseconds(Time.at(1000))
-    time2 = convert_to_microseconds(Time.at(1001))
-    env['HTTP_X_REQUEST_START'] = "servera t=#{time1}, serverb t=#{time2}"
-    assert_calls_metrics('WebFrontend/WebServer/all', 'WebFrontend/WebServer/servera', 'WebFrontend/WebServer/serverb') do
-      parse_server_time_from(env)
-    end
-    check_metric_time('WebFrontend/WebServer/all', 2.0, 0.1)
-    check_metric_time('WebFrontend/WebServer/servera', 1.0, 0.1)
-    check_metric_time('WebFrontend/WebServer/serverb', 1.0, 0.1)
-  end
-
-  def test_parse_server_time_accepting_milliseconds_resolution_separator_char
-    env = {'HTTP_X_REQUEST_START' => "t=1000.000000"}
-    create_test_start_time(env)
-    env['HTTP_X_QUEUE_START'] = "t=1001.000000"
-    assert_calls_metrics('WebFrontend/WebServer/all') do
-      assert_equal(Time.at(1000), parse_server_time_from(env))
-    end
-    assert_calls_metrics('WebFrontend/QueueTime') do
-      assert_equal(Time.at(1001), parse_queue_time_from(env))
-    end
-    check_metric_time('WebFrontend/WebServer/all', 2.0, 0.1)
-    check_metric_time('WebFrontend/QueueTime', 1.0, 0.1)
-  end
-
-  # test for backwards compatibility with old header
-  def test_parse_server_time_from_with_no_server_name
-    env = {'HTTP_X_REQUEST_START' => "t=#{convert_to_microseconds(Time.at(1001))}"}
-    create_test_start_time(env)
-    assert_calls_metrics('WebFrontend/WebServer/all') do
-      parse_server_time_from(env)
-    end
-    check_metric_time('WebFrontend/WebServer/all', 1.0, 0.1)
-  end
-
-  def test_parse_server_time_from_with_bad_header
-    env = {'HTTP_X_REQUEST_START' => 't=t=t=t='}
-    create_test_start_time(env)
-    assert_calls_metrics('WebFrontend/WebServer/all') do
-      parse_server_time_from(env)
-    end
-  end
-
-  def test_parse_server_time_from_with_no_header
-    assert_calls_metrics('WebFrontend/WebServer/all') do
-      parse_server_time_from({})
-    end
-  end
-
-  def test_parse_middleware_time
-    env = {}
-    create_test_start_time(env)
-    time1 = convert_to_microseconds(Time.at(1000))
-    time2 = convert_to_microseconds(Time.at(1001))
-
-    env['HTTP_X_MIDDLEWARE_START'] = "base t=#{time1}, second t=#{time2}"
-    assert_calls_metrics('Middleware/all', 'Middleware/base', 'Middleware/second') do
-      parse_middleware_time_from(env)
-    end
-    check_metric_time('Middleware/all', 2.0, 0.1)
-    check_metric_time('Middleware/base', 1.0, 0.1)
-    check_metric_time('Middleware/second', 1.0, 0.1)
-  end
-
-  def test_parse_queue_time
-    env = {}
-    create_test_start_time(env)
-    time1 = convert_to_microseconds(Time.at(1000))
-
-    env['HTTP_X_QUEUE_START'] = "t=#{time1}"
-    assert_calls_metrics('WebFrontend/QueueTime') do
-      assert_equal(Time.at(1000), parse_queue_time_from(env))
-    end
+  def test_parse_frontend_timestamp_from_earliest_header
+    headers = { 'HTTP_X_REQUEST_START' => format_header_time(Time.now - 63),
+                'HTTP_X_QUEUE_START' => format_header_time(Time.now - 62),
+                'HTTP_X_MIDDLEWARE_START' => format_header_time(Time.now - 61)}
 
-
+    assert_in_delta(seconds_ago(63), QueueTime.parse_frontend_timestamp(headers), 0.1)
   end
 
-  def
-
-
-
-    assert_calls_metrics('WebFrontend/QueueTime') do
-      assert_equal(Time.at(1001), parse_queue_time_from(env))
-    end
+  def test_parse_frontend_timestamp_from_earliest_header_out_of_order
+    headers = { 'HTTP_X_MIDDLEWARE_START' => format_header_time(Time.now - 63),
+                'HTTP_X_REQUEST_START' => format_header_time(Time.now - 62),
+                'HTTP_X_QUEUE_START' => format_header_time(Time.now - 61) }
 
-
+    assert_in_delta(seconds_ago(63), QueueTime.parse_frontend_timestamp(headers), 0.1)
   end
 
-  def
-
-
-    env['HTTP_X_QUEUE_START'] = 't=1' # obviously incorrect
-    env['HTTP_X_QUEUE_TIME'] = '1000000'
-    assert_calls_metrics('WebFrontend/QueueTime') do
-      assert_equal(Time.at(1001), parse_queue_time_from(env))
-    end
-
-    # alternate queue should override normal header
-    check_metric_time('WebFrontend/QueueTime', 1.0, 0.001)
+  def test_parse_frontend_timestamp_from_header_in_seconds
+    header = { 'HTTP_X_QUEUE_START' => "t=#{Time.now.to_f - 60}" }
+    assert_in_delta(seconds_ago(60), QueueTime.parse_frontend_timestamp(header), 0.1)
   end
 
-  def
-
-
-    env['HTTP_X_HEROKU_QUEUE_WAIT_TIME'] = '1000'
-    assert_calls_metrics('WebFrontend/QueueTime') do
-      assert_equal(Time.at(1001), parse_queue_time_from(env))
-    end
-
-    check_metric_time('WebFrontend/QueueTime', 1.0, 0.001)
+  def test_parse_frontend_timestamp_from_header_in_milliseconds
+    header = { 'HTTP_X_QUEUE_START' => "t=#{(Time.now.to_f - 60) * 1_000}" }
+    assert_in_delta(seconds_ago(60), QueueTime.parse_frontend_timestamp(header), 0.1)
  end
 
-  def
-
-
-
-    env['HTTP_X_HEROKU_QUEUE_WAIT_TIME'] = '1000'
-    assert_calls_metrics('WebFrontend/QueueTime') do
-      assert_equal(Time.at(1001), parse_queue_time_from(env))
-    end
-
-    # heroku queue should override alternate queue
-    check_metric_time('WebFrontend/QueueTime', 1.0, 0.001)
+  def test_parse_frontend_timestamp_from_header_with_multiple_servers
+    now = Time.now.to_f
+    header = { 'HTTP_X_QUEUE_START' => "servera t=#{now - 60}, serverb t=#{now - 30}" }
+    assert_in_delta(seconds_ago(60), QueueTime.parse_frontend_timestamp(header), 0.1)
   end
 
-
-
-
-    assert_calls_metrics('WebFrontend/WebServer/foo', 'WebFrontend/WebServer/bar') do
-      record_individual_server_stats(Time.at(1002), matches)
-    end
-    check_metric_time('WebFrontend/WebServer/foo', 1.0, 0.1)
-    check_metric_time('WebFrontend/WebServer/bar', 1.0, 0.1)
+  def test_parse_frontend_timestamp_from_header_missing_t_equals
+    header = { 'HTTP_X_REQUEST_START' => (Time.now.to_f - 60).to_s }
+    assert_in_delta(seconds_ago(60), QueueTime.parse_frontend_timestamp(header), 0.1)
   end
 
-  def
-
-
+  def test_recording_queue_time_metric
+    assert_metric_value_in_delta(60, 'WebFrontend/QueueTime', 0.1) do
+      QueueTime.record_frontend_metrics(Time.at(Time.now.to_f - 60))
     end
-    check_metric_time('WebFrontend/WebServer/all', 1.0, 0.1)
   end
 
-  def
-
-      record_rollup_server_stat(Time.at(1001), [])
-    end
-    check_metric_time('WebFrontend/WebServer/all', 0.0, 0.001)
-  end
+  def test_parsing_malformed_header
+    header = { 'HTTP_X_REQUEST_START' => 'gobledy gook' }
 
-
-    assert_calls_metrics('Middleware/all') do
-      record_rollup_middleware_stat(Time.at(1001), [['a', Time.at(1000)]])
-    end
-    check_metric_time('Middleware/all', 1.0, 0.1)
+    assert_nil QueueTime.parse_frontend_timestamp(header)
   end
 
-  def
-
-
-
-
+  def test_parse_timestamp_can_identify_unit
+    now = Time.now.to_f
+    assert_in_delta(now, QueueTime.parse_timestamp(now.to_f.to_s).to_f, 0.001)
+    assert_in_delta(now, QueueTime.parse_timestamp((now.to_f * 1_000).to_s).to_f, 0.001)
+    assert_in_delta(now, QueueTime.parse_timestamp((now.to_f * 1_000_000).to_s).to_f, 0.001)
   end
 
-  def
-
-      record_rollup_queue_stat(Time.at(1001), [[nil, Time.at(1000)]])
-    end
-    check_metric_time('WebFrontend/QueueTime', 1.0, 0.1)
-  end
-
-  def test_record_rollup_queue_stat_no_data
-    assert_calls_metrics('WebFrontend/QueueTime') do
-      record_rollup_queue_stat(Time.at(1001), [])
-    end
-    check_metric_time('WebFrontend/QueueTime', 0.0, 0.001)
-  end
-
-  def test_find_oldest_time
-    test_array = [
-      ['c', Time.at(1002)],
-      ['a', Time.at(1000)],
-      ['b', Time.at(1001)],
-      ['d', Time.at(1000)],
-    ]
-    assert_equal(find_oldest_time(test_array), Time.at(1000),
-                 "Should be the oldest time in the array")
+  def format_header_time(time=Time.now)
+    "t=#{(time.to_f * 1_000_000).to_i}"
   end
 
-
-
-    name = 'foo'
-    time = Time.at(1000)
-    start_time = Time.at(1001)
-    self.expects(:record_time_stat).with('WebFrontend/WebServer/foo', time, start_time)
-    record_server_time_for(name, time, start_time)
+  def seconds_ago(seconds)
+    Time.at(Time.now.to_f - seconds)
   end
 
-  def
-
-
-
-
-    assert_raises(RuntimeError) do
-      record_time_stat('foo', Time.at(1001), Time.at(1000))
-    end
-  end
-
-  def test_record_time_stat_with_end_after_start
-    record_time_stat('WebFrontend/WebServer/foo', Time.at(1.0001), Time.at(1))
-  rescue RuntimeError => e
-    assert_match(/should not provide an end time less than start time/, e.message)
-  end
-
-  def test_convert_to_microseconds
-    assert_equal((1_000_000_000), convert_to_microseconds(Time.at(1000)), 'time at 1000 seconds past epoch should be 1,000,000,000 usec')
-    assert_equal 1_000_000_000, convert_to_microseconds(1_000_000_000), 'should not mess with a number if passed in'
-    assert_raises(TypeError) do
-      convert_to_microseconds('whoo yeah buddy')
-    end
-  end
-
-  def test_convert_from_microseconds
-    assert_equal Time.at(1000), convert_from_microseconds(1_000_000_000), 'time at 1,000,000,000 usec should be 1000 seconds after epoch'
-    assert_equal Time.at(1000), convert_from_microseconds(Time.at(1000)), 'should not mess with a time passed in'
-    assert_raises(TypeError) do
-      convert_from_microseconds('10000000000')
-    end
-  end
-
-  def test_add_end_time_header
-    env = {}
-    start_time = Time.at(1)
-    add_end_time_header(start_time, env)
-    assert_equal({'HTTP_X_APPLICATION_START' => "t=#{convert_to_microseconds(Time.at(1))}"}, env, "should add the header to the env hash")
-  end
-
-  def test_parse_end_time_base
-    env = {}
-    env['HTTP_X_APPLICATION_START'] = "t=#{convert_to_microseconds(Time.at(1))}"
-    start_time = parse_end_time(env)
-    assert_equal(Time.at(1), start_time, "should pull the correct start time from the app header")
-  end
-
-  def test_get_matches_from_header
-    env = {'A HEADER' => 't=1000000'}
-    self.expects(:convert_from_microseconds).with(1000000).returns(Time.at(1))
-    matches = get_matches_from_header('A HEADER', env)
-    assert_equal [[nil, Time.at(1)]], matches, "should pull the correct time from the string"
-  end
-
-  def test_convert_to_name_time_pair
-    name = :foo
-    time = "1000000"
-
-    pair = convert_to_name_time_pair(name, time)
-    assert_equal [:foo, Time.at(1)], pair
-  end
-
-  def test_get_matches
-    str = "servera t=1000000, serverb t=1000000"
-    matches = get_matches(str) # start a fire
-    assert_equal [['servera', '1000000'], ['serverb', '1000000']], matches
-  end
-
-  def test_matches_with_bad_data
-    str = "stephan is a dumb lol"
-    matches = get_matches(str)
-    assert_equal [], matches
-
-    str = "t=100"
-    matches = get_matches(str)
-    assert_equal [[nil, '100']], matches
-
-    str = nil
-    matches = get_matches(str)
-    assert_equal [], matches
-  end
-  # each server should be one second, and the total would be 2 seconds
-  def test_record_individual_middleware_stats
-    matches = [['foo', Time.at(1000)], ['bar', Time.at(1001)]]
-    assert_calls_metrics('Middleware/foo', 'Middleware/bar') do
-      record_individual_middleware_stats(Time.at(1002), matches)
-    end
-    check_metric_time('Middleware/foo', 1.0, 0.1)
-    check_metric_time('Middleware/bar', 1.0, 0.1)
+  def assert_metric_value_in_delta(expected, metric_name, delta)
+    stats_engine = NewRelic::Agent.instance.stats_engine
+    stats_engine.clear_stats
+    yield
+    assert_in_delta(expected, stats_engine.get_stats(metric_name).total_call_time, delta)
   end
 end
metadata
CHANGED
@@ -1,8 +1,8 @@
 --- !ruby/object:Gem::Specification
 name: newrelic_rpm
 version: !ruby/object:Gem::Version
-  version: 3.5.
-  prerelease:
+  version: 3.5.7.57.beta
+  prerelease: 9
 platform: ruby
 authors:
 - Jason Clark
@@ -41,7 +41,7 @@ cert_chain:
   cHUySWFQWE92bTNUOEc0TzZxWnZobkxoL1VpZW4rK0RqOGVGQmVjVFBvTThw
   VmpLM3BoNQpuL0V3dVpDY0U2Z2h0Q0NNCi0tLS0tRU5EIENFUlRJRklDQVRF
   LS0tLS0K
-date: 2013-02-
+date: 2013-02-18 00:00:00.000000000 Z
 dependencies: []
 description: ! 'New Relic is a performance management system, developed by New Relic,
 
@@ -470,38 +470,13 @@ files:
 - lib/new_relic/build.rb
 homepage: http://www.github.com/newrelic/rpm
 licenses: []
-post_install_message: ! "\n# New Relic Ruby Agent Release Notes #\n\n## v3.5.
-  \ *
-
-
-
-  \
-
-  \ feature.\n\n * Fix two Resque-related issues\n\n Fixes a possible hang on
-  exit of an instrumented Resque master process\n (https://github.com/defunkt/resque/issues/578),
-  as well as a file descriptor\n leak that could occur during startup of the Resque
-  master process.\n\n * Fix for error graph over 100%\n\n Some errors were double
-  counted toward the overall error total. This\n resulted in graphs with error
-  percentages over 100%. This duplication did\n not impact the specific error traces
-  captured, only the total metric.\n\n * Notice gracefully handled errors in Sinatra\n\n
-  \ When show_exceptions was set to false in Sinatra, errors weren't caught\n by
-  New Relic's error collector. Now handled errors also have the chance\n to get
-  reported back.\n\n * Ruby 2.0 compatibility fixes\n\n Ruby 2.0 no longer finds
-  protected methods by default, but will with a flag.\n http://tenderlovemaking.com/2012/09/07/protected-methods-and-ruby-2-0.html\n\n
-  \ Thanks Ravil Bayramgalin and Charlie Somerville for the fixes.\n\n * Auto-detect
-  Trinidad as dispatcher\n\n Code already existing for detecting Trinidad as a
-  dispatcher, but was only\n accessible via an ENV variable. This now auto-detects
-  on startup. Thanks\n Robert Rasmussen for catching that.\n\n * Coercion of types
-  in collector communication\n\n Certain metrics can be recorded with a Ruby Rational
-  type, which JSON\n serializes as a string rather than a floating point value.
-  We now treat\n coerce each outgoing value, and log issues before sending the
-  data.\n\n * Developer mode fix for chart error\n\n Added require to fix a NameError
-  in developer mode for summary page. Thanks\n to Ryan B. Harvey.\n\n * Don't
-  touch deprecated RAILS_ROOT if on Rails 3\n\n Under some odd startup conditions,
-  we would look for the RAILS_ROOT constant\n after failing to find the ::Rails.root
-  in a Rails 3 app, causing deprecation\n warnings. Thanks for Adrian Irving-Beer
-  for the fix.\n\nSee https://github.com/newrelic/rpm/blob/master/CHANGELOG for a
-  full list of\nchanges.\n"
+post_install_message: ! "\n# New Relic Ruby Agent Release Notes #\n\n## v3.5.7 ##\n\n
+  \ * Resolved some issues with tracking of frontend queue time, particularly\n when
+  the agent is running on an app hosted on Heroku. The agent will now\n more reliably
+  parse the headers described in\n https://newrelic.com/docs/features/tracking-front-end-time
+  and will\n automatically detect whether the times provided are in seconds,\n
+  \ milliseconds or microseconds.\n\nSee https://github.com/newrelic/rpm/blob/master/CHANGELOG
+  for a full list of\nchanges.\n"
 rdoc_options:
 - --line-numbers
 - --inline-source
metadata.gz.sig
CHANGED
Binary file