newrelic_rpm 3.5.7.59 → 3.5.8.64.beta

Files changed (99)
  1. data.tar.gz.sig +3 -2
  2. data/CHANGELOG +34 -3
  3. data/LICENSE +23 -0
  4. data/lib/new_relic/agent.rb +50 -3
  5. data/lib/new_relic/agent/agent.rb +40 -60
  6. data/lib/new_relic/agent/configuration/defaults.rb +9 -3
  7. data/lib/new_relic/agent/configuration/server_source.rb +4 -0
  8. data/lib/new_relic/agent/cross_app_monitor.rb +230 -0
  9. data/lib/new_relic/agent/cross_app_tracing.rb +274 -0
  10. data/lib/new_relic/agent/database.rb +28 -10
  11. data/lib/new_relic/agent/error_collector.rb +5 -0
  12. data/lib/new_relic/agent/event_listener.rb +4 -0
  13. data/lib/new_relic/agent/instrumentation/controller_instrumentation.rb +53 -34
  14. data/lib/new_relic/agent/instrumentation/metric_frame.rb +16 -3
  15. data/lib/new_relic/agent/instrumentation/net.rb +13 -11
  16. data/lib/new_relic/agent/instrumentation/resque.rb +10 -10
  17. data/lib/new_relic/agent/instrumentation/sinatra.rb +19 -9
  18. data/lib/new_relic/agent/new_relic_service.rb +63 -9
  19. data/lib/new_relic/agent/pipe_service.rb +8 -12
  20. data/lib/new_relic/agent/rules_engine.rb +72 -0
  21. data/lib/new_relic/agent/shim_agent.rb +0 -1
  22. data/lib/new_relic/agent/sql_sampler.rb +3 -2
  23. data/lib/new_relic/agent/stats.rb +149 -0
  24. data/lib/new_relic/agent/stats_engine.rb +9 -0
  25. data/lib/new_relic/agent/stats_engine/gc_profiler.rb +1 -24
  26. data/lib/new_relic/agent/stats_engine/metric_stats.rb +84 -185
  27. data/lib/new_relic/agent/stats_engine/stats_hash.rb +58 -0
  28. data/lib/new_relic/agent/stats_engine/transactions.rb +10 -2
  29. data/lib/new_relic/agent/transaction_info.rb +31 -6
  30. data/lib/new_relic/agent/transaction_sample_builder.rb +19 -8
  31. data/lib/new_relic/agent/transaction_sampler.rb +17 -10
  32. data/lib/new_relic/helper.rb +32 -0
  33. data/lib/new_relic/local_environment.rb +24 -32
  34. data/lib/new_relic/okjson.rb +599 -0
  35. data/lib/new_relic/transaction_sample.rb +2 -1
  36. data/lib/new_relic/transaction_sample/segment.rb +2 -1
  37. data/lib/new_relic/version.rb +1 -1
  38. data/newrelic.yml +27 -41
  39. data/test/multiverse/suites/agent_only/audit_log_test.rb +2 -4
  40. data/test/multiverse/suites/agent_only/config/newrelic.yml +1 -2
  41. data/test/multiverse/suites/agent_only/{cross_process_test.rb → cross_application_tracing_test.rb} +3 -3
  42. data/test/multiverse/suites/agent_only/key_transactions_test.rb +66 -0
  43. data/test/multiverse/suites/agent_only/marshaling_test.rb +9 -22
  44. data/test/multiverse/suites/agent_only/rename_rule_test.rb +57 -0
  45. data/test/multiverse/suites/agent_only/start_up_test.rb +1 -1
  46. data/test/multiverse/suites/agent_only/thread_profiling_test.rb +17 -6
  47. data/test/multiverse/suites/rails/error_tracing_test.rb +20 -8
  48. data/test/multiverse/suites/resque/instrumentation_test.rb +2 -2
  49. data/test/multiverse/suites/sinatra/Envfile +2 -0
  50. data/test/multiverse/suites/sinatra/config/newrelic.yml +1 -0
  51. data/test/multiverse/suites/sinatra/sinatra_metric_explosion_test.rb +5 -5
  52. data/test/multiverse/suites/sinatra/sinatra_test.rb +75 -4
  53. data/test/new_relic/agent/agent/connect_test.rb +45 -1
  54. data/test/new_relic/agent/agent/start_worker_thread_test.rb +0 -3
  55. data/test/new_relic/agent/agent_test.rb +20 -40
  56. data/test/new_relic/agent/agent_test_controller_test.rb +24 -19
  57. data/test/new_relic/agent/busy_calculator_test.rb +1 -1
  58. data/test/new_relic/agent/configuration/server_source_test.rb +8 -3
  59. data/test/new_relic/agent/cross_app_monitor_test.rb +237 -0
  60. data/test/new_relic/agent/database_test.rb +60 -16
  61. data/test/new_relic/agent/error_collector_test.rb +28 -4
  62. data/test/new_relic/agent/event_listener_test.rb +23 -2
  63. data/test/new_relic/agent/instrumentation/controller_instrumentation_test.rb +53 -0
  64. data/test/new_relic/agent/instrumentation/metric_frame_test.rb +95 -0
  65. data/test/new_relic/agent/instrumentation/net_instrumentation_test.rb +414 -59
  66. data/test/new_relic/agent/instrumentation/task_instrumentation_test.rb +2 -5
  67. data/test/new_relic/agent/method_tracer_test.rb +4 -2
  68. data/test/new_relic/agent/new_relic_service_test.rb +108 -6
  69. data/test/new_relic/agent/pipe_channel_manager_test.rb +1 -1
  70. data/test/new_relic/agent/pipe_service_test.rb +9 -9
  71. data/test/new_relic/agent/rpm_agent_test.rb +0 -11
  72. data/test/new_relic/agent/rules_engine_test.rb +82 -0
  73. data/test/new_relic/agent/shim_agent_test.rb +0 -4
  74. data/test/new_relic/agent/sql_sampler_test.rb +7 -0
  75. data/test/new_relic/agent/stats_engine/gc_profiler_test.rb +85 -0
  76. data/test/new_relic/agent/stats_engine/metric_stats_test.rb +110 -23
  77. data/test/new_relic/agent/stats_engine_test.rb +1 -46
  78. data/test/new_relic/agent/stats_hash_test.rb +93 -0
  79. data/test/new_relic/agent/stats_test.rb +197 -0
  80. data/test/new_relic/agent/transaction_info_test.rb +63 -11
  81. data/test/new_relic/agent/transaction_sample_builder_test.rb +10 -3
  82. data/test/new_relic/agent/transaction_sampler_test.rb +92 -80
  83. data/test/new_relic/agent_test.rb +35 -5
  84. data/test/new_relic/control_test.rb +1 -1
  85. data/test/new_relic/fake_collector.rb +87 -9
  86. data/test/new_relic/helper_test.rb +24 -0
  87. data/test/new_relic/metric_data_test.rb +11 -11
  88. data/test/new_relic/metric_spec_test.rb +1 -1
  89. data/test/script/ci.sh +1 -1
  90. data/test/test_contexts.rb +0 -1
  91. data/test/test_helper.rb +21 -3
  92. metadata +32 -16
  93. metadata.gz.sig +0 -0
  94. data/lib/new_relic/agent/cross_process_monitoring.rb +0 -187
  95. data/lib/new_relic/stats.rb +0 -337
  96. data/test/new_relic/agent/cross_process_monitoring_test.rb +0 -190
  97. data/test/new_relic/agent/stats_engine/metric_stats/harvest_test.rb +0 -133
  98. data/test/new_relic/fakes_sending_data.rb +0 -30
  99. data/test/new_relic/stats_test.rb +0 -421
data/lib/new_relic/agent/shim_agent.rb
@@ -18,7 +18,6 @@ module NewRelic
   def after_fork *args; end
   def start *args; end
   def shutdown *args; end
-  def serialize; end
   def merge_data_from *args; end
   def push_trace_execution_flag *args; end
   def pop_trace_execution_flag *args; end
data/lib/new_relic/agent/sql_sampler.rb
@@ -99,7 +99,8 @@ module NewRelic
   if NewRelic::Agent.is_sql_recorded?
     if duration > Agent.config[:'slow_sql.explain_threshold']
       backtrace = caller.join("\n")
-      transaction_data.sql_data << SlowSql.new(sql, metric_name, config,
+      transaction_data.sql_data << SlowSql.new(TransactionSampler.truncate_message(sql),
+                                               metric_name, config,
                                                duration, backtrace)
     end
   end
@@ -176,7 +177,7 @@ module NewRelic
     end
   end

-  class SqlTrace < MethodTraceStats
+  class SqlTrace < Stats
     attr_reader :path
     attr_reader :url
     attr_reader :sql_id
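
The first sql_sampler.rb hunk now runs captured SQL through TransactionSampler.truncate_message before building the SlowSql record, so oversized statements are capped before they are held for the harvest. A minimal sketch of what such a helper might look like (the 16 KB cap and the trailing ellipsis are assumptions for illustration, not taken from this diff):

  # Hypothetical sketch only; the real limit lives in TransactionSampler.
  MAX_DATA_LENGTH = 16_384  # assumed cap

  def self.truncate_message(message)
    return message if message.length <= MAX_DATA_LENGTH
    message[0, MAX_DATA_LENGTH - 3] + '...'
  end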
data/lib/new_relic/agent/stats.rb (new file)
@@ -0,0 +1,149 @@
+module NewRelic
+  module Agent
+    class Stats
+      attr_accessor :call_count
+      attr_accessor :min_call_time
+      attr_accessor :max_call_time
+      attr_accessor :total_call_time
+      attr_accessor :total_exclusive_time
+      attr_accessor :sum_of_squares
+
+      def initialize
+        reset
+      end
+
+      def reset
+        @call_count = 0
+        @total_call_time = 0.0
+        @total_exclusive_time = 0.0
+        @min_call_time = 0.0
+        @max_call_time = 0.0
+        @sum_of_squares = 0.0
+      end
+
+      def is_reset?
+        call_count == 0 && total_call_time == 0.0 && total_exclusive_time == 0.0
+      end
+
+      def merge(other_stats)
+        stats = self.clone
+        stats.merge!(other_stats)
+      end
+
+      def merge!(other_stats)
+        Array(other_stats).each do |other|
+          @min_call_time = other.min_call_time if min_time_less?(other)
+          @max_call_time = other.max_call_time if other.max_call_time > max_call_time
+          @total_call_time += other.total_call_time
+          @total_exclusive_time += other.total_exclusive_time
+          @sum_of_squares += other.sum_of_squares
+          @call_count += other.call_count
+        end
+        self
+      end
+
+      def to_s
+        "[#{'%2i' % call_count.to_i} calls #{'%.4f' % total_call_time.to_f}s]"
+      end
+
+      def to_json(*_)
+        {
+          'call_count' => call_count.to_i,
+          'min_call_time' => min_call_time.to_f,
+          'max_call_time' => max_call_time.to_f,
+          'total_call_time' => total_call_time.to_f,
+          'total_exclusive_time' => total_exclusive_time.to_f,
+          'sum_of_squares' => sum_of_squares.to_f
+        }.to_json(*_)
+      end
+
+      # record a single data point into the statistical gatherer. The gatherer
+      # will aggregate all data points collected over a specified period and upload
+      # its data to the NewRelic server
+      def record_data_point(value, exclusive_time = value)
+        @call_count += 1
+        @total_call_time += value
+        @min_call_time = value if value < @min_call_time || @call_count == 1
+        @max_call_time = value if value > @max_call_time
+        @total_exclusive_time += exclusive_time
+
+        @sum_of_squares += (value * value)
+        self
+      end
+
+      alias trace_call record_data_point
+
+      # Records multiple data points as one method call - this handles
+      # all the aggregation that would be done with multiple
+      # record_data_point calls
+      def record_multiple_data_points(total_value, count=1)
+        return record_data_point(total_value) if count == 1
+        @call_count += count
+        @total_call_time += total_value
+        avg_val = total_value / count
+        @min_call_time = avg_val if avg_val < @min_call_time || @call_count == count
+        @max_call_time = avg_val if avg_val > @max_call_time
+        @total_exclusive_time += total_value
+        @sum_of_squares += (avg_val * avg_val) * count
+        self
+      end
+
+      # increments the call_count by one
+      def increment_count(value = 1)
+        @call_count += value
+      end
+
+      def inspect
+        "#<NewRelic::Agent::Stats #{to_s} >"
+      end
+
+      def ==(other)
+        (
+          @min_call_time == other.min_call_time &&
+          @max_call_time == other.max_call_time &&
+          @total_call_time == other.total_call_time &&
+          @total_exclusive_time == other.total_exclusive_time &&
+          @sum_of_squares == other.sum_of_squares &&
+          @call_count == other.call_count
+        )
+      end
+
+      # Apdex-related accessors
+      alias_method :apdex_s, :call_count
+      alias_method :apdex_t, :total_call_time
+      alias_method :apdex_f, :total_exclusive_time
+
+      def record_apdex_s
+        @call_count += 1
+      end
+
+      def record_apdex_t
+        @total_call_time += 1
+      end
+
+      def record_apdex_f
+        @total_exclusive_time += 1
+      end
+
+      protected
+
+      def min_time_less?(other)
+        (other.min_call_time < min_call_time && other.call_count > 0) || call_count == 0
+      end
+    end
+
+    class ChainedStats
+      attr_accessor :scoped_stats, :unscoped_stats
+
+      def initialize(scoped_stats, unscoped_stats)
+        @scoped_stats = scoped_stats
+        @unscoped_stats = unscoped_stats
+      end
+
+      def method_missing(method, *args)
+        unscoped_stats.send(method, *args)
+        scoped_stats.send(method, *args)
+      end
+    end
+  end
+end
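
The new Stats value object above replaces the MethodTraceStats family that the rest of this diff removes, and ChainedStats fans every update out to a scoped and an unscoped copy. A minimal usage sketch based only on the methods shown above (the timings are made up):

  stats = NewRelic::Agent::Stats.new
  stats.record_data_point(0.2)              # one call taking 200ms
  stats.record_multiple_data_points(1.5, 3) # three calls totalling 1.5s

  other = NewRelic::Agent::Stats.new
  other.record_data_point(0.05)

  merged = stats.merge(other)               # non-destructive merge
  merged.call_count                         # => 5
  merged.total_call_time                    # => 1.75

  # ChainedStats forwards any call to both halves:
  scoped, unscoped = NewRelic::Agent::Stats.new, NewRelic::Agent::Stats.new
  chained = NewRelic::Agent::ChainedStats.new(scoped, unscoped)
  chained.record_data_point(0.1)            # both now report call_count == 1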
data/lib/new_relic/agent/stats_engine.rb
@@ -2,6 +2,7 @@ require 'new_relic/agent/stats_engine/metric_stats'
 require 'new_relic/agent/stats_engine/samplers'
 require 'new_relic/agent/stats_engine/transactions'
 require 'new_relic/agent/stats_engine/gc_profiler'
+require 'new_relic/agent/stats_engine/stats_hash'

 module NewRelic
   module Agent
@@ -14,8 +15,16 @@ module NewRelic
       def initialize
         # Makes the unit tests happy
         Thread::current[:newrelic_scope_stack] = nil
+        @stats_lock = Mutex.new
+        @stats_hash = StatsHash.new
         start_sampler_thread
       end
+
+      # All access to the @stats_hash ivar should be funnelled through this
+      # method to ensure thread-safety.
+      def with_stats_lock
+        @stats_lock.synchronize { yield }
+      end
     end
   end
 end
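
The Mutex plus StatsHash pair above replaces the SynchronizedHash subclass removed from metric_stats.rb further down; every reader and writer is expected to go through with_stats_lock. A sketch of that access pattern (the method and metric names are invented for illustration, and it assumes StatsHash hands back a Stats object for unseen specs, as the harvest code later in this diff relies on):

  # Hypothetical StatsEngine helper, shown only to illustrate the locking pattern.
  def record_sampler_value(seconds)
    spec = NewRelic::MetricSpec.new('Custom/Sampler/value')
    with_stats_lock do
      @stats_hash[spec].record_data_point(seconds)
    end
  end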
data/lib/new_relic/agent/stats_engine/gc_profiler.rb
@@ -7,7 +7,7 @@ module NewRelic
       @profiler = RailsBench.new if RailsBench.enabled?
       @profiler = Ruby19.new if Ruby19.enabled?
       @profiler = Rubinius.new if Rubinius.enabled?
-      @profiler = RubiniusAgent.new if RubiniusAgent.enabled?
+      @profiler
     end

     def self.capture
@@ -110,29 +110,6 @@ module NewRelic
         ::Rubinius::GC.count
       end
     end
-
-    class RubiniusAgent < Profiler
-      def self.enabled?
-        if NewRelic::LanguageSupport.using_engine?('rbx')
-          require 'rubinius/agent'
-          true
-        else
-          false
-        end
-      end
-
-      def call_time
-        agent = ::Rubinius::Agent.loopback
-        (agent.get('system.gc.young.total_wallclock')[1] +
-          agent.get('system.gc.full.total_wallclock')[1]) * 1000
-      end
-
-      def call_count
-        agent = ::Rubinius::Agent.loopback
-        agent.get('system.gc.young.count')[1] +
-          agent.get('system.gc.full.count')[1]
-      end
-    end
   end
 end
data/lib/new_relic/agent/stats_engine/metric_stats.rb
@@ -5,109 +5,48 @@ module NewRelic
   class StatsEngine
     # Handles methods related to actual Metric collection
     module MetricStats
-      # A simple mutex-synchronized hash to make sure our statistics
-      # are internally consistent even in truly-threaded rubies like JRuby
-      class SynchronizedHash < ::Hash
-        attr_reader :lock
-
-        def initialize
-          @lock = Mutex.new
-        end
-
-        def initialize_copy(old)
-          super
-          old.each do |key, value|
-            self.store(key, value.dup)
-          end
-        end
-
-        def []=(*args)
-          @lock.synchronize { super }
-        rescue => e
-          log_error(e)
-        end
-
-        def clear(*args)
-          @lock.synchronize { super }
-        rescue => e
-          log_error(e)
-        end
-
-        def delete(*args)
-          @lock.synchronize { super }
-        rescue => e
-          log_error(e)
-        end
-
-        def delete_if(*args)
-          @lock.synchronize { super }
-        rescue => e
-          log_error(e)
-        end
-
-        def reset
-          values.each { |s| s.reset }
-        end
-
-        def log_error(e)
-          backtraces = Thread.list.map { |t| log_thread(t) }.join("\n\n")
-          ::NewRelic::Agent.logger.warn(
-            "SynchronizedHash failure: #{e.class.name}: #{e.message}\n#{backtraces}")
-        end
-
-        def log_thread(t)
-          # Ruby 1.8 doesn't expose backtrace properly, so make sure it's there
-          if t.nil? || !t.respond_to?(:backtrace) || t.backtrace.nil?
-            return "#{t}\n\tNo backtrace for thread"
-          end
-
-          backtrace = t.backtrace.map { |b| "\t#{b}" }.join("\n")
-          "\t#{t}\n#{backtrace}"
-
-        rescue Exception => e
-          # JRuby 1.7.0 has a nasty habit of raising a
-          # java.lang.NullPointerException when we iterate through threads
-          # asking for backtraces. This line allows us to swallow java
-          # exceptions without referencing their classes (since they don't
-          # exist in MRI). It also prevents us from swallowing signals or
-          # other nasty things that can happen when you rescue Exception.
-          ::NewRelic::Agent.logger.warn(
-            "Error collecting thread backtraces: #{e.class.name}: #{e.message}")
-          ::NewRelic::Agent.logger.debug( e.backtrace.join("\n") )
-
-          raise e if e.class.ancestors.include? Exception
+      # Lookup and write to the named metric in a single call.
+      #
+      # This method is thread-safe, and is preferred to the lookup / modify
+      # method pairs (e.g. get_stats + record_data_point)
+      def record_metric(metric_names_or_specs, value=nil, options={}, &blk)
+        defaults = {
+          :scoped => false,
+          :scope => default_scope
+        }
+        options = defaults.merge(options)
+        effective_scope = options[:scoped] && options[:scope]
+        specs = coerce_to_metric_spec_array(metric_names_or_specs, effective_scope)
+        with_stats_lock do
+          @stats_hash.record(specs, value, &blk)
         end
       end

-      # Returns all of the metric names of all the stats in the engine
-      def metrics
-        stats_hash.keys.map(&:to_s)
-      end
-
       # a simple accessor for looking up a stat with no scope -
       # returns a new stats object if no stats object for that
       # metric exists yet
       def get_stats_no_scope(metric_name)
-        stats_hash[NewRelic::MetricSpec.new(metric_name, '')] ||= NewRelic::MethodTraceStats.new
-      end
-
-      # This version allows a caller to pass a stat class to use
-      def get_custom_stats(metric_name, stat_class)
-        stats_hash[NewRelic::MetricSpec.new(metric_name)] ||= stat_class.new
+        get_stats(metric_name, false)
       end

       # If use_scope is true, two chained metrics are created, one with scope and one without
       # If scoped_metric_only is true, only a scoped metric is created (used by rendering metrics which by definition are per controller only)
       def get_stats(metric_name, use_scope = true, scoped_metric_only = false, scope = nil)
         scope ||= scope_name if use_scope
-        if scoped_metric_only
-          spec = NewRelic::MetricSpec.new metric_name, scope
-          stats = stats_hash[spec] ||= NewRelic::MethodTraceStats.new
-        else
-          stats = stats_hash[NewRelic::MetricSpec.new(metric_name)] ||= NewRelic::MethodTraceStats.new
-          if scope && scope != metric_name
-            spec = NewRelic::MetricSpec.new metric_name, scope
-            stats = stats_hash[spec] ||= NewRelic::ScopedMethodTraceStats.new(stats)
+        stats = nil
+        with_stats_lock do
+          if scoped_metric_only
+            stats = @stats_hash[NewRelic::MetricSpec.new(metric_name, scope)]
+          else
+            unscoped_spec = NewRelic::MetricSpec.new(metric_name)
+            unscoped_stats = @stats_hash[unscoped_spec]
+            if scope && scope != metric_name
+              scoped_spec = NewRelic::MetricSpec.new(metric_name, scope)
+              scoped_stats = @stats_hash[scoped_spec]
+              stats = NewRelic::Agent::ChainedStats.new(scoped_stats, unscoped_stats)
+            else
+              stats = unscoped_stats
+            end
+            end
           end
         end
         stats
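
record_metric is the new single-call write path introduced above, while get_stats keeps its read-and-auto-create behaviour but now takes the lock and returns a ChainedStats when a scope applies. A sketch of how callers would use it (metric names and values are illustrative, and the block form assumes StatsHash#record yields the stats object to the block, which this hunk only hints at):

  engine = NewRelic::Agent.instance.stats_engine

  # Unscoped write in a single, lock-protected call:
  engine.record_metric('Custom/ImageConversion', 0.25)

  # Scoped write; :scope defaults to the current transaction name:
  engine.record_metric('ActiveRecord/User/find', 0.003, :scoped => true)

  # Block form for updates that aren't a simple data point:
  engine.record_metric('Custom/Uploads') { |stats| stats.increment_count }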
@@ -116,10 +55,12 @@ module NewRelic
       # Returns a stat if one exists, otherwise returns nil. If you
       # want auto-initialization, use one of get_stats or get_stats_no_scope
       def lookup_stats(metric_name, scope_name = '')
-        stats_hash[NewRelic::MetricSpec.new(metric_name, scope_name)]
+        spec = NewRelic::MetricSpec.new(metric_name, scope_name)
+        with_stats_lock do
+          @stats_hash.has_key?(spec) ? @stats_hash[spec] : nil
+        end
       end

-
       # Helper method for timing supportability metrics
       def record_supportability_metrics_timed(metrics)
         start_time = Time.now
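
lookup_stats stays a pure read: unlike get_stats it returns nil instead of creating an entry, which is why it now checks has_key? under the lock. For example (the metric name is illustrative):

  engine = NewRelic::Agent.instance.stats_engine
  engine.lookup_stats('Custom/NotSeenYet')        # => nil, nothing is created
  engine.get_stats_no_scope('Custom/NotSeenYet')  # returns a Stats object, creating it if needed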
@@ -146,115 +87,73 @@ module NewRelic
         end
       end

-      # This module was extracted from the harvest method and should
-      # be refactored
-      module Harvest
-        # merge data from previous harvests into this stats engine -
-        # takes into account the case where there are new stats for
-        # that metric, and the case where there is no current data
-        # for that metric
-        def merge_data(metric_data_hash)
-          metric_data_hash.each do |metric_spec, metric_data|
-            new_data = lookup_stats(metric_spec.name, metric_spec.scope)
-            if new_data
-              new_data.merge!(metric_data.stats)
-            else
-              stats_hash[metric_spec] = metric_data.stats
-            end
-          end
-        end
-
-        private
-        def get_stats_hash_from(engine_or_hash)
-          if engine_or_hash.is_a?(StatsEngine)
-            engine_or_hash.stats_hash
-          else
-            engine_or_hash
-          end
-        end
-
-        def coerce_to_metric_spec(metric_spec)
-          if metric_spec.is_a?(NewRelic::MetricSpec)
-            metric_spec
-          else
-            NewRelic::MetricSpec.new(metric_spec)
-          end
-        end
-
-        def clone_and_reset_stats(metric_spec, stats)
-          if stats.nil?
-            raise "Nil stats for #{metric_spec.name} (#{metric_spec.scope})"
-          end
-
-          stats_copy = stats.clone
-          stats.reset
-          stats_copy
-        end
-
-        # if the previous timeslice data has not been reported (due to an error of some sort)
-        # then we need to merge this timeslice with the previously accumulated - but not sent
-        # data
-        def merge_old_data!(metric_spec, stats, old_data)
-          metric_data = old_data[metric_spec]
-          stats.merge!(metric_data.stats) unless metric_data.nil?
+      def reset_stats
+        with_stats_lock do
+          old = @stats_hash
+          @stats_hash = StatsHash.new
+          old
         end
+      end

-        def add_data_to_send_unless_empty(data, stats, metric_spec, id)
-          # don't bother collecting and reporting stats that have
-          # zero-values for this timeslice. significant
-          # performance boost and storage savings.
-          return if stats.is_reset?
-          data[metric_spec] = NewRelic::MetricData.new((id ? nil : metric_spec), stats, id)
+      # merge data from previous harvests into this stats engine
+      def merge!(other_stats_hash)
+        with_stats_lock do
+          @stats_hash.merge!(other_stats_hash)
        end
-
-        def merge_stats(other_engine_or_hash, metric_ids)
-          old_data = get_stats_hash_from(other_engine_or_hash)
-          timeslice_data = {}
-          stats_hash.lock.synchronize do
-            Thread.current['newrelic_stats_hash'] = stats_hash.clone
-            stats_hash.reset
-          end
-          Thread.current['newrelic_stats_hash'].each do |metric_spec, stats|
-            metric_spec = coerce_to_metric_spec(metric_spec)
-            stats_copy = clone_and_reset_stats(metric_spec, stats)
-            merge_old_data!(metric_spec, stats_copy, old_data)
-            add_data_to_send_unless_empty(timeslice_data, stats_copy, metric_spec, metric_ids[metric_spec])
-          end
-          timeslice_data
-        end
-
      end
-      include Harvest

      # Harvest the timeslice data. First recombine current stats
      # with any previously
      # unsent metrics, clear out stats cache, and return the current
      # stats.
-      # ---
-      # Note: this is not synchronized. There is still some risk in this and
-      # we will revisit later to see if we can make this more robust without
-      # sacrificing efficiency.
-      # +++
-      def harvest_timeslice_data(previous_timeslice_data, metric_ids)
+      def harvest_timeslice_data(old_stats_hash, rules_engine=RulesEngine.new)
        poll harvest_samplers
-        merge_stats(previous_timeslice_data, metric_ids)
+        snapshot = reset_stats
+        snapshot = apply_rules_to_metric_data(rules_engine, snapshot)
+        snapshot.merge!(old_stats_hash)
      end

-      # Remove all stats. For test code only.
-      def clear_stats
-        @stats_hash = SynchronizedHash.new
-        NewRelic::Agent::BusyCalculator.reset
+      def apply_rules_to_metric_data(rules_engine, stats_hash)
+        renamed_stats = NewRelic::Agent::StatsHash.new
+        stats_hash.each do |spec, stats|
+          new_name = rules_engine.rename(spec.name)
+          new_spec = NewRelic::MetricSpec.new(new_name, spec.scope)
+          renamed_stats[new_spec].merge!(stats)
+        end
+        renamed_stats
      end

-      # Reset each of the stats, such as when a new passenger instance starts up.
-      def reset_stats
-        stats_hash.reset
+      def default_scope
+        txn = NewRelic::Agent::TransactionInfo.get
+        txn.transaction_name_set? && txn.transaction_name
      end

-      # returns a memoized SynchronizedHash that holds the actual
-      # instances of Stats keyed off their MetricName
-      def stats_hash
-        @stats_hash ||= SynchronizedHash.new
+      def coerce_to_metric_spec_array(metric_names_or_specs, scope)
+        specs = []
+        Array(metric_names_or_specs).map do |name_or_spec|
+          case name_or_spec
+          when String
+            specs << NewRelic::MetricSpec.new(name_or_spec)
+            specs << NewRelic::MetricSpec.new(name_or_spec, scope) if scope
+          when NewRelic::MetricSpec
+            specs << name_or_spec
+          end
+        end
+        specs
+      end
+
+      # For use by test code only.
+      def clear_stats
+        reset_stats
+        NewRelic::Agent::BusyCalculator.reset
+      end
+
+      # Returns all of the metric names of all the stats in the engine.
+      # For use by test code only.
+      def metrics
+        with_stats_lock do
+          @stats_hash.keys.map { |spec| spec.to_s }
+        end
      end
    end
  end
end
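
With this refactor a harvest is: snapshot and reset the hash under the lock, rename every metric through the new RulesEngine, then merge back any data that failed to send last time. A sketch of the rename pass in isolation, using a stand-in object with the same #rename interface (the rule itself is invented; only apply_rules_to_metric_data and the StatsHash behaviour it relies on are taken from this diff):

  # Stand-in for NewRelic::Agent::RulesEngine, for illustration only.
  class PrefixingRules
    def rename(name)
      name.sub(%r{\AController/}, 'Controller/Renamed/')
    end
  end

  hash = NewRelic::Agent::StatsHash.new
  spec = NewRelic::MetricSpec.new('Controller/users/show')
  hash[spec].record_data_point(0.1)   # assumes StatsHash auto-creates the Stats entry

  engine  = NewRelic::Agent.instance.stats_engine
  renamed = engine.apply_rules_to_metric_data(PrefixingRules.new, hash)
  renamed.keys.map(&:name)            # => ["Controller/Renamed/users/show"]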