newrelic_rpm 3.4.0.beta1 → 3.4.0.beta2

Potentially problematic release: this version of newrelic_rpm might be problematic.

data/CHANGELOG CHANGED
@@ -1,15 +1,17 @@
  v3.4.0
  * Major refactor of data transmission mechanism. This enabled child processes to send data to parent processes, which then send the data to the New Relic service. This should only affect Resque users, dramatically improving their experience.
  * Moved Resque instrumentation from rpm_contrib to main agent. Resque users should discontinue use of rpm_contrib or upgrade to 2.1.11.
+ * Resolve issue with configuring the Error Collector when using server-side configuration.

  v3.3.5
- * [FIX] Allow tracing of ! and ? methods
+ * [FIX] Allow tracing of methods ending in ! and ?
  * [PERF] Give up after scanning first 50k of the response in RUM
  auto-instrumentation.
- * [FIX] Don't raise when extracting metrics from SQL queries with non UTF-8
- * bytes. Replaced "Custom/DJ Locked Jobs" metric with three new metrics for
+ * [FIX] Don't raise when extracting metrics from SQL queries with non UTF-8 bytes.
+ * Replaced "Custom/DJ Locked Jobs" metric with new metrics for
  monitoring DelayedJob: queue_length, failed_jobs, and locked_jobs, all under
- Workers/DelayedJob
+ Workers/DelayedJob. queue_length is also broken out by queue name or priority
+ depending on the version of DelayedJob deployed.

  v3.3.4.1
  * Bug fix when rendering empty collection in Rails 3.1+
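
Editor's note: a rough, hypothetical sketch of the pipe-based reporting idea described in the v3.4.0 entry above. This is not the agent's actual API; the metric name and variable names are illustrative only. The real implementation (PipeService, PipeChannelManager) appears in the hunks below.

    # Hypothetical sketch: a forked child marshals its stats into a pipe,
    # the parent unmarshals and forwards them to the New Relic service later.
    reader, writer = IO.pipe

    if (pid = Process.fork)
      # Parent: collect whatever the child reported.
      writer.close
      payload = reader.read
      child_stats = Marshal.load(payload) unless payload.empty?
      Process.wait(pid)
      # ... merge child_stats into the parent's harvest here ...
    else
      # Child (e.g. a Resque job): harvest locally and report to the parent.
      reader.close
      writer.write(Marshal.dump('Custom/ResqueJob' => { :call_count => 1 }))
      writer.close
      exit!
    end
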
@@ -4,7 +4,6 @@ require 'net/http'
  require 'logger'
  require 'zlib'
  require 'stringio'
- require 'new_relic/data_serialization'
  require 'new_relic/agent/new_relic_service'
  require 'new_relic/agent/pipe_service'

@@ -155,6 +154,7 @@ module NewRelic
  if channel_id = options[:report_to_channel]
  @service = NewRelic::Agent::PipeService.new(channel_id)
  @connected_pid = $$
+ @metric_ids = {}
  end

  # log.debug "Agent received after_fork notice in #$$: [#{control.agent_enabled?}; monitor=#{control.monitor_mode?}; connected: #{@connected.inspect}; thread=#{@worker_thread.inspect}]"
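
Editor's note: the @metric_ids reset above appears to exist so that a pipe-reporting child keys its timeslice data by full metric name/spec rather than by the parent's cached server-assigned IDs (a test later in this diff sets @metric_ids to verify exactly this). A simplified, hypothetical illustration of the difference; the stats hash shape is made up:

    stats = { :call_count => 1 }
    with_cached_id    = { 1 => stats }                 # id only the parent's connection knows
    without_cached_id = { 'Instance/Busy' => stats }   # full metric name, mergeable anywhere
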
@@ -444,9 +444,6 @@ module NewRelic
  end

  private
- # def collector
- # @collector ||= control.collector
- # end

  # All of this module used to be contained in the
  # start_worker_thread method - this is an artifact of
@@ -485,7 +482,7 @@ module NewRelic
  def create_and_run_worker_loop
  @worker_loop = WorkerLoop.new
  @worker_loop.run(@report_period) do
- save_or_transmit_data
+ transmit_data
  end
  end

@@ -869,7 +866,7 @@ module NewRelic
  config_transaction_tracer
  log_connection!(config_data)
  configure_transaction_tracer!(config_data['collect_traces'], config_data['sample_rate'])
- configure_error_collector!(config_data['collect_errors'])
+ configure_error_collector!(config_data['error_collector.enabled'])
  end

  # Logs when we connect to the server, for debugging purposes
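
Editor's note: this is the Error Collector fix from the CHANGELOG entry above. The agent now reads the 'error_collector.enabled' key from the server-side configuration hash instead of 'collect_errors'. A minimal sketch of the hash the agent expects (the surrounding values are taken from the test hunk later in this diff):

    config_data = { 'collect_traces' => true, 'error_collector.enabled' => true, 'sample_rate' => 10 }
    config_data['error_collector.enabled']   # => true; 'collect_errors' is no longer consulted here
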
@@ -1009,8 +1006,6 @@ module NewRelic
  # transmission later
  def harvest_and_send_timeslice_data
  now = Time.now
- NewRelic::Agent.instance.stats_engine.get_stats_no_scope('Supportability/invoke_remote').record_data_point(0.0)
- NewRelic::Agent.instance.stats_engine.get_stats_no_scope('Supportability/invoke_remote/metric_data').record_data_point(0.0)
  harvest_timeslice_data(now)
  # In this version of the protocol, we get back an assoc array of spec to id.
  metric_specs_and_ids = @service.metric_data(@last_harvest_time.to_f,
@@ -1112,21 +1107,13 @@ module NewRelic
  end
  end

- def save_or_transmit_data
- if NewRelic::DataSerialization.should_send_data?
- log.debug "Sending data to New Relic Service"
- NewRelic::Agent.load_data unless NewRelic::Control.instance.disable_serialization?
- harvest_and_send_errors
- harvest_and_send_slowest_sample
- harvest_and_send_slowest_sql
- harvest_and_send_timeslice_data
- else
- log.debug "Serializing agent data to disk"
- NewRelic::Agent.save_data
- end
+ def transmit_data
+ log.debug "Sending data to New Relic Service"
+ harvest_and_send_errors
+ harvest_and_send_slowest_sample
+ harvest_and_send_slowest_sql
+ harvest_and_send_timeslice_data
  rescue => e
- NewRelic::Control.instance.disable_serialization = true
- NewRelic::Control.instance.log.warn("Disabling serialization: #{e.message}")
  retry_count ||= 0
  retry_count += 1
  retry unless retry_count > 1
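
Editor's note: the rescue clause that survives this refactor implements a retry-once idiom: the nil-guarded counter plus `retry` gives exactly one extra attempt before the exception propagates. A standalone sketch of that pattern (method name and block contents are hypothetical, not part of the agent):

    # Sketch of the retry-once idiom used in transmit_data.
    def fetch_with_one_retry
      yield
    rescue StandardError
      attempts ||= 0
      attempts += 1
      retry unless attempts > 1   # second failure falls through
      raise
    end
    # usage: fetch_with_one_retry { some_flaky_call }
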
@@ -1146,7 +1133,7 @@ module NewRelic
  if @connected
  begin
  @service.request_timeout = 10
- save_or_transmit_data
+ transmit_data
  if @connected_pid == $$ && !@service.kind_of?(NewRelic::Agent::NewRelicService)
  log.debug "Sending New Relic service agent run shutdown message"
  @service.shutdown(Time.now.to_f)
@@ -118,7 +118,7 @@ module NewRelic
  got = pipe.read

  if got && !got.empty?
- payload = Marshal.load(got)
+ payload = unmarshal(got)
  if payload == 'EOF'
  pipe.close
  else
@@ -128,6 +128,16 @@ module NewRelic
  end
  end
  end
+
+ def unmarshal(data)
+ if NewRelic::LanguageSupport.broken_gc?
+ NewRelic::LanguageSupport.with_disabled_gc do
+ Marshal.load(data)
+ end
+ else
+ Marshal.load(data)
+ end
+ end

  def should_keep_listening?
  @started || @pipes.values.find{|pipe| !pipe.in.closed?}
@@ -1,14 +1,13 @@
  module NewRelic
  module Agent
  class PipeService
- attr_reader :channel_id, :buffer, :stats_engine
+ attr_reader :channel_id, :buffer
  attr_accessor :request_timeout, :agent_id, :collector

  def initialize(channel_id)
  @channel_id = channel_id
  @collector = NewRelic::Control::Server.new(:name => 'parent',
  :port => 0)
- @stats_engine = NewRelic::Agent::StatsEngine.new
  end

  def connect(config)
@@ -16,9 +15,7 @@ module NewRelic
  end

  def metric_data(last_harvest_time, now, unsent_timeslice_data)
- @stats_engine.merge_data(hash_from_metric_data(unsent_timeslice_data))
- stats = @stats_engine.harvest_timeslice_data({}, {})
- write_to_pipe(:stats => stats) if stats
+ write_to_pipe(:stats => hash_from_metric_data(unsent_timeslice_data))
  {}
  end
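
Editor's note: PipeService no longer runs the child's timeslice data through its own StatsEngine; it converts the unsent timeslice data into a spec-to-stats hash and writes that straight to the pipe, leaving the merging to the parent. A hypothetical illustration of the shape of the :stats payload (the metric name and numbers are made up, and the real keys/values are MetricSpec and stats objects rather than plain strings and hashes):

    payload = {
      :stats => {
        'Controller/jobs/perform' => { :call_count => 3, :total_call_time => 0.42 }
      }
    }
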
 
@@ -4,6 +4,13 @@ require 'new_relic/delayed_job_injection'
  module NewRelic
  module Agent
  module Samplers
+ # This sampler records the status of your delayed job table once a minute.
+ # It assumes jobs are cleared after being run, and failed jobs are not (otherwise
+ # the failed job metric is useless).
+ #
+ # In earlier versions it will break out the queue length by priority. In later
+ # versions of DJ where distinct queues are supported, it breaks it out by queue name.
+ #
  class DelayedJobSampler < NewRelic::Agent::Sampler
  def initialize
  super :delayed_job_queue
@@ -11,9 +18,6 @@ module NewRelic
  raise Unsupported, "No DJ worker present" unless NewRelic::DelayedJobInjection.worker_name
  end

- def queue_stats
- stats_engine.get_stats("Workers/DelayedJob/queue_length", false)
- end
  def error_stats
  stats_engine.get_stats("Workers/DelayedJob/failed_jobs", false)
  end
@@ -44,12 +48,38 @@ module NewRelic
  end

  def poll
- record queue_stats, queued_jobs
  record error_stats, failed_jobs
  record locked_job_stats, locked_jobs
+
+ if @queue
+ record_queue_length_across_dimension('queue')
+ else
+ record_queue_length_across_dimension('priority')
+ end
  end

  private
+
+ def record_queue_length_across_dimension(column)
+ all_count = 0
+ Delayed::Job.count(:group => column).each do | column_val, count |
+ all_count += count
+ record stats_engine.get_stats("Workers/DelayedJob/queue_length/#{column == 'queue' ? 'name' : column}/#{column_val}", false), count
+ end
+ record(stats_engine.get_stats("Workers/DelayedJob/queue_length/all", false), all_count)
+ end
+
+ # Figure out if we get the queues.
+ def setup
+ return unless @queue.nil?
+ @setup = true
+ columns = Delayed::Job.columns
+ columns.each do | c |
+ @queue = true if c.name.to_s == 'priority'
+ end
+ @queue ||= false
+ end
+
  def record(stat, size)
  stat.record_data_point size
  end
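
Editor's note: with these changes the sampler emits a per-dimension queue-length metric plus an aggregate, replacing the single "Custom/DJ Locked Jobs" metric mentioned in the CHANGELOG. A small, self-contained sketch of the metric names that record_queue_length_across_dimension produces (the queue names and counts are made up for illustration):

    dimension = 'queue'   # or 'priority' on older DelayedJob schemas
    counts    = { 'mailers' => 12, 'billing' => 3 }   # stand-in for Delayed::Job.count(:group => dimension)
    counts.each do |value, count|
      puts "Workers/DelayedJob/queue_length/#{dimension == 'queue' ? 'name' : dimension}/#{value} => #{count}"
    end
    puts "Workers/DelayedJob/queue_length/all => #{counts.values.inject(0) { |sum, c| sum + c }}"
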
@@ -1,6 +1,5 @@
  require 'forwardable'
  require 'new_relic/control'
- require 'new_relic/data_serialization'

  # = New Relic Ruby Agent
  #
@@ -223,45 +222,6 @@ module NewRelic
  def shutdown(options={})
  agent.shutdown(options)
  end
-
- # a method used to serialize short-running processes to disk, so
- # we don't incur the overhead of reporting to the server for every
- # fork/invocation of a small job.
- #
- # Functionally, this loads the data from the file into the agent
- # (to avoid losing data by overwriting) and then serializes the
- # agent data to the file again. See also #load_data
- def save_data
- NewRelic::DataSerialization.read_and_write_to_file do |old_data|
- agent.merge_data_from(old_data)
- agent.serialize
- end
- end
-
- # used to load data from the disk during the harvest cycle to send
- # it. This method also clears the file so data should never be
- # sent more than once.
-
- # Note that only one transaction trace will be sent even if many
- # are serialized, since the slowest is sent.
- #
- # See also the complement to this method, #save_data - used when a
- # process is shutting down
- def load_data
- if !NewRelic::Control.instance['disable_serialization']
- NewRelic::DataSerialization.read_and_write_to_file do |old_data|
- agent.merge_data_from(old_data)
- nil # return nil so nothing is written to the file
- end
- NewRelic::DataSerialization.update_last_sent!
- end
-
- {
- :metrics => agent.stats_engine.metrics.length,
- :traces => agent.unsent_traces_size,
- :errors => agent.unsent_errors_size
- }
- end

  # Add instrumentation files to the agent. The argument should be
  # a glob matching ruby scripts which will be executed at the time
@@ -1,28 +1,6 @@
  module NewRelic::LanguageSupport
  extend self

- module DataSerialization
- def self.included(base)
- # need to disable GC during marshal load in 1.8.7
- if NewRelic::LanguageSupport.using_version?('1.8.7') &&
- !NewRelic::LanguageSupport.using_engine?('jruby') &&
- !NewRelic::LanguageSupport.using_engine?('rbx')
- base.class_eval do
- def self.load(*args)
- if defined?(::GC) && ::GC.respond_to?(:disable)
- ::GC.disable
- val = super
- ::GC.enable
- val
- else
- super
- end
- end
- end
- end
- end
- end
-
  module Control
  def self.included(base)
  # need to use syck rather than psych when possible
@@ -81,6 +59,24 @@ module NewRelic::LanguageSupport
  engine == 'ruby'
  end
  end
+
+ def broken_gc?
+ NewRelic::LanguageSupport.using_version?('1.8.7') &&
+ RUBY_PATCHLEVEL < 348 &&
+ !NewRelic::LanguageSupport.using_engine?('jruby') &&
+ !NewRelic::LanguageSupport.using_engine?('rbx')
+ end
+
+ def with_disabled_gc
+ if defined?(::GC) && ::GC.respond_to?(:disable)
+ ::GC.disable
+ val = yield
+ ::GC.enable
+ val
+ else
+ yield
+ end
+ end

  def using_version?(version)
  numbers = version.split('.')
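
Editor's note: broken_gc? and with_disabled_gc generalize the old DataSerialization-specific workaround removed above: on MRI 1.8.7 before patchlevel 348, running the garbage collector in the middle of Marshal.load could crash the process, so loads are wrapped with GC disabled. A short usage sketch, assuming newrelic_rpm 3.4.0.beta2 is loaded (this mirrors the unmarshal method added to PipeChannelManager earlier in this diff):

    data = Marshal.dump(:example => 'payload')
    payload =
      if NewRelic::LanguageSupport.broken_gc?
        NewRelic::LanguageSupport.with_disabled_gc { Marshal.load(data) }
      else
        Marshal.load(data)
      end
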
@@ -4,7 +4,7 @@ module NewRelic
  MAJOR = 3
  MINOR = 4
  TINY = 0
- BUILD = 'beta1' # Set to nil for a release, 'beta1', 'alpha', etc for prerelease builds
+ BUILD = 'beta2' # Set to nil for a release, 'beta1', 'alpha', etc for prerelease builds
  STRING = [MAJOR, MINOR, TINY, BUILD].compact.join('.')
  end
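
Editor's note: bumping BUILD is all the version change requires, since the version string is just the compacted join shown above:

    [3, 4, 0, 'beta2'].compact.join('.')   # => "3.4.0.beta2"
    [3, 4, 0, nil].compact.join('.')       # => "3.4.0" (a final release)
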
 
data/newrelic_rpm.gemspec CHANGED
@@ -5,14 +5,14 @@

  Gem::Specification.new do |s|
  s.name = "newrelic_rpm"
- s.version = "3.4.0.beta1"
+ s.version = "3.4.0.beta2"

  s.required_rubygems_version = Gem::Requirement.new("> 1.3.1") if s.respond_to? :required_rubygems_version=
  s.authors = ["Bill Kayser", "Jon Guymon", "Justin George", "Darin Swanson"]
- s.date = "2012-05-25"
+ s.date = "2012-06-07"
  s.description = "New Relic is a performance management system, developed by New Relic,\nInc (http://www.newrelic.com). New Relic provides you with deep\ninformation about the performance of your web application as it runs\nin production. The New Relic Ruby Agent is dual-purposed as a either a\nGem or plugin, hosted on\nhttp://github.com/newrelic/rpm/\n"
  s.email = "support@newrelic.com"
- s.executables = ["newrelic", "mongrel_rpm", "newrelic_cmd"]
+ s.executables = ["newrelic_cmd", "newrelic", "mongrel_rpm"]
  s.extra_rdoc_files = [
  "CHANGELOG",
  "LICENSE",
@@ -105,7 +105,6 @@ Gem::Specification.new do |s|
  "lib/new_relic/control/logging_methods.rb",
  "lib/new_relic/control/profiling.rb",
  "lib/new_relic/control/server_methods.rb",
- "lib/new_relic/data_serialization.rb",
  "lib/new_relic/delayed_job_injection.rb",
  "lib/new_relic/helper.rb",
  "lib/new_relic/language_support.rb",
@@ -189,7 +188,6 @@ Gem::Specification.new do |s|
  "test/new_relic/control/configuration_test.rb",
  "test/new_relic/control/logging_methods_test.rb",
  "test/new_relic/control_test.rb",
- "test/new_relic/data_serialization_test.rb",
  "test/new_relic/delayed_job_injection_test.rb",
  "test/new_relic/fake_collector.rb",
  "test/new_relic/fake_service.rb",
@@ -363,7 +363,7 @@ class NewRelic::Agent::Agent::ConnectTest < Test::Unit::TestCase
  'data_report_period' => 'pasta sauce',
  'url_rules' => 'tamales',
  'collect_traces' => true,
- 'collect_errors' => true,
+ 'error_collector.enabled' => true,
  'sample_rate' => 10
  }
  NewRelic::Control.instance.settings['transaction_tracer'] = {'enabled' => true}
@@ -64,7 +64,7 @@ class NewRelic::Agent::Agent::StartWorkerThreadTest < Test::Unit::TestCase
  wl = mock('worker loop')
  NewRelic::Agent::WorkerLoop.expects(:new).returns(wl)
  wl.expects(:run).with(30).yields
- self.expects(:save_or_transmit_data)
+ self.expects(:transmit_data)
  create_and_run_worker_loop
  end

@@ -5,6 +5,7 @@ module NewRelic
  def setup
  super
  @agent = NewRelic::Agent::Agent.new
+ @agent.service = NewRelic::FakeService.new
  end

  def test_after_fork_reporting_to_channel
@@ -13,37 +14,21 @@ module NewRelic
  'Agent should use PipeService when directed to report to pipe channel')
  assert_equal 123, @agent.service.channel_id
  end
-
- def test_save_or_transmit_data_should_save
- NewRelic::Agent.expects(:save_data).once
- @agent.expects(:harvest_and_send_timeslice_data).never
- NewRelic::DataSerialization.expects(:should_send_data?).returns(false)
- @agent.instance_eval { save_or_transmit_data }
- end

- def test_save_or_transmit_data_should_transmit
- NewRelic::Control.instance.stubs(:disable_serialization?).returns(false)
- NewRelic::Agent.expects(:load_data)
- @agent.expects(:harvest_and_send_timeslice_data)
- @agent.expects(:harvest_and_send_slowest_sample)
- @agent.expects(:harvest_and_send_errors)
- NewRelic::DataSerialization.expects(:should_send_data?).returns(true)
- @agent.instance_eval { save_or_transmit_data }
+ def test_transmit_data_should_transmit
+ @agent.instance_eval { transmit_data }
+ assert @agent.service.agent_data.any?
  end

- def test_save_or_transmit_data_should_close_explain_db_connections
- NewRelic::Agent.stubs(:save_data)
- NewRelic::DataSerialization.expects(:should_send_data?).returns(false)
+ def test_transmit_data_should_close_explain_db_connections
  NewRelic::Agent::Database.expects(:close_connections)
- @agent.instance_eval { save_or_transmit_data }
+ @agent.instance_eval { transmit_data }
  end

- def test_save_or_transmit_data_should_not_close_db_connections_if_forked
- NewRelic::Agent.stubs(:save_data)
- NewRelic::DataSerialization.expects(:should_send_data?).returns(false)
+ def test_transmit_data_should_not_close_db_connections_if_forked
  NewRelic::Agent::Database.expects(:close_connections).never
  @agent.after_fork
- @agent.instance_eval { save_or_transmit_data }
+ @agent.instance_eval { transmit_data }
  end

  def test_serialize
@@ -244,6 +244,7 @@ class NewRelic::Agent::MethodTracerTest < Test::Unit::TestCase
  t1 = Time.now
  method_with_block(1,2,3,true,METRIC) do |scope|
  assert scope == METRIC
+ sleep 0.1 # pad the test a bit to increase the margin of error
  end
  elapsed = Time.now - t1

@@ -49,6 +49,10 @@ module NewRelic
  NewRelic::Agent.instance.stats_engine.get_stats_no_scope(metric) \
  .record_data_point(1.0)

+ # ensure that cached metric ids don't interfere with metric merging
+ NewRelic::Agent.agent.instance_variable_set(:@metric_ids,
+ {NewRelic::MetricSpec.new('Instance/Busy') => 1})
+
  NewRelic::Agent::PipeChannelManager.listener.close_all_pipes
  NewRelic::Agent.register_report_channel(:agent_test) # before fork
  pid = Process.fork do
@@ -201,25 +205,6 @@ module NewRelic
  def test_instance
  assert_equal(NewRelic::Agent.agent, NewRelic::Agent.instance, "should return the same agent for both identical methods")
  end
-
- def test_load_data_should_disable_serialization_if_an_error_is_encountered
- NewRelic::Control.instance['disable_serialization'] = false
- NewRelic::DataSerialization.stubs(:should_send_data?).returns(false)
- NewRelic::Agent.stubs(:save_data).raises(Errno::EACCES)
- begin
- NewRelic::Agent.instance.send(:save_or_transmit_data)
- rescue Errno::EACCES; end
- # should be true
- assert(NewRelic::Control.instance['disable_serialization'])
- NewRelic::Control.instance['disable_serialization'] = false
- end
-
- def test_load_data_should_not_write_files_when_serialization_disabled
- NewRelic::Control.instance['disable_serialization'] = true
- NewRelic::DataSerialization.expects(:read_and_write_to_file).never
- NewRelic::Agent.load_data
- NewRelic::Control.instance['disable_serialization'] = false
- end

  def test_register_report_channel
  NewRelic::Agent.register_report_channel(:channel_id)
@@ -34,7 +34,7 @@ module NewRelic

  def method_missing(method, *args)
  if @supported_methods.include?(method)
- @agent_data << OpenStruct.new(:method => method, :params => args)
+ @agent_data << OpenStruct.new(:action => method, :params => args)
  @mock[method.to_s]
  else
  super
data/test/script/ci.sh CHANGED
@@ -51,9 +51,10 @@ mkdir -p tmp
  cd tmp


- rpm_test_app_cache=~/.rpm_test_app_cache
+ #rpm_test_app_cache=~/.rpm_test_app_cache
+ rpm_test_app_cache=~/workspace/.rpm_test_app_cache
  (
- echo "updating local cache of rpm_test_app"
+ echo "updating local cache of rpm_test_app in $rpm_test_app_cache"
  git clone --mirror git://github.com/newrelic/rpm_test_app.git $rpm_test_app_cache || true
  cd $rpm_test_app_cache
  )
@@ -127,7 +128,3 @@ fi
  export RAILS_ENV=test
  bundle
  bundle exec rake --trace db:create:all ci:setup:testunit test:newrelic
-
-
-
-
@@ -1,11 +1,28 @@
  #!/bin/bash -e

+ export PATH=$PATH:$HOME/bin
+
+ echo $HOME
+ echo $PATH
+ #ls $HOME/bin
+
+
+
+ if [ "x$RUBY" == "x" ] ; then
+ export RUBY=1.9.3
+ fi
+
+ echo "Tests will be run using $RUBY"
+ #uname -a
+
  SCRATH_DIR=./multiverse_tmp
  script_dirname=`dirname $0`

  # make sure that we're in the project root
  cd "$script_dirname/../../"

+ #pwd
+
  if [ -x $SCRATH_DIR ] ; then
  echo "found tmp, deleting"
  rm -fr $SCRATH_DIR
@@ -14,22 +31,33 @@ fi
  mkdir $SCRATH_DIR
  cd $SCRATH_DIR

+ #pwd
+ if [[ $JOB_NAME =~ "Pangalactic" ]] ; then
+ AGENT_LOCATION="../../../../../../Ruby_Agent"
+ else
+ AGENT_LOCATION="../../Ruby_Agent"
+ fi
+
  git clone --depth=1 git@github.com:newrelic/multiverse.git multiverse
  git clone --depth=1 git@github.com:newrelic/rpm_contrib.git rpm_contrib

- if [ -x ../../Ruby_Agent ] ; then
- ln -s ../../Ruby_Agent ./ruby_agent
+ echo "Looking for Ruby Agent at $AGENT_LOCATION"
+ #ls -l ../../../../../../
+ #ls -l /home/hudson/workspace/
+
+ if [ -x $AGENT_LOCATION ] ; then
+ ln -s $AGENT_LOCATION ./ruby_agent
  else
- echo "*********** Ruby_Agent can't be found ***********"
+ echo "*********** Ruby_Agent not found ***********"
  exit 1
  fi

  cd multiverse
  #./ci_run.sh

- pwd
- ls -l ../
+ #pwd
+ #ls -l ../

  source ~/.rvm/scripts/rvm
- rvm use 1.9.3
+ rvm use $RUBY
  script/runner
metadata CHANGED
@@ -1,15 +1,15 @@
  --- !ruby/object:Gem::Specification
  name: newrelic_rpm
  version: !ruby/object:Gem::Version
- hash: 1309669669
+ hash: 62196359
  prerelease: 6
  segments:
  - 3
  - 4
  - 0
  - beta
- - 1
- version: 3.4.0.beta1
+ - 2
+ version: 3.4.0.beta2
  platform: ruby
  authors:
  - Bill Kayser
@@ -20,7 +20,7 @@ autorequire:
  bindir: bin
  cert_chain: []

- date: 2012-05-25 00:00:00 Z
+ date: 2012-06-07 00:00:00 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: jeweler
@@ -74,9 +74,9 @@ description: |

  email: support@newrelic.com
  executables:
+ - newrelic_cmd
  - newrelic
  - mongrel_rpm
- - newrelic_cmd
  extensions: []

  extra_rdoc_files:
@@ -170,7 +170,6 @@ files:
  - lib/new_relic/control/logging_methods.rb
  - lib/new_relic/control/profiling.rb
  - lib/new_relic/control/server_methods.rb
- - lib/new_relic/data_serialization.rb
  - lib/new_relic/delayed_job_injection.rb
  - lib/new_relic/helper.rb
  - lib/new_relic/language_support.rb
@@ -254,7 +253,6 @@ files:
  - test/new_relic/control/configuration_test.rb
  - test/new_relic/control/logging_methods_test.rb
  - test/new_relic/control_test.rb
- - test/new_relic/data_serialization_test.rb
  - test/new_relic/delayed_job_injection_test.rb
  - test/new_relic/fake_collector.rb
  - test/new_relic/fake_service.rb
@@ -421,3 +419,4 @@ specification_version: 3
  summary: New Relic Ruby Agent
  test_files: []

+ has_rdoc:
@@ -1,151 +0,0 @@
- require 'fileutils'
- require 'new_relic/language_support'
-
- module NewRelic
- # Handles serialization of data to disk, to save on contacting the
- # server. Lowers both server and client overhead, if the disk is not overloaded
- class DataSerialization
- include NewRelic::LanguageSupport::DataSerialization
-
- module ClassMethods
- # Check whether the store is too large, too old, or the
- # pid file is too old. If so, we should send the data
- # right away. If not, we presumably store it for later sending
- # (handled elsewhere)
- def should_send_data?
- NewRelic::Control.instance.disable_serialization? || store_too_large? ||
- store_too_old? || pid_too_old? ||
- NewRelic::LanguageSupport.using_version?('1.8.6')
- rescue => e
- NewRelic::Control.instance.disable_serialization = true
- NewRelic::Control.instance.log.warn("Disabling serialization: #{e.message}")
- true
- end
-
- # A combined locked read/write from the store file - reduces
- # contention by not acquiring the lock and file handle twice
- def read_and_write_to_file
- with_locked_store do |f|
- result = (yield get_data_from_file(f))
- f.rewind
- f.truncate(0)
- write_contents_nonblockingly(f, dump(result)) if result
- end
- rescue Errno::ENOENT => e
- NewRelic::Control.instance.log.warn(e.message)
- end
-
- # touches the age file that determines whether we should send
- # data now or not
- def update_last_sent!
- FileUtils.touch(pid_file_path)
- end
-
- def pid_too_old?
- return true unless File.exists?(pid_file_path)
- age = (Time.now.to_i - File.mtime(pid_file_path).to_i)
- NewRelic::Control.instance.log.debug("Pid was #{age} seconds old, sending data") if age > 60
- age > 60
- end
-
- def store_too_old?
- return true unless File.exists?(file_path)
- age = (Time.now.to_i - File.mtime(file_path).to_i)
- NewRelic::Control.instance.log.debug("Store was #{age} seconds old, sending data") if age > 60
- age > 50
- end
-
- def store_too_large?
- return true unless File.exists?(file_path)
- size = File.size(file_path) > max_size
- NewRelic::Control.instance.log.debug("Store was oversize, sending data") if size
- size
- end
-
- private
-
- def open_arguments
- if defined?(Encoding)
- [file_path, File::RDWR | File::CREAT, {:internal_encoding => nil}]
- else
- [file_path, File::RDWR | File::CREAT]
- end
- end
-
- def with_locked_store
- File.open(*open_arguments) do |f|
- f.flock(File::LOCK_EX)
- begin
- yield(f)
- ensure
- f.flock(File::LOCK_UN)
- end
- end
- rescue => e
- NewRelic::Control.instance.log.error("Error serializing data to disk: #{e.inspect}")
- NewRelic::Control.instance.log.debug(e.backtrace.split("\n"))
- # re-raise so that serialization will be disabled higher up the stack
- raise e
- end
-
- def get_data_from_file(f)
- data = read_until_eof_error(f)
- result = load(data)
- f.truncate(0)
- result
- end
-
- def write_contents_nonblockingly(f, string)
- result = 0
- while (result < string.length)
- result += f.write_nonblock(string)
- end
- rescue Errno::EAGAIN, Errno::EINTR
- IO.select(nil, [f])
- retry
- end
-
- def read_until_eof_error(f)
- accumulator = ""
- while(true)
- accumulator << f.read_nonblock(10_000)
- end
- rescue Errno::EAGAIN, Errno::EINTR
- IO.select([f])
- retry
- rescue EOFError
- accumulator
- end
-
- def max_size
- 10_000
- end
-
- def dump(object)
- Marshal.dump(object.clone)
- end
-
- def load(dump)
- if dump.respond_to?(:size) && dump.size == 0
- NewRelic::Control.instance.log.debug("Spool file empty.")
- return nil
- end
- Marshal.load(dump)
- rescue ArgumentError, TypeError => e
- NewRelic::Control.instance.log.error("Error loading data from newrelic_agent_store.db: #{e.inspect}")
- NewRelic::Control.instance.log.debug(e.backtrace.inspect)
- nil
- end
-
- def file_path
- "#{NewRelic::Control.instance.log_path}/newrelic_agent_store.db"
- end
-
- def pid_file_path
- "#{NewRelic::Control.instance.log_path}/newrelic_agent_store.pid"
- end
- end
- extend ClassMethods
- end
- end
-
@@ -1,208 +0,0 @@
- require File.expand_path(File.join(File.dirname(__FILE__),'..', 'test_helper'))
- require 'new_relic/data_serialization'
- class NewRelic::DataSerializationTest < Test::Unit::TestCase
-
- attr_reader :file, :path
-
- def setup
- NewRelic::Control.instance['log_file_path'] = './log'
- @path = NewRelic::Control.instance.log_path
- @file = "#{path}/newrelic_agent_store.db"
- Dir.mkdir(path) if !File.directory?(path)
- FileUtils.rm_rf(@file)
- FileUtils.rm_rf("#{@path}/newrelic_agent_store.pid")
- end
-
- def teardown
- # this gets set to true in some tests
- NewRelic::Control.instance['disable_serialization'] = false
- mocha_teardown
- end
-
- def test_read_and_write_from_file_read_only
- File.open(file, 'w') do |f|
- f.write(Marshal.dump('a happy string'))
- end
- NewRelic::DataSerialization.read_and_write_to_file do |data|
- assert_equal('a happy string', data, "should pull the dumped item from the file")
- nil # must explicitly return nil or the return value will be dumped
- end
- assert_equal(0, File.size(file), "Should not leave any data in the file")
- end
-
- def test_read_and_write_to_file_dumping_contents
- expected_contents = Marshal.dump('a happy string')
- NewRelic::DataSerialization.read_and_write_to_file do
- 'a happy string'
- end
- assert_equal(expected_contents, File.read(file), "should have dumped the contents")
- end
-
- def test_read_and_write_to_file_yields_old_data
- expected_contents = 'a happy string'
- File.open(file, 'w') do |f|
- f.write(Marshal.dump(expected_contents))
- end
- contents = nil
- NewRelic::DataSerialization.read_and_write_to_file do |old_data|
- contents = old_data
- 'a happy string'
- end
- assert_equal(contents, expected_contents, "should have dumped the contents")
- end
-
- def test_read_and_write_to_file_round_trip
- old_data = nil
- NewRelic::DataSerialization.read_and_write_to_file do |data|
- old_data = data
- 'a' * 30
- end
- NewRelic::DataSerialization.read_and_write_to_file do |data|
- assert_equal('a'*30, data, "should be the same after serialization")
- end
- end
-
- def test_should_send_data_when_over_limit
- NewRelic::DataSerialization.stubs(:max_size).returns(20)
- NewRelic::DataSerialization.read_and_write_to_file do
- "a" * 30
- end
- assert(NewRelic::DataSerialization.should_send_data?, 'Should be over limit')
- end
-
- def test_read_until_eoferror
- File.open(file, 'w') do |f|
- f.write("a" * 10_001)
- end
- value = ""
- File.open(file,'r') do |f|
- value << NewRelic::DataSerialization.instance_eval { read_until_eof_error(f) }
- end
- assert_equal('a' * 10_001, value, "should retrieve all the contents from the string and not raise EOFerrors")
- end
-
- def test_write_contents_nonblockingly
- File.open(file, 'w') do |f|
- f.write("") # write nothing! NOTHING
- end
-
- File.open(file, 'w') do |f|
- NewRelic::DataSerialization.instance_eval { write_contents_nonblockingly(f, 'a' * 10_001) }
- end
- value = File.read(file)
- assert_equal('a' * 10_001, value, "should write a couple thousand 'a's to a file without exploding")
- end
-
- def test_should_send_data_disabled
- NewRelic::Control.instance.disable_serialization = true
- assert(NewRelic::DataSerialization.should_send_data?,
- 'should send data when disabled')
- end
-
- def test_should_send_data_under_limit
- NewRelic::DataSerialization.expects(:max_size).returns(2000)
- NewRelic::DataSerialization.read_and_write_to_file do |old_data|
- "a" * 5
- end
-
- assert(!NewRelic::DataSerialization.store_too_large?,
- 'Should be under the limit')
- end
-
- def test_should_handle_empty_spool_file
- NewRelic::Control.instance.log.expects(:error).never
- assert_nil NewRelic::DataSerialization.instance_eval { load('') }
- end
-
- def test_spool_file_location_respects_log_file_path_setting
- NewRelic::Control.instance.expects(:log_path).returns('./tmp')
- Dir.mkdir('./tmp') if !File.directory?('./tmp')
- NewRelic::DataSerialization.read_and_write_to_file do |_|
- 'a' * 30
- end
- assert(File.exists?('./tmp/newrelic_agent_store.db'),
- "Spool file not created at user specified location")
- end
-
- def test_age_file_location_respects_log_file_path_setting
- NewRelic::Control.instance.expects(:log_path).returns('./tmp')
- Dir.mkdir('./tmp') if !File.directory?('./tmp')
- NewRelic::DataSerialization.update_last_sent!
- assert(File.exists?('./tmp/newrelic_agent_store.pid'),
- "Age file not created at user specified location")
- end
-
- def test_pid_age_creates_pid_file_if_none_exists
- assert(!File.exists?("#{@path}/newrelic_agent_store.pid"),
- 'pid file found, should not be there')
- NewRelic::DataSerialization.update_last_sent!
- assert(File.exists?("#{@path}/newrelic_agent_store.pid"),
- 'pid file not found, should be there')
- end
-
- def test_should_not_create_files_if_serialization_disabled
- NewRelic::Control.instance['disable_serialization'] = true
- NewRelic::DataSerialization.should_send_data?
- assert(!File.exists?("#{@path}/newrelic_agent_store.db"),
- 'db file created when serialization disabled')
- assert(!File.exists?("#{@path}/newrelic_agent_store.pid"),
- 'pid file created when serialization disabled')
- end
-
- def test_loading_does_not_seg_fault_if_gc_triggers
- return if NewRelic::LanguageSupport.using_version?('1.8.6')
- require 'timeout'
-
- Thread.abort_on_exception = true
- rcv,snd = IO.pipe
-
- write = Thread.new do
- obj = ('a'..'z').inject({}){|h,s|h[s.intern]=s*1024;h}
- data = Marshal.dump(obj)
- snd.write(data[0,data.size/2])
- sleep(0.1)
- snd.write(data[(data.size/2)..-1])
- snd.close
- end
-
- read = Thread.new do
- lock = Mutex.new
- lock.synchronize do
- NewRelic::DataSerialization.class_eval { load(rcv) }
- end
- end
-
- gc = Thread.new do
- 10.times do
- GC.start
- end
- end
-
- Timeout::timeout(5) do
- write.join
- read.join
- gc.join
- end
- # should not seg fault
- end
-
- def test_dump_should_be_thread_safe
- stats_hash = {}
-
- 2000.times do |i|
- stats_hash[i.to_s] = NewRelic::StatsBase.new
- end
-
- harvest = Thread.new do
- NewRelic::DataSerialization.class_eval { dump(stats_hash) }
- end
-
- app = Thread.new do
- stats_hash["a"] = NewRelic::StatsBase.new
- end
-
- assert_nothing_raised do
- [app, harvest].each{|t| t.join}
- end
- end
- end