scout_rails 1.1.5.pre → 1.1.5.pre3

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their public registry.
@@ -1,3 +1,8 @@
+# 1.1.5
+
+* sending array of transaction samples
+* sending transaction samples across processes
+
 # 1.1.3
 
 * Rails 4 support
@@ -7,8 +7,10 @@ module ScoutRails
     def process_metrics
       logger.debug "Processing metrics"
       run_samplers
-      metrics = layaway.deposit_and_deliver
-      if metrics.any?
+      payload = layaway.deposit_and_deliver
+      metrics = payload[:metrics]
+      samples = payload[:samples]
+      if payload.any?
         add_metric_ids(metrics)
         logger.warn "Some data may be lost - metric size is at limit" if metrics.size == ScoutRails::Store::MAX_SIZE
         # for debugging, count the total number of requests
@@ -18,7 +20,7 @@ module ScoutRails
             controller_count += stats.call_count
           end
         end
-        sample, samples = store.fetch_and_reset_samples!
+        sample = store.fetch_and_reset_sample!
         payload = Marshal.dump(:metrics => metrics, :sample => sample, :samples => samples)
         STATSD.timing('scout_rails.payload_kb',payload.size/1024)
         STATSD.timing('scout_rails.samples',samples.size)
@@ -2,8 +2,8 @@
 # 1. A centralized store for multiple Agent processes. This way, only 1 checkin is sent to Scout rather than 1 per-process.
 # 2. Bundling up reports from multiple timeslices to make updates more efficent server-side.
 #
-# Metrics are stored in a Hash, where the keys are Time.to_i on the minute. When depositing data,
-# metrics are either merged with an existing time or placed in a new key.
+# Data is stored in a Hash, where the keys are Time.to_i on the minute. The value is a Hash {:metrics => Hash, :samples => Array}.
+# When depositing data, the new data is either merged with an existing time or placed in a new key.
 class ScoutRails::Layaway
   attr_accessor :file
   def initialize
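
For orientation, the layout described in the updated comment can be sketched in plain Ruby. This is an illustrative sketch only; the variable names and values are placeholders, not code from the gem:

# A sketch of the layaway Hash layout described above (illustrative only).
t = Time.now
slot = (t - t.sec).to_i                        # Time.to_i on the minute
layaway_data = {
  slot => { :metrics => {}, :samples => [] }   # one entry per minute slot
}
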
@@ -11,24 +11,35 @@ class ScoutRails::Layaway
   end
 
   def deposit_and_deliver
-    new_data = ScoutRails::Agent.instance.store.metric_hash
-    log_deposited_requests(new_data)
+    new_metrics = ScoutRails::Agent.instance.store.metric_hash
+    log_deposited_metrics(new_metrics)
+    log_deposited_samples(ScoutRails::Agent.instance.store.samples)
     to_deliver = {}
     file.read_and_write do |old_data|
       old_data ||= Hash.new
       # merge data
       # if (1) there's data in the file and (2) there isn't any data yet for the current minute, this means we've
       # collected all metrics for the previous slots and we're ready to deliver.
+      #
+      # Example w/2 processes:
+      #
+      # 12:00:34 ---
+      # Process 1: old_data.any? => false, so deposits.
+      # Process 2: old_data_any? => true and old_data[12:00].nil? => false, so deposits.
+      #
+      # 12:01:34 ---
+      # Process 1: old_data.any? => true and old_data[12:01].nil? => true, so delivers metrics.
+      # Process 2: old_data.any? => true and old_data[12:01].nil? => false, so deposits.
       if old_data.any? and old_data[slot].nil?
         to_deliver = old_data
         old_data = Hash.new
       elsif old_data.any?
-        ScoutRails::Agent.instance.logger.debug "Not yet time to deliver metrics for slot [#{Time.at(old_data.keys.sort.last).strftime("%m/%d/%y %H:%M:%S %z")}]"
+        ScoutRails::Agent.instance.logger.debug "Not yet time to deliver payload for slot [#{Time.at(old_data.keys.sort.last).strftime("%m/%d/%y %H:%M:%S %z")}]"
       else
         ScoutRails::Agent.instance.logger.debug "There is no data in the layaway file to deliver."
       end
-      old_data[slot]=ScoutRails::Agent.instance.store.merge_data_and_clear(old_data[slot] || Hash.new)
-      log_saved_requests(old_data,new_data)
+      old_data[slot]=ScoutRails::Agent.instance.store.merge_data_and_clear(old_data[slot] || {:metrics => {}, :samples => []})
+      log_saved_data(old_data,new_metrics)
       old_data
     end
     to_deliver.any? ? validate_data(to_deliver) : {}
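
The two-process example in the new comment amounts to the following layaway-file states. This is an illustrative sketch with invented names and values, not code from the gem:

# Illustrative only: placeholder keys and values.
slot_1200 = 1357041600      # stand-in minute-aligned Time.to_i key for the "12:00" slot
slot_1201 = slot_1200 + 60  # the following minute's slot key

# During the 12:00 minute both processes deposit into the same slot, so the
# layaway file holds a single key and nothing is delivered yet:
file_contents = {
  slot_1200 => { :metrics => { "Controller/users/index" => "stats placeholder" },
                 :samples => ["sample from process 1", "sample from process 2"] }
}

# During the 12:01 minute the first process to check in finds no entry for the
# new slot, so it delivers everything stored under slot_1200 and starts over
# with an empty Hash; the later process then deposits into slot_1201.
file_contents.key?(slot_1201)  # => false, so the 12:00 payload is ready to deliver
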
@@ -53,32 +64,37 @@ class ScoutRails::Layaway
     ScoutRails::Agent.instance.logger.debug $!.backtrace
   end
 
+  # Data is stored under timestamp-keys (without the second).
   def slot
     t = Time.now
     t -= t.sec
     t.to_i
   end
 
-  def log_deposited_requests(new_data)
+  def log_deposited_metrics(new_metrics)
     controller_count = 0
-    new_data.each do |meta,stats|
+    new_metrics.each do |meta,stats|
       if meta.metric_name =~ /\AController/
         controller_count += stats.call_count
       end
     end
     ScoutRails::Agent.instance.logger.debug "Depositing #{controller_count} requests into #{Time.at(slot).strftime("%m/%d/%y %H:%M:%S %z")} slot."
   end
+
+  def log_deposited_samples(new_samples)
+    ScoutRails::Agent.instance.logger.debug "Depositing #{new_samples.size} samples into #{Time.at(slot).strftime("%m/%d/%y %H:%M:%S %z")} slot."
+  end
 
-  def log_saved_requests(old_data,new_data)
+  def log_saved_data(old_data,new_metrics)
     ScoutRails::Agent.instance.logger.debug "Saving the following #{old_data.size} time slots locally:"
     old_data.each do |k,v|
       controller_count = 0
-      new_data.each do |meta,stats|
+      new_metrics.each do |meta,stats|
        if meta.metric_name =~ /\AController/
          controller_count += stats.call_count
        end
      end
-      ScoutRails::Agent.instance.logger.debug "#{Time.at(k).strftime("%m/%d/%y %H:%M:%S %z")} => #{controller_count} requests"
+      ScoutRails::Agent.instance.logger.debug "#{Time.at(k).strftime("%m/%d/%y %H:%M:%S %z")} => #{controller_count} requests and #{v[:samples].size} samples"
     end
   end
 end
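
As a small illustration of why the seconds are dropped from the slot key (the times below are invented, not from the gem): two agent processes that check in at different seconds of the same minute resolve to the same key, so their metrics and samples land in the same layaway entry.

require 'time'
t1 = Time.parse("2013-01-01 12:00:34")     # checkin from process 1
t2 = Time.parse("2013-01-01 12:00:51")     # checkin from process 2
(t1 - t1.sec).to_i == (t2 - t2.sec).to_i   # => true: both map to the 12:00 slot key
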
@@ -85,7 +85,7 @@ class ScoutRails::Store
       duplicate.each_pair do |k,v|
         duplicate[k.dup] = v.dup
       end
-      merge_data(duplicate.merge({meta.dup => stat.dup})) # aggregrates + controller
+      merge_metrics(duplicate.merge({meta.dup => stat.dup})) # aggregrates + controller
     end
   end
 
@@ -137,19 +137,18 @@ class ScoutRails::Store
       # tree map of all slow transactions
       if parent_stat.total_call_time >= 2
         @samples.push(ScoutRails::TransactionSample.new(uri,parent_meta.metric_name,parent_stat.total_call_time,transaction_hash.dup))
+        ScoutRails::Agent.instance.logger.debug "Slow transaction sample added. Array Size: #{@samples.size}"
       end
     end
   end
 
-  # Returns the slow samples and resets the values - used when reporting.
-  def fetch_and_reset_samples!
+  # Returns the slow sample and resets the values - used when reporting.
+  def fetch_and_reset_sample!
     sample = @sample
-    samples = @samples
     @transaction_sample_lock.synchronize do
       self.sample = nil
-      self.samples = Array.new
     end
-    [sample,samples]
+    sample
   end
 
   # Finds or creates the metric w/the given name in the metric_hash, and updates the time. Primarily used to
@@ -168,14 +167,7 @@ class ScoutRails::Store
 
   # Combines old and current data
   def merge_data(old_data)
-    old_data.each do |old_meta,old_stats|
-      if stats = metric_hash[old_meta]
-        metric_hash[old_meta] = stats.combine!(old_stats)
-      elsif metric_hash.size < MAX_SIZE
-        metric_hash[old_meta] = old_stats
-      end
-    end
-    metric_hash
+    {:metrics => merge_metrics(old_data[:metrics]), :samples => merge_samples(old_data[:samples])}
   end
 
   # Merges old and current data, clears the current in-memory metric hash, and returns
@@ -183,6 +175,25 @@ class ScoutRails::Store
   def merge_data_and_clear(old_data)
     merged = merge_data(old_data)
     self.metric_hash = {}
+    # TODO - is this lock needed?
+    @transaction_sample_lock.synchronize do
+      self.samples = []
+    end
     merged
   end
+
+  def merge_metrics(old_metrics)
+    old_metrics.each do |old_meta,old_stats|
+      if stats = metric_hash[old_meta]
+        metric_hash[old_meta] = stats.combine!(old_stats)
+      elsif metric_hash.size < MAX_SIZE
+        metric_hash[old_meta] = old_stats
+      end
+    end
+    metric_hash
+  end
+
+  def merge_samples(old_samples)
+    self.samples += old_samples
+  end
 end # class Store
@@ -1,3 +1,3 @@
 module ScoutRails
-  VERSION = "1.1.5.pre"
+  VERSION = "1.1.5.pre3"
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: scout_rails
 version: !ruby/object:Gem::Version
-  version: 1.1.5.pre
+  version: 1.1.5.pre3
   prerelease: 6
 platform: ruby
 authors: