request_response_stats 0.3.0 → 0.4.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 53fa4042d8fbb53e6cce2300c4b4da6005f63a62
- data.tar.gz: 1719a05129a6c9e664961f9318af5639a6305fd1
+ metadata.gz: 6a32b1fbcf4ef6f4b8c97147dc7fa0a8acf08aaa
+ data.tar.gz: d2c29cbd26516dd5200a897d8d0e6669cb289b96
  SHA512:
- metadata.gz: 4b71eb4ad1cdb2a5714e906e21fd3cc94ac644cc1091d26dd738ae049c76943af819d828a069d65ef747f993cf7c1eb7bc18a4e7c1be34e0da8bf2bfb9bdc7dd
- data.tar.gz: b6b5b5ff0dd49d1e8e4311b5d5910a03882a5c7a78fbc51849eaa77cbc3c7ef825bef753860478cbc71999bd1261da02873b13ffc022a4df54167f882b936296
+ metadata.gz: a0c8c978edb8ee2c48fbe8e1fe42d798488faea0ee17fc9623dbc278191994fe04eb951d6cb8323b69c6a2cbfa9d4a458d9de7b320242285bc0deed9ba675559
+ data.tar.gz: 06505e148f4c4d8b68217dcf2e8d892c71f66574030a51439e93fdd6adcfe53c5f8674ad27369a3bb1aeec44146d903c88c2b395cbe5fe4469ca8c1fe0537b97
data/.gitignore CHANGED
@@ -13,4 +13,6 @@

  **/.DS_Store
  **/*.swp
+ **/*.swo
  .byebug_history
+ tags
@@ -3,3 +3,9 @@ language: ruby
  rvm:
  - 2.4.1
  before_install: gem install bundler -v 1.15.4
+ services:
+ - redis-server
+ - mongodb
+ before_script:
+ - sleep 15
+ - mongo mydb_test --eval 'db.createUser({user:"travis",pwd:"test",roles:["readWrite"]});'
data/Gemfile CHANGED
@@ -4,3 +4,5 @@ git_source(:github) {|repo_name| "https://github.com/#{repo_name}" }

  # Specify your gem's dependencies in request_response_stats.gemspec
  gemspec
+
+ gem 'codecov', :require => false, :group => :test
data/README.md CHANGED
@@ -20,6 +20,12 @@

  # RequestResponseStats

+ [![Gem Version](https://badge.fury.io/rb/request_response_stats.svg)](https://badge.fury.io/rb/request_response_stats)
+ [![Build Status](https://travis-ci.org/goyalmunish/request_response_stats.svg?branch=master)](https://travis-ci.org/goyalmunish/request_response_stats)
+ [![codecov](https://codecov.io/gh/goyalmunish/request_response_stats/branch/master/graph/badge.svg)](https://codecov.io/gh/goyalmunish/request_response_stats)
+ [![Maintainability](https://api.codeclimate.com/v1/badges/0c231c47679470213426/maintainability)](https://codeclimate.com/github/goyalmunish/request_response_stats/maintainability)
+ [![Inline docs](http://inch-ci.org/github/goyalmunish/request_response_stats.svg?branch=master)](http://inch-ci.org/github/goyalmunish/request_response_stats)
+
  ## Prerequisites

  The gem uses [Redis](https://github.com/redis/redis-rb) as temporary storage for the captured stats data; [MongoDB](https://github.com/mongodb/mongoid) is used for its permanent storage.
@@ -67,9 +73,9 @@ Configure `config/initializers/request_response_stat_config.rb` as per your requ

  ### Documentation References

- Refer: http://www.rubydoc.info/gems/request_response_stats/
+ Ruby docs reference: http://www.rubydoc.info/gems/request_response_stats/

- But, you can get better documentation by running tests :wink:.
+ But you can get even better documentation by running the tests :wink:. Refer: https://travis-ci.org/goyalmunish/request_response_stats.

  ### Checking current data in redis

@@ -1,35 +1,36 @@
  # File: app/controllers/req_res_stat_controller.rb

- # TODO: This controller is yet to be tested
+ # TODO: This controller is not fully tested yet
  class ReqResStatController < ApplicationController
+ # Note: Refer to https://www.chartkick.com/ and the chartkick gem for easily creating charts with Ruby

  # params[:start_time] format: "2009-06-24 12:39:54 +09:00"
  # params[:end_time] format: "2009-06-24 12:39:54 +09:00"
  # params[:stat_key] popular choices: "request_count", "error_count", "min_time", "max_time", "avg_time"
  def get_stats
- stat_key = params[:stat_key].to_sym
- start_time = parse_date_time_zone params[:start_time]
- end_time = parse_date_time_zone params[:end_time]
- granularity_in_hours = params[:granularity_in_hours].to_i.hours if params[:granularity_in_hours].present?
+ # query conditions
+ start_time = params[:start_time].present? ? parse_date_time_zone(params[:start_time]) : Time.now - 7.days
+ end_time = params[:end_time].present? ? parse_date_time_zone(params[:end_time]) : Time.now
+ granularity_in_hours = params[:granularity_in_hours].present? ? params[:granularity_in_hours].to_i.hours : 1.hour

- min_values = ReqResStat.get_min(stat_key, start_time, end_time, granularity_in_hours)
- max_values = ReqResStat.get_max(stat_key, start_time, end_time, granularity_in_hours)
- avg_values = ReqResStat.get_avg(stat_key, start_time, end_time, granularity_in_hours)
+ # firing the base query only once
+ base_req_res_stats = ReqResStat.get_within(start_time, end_time)

- return_value = {
- start_time: start_time,
- end_time: end_time,
- granularity_in_hours: granularity_in_hours,
- min_values: min_values,
- max_values: max_values,
- avg_values: avg_values,
- }
-
- render json: return_value
+ # gathering data
+ @request_count_stats = fetch_stats_for(:request_count, start_time, end_time, granularity_in_hours, base_req_res_stats)
+ @max_time_stats = fetch_stats_for(:max_time, start_time, end_time, granularity_in_hours, base_req_res_stats)
+ @avg_time_stats = fetch_stats_for(:avg_time, start_time, end_time, granularity_in_hours, base_req_res_stats)
+ @min_time_stats = fetch_stats_for(:min_time, start_time, end_time, granularity_in_hours, base_req_res_stats)

- rescue Exception => ex
- error_message = [ex.message, ex.backtrace.join("\n")].join("\n")
- render json: {error_message: error_message}
+ render json: {
+ request_count_stats: @request_count_stats,
+ max_time_stats: @max_time_stats,
+ avg_time_stats: @avg_time_stats,
+ min_time_stats: @min_time_stats
+ }
+ rescue Exception => ex
+ error_message = [ex.message, ex.backtrace.join("\n")].join("\n")
+ render json: {error_message: error_message}
  end

  def get_details
@@ -45,14 +46,62 @@ class ReqResStatController < ApplicationController
  }

  render json: return_value
-
- rescue Exception => ex
- error_message = [ex.message, ex.backtrace.join("\n")].join("\n")
- render json: {error_message: error_message}
+ rescue Exception => ex
+ error_message = [ex.message, ex.backtrace.join("\n")].join("\n")
+ render json: {error_message: error_message}
  end

  private

+ # if `base_records` is passed, the base dataset is not built from `start_time` and `end_time`;
+ # in that case, `start_time` and `end_time` are not used to scope the query
+ def fetch_stats_for(stat_key, start_time, end_time, granularity_in_hours, base_records=nil)
+ if base_records
+ # use the passed basic dataset
+ base_req_res_stats = base_records
+ else
+ # create the basic dataset
+ base_req_res_stats = ReqResStat.get_within(start_time, end_time)
+ end
+
+ min_values = line_chart_data base_req_res_stats.get_min(stat_key, start_time, end_time, granularity_in_hours)
+ max_values = line_chart_data base_req_res_stats.get_max(stat_key, start_time, end_time, granularity_in_hours)
+ avg_values = line_chart_data base_req_res_stats.get_avg(stat_key, start_time, end_time, granularity_in_hours)
+
+ stats_data = [
+ {
+ name: "Min. Values",
+ data: min_values
+ },
+ {
+ name: "Max. Values",
+ data: max_values
+ },
+ {
+ name: "Avg. Values",
+ data: avg_values
+ }
+ ]
+
+ data = {
+ start_time: start_time,
+ end_time: end_time,
+ granularity_in_hours: granularity_in_hours,
+ stats_data: stats_data
+ }
+
+ data
+ end
+
+ def line_chart_data(values)
+ values_data = {}
+ values.each do |elem|
+ values_data[elem[:start_time]] = elem[:data] || 0
+ end
+
+ values_data
+ end
+
  # time format: "2009-06-24 12:39:54 +09:00"
  def parse_date_time_zone(date_time_zone)
  date, time, zone = date_time_zone.split(" ")
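Side note on the series format produced above: `line_chart_data` turns the list of per-bucket stat hashes into a `{bucket start_time => value}` map, which is the shape chartkick's `line_chart` helper plots directly. A minimal sketch of that transformation with made-up sample values (only the `:start_time` and `:data` keys used by the code above are assumed; everything else is illustrative):

# Sample element shape as consumed by line_chart_data above; the values are invented.
elements = [
  { start_time: Time.new(2017, 12, 20, 10), data: 42 },
  { start_time: Time.new(2017, 12, 20, 11), data: nil }  # nil buckets become 0
]
series = elements.each_with_object({}) do |elem, acc|
  acc[elem[:start_time]] = elem[:data] || 0
end
# series == { Time.new(2017, 12, 20, 10) => 42, Time.new(2017, 12, 20, 11) => 0 }
# Wrapped in {name:, data:} pairs (as stats_data does above), such hashes can be fed
# straight to chartkick in a hypothetical view: <%= line_chart @avg_time_stats[:stats_data] %>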
@@ -17,7 +17,11 @@ module RequestResponseStats
  begin
  rrs = RequestResponse.new(request, response, {redis_connection: $redis, gather_stats: true, mongoid_doc_model: ReqResStat})
  rrs.capture_request_response_cycle_start_info
- yield
+ if block_given?
+ yield
+ else
+ raise StandardError, "No block received. Investigate!"
+ end
  rrs.capture_request_response_cycle_end_info
  rescue Exception => ex
  rrs.try(:capture_request_response_cycle_error_info)
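The `block_given?` guard added above assumes the capture wrapper is always handed the action body as a block. A minimal sketch of that wiring in a host controller; the concern and hook names below are placeholders, not shown in this hunk:

class ApplicationController < ActionController::Base
  # Placeholder names; use whatever module and method name the gem's README actually prescribes.
  include RequestResponseStats::ControllerConcern
  around_action :capture_request_response_stats  # Rails passes the action as the block that gets yielded
end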
@@ -45,27 +45,33 @@ module RequestResponseStats
  }.stringify_keys

  class << self
- # it returns the redis connection
+
+ # returns the redis connection
  # this method must be redefined for `RedisRecord` to be usable
  def redis
  raise StandardError, "UNDEFINED #{__method__}"
  end

  # get value from redis
+ # wrapper around redis' `get` method
  def get(key)
  redis.get(key)
  end

  # set value to redis
+ # wrapper around redis' `set` method
  def set(key, value, options={})
  redis.set(key, value, options)
  end

  # delete value from redis
+ # wrapper around redis' `del` method
  def del(key)
  redis.del(key)
  end

+ # returns all request_response_stats-relevant redis keys
+ # by default, only PUBLIC keys are returned
  def all_keys(opts={})
  support = opts[:support] || false
  if support
@@ -91,6 +97,7 @@ module RequestResponseStats
  data
  end

+ # returns a collection of all relevant PUBLIC request_response_stats data from redis
  def hashify_all_data(opts={})
  support = opts[:support] || false
  req_res_stat = ActiveSupport::HashWithIndifferentAccess.new
@@ -101,10 +108,12 @@ module RequestResponseStats
  req_res_stat
  end

+ # flushes all request_response_stats data from redis
  def flush_all_keys
  redis.del(*all_keys(support: true)) if all_keys.present?
  end

+ # this method must be overridden
  def group_stats_by_time_duration
  raise StandardError, "UNDEFINED #{__method__}"
  end
@@ -148,6 +157,9 @@ module RequestResponseStats
  alerts_data
  end

+ # returns all PUBLIC request_response_stats-related freezed keys from redis
+ # a freezed key is a redis key that will no longer be updated
+ # only freezed keys are eligible to be moved to mongo
  def freezed_keys
  all_keys.map{|k| self.new(k)}.select{|k| k.is_key_freezed?}.map{|rr| rr.key}
  end
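As the comments above note, `redis` (and `group_stats_by_time_duration`) must be supplied by a concrete class before these helpers are usable. A minimal sketch of doing that and of inspecting the buffered stats; the subclass name, connection details, and bucket size are illustrative assumptions, not part of this diff:

# Illustrative only; the gem wires up its own concrete redis-backed record class.
class MyStatsRedisRecord < RequestResponseStats::RedisRecord
  class << self
    def redis
      @redis ||= Redis.new(url: ENV.fetch("REDIS_URL", "redis://localhost:6379"))
    end

    def group_stats_by_time_duration
      1.minute  # bucket size assumed; ActiveSupport (a gem dependency) provides the duration helper
    end
  end
end

MyStatsRedisRecord.all_keys                  # PUBLIC keys only
MyStatsRedisRecord.all_keys(support: true)   # include support keys as well
MyStatsRedisRecord.hashify_all_data          # all PUBLIC stats currently buffered in redis
MyStatsRedisRecord.freezed_keys              # keys that are frozen and ready to move to mongo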
@@ -5,10 +5,10 @@ require 'mongoid'

  class RequestResponseStats::ReqResStat
  include Mongoid::Document
- # include Mongoid::Timestamps

  store_in collection: "statsReqRes"

+ # defining fields
  field :key_name, type: String
  field :server_name, type: String
  field :api_name, type: String
@@ -32,9 +32,21 @@ class RequestResponseStats::ReqResStat
  field :min_gc_stat_diff, type: Hash
  field :max_gc_stat_diff, type: Hash

+ # defining indexes
+ index({key_name: 1}, {unique: true, background: true}) # note that this is a unique index
+ index({start_time: -1, end_time: -1}, {unique: false, background: true})
+ index({api_name: 1}, {unique: false, background: true})
+ index({server_name: 1}, {unique: false, background: true})
+ index({request_count: 1}, {unique: false, background: true})
+ index({error_count: 1}, {unique: false, background: true})
+ index({max_time: 1}, {unique: false, background: true})
+ index({avg_time: 1}, {unique: false, background: true})
+ index({min_time: 1}, {unique: false, background: true})
+
  DEFAULT_STATS_GRANULARITY = 1.hour
  PERCISION = 2

+ # returns a string identifying server_name, api_name, and api_verb
  def server_plus_api
  [server_name, api_name, api_verb].join("_")
  end
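One note on the index declarations added above: Mongoid only registers them on the model, so they still have to be created in MongoDB. A minimal sketch, assuming a Rails host app with the standard Mongoid tooling:

# Per model, e.g. from a Rails console or a deploy hook ...
RequestResponseStats::ReqResStat.create_indexes
# ... or for all Mongoid models via the bundled rake task:
#   bundle exec rake db:mongoid:create_indexes
#
# The unique index on :key_name rejects documents with duplicate key_name values,
# which is relevant to the batch insert done in move_data_from_redis_to_mongo further below.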
@@ -43,26 +55,36 @@ class RequestResponseStats::ReqResStat
  # Note:
  # `start_time` and `end_time` are Time objects
  # `start_time` is inclusive but `end_time` is not
+ # Use `get_within` with `nil` values for `start_time` and `end_time` to minimize database hits
+ # when firing the same kind of queries on the same date range of data
  def get_within(start_time, end_time)
- where(:start_time.gte => start_time, :end_time.lt => end_time)
+ if start_time || end_time
+ where(:start_time.gte => start_time, :end_time.lt => end_time)
+ else
+ all
+ end
  end

  # wrapper around `get_stat` for :sum stat
+ # for more info, check the documentation for `get_stat`
  def get_sum(key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
  get_stat("sum", key, start_time, end_time, granularity)
  end

  # wrapper around `get_stat` for :min stat
+ # for more info, check the documentation for `get_stat`
  def get_min(key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
  get_stat("min", key, start_time, end_time, granularity)
  end

  # wrapper around `get_stat` for :max stat
+ # for more info, check the documentation for `get_stat`
  def get_max(key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
  get_stat("max", key, start_time, end_time, granularity)
  end

  # wrapper around `get_stat` for :avg stat
+ # for more info, check the documentation for `get_stat`
  def get_avg(key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
  data = get_stat("sum", key, start_time, end_time, granularity)
  data.each do |e|
@@ -77,8 +99,13 @@ class RequestResponseStats::ReqResStat
  data
  end

+ # instead of aggregated values (as in `get_stat`), it returns grouped values for the given key, granularity level,
+ # and start_time/end_time range
  # set `stat_type` as `nil` to return grouped but uncompacted data
  # otherwise, you can set `stat_type` as :sum, :max, :min, :avg to get grouped data
+ # TODO: Ignore `start_time` and `end_time` if a time-based collection is passed
+ # TODO: Optimize `get_time_ranges` so it does not recalculate time_ranges again and again for the same `start_time`
+ # and `end_time` (that is, for the same time-based collection)
  def get_details(key, start_time, end_time, stat_type = nil, granularity = DEFAULT_STATS_GRANULARITY)
  # get ungrouped data
  stat_type = stat_type.to_s.to_sym if stat_type
@@ -149,6 +176,7 @@ class RequestResponseStats::ReqResStat
  time_ranges
  end

+ # returns aggregated values for the given key at the given granularity between the given start_time and end_time
  # stat: ["sum", "min", "max"]
  # Note that [].sum is 0, whereas [].min and [].max are nil
  def get_stat(stat_type, key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
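A short usage sketch of the query helpers changed above, as they might be exercised from a Rails console; the model path and stat fields are as in this diff, while the time values are arbitrary:

start_time = Time.now - 3.days
end_time   = Time.now

# Scope once and reuse the criteria for several stats, as the reworked controller action does:
scoped = RequestResponseStats::ReqResStat.get_within(start_time, end_time)
scoped.get_max(:max_time, start_time, end_time, 6.hours)
scoped.get_avg(:avg_time, start_time, end_time, 6.hours)

# Passing nil for both bounds now falls back to `all` instead of a nil-bounded range query:
RequestResponseStats::ReqResStat.get_within(nil, nil).count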
@@ -37,6 +37,7 @@ module RequestResponseStats
  end

  # captures request info that will be used at the end of request-response cycle
+ # note that the captured information is saved only temporarily
  def capture_request_response_cycle_start_info
  return gather_stats unless gather_stats

@@ -156,28 +157,50 @@ module RequestResponseStats
  end

  # captures error info
+ # it is called if an exception is raised, and it in turn calls capture_request_response_cycle_end_info with capture_error: true
  def capture_request_response_cycle_error_info
  capture_request_response_cycle_end_info(capture_error: true)
  end

  # moves data from redis to mongo
- def move_data_from_redis_to_mongo
- moved_keys = redis_record.freezed_keys.select do |redis_key|
- value = redis_record.formatted_parsed_get_for_mongo(redis_key)
- mongo_doc = mongoid_doc_model.create(value)
- redis_record.del redis_key if mongo_doc
- mongo_doc
- end
+ # only freezed and PUBLIC keys are moved
+ def move_data_from_redis_to_mongo(at_once=true)
+ if at_once
+ # multiple records will be inserted to mongodb at once
+ # this is to minimize the index creation time
+ values = []
+ redis_keys = []
+ redis_record.freezed_keys.each do |redis_key|
+ values << redis_record.formatted_parsed_get_for_mongo(redis_key)
+ redis_keys << redis_key
+ end
+ mongoid_doc_model.create(values)
+ redis_record.del(*redis_keys) if redis_keys.size > 0
+
+ redis_keys.size
+ else
+ # records will be inserted to mongo one at a time
+ # the corresponding key from redis will be deleted only after successful creation of the mongodb record
+ moved_keys = redis_record.freezed_keys.select do |redis_key|
+ value = redis_record.formatted_parsed_get_for_mongo(redis_key)
+ mongo_doc = mongoid_doc_model.create(value)
+ redis_record.del redis_key if mongo_doc
+ mongo_doc
+ end

- moved_keys.size
+ moved_keys.size
+ end
  end

  private

+ # returns current time
  def get_system_current_time
  Time.now.to_f.round(SECONDS_PRECISION)
  end

+ # returns current system memory
+ # it uses the `free` command to capture system memory info
  def get_system_memory_info_mb
  key_name = redis_record.support_key(get_server_hostname, [get_server_hostname, "memory"].join("_"))
  value = ActiveSupport::HashWithIndifferentAccess.new(redis_record.parsed_get key_name)
@@ -195,6 +218,7 @@ module RequestResponseStats
  return_value
  end

+ # returns the difference (new - old) in gc_stat
  def get_gc_stat_diff(old_gc_stat, new_gc_stat)
  stat_diff = {}
  gc_keys = new_gc_stat.keys.map{ |k| k.to_s.to_sym }
@@ -236,16 +260,22 @@ module RequestResponseStats
  stat
  end

+ # returns system used memory
+ # uses `get_system_memory_info_mb` to get the info
  def get_system_used_memory_mb
  # (`free -ml | grep 'Mem:' | awk -F' ' '{ print $3 }'`.strip.to_i rescue 0).round(MEMORY_PRECISION)
  get_system_memory_info_mb[:used_memory]
  end

+ # returns used swap memory
+ # uses `get_system_memory_info_mb` to get the info
  def get_system_used_swap_memory_mb
  # (`free -ml | grep 'Swap:' | awk -F' ' '{ print $3 }'`.strip.to_i rescue 0).round(MEMORY_PRECISION)
  get_system_memory_info_mb[:used_swap_memory]
  end

+ # returns the system hostname
+ # uses the Linux `hostname` command to get the info
  def get_server_hostname
  (`hostname`).strip
  end
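On the reworked `move_data_from_redis_to_mongo(at_once=true)` shown earlier in this file's diff: it is intended to be triggered periodically so frozen redis keys get flushed to MongoDB in one batch insert. A hedged sketch of such a trigger; building the object with nil request/response and the scheduling mechanism are assumptions, not shown in this diff:

# Hypothetical periodic job (cron, sidekiq-scheduler, etc.).
rrs = RequestResponseStats::RequestResponse.new(nil, nil, {
  redis_connection: $redis,
  gather_stats: true,
  mongoid_doc_model: RequestResponseStats::ReqResStat
})
moved = rrs.move_data_from_redis_to_mongo(true)  # batch insert; pass false to move one key at a time
puts "request_response_stats: moved #{moved} keys from redis to mongo"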
@@ -1,5 +1,5 @@
  # File: lib/request_response_stats/version.rb

  module RequestResponseStats
- VERSION = "0.3.0"
+ VERSION = "0.4.0"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: request_response_stats
  version: !ruby/object:Gem::Version
- version: 0.3.0
+ version: 0.4.0
  platform: ruby
  authors:
  - Munish Goyal
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2017-12-09 00:00:00.000000000 Z
+ date: 2017-12-23 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: activesupport