request_response_stats 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +16 -0
- data/.rspec +2 -0
- data/.travis.yml +5 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Gemfile +6 -0
- data/LICENSE.txt +21 -0
- data/README.md +43 -0
- data/Rakefile +6 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/lib/req_res_stat_controller.rb +65 -0
- data/lib/request_response_stats/controller_concern.rb +35 -0
- data/lib/request_response_stats/custom_client.rb +76 -0
- data/lib/request_response_stats/dummy_request.rb +6 -0
- data/lib/request_response_stats/dummy_response.rb +15 -0
- data/lib/request_response_stats/redis_record.rb +226 -0
- data/lib/request_response_stats/req_res_stat.rb +170 -0
- data/lib/request_response_stats/request_response.rb +259 -0
- data/lib/request_response_stats/version.rb +5 -0
- data/lib/request_response_stats.rb +62 -0
- data/lib/request_response_stats_config.rb +64 -0
- data/lib/tasks/move_req_res_cycle_data_from_redis_to_mongo.rake +18 -0
- data/request_response_stats.gemspec +41 -0
- metadata +166 -0

data/lib/request_response_stats/req_res_stat.rb
@@ -0,0 +1,170 @@

# File: app/models/req_res_stat.rb
# File: lib/request_response_stats/req_res_stat.rb

require 'mongoid'

class RequestResponseStats::ReqResStat
  include Mongoid::Document
  # include Mongoid::Timestamps

  store_in collection: "statsReqRes"

  field :key_name, type: String
  field :server_name, type: String
  field :api_name, type: String
  field :api_verb, type: String
  field :api_controller, type: String
  field :api_action, type: String
  field :request_count, type: Integer
  field :min_time, type: Float
  field :max_time, type: Float
  field :avg_time, type: Float
  field :start_time, type: DateTime
  field :end_time, type: DateTime
  field :error_count, type: Integer
  field :min_used_memory_MB, type: Integer
  field :max_used_memory_MB, type: Integer
  field :avg_used_memory_MB, type: Integer
  field :min_swap_memory_MB, type: Integer
  field :max_swap_memory_MB, type: Integer
  field :avg_swap_memory_MB, type: Integer
  field :avg_gc_stat_diff, type: Hash
  field :min_gc_stat_diff, type: Hash
  field :max_gc_stat_diff, type: Hash

  DEFAULT_STATS_GRANULARITY = 1.hour
  PERCISION = 2

  def server_plus_api
    [server_name, api_name, api_verb].join("_")
  end

  class << self
    # Note:
    # `start_time` and `end_time` are Time objects
    # `start_time` is inclusive but `end_time` is not
    def get_within(start_time, end_time)
      where(:start_time.gte => start_time, :end_time.lt => end_time)
    end

    # wrapper around `get_stat` for :sum stat
    def get_sum(key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
      get_stat("sum", key, start_time, end_time, granularity)
    end

    # wrapper around `get_stat` for :min stat
    def get_min(key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
      get_stat("min", key, start_time, end_time, granularity)
    end

    # wrapper around `get_stat` for :max stat
    def get_max(key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
      get_stat("max", key, start_time, end_time, granularity)
    end

    # wrapper around `get_stat` for :avg stat
    def get_avg(key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
      data = get_stat("sum", key, start_time, end_time, granularity)
      data.each do |e|
        e[:stat_type] = "avg"
        if e[:count] != 0
          e[:data] = (e[:data] * 1.0 / e[:count]).try(:round, PERCISION)
        else
          e[:data] = 0
        end
      end
      data
    end

    # set `stat_type` as `nil` to return grouped but uncompacted data
    # otherwise, you can set `stat_type` as :sum, :max, :min, :avg to get grouped data
    def get_details(key, start_time, end_time, stat_type = nil, granularity = DEFAULT_STATS_GRANULARITY)
      # get ungrouped data
      stat_type = stat_type.to_s.to_sym if stat_type
      key = key.to_s.to_sym
      relevant_records = get_within(start_time, end_time)
      time_ranges = get_time_ranges(start_time, end_time, granularity)
      stats = time_ranges.map do |time_range|
        data_for_time_range = relevant_records.get_within(*time_range.values).map { |r|
          {server_plus_api: r.server_plus_api, data: r[key], key_name: r.key_name}
        }
        {data: data_for_time_range, **time_range}
      end

      # grouping data by :server_plus_api
      stats.each do |r|
        data = r[:data]
        data = data.map { |e| {server_plus_api: e[:server_plus_api], data: e[:data]} }
        data = data.group_by { |e| e[:server_plus_api] }
        r[:data] = data
      end

      # calculating grouped value based on stat_type
      if stat_type
        if [:sum, :min, :max].include? stat_type
          # calculate grouped value
          stats.each do |r|
            data = r[:data]
            data = data.map do |k, v|
              element_data = v.map { |e| e[:data] }
              {server_plus_api: k, count: element_data.size, data: element_data.compact.public_send(stat_type).try(:round, PERCISION)}
            end
            r[:data] = data
          end

          stats
        elsif stat_type == :avg
          data = get_details(key, start_time, end_time, :sum, granularity)
          data.each do |r|
            r[:data].each do |e|
              e[:data] = (e[:data] * 1.0 / e[:count]).try(:round, PERCISION)
            end
          end

          data
        else
          "This :stat_type is not supported"
        end
      else
        stats
      end
    end

    private

    def get_time_ranges(start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
      slots = (((end_time - start_time) / granularity).ceil) rescue 0
      current_start_time = start_time
      time_ranges = (1..slots).map do |slot|
        value = {start_time: current_start_time, end_time: current_start_time + granularity}
        current_start_time += granularity
        value
      end
      time_ranges[-1][:end_time] = end_time if time_ranges[-1] && (time_ranges[-1][:end_time] > end_time)

      time_ranges
    end

    # stat: ["sum", "min", "max"]
    # Note that [].sum is 0, whereas [].min and [].max are nil
    def get_stat(stat_type, key, start_time, end_time, granularity = DEFAULT_STATS_GRANULARITY)
      stat_type = stat_type.to_s.to_sym
      key = key.to_s.to_sym
      relevant_records = get_within(start_time, end_time)
      time_ranges = get_time_ranges(start_time, end_time, granularity)
      stats = time_ranges.map do |time_range|
        time_range_data = relevant_records.get_within(*time_range.values).pluck(key)
        data = time_range_data.compact.public_send(stat_type).try(:round, PERCISION)
        {key: key, stat_type: stat_type, data: data, count: time_range_data.size, **time_range}
      end

      stats
    end
  end
end
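
For reference, the class-level helpers above return one hash per granularity slot. A minimal console sketch, assuming Mongoid is configured and some ReqResStat documents already exist; the return shapes shown in the comments are inferred from `get_stat` and `get_details`, and the values are made up:

# Illustrative only; not part of the packaged files.
t = Time.now

# get_max / get_min / get_sum / get_avg return one entry per slot, e.g.:
# {key: :max_time, stat_type: :max, data: 1.84, count: 12,
#  start_time: <slot start>, end_time: <slot end>}
RequestResponseStats::ReqResStat.get_max(:max_time, t - 1.day, t, 6.hours)

# get_details with stat_type = nil keeps the per-record data, grouped by
# "<server_name>_<api_name>_<api_verb>" (see #server_plus_api) within each slot.
RequestResponseStats::ReqResStat.get_details(:max_time, t - 1.day, t, nil, 6.hours)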

data/lib/request_response_stats/request_response.rb
@@ -0,0 +1,259 @@

# File: lib/request_response_stats/request_response.rb

require_relative 'redis_record'
require_relative 'req_res_stat'

module RequestResponseStats
  class RequestResponse
    attr_accessor :request, :response
    attr_accessor :redis_record
    attr_accessor :redis, :mongoid_doc_model, :gather_stats

    LONGEST_REQ_RES_CYCLE = 2.hours
    SECONDS_PRECISION = 3
    MEMORY_PRECISION = 0
    SYS_CALL_FREQ = 60.seconds

    # Set `GROUP_STATS_BY_TIME_DURATION` to `false` if no time based grouping is required;
    # otherwise you can set it to a value such as `1.minute` (but within a day)
    GROUP_STATS_BY_TIME_DURATION = 1.minute

    # Here:
    # `redis_connection` is the connection to the redis db
    # `mongoid_doc_model` is the Mongoid::Document model whose schema is compatible with the data structure kept in redis
    # if `gather_stats` is `false`, then no new data is added to the redis db
    def initialize(req = nil, res = nil, opts = {redis_connection: $redis, mongoid_doc_model: ReqResStat, gather_stats: true})
      @request = req
      @response = res
      @redis = opts[:redis_connection]
      @mongoid_doc_model = opts[:mongoid_doc_model]
      @gather_stats = opts[:gather_stats]

      @redis_record = RedisRecord

      # adding behavior to dependents
      temp_redis = @redis # TODO: check why using @redis directly is not working. Do instance variables have special meaning inside a define_singleton_method block?
      @redis_record.define_singleton_method(:redis) { temp_redis }
      @redis_record.define_singleton_method(:group_stats_by_time_duration) { GROUP_STATS_BY_TIME_DURATION }
    end

    # captures request info that will be used at the end of the request-response cycle
    def capture_request_response_cycle_start_info
      return gather_stats unless gather_stats

      # get system info
      current_time = get_system_current_time

      # temporarily save request info
      req_info = {
        req_object_id: request.object_id,
        res_object_id: response.object_id,
        server_name: (request.env["SERVER_NAME"] rescue "some_server_name"),
        req_path: (request.path rescue "some_path"),
        req_http_verb: (request.method rescue "some_method"),
        req_time: current_time,
        req_url: (request.url rescue "some_url"),
        req_format: (request.parameters["format"] rescue "some_format"),
        req_controller: (request.parameters["controller"] rescue "some_controller"),
        req_action: (request.parameters["action"] rescue "some_action"),
        remote_ip: (request.remote_ip rescue "some_ip"),
        gc_stat: get_gc_stat,
      }
      redis_req_key_name = redis_record.req_key(get_server_hostname, req_info[:req_object_id])
      redis_record.jsonified_set(redis_req_key_name, req_info, {ex: LONGEST_REQ_RES_CYCLE}, {strict_key_check: false})

      # return key_name
      redis_req_key_name
    end

    # captures response info and makes use of the already captured request info
    # to save info about the current request-response cycle to redis
    def capture_request_response_cycle_end_info(capture_error: false)
      return gather_stats unless gather_stats

      # get system info
      current_time = get_system_current_time
      current_used_memory = get_system_used_memory_mb
      current_swap_memory = get_system_used_swap_memory_mb
      current_hostname = get_server_hostname
      current_gc_stat = get_gc_stat

      res_info = {
        req_object_id: request.object_id,
        res_object_id: response.object_id,
        res_time: current_time,
      }

      # fetching temporary request info
      # return false if temporary request info cannot be found
      redis_req_key_name = redis_record.req_key(get_server_hostname, res_info[:req_object_id])
      req_info = ActiveSupport::HashWithIndifferentAccess.new(redis_record.parsed_get(redis_req_key_name))
      return false if req_info == {}
      redis_record.del redis_req_key_name

      # generating request-response-cycle info
      req_res_info = {
        key_name: nil,
        # server_name: req_info[:server_name],
        server_name: current_hostname,
        api_name: req_info[:req_path],
        api_verb: req_info[:req_http_verb],
        api_controller: req_info[:req_controller],
        api_action: req_info[:req_action],
        request_count: 0,
        min_time: nil,
        max_time: nil,
        avg_time: 0,
        start_time: nil, # slot starting time
        end_time: nil, # slot ending time
        error_count: 0,
        min_used_memory_MB: nil,
        max_used_memory_MB: nil,
        avg_used_memory_MB: 0,
        min_swap_memory_MB: nil,
        max_swap_memory_MB: nil,
        avg_swap_memory_MB: 0,
        avg_gc_stat_diff: Hash.new(0),
        min_gc_stat_diff: {},
        max_gc_stat_diff: {},
      }
      redis_req_res_key_name = redis_record.req_res_key(req_res_info[:server_name], req_res_info[:api_name], req_res_info[:api_verb])
      req_res_info[:key_name] = redis_req_res_key_name
      req_res_info[:start_time], req_res_info[:end_time] = redis_record.get_slot_range_for_key(redis_req_res_key_name).map(&:to_s)
      req_res_info_parsed = redis_record.parsed_get(redis_req_res_key_name)
      req_res_info = if req_res_info_parsed.present?
        # making use of existing value from db
        ActiveSupport::HashWithIndifferentAccess.new(req_res_info_parsed)
      else
        # using default value
        ActiveSupport::HashWithIndifferentAccess.new(req_res_info)
      end
      current_cycle_time = (res_info[:res_time] - req_info[:req_time]).round(SECONDS_PRECISION)
      current_gc_stat_diff = get_gc_stat_diff(req_info[:gc_stat], current_gc_stat)
      req_res_info[:min_time] = [req_res_info[:min_time], current_cycle_time].compact.min
      req_res_info[:max_time] = [req_res_info[:max_time], current_cycle_time].compact.max
      req_res_info[:avg_time] = ((req_res_info[:avg_time] * req_res_info[:request_count] + current_cycle_time) / (req_res_info[:request_count] + 1)).round(SECONDS_PRECISION)
      req_res_info[:min_used_memory_MB] = [req_res_info[:min_used_memory_MB], current_used_memory].compact.min
      req_res_info[:max_used_memory_MB] = [req_res_info[:max_used_memory_MB], current_used_memory].compact.max
      req_res_info[:avg_used_memory_MB] = ((req_res_info[:avg_used_memory_MB] * req_res_info[:request_count] + current_used_memory) / (req_res_info[:request_count] + 1)).round(MEMORY_PRECISION)
      req_res_info[:min_swap_memory_MB] = [req_res_info[:min_swap_memory_MB], current_swap_memory].compact.min
      req_res_info[:max_swap_memory_MB] = [req_res_info[:max_swap_memory_MB], current_swap_memory].compact.max
      req_res_info[:avg_swap_memory_MB] = (req_res_info[:avg_swap_memory_MB] * req_res_info[:request_count] + current_swap_memory) / (req_res_info[:request_count] + 1)
      req_res_info[:min_gc_stat_diff] = get_min_max_sum_gc_stat_diff(:min, req_res_info[:min_gc_stat_diff], current_gc_stat_diff)
      req_res_info[:max_gc_stat_diff] = get_min_max_sum_gc_stat_diff(:max, req_res_info[:max_gc_stat_diff], current_gc_stat_diff)
      req_res_info[:avg_gc_stat_diff] = get_avg_gc_stat_diff(req_res_info[:request_count], req_res_info[:avg_gc_stat_diff], current_gc_stat_diff)
      req_res_info[:request_count] += 1 # Note: `request_count` must be updated last

      # if an error was raised
      if capture_error
        req_res_info[:error_count] += 1
      end

      # saving request-response-cycle info to redis db
      redis_record.jsonified_set(redis_req_res_key_name, req_res_info)

      # return request-response-cycle info key
      redis_req_res_key_name
    end

    # captures error info
    def capture_request_response_cycle_error_info
      capture_request_response_cycle_end_info(capture_error: true)
    end

    # moves data from redis to mongo
    def move_data_from_redis_to_mongo
      moved_keys = redis_record.freezed_keys.select do |redis_key|
        value = redis_record.formatted_parsed_get_for_mongo(redis_key)
        mongo_doc = mongoid_doc_model.create(value)
        redis_record.del redis_key if mongo_doc
        mongo_doc
      end

      moved_keys.size
    end

    private

    def get_system_current_time
      Time.now.to_f.round(SECONDS_PRECISION)
    end

    def get_system_memory_info_mb
      key_name = redis_record.support_key(get_server_hostname, [get_server_hostname, "memory"].join("_"))
      value = ActiveSupport::HashWithIndifferentAccess.new(redis_record.parsed_get key_name)
      return_value = if value == {}
        mem_info = (`free -ml`).split(" ") rescue []
        used_memory = mem_info[8].strip.to_i rescue 0
        used_swap_memory = mem_info[27].strip.to_i rescue 0
        data = {used_memory: used_memory, used_swap_memory: used_swap_memory}
        redis_record.set(key_name, data.to_json, {ex: SYS_CALL_FREQ})
        data
      else
        value
      end

      return_value
    end

    def get_gc_stat_diff(old_gc_stat, new_gc_stat)
      stat_diff = {}
      gc_keys = new_gc_stat.keys.map { |k| k.to_s.to_sym }
      gc_keys.each do |key|
        if old_gc_stat[key] && new_gc_stat[key]
          stat_diff[key] = new_gc_stat[key] - old_gc_stat[key]
        else
          stat_diff[key] = 0
        end
      end

      stat_diff
    end

    # stat_type can be :min, :max, :sum
    def get_min_max_sum_gc_stat_diff(stat_type, old_gsd, new_gsd)
      stat_type = stat_type.to_s.to_sym
      stat = {}
      stat_keys = new_gsd.keys.map { |k| k.to_s.to_sym }
      stat_keys.each do |key|
        if [:min, :max, :sum].include?(stat_type)
          stat[key] = [new_gsd[key], old_gsd[key]].compact.public_send(stat_type)
        else
          "Invalid :stat_type"
        end
      end

      stat
    end

    def get_avg_gc_stat_diff(existing_request_count, old_gsd, new_gsd)
      stat = {}
      stat_keys = new_gsd.keys.map { |k| k.to_s.to_sym }
      stat_keys.each do |key|
        stat[key] = (old_gsd[key] * existing_request_count + new_gsd[key]) / (existing_request_count + 1)
      end

      stat
    end

    def get_system_used_memory_mb
      # (`free -ml | grep 'Mem:' | awk -F' ' '{ print $3 }'`.strip.to_i rescue 0).round(MEMORY_PRECISION)
      get_system_memory_info_mb[:used_memory]
    end

    def get_system_used_swap_memory_mb
      # (`free -ml | grep 'Swap:' | awk -F' ' '{ print $3 }'`.strip.to_i rescue 0).round(MEMORY_PRECISION)
      get_system_memory_info_mb[:used_swap_memory]
    end

    def get_server_hostname
      (`hostname`).strip
    end

    def get_gc_stat
      GC.stat
    end
  end
end
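
The capture methods above are meant to be driven once per request-response cycle: start info before the action runs, end info after it, and error info from a rescue handler. The gem's controller_concern.rb (listed above but not shown in this diff) presumably does this wiring; the sketch below only illustrates the call order with hypothetical filter names, assuming a Rails app with `$redis` configured.

# Hypothetical wiring; filter names and rescue strategy are illustrative,
# not the gem's actual controller_concern implementation.
class ApplicationController < ActionController::Base
  before_action :capture_req_res_start
  after_action :capture_req_res_end

  rescue_from StandardError do |error|
    @req_res_stats&.capture_request_response_cycle_error_info
    raise error
  end

  private

  def capture_req_res_start
    @req_res_stats = RequestResponseStats::RequestResponse.new(request, response)
    @req_res_stats.capture_request_response_cycle_start_info
  end

  def capture_req_res_end
    @req_res_stats&.capture_request_response_cycle_end_info
  end
end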

data/lib/request_response_stats.rb
@@ -0,0 +1,62 @@

# File: lib/request_response_stats.rb

# library files
require "request_response_stats/version"
require_relative 'request_response_stats/request_response'
require_relative 'request_response_stats/custom_client'
require_relative 'request_response_stats/controller_concern'
require_relative 'request_response_stats/req_res_stat'

module RequestResponseStats
  # override to false if you do not want to capture inbound request stats
  RR_INBOUND_STATS = true unless defined? RR_INBOUND_STATS

  # override to false if you do not want to capture outbound request stats
  RR_OUTBOUND_STATS = true unless defined? RR_OUTBOUND_STATS

  unless self.respond_to? :custom_alert_code
    # override to define the code that should be run on encountering alert conditions
    def self.custom_alert_code(data)
      raise StandardError, "Undefined custom alert code"
    end
  end
end

# TODO: The following files should not be required like this; instead they should be extracted into the correct
# place in the Rails project using the `rake` command
# require_relative 'req_res_stat_controller'
# require_relative 'request_response_stats_config'

##### Examples: #####

## Checking current redis data:
=begin
# require 'request_response_stats'
include RequestResponseStats
rrs = RequestResponse.new(nil, nil)
ap rrs.redis_record.hashify_all_data
ap rrs.redis_record.hashify_all_data.size
=end

## Manually moving data from Redis to Mongo:
# ap rrs.move_data_from_redis_to_mongo

## Deleting data from Redis and Mongo:
# rrs.redis_record.all_keys.each{|k| rrs.redis_record.del k}
# ReqResStat.all.delete_all

## Getting stats from Mongo:
=begin
ap ReqResStat.all.size
ap ReqResStat.all.first
t = Time.now
ReqResStat.get_max(:max_time, t - 2.day, t, 6.hours).map{|r| r[:data]}
ReqResStat.get_avg(:avg_time, t - 2.day, t, 6.hours).map{|r| r[:data]}
ReqResStat.get_max(:min_time, t - 2.day, t, 6.hours).map{|r| r[:data]}
ap ReqResStat.get_details(:max_time, t - 2.day, t, nil, 6.hours)
ap ReqResStat.get_details(:max_time, t - 2.day, t, :max, 6.hours)
ap ReqResStat.get_details(:max_time, t - 2.day, t, :min, 6.hours)
ap ReqResStat.get_details(:max_time, t - 2.day, t, :sum, 6.hours)
ap ReqResStat.get_details(:max_time, t - 2.day, t, :avg, 6.hours)
=end
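
Finally, stats accumulated in Redis have to be flushed to Mongo periodically via `move_data_from_redis_to_mongo`. The gem ships `lib/tasks/move_req_res_cycle_data_from_redis_to_mongo.rake` for this (listed above but not shown in this diff); a task wired to a scheduler such as cron would presumably look roughly like the following sketch, in which the task body and output message are illustrative rather than the gem's actual rake file.

# Hypothetical rake task; the gem's bundled task (not shown in this diff)
# presumably does something similar.
namespace :request_response_stats do
  desc "Move frozen request-response stats from Redis to Mongo"
  task move_req_res_cycle_data_from_redis_to_mongo: :environment do
    rrs = RequestResponseStats::RequestResponse.new(nil, nil)
    moved_count = rrs.move_data_from_redis_to_mongo
    puts "Moved #{moved_count} request-response stat keys from Redis to Mongo"
  end
end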