ezmetrics 1.0.5 → 1.2.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +5 -5
- data/README.md +202 -12
- data/lib/ezmetrics.rb +104 -77
- data/lib/ezmetrics/benchmark.rb +95 -0
- metadata +32 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
|
-
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 61d26fa5d7a779917ac2ba154e82f29745a22b64261442117d15b286d0e06276
|
4
|
+
data.tar.gz: 44ff087c54cf0e5fdf8763b88deaa59f3ccdfa1e11c12ab5c1da2eebdeea7764
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 6bf28edd6c644648198905bcdf50a77a0083c53b6eba312b709e9c1cb951adba1c407cbc28ac3ae0588b15185b5aa0132ef1f568ff30af0158b770d388460885
|
7
|
+
data.tar.gz: a08685fea3025b23e5dca7439e9b7f4f2313933363b01a716e68eba800e4b96fce0d976cf25111c0e7a36f655abb6497e3481d39caba881b2ee706e49958b365
|
data/README.md
CHANGED
@@ -2,8 +2,7 @@
|
|
2
2
|
|
3
3
|
[![Gem Version](https://badge.fury.io/rb/ezmetrics.svg)](https://badge.fury.io/rb/ezmetrics)
|
4
4
|
|
5
|
-
|
6
|
-
|
5
|
+
Simple, lightweight and fast metrics aggregation for Rails.
|
7
6
|
|
8
7
|
## Installation
|
9
8
|
|
@@ -15,46 +14,59 @@ gem 'ezmetrics'
|
|
15
14
|
|
16
15
|
### Getting started
|
17
16
|
|
18
|
-
This tool captures and aggregates metrics such as
|
17
|
+
This tool captures and aggregates Rails application metrics such as
|
18
|
+
|
19
19
|
- `duration`
|
20
20
|
- `views`
|
21
21
|
- `db`
|
22
22
|
- `queries`
|
23
23
|
- `status`
|
24
24
|
|
25
|
-
for
|
25
|
+
and stores them for the timeframe you specified, 60 seconds by default.
|
26
26
|
|
27
27
|
You can change the timeframe according to your needs and save the metrics by calling `log` method:
|
28
28
|
|
29
29
|
```ruby
|
30
30
|
# Store the metrics for 60 seconds (default behaviour)
|
31
|
-
EZmetrics.new.log(
|
31
|
+
EZmetrics.new.log(
|
32
|
+
duration: 100.5,
|
33
|
+
views: 40.7,
|
34
|
+
db: 59.8,
|
35
|
+
queries: 4,
|
36
|
+
status: 200
|
37
|
+
)
|
32
38
|
```
|
33
|
-
or
|
34
39
|
|
35
40
|
```ruby
|
36
41
|
# Store the metrics for 10 minutes
|
37
|
-
EZmetrics.new(10.minutes).log(
|
42
|
+
EZmetrics.new(10.minutes).log(
|
43
|
+
duration: 100.5,
|
44
|
+
views: 40.7,
|
45
|
+
db: 59.8,
|
46
|
+
queries: 4,
|
47
|
+
status: 200
|
48
|
+
)
|
38
49
|
```
|
39
50
|
|
40
|
-
|
51
|
+
---
|
52
|
+
|
53
|
+
For displaying metrics you need to call `show` method:
|
41
54
|
|
42
55
|
```ruby
|
43
56
|
# Aggregate and show metrics for last 60 seconds (default behaviour)
|
44
57
|
EZmetrics.new.show
|
45
58
|
```
|
46
59
|
|
47
|
-
or
|
48
|
-
|
49
60
|
```ruby
|
50
61
|
# Aggregate and show metrics for last 10 minutes
|
51
62
|
EZmetrics.new(10.minutes).show
|
52
63
|
```
|
53
64
|
|
54
|
-
|
65
|
+
You can combine these timeframes, for example - store for 10 minutes, display for 5 minutes.
|
55
66
|
|
67
|
+
### Capture metrics
|
56
68
|
|
57
|
-
|
69
|
+
Just add an initializer to your application:
|
58
70
|
|
59
71
|
```ruby
|
60
72
|
# config/initializers/ezmetrics.rb
|
@@ -81,6 +93,8 @@ end
|
|
81
93
|
|
82
94
|
### Display metrics
|
83
95
|
|
96
|
+
As simple as:
|
97
|
+
|
84
98
|
```ruby
|
85
99
|
EZmetrics.new.show
|
86
100
|
```
|
@@ -117,3 +131,179 @@ This will return a hash with the following structure:
|
|
117
131
|
}
|
118
132
|
```
|
119
133
|
|
134
|
+
### Aggregation
|
135
|
+
|
136
|
+
The aggregation can be easily configured by specifying aggregation options as in the following examples:
|
137
|
+
|
138
|
+
**1. Single**
|
139
|
+
|
140
|
+
```ruby
|
141
|
+
EZmetrics.new.show(duration: :max)
|
142
|
+
```
|
143
|
+
|
144
|
+
```ruby
|
145
|
+
{
|
146
|
+
duration: {
|
147
|
+
max: 9675
|
148
|
+
}
|
149
|
+
}
|
150
|
+
```
|
151
|
+
|
152
|
+
---
|
153
|
+
|
154
|
+
**2. Multiple**
|
155
|
+
|
156
|
+
```ruby
|
157
|
+
EZmetrics.new.show(queries: [:max, :avg])
|
158
|
+
```
|
159
|
+
|
160
|
+
```ruby
|
161
|
+
{
|
162
|
+
queries: {
|
163
|
+
max: 76,
|
164
|
+
avg: 26
|
165
|
+
}
|
166
|
+
}
|
167
|
+
```
|
168
|
+
|
169
|
+
---
|
170
|
+
|
171
|
+
**3. Requests**
|
172
|
+
|
173
|
+
```ruby
|
174
|
+
EZmetrics.new.show(requests: true)
|
175
|
+
```
|
176
|
+
|
177
|
+
```ruby
|
178
|
+
{
|
179
|
+
requests: {
|
180
|
+
all: 2000,
|
181
|
+
grouped: {
|
182
|
+
"2xx" => 1900,
|
183
|
+
"3xx" => 15,
|
184
|
+
"4xx" => 80,
|
185
|
+
"5xx" => 5
|
186
|
+
}
|
187
|
+
}
|
188
|
+
}
|
189
|
+
```
|
190
|
+
|
191
|
+
---
|
192
|
+
|
193
|
+
**4. Combined**
|
194
|
+
|
195
|
+
```ruby
|
196
|
+
EZmetrics.new.show(views: :avg, db: [:avg, :max], requests: true)
|
197
|
+
```
|
198
|
+
|
199
|
+
```ruby
|
200
|
+
{
|
201
|
+
views: {
|
202
|
+
avg: 12
|
203
|
+
},
|
204
|
+
db: {
|
205
|
+
avg: 155,
|
206
|
+
max: 4382
|
207
|
+
},
|
208
|
+
requests: {
|
209
|
+
all: 2000,
|
210
|
+
grouped: {
|
211
|
+
"2xx" => 1900,
|
212
|
+
"3xx" => 15,
|
213
|
+
"4xx" => 80,
|
214
|
+
"5xx" => 5
|
215
|
+
}
|
216
|
+
}
|
217
|
+
}
|
218
|
+
```
|
219
|
+
|
220
|
+
### Partitioning
|
221
|
+
|
222
|
+
If you want to visualize your metrics by using a **line chart**, you will need to use partitioning.
|
223
|
+
|
224
|
+
To aggregate metrics, partitioned by a unit of time you need to call `partition_by({time_unit})` before calling `show`
|
225
|
+
|
226
|
+
```ruby
|
227
|
+
# Aggregate metrics for last hour, partition by minute
|
228
|
+
EZmetrics.new(1.hour).partition_by(:minute).show(duration: [:avg, :max], db: :avg)
|
229
|
+
```
|
230
|
+
|
231
|
+
This will return an array of objects with the following structure:
|
232
|
+
|
233
|
+
```ruby
|
234
|
+
[
|
235
|
+
{
|
236
|
+
timestamp: # UNIX timestamp
|
237
|
+
data: # a hash with aggregated metrics
|
238
|
+
}
|
239
|
+
]
|
240
|
+
```
|
241
|
+
|
242
|
+
like in the example below:
|
243
|
+
|
244
|
+
```ruby
|
245
|
+
[
|
246
|
+
{
|
247
|
+
timestamp: 1575242880,
|
248
|
+
data: {
|
249
|
+
duration: {
|
250
|
+
avg: 477,
|
251
|
+
max: 8566
|
252
|
+
},
|
253
|
+
db: {
|
254
|
+
avg: 387
|
255
|
+
}
|
256
|
+
}
|
257
|
+
},
|
258
|
+
{
|
259
|
+
timestamp: 1575242940,
|
260
|
+
data: {
|
261
|
+
duration: {
|
262
|
+
avg: 234,
|
263
|
+
max: 3675
|
264
|
+
},
|
265
|
+
db: {
|
266
|
+
avg: 123
|
267
|
+
}
|
268
|
+
}
|
269
|
+
}
|
270
|
+
]
|
271
|
+
```
|
272
|
+
|
273
|
+
Available time units for partitioning: `second`, `minute`, `hour`, `day`. Default: `minute`.
|
274
|
+
|
275
|
+
### Performance
|
276
|
+
|
277
|
+
The aggregation speed relies on the performance of **Redis** (data storage) and **Oj** (json serialization/parsing).
|
278
|
+
|
279
|
+
You can check the **aggregation** time by running:
|
280
|
+
|
281
|
+
```ruby
|
282
|
+
EZmetrics::Benchmark.new.measure_aggregation
|
283
|
+
```
|
284
|
+
|
285
|
+
| Interval | Duration (seconds) |
|
286
|
+
| :------: | :----------------: |
|
287
|
+
| 1 minute | 0.0 |
|
288
|
+
| 1 hour | 0.04 |
|
289
|
+
| 12 hours | 0.49 |
|
290
|
+
| 24 hours | 1.51 |
|
291
|
+
| 48 hours | 3.48 |
|
292
|
+
|
293
|
+
---
|
294
|
+
|
295
|
+
To check the **partitioned aggregation** time you need to run:
|
296
|
+
|
297
|
+
```ruby
|
298
|
+
EZmetrics::Benchmark.new.measure_aggregation(:minute)
|
299
|
+
```
|
300
|
+
|
301
|
+
| Interval | Duration (seconds) |
|
302
|
+
| :------: | :----------------: |
|
303
|
+
| 1 minute | 0.0 |
|
304
|
+
| 1 hour | 0.04 |
|
305
|
+
| 12 hours | 0.53 |
|
306
|
+
| 24 hours | 1.59 |
|
307
|
+
| 48 hours | 3.51 |
|
308
|
+
|
309
|
+
The benchmarks above were run on a _2017 Macbook Pro 2.9 GHz Intel Core i7 with 16 GB of RAM_
|
data/lib/ezmetrics.rb
CHANGED
@@ -1,11 +1,15 @@
|
|
1
|
-
require "redis"
|
2
|
-
require "
|
1
|
+
require "redis"
|
2
|
+
require "redis/connection/hiredis"
|
3
|
+
require "oj"
|
3
4
|
|
4
5
|
class EZmetrics
|
6
|
+
METRICS = [:duration, :views, :db, :queries].freeze
|
7
|
+
AGGREGATION_FUNCTIONS = [:max, :avg].freeze
|
8
|
+
PARTITION_UNITS = [:second, :minute, :hour, :day].freeze
|
9
|
+
|
5
10
|
def initialize(interval_seconds=60)
|
6
11
|
@interval_seconds = interval_seconds.to_i
|
7
12
|
@redis = Redis.new
|
8
|
-
@storage_key = "ez-metrics"
|
9
13
|
end
|
10
14
|
|
11
15
|
def log(payload={duration: 0.0, views: 0.0, db: 0.0, queries: 0, status: 200})
|
@@ -19,12 +23,12 @@ class EZmetrics
|
|
19
23
|
|
20
24
|
this_second = Time.now.to_i
|
21
25
|
status_group = "#{payload[:status].to_s[0]}xx"
|
22
|
-
@this_second_metrics = redis.get(
|
26
|
+
@this_second_metrics = redis.get(this_second)
|
23
27
|
|
24
28
|
if this_second_metrics
|
25
|
-
@this_second_metrics =
|
29
|
+
@this_second_metrics = Oj.load(this_second_metrics)
|
26
30
|
|
27
|
-
|
31
|
+
METRICS.each do |metrics_type|
|
28
32
|
update_sum(metrics_type)
|
29
33
|
update_max(metrics_type)
|
30
34
|
end
|
@@ -33,6 +37,7 @@ class EZmetrics
|
|
33
37
|
this_second_metrics["statuses"][status_group] += 1
|
34
38
|
else
|
35
39
|
@this_second_metrics = {
|
40
|
+
"second" => this_second,
|
36
41
|
"duration_sum" => safe_payload[:duration],
|
37
42
|
"duration_max" => safe_payload[:duration],
|
38
43
|
"views_sum" => safe_payload[:views],
|
@@ -47,110 +52,132 @@ class EZmetrics
|
|
47
52
|
this_second_metrics["statuses"][status_group] = 1
|
48
53
|
end
|
49
54
|
|
50
|
-
redis.setex(
|
51
|
-
|
55
|
+
redis.setex(this_second, interval_seconds, Oj.dump(this_second_metrics))
|
52
56
|
true
|
53
57
|
rescue => error
|
54
58
|
formatted_error(error)
|
55
59
|
end
|
56
60
|
|
57
|
-
def show
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
+
def show(options=nil)
|
62
|
+
@options = options || default_options
|
63
|
+
partitioned_metrics ? aggregate_partitioned_data : aggregate_data
|
64
|
+
end
|
61
65
|
|
62
|
-
|
66
|
+
def partition_by(time_unit=:minute)
|
67
|
+
time_unit = PARTITION_UNITS.include?(time_unit) ? time_unit : :minute
|
68
|
+
@partitioned_metrics = interval_metrics.group_by { |h| second_to_partition_unit(time_unit, h["second"]) }
|
69
|
+
self
|
70
|
+
end
|
63
71
|
|
64
|
-
|
72
|
+
private
|
65
73
|
|
66
|
-
|
74
|
+
attr_reader :redis, :interval_seconds, :interval_metrics, :requests,
|
75
|
+
:storage_key, :safe_payload, :this_second_metrics, :partitioned_metrics, :options
|
76
|
+
|
77
|
+
def aggregate_data
|
78
|
+
return {} unless interval_metrics.any?
|
79
|
+
@requests = interval_metrics.sum { |hash| hash["statuses"]["all"] }
|
80
|
+
build_result
|
67
81
|
rescue
|
68
|
-
|
82
|
+
{}
|
69
83
|
end
|
70
84
|
|
71
|
-
|
85
|
+
def aggregate_partitioned_data
|
86
|
+
partitioned_metrics.map do |partition, metrics|
|
87
|
+
@interval_metrics = metrics
|
88
|
+
@requests = interval_metrics.sum { |hash| hash["statuses"]["all"] }
|
89
|
+
{ timestamp: partition, data: build_result }
|
90
|
+
end
|
91
|
+
rescue
|
92
|
+
new(options)
|
93
|
+
end
|
94
|
+
|
95
|
+
def build_result
|
96
|
+
result = {}
|
97
|
+
|
98
|
+
result[:requests] = { all: requests, grouped: count_all_status_groups } if options[:requests]
|
99
|
+
|
100
|
+
options.each do |metrics, aggregation_functions|
|
101
|
+
next unless METRICS.include?(metrics)
|
102
|
+
aggregation_functions = [aggregation_functions] unless aggregation_functions.is_a?(Array)
|
103
|
+
next unless aggregation_functions.any?
|
104
|
+
|
105
|
+
aggregation_functions.each do |aggregation_function|
|
106
|
+
result[metrics] ||= {}
|
107
|
+
result[metrics][aggregation_function] = aggregate(metrics, aggregation_function)
|
108
|
+
end
|
109
|
+
end
|
110
|
+
result
|
111
|
+
ensure
|
112
|
+
result
|
113
|
+
end
|
114
|
+
|
115
|
+
def second_to_partition_unit(time_unit, second)
|
116
|
+
return second if time_unit == :second
|
117
|
+
time = Time.at(second)
|
118
|
+
return (time - time.sec - time.min * 60 - time.hour * 3600).to_i if time_unit == :day
|
119
|
+
return (time - time.sec - time.min * 60).to_i if time_unit == :hour
|
120
|
+
(time - time.sec).to_i
|
121
|
+
end
|
72
122
|
|
73
|
-
|
74
|
-
|
123
|
+
def interval_metrics
|
124
|
+
@interval_metrics ||= begin
|
125
|
+
interval_start = Time.now.to_i - interval_seconds
|
126
|
+
interval_keys = (interval_start..Time.now.to_i).to_a
|
127
|
+
redis.mget(interval_keys).compact.map { |hash| Oj.load(hash) }
|
128
|
+
end
|
129
|
+
end
|
130
|
+
|
131
|
+
def aggregate(metrics, aggregation_function)
|
132
|
+
return unless AGGREGATION_FUNCTIONS.include?(aggregation_function)
|
133
|
+
return avg("#{metrics}_sum") if aggregation_function == :avg
|
134
|
+
return max("#{metrics}_max") if aggregation_function == :max
|
135
|
+
end
|
75
136
|
|
76
137
|
def update_sum(metrics)
|
77
|
-
this_second_metrics["#{metrics}_sum"] += safe_payload[metrics
|
138
|
+
this_second_metrics["#{metrics}_sum"] += safe_payload[metrics]
|
78
139
|
end
|
79
140
|
|
80
141
|
def update_max(metrics)
|
81
|
-
max_value = [safe_payload[metrics
|
142
|
+
max_value = [safe_payload[metrics], this_second_metrics["#{metrics}_max"]].max
|
82
143
|
this_second_metrics["#{metrics}_max"] = max_value
|
83
144
|
end
|
84
145
|
|
85
146
|
def avg(metrics)
|
86
|
-
(interval_metrics.
|
147
|
+
(interval_metrics.sum { |h| h[metrics] }.to_f / requests).round
|
87
148
|
end
|
88
149
|
|
89
150
|
def max(metrics)
|
90
|
-
interval_metrics.
|
151
|
+
interval_metrics.max { |h| h[metrics] }[metrics].round
|
91
152
|
end
|
92
153
|
|
93
|
-
def
|
94
|
-
interval_metrics.
|
154
|
+
def count_all_status_groups
|
155
|
+
interval_metrics.inject({ "2xx" => 0, "3xx" => 0, "4xx" => 0, "5xx" => 0 }) do |result, h|
|
156
|
+
result["2xx"] += h["statuses"]["2xx"]
|
157
|
+
result["3xx"] += h["statuses"]["3xx"]
|
158
|
+
result["4xx"] += h["statuses"]["4xx"]
|
159
|
+
result["5xx"] += h["statuses"]["5xx"]
|
160
|
+
result
|
161
|
+
end
|
95
162
|
end
|
96
163
|
|
97
|
-
def
|
164
|
+
def default_options
|
98
165
|
{
|
99
|
-
|
100
|
-
|
101
|
-
|
166
|
+
duration: AGGREGATION_FUNCTIONS,
|
167
|
+
views: AGGREGATION_FUNCTIONS,
|
168
|
+
db: AGGREGATION_FUNCTIONS,
|
169
|
+
queries: AGGREGATION_FUNCTIONS,
|
170
|
+
requests: true
|
102
171
|
}
|
103
172
|
end
|
104
173
|
|
105
|
-
def
|
174
|
+
def formatted_error(error)
|
106
175
|
{
|
107
|
-
|
108
|
-
|
109
|
-
|
110
|
-
},
|
111
|
-
views: {
|
112
|
-
avg: avg(:views_sum),
|
113
|
-
max: max(:views_max)
|
114
|
-
},
|
115
|
-
db: {
|
116
|
-
avg: avg(:db_sum),
|
117
|
-
max: max(:db_max)
|
118
|
-
},
|
119
|
-
queries: {
|
120
|
-
avg: avg(:queries_sum),
|
121
|
-
max: max(:queries_max)
|
122
|
-
},
|
123
|
-
requests: {
|
124
|
-
all: requests,
|
125
|
-
grouped: {
|
126
|
-
"2xx" => count("2xx"),
|
127
|
-
"3xx" => count("3xx"),
|
128
|
-
"4xx" => count("4xx"),
|
129
|
-
"5xx" => count("5xx")
|
130
|
-
}
|
131
|
-
}
|
176
|
+
error: error.class.name,
|
177
|
+
message: error.message,
|
178
|
+
backtrace: error.backtrace.reject { |line| line.match(/ruby|gems/) }
|
132
179
|
}
|
133
180
|
end
|
181
|
+
end
|
134
182
|
|
135
|
-
|
136
|
-
{
|
137
|
-
duration: {
|
138
|
-
avg: 0,
|
139
|
-
max: 0
|
140
|
-
},
|
141
|
-
views: {
|
142
|
-
avg: 0,
|
143
|
-
max: 0
|
144
|
-
},
|
145
|
-
db: {
|
146
|
-
avg: 0,
|
147
|
-
max: 0
|
148
|
-
},
|
149
|
-
queries: {
|
150
|
-
avg: 0,
|
151
|
-
max: 0
|
152
|
-
},
|
153
|
-
requests: {}
|
154
|
-
}
|
155
|
-
end
|
156
|
-
end
|
183
|
+
require "ezmetrics/benchmark"
|
@@ -0,0 +1,95 @@
|
|
1
|
+
require "benchmark"
|
2
|
+
|
3
|
+
class EZmetrics::Benchmark
|
4
|
+
|
5
|
+
def initialize
|
6
|
+
@start = Time.now.to_i
|
7
|
+
@redis = Redis.new
|
8
|
+
@durations = []
|
9
|
+
@iterations = 3
|
10
|
+
@intervals = {
|
11
|
+
"1.minute" => 60,
|
12
|
+
"1.hour " => 3600,
|
13
|
+
"12.hours" => 43200,
|
14
|
+
"24.hours" => 86400,
|
15
|
+
"48.hours" => 172800
|
16
|
+
}
|
17
|
+
end
|
18
|
+
|
19
|
+
def measure_aggregation(partition_by=nil)
|
20
|
+
write_metrics
|
21
|
+
print_header
|
22
|
+
intervals.each do |interval, seconds|
|
23
|
+
result = measure_aggregation_time(interval, seconds, partition_by)
|
24
|
+
print_row(result)
|
25
|
+
end
|
26
|
+
cleanup_metrics
|
27
|
+
print_footer
|
28
|
+
end
|
29
|
+
|
30
|
+
private
|
31
|
+
|
32
|
+
attr_reader :start, :redis, :durations, :intervals, :iterations
|
33
|
+
|
34
|
+
def write_metrics
|
35
|
+
seconds = intervals.values.max
|
36
|
+
seconds.times do |i|
|
37
|
+
second = start - i
|
38
|
+
payload = {
|
39
|
+
"second" => second,
|
40
|
+
"duration_sum" => rand(10000),
|
41
|
+
"duration_max" => rand(10000),
|
42
|
+
"views_sum" => rand(1000),
|
43
|
+
"views_max" => rand(1000),
|
44
|
+
"db_sum" => rand(8000),
|
45
|
+
"db_max" => rand(8000),
|
46
|
+
"queries_sum" => rand(100),
|
47
|
+
"queries_max" => rand(100),
|
48
|
+
"statuses" => {
|
49
|
+
"2xx" => rand(1..10),
|
50
|
+
"3xx" => rand(1..10),
|
51
|
+
"4xx" => rand(1..10),
|
52
|
+
"5xx" => rand(1..10),
|
53
|
+
"all" => rand(1..40)
|
54
|
+
}
|
55
|
+
}
|
56
|
+
redis.setex(second, seconds, Oj.dump(payload))
|
57
|
+
end
|
58
|
+
nil
|
59
|
+
end
|
60
|
+
|
61
|
+
def cleanup_metrics
|
62
|
+
interval_start = Time.now.to_i - intervals.values.max - 100
|
63
|
+
interval_keys = (interval_start..Time.now.to_i).to_a
|
64
|
+
redis.del(interval_keys)
|
65
|
+
end
|
66
|
+
|
67
|
+
def measure_aggregation_time(interval, seconds, partition_by)
|
68
|
+
iterations.times do
|
69
|
+
durations << ::Benchmark.measure do
|
70
|
+
if partition_by
|
71
|
+
EZmetrics.new(seconds).partition_by(partition_by).show
|
72
|
+
else
|
73
|
+
EZmetrics.new(seconds).show
|
74
|
+
end
|
75
|
+
end.real
|
76
|
+
end
|
77
|
+
|
78
|
+
return {
|
79
|
+
interval: interval.gsub(".", " "),
|
80
|
+
duration: (durations.sum.to_f / iterations).round(2)
|
81
|
+
}
|
82
|
+
end
|
83
|
+
|
84
|
+
def print_header
|
85
|
+
print "\n#{'─'*31}\n| Interval | Duration (seconds)\n#{'─'*31}\n"
|
86
|
+
end
|
87
|
+
|
88
|
+
def print_row(result)
|
89
|
+
print "| #{result[:interval]} | #{result[:duration]}\n"
|
90
|
+
end
|
91
|
+
|
92
|
+
def print_footer
|
93
|
+
print "#{'─'*31}\n"
|
94
|
+
end
|
95
|
+
end
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: ezmetrics
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 1.
|
4
|
+
version: 1.2.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Nicolae Rotaru
|
@@ -24,6 +24,34 @@ dependencies:
|
|
24
24
|
- - "~>"
|
25
25
|
- !ruby/object:Gem::Version
|
26
26
|
version: '4.0'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: hiredis
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - "~>"
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: 0.6.3
|
34
|
+
type: :runtime
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - "~>"
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: 0.6.3
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: oj
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - "~>"
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '3.10'
|
48
|
+
type: :runtime
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - "~>"
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '3.10'
|
27
55
|
- !ruby/object:Gem::Dependency
|
28
56
|
name: rspec
|
29
57
|
requirement: !ruby/object:Gem::Requirement
|
@@ -38,7 +66,7 @@ dependencies:
|
|
38
66
|
- - "~>"
|
39
67
|
- !ruby/object:Gem::Version
|
40
68
|
version: '3.5'
|
41
|
-
description:
|
69
|
+
description: Simple, lightweight and fast metrics aggregation for Rails.
|
42
70
|
email: nyku.rn@gmail.com
|
43
71
|
executables: []
|
44
72
|
extensions: []
|
@@ -47,6 +75,7 @@ files:
|
|
47
75
|
- LICENSE
|
48
76
|
- README.md
|
49
77
|
- lib/ezmetrics.rb
|
78
|
+
- lib/ezmetrics/benchmark.rb
|
50
79
|
homepage: https://github.com/nyku/ezmetrics
|
51
80
|
licenses:
|
52
81
|
- GPL-3.0
|
@@ -66,8 +95,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
66
95
|
- !ruby/object:Gem::Version
|
67
96
|
version: '0'
|
68
97
|
requirements: []
|
69
|
-
|
70
|
-
rubygems_version: 2.6.13
|
98
|
+
rubygems_version: 3.0.6
|
71
99
|
signing_key:
|
72
100
|
specification_version: 4
|
73
101
|
summary: Rails metrics aggregation tool.
|