redis-stat 0.4.3-java → 0.4.4-java

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 814990cec2629f891bf110dba907ce984628b81e
-  data.tar.gz: dbfab2225e10eee3bf6805430854df961bfd8927
+  metadata.gz: 8141fc0b8abb82ab7927fd3dcd1b9b9264088ec2
+  data.tar.gz: f52fbc1e912d769910eb39c3c00c654c0b6c5010
 SHA512:
-  metadata.gz: a56f0ec2aba3ba341a274875a0e89af1637c203fc096ee2f0d60c1265dfeccc761e06ffb161929ce9194036a974922ffc23048fe9e014f55ce9c7b1a2ee181d1
-  data.tar.gz: 706244730e1cfd29a873bb848c0bafd2e9cee44abace52567c49c36870cd723f44e906c1fb15cdab907e203f938e7a409bcafcb1dec5d8c54f1c497f7878f41a
+  metadata.gz: 9fc14d060c405e106292085b02db992be984a493142757676ea3659cff0c7b2965aa86f924c7c8cb9f36bacce32834d1aeb1ce801477d258196bd22ce0ca6964
+  data.tar.gz: 941a314949a1809b19cb251ec0292451ccc639e608b857427a410e3fe341c8a9bf51b429a1f870069f085a640f6c9028b4b6ebcfeb80f8a7a87b82197d0182c1
@@ -40,7 +40,7 @@ class RedisStat
     @verbose = options[:verbose]
     @measures = MEASURES[ @verbose ? :verbose : :default ].map { |m| [*m].first }
     @tab_measures = MEASURES[:static].map { |m| [*m].first }
-    @all_measures = MEASURES.values.inject(:+).uniq - [:at]
+    @all_measures = TYPES.keys
     @count = 0
     @style = options[:style]
     @varwidth = STDOUT.tty? && !windows
@@ -71,7 +71,7 @@ class RedisStat
     # Check elasticsearch status
     if @elasticsearch
       begin
-        output_es info
+        output_es Hash[process(info, nil)]
       rescue Exception => e
        output_term_errors! format_exceptions({ :elasticsearch => e })
        exit 1
@@ -96,13 +96,16 @@ class RedisStat
           next
         end
 
+        info_output_all = process info, prev_info
         begin
-          output_es info if @elasticsearch && @count > 0
+          output_es Hash[info_output_all] if @elasticsearch && @count > 0
+        rescue Interrupt
+          raise
         rescue Exception => e
           exceptions[:elasticsearch] = e.to_s
         end
         error_messages = format_exceptions(exceptions)
-        info_output = process info, prev_info
+        info_output = @measures.map { |key| [key, info_output_all[key][:sum]] }
         unless @daemonized
           output_static_info info if @count == 0
           output_term info_output, error_messages
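The new rescue Interrupt / raise pair matters because rescue Exception also catches Interrupt, so a Ctrl-C arriving while metrics are being pushed to Elasticsearch would otherwise be swallowed and reported as an Elasticsearch error. A minimal standalone illustration of that Ruby behavior (the sleep merely stands in for the output_es call):

    begin
      sleep 10                  # stand-in for output_es; press Ctrl-C here
    rescue Interrupt
      raise                     # re-raise so the process can actually stop
    rescue Exception => e       # would otherwise swallow the Interrupt too
      warn "elasticsearch: #{e}"
    end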
@@ -150,21 +153,43 @@ private
     RedisStat::Server
   end
 
-  def collect
-    info = {
-      :at => Time.now.to_f,
-      :instances => Hash.new { |h, k| h[k] = {}.insensitive }
-    }
-    class << info
-      def sumf label
-        self[:instances].values.map { |hash| hash[label].to_f }.inject(:+) || 0
-      end
+  module ExtendedHash
+    def hosts host
+      host == :sum ? self.values : [self[host]]
+    end
+
+    def vals host, label
+      hosts(host).map { |hash| hash[label] }
+    end
+
+    def s host, label
+      hosts(host).map { |hash| hash[label] }.join('/')
+    end
+
+    def i host, label
+      f(host, label).to_i
+    end
+
+    def f host, label
+      hosts(host).map { |hash|
+        case label
+        when Proc
+          label.call(hash)
+        else
+          hash[label].to_f
+        end
+      }.inject(:+) || 0
     end
+  end
+
+  def collect
+    info = Hash.new { |h, k| h[k] = {}.insensitive }.extend(ExtendedHash)
     exceptions = {}
 
     @hosts.pmap(@hosts.length) { |host|
       begin
-        [host, @redises[host].info.insensitive]
+        hash = { :at => Time.now }.insensitive
+        [host, hash.merge(@redises[host].info)]
      rescue Exception => e
        [host, e]
      end
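The ExtendedHash helpers replace the old singleton sumf method: instead of always summing across every instance, callers can ask for either the aggregate (:sum) or a single host. A simplified, hypothetical sketch of how the helpers behave; the real module also accepts a Proc label and operates on case-insensitive hashes, and the host names and values below are invented:

    module SumOrPerHost
      def hosts(host)
        host == :sum ? values : [self[host]]
      end

      def f(host, label)
        hosts(host).map { |h| h[label].to_f }.inject(:+) || 0
      end

      def i(host, label)
        f(host, label).to_i
      end

      def s(host, label)
        hosts(host).map { |h| h[label] }.join('/')
      end
    end

    info = {
      'redis1:6379' => { :used_memory => '1000', :role => 'master' },
      'redis2:6379' => { :used_memory => '2500', :role => 'slave' }
    }.extend(SumOrPerHost)

    info.i(:sum, :used_memory)           # => 3500   (total across both hosts)
    info.i('redis1:6379', :used_memory)  # => 1000   (single host)
    info.s(:sum, :role)                  # => "master/slave"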
@@ -176,7 +201,7 @@ private
         ks = [*k]
         v = ks.map { |e| rinfo[e] }.compact.first
         k = ks.first
-        info[:instances][host][k] = v
+        info[host][k] = v
       end
     end
   end
@@ -228,7 +253,12 @@ private
     }) if @count == 0
 
     file.puts CSV.generate_line(info_output.map { |pair|
-      [*pair.last].last
+      case val = [*pair.last].last
+      when Time
+        val.to_f
+      else
+        val
+      end
     })
     file.flush
   end
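Because the raw value for :at is now a Time object rather than a preformatted string, the CSV writer converts it to epoch seconds before emitting the row. A small self-contained sketch with made-up info_output pairs:

    require 'csv'

    # Illustrative [key, [humanized, raw]] pairs as produced for the terminal view.
    info_output = [
      [:at,          ['12:34:56', Time.at(1_400_000_000)]],
      [:used_memory, ['1.00M',    1048576.0]]
    ]

    CSV.generate_line(info_output.map { |pair|
      case val = [*pair.last].last
      when Time
        val.to_f        # epoch seconds, e.g. 1400000000.0
      else
        val
      end
    })
    # => "1400000000.0,1048576.0\n"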
@@ -310,7 +340,7 @@ private
     tab << [nil] + @hosts.map { |h| h.bold.green }
     tab.separator!
     @tab_measures.each do |key|
-      tab << [key.to_s.bold] + @hosts.map { |host| info[:instances][host][key] }
+      tab << [key.to_s.bold] + @hosts.map { |host| info[host][key] }
     end
     @os.puts tab
   end
@@ -341,18 +371,27 @@ private
   end
 
   def process info, prev_info
-    @measures.map { |key|
-      # [ key, [humanized, raw] ]
-      [ key, process_how(info, prev_info, key) ]
-    }.select { |pair| pair.last }
+    hosts = [:sum].concat(@hosts)
+    Hash[@all_measures.map { |key|
+      [ key,
+        hosts.select { |h| info.has_key?(h) || h == :sum }.inject({}) { |sum, h|
+          sum[h] = process_how(h, info, prev_info, key)
+          sum
+        }
+      ]
+    }]
   end
 
-  def process_how info, prev_info, key
-    dur = prev_info && (info[:at] - prev_info[:at])
+  def process_how host, info, prev_info, key
+    dur = prev_info && begin
+      max = info.vals(host, :at).compact.max
+      min = prev_info.vals(host, :at).compact.min
+      max && min && (max - min)
+    end
 
     get_diff = lambda do |label|
       if dur && dur > 0
-        (info.sumf(label) - prev_info.sumf(label)) / dur
+        (info.f(host, label) - prev_info.f(host, label)) / dur
       else
         nil
       end
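process no longer returns a flat list of [key, [humanized, raw]] pairs for just the configured measures; it now appears to build a hash over every measure in @all_measures, holding one [humanized, raw] pair per reachable host plus an aggregated :sum entry, and the terminal view reads only the :sum column (the info_output line earlier in this diff). A rough sketch of that shape, with invented host names and numbers:

    # Illustrative only: approximate shape of the hash returned by process.
    info_output_all = {
      :used_memory => {
        :sum          => ['3.42M', 3584000.0],   # [humanized, raw], aggregated
        'redis1:6379' => ['1.00M', 1048576.0],
        'redis2:6379' => ['2.42M', 2535424.0]
      },
      :redis_version => {
        :sum          => ['2.8.9/2.8.9', '2.8.9/2.8.9'],  # strings are joined, not summed
        'redis1:6379' => ['2.8.9', '2.8.9'],
        'redis2:6379' => ['2.8.9', '2.8.9']
      }
      # ... one entry per measure in @all_measures (TYPES.keys)
    }

    measures = [:used_memory]                    # stand-in for @measures
    info_output = measures.map { |key| [key, info_output_all[key][:sum]] }
    # => [[:used_memory, ["3.42M", 3584000.0]]]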
@@ -360,27 +399,29 @@ private
 
     case key
     when :at
-      val = Time.now.strftime('%H:%M:%S')
-      [val, val]
+      now = info.vals(host, :at).compact.max || Time.now
+      [now.strftime('%H:%M:%S'), now]
     when :used_cpu_user, :used_cpu_sys
       val = get_diff.call(key)
       val &&= (val * 100).round
       [humanize_number(val), val]
     when :keys
-      val = Hash[ info.select { |k, v| k =~ /^db[0-9]+$/ } ].values.inject(0) { |sum, vs|
-        sum + vs.map { |v| Hash[ v.split(',').map { |e| e.split '=' } ]['keys'].to_i }.inject(:+)
-      }
+      val = info.f(host, proc { |hash|
+        Hash[ hash.select { |k, v| k =~ /^db[0-9]+$/ } ].values.inject(0) { |sum, vs|
+          sum + Hash[ vs.split(',').map { |e| e.split '=' } ]['keys'].to_i
+        }
+      })
       [humanize_number(val), val]
     when :evicted_keys_per_second, :expired_keys_per_second, :keyspace_hits_per_second,
          :keyspace_misses_per_second, :total_commands_processed_per_second
       val = get_diff.call(key.to_s.gsub(/_per_second$/, '').to_sym)
       [humanize_number(val), val]
     when :used_memory, :used_memory_rss, :aof_current_size, :aof_base_size
-      val = info.sumf(key)
+      val = info.f(host, key)
       [humanize_number(val.to_i, true), val]
     when :keyspace_hit_ratio
-      hits = info.sumf(:keyspace_hits)
-      misses = info.sumf(:keyspace_misses)
+      hits = info.f(host, :keyspace_hits)
+      misses = info.f(host, :keyspace_misses)
       val = ratio(hits, misses)
       [humanize_number(val), val]
     when :keyspace_hit_ratio_per_second
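The :keys figure is now computed through the f helper with a Proc, so it can be reported per host as well as summed; the Proc itself parses the db* lines of one host's INFO hash. A self-contained sketch of that parsing step on invented keyspace data:

    # Hypothetical per-host INFO hash as redis-rb returns it (keyspace values are strings).
    hash = {
      'db0'  => 'keys=120,expires=3,avg_ttl=0',
      'db1'  => 'keys=30,expires=0,avg_ttl=0',
      'role' => 'master'
    }

    Hash[ hash.select { |k, v| k =~ /^db[0-9]+$/ } ].values.inject(0) { |sum, vs|
      sum + Hash[ vs.split(',').map { |e| e.split '=' } ]['keys'].to_i
    }
    # => 150 (120 + 30); the old code summed this across all hosts, the new Proc runs per host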
@@ -389,8 +430,10 @@ private
       val = ratio(hits, misses)
       [humanize_number(val), val]
     else
-      val = info.sumf(key)
-      [humanize_number(val), val]
+      conv = TYPES.fetch key, :s
+      val = info.send(conv, host, key)
+      val.is_a?(String) ?
+        [val, val] : [humanize_number(val), val]
     end
   end
 
@@ -121,5 +121,48 @@ class RedisStat
     :pubsub_channels => 'psch',
     :pubsub_patterns => 'psp',
   }
+
+  TYPES = {
+    :at => :f,
+    :used_cpu_user => :f,
+    :used_cpu_sys => :f,
+    :connected_clients => :i,
+    :blocked_clients => :i,
+    :used_memory => :i,
+    :used_memory_rss => :i,
+    :mem_fragmentation_ratio => :f,
+    :total_commands_processed => :i,
+    :total_commands_processed_per_second => :f,
+    :expired_keys => :i,
+    :expired_keys_per_second => :f,
+    :evicted_keys => :i,
+    :evicted_keys_per_second => :f,
+    :keys => :i,
+    :keyspace_hits => :i,
+    :keyspace_hits_per_second => :f,
+    :keyspace_misses => :i,
+    :keyspace_misses_per_second => :f,
+    :keyspace_hit_ratio => :i,
+    :keyspace_hit_ratio_per_second => :f,
+    :aof_current_size => :i,
+    :aof_base_size => :i,
+    :changes_since_last_save => :i,
+    :rdb_changes_since_last_save => :i,
+    :pubsub_channels => :i,
+    :pubsub_patterns => :i,
+
+    :redis_version => :s,
+    :redis_mode => :s,
+    :process_id => :i,
+    :uptime_in_seconds => :i,
+    :uptime_in_days => :i,
+    :role => :s,
+    :connected_slaves => :i,
+    :aof_enabled => :i,
+    :rdb_bgsave_in_progress => :i,
+    :bgsave_in_progress => :i,
+    :rdb_last_save_time => :i,
+    :last_save_time => :i,
+  }
 end
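TYPES appears to serve two purposes: @all_measures is now simply TYPES.keys, and the fallback branch of process_how uses it to pick the right helper per measure, with :f and :i summing numeric values and :s joining per-host strings. A hypothetical, reduced sketch of that dispatch; the info stub below merely stands in for the extended info hash and its values are invented:

    # Stand-in for the extended info hash, reduced to the f/i/s helpers it exposes.
    info = Object.new
    def info.f(host, key) 2.5 end              # floats: summed across hosts for :sum
    def info.i(host, key) f(host, key).to_i end
    def info.s(host, key) '2.8.9/2.8.9' end    # strings: per-host values joined with '/'

    types = { :mem_fragmentation_ratio => :f, :connected_clients => :i }  # excerpt of TYPES

    [:mem_fragmentation_ratio, :connected_clients, :redis_version].map do |key|
      conv = types.fetch(key, :s)              # unknown measures fall back to :s
      info.send(conv, :sum, key)
    end
    # => [2.5, 2, "2.8.9/2.8.9"]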
@@ -6,16 +6,6 @@ class RedisStat
 class ElasticsearchSink
   attr_reader :hosts, :info, :index, :client
 
-  TO_I = {
-    :process_id => true,
-    :uptime_in_seconds => true,
-    :uptime_in_days => true,
-    :connected_slaves => true,
-    :aof_enabled => true,
-    :rdb_bgsave_in_progress => true,
-    :rdb_last_save_time => true,
-  }
-
   DEFAULT_INDEX = 'redis-stat'
 
   def self.parse_url elasticsearch
@@ -38,15 +28,22 @@ class ElasticsearchSink
   end
 
   def output info
-    convert_to_i(info).each do |host, entries|
-      time = info[:at].to_i
+    @hosts.each do |host|
+      entries = Hash[info.map { |k, v|
+        if v.has_key?(host) && raw = v[host].last
+          [k, raw]
+        end
+      }.compact]
+      next if entries.empty?
+
+      time = entries[:at]
       entry = {
         :index => index,
         :type => "redis",
         :body => entries.merge({
           :@timestamp => format_time(time),
           :host => host,
-          :at => time
+          :at => time.to_f
         }),
       }
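ElasticsearchSink#output now receives the processed hash (measure => { host/:sum => [humanized, raw] }) instead of the raw INFO data, so the TO_I whitelist and the convert_to_i step removed elsewhere in this diff are no longer needed: the sink just picks each host's raw value and converts the Time stored under :at to a float. A rough sketch of how the per-host entries are derived; host names and values are invented:

    # Illustrative processed hash as handed to output.
    info = {
      :at          => { :sum => ['12:34:56', Time.at(1_400_000_000)],
                        'redis1:6379' => ['12:34:56', Time.at(1_400_000_000)] },
      :used_memory => { :sum => ['3.42M', 3584000.0],
                        'redis1:6379' => ['1.00M', 1048576.0] }
    }

    host = 'redis1:6379'
    entries = Hash[info.map { |k, v|
      if v.has_key?(host) && raw = v[host].last
        [k, raw]                       # keep only this host's raw value
      end
    }.compact]
    # => { :at => <Time>, :used_memory => 1048576.0 }
    entries[:at].to_f                  # => 1400000000.0, stored as :at in the document body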
@@ -57,27 +54,14 @@ class ElasticsearchSink
 private
   if RUBY_VERSION.start_with? '1.8.'
     def format_time time
-      fmt = Time.at(time).strftime("%FT%T%z")
+      fmt = time.strftime("%FT%T%z")
       fmt[0..-3] + ':' + fmt[-2..-1]
     end
   else
     def format_time time
-      Time.at(time).strftime("%FT%T%:z")
+      time.strftime("%FT%T%:z")
     end
   end
-
-  def convert_to_i info
-    Hash[info[:instances].map { |host, entries|
-      output = {}
-      entries.each do |name, value|
-        convert = RedisStat::LABELS[name] || TO_I[name]
-        if convert
-          output[name] = value.to_i
-        end
-      end
-      output.empty? ? nil : [host, output]
-    }.compact]
-  end
 end
 end
 
@@ -85,7 +85,7 @@ class Server < Sinatra::Base
 
   def push hosts, info, data, error
     static = Hash[settings.redis_stat.tab_measures.map { |stat|
-      [stat, hosts.map { |h| info[:instances][h][stat] }]
+      [stat, hosts.map { |h| info[h][stat] }]
     }]
     data = {:at => (Time.now.to_f * 1000).to_i,
             :static => static,
@@ -102,7 +102,7 @@
       <tr id="<%= stat %>">
         <% @hosts.each do |host| %>
         <td>
-          <%= @info[:instances][host][stat] %>
+          <%= @info[host][stat] %>
         </td>
         <% end %>
       </tr>
@@ -1,3 +1,3 @@
 class RedisStat
-  VERSION = "0.4.3"
+  VERSION = "0.4.4"
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: redis-stat
 version: !ruby/object:Gem::Version
-  version: 0.4.3
+  version: 0.4.4
 platform: java
 authors:
 - Junegunn Choi