panoptimon 0.0.2
- checksums.yaml +7 -0
- data/.gitignore +24 -0
- data/Gemfile +4 -0
- data/LICENSE +29 -0
- data/README.md +78 -0
- data/Rakefile +14 -0
- data/bin/panoptimon +118 -0
- data/collectors/cpu/cpu +39 -0
- data/collectors/cpu/cpu.json +1 -0
- data/collectors/disk/disk +37 -0
- data/collectors/disk/disk.json +1 -0
- data/collectors/disk/requires +1 -0
- data/collectors/disk_free/disk_free +15 -0
- data/collectors/disk_free/disk_free.json +1 -0
- data/collectors/dns/README.md +33 -0
- data/collectors/dns/dns +9 -0
- data/collectors/dns/dns.json +7 -0
- data/collectors/dns/lib/panoptimon-collector-dns/dns.rb +43 -0
- data/collectors/dns/lib/panoptimon-collector-dns.rb +2 -0
- data/collectors/dns/panoptimon-collector-dns.gemspec +4 -0
- data/collectors/files/README.md +32 -0
- data/collectors/files/files +129 -0
- data/collectors/files/files.json +14 -0
- data/collectors/files/spec/files_spec.rb +57 -0
- data/collectors/haproxy/README.md +40 -0
- data/collectors/haproxy/haproxy +16 -0
- data/collectors/haproxy/haproxy.json +3 -0
- data/collectors/haproxy/lib/panoptimon-collector-haproxy/haproxy.rb +149 -0
- data/collectors/haproxy/lib/panoptimon-collector-haproxy.rb +1 -0
- data/collectors/haproxy/notes.txt +13 -0
- data/collectors/haproxy/spec/haproxy_spec.rb +98 -0
- data/collectors/haproxy/spec/haproxy_spec.rb-get.html +22 -0
- data/collectors/haproxy/spec/haproxy_spec.rb-show_info.txt +21 -0
- data/collectors/haproxy/spec/haproxy_spec.rb-show_stat.csv +25 -0
- data/collectors/haproxy/spec/haproxy_spec.rb-show_stat2.csv +11 -0
- data/collectors/http/README.md +49 -0
- data/collectors/http/http +27 -0
- data/collectors/http/http.json +3 -0
- data/collectors/http/lib/panoptimon-collector-http/http.rb +74 -0
- data/collectors/http/lib/panoptimon-collector-http/version.rb +7 -0
- data/collectors/http/lib/panoptimon-collector-http.rb +2 -0
- data/collectors/interfaces/interfaces +33 -0
- data/collectors/interfaces/interfaces.json +1 -0
- data/collectors/iostat/iostat +53 -0
- data/collectors/iostat/iostat.json +1 -0
- data/collectors/json/README.md +27 -0
- data/collectors/json/json +37 -0
- data/collectors/json/json.json +1 -0
- data/collectors/load/load +15 -0
- data/collectors/load/load.json +1 -0
- data/collectors/memcached/memcached +55 -0
- data/collectors/memcached/memcached.json +7 -0
- data/collectors/memcached/test-notes.txt +3 -0
- data/collectors/memory/memory +33 -0
- data/collectors/memory/memory.json +1 -0
- data/collectors/mysql_status/mysql_status +52 -0
- data/collectors/mysql_status/mysql_status.json +4 -0
- data/collectors/network/network +67 -0
- data/collectors/network/network.json +18 -0
- data/collectors/nginx/README.md +32 -0
- data/collectors/nginx/lib/panoptimon-collector-nginx/nginx.rb +45 -0
- data/collectors/nginx/lib/panoptimon-collector-nginx.rb +2 -0
- data/collectors/nginx/nginx +11 -0
- data/collectors/nginx/nginx.json +3 -0
- data/collectors/nginx/panoptimon-collector-nginx.gemspec +4 -0
- data/collectors/ping/README.md +54 -0
- data/collectors/ping/ping +57 -0
- data/collectors/ping/ping.json +7 -0
- data/collectors/process/README.md +36 -0
- data/collectors/process/process +61 -0
- data/collectors/process/process.json +7 -0
- data/collectors/service/README.md +51 -0
- data/collectors/service/samples/.gitignore +1 -0
- data/collectors/service/samples/data/disconnect +11 -0
- data/collectors/service/samples/data/flappy +7 -0
- data/collectors/service/samples/data/solid +18 -0
- data/collectors/service/samples/replay +27 -0
- data/collectors/service/service +86 -0
- data/collectors/service/service.json +7 -0
- data/collectors/smtp/lib/panoptimon-collector-smtp/smtp.rb +30 -0
- data/collectors/smtp/lib/panoptimon-collector-smtp.rb +1 -0
- data/collectors/smtp/smtp +27 -0
- data/collectors/smtp/smtp.json +10 -0
- data/collectors/socket/README.md +36 -0
- data/collectors/socket/lib/panoptimon-collector-socket/socket.rb +38 -0
- data/collectors/socket/lib/panoptimon-collector-socket/tcp.rb +34 -0
- data/collectors/socket/lib/panoptimon-collector-socket/unix.rb +28 -0
- data/collectors/socket/lib/panoptimon-collector-socket.rb +3 -0
- data/collectors/socket/socket +13 -0
- data/collectors/socket/socket.json +16 -0
- data/collectors/socket/tests/tcp_spec.rb +21 -0
- data/collectors/socket/tests/unix_spec.rb +35 -0
- data/collectors/ssh/README.md +27 -0
- data/collectors/ssh/ssh +41 -0
- data/collectors/ssh/ssh.json +3 -0
- data/lib/panoptimon/collector.rb +135 -0
- data/lib/panoptimon/eventmonkeypatch/popen3.rb +40 -0
- data/lib/panoptimon/http.rb +63 -0
- data/lib/panoptimon/logger.rb +19 -0
- data/lib/panoptimon/monitor.rb +154 -0
- data/lib/panoptimon/util/string-with-as_number.rb +5 -0
- data/lib/panoptimon/util.rb +23 -0
- data/lib/panoptimon/version.rb +5 -0
- data/lib/panoptimon.rb +144 -0
- data/misc/collector_setup.rb +23 -0
- data/misc/monitor_setup.rb +25 -0
- data/misc/plugins_setup.rb +25 -0
- data/misc/riemann-cli.rb +33 -0
- data/panoptimon.gemspec +33 -0
- data/plugins/daemon_health/README.md +31 -0
- data/plugins/daemon_health/daemon_health.json +4 -0
- data/plugins/daemon_health/daemon_health.rb +34 -0
- data/plugins/daemon_health/lib/panoptimon-plugin-daemon_health/rollup.rb +64 -0
- data/plugins/daemon_health/panoptimon-plugin-daemon_health.gemspec +10 -0
- data/plugins/daemon_health/spec/moving_avg_spec.rb +24 -0
- data/plugins/email/README.md +30 -0
- data/plugins/email/email.json +3 -0
- data/plugins/email/email.rb +52 -0
- data/plugins/log_to_file/log_to_file.json +1 -0
- data/plugins/log_to_file/log_to_file.rb +8 -0
- data/plugins/log_to_logger/log_to_logger.json +3 -0
- data/plugins/log_to_logger/log_to_logger.rb +7 -0
- data/plugins/metrics_http/README.md +23 -0
- data/plugins/metrics_http/metrics_http.json +1 -0
- data/plugins/metrics_http/metrics_http.rb +17 -0
- data/plugins/riemann_stream/requires +1 -0
- data/plugins/riemann_stream/riemann_stream.json +3 -0
- data/plugins/riemann_stream/riemann_stream.rb +23 -0
- data/plugins/status_http/requires +1 -0
- data/plugins/status_http/status_http.json +1 -0
- data/plugins/status_http/status_http.rb +60 -0
- data/sample_configs/1/collectors/alls_well.json +6 -0
- data/sample_configs/1/collectors/clock/clock +12 -0
- data/sample_configs/1/collectors/clock.json +1 -0
- data/sample_configs/1/collectors/df/df.json +6 -0
- data/sample_configs/1/collectors/df/wrap_df +21 -0
- data/sample_configs/1/collectors/load.json +1 -0
- data/sample_configs/1/panoptimon.json +4 -0
- data/sample_configs/1/plugins/isup/isup.rb +3 -0
- data/sample_configs/1/plugins/isup.json +1 -0
- data/sample_configs/1/plugins/log_to_file.json +1 -0
- data/sample_configs/err_handler/collectors/fail.json +4 -0
- data/sample_configs/err_handler/collectors/noisy.json +5 -0
- data/sample_configs/err_handler/collectors/noisy_failure.json +5 -0
- data/sample_configs/err_handler/collectors/notfound.json +4 -0
- data/sample_configs/err_handler/panoptimon.json +3 -0
- data/sample_configs/err_handler/plugins/.exists +0 -0
- data/sample_configs/passthru/collectors/beep.json +5 -0
- data/sample_configs/passthru/collectors/cat/collect_this.json +1 -0
- data/sample_configs/passthru/collectors/cat.json +5 -0
- data/sample_configs/passthru/panoptimon.json +3 -0
- data/sample_configs/passthru/plugins/okcat/okcat.rb +17 -0
- data/sample_configs/passthru/plugins/okcat.json +1 -0
- data/sample_configs/plugin_error/collectors/beep.json +5 -0
- data/sample_configs/plugin_error/panoptimon.json +1 -0
- data/sample_configs/plugin_error/plugins/error_always/error_always.rb +1 -0
- data/sample_configs/plugin_error/plugins/error_always.json +1 -0
- data/sample_configs/slow/collectors/slowbeep.json +6 -0
- data/sample_configs/slow/panoptimon.json +1 -0
- data/sample_configs/slow/plugins/.exists +0 -0
- data/sample_configs/timeout_newline/collectors/slow_lf.json +8 -0
- data/sample_configs/timeout_newline/panoptimon.json +1 -0
- data/sample_configs/timeout_newline/plugins/.exists +0 -0
- data/sample_configs/timeout_not/collectors/slowly.json +7 -0
- data/sample_configs/timeout_not/panoptimon.json +1 -0
- data/sample_configs/timeout_not/plugins/.exists +0 -0
- data/spec/collector/config_spec.rb +30 -0
- data/spec/collector/initialize_spec.rb +24 -0
- data/spec/collector/metric_spec.rb +22 -0
- data/spec/passthru_spec.rb +13 -0
- data/spec/util_spec.rb +37 -0
- data/tools/link_and_enable +37 -0
- data/tools/metricify +8 -0
- metadata +319 -0
data/collectors/files/spec/files_spec.rb
@@ -0,0 +1,57 @@
+#!/usr/bin/env ruby
+
+require 'rspec'
+load File.expand_path('../../files', __FILE__)
+require 'ostruct'
+require 'tmpdir'
+
+describe('filters') {
+  f = ->(props) { return OpenStruct.new(props) }
+  it('passes with one true property') {
+    filters(%w{directory}).call(f.call(directory?: true))
+      .should == true
+  }
+  it('fails with one false property') {
+    filters(%w{directory}).call(f.call(directory?: false))
+      .should == false
+  }
+  it('passes with 2 true properties') {
+    filters(%w{directory world_readable}).
+      call(f.call(directory?: true, world_readable?: true))
+      .should == true
+  }
+  it('passes with 3 true properties') {
+    filters(%w{directory world_readable world_writable}).
+      call(f.call(directory?: true, world_readable?: true,
+        world_writable?: true))
+      .should == true
+  }
+  it('fails with some false properties') {
+    filters(%w{directory world_readable world_writable}).
+      call(f.call(directory?: true, world_readable?: true,
+        world_writable?: false))
+      .should == false
+  }
+  it('works for real tmpdir (aka /tmp/)') {
+    filters(%w{directory world_readable world_writable}).
+      call(Pathname.new(Dir.tmpdir).stat)
+      .should == true
+  }
+  it('works for real /') {
+    filters(%w{directory world_readable}).
+      call(Pathname.new('/').stat)
+      .should == true
+  }
+  it('works for real /') {
+    filters(%w{file world_readable}).
+      call(Pathname.new('/').stat)
+      .should == false
+  }
+  it('works for real / (or so one would hope)') {
+    filters(%w{directory world_readable world_writable}).
+      call(Pathname.new('/').stat)
+      .should == false
+  }
+
+}
+
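The spec above loads a `filters` helper from the `files` collector script itself (`data/collectors/files/files`, whose 129 lines are not part of this excerpt). Purely as a hedged sketch of what those expectations imply — not the gem's actual definition — a minimal `filters` could look like this:

```ruby
# Sketch only: one way to satisfy the spec above. Each name ("directory",
# "world_readable", ...) becomes a "name?" query on the stat-like object,
# and the returned lambda is true only when every query is truthy.
def filters(names)
  ->(stat) { names.all? { |n| stat.public_send("#{n}?") } }
end
```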
data/collectors/haproxy/README.md
@@ -0,0 +1,40 @@
+# Description
+
+This collector provides a check on all defined resources within HAProxy.
+
+# Configuration
+
+The `stats_url` can point at either the http: or socket: protocol
+(socket is assumed if the protocol is missing). Note that socket usage
+requires appropriate ownership/group permissions.
+
+```json
+{
+  "stats_url": "socket://var/run/haproxy/stats"
+  # or http://localhost:8080
+}
+```
+
+# Output
+
+```json
+{
+  "uptime_sec" : ...,
+  "status|up" : 13,
+  "status|down" : 1,
+  "status|open" : 6,
+  "status|no_check" : 4,
+  "process_num" : 1,
+  "pid" : 3720,
+  "nbproc" : 3,
+  "run_queue" : 1,
+  "tasks" : 17,
+  "_info" : {
+    "version" : "1.4.22",
+    "status" : {
+      "FRONTEND" : {"x" : "open", "y" : "open", ...},
+      "BACKEND" : {"x" : "up", "y" : "down", ...
+    },
+  },
+}
+```
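For reference, the spec further down pins how `stats_url` is normalized; the sketch below just restates those expectations (the paths and port are the spec's own examples, and it assumes `collectors/haproxy/lib` is on the load path):

```ruby
require 'panoptimon-collector-haproxy'

h = Panoptimon::Collector::HAProxy
h.new.stats_url                                              # => "/var/run/haproxy.sock" (default: UNIX socket)
h.new(stats_url: 'socket://var/lib/haproxy.sock').stats_url  # => "/var/lib/haproxy.sock"
h.new(stats_url: 'http://localhost:8080').stats_url          # => "http://localhost:8080/" (trailing slash added)
```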
data/collectors/haproxy/haproxy
@@ -0,0 +1,16 @@
+#! /usr/bin/env ruby
+#
+# == Panoptimon::Collector::HAProxy
+#
+# Collect HAProxy metrics.
+
+$LOAD_PATH.unshift(File.expand_path(File.join(File.dirname(__FILE__), 'lib')))
+
+require 'json'
+require 'panoptimon-collector-haproxy'
+
+raise "Configuration required" unless ARGV[0]
+
+config = JSON.parse(ARGV[0], symbolize_names: true)
+client = Panoptimon::Collector::HAProxy.new(config)
+puts client.info.to_json
data/collectors/haproxy/lib/panoptimon-collector-haproxy/haproxy.rb
@@ -0,0 +1,149 @@
+require 'panoptimon/util/string-with-as_number'
+
+class Array; def to_h; Hash[self]; end; end
+
+module Panoptimon
+  module Collector
+    class HAProxy
+
+      attr_reader :collector, :stats_url
+
+      def initialize(options={})
+        url = options[:stats_url] || '/var/run/haproxy.sock'
+        @stats_url = url.sub(%r{^socket:/}, '')
+        # slash after bare hostname:port
+        @stats_url += '/' if @stats_url =~ %r{^https?://[^/]+$}
+        @collector = @stats_url !~ %r{^\w+://} \
+          ? :stats_from_sock
+          : :stats_from_http
+      end
+
+      def info
+        it = self.class.send(collector, stats_url)
+        out = {
+          uptime_sec: it[:info][:uptime_sec] ||
+            self.class._dhms_as_sec(it[:info][:uptime]),
+          status: it[:stats].values.reduce({}) {|c,v|
+            v.values.each {|h| s = h[:status].downcase.gsub(/ /, '_')
+              c[s] ||= 0; c[s] += 1 }
+            c
+          },
+          _info: {
+            status: [:FRONTEND, :BACKEND].map {|s|
+              [s, it[:stats][s].map {|n,v|
+                [n, v[:status].downcase]}.to_h]
+            }.to_h,
+          }.merge([:version].
+            map {|k| [k, it[:info][k]]}.to_h),
+        }.merge(
+          [:process_num, :pid, :nbproc, :run_queue, :tasks].
+            map {|k| [k, it[:info][k]]}.to_h)
+      end
+
+      def self._dhms_as_sec(dhms)
+        f = {'d' => 24*60**2, 'h' => 60**2, 'm' => 60, 's' => 1}
+        s = 0;
+        dhms.split(/(d|h|m|s) ?/).reverse.each_slice(2) {|p|
+          (k, v) = p
+          s += v.to_i * f[k]
+        }
+        return s
+      end
+
+      def self.stats_from_sock(path)
+        {
+          stats: _parse_stats_csv( _sock_get(path, 'show stat') ),
+          info: _parse_show_info( _sock_get(path, 'show info') )
+        }
+      end
+
+      def self.stats_from_http(uri)
+        # NOTE uri is expected to have trailing slash if needed
+        {
+          stats: _parse_stats_csv(
+            _http_get(uri + ';csv').split(/\n/) ),
+          info: _parse_html_info( _http_get(uri) )
+        }
+      end
+
+      def self._parse_html_info(body)
+        body =~ %r{General\sprocess\sinformation</[^>]+>
+          (.*?Running\stasks:\s\d+/\d+)<}xm or
+          raise "body: #{body} does not match expectations"
+        p = $1
+        info = {}
+        # TODO proper dishtml?
+        p.gsub!(%r{\s+}, ' ')
+        p.gsub!(%r{<br>}, "\n")
+        p.gsub!(%r{<[^>]+>}, '')
+        p.gsub!(%r{ +}, ' ')
+        { # harvest some numbers
+          pid: %r{pid =\s+(\d+)},
+          process_num: %r{process #(\d+)},
+          nbproc: %r{nbproc = (\d+)},
+          uptime: %r{uptime = (\d+d \d+h\d+m\d+s)},
+          memmax_mb: %r{memmax = (unlimited|\d+)},
+          :'ulimit-n' => %r{ulimit-n = (\d+)},
+          maxsock: %r{maxsock = (\d+)},
+          maxconn: %r{maxconn = (\d+)},
+          maxpipes: %r{maxpipes = (\d+)},
+          currcons: %r{current conns = (\d+)},
+          pipesused: %r{current pipes = (\d+)/\d+},
+          pipesfree: %r{current pipes = \d+/(\d+)},
+          run_queue: %r{Running tasks: (\d+)/\d+},
+          tasks: %r{Running tasks: \d+/(\d+)},
+        }.each {|k,v|
+          got = p.match(v) or raise "no match for #{k} (#{v})"
+          info[k] = got[1].as_number || got[1]
+        }
+        info[:memmax_mb] = 0 if info[:memmax_mb] == 'unlimited'
+
+        vi = body.match(%r{<body>.*?>([^<]+)\ version\ (\d+\.\d+\.\d+),
+          \ released\ (\d{4}/\d{2}/\d{2})}x) or
+          raise "failed to find version info"
+        info.merge!( name: vi[1], version: vi[2], release_date: vi[3] )
+        return info
+      end
+
+      def self._http_get(uri)
+        require 'net/http'
+        uri = URI(uri)
+        res = ::Net::HTTP.start(uri.host, uri.port,
+          :use_ssl => uri.scheme == 'https'
+        ).request(::Net::HTTP::Get.new(uri.request_uri))
+        raise "error: #{res.code} #{res.message}" unless
+          res.is_a?(::Net::HTTPSuccess)
+        return res.body
+      end
+
+      def self._parse_show_info(lines)
+        Hash[lines.map {|l|
+          (k,v) = * l.chomp.split(/:\s+/, 2);
+          k or next
+          [k.downcase.to_sym, v.as_number || v]}
+        ]
+      end
+
+      def self._parse_stats_csv(lines)
+        head = lines.shift.chomp.sub(/^# /, '') or raise "no header row?"
+        hk = head.split(/,/).map {|k| k.to_sym}; hk.shift(2)
+        imax = hk.length - 1
+        h = Hash.new {|hash,key| hash[key] = {}}
+        lines.each {|l| f = l.chomp.split(/,/)
+          (n,s) = f.shift(2)
+          h[s.to_sym][n] = Hash[(0..imax).map {|i|
+            [hk[i], (f[i].nil? or f[i] == "") ? nil :
+              f[i].as_number || f[i]]}]
+        }
+        return h
+      end
+
+      def self._sock_get(path, cmd)
+        require "socket"
+        stat_socket = UNIXSocket.new(path)
+        stat_socket.puts(cmd)
+        stat_socket.readlines
+      end
+    end
+  end
+end
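As a quick worked example of `_dhms_as_sec` above, applied to the uptime string that appears in the HTML fixture further down:

```ruby
Panoptimon::Collector::HAProxy._dhms_as_sec('36d 7h17m01s')
# => 36*86400 + 7*3600 + 17*60 + 1 = 3136621
```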
data/collectors/haproxy/lib/panoptimon-collector-haproxy.rb
@@ -0,0 +1 @@
+require 'panoptimon-collector-haproxy/haproxy'
data/collectors/haproxy/notes.txt
@@ -0,0 +1,13 @@
+ruby -Icollectors/haproxy/lib -e 'require "json"; require "panoptimon-collector-haproxy";
+  puts Panoptimon::Collector::HAProxy._parse_stats_csv(
+    File.open("/tmp/haproxy-out2.csv").readlines).to_json'
+
+ruby -Icollectors/haproxy/lib -e 'require "json"; require "panoptimon-collector-haproxy";
+  puts Panoptimon::Collector::HAProxy._parse_show_info(
+    File.open("/tmp/haproxy-show-info.txt").readlines).to_json'
+
+ruby -Icollectors/haproxy/lib -e 'require "json"; require "panoptimon-collector-haproxy";
+  puts Panoptimon::Collector::HAProxy.stats_from_http("http://localhost:8099/")'
+
+ruby -Icollectors/haproxy/lib -e 'require "json"; require "panoptimon-collector-haproxy";
+  puts Panoptimon::Collector::HAProxy._parse_html_info(File.open("/tmp/haproxy-out.html").readlines.join("")).to_json'
data/collectors/haproxy/spec/haproxy_spec.rb
@@ -0,0 +1,98 @@
+#!/usr/bin/env ruby
+
+require 'rspec'
+
+$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
+require 'panoptimon-collector-haproxy'
+
+describe('socket usage') {
+  c = Panoptimon::Collector::HAProxy
+  it('automatically assumes a socket') {
+    c.new().tap {|me|
+      me.collector.should == :stats_from_sock
+      me.stats_url.should == '/var/run/haproxy.sock'
+    }
+  }
+  it('automatically assumes a socket (with argument)') {
+    c.new(stats_url: "/var/lib/haproxy.sock").tap {|me|
+      me.collector.should == :stats_from_sock
+      me.stats_url.should == '/var/lib/haproxy.sock'
+    }
+  }
+  it('takes explicit socket:// too') {
+    c.new(stats_url: "socket://var/lib/haproxy.sock").tap {|me|
+      me.collector.should == :stats_from_sock
+      me.stats_url.should == '/var/lib/haproxy.sock'
+    }
+  }
+  it('knows when you said otherwise') {
+    c.new(stats_url: "sprocket://var/lib/haproxy.sock").tap {|me|
+      me.collector.should_not == :stats_from_sock
+    }
+  }
+
+  it('connects and such') {
+
+    plan = [
+      ['show stat', '-show_stat.csv'],
+      ['show info', '-show_info.txt'],
+    ]
+    c.stub(:_sock_get) {|path, cmd|
+      x = plan.shift
+      cmd.should == x[0]
+      File.open(File.expand_path(__FILE__ + x[1])).
+        readlines
+    }
+    # TODO refactor this to run through self.info or something
+    info = c.stats_from_sock('/fakely')
+    info[:stats][:BACKEND]['qrstuv'][:status].should == 'UP'
+    info[:info][:maxsock].should == 8018
+    info[:info][:maxsock].class.should == Fixnum
+    info[:info][:tasks].should == 6
+
+  }
+}
+
+describe('http usage') {
+  c = Panoptimon::Collector::HAProxy
+  it('recognizes url') {
+    c.new(stats_url: 'http://localhost:8080').tap {|me|
+      me.collector.should == :stats_from_http
+      me.stats_url.should == 'http://localhost:8080/'
+    }
+  }
+
+  it('recognizes https url') {
+    c.new(stats_url: 'https://localhost:8080').tap {|me|
+      me.collector.should == :stats_from_http
+      me.stats_url.should == 'https://localhost:8080/'
+    }
+  }
+
+  it('does not mangle path') {
+    c.new(stats_url: 'http://localhost:8080/bob').tap {|me|
+      me.collector.should == :stats_from_http
+      me.stats_url.should == 'http://localhost:8080/bob'
+    }
+  }
+
+  it('connects and such') {
+    plan = [
+      ['http://localhost:8080/;csv', '-show_stat.csv'],
+      ['http://localhost:8080/', '-get.html'],
+    ]
+    c.stub(:_http_get) {|uri|
+      x = plan.shift
+      uri.should == x[0]
+      File.open(File.expand_path(__FILE__ + x[1])).
+        readlines.join('')
+    }
+
+    info = c.stats_from_http('http://localhost:8080/')
+    info[:stats][:BACKEND]['qrstuv'][:status].should == 'UP'
+    info[:info][:maxsock].should == 8018
+    info[:info][:maxsock].class.should == Fixnum
+    info[:info][:tasks].should == 17
+  }
+
+}
data/collectors/haproxy/spec/haproxy_spec.rb-get.html
@@ -0,0 +1,22 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+"http://www.w3.org/TR/html4/loose.dtd">
+<html><head><title>Statistics Report for HAProxy</title>
+<meta http-equiv="content-type" content="text/html; charset=iso-8859-1">
+<style type="text/css"><!-- --> </style></head>
+<body><h1><a href="http://haproxy.1wt.eu/" style="text-decoration: none;">HAProxy version 1.4.18, released 2011/09/16</a></h1>
+<h2>Statistics Report for pid 528</h2>
+<hr width="100%" class="hr">
+<h3>> General process information</h3>
+<table border=0><tr><td align="left" nowrap width="1%">
+<p><b>pid = </b> 528 (process #1, nbproc = 1)<br>
+<b>uptime = </b> 36d 7h17m01s<br>
+<b>system limits:</b> memmax = unlimited; ulimit-n = 8018<br>
+<b>maxsock = </b> 8018; <b>maxconn = </b> 1024; <b>maxpipes = </b> 0<br>
+current conns = 17; current pipes = 0/0<br>
+Running tasks: 1/17<br>
+</td>
+</tr>
+<!-- blah blah blah -->
+</table>
+</body>
+</html>
data/collectors/haproxy/spec/haproxy_spec.rb-show_info.txt
@@ -0,0 +1,21 @@
+Name: HAProxy
+Version: 1.4.22
+Release_date: 2012/08/09
+Nbproc: 1
+Process_num: 1
+Pid: 1656
+Uptime: 0d 9h31m50s
+Uptime_sec: 34310
+Memmax_MB: 0
+Ulimit-n: 8018
+Maxsock: 8018
+Maxconn: 4000
+Maxpipes: 0
+CurrConns: 1
+PipesUsed: 0
+PipesFree: 0
+Tasks: 6
+Run_queue: 1
+node: localhost.localdomain
+description:
+
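For reference, `_parse_show_info` in haproxy.rb above turns lines like these into a symbol-keyed hash. A small illustration (it assumes `String#as_number`, whose short source is not included in this excerpt, returns an Integer for numeric strings and nil otherwise):

```ruby
Panoptimon::Collector::HAProxy._parse_show_info(["Name: HAProxy\n", "Maxsock: 8018\n"])
# => {:name=>"HAProxy", :maxsock=>8018}
```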
data/collectors/haproxy/spec/haproxy_spec.rb-show_stat.csv
@@ -0,0 +1,25 @@
+# pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,
+stats,FRONTEND,,,1,2,1000,26,6696,444851,0,0,1,,,,,OPEN,,,,,,,,,1,1,0,,,,0,1,0,2,,,,0,24,0,1,0,0,,1,2,26,,,
+stats,BACKEND,0,0,0,0,1000,0,6696,444851,0,0,,0,0,0,0,UP,0,0,0,,0,3136647,0,,1,1,0,,0,,1,0,,0,,,,0,0,0,0,0,0,,,,,0,0,
+bert_qaas,FRONTEND,,,8,8,1000,467,1541583624,5705953051,0,0,0,,,,,OPEN,,,,,,,,,1,2,0,,,,0,0,0,5,,,,,,,,,,,0,0,0,,,
+bert_qaas,xq1.in.example.com,0,0,4,5,1000,234,1120374095,2366260217,,0,,0,224,0,0,no check,1,1,0,,,,,,1,2,1,,234,,2,0,,3,,,,,,,,,,0,,,,6,224,
+bert_qaas,xq3.in.example.com,0,0,4,6,1000,233,421209529,3339692834,,0,,0,222,0,0,no check,1,1,0,,,,,,1,2,2,,233,,2,0,,2,,,,,,,,,,0,,,,7,222,
+bert_qaas,BACKEND,0,0,8,8,1000,467,1541583624,5705953051,0,0,,0,446,0,0,UP,2,2,0,,0,3136647,0,,1,2,0,,467,,1,0,,5,,,,,,,,,,,,,,13,446,
+bert_knobs,FRONTEND,,,0,0,1000,0,0,0,0,0,0,,,,,OPEN,,,,,,,,,1,3,0,,,,0,0,0,0,,,,0,0,0,0,0,0,,0,0,0,,,
+bert_knobs,xq1.in.example.com,0,0,0,0,1000,0,0,0,,0,,0,0,0,0,no check,1,1,0,,,,,,1,3,1,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,
+bert_knobs,xq3.in.example.com,0,0,0,0,1000,0,0,0,,0,,0,0,0,0,no check,1,1,0,,,,,,1,3,2,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,
+bert_knobs,BACKEND,0,0,0,0,1000,0,0,0,0,0,,0,0,0,0,UP,2,2,0,,0,3136647,0,,1,3,0,,0,,1,0,,0,,,,0,0,0,0,0,0,,,,,0,0,
+echo_http,FRONTEND,,,0,0,1000,0,0,0,0,0,0,,,,,OPEN,,,,,,,,,1,4,0,,,,0,0,0,0,,,,0,0,0,0,0,0,,0,0,0,,,
+echo_http,f1.in.example.com,0,0,0,0,1000,0,0,0,,0,,0,0,0,0,no check,1,1,0,,,,,,1,4,1,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,
+echo_http,f2.in.example.com,0,0,0,0,1000,0,0,0,,0,,0,0,0,0,no check,1,1,0,,,,,,1,4,2,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,
+echo_http,f3.in.example.com,0,0,0,0,1000,0,0,0,,0,,0,0,0,0,no check,1,1,0,,,,,,1,4,3,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,
+echo_http,f4.in.example.com,0,0,0,0,1000,0,0,0,,0,,0,0,0,0,no check,1,1,0,,,,,,1,4,4,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,
+echo_http,f5.in.example.com,0,0,0,0,1000,0,0,0,,0,,0,0,0,0,no check,1,1,0,,,,,,1,4,5,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,
+echo_http,BACKEND,0,0,0,0,1000,0,0,0,0,0,,0,0,0,0,UP,5,5,0,,0,3136647,0,,1,4,0,,0,,1,0,,0,,,,0,0,0,0,0,0,,,,,0,0,
+qrstuv,FRONTEND,,,8,51,1000,1333,5218374317,4978521250,0,0,0,,,,,OPEN,,,,,,,,,1,5,0,,,,0,0,0,33,,,,,,,,,,,0,0,0,,,
+qrstuv,abcd.in.example.com,0,0,1,11,1000,263,762637239,729424629,,0,,0,0,0,0,no check,1,1,0,,,,,,1,5,1,,263,,2,0,,6,,,,,,,,,,0,,,,0,0,
+qrstuv,abcd.in.example.com,0,0,2,11,1000,271,922439480,896042928,,0,,0,0,0,0,no check,1,1,0,,,,,,1,5,2,,271,,2,0,,7,,,,,,,,,,0,,,,0,0,
+qrstuv,abcd.in.example.com,0,0,2,11,1000,272,1392243548,1312264244,,0,,0,0,0,0,no check,1,1,0,,,,,,1,5,3,,272,,2,0,,7,,,,,,,,,,0,,,,0,0,
+qrstuv,abcd.in.example.com,0,0,2,11,1000,267,1401121383,1321460656,,0,,0,0,0,0,no check,1,1,0,,,,,,1,5,4,,267,,2,0,,7,,,,,,,,,,0,,,,0,0,
+qrstuv,abcd.in.example.com,0,0,1,11,1000,260,739932667,719328793,,0,,0,0,0,0,no check,1,1,0,,,,,,1,5,5,,260,,2,0,,7,,,,,,,,,,0,,,,0,0,
+qrstuv,BACKEND,0,0,8,51,1000,1333,5218374317,4978521250,0,0,,0,0,0,0,UP,5,5,0,,0,3136647,0,,1,5,0,,1333,,1,0,,33,,,,,,,,,,,,,,0,0,
data/collectors/haproxy/spec/haproxy_spec.rb-show_stat2.csv
@@ -0,0 +1,11 @@
+# pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,
+main,FRONTEND,,,0,0,3000,0,0,0,0,0,0,,,,,OPEN,,,,,,,,,1,1,0,,,,0,0,0,0,,,,0,0,0,0,0,0,,0,0,0,,,
+static,static,0,0,0,0,,0,0,0,,0,,0,0,0,0,DOWN,1,1,0,0,1,32987,32987,,1,2,1,,0,,2,0,,0,L4CON,,0,0,0,0,0,0,0,0,,,,0,0,
+static,BACKEND,0,0,0,0,0,0,0,0,0,0,,0,0,0,0,DOWN,0,0,0,,1,32987,32987,,1,2,0,,0,,1,0,,0,,,,0,0,0,0,0,0,,,,,0,0,
+app,app1,0,0,0,0,,0,0,0,,0,,0,0,0,0,DOWN,1,1,0,0,1,32987,32987,,1,3,1,,0,,2,0,,0,L4CON,,0,0,0,0,0,0,0,0,,,,0,0,
+app,app2,0,0,0,0,,0,0,0,,0,,0,0,0,0,DOWN,1,1,0,0,1,32986,32986,,1,3,2,,0,,2,0,,0,L4CON,,0,0,0,0,0,0,0,0,,,,0,0,
+app,app3,0,0,0,0,,0,0,0,,0,,0,0,0,0,DOWN,1,1,0,0,1,32986,32986,,1,3,3,,0,,2,0,,0,L4CON,,0,0,0,0,0,0,0,0,,,,0,0,
+app,app4,0,0,0,0,,0,0,0,,0,,0,0,0,0,DOWN,1,1,0,0,1,32986,32986,,1,3,4,,0,,2,0,,0,L4CON,,0,0,0,0,0,0,0,0,,,,0,0,
+app,BACKEND,0,0,0,0,0,0,0,0,0,0,,0,0,0,0,DOWN,0,0,0,,1,32986,32986,,1,3,0,,0,,1,0,,0,,,,0,0,0,0,0,0,,,,,0,0,
+stats,FRONTEND,,,1,2,3000,7,1694,62774,0,0,1,,,,,OPEN,,,,,,,,,1,4,0,,,,0,1,0,2,,,,0,5,0,1,0,0,,1,1,7,,,
+stats,BACKEND,0,0,0,0,3000,0,1694,62774,0,0,,0,0,0,0,UP,0,0,0,,0,32987,0,,1,4,0,,0,,1,0,,0,,,,0,0,0,0,0,0,,,,,0,0,
data/collectors/http/README.md
@@ -0,0 +1,49 @@
+# Config
+
+The list of urls may include simple urls (http:// is assumed if the
+scheme is omitted), or hashes for more specific behavior.
+
+* 'name' - a shorthand name for the report rather than the full url
+* 'method' - 'get' or 'head'
+* 'match' - regexp match against the content (implies the 'get' method)
+* 'timeout' - per-url request timeout in seconds (overrides 'default_timeout')
+
+```json
+{
+  "urls" : [
+    "localhost",
+    "example.com",
+    "https://github.com",
+    {"name" : "example", "url": "https://ssl.example.com:6983/whatever",
+      "match" : "<title>What", "timeout": 7},
+  ],
+  "default_method" : "head",
+  "default_timeout" : 3,
+
+  "interval" : 60,
+  "timeout" : 25,
+}
+```
+
+# Output
+
+Output will typically contain the following, depending on the status and
+request. If the request timed out, the 'timeout' metric will be true.
+
+```json
+{
+  "http|localhost|code" => 200,
+  "http|localhost|elapsed" => 0.02, # seconds
+  "http|localhost|content_length" => 97, # bytes
+  ...
+  "http|github.com|ssl|expires_in" => 604809, # seconds
+  "http|github.com|ssl|_info" => {
+    "issuer" : "Digicert Inc",
+    "serial" : "...",
+    "valid" : "2010-06-19 00:00:00 UTC",
+    "expires" : "2011-06-19 00:00:00 UTC"
+  }
+  ...
+  "http|example|ok" => true, # only if there is a match spec'd
+}
+```
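Internally each entry in `urls` becomes one `Panoptimon::Collector::HTTP` instance (see the `http` executable and library just below). A rough sketch of that mapping for the hash-style entry above — the option values are the README's own, and the return shape is abbreviated:

```ruby
require 'panoptimon-collector-http'  # assumes collectors/http/lib is on the load path

opts = { method: 'get', timeout: 7, match: '<title>What' }
Panoptimon::Collector::HTTP.new('https://ssl.example.com:6983/whatever', opts).go
# => { elapsed: 0.02, content_length: 97, code: "200",
#      ssl: { expires_in: 604809, _info: {...} }, ok: true }
```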
data/collectors/http/http
@@ -0,0 +1,27 @@
+#! /usr/bin/env ruby
+$LOAD_PATH.unshift File.expand_path(File.join(File.dirname(__FILE__), 'lib'))
+
+require 'json'
+require 'panoptimon-collector-http'
+
+ARGV[0] or raise "arguments required"
+conf = JSON.parse(ARGV[0], {symbolize_names: true})
+
+defaults = {
+  method: conf[:default_method] || 'head',
+  timeout: conf[:default_timeout] || 3,
+}
+
+raise "must have 'urls' value in config" unless conf[:urls]
+setup = conf[:urls].map {|u|
+  o = defaults.merge(u.is_a?(Hash) ? u : {url: u})
+  o[:url] = 'http://' + o[:url] unless o[:url].match(%r{^\w+://})  # prepend scheme if missing
+  o[:name] ||= o[:url].to_s
+  o
+}
+
+output = Hash[setup.map {|o|
+  [o[:name], Panoptimon::Collector::HTTP.new(o[:url], o).go]
+}]
+puts JSON.generate(output)
+
data/collectors/http/lib/panoptimon-collector-http/http.rb
@@ -0,0 +1,74 @@
+require 'json'
+require 'socket'
+require 'openssl'
+require 'net/http'
+
+module Panoptimon
+  module Collector
+    class HTTP
+
+      attr_reader :uri, :use_ssl, :timeout, :match, :method
+      def initialize(uri, opt = {})
+        @uri = URI(uri)
+        @use_ssl = @uri.scheme == 'https'
+        @timeout = opt[:timeout]
+        @match = opt[:match] ? %r{#{opt[:match]}} : nil
+        @method = opt[:method].downcase.to_sym
+        @method = :get if @method == :head and @match
+      end
+
+      def connect
+        @connect ||= ::Net::HTTP.start(uri.host, uri.port,
+          :use_ssl => use_ssl,
+          :open_timeout => timeout,
+        )
+      end
+
+      def request
+        crass = {
+          head: ::Net::HTTP::Head,
+          get: ::Net::HTTP::Get,
+        }[method]
+        raise "method #{method} not implemented" unless crass
+        crass.new(uri.request_uri)
+      end
+
+      def certificate_info (cert, now=Time.now)
+        return {
+          expires_in: (cert.not_after - now).round(0),
+          _info: {
+            issuer: cert.issuer.to_s.match(%r{/O=([^/]+)})[1],
+            valid: cert.not_before.to_s,
+            expires: cert.not_after.to_s,
+            serial: sprintf('%032x', cert.serial).to_s.gsub(/(..)(?!$)/, '\1:'),
+          }
+        }
+      end
+
+      def go
+        start = Time.now
+        response = begin; connect.request(request)
+          rescue Timeout::Error; nil ; end
+
+        ans = {
+          elapsed: (Time.now - start).round(6),
+        }
+
+        return ans.merge({timeout: true}) unless response
+
+        ans.merge!({
+          content_length: response.header.content_length,
+          code: response.code,
+        })
+        ans[:ssl] = certificate_info(connect.peer_cert) if use_ssl
+
+        ans[:ok] = response.body.match(match) ? true : false if match
+
+        (ans[:_info] ||= {})[:redirect] = response.header['Location'] \
+          if ans[:code].to_s.match(/^3/)
+
+        return ans
+      end
+    end
+  end
+end
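The serial formatting in `certificate_info` pads the serial to 32 hex digits and colon-separates the byte pairs; a standalone check of just that expression (the serial value 123 is arbitrary):

```ruby
sprintf('%032x', 123).gsub(/(..)(?!$)/, '\1:')
# => "00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:7b"
```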
data/collectors/interfaces/interfaces
@@ -0,0 +1,33 @@
+#!/usr/bin/env ruby
+
+require 'json'
+require 'panoptimon/util'
+Panoptimon::Util.os(linux: true)
+
+head = [
+  'rx bytes',
+  'rx packets',
+  'rx errs',
+  'rx drop',
+  'rx fifo',
+  'rx frame',
+  'rx compressed',
+  'rx multicast',
+  'tx bytes',
+  'tx packets',
+  'tx drops',
+  'tx fifo',
+  'tx colls',
+  'tx carrier',
+  'tx compressed'
+]
+
+class Array; def to_h ; Hash[*self.flatten]; end; end
+
+state = File.read('/proc/net/dev').
+  split("\n").drop(2).map {|l|
+    (iface, row) = *l.sub(/^\s+/, '').split(/:\s+/, 2)
+    [iface, head.zip(row.split(/\s+/)).to_h]
+  }.to_h
+
+puts JSON::generate(state)
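A self-contained illustration of the per-row parsing above (the header list is abbreviated and the `/proc/net/dev` row is made up):

```ruby
head = ['rx bytes', 'rx packets']   # abbreviated; the collector uses the full list above
line = '  eth0: 123456 789'         # hypothetical /proc/net/dev row
iface, row = line.sub(/^\s+/, '').split(/:\s+/, 2)
p({ iface => Hash[head.zip(row.split(/\s+/))] })
# => {"eth0"=>{"rx bytes"=>"123456", "rx packets"=>"789"}}
```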
data/collectors/interfaces/interfaces.json
@@ -0,0 +1 @@
+{}