ffmapquery 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/bin/ffmapquery +464 -0
- metadata +45 -0
checksums.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
---
|
|
2
|
+
SHA1:
|
|
3
|
+
metadata.gz: 01bf94c1a027b56080cbfa6c55216bea3e024ed6
|
|
4
|
+
data.tar.gz: 9899344436fcaba7e256d5a175c680e7603b0752
|
|
5
|
+
SHA512:
|
|
6
|
+
metadata.gz: 6d29bdc1aa22af632ae0f5d738aaf20fcef5c036088cc578a3d33fba27842eee7e44f4904537d8cc288677df0247206bf60a9b2ae73a9e71ccb0683521befd4a
|
|
7
|
+
data.tar.gz: 542fd523c2bfc1c939964dc64fb959b18fadc43e000bd158b77e80ecd8abaa73377bdc13ec8320438018127d748a170aefb8c4f0ce3b161d94d9076d898425e4
|
data/bin/ffmapquery
ADDED
|
@@ -0,0 +1,464 @@
|
|
|
1
|
+
#!/usr/bin/env ruby

# Global configuration hash, filled by read_config() from the --config file.
$C = Hash.new

# The gemspec declares no dependencies, so the json/pure gem may not be
# installed; fall back to the stdlib JSON implementation in that case.
begin
  require 'json/pure'
rescue LoadError
  require 'json'
end
require 'timeout'
require 'open3'
require 'fileutils'
require 'socket'
require 'uri'
require 'net/http'

# Flush output immediately so log lines appear in order when redirected.
$stdout.sync = true
$stderr.sync = true
17
|
+
# Parses the batman-adv graph JSON and pre-computes, per node, the number
# of mesh and VPN links as well as the adjacency list (neighbour id,
# inverse transmit quality, link type).
class Links
  # @param [String] data raw graph.json document
  def initialize(data)
    @data = data
    @link_list = Hash.new
    graph_json = JSON.parse(data)
    graph_nodes = Hash.new
    # Map node ids to their array index and vice versa — the links below
    # reference nodes by array index.
    # (was: graph_json['batadv']['nodes'].index(n) inside the loop,
    # which made this O(n^2) for no reason)
    graph_json['batadv']['nodes'].each_with_index { | n, id |
      graph_nodes[n['node_id']] = id
      graph_nodes[id] = n['node_id']
    }
    @links = Hash.new
    @links.default = 0
    # Now count the links...
    graph_json['batadv']['links'].each { | l |
      # Resolve the names of source and target
      source_node = graph_nodes[l['source']].to_s
      target_node = graph_nodes[l['target']].to_s
      begin
        sl = @link_list[source_node]
        sl = Array.new unless sl
        sl << [target_node, 1 / l['tq'], (l['vpn'] ? :vpn : :mesh)]
        @link_list[source_node] = sl
        if l['bidirect']
          tl = @link_list[target_node]
          tl = Array.new unless tl
          tl << [source_node, 1 / l['tq'], (l['vpn'] ? :vpn : :mesh)]
          @link_list[target_node] = tl
        end
      rescue
        # ignored: malformed link entries (e.g. tq == 0) are skipped
      end
      if l['vpn']
        @links[source_node + '_vpn'] += 1
        @links[target_node + '_vpn'] += 1
      else
        @links[source_node + '_mesh'] += 1
        @links[target_node + '_mesh'] += 1
      end
    }
  end

  # @param [String] hostname node id
  # @return [Array(Fixnum, Fixnum)] number of mesh links, number of VPN links
  def links(hostname)
    hostname = hostname.to_s
    [@links[hostname + '_mesh'], @links[hostname + '_vpn']]
  end

  # @param [String] hostname node id
  # @return [Array, nil] adjacency list [[neighbour_id, 1/tq, :mesh|:vpn], ...]
  def link_list(hostname)
    @link_list[hostname]
  end
end
73
|
+
|
|
74
|
+
# Flattens a nested Hash/Array structure into a single-level Hash whose
# keys are dot-joined paths, e.g. {'a' => {'b' => 1}} -> {'a.b' => 1}.
# Array elements use their index as the path component.
#
# @param [Hash, Array] hash structure to flatten
# @param [String] sub key prefix accumulated during recursion
# @return [Hash] flat { 'path.to.key' => value } hash
# @raise [Exception] when the argument is neither a Hash nor an Array
def hash_flatten(hash, sub = '')
  new_hash = Hash.new
  sub = sub + '.' if sub.length > 0
  # Shared per-entry logic — the original duplicated this verbatim for
  # the Hash and the Array branch.
  flatten_entry = lambda { |key, data|
    key = sub + key.to_s
    case data
    when Hash, Array
      new_hash.merge!(hash_flatten(data, key))
    else
      new_hash[key] = data
    end
  }
  case hash
  when Array
    hash.each_index { |key| flatten_entry.call(key, hash[key]) }
  when Hash
    hash.each_pair { |key, data| flatten_entry.call(key, data) }
  else
    # NOTE(review): raises bare Exception (not StandardError), so the
    # callers' bare `rescue` clauses will NOT swallow it — kept as-is.
    raise Exception.new
  end
  new_hash
end
108
|
+
|
|
109
|
+
# Thin wrapper around an alfred JSON document: parses it and exposes the
# content as a single flat hash with dot-joined keys.
class Alfred
  attr_reader :hash

  # @param [String] data raw alfred.json document
  def initialize(data)
    @hash = hash_flatten(JSON.parse(data))
  end
end
117
|
+
|
|
118
|
+
# Maps DataNode's internal metric keys (symbols) to the field names
# emitted in the InfluxDB line protocol (see DataNode#get_influx_lines).
$mapping = {
  :online => 'stats_online',
  :uptime => 'stats_uptime',
  :loadavg => 'usage_loadavg',
  :memory_usage => 'usage_memory',
  :rootfs_usage => 'usage_rootfs',
  :clients => 'connected_clients',
  :mesh => 'connected_mesh',
  :vpn => 'connected_vpn',
  :traffic_rx_bytes => 'traffic_fastd_rx_bytes',
  :traffic_rx_packets => 'traffic_fastd_rx_packets',
  :traffic_tx_bytes => 'traffic_fastd_tx_bytes',
  :traffic_tx_packets => 'traffic_fastd_tx_packets',
  :traffic_bytes => 'traffic_batman_bytes',
  :traffic_batman_rx_bytes => 'traffic_batman_rx_bytes',
  :traffic_batman_forward_bytes => 'traffic_batman_forward_bytes',
  :traffic_batman_tx_bytes => 'traffic_batman_tx_bytes',
  :traffic_batman_mgmtrx_bytes => 'traffic_batman_mgmtrx_bytes',
  :traffic_batman_mgmttx_bytes => 'traffic_batman_mgmttx_bytes',
  :traffic_packets => 'traffic_batman_packets'
}
139
|
+
|
|
140
|
+
# Wraps a single node entry from nodes.json and renders it as InfluxDB
# line protocol records: one "nodes" measurement line plus one "links"
# line per known neighbour.
class DataNode
  attr_reader :hostname, :autoupdater, :firmware, :hardware, :sitecode

  # @param [Hash] data a single node entry from nodes.json
  # @param [Links] links pre-computed link information
  # @param [Hash] alfred flattened alfred.json data (may be empty)
  # @param [Time] timestamp timestamp of the nodes.json snapshot
  def initialize(data, links, alfred, timestamp)
    @data = Hash.new
    @alfred = alfred
    # The hostname is used in tags and as part of a file system path, so
    # it may only contain a safe subset of characters.
    @hostname = remove_unsafe_chars(data['nodeinfo']['hostname'])
    @groupname = nil
    # These nodeinfo fields are frequently missing or empty on real
    # nodes; fall back to a placeholder in that case.
    @autoupdater = sanitized_info(data, %w[nodeinfo software autoupdater branch], 'Unbekannt')
    @firmware = sanitized_info(data, %w[nodeinfo software firmware release], 'Unbekannt')
    @hardware = sanitized_info(data, %w[nodeinfo hardware model], 'Unbekannt')
    @sitecode = sanitized_info(data, %w[nodeinfo system site_code], 'default')
    @node_id = data['nodeinfo']['node_id']
    @data[:online] = data['flags']['online'] ? 1 : 0
    @data[:uptime] = data['statistics']['uptime'].to_i
    @data[:loadavg] = data['statistics']['loadavg'].to_f
    @data[:memory_usage] = data['statistics']['memory_usage'].to_f
    @data[:rootfs_usage] = data['statistics']['rootfs_usage'].to_f
    @data[:clients] = data['statistics']['clients'].to_f
    @links = links.link_list(data['nodeinfo']['node_id'])
    @data[:mesh], @data[:vpn] = links.links(@node_id)
    begin
      @mac = data['nodeinfo']['network']['mac']
      # fastd traffic counters come from alfred, keyed by MAC address.
      %w[rx_bytes rx_packets tx_bytes tx_packets].each { |counter|
        value = alfred[@mac.to_s + '.traffic_fastd.' + counter]
        @data["traffic_#{counter}".to_sym] = value if value
      }
      # batman traffic counters come from the node statistics themselves.
      traffic = data['statistics']['traffic']
      @data[:traffic_batman_rx_bytes] = traffic['rx']['bytes'].to_i
      @data[:traffic_batman_forward_bytes] = traffic['forward']['bytes'].to_i
      @data[:traffic_batman_tx_bytes] = traffic['tx']['bytes'].to_i
      @data[:traffic_batman_mgmttx_bytes] = traffic['mgmt_tx']['bytes'].to_i
      @data[:traffic_batman_mgmtrx_bytes] = traffic['mgmt_rx']['bytes'].to_i
    rescue
      # ignored: traffic data is optional and may be missing per node
    end
    @timestamp = timestamp
  end

  # Assigns the node to a named group (rendered as an extra tag).
  # Name kept as-is (not snake_case) because DataGroup#add_member calls it.
  def setGroup(groupname)
    @groupname = groupname.to_s
  end

  # Builds the InfluxDB line protocol records for this node.
  #
  # @param [nil] socket unused; kept for backward compatibility
  # @return [Array<String>] one "nodes" line plus one "links" line per
  #   neighbour that is present in $node_map
  def get_influx_lines(socket = nil)
    out = Array.new
    t = @timestamp.to_i.to_s
    measurement = "nodes"
    tags = "region=" + $C[:region]
    tags << ",domain=" + @sitecode
    tags << ",group=" + @groupname if @groupname
    tags << ",nodeid=" + @node_id
    tags << ",node=" + @hostname
    tags << ",autoupdater=" + @autoupdater
    tags << ",firmware=" + @firmware
    tags << ",hardware=" + @hardware
    # Render all collected metrics as comma-separated field=value pairs,
    # translating internal keys through $mapping.
    fields = @data.map { |key, dp| "#{$mapping[key]}=#{dp}" }.join(",")
    out << "#{measurement},#{tags} #{fields} #{t}"
    # One "links" record per neighbour; neighbours unknown to the current
    # nodes.json ($node_map) are skipped.
    @links.each { | l |
      if $node_map[l.first]
        line = "links"
        line << ",group=#{@groupname}" if @groupname
        line << ",region=#{$C[:region]},domain=#{@sitecode},linktype=#{l[2]},sourcenodeid=#{@node_id},sourcenode=#{@hostname},targetnodeid=#{l.first},targetnode=#{$node_map[l.first]},autoupdater=#{@autoupdater},firmware=#{@firmware},hardware=#{@hardware} tq=#{l[1].to_s} #{t}"
        out << line
      end
    } if @links
    out
  end

  private

  # Digs the given key path out of data, sanitizes the value and falls
  # back to default when the path is missing, raises, or yields an
  # empty string.
  def sanitized_info(data, path, default)
    value = remove_unsafe_chars(data.dig(*path))
    value.length < 1 ? default : value
  rescue
    default
  end
end
233
|
+
|
|
234
|
+
# A named group of nodes selected by a hostname regexp. One group is
# created per file in $C[:groups_dir]; the file's first line supplies
# the regexp (see NodesJson#initialize).
class DataGroup
  # @param [String] name group name (becomes the "group" tag)
  # @param [Regexp, String] filter hostname pattern for membership
  # @param [Fixnum] min_size currently unused
  def initialize(name, filter, min_size = 5)
    @name = name
    @data_points = Hash.new
    @filter = Regexp.new(filter)
    @min_size = min_size
  end

  # Tags the node with this group's name when its hostname matches the
  # filter. Callers try every group on every node; a non-matching filter
  # simply leaves the node untouched.
  #
  # @param [DataNode] node
  # @raise [TypeError] when node is not a DataNode
  # @return [nil, String] nil when the filter does not match
  def add_member(node)
    # was: `raise Exception unless node.class == DataNode` — a bare
    # Exception without message, and class equality rejects subclasses.
    raise TypeError, 'expected a DataNode' unless node.is_a?(DataNode)
    node_name = node.hostname.to_s
    node.setGroup(@name) if node_name.match(@filter)
  end
end
252
|
+
|
|
253
|
+
# Processes one nodes.json snapshot: builds DataNode objects, assigns
# groups, schedules the next polling run ($next_update) and — unless
# dry_run is set — POSTs all line protocol records to InfluxDB.
class NodesJson
  attr_reader :last_update

  # @param [String] data raw nodes.json document
  # @param [Links] links pre-computed link information
  # @param [Hash] alfred flattened alfred.json data
  def initialize(data, links, alfred)
    groups = Hash.new
    begin
      # Read the group definitions: one file per group, the first line
      # of each file is the hostname regexp.
      Dir.entries($C[:groups_dir]).each { | group_name |
        # skip certain entries
        next if group_name[0] == '.'
        next if group_name == 'README.md'
        next if group_name == 'README.txt'
        group_lines = File.readlines($C[:groups_dir] + '/' + group_name)
        groups[remove_unsafe_chars(group_name)] = DataGroup.new(group_name, group_lines.first.chomp)
      } if $C[:groups_dir]
    rescue
      # ignored
      # there will simply be no groups or no groups after the failing group
    end
    @data = JSON.parse(data)
    # Parse the snapshot timestamp by slicing an ISO-8601 string
    # (YYYY-MM-DDTHH:MM:SS); final 0 = UTC offset.
    # NOTE(review): passes String components to Time.new — relies on
    # implicit conversion; verify against the target Ruby version.
    last_update = @data['timestamp']
    @last_update = Time.new(last_update[0..3], last_update[5..6], last_update[8..9], last_update[11..12],
                            last_update[14..15], last_update[17..18], 0)
    # Time#localtime mutates the receiver's zone in place.
    @last_update.localtime
    puts "Last update: #{@last_update.to_s} (#{(Time.now - @last_update).to_i}s ago)"
    # Schedule the next run relative to the data's own timestamp; the
    # jittered variant spreads load on the data source.
    if($C[:interval_forced])
      $next_update = @last_update + $C[:interval]
    else
      $next_update = @last_update + $C[:interval] + 10 + ($C[:interval] * Random.rand(0.3))
    end
    $next_update.localtime
    @nodes = Hash.new
    $known_nodes = Array.new
    # First pass: node id -> sanitized hostname, so link records can
    # resolve their target's name.
    $node_map = Hash.new
    @data['nodes'].each_pair { | node_id, node |
      begin
        $node_map[node_id] = remove_unsafe_chars(node['nodeinfo']['hostname'])
      rescue
        # ignored
      end
    }
    # Second pass: build the DataNode objects and collect their line
    # protocol output.
    post_body = Array.new
    @data['nodes'].each_pair { | node_id, node |
      dn = DataNode.new(node, links, alfred, @last_update)
      @nodes[node_id] = dn
      groups.each_value { | group |
        # Just try to add to every group. The group will reject a node if
        # the filter does not match.
        group.add_member(dn)
      }
      post_body << dn.get_influx_lines
    }
    $stdout.puts(post_body.join("\n")) if $C[:debug]
    if (!$C[:dry_run])
      # Start the HTTP request...
      uri = URI.parse($C[:influx_url])
      http = Net::HTTP.new(uri.host, uri.port)
      post = Net::HTTP::Post.new(uri.request_uri)
      post.body = post_body.join("\n")
      http.start
      response = http.request(post)
      http.finish
      $stdout.puts("Sent #{post.body.length} bytes in #{post_body.length} lines, response #{response.code} #{response.msg}")
    end
  end
end
320
|
+
|
|
321
|
+
# Removes everything from the text that is potentially dangerous in a
# file name or an InfluxDB tag, keeping only [-_0-9a-zA-Z].
#
# Results are cached in $remove_unsafe_chars_cache; the cache key
# includes the replacement string so calls with different replacements
# cannot poison each other.
#
# @param [String] text
# @param [String] r replacement for each disallowed character
# @return [String] a fresh string — the input is never mutated
def remove_unsafe_chars(text, r = '')
  $remove_unsafe_chars_cache ||= Hash.new
  text = text.to_s
  cached = $remove_unsafe_chars_cache[[text, r]]
  return cached.dup if cached
  # gsub returns a new string; the original implementation used
  # `text[regex] = r` which mutated the CALLER's string in place
  # (String#to_s returns self for plain strings).
  cleaned = text.gsub(/[^-_0-9a-zA-Z]/, r)
  # dup the key so later caller-side mutation cannot corrupt the cache.
  $remove_unsafe_chars_cache[[text.dup, r]] = cleaned
  cleaned.dup
end
335
|
+
|
|
336
|
+
# Reads the key=value configuration file into the global $C hash and
# validates that all required options are present. Lines starting with
# '#' are comments; blank lines and lines without '=' are skipped.
# Value conversions: integer-looking values become Integer, "true"/"yes"
# become true, "false"/"no" become false.
#
# Exits the process (status 1) on unknown or missing required options.
#
# @param [String] config_file path to the configuration file
# @return [nil]
def read_config(config_file)
  conf = File.readlines(config_file)
  comment = /^( |\t)*#/
  options = 'alfred_json|graph_json|nodes_json|interval|interval_forced|influx_url|carbon_host|carbon_port|region|groups_dir|quiet'.split('|')
  options_required = 'graph_json|nodes_json|interval|influx_url|region'.split('|')
  conf.each { | line |
    next if line.match(comment)
    a, b = line.split('=', 2)
    # Blank lines and lines without '=' leave b nil — previously this
    # crashed on nil.strip; skip such lines instead.
    next if b.nil?
    a = a.strip.chomp.to_sym
    b = b.strip.chomp
    # Check whether the value is an integer and convert if so
    b = b.to_i if(b.to_i.to_s == b)
    b = true if(b.to_s == "true")
    b = true if(b.to_s == "yes")
    b = false if(b.to_s == "false")
    b = false if(b.to_s == "no")
    if options.include?(a.to_s)
      $C[a] = b
    else
      $stderr.puts("ERROR: Unbekannte Option: #{a}")
      # was bare `exit` (status 0) — every other error path exits 1
      Kernel.exit 1
    end
  }
  $C.each_key { | option |
    options_required = options_required - [option.to_s]
  }
  if options_required.length > 0
    puts "ERROR: Fehlende Optionen. Die folgenden Optionen fehlen in der Konfigurationsdatei: #{options_required.join(', ')}"
    Kernel.exit 1
  end
  nil
end
370
|
+
|
|
371
|
+
# Downloads a URL via an external wget process (5 s connect timeout).
#
# @param [String] url
# @return [String, nil] the raw response body, or nil/"" on failure
def download(url)
  result = nil
  Open3.popen2('wget', '-q', '-O', '-', '--connect-timeout=5', url) do |_stdin, stdout, _wait_thr|
    stdout.binmode
    result = stdout.read
  end
  result
end
381
|
+
|
|
382
|
+
# Runs the given block and prints how long it took.
#
# @param [String] description label for the timing line
# @return the block's return value
def time(description)
  # Monotonic clock instead of Time.now: immune to wall-clock jumps
  # (NTP adjustments, DST) while measuring a duration.
  start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  r = yield
  finish = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  diff = '%.3f' % (finish - start).to_f
  puts "#{description} took #{diff} seconds"
  r
end
391
|
+
|
|
392
|
+
# One polling iteration: wait until $next_update, download and process
# the map data, then (via NodesJson or the ensure clause) schedule the
# next run.
# @return [nil]
def main_loop
  if($C[:interval_forced])
    delay = ($next_update - Time.now).to_i
    # sleep() raises ArgumentError for negative values, which happened
    # whenever the previous run overran its interval — guard it.
    sleep(delay) if delay > 0
    sleep(0.1) until Time.now > $next_update
  else
    # Jitter the polling moment to spread load on the data source.
    sleep(Random.rand(10)) while Time.now < $next_update
  end
  begin
    # Timeout at 95% of the interval so a slow cycle never overlaps the
    # next scheduled run.
    Timeout::timeout($C[:interval]*(0.95)) {
      begin
        alfred_json = time('download alfred_json') { download($C[:alfred_json]) }
        alfred = time('parsing alfred.json') { Alfred.new(alfred_json).hash }
      rescue
        # alfred data is optional — continue without fastd traffic counters
        alfred = Hash.new
      end
      graph_json = time('download graph_json') { download($C[:graph_json]) }
      links = time('calculating links') { Links.new(graph_json) }
      nodes_json = time('download nodes_json') { download($C[:nodes_json]) }
      # NodesJson also advances $next_update based on the data timestamp.
      time('calculating nodes_json') { NodesJson.new(nodes_json, links, alfred) }
    }
  rescue Timeout::Error
    $stderr.puts 'WARNING: Timeout triggered.'
  ensure
    begin
      $next_update = Time.now + $C[:interval] if $next_update <= Time.now
    rescue
      # $next_update may be unset or invalid when processing failed
      $next_update = Time.now + $C[:interval]
    end
  end
  puts "Next update: #{$next_update.to_s} (in #{($next_update - Time.now).to_i} seconds)"
end
426
|
+
|
|
427
|
+
# Process the command line.
while (ARGV.length > 0)
  case a = ARGV.shift
  when '--config'
    read_config(ARGV.shift)
  when '--dry-run'
    # Don't send data to influx
    $C[:dry_run] = true
  when '--debug'
    # Display data that would have been sent to graphite on stderr
    $C[:debug] = true
  else
    $stderr.puts("ERROR: Unbekannte Option: #{a}")
    Kernel.exit 1
  end
end

# Without a configuration there is nothing to do: print usage and bail.
if ($C.length < 1)
  $stderr.puts("ERROR: Befehl: #{$0} --config <conf>")
  Kernel.exit 1
end

# Dump the effective configuration for diagnostics.
p $C

$next_update = Time.now
if $C[:debug]
  # Debug mode: run a single iteration; errors propagate with backtrace.
  main_loop
else
  if $C[:quiet]
    begin
      main_loop while true
    rescue
      # quiet mode: terminate silently on the first unexpected error
      Kernel.exit(1)
    end
  else
    main_loop while true
  end
end
metadata
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: ffmapquery
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 0.0.1
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- tokudan
|
|
8
|
+
autorequire:
|
|
9
|
+
bindir: bin
|
|
10
|
+
cert_chain: []
|
|
11
|
+
date: 2017-07-22 00:00:00.000000000 Z
|
|
12
|
+
dependencies: []
|
|
13
|
+
description: ffmapquery
|
|
14
|
+
email:
|
|
15
|
+
executables:
|
|
16
|
+
- ffmapquery
|
|
17
|
+
extensions: []
|
|
18
|
+
extra_rdoc_files: []
|
|
19
|
+
files:
|
|
20
|
+
- bin/ffmapquery
|
|
21
|
+
homepage:
|
|
22
|
+
licenses:
|
|
23
|
+
- GPL-2.0
|
|
24
|
+
metadata: {}
|
|
25
|
+
post_install_message:
|
|
26
|
+
rdoc_options: []
|
|
27
|
+
require_paths:
|
|
28
|
+
- lib
|
|
29
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
30
|
+
requirements:
|
|
31
|
+
- - ">="
|
|
32
|
+
- !ruby/object:Gem::Version
|
|
33
|
+
version: '0'
|
|
34
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
35
|
+
requirements:
|
|
36
|
+
- - ">="
|
|
37
|
+
- !ruby/object:Gem::Version
|
|
38
|
+
version: '0'
|
|
39
|
+
requirements: []
|
|
40
|
+
rubyforge_project:
|
|
41
|
+
rubygems_version: 2.6.8
|
|
42
|
+
signing_key:
|
|
43
|
+
specification_version: 4
|
|
44
|
+
summary: ffmapquery
|
|
45
|
+
test_files: []
|