logstash-codec-collectd 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +15 -0
- data/.gitignore +4 -0
- data/Gemfile +4 -0
- data/Rakefile +8 -0
- data/lib/logstash/codecs/collectd.rb +485 -0
- data/logstash-codec-collectd.gemspec +26 -0
- data/rakelib/publish.rake +9 -0
- data/rakelib/vendor.rake +169 -0
- data/spec/codecs/collectd.rb +210 -0
- metadata +75 -0
checksums.yaml
ADDED
@@ -0,0 +1,15 @@
---
!binary "U0hBMQ==":
  metadata.gz: !binary |-
    MDA5OTAxYjlhNjRhZGNkZTU2YzA4NDk4OTVmMjIxN2NlMmM1MzYyMA==
  data.tar.gz: !binary |-
    ZTk0NzIyYWFmNTM5Y2I4OTJmNzQyOTE0ZTg5N2ZhY2RlMDFkOGIzYg==
SHA512:
  metadata.gz: !binary |-
    ZDkxYTgxODI4NWMxZTRkYWNjN2ZlMzU2ZTNhYTU4ZDc1ZTZmNTQyZTA0YmVh
    NDJmNzU1ZjUzMTllOGM1ZDU5Nzg3MmI5ZDAzYzk3MmYyYTQyNzUwNTYzNGYx
    NmZkOWE1YTdkMGZlZTMzOWY0ZWMwOWI5OTZiZWRmOTMxMjk3ZjQ=
  data.tar.gz: !binary |-
    ZmViMDg2NjA1Njc0Y2RmYTgyYjM5MTQ4ODQ1NmU0ZTMzMzU2ZTJjMTJiODE0
    OTAyNzk5Y2MzOTU2YjEwN2YzYmFiMTlhYjQwYWE3YTlkYTc1Y2EzNjdiOTEy
    NjA5OTJjYTE0OTRhYjY4ZmRhZGU0MjJmYTcyZTczNDA3ZTBlOTA=
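The !binary values above are base64-encoded hex digests of the two archives inside the .gem file. A minimal Ruby sketch (not part of the package) that recomputes them for comparison, assuming the .gem archive has already been unpacked (for example with tar -xf logstash-codec-collectd-0.1.0.gem) so metadata.gz and data.tar.gz sit in the current directory:

require "digest"
require "base64"

%w[metadata.gz data.tar.gz].each do |file|
  sha1   = Digest::SHA1.file(file).hexdigest
  sha512 = Digest::SHA512.file(file).hexdigest
  # checksums.yaml stores each hex digest base64-encoded, so these strings
  # should match the !binary values shown above.
  puts "#{file} SHA1:   #{Base64.strict_encode64(sha1)}"
  puts "#{file} SHA512: #{Base64.strict_encode64(sha512)}"
end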
data/.gitignore
ADDED
data/Gemfile
ADDED
data/Rakefile
ADDED
data/lib/logstash/codecs/collectd.rb
ADDED
@@ -0,0 +1,485 @@
# encoding utf-8
require "date"
require "logstash/codecs/base"
require "logstash/namespace"
require "logstash/errors"
require "tempfile"
require "time"

# Read events from the connectd binary protocol over the network via udp.
# See https://collectd.org/wiki/index.php/Binary_protocol
#
# Configuration in your Logstash configuration file can be as simple as:
# input {
#   udp {
#     port => 28526
#     buffer_size => 1452
#     codec => collectd { }
#   }
# }
#
# A sample collectd.conf to send to Logstash might be:
#
# Hostname    "host.example.com"
# LoadPlugin interface
# LoadPlugin load
# LoadPlugin memory
# LoadPlugin network
# <Plugin interface>
#     Interface "eth0"
#     IgnoreSelected false
# </Plugin>
# <Plugin network>
#     <Server "10.0.0.1" "25826">
#     </Server>
# </Plugin>
#
# Be sure to replace "10.0.0.1" with the IP of your Logstash instance.
#

class ProtocolError < LogStash::Error; end
class HeaderError < LogStash::Error; end
class EncryptionError < LogStash::Error; end
class NaNError < LogStash::Error; end

class LogStash::Codecs::Collectd < LogStash::Codecs::Base
  config_name "collectd"
  milestone 1

  AUTHFILEREGEX = /([^:]+): (.+)/

  PLUGIN_TYPE = 2
  COLLECTD_TYPE = 4
  SIGNATURE_TYPE = 512
  ENCRYPTION_TYPE = 528

  TYPEMAP = {
    0               => "host",
    1               => "@timestamp",
    PLUGIN_TYPE     => "plugin",
    3               => "plugin_instance",
    COLLECTD_TYPE   => "collectd_type",
    5               => "type_instance",
    6               => "values",
    7               => "interval",
    8               => "@timestamp",
    9               => "interval",
    256             => "message",
    257             => "severity",
    SIGNATURE_TYPE  => "signature",
    ENCRYPTION_TYPE => "encryption"
  }

  PLUGIN_TYPE_FIELDS = {
    'host' => true,
    '@timestamp' => true,
  }

  COLLECTD_TYPE_FIELDS = {
    'host' => true,
    '@timestamp' => true,
    'plugin' => true,
    'plugin_instance' => true,
    'type_instance' => true,
  }

  INTERVAL_VALUES_FIELDS = {
    "interval" => true,
    "values" => true,
  }

  INTERVAL_BASE_FIELDS = {
    'host' => true,
    'collectd_type' => true,
    'plugin' => true,
    'plugin_instance' => true,
    '@timestamp' => true,
    'type_instance' => true,
  }

  INTERVAL_TYPES = {
    7 => true,
    9 => true,
  }

  SECURITY_NONE = "None"
  SECURITY_SIGN = "Sign"
  SECURITY_ENCR = "Encrypt"

  # File path(s) to collectd types.db to use.
  # The last matching pattern wins if you have identical pattern names in multiple files.
  # If no types.db is provided the included types.db will be used (currently 5.4.0).
  config :typesdb, :validate => :array

  # Prune interval records. Defaults to true.
  config :prune_intervals, :validate => :boolean, :default => true

  # Security Level. Default is "None". This setting mirrors the setting from the
  # collectd [Network plugin](https://collectd.org/wiki/index.php/Plugin:Network)
  config :security_level, :validate => [SECURITY_NONE, SECURITY_SIGN, SECURITY_ENCR],
         :default => "None"

  # What to do when a value in the event is NaN (Not a Number)
  # - change_value (default): Change the NaN to the value of the nan_value option and add nan_tag as a tag
  # - warn: Change the NaN to the value of the nan_value option, print a warning to the log and add nan_tag as a tag
  # - drop: Drop the event containing the NaN (this only drops the single event, not the whole packet)
  config :nan_handling, :validate => ['change_value','warn','drop'], :default => 'change_value'

  # Only relevant when nan_handeling is set to 'change_value'
  # Change NaN to this configured value
  config :nan_value, :validate => :number, :default => 0

  # The tag to add to the event if a NaN value was found
  # Set this to an empty string ('') if you don't want to tag
  config :nan_tag, :validate => :string, :default => '_collectdNaN'

  # Path to the authentication file. This file should have the same format as
  # the [AuthFile](http://collectd.org/documentation/manpages/collectd.conf.5.shtml#authfile_filename)
  # in collectd. You only need to set this option if the security_level is set to
  # "Sign" or "Encrypt"
  config :authfile, :validate => :string

  public
  def register
    @logger.info("Starting Collectd codec...")
    init_lambdas!
    if @typesdb.nil?
      @typesdb = ::File.expand_path('../../../vendor/types.db', ::File.dirname(__FILE__))
      if !File.exists?(@typesdb)
        raise "You must specify 'typesdb => ...' in your collectd input (I looked for '#{@typesdb}')"
      end
      @logger.info("Using types.db", :typesdb => @typesdb.to_s)
    end
    @types = get_types(@typesdb)

    if ([SECURITY_SIGN, SECURITY_ENCR].include?(@security_level))
      if @authfile.nil?
        raise "Security level is set to #{@security_level}, but no authfile was configured"
      else
        # Load OpenSSL and instantiate Digest and Crypto functions
        require 'openssl'
        @sha256 = OpenSSL::Digest::Digest.new('sha256')
        @sha1 = OpenSSL::Digest::Digest.new('sha1')
        @cipher = OpenSSL::Cipher.new('AES-256-OFB')
        @auth = {}
        parse_authfile
      end
    end
  end # def register

  public
  def get_types(paths)
    types = {}
    # Get the typesdb
    paths = Array(paths) # Make sure a single path is still forced into an array type
    paths.each do |path|
      @logger.info("Getting Collectd typesdb info", :typesdb => path.to_s)
      File.open(path, 'r').each_line do |line|
        typename, *line = line.strip.split
        @logger.debug("typename", :typename => typename.to_s)
        next if typename.nil? || typename[0,1] == '#'
        types[typename] = line.collect { |l| l.strip.split(":")[0] }
      end
    end
    @logger.debug("Collectd Types", :types => types.to_s)
    return types
  end # def get_types

  def init_lambdas!
    # Lambdas for hash + closure methodology
    # This replaces when statements for fixed values and is much faster
    string_decoder = lambda { |body| body.pack("C*")[0..-2] }
    numeric_decoder = lambda { |body| body.slice!(0..7).pack("C*").unpack("E")[0] }
    counter_decoder = lambda { |body| body.slice!(0..7).pack("C*").unpack("Q>")[0] }
    gauge_decoder = lambda { |body| body.slice!(0..7).pack("C*").unpack("E")[0] }
    derive_decoder = lambda { |body| body.slice!(0..7).pack("C*").unpack("q>")[0] }
    # For Low-Resolution time
    time_decoder = lambda do |body|
      byte1, byte2 = body.pack("C*").unpack("NN")
      Time.at(( ((byte1 << 32) + byte2))).utc
    end
    # Hi-Resolution time
    hirestime_decoder = lambda do |body|
      byte1, byte2 = body.pack("C*").unpack("NN")
      Time.at(( ((byte1 << 32) + byte2) * (2**-30) )).utc
    end
    # Hi resolution intervals
    hiresinterval_decoder = lambda do |body|
      byte1, byte2 = body.pack("C*").unpack("NN")
      Time.at(( ((byte1 << 32) + byte2) * (2**-30) )).to_i
    end
    # Value type decoder
    value_type_decoder = lambda do |body|
      body.slice!(0..1) # Prune the header
      if body.length % 9 == 0 # Should be 9 fields
        count = 0
        retval = []
        # Iterate through and take a slice each time
        types = body.slice!(0..((body.length/9)-1))
        while body.length > 0
          # Use another hash + closure here...
          v = @values_decoder[types[count]].call(body)
          if types[count] == 1 && v.nan?
            case @nan_handling
            when 'drop'; drop = true
            else
              v = @nan_value
              add_nan_tag = true
              @nan_handling == 'warn' && @logger.warn("NaN replaced by #{@nan_value}")
            end
          end
          retval << v
          count += 1
        end
      else
        @logger.error("Incorrect number of data fields for collectd record", :body => body.to_s)
      end
      return retval, drop, add_nan_tag
    end
    # Signature
    signature_decoder = lambda do |body|
      if body.length < 32
        @logger.warning("SHA256 signature too small (got #{body.length} bytes instead of 32)")
      elsif body.length < 33
        @logger.warning("Received signature without username")
      else
        retval = []
        # Byte 32 till the end contains the username as chars (=unsigned ints)
        retval << body[32..-1].pack('C*')
        # Byte 0 till 31 contain the signature
        retval << body[0..31].pack('C*')
      end
      return retval
    end
    # Encryption
    encryption_decoder = lambda do |body|
      retval = []
      user_length = (body.slice!(0) << 8) + body.slice!(0)
      retval << body.slice!(0..user_length-1).pack('C*') # Username
      retval << body.slice!(0..15).pack('C*') # IV
      retval << body.pack('C*')
      return retval
    end
    @id_decoder = {
      0 => string_decoder,
      1 => time_decoder,
      2 => string_decoder,
      3 => string_decoder,
      4 => string_decoder,
      5 => string_decoder,
      6 => value_type_decoder,
      7 => numeric_decoder,
      8 => hirestime_decoder,
      9 => hiresinterval_decoder,
      256 => string_decoder,
      257 => numeric_decoder,
      512 => signature_decoder,
      528 => encryption_decoder
    }
    # TYPE VALUES:
    # 0: COUNTER
    # 1: GAUGE
    # 2: DERIVE
    # 3: ABSOLUTE
    @values_decoder = {
      0 => counter_decoder,
      1 => gauge_decoder,
      2 => derive_decoder,
      3 => counter_decoder
    }
  end # def init_lambdas!

  public
  def get_values(id, body)
    drop = false
    add_tag = false
    if id == 6
      retval, drop, add_nan_tag = @id_decoder[id].call(body)
      # Use hash + closure/lambda to speed operations
    else
      retval = @id_decoder[id].call(body)
    end
    return retval, drop, add_nan_tag
  end

  private
  def parse_authfile
    # We keep the authfile parsed in memory so we don't have to open the file
    # for every event.
    @logger.debug("Parsing authfile #{@authfile}")
    if !File.exist?(@authfile)
      raise LogStash::ConfigurationError, "The file #{@authfile} was not found"
    end
    @auth.clear
    @authmtime = File.stat(@authfile).mtime
    File.readlines(@authfile).each do |line|
      #line.chomp!
      k,v = line.scan(AUTHFILEREGEX).flatten
      if k && v
        @logger.debug("Added authfile entry '#{k}' with key '#{v}'")
        @auth[k] = v
      else
        @logger.info("Ignoring malformed authfile line '#{line.chomp}'")
      end
    end
  end # def parse_authfile

  private
  def get_key(user)
    return if @authmtime.nil? or @authfile.nil?
    # Validate that our auth data is still up-to-date
    parse_authfile if @authmtime < File.stat(@authfile).mtime
    key = @auth[user]
    @logger.warn("User #{user} is not found in the authfile #{@authfile}") if key.nil?
    return key
  end # def get_key

  private
  def verify_signature(user, signature, payload)
    # The user doesn't care about the security
    return true if @security_level == SECURITY_NONE

    # We probably got and array of ints, pack it!
    payload = payload.pack('C*') if payload.is_a?(Array)

    key = get_key(user)
    return false if key.nil?

    return OpenSSL::HMAC.digest(@sha256, key, user+payload) == signature
  end # def verify_signature

  private
  def decrypt_packet(user, iv, content)
    # Content has to have at least a SHA1 hash (20 bytes), a header (4 bytes) and
    # one byte of data
    return [] if content.length < 26
    content = content.pack('C*') if content.is_a?(Array)
    key = get_key(user)
    if key.nil?
      @logger.debug("Key was nil")
      return []
    end

    # Set the correct state of the cipher instance
    @cipher.decrypt
    @cipher.padding = 0
    @cipher.iv = iv
    @cipher.key = @sha256.digest(key);
    # Decrypt the content
    plaintext = @cipher.update(content) + @cipher.final
    # Reset the state, as adding a new key to an already instantiated state
    # results in an exception
    @cipher.reset

    # The plaintext contains a SHA1 hash as checksum in the first 160 bits
    # (20 octets) of the rest of the data
    hash = plaintext.slice!(0..19)

    if @sha1.digest(plaintext) != hash
      @logger.warn("Unable to decrypt packet, checksum mismatch")
      return []
    end
    return plaintext.unpack('C*')
  end # def decrypt_packet

  public
  def decode(payload)
    payload = payload.bytes.to_a

    collectd = {}
    was_encrypted = false

    while payload.length > 0 do
      typenum = (payload.slice!(0) << 8) + payload.slice!(0)
      # Get the length of the data in this part, but take into account that
      # the header is 4 bytes
      length = ((payload.slice!(0) << 8) + payload.slice!(0)) - 4
      # Validate that the part length is correct
      raise(HeaderError) if length > payload.length

      body = payload.slice!(0..length-1)

      field = TYPEMAP[typenum]
      if field.nil?
        @logger.warn("Unknown typenumber: #{typenum}")
        next
      end

      values, drop, add_nan_tag = get_values(typenum, body)

      case typenum
      when SIGNATURE_TYPE
        raise(EncryptionError) unless verify_signature(values[0], values[1], payload)
        next
      when ENCRYPTION_TYPE
        payload = decrypt_packet(values[0], values[1], values[2])
        raise(EncryptionError) if payload.empty?
        was_encrypted = true
        next
      when PLUGIN_TYPE
        # We've reached a new plugin, delete everything except for the the host
        # field, because there's only one per packet and the timestamp field,
        # because that one goes in front of the plugin
        collectd.each_key do |k|
          collectd.delete(k) unless PLUGIN_TYPE_FIELDS.has_key?(k)
        end
      when COLLECTD_TYPE
        # We've reached a new type within the plugin section, delete all fields
        # that could have something to do with the previous type (if any)
        collectd.each_key do |k|
          collectd.delete(k) unless COLLECTD_TYPE_FIELDS.has_key?(k)
        end
      end

      raise(EncryptionError) if !was_encrypted and @security_level == SECURITY_ENCR

      # Fill in the fields.
      if values.is_a?(Array)
        if values.length > 1 # Only do this iteration on multi-value arrays
          values.each_with_index do |value, x|
            begin
              type = collectd['collectd_type']
              key = @types[type]
              key_x = key[x]
              # assign
              collectd[key_x] = value
            rescue
              @logger.error("Invalid value for type=#{type.inspect}, key=#{@types[type].inspect}, index=#{x}")
            end
          end
        else # Otherwise it's a single value
          collectd['value'] = values[0] # So name it 'value' accordingly
        end
      elsif field != nil # Not an array, make sure it's non-empty
        collectd[field] = values # Append values to collectd under key field
      end

      if INTERVAL_VALUES_FIELDS.has_key?(field)
        if ((@prune_intervals && !INTERVAL_TYPES.has_key?(typenum)) || !@prune_intervals)
          # Prune these *specific* keys if they exist and are empty.
          # This is better than looping over all keys every time.
          collectd.delete('type_instance') if collectd['type_instance'] == ""
          collectd.delete('plugin_instance') if collectd['plugin_instance'] == ""
          if add_nan_tag
            collectd['tags'] ||= []
            collectd['tags'] << @nan_tag
          end
          # This ugly little shallow-copy hack keeps the new event from getting munged by the cleanup
          # With pass-by-reference we get hosed (if we pass collectd, then clean it up rapidly, values can disappear)
          if !drop # Drop the event if it's flagged true
            yield LogStash::Event.new(collectd.dup)
          else
            raise(NaNError)
          end
        end
        # Clean up the event
        collectd.each_key do |k|
          collectd.delete(k) if !INTERVAL_BASE_FIELDS.has_key?(k)
        end
      end
    end # while payload.length > 0 do
  rescue EncryptionError, ProtocolError, HeaderError, NaNError
    # basically do nothing, we just want out
  end # def decode

end # class LogStash::Codecs::Collectd
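Every part the decode loop above walks through is framed the same way: a 2-byte big-endian type number followed by a 2-byte big-endian length that includes the 4 header bytes themselves. A minimal stand-alone Ruby sketch (not part of the package) that hand-builds one "host" part and applies the same header arithmetic as decode:

# Build a single "host" part (type 0 in TYPEMAP): 2-byte type,
# 2-byte total length (header included), then a NUL-terminated string.
host = "host.example.com\0"
part = [0, host.bytesize + 4].pack("nn") + host

bytes   = part.bytes.to_a
typenum = (bytes.slice!(0) << 8) + bytes.slice!(0)        # => 0, i.e. "host"
length  = ((bytes.slice!(0) << 8) + bytes.slice!(0)) - 4  # body length without the header
body    = bytes.slice!(0..length - 1)
p [typenum, body.pack("C*")[0..-2]]                       # => [0, "host.example.com"]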
data/logstash-codec-collectd.gemspec
ADDED
@@ -0,0 +1,26 @@
Gem::Specification.new do |s|

  s.name = 'logstash-codec-collectd'
  s.version = '0.1.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Read events from the connectd binary protocol"
  s.description = "Read events from the connectd binary protocol"
  s.authors = ["Elasticsearch"]
  s.email = 'richard.pijnenburg@elasticsearch.com'
  s.homepage = "http://logstash.net/"
  s.require_paths = ["lib"]

  # Files
  s.files = `git ls-files`.split($\)+::Dir.glob('vendor/*')

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "group" => "codec" }

  # Gem dependencies
  s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'

end
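A small sketch (not part of the package) for inspecting the published attributes without building the gem. It assumes it is run from a checkout of the plugin source, because the s.files line above shells out to git ls-files when the spec is evaluated:

require "rubygems"

spec = Gem::Specification.load("logstash-codec-collectd.gemspec")
puts spec.name      # => logstash-codec-collectd
puts spec.version   # => 0.1.0
puts spec.metadata  # => {"logstash_plugin"=>"true", "group"=>"codec"}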
data/rakelib/publish.rake
ADDED
@@ -0,0 +1,9 @@
require "gem_publisher"

desc "Publish gem to RubyGems.org"
task :publish_gem do |t|
  gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
  gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
  puts "Published #{gem}" if gem
end
data/rakelib/vendor.rake
ADDED
@@ -0,0 +1,169 @@
require "net/http"
require "uri"
require "digest/sha1"

def vendor(*args)
  return File.join("vendor", *args)
end

directory "vendor/" => ["vendor"] do |task, args|
  mkdir task.name
end

def fetch(url, sha1, output)

  puts "Downloading #{url}"
  actual_sha1 = download(url, output)

  if actual_sha1 != sha1
    fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
  end
end # def fetch

def file_fetch(url, sha1)
  filename = File.basename( URI(url).path )
  output = "vendor/#{filename}"
  task output => [ "vendor/" ] do
    begin
      actual_sha1 = file_sha1(output)
      if actual_sha1 != sha1
        fetch(url, sha1, output)
      end
    rescue Errno::ENOENT
      fetch(url, sha1, output)
    end
  end.invoke

  return output
end

def file_sha1(path)
  digest = Digest::SHA1.new
  fd = File.new(path, "r")
  while true
    begin
      digest << fd.sysread(16384)
    rescue EOFError
      break
    end
  end
  return digest.hexdigest
ensure
  fd.close if fd
end

def download(url, output)
  uri = URI(url)
  digest = Digest::SHA1.new
  tmp = "#{output}.tmp"
  Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
    request = Net::HTTP::Get.new(uri.path)
    http.request(request) do |response|
      fail "HTTP fetch failed for #{url}. #{response}" if response.code != "200"
      size = (response["content-length"].to_i || -1).to_f
      count = 0
      File.open(tmp, "w") do |fd|
        response.read_body do |chunk|
          fd.write(chunk)
          digest << chunk
          if size > 0 && $stdout.tty?
            count += chunk.bytesize
            $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
          end
        end
      end
      $stdout.write("\r \r") if $stdout.tty?
    end
  end

  File.rename(tmp, output)

  return digest.hexdigest
rescue SocketError => e
  puts "Failure while downloading #{url}: #{e}"
  raise
ensure
  File.unlink(tmp) if File.exist?(tmp)
end # def download

def untar(tarball, &block)
  require "archive/tar/minitar"
  tgz = Zlib::GzipReader.new(File.open(tarball))
  # Pull out typesdb
  tar = Archive::Tar::Minitar::Input.open(tgz)
  tar.each do |entry|
    path = block.call(entry)
    next if path.nil?
    parent = File.dirname(path)

    mkdir_p parent unless File.directory?(parent)

    # Skip this file if the output file is the same size
    if entry.directory?
      mkdir path unless File.directory?(path)
    else
      entry_mode = entry.instance_eval { @mode } & 0777
      if File.exists?(path)
        stat = File.stat(path)
        # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
        # expose headers in the entry.
        entry_size = entry.instance_eval { @size }
        # If file sizes are same, skip writing.
        next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
      end
      puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
      File.open(path, "w") do |fd|
        # eof? check lets us skip empty files. Necessary because the API provided by
        # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
        # IO object. Something about empty files in this EntryStream causes
        # IO.copy_stream to throw "can't convert nil into String" on JRuby
        # TODO(sissel): File a bug about this.
        while !entry.eof?
          chunk = entry.read(16384)
          fd.write(chunk)
        end
        #IO.copy_stream(entry, fd)
      end
      File.chmod(entry_mode, path)
    end
  end
  tar.close
  File.unlink(tarball) if File.file?(tarball)
end # def untar

def ungz(file)

  outpath = file.gsub('.gz', '')
  tgz = Zlib::GzipReader.new(File.open(file))
  begin
    File.open(outpath, "w") do |out|
      IO::copy_stream(tgz, out)
    end
    File.unlink(file)
  rescue
    File.unlink(outpath) if File.file?(outpath)
    raise
  end
  tgz.close
end

desc "Process any vendor files required for this plugin"
task "vendor" do |task, args|

  @files.each do |file|
    download = file_fetch(file['url'], file['sha1'])
    if download =~ /.tar.gz/
      prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
      untar(download) do |entry|
        if !file['files'].nil?
          next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
          out = entry.full_name.split("/").last
        end
        File.join('vendor', out)
      end
    elsif download =~ /.gz/
      ungz(download)
    end
  end

end
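The "vendor" task above iterates over @files, which is not defined in this file; it is expected to come from elsewhere (typically the top-level Rakefile). A hypothetical entry, using only the keys the task actually reads ('url', 'sha1', 'files'); the URL and SHA1 below are placeholders, not the values the real Rakefile ships:

# Hypothetical @files definition consumed by the "vendor" task above.
@files = [
  {
    'url'   => 'https://example.org/collectd-5.4.0.tar.gz',  # placeholder URL
    'sha1'  => '0000000000000000000000000000000000000000',   # placeholder checksum
    'files' => ['/src/types.db'],                            # entries to keep from the tarball
  },
]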
data/spec/codecs/collectd.rb
ADDED
@@ -0,0 +1,210 @@
require "logstash/codecs/collectd"
require "logstash/event"
require "insist"
require "tempfile"

describe LogStash::Codecs::Collectd do
  context "None" do
    subject do
      next LogStash::Codecs::Collectd.new({})
    end

    it "should parse a normal packet" do
payload = ["000000236c6965746572732d6b6c6170746f702e70726f742e706c657869732e6575000008000c14b0a645f3eb73c30009000c00000002800000000002000e696e74657266616365000003000a776c616e30000004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0a645f3eb525e000300076c6f000004000f69665f7061636b6574730000060018000202020000000000001cd80000000000001cd80008000c14b0a645f3ebf8c10002000c656e74726f70790000030005000004000c656e74726f7079000006000f0001010000000000a063400008000c14b0a645f3eb6c700002000e696e74657266616365000003000a776c616e30000004000f69665f7061636b657473000006001800020202000000000002d233000000000001c3b10008000c14b0a645f3eb59b1000300076c6f000004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0a645f425380b00020009737761700000030005000004000973776170000005000975736564000006000f00010100000000000000000008000c14b0a645f4254c8d0005000966726565000006000f00010100000000fcffdf410008000c14b0a645f4255ae70005000b636163686564000006000f00010100000000000000000008000c14b0a645f426f09f0004000c737761705f696f0000050007696e000006000f00010200000000000000000008000c14b0a645f42701e7000500086f7574000006000f00010200000000000000000008000c14b0a645f42a0edf0002000a7573657273000004000a75736572730000050005000006000f00010100000000000022400008000c14b0a645f5967c8b0002000e70726f636573736573000004000d70735f7374617465000005000c72756e6e696e67000006000f00010100000000000000000008000c14b0a645f624706c0005000d736c656570696e67000006000f0001010000000000c067400008000c14b0a645f624861a0005000c7a6f6d62696573000006000f00010100000000000000000008000c14b0a645f62494740005000c73746f70706564000006000f00010100000000000010400008000c14b0a645f6254aa90005000b706167696e67000006000f00010100000000000000000008000c14b0a645f6255b110005000c626c6f636b6564000006000f00010100000000000000000008000c14b0a645f62763060004000e666f726b5f726174650000050005000006000f00010200000000000025390008000c14b0a64873bf8f47000200086370750000030006300000040008637075000005000975736572000006000f0001020000000000023caa0008000c14b0a64873bfc9dd000500096e696365000006000f00010200000000000000030008000c14b0a64873bfe9350005000b73797374656d000006000f00010200000000000078bc0008000c14b0a64873c004290005000969646c65000006000f00010200000000000941fe0008000c14b0a64873c020920005000977616974000006000f00010200000000000002050008000c14b0a64873c03e280005000e696e74657272757074000006000f00010200000000000000140008000c14b0a64873c04ba20005000c736f6674697271000006000f00010200000000000001890008000c14b0a64873c058860005000a737465616c000006000f00010200000000000000000008000c14b0a64873c071b80003000631000005000975736572000006000f000102000000000002440e0008000c14b0a64873c07f31000500096e696365000006000f0001020000000000000007"].pack('H*')

      counter = 0
      subject.decode(payload) do |event|
        case counter
        when 0
          insist { event['host'] } == "lieters-klaptop.prot.plexis.eu"
          insist { event['plugin'] } == "interface"
          insist { event['plugin_instance'] } == "wlan0"
          insist { event['collectd_type'] } == "if_errors"
          insist { event['rx'] } == 0
          insist { event['tx'] } == 0
        when 2
          insist { event['host'] } == "lieters-klaptop.prot.plexis.eu"
          insist { event['plugin'] } == "entropy"
          insist { event['collectd_type'] } == "entropy"
          insist { event['value'] } == 157.0
        end
        counter += 1
      end
      insist { counter } == 28
    end # it "should parse a normal packet"

    it "should drop a part with an header length" do
payload = ["000000236c6965746572732d6b6c6170746f702e70726f742e706c657869732e6575000008000c14b0a645f3eb73c30009000c00000002800000000002000e696e74657266616365000003000a776c616e30000004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0a645f3eb525e000300076c6f000004000f69665f7061636b6574730000060018000202020000000000001cd80000000000001cd80008000c14b0a645f3ebf8c10002000c656e74726f70790000030005000004000c656e74726f7079000006000f0001010000000000a063400008000c14b0a645f3eb6c700002000e696e74657266616365000003000a776c616e30000004000f69665f7061636b657473000006001800020202000000000002d233000000000001c3b10008000c14b0a645f3eb59b1000300076c6f000004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0a645f425380b00020009737761700000030005000004000973776170000005000975736564000006000f00010100000000000000000008000c14b0a645f4254c8d0005000966726565000006000f00010100000000fcffdf410008000c14b0a645f4255ae70005000b636163686564000006000f00010100000000000000000008000c14b0a645f426f09f0004000c737761705f696f0000050007696e000006000f00010200000000000000000008000c14b0a645f42701e7000500086f7574000006000f00010200000000000000000008000c14b0a645f42a0edf0002000a7573657273000004000a75736572730000050005000006000f00010100000000000022400008000c14b0a645f5967c8b0002000e70726f636573736573000004000d70735f7374617465000005000c72756e6e696e67000006000f00010100000000000000000008000c14b0a645f624706c0005000d736c656570696e67000006000f0001010000000000c067400008000c14b0a645f624861a0005000c7a6f6d62696573000006000f00010100000000000000000008000c14b0a645f62494740005000c73746f70706564000006000f00010100000000000010400008000c14b0a645f6254aa90005000b706167696e67000006000f00010100000000000000000008000c14b0a645f6255b110005000c626c6f636b6564000006000f00010100000000000000000008000c14b0a645f62763060004000e666f726b5f726174650000050005000006000f00010200000000000025390008000c14b0a64873bf8f47000200086370750000030006300000040008637075000005000975736572000006000f0001020000000000023caa0008000c14b0a64873bfc9dd000500096e696365000006000f00010200000000000000030008000c14b0a64873bfe9350005000b73797374656d000006000f00010200000000000078bc0008000c14b0a64873c004290005000969646c65000006000f00010200000000000941fe0008000c14b0a64873c020920005000977616974000006000f00010200000000000002050008000c14b0a64873c03e280005000e696e74657272757074000006000f00010200000000000000140008000c14b0a64873c04ba20005000c736f6674697271000006000f00010200000000000001890008000c14b0a64873c058860005000a737465616c000006000f00010200000000000000000008000c14b0a64873c071b80003000631000005000975736572000006000f000102000000000002440e0008000c14b0a64873c07f31000500316e696365000006000f0001020000000000000007"].pack('H*')
      counter = 0
      subject.decode(payload) do |event|
        case counter
        when 0
          insist { event['host'] } == "lieters-klaptop.prot.plexis.eu"
          insist { event['plugin'] } == "interface"
          insist { event['plugin_instance'] } == "wlan0"
          insist { event['collectd_type'] } == "if_errors"
          insist { event['rx'] } == 0
          insist { event['tx'] } == 0
        when 2
          insist { event['host'] } == "lieters-klaptop.prot.plexis.eu"
          insist { event['plugin'] } == "entropy"
          insist { event['collectd_type'] } == "entropy"
          insist { event['value'] } == 157.0
        end
        counter += 1
      end
      # One of these will fail because I altered the payload from the normal packet
      insist { counter } == 27
    end # it "should drop a part with an header length"

    # This payload contains a NaN value
    it "should replace a NaN with a zero and add tag '_collectdNaN' by default" do
payload = ["00000015746573742e6578616d706c652e636f6d000008000c14dc4c81831ef78b0009000c00000000400000000002000970696e67000004000970696e67000005001c70696e672d7461726765742e6578616d706c652e636f6d000006000f000101000000000000f87f"].pack('H*')
      counter = 0
      subject.decode(payload) do |event|
        case counter
        when 0
          insist { event['host'] } == "test.example.com"
          insist { event['plugin'] } == "ping"
          insist { event['type_instance'] } == "ping-target.example.com"
          insist { event['collectd_type'] } == "ping"
          insist { event['value'] } == 0 # Not a NaN
          insist { event['tags'] } == ["_collectdNaN"]
        end
        counter += 1
      end
      insist { counter } == 1
    end # it "should replace a NaN with a zero and add tag '_collectdNaN' by default"
  end # context "None"

  context "Replace nan_value and nan_tag with non-default values" do
    subject do
      next LogStash::Codecs::Collectd.new({"nan_value" => 1,
                                           "nan_tag" => "NaN_encountered"})
    end
    # This payload contains a NaN value
    it "should replace a NaN with the specified value and tag 'NaN_encountered'" do
payload = ["00000015746573742e6578616d706c652e636f6d000008000c14dc4c81831ef78b0009000c00000000400000000002000970696e67000004000970696e67000005001c70696e672d7461726765742e6578616d706c652e636f6d000006000f000101000000000000f87f"].pack('H*')
      counter = 0
      subject.decode(payload) do |event|
        case counter
        when 0
          insist { event['host'] } == "test.example.com"
          insist { event['plugin'] } == "ping"
          insist { event['type_instance'] } == "ping-target.example.com"
          insist { event['collectd_type'] } == "ping"
          insist { event['value'] } == 1 # Not a NaN
          insist { event['tags'] } == ["NaN_encountered"]
        end
        counter += 1
      end
      insist { counter } == 1
    end # it "should replace a NaN with the specified value and tag 'NaN_encountered'"
  end # context "Replace nan_value and nan_tag with non-default values"

  context "Warn on NaN event" do
    subject do
      next LogStash::Codecs::Collectd.new({"nan_handling" => "warn"})
    end
    # This payload contains a NaN value
    it "should replace a NaN with a zero and receive a warning when 'nan_handling' set to warn" do
payload = ["00000015746573742e6578616d706c652e636f6d000008000c14dc4c81831ef78b0009000c00000000400000000002000970696e67000004000970696e67000005001c70696e672d7461726765742e6578616d706c652e636f6d000006000f000101000000000000f87f"].pack('H*')
      counter = 0
      subject.logger.should_receive(:warn).with("NaN replaced by 0")
      subject.decode(payload) do |event|
        case counter
        when 0
          insist { event['host'] } == "test.example.com"
          insist { event['plugin'] } == "ping"
          insist { event['type_instance'] } == "ping-target.example.com"
          insist { event['collectd_type'] } == "ping"
          insist { event['value'] } == 0 # Not a NaN
        end
        counter += 1
      end
      insist { counter } == 1
    end # it "should replace a NaN with a zero and receive a warning when 'nan_handling' set to warn"
  end # context "Warn on NaN event"

  context "Drop NaN event" do
    subject do
      next LogStash::Codecs::Collectd.new({"nan_handling" => "drop"})
    end
    # This payload contains a NaN value
    it "should drop an event with a NaN value when 'nan_handling' set to drop" do
payload = ["00000015746573742e6578616d706c652e636f6d000008000c14dc4c81831ef78b0009000c00000000400000000002000970696e67000004000970696e67000005001c70696e672d7461726765742e6578616d706c652e636f6d000006000f000101000000000000f87f"].pack('H*')
      counter = 0
      subject.decode(payload) do |event|
        case counter
        when 0
          insist { event['host'] } == "test.example.com"
          insist { event['plugin'] } == "ping"
          insist { event['type_instance'] } == "ping-target.example.com"
          insist { event['collectd_type'] } == "ping"
          insist { event['value'] } == NaN # NaN
        end
        counter += 1 # Because we're dropping this, it should not increment
      end
      insist { counter } == 0 # We expect no increment
    end # it "should drop an event with a NaN value when 'nan_handling' set to drop"
  end # context "Drop NaN event"

  # Create an authfile for the next tests
  authfile = Tempfile.new('logstash-collectd-authfile')
  File.open(authfile.path, "a") do |fd|
    fd.puts("pieter: aapje1234")
  end
  context "Sign" do
    subject do
      next LogStash::Codecs::Collectd.new({"authfile" => authfile.path,
                                           "security_level" => "Sign"})
    end

    it "should parse a correctly signed packet" do
payload = ["0200002a815d5d7e1e72250eee4d37251bf688fbc06ec87e3cbaf289390ef47ad7c413ce706965746572000000236c6965746572732d6b6c6170746f702e70726f742e706c657869732e6575000008000c14b0aa39ef05b3a80009000c000000028000000000020008697271000004000869727100000500084d4953000006000f00010200000000000000000008000c14b0aa39ef06c381000200096c6f616400000400096c6f616400000500050000060021000301010148e17a14ae47e13f85eb51b81e85db3f52b81e85eb51e03f0008000c14b0aa39ef0a7a150002000b6d656d6f7279000004000b6d656d6f7279000005000975736564000006000f000101000000006ce8dc410008000c14b0aa39ef0a87440005000d6275666665726564000006000f00010100000000c0eaa9410008000c14b0aa39ef0a91850005000b636163686564000006000f000101000000002887c8410008000c14b0aa39ef0a9b2f0005000966726565000006000f00010100000000580ed1410008000c14b0aa39ef1b3b8f0002000e696e74657266616365000003000974756e30000004000e69665f6f63746574730000050005000006001800020202000000000000df5f00000000000060c10008000c14b0aa39ef1b49ea0004000f69665f7061636b6574730000060018000202020000000000000177000000000000017a0008000c14b0aa39ef1b55570004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0aa39ef1b7a400003000965746830000004000e69665f6f6374657473000006001800020202000000000000000000000000000000000008000c14b0aa39ef1b85160004000f69665f7061636b657473000006001800020202000000000000000000000000000000000008000c14b0aa39ef1b93bc0004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0aa39ef1bb0bc000300076c6f000004000e69665f6f63746574730000060018000202020000000000a92d840000000000a92d840008000c14b0aa39ef1bbbdd0004000f69665f7061636b6574730000060018000202020000000000002c1e0000000000002c1e0008000c14b0aa39ef1bc8760004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0aa39ef1be36a0003000a776c616e30000004000e69665f6f6374657473000006001800020202000000001043329b0000000001432a5d0008000c14b0aa39ef1bef6c0004000f69665f7061636b6574730000060018000202020000000000043884000000000002931e0008000c14b0aa39ef1bfa8d0004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0aa39ef6e4ff5000200096469736b000003000873646100000400106469736b5f6f637465747300000600180002020200000000357c5000000000010dfb10000008000c14b0aa39ef6e8e5a0004000d6469736b5f6f7073000006001800020202000000000000a6fe0000000000049ee00008000c14b0aa39ef6eae480004000e6469736b5f74696d65000006001800020202000000000000000400000000000000120008000c14b0aa39ef6ecc2a000400106469736b5f6d6572676564000006001800020202000000000000446500000000000002460008000c14b0aa39ef6ef9dc000300097364613100000400106469736b5f6f637465747300000600180002020200000000000bf00000000000000000000008000c14b0aa39ef6f05490004000d6469736b5f6f707300000600180002020200000000000000bf0000000000000000"].pack('H*')
      counter = 0
      subject.decode(payload) do |event|
        counter += 1
      end

      insist { counter } == 24
    end # it "should parse a correctly signed packet"

    it "should not parse an incorrectly signed packet" do
payload = ["0200002a815d5d7f1e72250eee4d37251bf688fbc06ec87e3cbaf289390ef47ad7c413ce706965746572000000236c6965746572732d6b6c6170746f702e70726f742e706c657869732e6575000008000c14b0aa39ef05b3a80009000c000000028000000000020008697271000004000869727100000500084d4953000006000f00010200000000000000000008000c14b0aa39ef06c381000200096c6f616400000400096c6f616400000500050000060021000301010148e17a14ae47e13f85eb51b81e85db3f52b81e85eb51e03f0008000c14b0aa39ef0a7a150002000b6d656d6f7279000004000b6d656d6f7279000005000975736564000006000f000101000000006ce8dc410008000c14b0aa39ef0a87440005000d6275666665726564000006000f00010100000000c0eaa9410008000c14b0aa39ef0a91850005000b636163686564000006000f000101000000002887c8410008000c14b0aa39ef0a9b2f0005000966726565000006000f00010100000000580ed1410008000c14b0aa39ef1b3b8f0002000e696e74657266616365000003000974756e30000004000e69665f6f63746574730000050005000006001800020202000000000000df5f00000000000060c10008000c14b0aa39ef1b49ea0004000f69665f7061636b6574730000060018000202020000000000000177000000000000017a0008000c14b0aa39ef1b55570004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0aa39ef1b7a400003000965746830000004000e69665f6f6374657473000006001800020202000000000000000000000000000000000008000c14b0aa39ef1b85160004000f69665f7061636b657473000006001800020202000000000000000000000000000000000008000c14b0aa39ef1b93bc0004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0aa39ef1bb0bc000300076c6f000004000e69665f6f63746574730000060018000202020000000000a92d840000000000a92d840008000c14b0aa39ef1bbbdd0004000f69665f7061636b6574730000060018000202020000000000002c1e0000000000002c1e0008000c14b0aa39ef1bc8760004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0aa39ef1be36a0003000a776c616e30000004000e69665f6f6374657473000006001800020202000000001043329b0000000001432a5d0008000c14b0aa39ef1bef6c0004000f69665f7061636b6574730000060018000202020000000000043884000000000002931e0008000c14b0aa39ef1bfa8d0004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0aa39ef6e4ff5000200096469736b000003000873646100000400106469736b5f6f637465747300000600180002020200000000357c5000000000010dfb10000008000c14b0aa39ef6e8e5a0004000d6469736b5f6f7073000006001800020202000000000000a6fe0000000000049ee00008000c14b0aa39ef6eae480004000e6469736b5f74696d65000006001800020202000000000000000400000000000000120008000c14b0aa39ef6ecc2a000400106469736b5f6d6572676564000006001800020202000000000000446500000000000002460008000c14b0aa39ef6ef9dc000300097364613100000400106469736b5f6f637465747300000600180002020200000000000bf00000000000000000000008000c14b0aa39ef6f05490004000d6469736b5f6f707300000600180002020200000000000000bf0000000000000000"].pack('H*')
      counter = 0
      subject.decode(payload) do |event|
        counter += 1
      end

      insist { counter } == 0
    end # it "should not parse and incorrectly signed packet"
  end # context "Sign"

  context "Encrypt" do
    subject do
      next LogStash::Codecs::Collectd.new({"authfile" => authfile.path,
                                           "security_level" => "Encrypt"})
    end

    it "should parse an encrypted packet", :export_cypher => true do
payload = ["0210055b0006706965746572a8e1874742655f163fa5b1ae4c7c37cd4c271e4f6e2dc53f0a2dfb6391c11f9200645abd545de9042bc7f36c3119e5d301115acfd44ff298d2565cf20799fa322bbe2e72268ef1b5f24b8003e512b0f8f52ce5d3fb0a5aafbff83ac7a49047e2fbf908a3f8c043154feeb594953e5dbd93eafdc75866b336d25e135d2fea6efcebaf9041c86081dda8b999d816e23106a3615efee7191610d9f2eab626cccf00879d76e82a3e60f60cf594435c723ac302c605f9a3ddc6c994acb75d461fa82e57f8b9823081a80a07386b8cdeca387792a52a58f1c367cacec8ecc292b06c5101b5fdcc0320bfd473fb751bef559e51031ef4207404702fa4899b152bf264c4b0f11cf6ab37fc4c7fb996fa6d2dce9051373c5adf06bbb588d38a1251258f2fd690c55a9d2c87b916ca159b261b3fce068b91fd94ca31f90c237df7ac6fcd7c9e73d77c49b3fb93be59cdcf51ea3dcdfd00cdeff379f979cc7341369c47b741651fe5b8de82498cebf35d8c9bad1ef02384e8418d57765aeede95bbd70078516136351b39e4f1e668786ce3885ac8f0f0246337ed6842f5789536474d3c1390b846aaf859b5af6efad027439dc0e444d3a9ab289a4deab4aeecbd9514e1fabadcd7b4565b6d96f12007b600dd0cc135b0c6a521f8c9c17b109d4ba5a42d32f00757c4da50bc0e5ff2bd1114df97f3edfc25102fdc43faa2c2087a5ee9cc0137438eac807bf19f883023adb1293623e15bf94ce7bb2fb6af68978c12642b1dd04badcbf74ee9d08ed5629904376a084348fc51ea382a9d83cd41d021be24f3fea3f079de815c0a89e0c3684501eb6ead89b515cca706218702fb56fe4c8ca0b3d7969dbee7a5a12a17843f990e408974c65aaa3d719f8774098eee7d5be5adb025de24e719434073e59ee91d38192007c5df97d79174de8218ecf89d7778282814ec8ad92f9622d2b875881666d59949b9487f2b231203b570418dd69218e2e86205af2618b74f1a83bdab0465f44d0647548598018ba0180e6d9a8496854c8fbb85698c4ec56d9f524ebf37953601a0c470c360f2d8fa83215c761cbb4d8ae475bbb3dec60e6a5c7af7aab1b8bb56b8fa18619a0c240e5ccf2d02326fc08db42f74b99b9be5263061b36a1b750e061f3cad72db6480e8194a6fe78bc3403551473d03b5067a3d72457563777f398f3df4ae24c09fc66c2c0b06331fdabb33e7ef22a7e7f4a5d8e92cdaaabc7aabd2ab15cf6204e2a531ef4fdc98ed4895e71ea9e406b759d6d547b0b97c2715551c73efd415e55f0c0d73d7134b63c0636728bab0a59bff59de8a31f40f4f1f77a3e1e52d2035f69ab453dfd14889c5dfa7fcc27180cb35f92a3282dfc520716968bec6f22e99351889d53628e57f48f5ad70899881b81699454d8d5aff6791672cbf258d1130dabf27ddee7f6e105752c3773257a2a5616350551965e7c60603c8b0465169af66b52ff900be147ead7a8bfb9bf1419709b539a8f003da13abe286855850530135a1eba0231a9995736abf55b6f50aa85e42afc7b4e7574cc53b8919d0b05c4630af1e5fa98a1bd6a2b7e4fbda02c68c73d07bf0f117d63d1ed51d613464146dba12460a0769c79517a928e66417ef4ee19248a7abd1a734eb53443ff44a742d6bf96782de8593ec8561ea974b61f0f2d5ab1671c4eb323c0a07bf6d042564161c5688a722cf8de4c39346082b7a3d635bcf5e24c7ab421ed206f3a93c17d26f0b28a99e25bc3387f3f5fcd99b6560c51f055ac1887f3d84fb8ad0eb03304663bad111fcf531e4efe918143062ca1724857edd138ca9eca0476a5205c3fe1db899d4b26a8d3398df52e8548ecdfb94044e8c095df60139d00c3bc01c205d44fd81fc30ec02b20f281da57c106b86e567585e0b561555ea491eda05"].pack('H*')
      counter = 0
      subject.decode(payload) do |event|
        counter += 1
      end

      insist { counter } == 24
    end # it "should parse an encrypted packet"

    it "should not parse unencrypted packets when encrypt is configured" do
payload = ["000000236c6965746572732d6b6c6170746f702e70726f742e706c657869732e6575000008000c14b0a645f3eb73c30009000c00000002800000000002000e696e74657266616365000003000a776c616e30000004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0a645f3eb525e000300076c6f000004000f69665f7061636b6574730000060018000202020000000000001cd80000000000001cd80008000c14b0a645f3ebf8c10002000c656e74726f70790000030005000004000c656e74726f7079000006000f0001010000000000a063400008000c14b0a645f3eb6c700002000e696e74657266616365000003000a776c616e30000004000f69665f7061636b657473000006001800020202000000000002d233000000000001c3b10008000c14b0a645f3eb59b1000300076c6f000004000e69665f6572726f7273000006001800020202000000000000000000000000000000000008000c14b0a645f425380b00020009737761700000030005000004000973776170000005000975736564000006000f00010100000000000000000008000c14b0a645f4254c8d0005000966726565000006000f00010100000000fcffdf410008000c14b0a645f4255ae70005000b636163686564000006000f00010100000000000000000008000c14b0a645f426f09f0004000c737761705f696f0000050007696e000006000f00010200000000000000000008000c14b0a645f42701e7000500086f7574000006000f00010200000000000000000008000c14b0a645f42a0edf0002000a7573657273000004000a75736572730000050005000006000f00010100000000000022400008000c14b0a645f5967c8b0002000e70726f636573736573000004000d70735f7374617465000005000c72756e6e696e67000006000f00010100000000000000000008000c14b0a645f624706c0005000d736c656570696e67000006000f0001010000000000c067400008000c14b0a645f624861a0005000c7a6f6d62696573000006000f00010100000000000000000008000c14b0a645f62494740005000c73746f70706564000006000f00010100000000000010400008000c14b0a645f6254aa90005000b706167696e67000006000f00010100000000000000000008000c14b0a645f6255b110005000c626c6f636b6564000006000f00010100000000000000000008000c14b0a645f62763060004000e666f726b5f726174650000050005000006000f00010200000000000025390008000c14b0a64873bf8f47000200086370750000030006300000040008637075000005000975736572000006000f0001020000000000023caa0008000c14b0a64873bfc9dd000500096e696365000006000f00010200000000000000030008000c14b0a64873bfe9350005000b73797374656d000006000f00010200000000000078bc0008000c14b0a64873c004290005000969646c65000006000f00010200000000000941fe0008000c14b0a64873c020920005000977616974000006000f00010200000000000002050008000c14b0a64873c03e280005000e696e74657272757074000006000f00010200000000000000140008000c14b0a64873c04ba20005000c736f6674697271000006000f00010200000000000001890008000c14b0a64873c058860005000a737465616c000006000f00010200000000000000000008000c14b0a64873c071b80003000631000005000975736572000006000f000102000000000002440e0008000c14b0a64873c07f31000500096e696365000006000f0001020000000000000007"].pack('H*')
      counter = 0
      subject.decode(payload) do |event|
        counter += 1
      end

      insist { counter } == 0
    end # it "should not parse unencrypted packets when encrypt is configured"
  end # context "Encrypt"
end # describe LogStash::Codecs::Collectd
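The NaN fixtures in the specs above all end in the bytes 00 00 00 00 00 00 f8 7f, which is a little-endian IEEE-754 quiet NaN, exactly what gauge_decoder unpacks with the "E" format. A one-line Ruby check (not part of the package) that confirms this:

["000000000000f87f"].pack("H*").unpack("E")[0].nan?  # => true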
metadata
ADDED
@@ -0,0 +1,75 @@
--- !ruby/object:Gem::Specification
name: logstash-codec-collectd
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Elasticsearch
autorequire:
bindir: bin
cert_chain: []
date: 2014-10-30 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: logstash
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - <
      - !ruby/object:Gem::Version
        version: 2.0.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - <
      - !ruby/object:Gem::Version
        version: 2.0.0
description: Read events from the connectd binary protocol
email: richard.pijnenburg@elasticsearch.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- .gitignore
- Gemfile
- Rakefile
- lib/logstash/codecs/collectd.rb
- logstash-codec-collectd.gemspec
- rakelib/publish.rake
- rakelib/vendor.rake
- spec/codecs/collectd.rb
- vendor/types.db
homepage: http://logstash.net/
licenses:
- Apache License (2.0)
metadata:
  logstash_plugin: 'true'
  group: codec
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.4.1
signing_key:
specification_version: 4
summary: Read events from the connectd binary protocol
test_files:
- spec/codecs/collectd.rb