logstash-codec-sflow 0.1.0 → 0.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 7734945c0489502693b7f17044c6f96bae64637b
- data.tar.gz: 9b353b730c693c2397f5edb013c3b0811a5f133c
+ metadata.gz: a9773b581653d998b843923848706abfe5851dad
+ data.tar.gz: 92c48583c7b35ecdf55bd707e4196b671149cc91
  SHA512:
- metadata.gz: 5767825ecfb36ad9a4738cf770fb0718f9a785264b7e87fabbf6e292afaa058ff49fa6761cc5e456ffbcec4f4875cc7cfd48a19597d74a8fb0ac0d3f133fada6
- data.tar.gz: 90e4aa370af0b55f121aa7b922e577d6153b166724048fdb3eaee630366214360b8dc0d3088a01d56a6d1ed8e98fb2475fc0036f1acbb4bc27faeeb918cfd4b0
+ metadata.gz: 74eb65d450dbc9b1dc05dd784a1d0b7efca1eb8f82cc09d978049d672a98802dbdcd36ce652475baa31222c9cfadb5c814a7bed2277845584db6c1ecb8877331
+ data.tar.gz: 68dcb680539c692397aeaecaa4cc7f80fa5f381f7852450115e9b27c09609b19cf9acd3f822bc648d1c20defa63cd79944fea6ebd0bb20f8c630b8155eeaa943
data/README.md CHANGED
@@ -65,7 +65,7 @@ bundle exec rspec

  - Edit Logstash `tools/Gemfile` and add the local plugin path, for example:
  ```ruby
- gem "logstash-filter-awesome", :path => "/your/local/logstash-filter-awesome"
+ gem "logstash-codec-sflow", :path => "/your/local/logstash-filter-awesome"
  ```
  - Update Logstash dependencies
  ```sh
@@ -2,6 +2,7 @@

  require 'bindata'
  require 'logstash/codecs/sflow/util'
+ require 'logstash/codecs/sflow/packet_header'

  class RawPacketHeader < BinData::Record
  endian :big
@@ -9,14 +10,20 @@ class RawPacketHeader < BinData::Record
  uint32 :frame_length
  uint32 :stripped
  uint32 :header_size
- skip :length => :header_size
+ choice :sample_header, :selection => :protocol do
+ ethernet_header 1, :size_header => lambda { header_size * 8 }
+ ip_header 11, :size_header => lambda { header_size * 8 }
+ skip :default, :length => :header_size
+ end
  end

  class EthernetFrameData < BinData::Record
  endian :big
  uint32 :packet_length
  mac_address :src_mac
+ skip :length => 2
  mac_address :dst_mac
+ skip :length => 2
  uint32 :type
  end

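The new `choice :sample_header` replaces the unconditional `skip`, dispatching on the record's `:protocol` field and falling back to skipping `header_size` bytes for protocols it does not model. A minimal standalone BinData sketch of that pattern, not taken from the gem (field names and payload below are invented for illustration):

```ruby
# Sketch only: a choice keyed on a sibling field, with a skip fallback so
# unknown selector values are consumed instead of failing the whole parse.
require 'bindata'

class SampledHeaderDemo < BinData::Record
  endian :big
  uint32 :proto      # plays the role of :protocol above
  uint32 :hdr_size   # plays the role of :header_size above
  choice :hdr, :selection => :proto do
    string 1, :read_length => :hdr_size  # a protocol we know how to keep
    skip :default, :length => :hdr_size  # anything else: jump over the bytes
  end
end

demo = SampledHeaderDemo.read("\x00\x00\x00\x01\x00\x00\x00\x03abc")
demo.hdr  # => "abc"
```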
@@ -0,0 +1,100 @@
+ # encoding: utf-8
+
+ require 'bindata'
+ require 'logstash/codecs/sflow/util'
+
+
+ class TcpHeader < BinData::Record
+ mandatory_parameter :size_header
+
+ endian :big
+ uint16 :src_port
+ uint16 :dst_port
+ uint32 :tcp_seq_number
+ uint32 :tcp_ack_number
+ bit4 :tcp_header_length # times 4
+ bit3 :tcp_reserved
+ bit1 :tcp_is_nonce
+ bit1 :tcp_is_cwr
+ bit1 :tcp_is_ecn_echo
+ bit1 :tcp_is_urgent
+ bit1 :tcp_is_ack
+ bit1 :tcp_is_push
+ bit1 :tcp_is_reset
+ bit1 :tcp_is_syn
+ bit1 :tcp_is_fin
+ uint16 :tcp_window_size
+ uint16 :tcp_checksum
+ uint16 :tcp_urgent_pointer
+ array :tcp_options, :initial_length => lambda { (((tcp_header_length * 4) - 20)/4).ceil }, :onlyif => :is_options? do
+ string :tcp_option, :length => 4, :pad_byte => "\0"
+ end
+ bit :nbits => lambda { size_header - (tcp_header_length * 4 * 8) }
+
+ def is_options?
+ tcp_header_length.to_i > 5
+ end
+ end
+
+ class UdpHeader < BinData::Record
+ endian :big
+ uint16 :src_port
+ uint16 :dst_port
+ uint16 :udp_length
+ uint16 :udp_checksum
+ skip :length => lambda { udp_length - 64 } #skip udp data
+ end
+
+ class IPV4Header < BinData::Record
+ mandatory_parameter :size_header
+
+ endian :big
+ bit4 :ip_header_length # times 4
+ bit6 :ip_dscp
+ bit2 :ip_ecn
+ uint16 :ip_total_length
+ uint16 :ip_identification
+ bit3 :ip_flags
+ bit13 :ip_fragment_offset
+ uint8 :ip_ttl
+ uint8 :ip_protocol
+ uint16 :ip_checksum
+ ip4_addr :src_ip
+ ip4_addr :dst_ip
+ array :ip_options, :initial_length => lambda { (((ip_header_length * 4) - 20)/4).ceil }, :onlyif => :is_options? do
+ string :ip_option, :length => 4, :pad_byte => "\0"
+ end
+ choice :layer4, :selection => :ip_protocol do
+ tcp_header 6, :size_header => lambda { size_header - (ip_header_length * 4 * 8) }
+ udp_header 17
+ bit :default, :nbits => lambda { size_header - (ip_header_length * 4 * 8) }
+ end
+
+ def is_options?
+ ip_header_length.to_i > 5
+ end
+ end
+
+ class IPHeader < BinData::Record
+ mandatory_parameter :size_header
+
+ endian :big
+ bit4 :ip_version
+ choice :header, :selection => :ip_version do
+ ipv4_header 4, :size_header => :size_header
+ bit :default, :nbits => lambda { size_header - 4 }
+ end
+ end
+
+ class EthernetHeader < BinData::Record
+ mandatory_parameter :size_header
+
+ endian :big
+ mac_address :eth_dst
+ mac_address :eth_src
+ uint16 :eth_type
+ choice :layer3, :selection => :eth_type do
+ ip_header 2048, :size_header => lambda { size_header - (14 * 8) }
+ bit :default, :nbits => lambda { size_header - (14 * 8) }
+ end
+ end
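Every header record above takes `:size_header` in bits (hence the `* 8` back in `RawPacketHeader`), so standalone decoding follows the same shape as the spec further down. A hedged usage sketch; the capture file name is a placeholder:

```ruby
# Sketch only: parse one sampled Ethernet frame with the new header records.
# 'sampled_frame.dat' stands in for any raw frame capture.
require 'logstash/codecs/sflow/packet_header'

raw = IO.read('sampled_frame.dat', :mode => 'rb')
eth = EthernetHeader.new(:size_header => raw.bytesize * 8).read(raw)
puts eth['eth_src'], eth['eth_dst'], eth['eth_type']
```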
@@ -19,13 +19,13 @@ class FlowSample < BinData::Record
  bit20 :record_entreprise
  bit12 :record_format
  uint32 :record_length
- choice :record_data, :selection => :record_format do
- raw_packet_header 1
- ethernet_frame_data 2
- ip4_data 3
- ip6_data 4
- extended_switch_data 1001
- extended_router_data 1002
+ choice :record_data, :selection => lambda { "#{record_entreprise}-#{record_format}" } do
+ raw_packet_header "0-1"
+ ethernet_frame_data "0-2"
+ ip4_data "0-3"
+ ip6_data "0-4"
+ extended_switch_data "0-1001"
+ extended_router_data "0-1002"
  skip :default, :length => :record_length
  end
  end
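Switching the selection from `:record_format` alone to the combined `record_entreprise`-`record_format` string keeps vendor-specific record formats from colliding with the standard (enterprise 0) ones. A minimal sketch of that composite-key technique, not taken from the gem:

```ruby
# Sketch only: key a BinData choice on two fields at once via a composite string.
require 'bindata'

class CompositeKeyDemo < BinData::Record
  endian :big
  uint16 :rec_enterprise
  uint16 :rec_format
  uint32 :rec_length
  choice :rec_body, :selection => lambda { "#{rec_enterprise}-#{rec_format}" } do
    string "0-1", :read_length => :rec_length  # standard enterprise 0, format 1
    skip :default, :length => :rec_length      # unknown combination: skip it
  end
end
```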
@@ -5,7 +5,6 @@ require 'ipaddr'

  class MacAddress < BinData::Primitive
  array :bytes, :type => :uint8, :initial_length => 6
- bit16 :padbytes

  def set(val)
  ints = val.split(/:/).collect { |int| int.to_i(16) }
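With the 16 pad bits dropped, `MacAddress` now consumes exactly 6 bytes; the two pad bytes that follow each MAC in the sFlow sampled-ethernet structure are instead handled by the explicit `skip :length => 2` lines added to `EthernetFrameData` above. A quick hedged sketch, assuming `MacAddress` is the helper pulled in by the `logstash/codecs/sflow/util` require seen earlier:

```ruby
# Sketch only: MacAddress reads exactly 6 bytes after this change.
require 'logstash/codecs/sflow/util'  # assumed location of MacAddress

mac = MacAddress.read("\x00\x23\xe9\x78\x16\xc6".b)
mac.to_s  # => "00:23:e9:78:16:c6", matching the eth_dst expectation in the spec below
```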
@@ -1,21 +1,30 @@
  # encoding: utf-8
- require "logstash/codecs/base"
- require "logstash/namespace"
+ require 'logstash/codecs/base'
+ require 'logstash/namespace'

  # The "sflow" codec is for decoding sflow v5 flows.
  class LogStash::Codecs::Sflow < LogStash::Codecs::Base
- config_name "sflow"
+ config_name 'sflow'

  # Specify which sflow must not be send in the event
- config :removed_field, :validate => :array, :default => ['record_length', 'record_count', 'record_entreprise',
- 'record_format', 'sample_entreprise', 'sample_format',
- 'sample_length', 'sample_count', 'sflow_version',
- 'ip_version']
+ config :optional_removed_field, :validate => :array, :default => ['sflow_version', 'ip_version', 'header_size',
+ 'ip_header_length', 'ip_dscp', 'ip_ecn',
+ 'ip_total_length', 'ip_identification', 'ip_flags',
+ 'ip_fragment_offset', 'ip_ttl', 'ip_checksum',
+ 'ip_options', 'tcp_seq_number', 'tcp_ack_number',
+ 'tcp_header_length', 'tcp_reserved', 'tcp_is_nonce',
+ 'tcp_is_cwr', 'tcp_is_ecn_echo', 'tcp_is_urgent',
+ 'tcp_is_ack', 'tcp_is_push', 'tcp_is_reset',
+ 'tcp_is_syn', 'tcp_is_fin', 'tcp_window_size',
+ 'tcp_checksum', 'tcp_urgent_pointer', 'tcp_options']


  def initialize(params = {})
  super(params)
  @threadsafe = false
+ @removed_field = ['record_length', 'record_count', 'record_entreprise', 'record_format', 'sample_entreprise',
+ 'sample_format', 'sample_length', 'sample_count', 'sample_header', 'layer3', 'layer4',
+ 'tcp_nbits', 'ip_nbits'] | @optional_removed_field
  end

  # def initialize
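The renamed `optional_removed_field` setting is merged into the fixed `@removed_field` list at initialize time, so it can be overridden per codec instance. A hedged example of instantiating the codec directly with a shorter list (the particular field names kept here are arbitrary):

```ruby
# Sketch only: override optional_removed_field when building the codec.
require 'logstash/codecs/sflow'

codec = LogStash::Codecs::Sflow.new(
  'optional_removed_field' => ['sflow_version', 'ip_version', 'header_size']
)
```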
@@ -29,56 +38,132 @@ class LogStash::Codecs::Sflow < LogStash::Codecs::Base

  public
  def decode(payload)
+
  decoded = SFlow.read(payload)

  events = []

  decoded['samples'].each do |sample|
+ #Treat case with no flow decoded (Unknown flow)
  if sample['sample_data'].to_s.eql? ''
  @logger.warn("Unknown sample entreprise #{sample['sample_entreprise'].to_s} - format #{sample['sample_format'].to_s}")
  next
  end
- sample['sample_data']['records'].each do |record|
- # Ensure that some data exist for the record
- if record['record_data'].to_s.eql? ''
- @logger.warn("Unknown record entreprise #{record['record_entreprise'].to_s}, format #{record['record_format'].to_s}")
- next
- end

+ #treat sample flow
+ if sample['sample_entreprise'] == 0 && sample['sample_format'] == 1
  # Create the logstash event
  event = {
  LogStash::Event::TIMESTAMP => LogStash::Timestamp.now
  }
+ sample['sample_data']['records'].each do |record|
+ # Ensure that some data exist for the record
+ if record['record_data'].to_s.eql? ''
+ @logger.warn("Unknown record entreprise #{record['record_entreprise'].to_s}, format #{record['record_format'].to_s}")
+ next
+ end

- decoded.each_pair do |k, v|
- unless k.to_s.eql? 'samples' or @removed_field.include? k.to_s
- event["#{k}"] = v
+ decoded.each_pair do |k, v|
+ unless k.to_s.eql? 'samples' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
  end
- end

- sample.each_pair do |k, v|
- unless k.to_s.eql? 'sample_data' or @removed_field.include? k.to_s
- event["#{k}"] = v
+ sample.each_pair do |k, v|
+ unless k.to_s.eql? 'sample_data' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
  end
- end

- sample['sample_data'].each_pair do |k, v|
- unless k.to_s.eql? 'records' or @removed_field.include? k.to_s
- event["#{k}"] = v
+ sample['sample_data'].each_pair do |k, v|
+ unless k.to_s.eql? 'records' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
  end
+
+ record.each_pair do |k, v|
+ unless k.to_s.eql? 'record_data' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
+ end
+
+ record['record_data'].each_pair do |k, v|
+ unless k.to_s.eql? 'record_data' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
+ end
+
+ unless record['record_data']['sample_header'].to_s.eql? ''
+ record['record_data']['sample_header'].each_pair do |k, v|
+ unless k.to_s.eql? 'record_data' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
+ end
+
+ if record['record_data']['sample_header'].has_key?("layer3")
+ record['record_data']['sample_header']['layer3']['header'].each_pair do |k, v|
+ unless k.to_s.eql? 'record_data' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
+ end
+ end
+
+ unless record['record_data']['sample_header']['layer3']['header']['layer4'].to_s.eql? ''
+ record['record_data']['sample_header']['layer3']['header']['layer4'].each_pair do |k, v|
+ unless k.to_s.eql? 'record_data' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
+ end
+ end
+ end
+
  end
+ events.push(event)
+
+ #treat counter flow
+ elsif sample['sample_entreprise'] == 0 && sample['sample_format'] == 2
+ sample['sample_data']['records'].each do |record|
+ # Ensure that some data exist for the record
+ if record['record_data'].to_s.eql? ''
+ @logger.warn("Unknown record entreprise #{record['record_entreprise'].to_s}, format #{record['record_format'].to_s}")
+ next
+ end
+
+ # Create the logstash event
+ event = {
+ LogStash::Event::TIMESTAMP => LogStash::Timestamp.now
+ }
+
+ decoded.each_pair do |k, v|
+ unless k.to_s.eql? 'samples' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
+ end
+
+ sample.each_pair do |k, v|
+ unless k.to_s.eql? 'sample_data' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
+ end

- record.each_pair do |k, v|
- unless k.to_s.eql? 'record_data' or @removed_field.include? k.to_s
+ sample['sample_data'].each_pair do |k, v|
+ unless k.to_s.eql? 'records' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
+ end
+
+ record.each_pair do |k, v|
+ unless k.to_s.eql? 'record_data' or @removed_field.include? k.to_s
+ event["#{k}"] = v
+ end
+ end
+
+ record['record_data'].each_pair do |k, v|
  event["#{k}"] = v
  end
- end

- record['record_data'].each_pair do |k, v|
- event["#{k}"] = v
+ events.push(event)
  end
-
- events.push(event)
  end
  end

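For reference, the structure the reworked `decode` walks can be inspected with the same `SFlow.read` entry point the specs below use; a hedged sketch against the flow sample shipped in the gem's spec directory:

```ruby
# Sketch only: parse a captured datagram and list its sample discriminators.
require 'logstash/codecs/sflow/datagram'

payload = IO.read('spec/codecs/sflow_flow_sample.dat', :mode => 'rb')  # path as listed in the gem files
datagram = SFlow.read(payload)
datagram['samples'].each do |sample|
  puts "sample #{sample['sample_entreprise']}-#{sample['sample_format']}"
end
```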
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|

  s.name = 'logstash-codec-sflow'
- s.version = '0.1.0'
+ s.version = '0.2.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = "The sflow codec is for decoding SFlow v5 flows."
  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -0,0 +1,94 @@
+ # encoding: utf-8
+
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/codecs/sflow/packet_header"
+
+ describe UdpHeader do
+ it "should decode udp header" do
+ payload = IO.read(File.join(File.dirname(__FILE__), "udp.dat"), :mode => "rb")
+ decoded = UdpHeader.read(payload)
+
+ decoded["src_port"].to_s.should eq("20665")
+ decoded["dst_port"].to_s.should eq("514")
+ decoded["udp_length"].to_s.should eq("147")
+ end
+ end
+
+
+ describe TcpHeader do
+ it "should decode tcp header" do
+ payload = IO.read(File.join(File.dirname(__FILE__), "tcp.dat"), :mode => "rb")
+ decoded = TcpHeader.new(:size_header => payload.bytesize * 8).read(payload)
+
+ decoded["src_port"].to_s.should eq("5672")
+ decoded["dst_port"].to_s.should eq("59451")
+ decoded["tcp_seq_number"].to_s.should eq("2671357038")
+ decoded["tcp_ack_number"].to_s.should eq("2651945969")
+ (decoded["tcp_header_length"].to_i*4).to_s.should eq("32")
+ decoded["tcp_is_nonce"].to_s.should eq("0")
+ decoded["tcp_is_cwr"].to_s.should eq("0")
+ decoded["tcp_is_ecn_echo"].to_s.should eq("0")
+ decoded["tcp_is_urgent"].to_s.should eq("0")
+ decoded["tcp_is_ack"].to_s.should eq("1")
+ decoded["tcp_is_push"].to_s.should eq("1")
+ decoded["tcp_is_reset"].to_s.should eq("0")
+ decoded["tcp_is_syn"].to_s.should eq("0")
+ decoded["tcp_is_fin"].to_s.should eq("0")
+ decoded["tcp_window_size"].to_s.should eq("147")
+ decoded["tcp_checksum"].to_s.should eq("13042")
+ decoded["tcp_urgent_pointer"].to_s.should eq("0")
+ end
+ end
+
+
+ describe IPHeader do
+ it "should decode ipv4 over tcp header" do
+ payload = IO.read(File.join(File.dirname(__FILE__), "ipv4_over_tcp_header.dat"), :mode => "rb")
+ decoded = IPHeader.new(:size_header => payload.bytesize * 8).read(payload)
+
+ decoded["ip_version"].to_s.should eq("4")
+ decoded["header"]["ip_header_length"].to_s.should eq("5")
+ decoded["header"]["ip_dscp"].to_s.should eq("0")
+ decoded["header"]["ip_ecn"].to_s.should eq("0")
+ decoded["header"]["ip_total_length"].to_s.should eq("476")
+ decoded["header"]["ip_identification"].to_s.should eq("30529")
+ decoded["header"]["ip_flags"].to_s.should eq("2")
+ decoded["header"]["ip_fragment_offset"].to_s.should eq("0")
+ decoded["header"]["ip_ttl"].to_s.should eq("62")
+ decoded["header"]["ip_protocol"].to_s.should eq("6")
+ decoded["header"]["ip_checksum"].to_s.should eq("37559")
+ decoded["header"]["src_ip"].to_s.should eq("10.243.27.17")
+ decoded["header"]["dst_ip"].to_s.should eq("10.243.0.45")
+ decoded["header"]["layer4"]["src_port"].to_s.should eq("5672")
+ decoded["header"]["layer4"]["dst_port"].to_s.should eq("59451")
+ decoded["header"]["layer4"]["tcp_seq_number"].to_s.should eq("2671357038")
+ decoded["header"]["layer4"]["tcp_ack_number"].to_s.should eq("2651945969")
+ (decoded["header"]["layer4"]["tcp_header_length"].to_i*4).to_s.should eq("32")
+ decoded["header"]["layer4"]["tcp_is_nonce"].to_s.should eq("0")
+ decoded["header"]["layer4"]["tcp_is_cwr"].to_s.should eq("0")
+ decoded["header"]["layer4"]["tcp_is_ecn_echo"].to_s.should eq("0")
+ decoded["header"]["layer4"]["tcp_is_urgent"].to_s.should eq("0")
+ decoded["header"]["layer4"]["tcp_is_ack"].to_s.should eq("1")
+ decoded["header"]["layer4"]["tcp_is_push"].to_s.should eq("1")
+ decoded["header"]["layer4"]["tcp_is_reset"].to_s.should eq("0")
+ decoded["header"]["layer4"]["tcp_is_syn"].to_s.should eq("0")
+ decoded["header"]["layer4"]["tcp_is_fin"].to_s.should eq("0")
+ decoded["header"]["layer4"]["tcp_window_size"].to_s.should eq("147")
+ decoded["header"]["layer4"]["tcp_checksum"].to_s.should eq("13042")
+ decoded["header"]["layer4"]["tcp_urgent_pointer"].to_s.should eq("0")
+ end
+ end
+
+
+ describe EthernetHeader do
+ it "should decode ipv4 over udp header" do
+ payload = IO.read(File.join(File.dirname(__FILE__), "ethernet_ipv4_over_udp_header.dat"), :mode => "rb")
+ decoded = EthernetHeader.new(:size_header => payload.bytesize * 8).read(payload)
+
+ decoded["eth_dst"].to_s.should eq("00:23:e9:78:16:c6")
+ decoded["eth_src"].to_s.should eq("58:f3:9c:81:4b:81")
+ decoded["eth_type"].to_s.should eq("2048")
+ decoded["layer3"]["header"]["dst_ip"].to_s.should eq("10.243.27.9")
+ decoded["layer3"]["header"]["layer4"]["dst_port"].to_s.should eq("514")
+ end
+ end
Binary file
Binary file
Binary file
@@ -2,19 +2,22 @@

  require "logstash/devutils/rspec/spec_helper"
  require "logstash/codecs/sflow"
+ require "logstash/codecs/sflow/datagram"

- describe LogStash::Codecs::Sflow do
- before :each do
- @subject = LogStash::Codecs::Sflow.new
+ describe SFlow do
+ it "should decode sflow counters" do
  payload = IO.read(File.join(File.dirname(__FILE__), "sflow_counters_sample.dat"), :mode => "rb")
- @subject.decode(payload)
- payload = IO.read(File.join(File.dirname(__FILE__), "sflow_flow_sample.dat"), :mode => "rb")
- @subject.decode(payload)
+ decoded = SFlow.read(payload)
+ end
+
+ it "should decode sflow 1 counters" do
+ payload = IO.read(File.join(File.dirname(__FILE__), "sflow_1_counters_sample.dat"), :mode => "rb")
+ decoded = SFlow.read(payload)
  end

- describe "#new" do
- it "LogStash::Codecs::Sflow" do
- @subject.should be_an_instance_of LogStash::Codecs::Sflow
- end
+
+ it "should decode sflow sample" do
+ payload = IO.read(File.join(File.dirname(__FILE__), "sflow_flow_sample.dat"), :mode => "rb")
+ decoded = SFlow.read(payload)
  end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-codec-sflow
  version: !ruby/object:Gem::Version
- version: 0.1.0
+ version: 0.2.0
  platform: ruby
  authors:
  - Nicolas Fraison
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-12-08 00:00:00.000000000 Z
+ date: 2015-12-13 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -74,9 +74,16 @@ files:
  - lib/logstash/codecs/sflow/counter_record.rb
  - lib/logstash/codecs/sflow/datagram.rb
  - lib/logstash/codecs/sflow/flow_record.rb
+ - lib/logstash/codecs/sflow/packet_header.rb
  - lib/logstash/codecs/sflow/sample.rb
  - lib/logstash/codecs/sflow/util.rb
  - logstash-codec-sflow.gemspec
+ - spec/codecs/sflow/ethernet_ipv4_over_udp_header.dat
+ - spec/codecs/sflow/ipv4_over_tcp_header.dat
+ - spec/codecs/sflow/packet_header_spec.rb
+ - spec/codecs/sflow/tcp.dat
+ - spec/codecs/sflow/udp.dat
+ - spec/codecs/sflow_1_counters_sample.dat
  - spec/codecs/sflow_counters_sample.dat
  - spec/codecs/sflow_flow_sample.dat
  - spec/codecs/sflow_spec.rb
@@ -107,6 +114,12 @@ signing_key:
  specification_version: 4
  summary: The sflow codec is for decoding SFlow v5 flows.
  test_files:
+ - spec/codecs/sflow/ethernet_ipv4_over_udp_header.dat
+ - spec/codecs/sflow/ipv4_over_tcp_header.dat
+ - spec/codecs/sflow/packet_header_spec.rb
+ - spec/codecs/sflow/tcp.dat
+ - spec/codecs/sflow/udp.dat
+ - spec/codecs/sflow_1_counters_sample.dat
  - spec/codecs/sflow_counters_sample.dat
  - spec/codecs/sflow_flow_sample.dat
  - spec/codecs/sflow_spec.rb