logstash-codec-netflow 2.1.1 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +2 -19
- data/CONTRIBUTORS +0 -12
- data/Gemfile +2 -0
- data/lib/logstash/codecs/netflow.rb +27 -277
- data/lib/logstash/codecs/netflow/util.rb +6 -60
- data/logstash-codec-netflow.gemspec +3 -4
- data/spec/codecs/netflow_spec.rb +20 -265
- metadata +27 -29
- data/lib/logstash/codecs/netflow/iana2yaml.rb +0 -77
- data/lib/logstash/codecs/netflow/ipfix.yaml +0 -2333
- data/spec/codecs/ipfix.dat +0 -0
data/lib/logstash/codecs/netflow/util.rb
CHANGED
@@ -105,19 +105,19 @@ class Netflow5PDU < BinData::Record
   end
 end
 
-class
+class TemplateFlowset < BinData::Record
   endian :big
   array :templates, :read_until => lambda { array.num_bytes == flowset_length - 4 } do
     uint16 :template_id
     uint16 :field_count
-    array :
+    array :fields, :initial_length => :field_count do
       uint16 :field_type
       uint16 :field_length
     end
   end
 end
 
-class
+class OptionFlowset < BinData::Record
   endian :big
   array :templates, :read_until => lambda { flowset_length - 4 - array.num_bytes <= 2 } do
     uint16 :template_id
@@ -147,63 +147,9 @@ class Netflow9PDU < BinData::Record
     uint16 :flowset_id, :assert => lambda { [0, 1, *(256..65535)].include?(flowset_id) }
     uint16 :flowset_length, :assert => lambda { flowset_length > 4 }
     choice :flowset_data, :selection => :flowset_id do
-
-
-      string
-    end
-  end
-end
-
-class IpfixTemplateFlowset < BinData::Record
-  endian :big
-  array :templates, :read_until => lambda { flowset_length - 4 - array.num_bytes <= 2 } do
-    uint16 :template_id
-    uint16 :field_count
-    array :record_fields, :initial_length => :field_count do
-      bit1 :enterprise
-      bit15 :field_type
-      uint16 :field_length
-      uint32 :enterprise_id, :onlyif => lambda { enterprise != 0 }
-    end
-  end
-  # skip :length => lambda { flowset_length - 4 - set.num_bytes } ?
-end
-
-class IpfixOptionFlowset < BinData::Record
-  endian :big
-  array :templates, :read_until => lambda { flowset_length - 4 - array.num_bytes <= 2 } do
-    uint16 :template_id
-    uint16 :field_count
-    uint16 :scope_count, :assert => lambda { scope_count > 0 }
-    array :scope_fields, :initial_length => lambda { scope_count } do
-      bit1 :enterprise
-      bit15 :field_type
-      uint16 :field_length
-      uint32 :enterprise_id, :onlyif => lambda { enterprise != 0 }
-    end
-    array :option_fields, :initial_length => lambda { field_count - scope_count } do
-      bit1 :enterprise
-      bit15 :field_type
-      uint16 :field_length
-      uint32 :enterprise_id, :onlyif => lambda { enterprise != 0 }
-    end
-  end
-end
-
-class IpfixPDU < BinData::Record
-  endian :big
-  uint16 :version
-  uint16 :pdu_length
-  uint32 :unix_sec
-  uint32 :flow_seq_num
-  uint32 :observation_domain_id
-  array :records, :read_until => lambda { array.num_bytes == pdu_length - 16 } do
-    uint16 :flowset_id, :assert => lambda { [2, 3, *(256..65535)].include?(flowset_id) }
-    uint16 :flowset_length, :assert => lambda { flowset_length > 4 }
-    choice :flowset_data, :selection => :flowset_id do
-      ipfix_template_flowset 2
-      ipfix_option_flowset 3
-      string :default, :read_length => lambda { flowset_length - 4 }
+      template_flowset 0
+      option_flowset 1
+      string :default, :read_length => lambda { flowset_length - 4 }
     end
   end
 end
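For readers unfamiliar with BinData, the classes above use its declarative record DSL: each uint16/array/choice line describes how the next bytes of a flowset are parsed. The snippet below is a minimal, self-contained sketch of that pattern; the class name is invented for illustration, while the field layout mirrors the TemplateFlowset entries shown in the diff, so it is not the plugin's actual definition.

  require 'bindata'

  # Hypothetical record mirroring the TemplateFlowset shape above:
  # a big-endian header followed by `field_count` (type, length) pairs.
  class ExampleTemplateRecord < BinData::Record
    endian :big
    uint16 :template_id
    uint16 :field_count
    array  :fields, :initial_length => :field_count do
      uint16 :field_type
      uint16 :field_length
    end
  end

  # Template 256 declaring two fields: (type 1, length 4) and (type 2, length 4).
  raw = [256, 2, 1, 4, 2, 4].pack('n*')
  record = ExampleTemplateRecord.read(raw)
  record.template_id           # => 256
  record.fields[0].field_type  # => 1

The codec's real records additionally use :read_until and :assert lambdas and select a flowset class through choice :flowset_data, :selection => :flowset_id, as the context lines above show.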
data/logstash-codec-netflow.gemspec
CHANGED
@@ -1,9 +1,9 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-codec-netflow'
-  s.version = '2.1.1'
+  s.version = '3.0.0'
   s.licenses = ['Apache License (2.0)']
-  s.summary = "The netflow codec is for decoding Netflow v5/v9
+  s.summary = "The netflow codec is for decoding Netflow v5/v9 flows."
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
   s.authors = ["Elastic"]
   s.email = 'info@elastic.co'
@@ -20,9 +20,8 @@ Gem::Specification.new do |s|
   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "codec" }
 
   # Gem dependencies
-  s.add_runtime_dependency "logstash-core-plugin-api", "~>
+  s.add_runtime_dependency "logstash-core-plugin-api", "~> 2.0"
   s.add_runtime_dependency 'bindata', ['>= 1.5.0']
   s.add_development_dependency 'logstash-devutils'
 end
 
-
data/spec/codecs/netflow_spec.rb
CHANGED
@@ -104,19 +104,19 @@ describe LogStash::Codecs::Netflow do
     it "should decode raw data" do
       expect(decode.size).to eq(2)
 
-      expect(decode[0]["[netflow][version]"]).to eq(5)
-      expect(decode[0]["[netflow][ipv4_src_addr]"]).to eq("10.0.2.2")
-      expect(decode[0]["[netflow][ipv4_dst_addr]"]).to eq("10.0.2.15")
-      expect(decode[0]["[netflow][l4_src_port]"]).to eq(54435)
-      expect(decode[0]["[netflow][l4_dst_port]"]).to eq(22)
-      expect(decode[0]["[netflow][tcp_flags]"]).to eq(16)
+      expect(decode[0].get("[netflow][version]")).to eq(5)
+      expect(decode[0].get("[netflow][ipv4_src_addr]")).to eq("10.0.2.2")
+      expect(decode[0].get("[netflow][ipv4_dst_addr]")).to eq("10.0.2.15")
+      expect(decode[0].get("[netflow][l4_src_port]")).to eq(54435)
+      expect(decode[0].get("[netflow][l4_dst_port]")).to eq(22)
+      expect(decode[0].get("[netflow][tcp_flags]")).to eq(16)
 
-      expect(decode[1]["[netflow][version]"]).to eq(5)
-      expect(decode[1]["[netflow][ipv4_src_addr]"]).to eq("10.0.2.15")
-      expect(decode[1]["[netflow][ipv4_dst_addr]"]).to eq("10.0.2.2")
-      expect(decode[1]["[netflow][l4_src_port]"]).to eq(22)
-      expect(decode[1]["[netflow][l4_dst_port]"]).to eq(54435)
-      expect(decode[1]["[netflow][tcp_flags]"]).to eq(24)
+      expect(decode[1].get("[netflow][version]")).to eq(5)
+      expect(decode[1].get("[netflow][ipv4_src_addr]")).to eq("10.0.2.15")
+      expect(decode[1].get("[netflow][ipv4_dst_addr]")).to eq("10.0.2.2")
+      expect(decode[1].get("[netflow][l4_src_port]")).to eq(22)
+      expect(decode[1].get("[netflow][l4_dst_port]")).to eq(54435)
+      expect(decode[1].get("[netflow][tcp_flags]")).to eq(24)
     end
 
     it "should serialize to json" do
@@ -193,7 +193,7 @@ describe LogStash::Codecs::Netflow do
 
     it "should decode raw data" do
       expect(decode.size).to eq(7)
-      expect(decode[0]["[netflow][version]"]).to eq(9)
+      expect(decode[0].get("[netflow][version]")).to eq(9)
     end
 
     it "should serialize to json" do
@@ -235,8 +235,8 @@ describe LogStash::Codecs::Netflow do
     end
 
     it "should decode the mac address" do
-      expect(decode[1]["[netflow][in_src_mac]"]).to eq("00:50:56:c0:00:01")
-      expect(decode[1]["[netflow][in_dst_mac]"]).to eq("00:0c:29:70:86:09")
+      expect(decode[1].get("[netflow][in_src_mac]")).to eq("00:50:56:c0:00:01")
+      expect(decode[1].get("[netflow][in_dst_mac]")).to eq("00:0c:29:70:86:09")
     end
 
     it "should serialize to json" do
@@ -292,7 +292,7 @@ describe LogStash::Codecs::Netflow do
 
     it "should decode raw data" do
      expect(decode.size).to eq(14)
-      expect(decode[1]["[netflow][version]"]).to eq(9)
+      expect(decode[1].get("[netflow][version]")).to eq(9)
     end
 
     it "should serialize to json" do
@@ -379,8 +379,8 @@ describe LogStash::Codecs::Netflow do
 
     it "should decode raw data" do
       expect(decode.size).to eq(9)
-      expect(decode[1]["[netflow][l4_src_port]"]).to eq(123)
-      expect(decode[8]["[netflow][l4_src_port]"]).to eq(22)
+      expect(decode[1].get("[netflow][l4_src_port]")).to eq(123)
+      expect(decode[8].get("[netflow][l4_src_port]")).to eq(22)
     end
 
     it "should serialize to json" do
@@ -431,254 +431,9 @@ describe LogStash::Codecs::Netflow do
     end
 
     it "should decode raw data" do
-      expect(decode[0]["[netflow][scope_system]"]).to eq(0)
-      expect(decode[0]["[netflow][total_flows_exp]"]).to eq(1)
+      expect(decode[0].get("[netflow][scope_system]")).to eq(0)
+      expect(decode[0].get("[netflow][total_flows_exp]")).to eq(1)
     end
-  end
-
-  context "IPFIX" do
-    let(:data) do
-      # this netflow raw data was produced with softflowd and captured with netcat
-      # softflowd -D -i eth0 -v 10 -t maxlife=1 -n 127.0.01:8765
-      # nc -k -4 -u -l 127.0.0.1 8765 > ipfix.dat
-      data = []
-      data << IO.read(File.join(File.dirname(__FILE__), "ipfix.dat"), :mode => "rb")
-    end
-
-    let(:json_events) do
-      events = []
-      events << <<-END
-        {
-          "@timestamp": "2015-05-13T11:20:26.000Z",
-          "netflow": {
-            "version": 10,
-            "meteringProcessId": 2679,
-            "systemInitTimeMilliseconds": 1431516013506,
-            "selectorAlgorithm": 1,
-            "samplingPacketInterval": 1,
-            "samplingPacketSpace": 0
-          },
-          "@version": "1"
-        }
-      END
 
-      events << <<-END
-        {
-          "@timestamp": "2015-05-13T11:20:26.000Z",
-          "netflow": {
-            "version": 10,
-            "sourceIPv4Address": "192.168.253.1",
-            "destinationIPv4Address": "192.168.253.128",
-            "octetDeltaCount": 260,
-            "packetDeltaCount": 5,
-            "ingressInterface": 0,
-            "egressInterface": 0,
-            "sourceTransportPort": 60560,
-            "destinationTransportPort": 22,
-            "protocolIdentifier": 6,
-            "tcpControlBits": 16,
-            "ipVersion": 4,
-            "ipClassOfService": 0,
-            "icmpTypeCodeIPv4": 0,
-            "vlanId": 0,
-            "flowStartSysUpTime": 0,
-            "flowEndSysUpTime": 12726
-          },
-          "@version": "1"
-        }
-      END
-
-      events << <<-END
-        {
-          "@timestamp": "2015-05-13T11:20:26.000Z",
-          "netflow": {
-            "version": 10,
-            "sourceIPv4Address": "192.168.253.128",
-            "destinationIPv4Address": "192.168.253.1",
-            "octetDeltaCount": 1000,
-            "packetDeltaCount": 6,
-            "ingressInterface": 0,
-            "egressInterface": 0,
-            "sourceTransportPort": 22,
-            "destinationTransportPort": 60560,
-            "protocolIdentifier": 6,
-            "tcpControlBits": 24,
-            "ipVersion": 4,
-            "ipClassOfService": 0,
-            "icmpTypeCodeIPv4": 0,
-            "vlanId": 0,
-            "flowStartSysUpTime": 0,
-            "flowEndSysUpTime": 12726
-          },
-          "@version": "1"
-        }
-      END
-
-      events << <<-END
-        {
-          "@timestamp": "2015-05-13T11:20:26.000Z",
-          "netflow": {
-            "version": 10,
-            "sourceIPv4Address": "192.168.253.2",
-            "destinationIPv4Address": "192.168.253.132",
-            "octetDeltaCount": 601,
-            "packetDeltaCount": 2,
-            "ingressInterface": 0,
-            "egressInterface": 0,
-            "sourceTransportPort": 53,
-            "destinationTransportPort": 35262,
-            "protocolIdentifier": 17,
-            "tcpControlBits": 0,
-            "ipVersion": 4,
-            "ipClassOfService": 0,
-            "icmpTypeCodeIPv4": 0,
-            "vlanId": 0,
-            "flowStartSysUpTime": 1104,
-            "flowEndSysUpTime": 1142
-          },
-          "@version": "1"
-        }
-      END
-
-      events << <<-END
-        {
-          "@timestamp": "2015-05-13T11:20:26.000Z",
-          "netflow": {
-            "version": 10,
-            "sourceIPv4Address": "192.168.253.132",
-            "destinationIPv4Address": "192.168.253.2",
-            "octetDeltaCount": 148,
-            "packetDeltaCount": 2,
-            "ingressInterface": 0,
-            "egressInterface": 0,
-            "sourceTransportPort": 35262,
-            "destinationTransportPort": 53,
-            "protocolIdentifier": 17,
-            "tcpControlBits": 0,
-            "ipVersion": 4,
-            "ipClassOfService": 0,
-            "icmpTypeCodeIPv4": 0,
-            "vlanId": 0,
-            "flowStartSysUpTime": 1104,
-            "flowEndSysUpTime": 1142
-          },
-          "@version": "1"
-        }
-      END
-
-      events << <<-END
-        {
-          "@timestamp": "2015-05-13T11:20:26.000Z",
-          "netflow": {
-            "version": 10,
-            "sourceIPv4Address": "54.214.9.161",
-            "destinationIPv4Address": "192.168.253.132",
-            "octetDeltaCount": 5946,
-            "packetDeltaCount": 14,
-            "ingressInterface": 0,
-            "egressInterface": 0,
-            "sourceTransportPort": 443,
-            "destinationTransportPort": 49935,
-            "protocolIdentifier": 6,
-            "tcpControlBits": 26,
-            "ipVersion": 4,
-            "ipClassOfService": 0,
-            "icmpTypeCodeIPv4": 0,
-            "vlanId": 0,
-            "flowStartSysUpTime": 1142,
-            "flowEndSysUpTime": 2392
-          },
-          "@version": "1"
-        }
-      END
-
-      events << <<-END
-        {
-          "@timestamp": "2015-05-13T11:20:26.000Z",
-          "netflow": {
-            "version": 10,
-            "sourceIPv4Address": "192.168.253.132",
-            "destinationIPv4Address": "54.214.9.161",
-            "octetDeltaCount": 2608,
-            "packetDeltaCount": 13,
-            "ingressInterface": 0,
-            "egressInterface": 0,
-            "sourceTransportPort": 49935,
-            "destinationTransportPort": 443,
-            "protocolIdentifier": 6,
-            "tcpControlBits": 26,
-            "ipVersion": 4,
-            "ipClassOfService": 0,
-            "icmpTypeCodeIPv4": 0,
-            "vlanId": 0,
-            "flowStartSysUpTime": 1142,
-            "flowEndSysUpTime": 2392
-          },
-          "@version": "1"
-        }
-      END
-
-      events.map{|event| event.gsub(/\s+/, "")}
-    end
-
-    it "should decode raw data" do
-      expect(decode.size).to eq(7)
-
-      expect(decode[0]["[netflow][version]"]).to eq(10)
-      expect(decode[0]["[netflow][systemInitTimeMilliseconds]"]).to eq(1431516013506)
-
-      expect(decode[1]["[netflow][version]"]).to eq(10)
-      expect(decode[1]["[netflow][sourceIPv4Address]"]).to eq("192.168.253.1")
-      expect(decode[1]["[netflow][destinationIPv4Address]"]).to eq("192.168.253.128")
-      expect(decode[1]["[netflow][sourceTransportPort]"]).to eq(60560)
-      expect(decode[1]["[netflow][destinationTransportPort]"]).to eq(22)
-      expect(decode[1]["[netflow][protocolIdentifier]"]).to eq(6)
-      expect(decode[1]["[netflow][tcpControlBits]"]).to eq(16)
-
-      expect(decode[2]["[netflow][version]"]).to eq(10)
-      expect(decode[2]["[netflow][sourceIPv4Address]"]).to eq("192.168.253.128")
-      expect(decode[2]["[netflow][destinationIPv4Address]"]).to eq("192.168.253.1")
-      expect(decode[2]["[netflow][sourceTransportPort]"]).to eq(22)
-      expect(decode[2]["[netflow][destinationTransportPort]"]).to eq(60560)
-      expect(decode[2]["[netflow][protocolIdentifier]"]).to eq(6)
-      expect(decode[2]["[netflow][tcpControlBits]"]).to eq(24)
-
-      expect(decode[3]["[netflow][sourceIPv4Address]"]).to eq("192.168.253.2")
-      expect(decode[3]["[netflow][destinationIPv4Address]"]).to eq("192.168.253.132")
-      expect(decode[3]["[netflow][sourceTransportPort]"]).to eq(53)
-      expect(decode[3]["[netflow][destinationTransportPort]"]).to eq(35262)
-      expect(decode[3]["[netflow][protocolIdentifier]"]).to eq(17)
-
-      expect(decode[4]["[netflow][sourceIPv4Address]"]).to eq("192.168.253.132")
-      expect(decode[4]["[netflow][destinationIPv4Address]"]).to eq("192.168.253.2")
-      expect(decode[4]["[netflow][sourceTransportPort]"]).to eq(35262)
-      expect(decode[4]["[netflow][destinationTransportPort]"]).to eq(53)
-      expect(decode[4]["[netflow][protocolIdentifier]"]).to eq(17)
-
-      expect(decode[5]["[netflow][sourceIPv4Address]"]).to eq("54.214.9.161")
-      expect(decode[5]["[netflow][destinationIPv4Address]"]).to eq("192.168.253.132")
-      expect(decode[5]["[netflow][sourceTransportPort]"]).to eq(443)
-      expect(decode[5]["[netflow][destinationTransportPort]"]).to eq(49935)
-      expect(decode[5]["[netflow][protocolIdentifier]"]).to eq(6)
-      expect(decode[5]["[netflow][tcpControlBits]"]).to eq(26)
-
-      expect(decode[6]["[netflow][sourceIPv4Address]"]).to eq("192.168.253.132")
-      expect(decode[6]["[netflow][destinationIPv4Address]"]).to eq("54.214.9.161")
-      expect(decode[6]["[netflow][sourceTransportPort]"]).to eq(49935)
-      expect(decode[6]["[netflow][destinationTransportPort]"]).to eq(443)
-      expect(decode[6]["[netflow][protocolIdentifier]"]).to eq(6)
-      expect(decode[6]["[netflow][tcpControlBits]"]).to eq(26)
-    end
-
-    it "should serialize to json" do
-      expect(JSON.parse(decode[0].to_json)).to eq(JSON.parse(json_events[0]))
-      expect(JSON.parse(decode[1].to_json)).to eq(JSON.parse(json_events[1]))
-      expect(JSON.parse(decode[2].to_json)).to eq(JSON.parse(json_events[2]))
-      expect(JSON.parse(decode[3].to_json)).to eq(JSON.parse(json_events[3]))
-      expect(JSON.parse(decode[4].to_json)).to eq(JSON.parse(json_events[4]))
-      expect(JSON.parse(decode[5].to_json)).to eq(JSON.parse(json_events[5]))
-      expect(JSON.parse(decode[6].to_json)).to eq(JSON.parse(json_events[6]))
-    end
   end
-
 end
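The spec rewrites above all follow one pattern: hash-style field access on decoded events (decode[0]["[netflow][version]"]) is replaced by accessor calls (decode[0].get(...)), matching the Event API that comes with logstash-core-plugin-api 2.0 (see the gemspec change above). The fragment below is a hedged illustration only, assuming a Logstash 5.x runtime on the load path; it is not part of this diff.

  require "logstash/event"

  event = LogStash::Event.new("netflow" => { "version" => 5 })

  # plugin-api 1.x era specs read fields with hash-style indexing:
  #   event["[netflow][version]"]        # => 5
  # plugin-api 2.x specs use the accessor methods instead:
  event.get("[netflow][version]")        # => 5
  event.set("[netflow][version]", 9)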
metadata
CHANGED
@@ -1,58 +1,60 @@
 --- !ruby/object:Gem::Specification
 name: logstash-codec-netflow
 version: !ruby/object:Gem::Version
-  version: 2.1.1
+  version: 3.0.0
 platform: ruby
 authors:
 - Elastic
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-05-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
+  name: logstash-core-plugin-api
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - ~>
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '
-  name: logstash-core-plugin-api
-  prerelease: false
+        version: '2.0'
   type: :runtime
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - ~>
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '
+        version: '2.0'
 - !ruby/object:Gem::Dependency
+  name: bindata
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - ">="
      - !ruby/object:Gem::Version
        version: 1.5.0
-  name: bindata
-  prerelease: false
   type: :runtime
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - ">="
      - !ruby/object:Gem::Version
        version: 1.5.0
 - !ruby/object:Gem::Dependency
+  name: logstash-devutils
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
-  name: logstash-devutils
-  prerelease: false
   type: :development
+  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
-    - -
+    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
-description: This gem is a Logstash plugin required to be installed on top of the
+description: This gem is a Logstash plugin required to be installed on top of the
+  Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
+  gem is not a stand-alone program
 email: info@elastic.co
 executables: []
 extensions: []
@@ -65,12 +67,9 @@ files:
 - NOTICE.TXT
 - README.md
 - lib/logstash/codecs/netflow.rb
-- lib/logstash/codecs/netflow/iana2yaml.rb
-- lib/logstash/codecs/netflow/ipfix.yaml
 - lib/logstash/codecs/netflow/netflow.yaml
 - lib/logstash/codecs/netflow/util.rb
 - logstash-codec-netflow.gemspec
-- spec/codecs/ipfix.dat
 - spec/codecs/netflow5.dat
 - spec/codecs/netflow5_test_invalid01.dat
 - spec/codecs/netflow5_test_invalid02.dat
@@ -90,28 +89,27 @@ licenses:
 metadata:
   logstash_plugin: 'true'
   logstash_group: codec
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.
-signing_key:
+rubyforge_project:
+rubygems_version: 2.5.1
+signing_key:
 specification_version: 4
-summary: The netflow codec is for decoding Netflow v5/v9
+summary: The netflow codec is for decoding Netflow v5/v9 flows.
 test_files:
-- spec/codecs/ipfix.dat
 - spec/codecs/netflow5.dat
 - spec/codecs/netflow5_test_invalid01.dat
 - spec/codecs/netflow5_test_invalid02.dat