logstash-codec-netflow 4.3.1 → 4.3.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 1977177c31346ff62addb2a2140b21adcf35d1becf6c995604e16f91e1998d94
- data.tar.gz: f448694fb80229e0257dc48f2c3200d8d19d21055a9e17057488c81d8fce2a84
+ metadata.gz: 25aee1405937eb3d007e9c41fc86295eea44c81ca8f4559168fa892a3a37925c
+ data.tar.gz: 151a0535226ecff1c22013a847156f8f4ff4fe3e7f2abae74f2ec49719f0c844
  SHA512:
- metadata.gz: 87bb8f9f6cf7070f786225e7557552873c7f9be274a18cf97d14e213dbd898c1b6c91bc4008cc57221e1911206ae1f839680b09219a207c57c74ae7d9c996dc3
- data.tar.gz: 4963f7cf398aaf8552805f14f3be1b6423316a9880340edd226caaa591ae84f76fcb2db714b2da686964551b0d6a97928c1fe3e5c238b9da2893f2d6b6370e1d
+ metadata.gz: 61aaf20a488e709118f96620414c16c5440cf17f1a3bd2d8a51aea3bd435ade159e13f774573d2e8f016f3bafa0e2ec213b259c085d44138dd44b1fdc9aa70c7
+ data.tar.gz: b76dc8fd387086eef2c476ca79b116f6b541dd41a9947c4341941d9eba1d0874d937d5c558b29d2a56d03db790805be12b0aa0f4a0f8b1789a3646f0e98b5cc8
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+ ## 4.3.2
+ - Updates the milliseconds rounding for IPFIX start/end milliseconds fields.
+ - Fix the test to run on Logstash 8 with microseconds precision. [#206](https://github.com/logstash-plugins/logstash-codec-netflow/pull/206)
+
  ## 4.3.1
  - Fixed unable to initialize the plugin with Logstash 8.10+ [#205](https://github.com/logstash-plugins/logstash-codec-netflow/pull/205)
 
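To make the first 4.3.2 bullet concrete, here is a minimal plain-Ruby sketch, not gem code, of the arithmetic behind the shifted test expectations further down: Logstash 8 keeps sub-millisecond precision and rounds when rendering milliseconds, while earlier versions effectively truncated. The 191998-microseconds value comes from the code comment in the next hunk; this is only an illustration of the difference, not the actual rendering code.

```ruby
micros = 191_998                      # sub-second microseconds of a hypothetical flow timestamp

truncated = micros / 1_000            # => 191 (integer division; matches the older, truncated rendering)
rounded   = (micros / 1_000.0).round  # => 192 (matches the rounded rendering seen on Logstash 8)

puts truncated, rounded
```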
data/lib/logstash/codecs/netflow.rb CHANGED
@@ -343,7 +343,13 @@ class LogStash::Codecs::Netflow < LogStash::Codecs::Base
  when /^flow(?:Start|End)(Milli|Micro|Nano)seconds$/
  case $1
  when 'Milli'
- event[@target][k.to_s] = LogStash::Timestamp.at(v.snapshot.to_f / 1_000).to_iso8601
+ secs = v.snapshot.to_i / 1000
+ micros = (v.snapshot.to_i % 1000) * 1000
+ # Use the two-argument Timestamp.at to avoid spurious precision below milliseconds. Float division (like / 1_000)
+ # can introduce representation errors that turn 0.192 into something like 0.192000001, so we truncate to
+ # milliseconds here. Note that to_iso8601 rounds, so 191998 micros is rendered as 192 millis on Logstash 8,
+ # while previous versions truncated it to 191.
+ event[@target][k.to_s] = LogStash::Timestamp.at(secs, micros).to_iso8601
  when 'Micro', 'Nano'
  # For now we'll stick to assuming ntp timestamps,
  # Netscaler implementation may be buggy though:
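As a companion to the hunk above, here is a minimal plain-Ruby sketch, not gem code, of why the integer seconds/microseconds split is used: float division of an epoch-milliseconds value can pick up representation noise, while the integer split stays exact. The sample value is hypothetical, and Time.at stands in for LogStash::Timestamp.at, which the codec calls with the same (seconds, microseconds) form.

```ruby
require 'time'

# Hypothetical epoch-milliseconds value (2015-05-02T18:38:08.280Z, as in the specs below).
millis = 1_430_591_888_280

# Float division: the fractional part may pick up representation noise
# instead of being exactly .280.
float_secs = millis.to_f / 1_000
puts format('%.10f', float_secs)

# Integer split, mirroring the codec change: exact seconds plus exact microseconds.
secs   = millis / 1_000               # => 1430591888
micros = (millis % 1_000) * 1_000     # => 280000
puts Time.at(secs, micros).utc.iso8601(6)  # => "2015-05-02T18:38:08.280000Z"
```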
data/logstash-codec-netflow.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|
 
  s.name = 'logstash-codec-netflow'
- s.version = '4.3.1'
+ s.version = '4.3.2'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Reads Netflow v5, Netflow v9 and IPFIX data"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/codecs/netflow_spec.rb CHANGED
@@ -17,6 +17,11 @@ describe LogStash::Codecs::Netflow do
  end
  end
 
+ let(:is_LS_8) do
+ logstash_version = Gem::Version.create(LOGSTASH_CORE_VERSION)
+ Gem::Requirement.create('>= 8.0').satisfied_by?(logstash_version)
+ end
+
  ### NETFLOW v5
 
  context "Netflow 5 valid 01" do
@@ -28,11 +33,13 @@ describe LogStash::Codecs::Netflow do
  data << IO.read(File.join(File.dirname(__FILE__), "netflow5.dat"), :mode => "rb")
  end
 
+ let(:micros) { is_LS_8 ? "328" : "" }
+
  let(:json_events) do
  events = []
  events << <<-END
  {
- "@timestamp": "2015-05-02T18:38:08.280Z",
+ "@timestamp": "2015-05-02T18:38:08.280#{micros}Z",
  "netflow": {
  "version": 5,
  "flow_seq_num": 0,
@@ -48,8 +55,8 @@ describe LogStash::Codecs::Netflow do
  "output_snmp": 0,
  "in_pkts": 5,
  "in_bytes": 230,
- "first_switched": "2015-06-21T11:40:52.194Z",
- "last_switched": "2015-05-02T18:38:08.476Z",
+ "first_switched": "2015-06-21T11:40:52.194#{micros}Z",
+ "last_switched": "2015-05-02T18:38:08.476#{micros}Z",
  "l4_src_port": 54435,
  "l4_dst_port": 22,
  "tcp_flags": 16,
@@ -66,7 +73,7 @@ describe LogStash::Codecs::Netflow do
 
  events << <<-END
  {
- "@timestamp": "2015-05-02T18:38:08.280Z",
+ "@timestamp": "2015-05-02T18:38:08.280#{micros}Z",
  "netflow": {
  "version": 5,
  "flow_seq_num": 0,
@@ -82,8 +89,8 @@ describe LogStash::Codecs::Netflow do
  "output_snmp": 0,
  "in_pkts": 4,
  "in_bytes": 304,
- "first_switched": "2015-06-21T11:40:52.194Z",
- "last_switched": "2015-05-02T18:38:08.476Z",
+ "first_switched": "2015-06-21T11:40:52.194#{micros}Z",
+ "last_switched": "2015-05-02T18:38:08.476#{micros}Z",
  "l4_src_port": 22,
  "l4_dst_port": 54435,
  "tcp_flags": 24,
@@ -835,11 +842,13 @@ describe LogStash::Codecs::Netflow do
  packets << IO.read(File.join(File.dirname(__FILE__), "netflow5_test_microtik.dat"), :mode => "rb")
  end
 
+ let(:micros) { is_LS_8 ? "932" : "" }
+
  let(:json_events) do
  events = []
  events << <<-END
  {
- "@timestamp": "2016-07-21T13:51:57.514Z",
+ "@timestamp": "2016-07-21T13:51:57.514#{micros}Z",
  "netflow": {
  "version": 5,
  "flow_seq_num": 8140050,
@@ -855,8 +864,8 @@ describe LogStash::Codecs::Netflow do
  "output_snmp": 46,
  "in_pkts": 13,
  "in_bytes": 11442,
- "first_switched": "2016-07-21T13:51:42.254Z",
- "last_switched": "2016-07-21T13:51:42.254Z",
+ "first_switched": "2016-07-21T13:51:42.254#{micros}Z",
+ "last_switched": "2016-07-21T13:51:42.254#{micros}Z",
  "l4_src_port": 80,
  "l4_dst_port": 51826,
  "tcp_flags": 82,
@@ -1330,7 +1339,7 @@ describe LogStash::Codecs::Netflow do
  "ixiaDstLongitude": 100.33540344238281,
  "ixiaHttpUserAgent": "",
  "ixiaDeviceName": "unknown",
- "flowStartMilliseconds": "2018-10-25T12:24:19.881Z",
+ "flowStartMilliseconds": "2018-10-25T12:24:19.882Z",
  "destinationIPv4Address": "202.170.60.247",
  "ixiaDeviceId": 0,
  "ixiaL7AppName": "unknown",
@@ -2029,6 +2038,9 @@ describe LogStash::Codecs::Netflow do
  data << IO.read(File.join(File.dirname(__FILE__), "ipfix_test_netscaler_data.dat"), :mode => "rb")
  end
 
+ # In LS 8 the precision is up to nanos; in LS 7 it is up to millis.
+ let(:nanos) { is_LS_8 ? "128468" : "" }
+
  let(:json_events) do
  events = []
  events << <<-END
@@ -2038,7 +2050,7 @@ describe LogStash::Codecs::Netflow do
  "netscalerHttpReqUserAgent": "Mozilla/5.0 (Commodore 64; kobo.com) Gecko/20100101 Firefox/75.0",
  "destinationTransportPort": 443,
  "netscalerHttpReqCookie": "beer=123456789abcdefghijklmnopqrstuvw; AnotherCookie=1234567890abcdefghijklmnopqr; Shameless.Plug=Thankyou.Rakuten.Kobo.Inc.For.Allowing.me.time.to.work.on.this.and.contribute.back.to.the.community; Padding=aaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbccccccccccccccddddddddddddddddddddddeeeeeeeeeeeeeeeeeeeeeffffffffffffffffffffffgggggggggggggggggggggggghhhhhhhhhhhhhhhhhiiiiiiiiiiiiiiiiiiiiiijjjjjjjjjjjjjjjjjjjjjjjjkkkkkkkkkkkkkkkkkklllllllllllllllmmmmmmmmmm; more=less; GJquote=There.is.no.spoon; GarrySays=Nice!!; LastPadding=aaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbcccccccccccccccccccdddddddddddeeeeeeee",
- "flowEndMicroseconds": "2016-11-11T12:09:19.000Z",
+ "flowEndMicroseconds": "2016-11-11T12:09:19.000#{nanos}Z",
  "netscalerHttpReqUrl": "/aa/bb/ccccc/ddddddddddddddddddddddddd",
  "sourceIPv4Address": "192.168.0.1",
  "netscalerHttpReqMethod": "GET",
@@ -2057,7 +2069,7 @@ describe LogStash::Codecs::Netflow do
  "netscalerHttpReqVia": "1.1 akamai.net(ghost) (AkamaiGHost)",
  "netscalerConnectionId": 14460661,
  "tcpControlBits": 24,
- "flowStartMicroseconds": "2016-11-11T12:09:19.000Z",
+ "flowStartMicroseconds": "2016-11-11T12:09:19.000#{nanos}Z",
  "ingressInterface": 8,
  "version": 10,
  "packetDeltaCount": 2,
@@ -2085,7 +2097,6 @@ describe LogStash::Codecs::Netflow do
  expect(decode[0].get("[netflow][version]")).to eq(10)
  expect(decode[0].get("[netflow][sourceIPv4Address]")).to eq('192.168.0.1')
  expect(decode[0].get("[netflow][destinationIPv4Address]")).to eq('10.0.0.1')
- expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
  expect(decode[0].get("[netflow][netscalerConnectionId]")).to eq(14460661)
  expect(decode[1].get("[netflow][version]")).to eq(10)
  expect(decode[1].get("[netflow][flowId]")).to eq(14460662)
@@ -2097,6 +2108,16 @@ describe LogStash::Codecs::Netflow do
  expect(decode[2].get("[netflow][netscalerHttpReqXForwardedFor]")).to eq('11.222.33.255')
  end
 
+ if Gem::Requirement.create('>= 8.0').satisfied_by?(Gem::Version.create(LOGSTASH_CORE_VERSION))
+ it "should decode raw data decoding flowEndMicroseconds with nano precision" do
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000127768Z')
+ end
+ else
+ it "should decode raw data decoding flowEndMicroseconds with millis precision" do
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
+ end
+ end
+
  it "should decode variable length fields" do
  expect(decode[2].get("[netflow][netscalerHttpReqUrl]")).to eq('/aa/bb/ccccc/ddddddddddddddddddddddddd')
  expect(decode[2].get("[netflow][netscalerHttpReqHost]")).to eq('www.kobo.com')
@@ -2962,7 +2983,7 @@ describe LogStash::Codecs::Netflow do
  "tcpSequenceNumber": 340533701,
  "silkAppLabel": 0,
  "sourceTransportPort": 63499,
- "flowEndMilliseconds": "2016-12-25T12:58:34.346Z",
+ "flowEndMilliseconds": "2016-12-25T12:58:34.347Z",
  "flowAttributes": 0,
  "destinationIPv4Address": "172.16.32.215",
  "octetTotalCount": 172,
@@ -3065,6 +3086,11 @@ end
 
  # New subject with config, ordered testing since we need caching before data processing
  describe LogStash::Codecs::Netflow, 'configured with template caching', :order => :defined do
+ let(:is_LS_8) do
+ logstash_version = Gem::Version.create(LOGSTASH_CORE_VERSION)
+ Gem::Requirement.create('>= 8.0').satisfied_by?(logstash_version)
+ end
+
  context "IPFIX Netscaler with variable length fields" do
  subject do
  LogStash::Codecs::Netflow.new(cache_config).tap do |codec|
@@ -3171,10 +3197,13 @@ describe LogStash::Codecs::Netflow, 'configured with template caching', :order =
  expect(JSON.parse(File.read("#{tmp_dir}/ipfix_templates.cache"))).to eq(JSON.parse(cached_templates))
  end
 
+ # In LS 8 the precision is up to nanos; in LS 7 it is up to millis.
+ let(:nanos) { is_LS_8 ? "127768" : "" }
+
  it "should decode raw data based on cached templates" do
  expect(decode.size).to eq(3)
  expect(decode[0].get("[netflow][version]")).to eq(10)
- expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq("2016-11-11T12:09:19.000#{nanos}Z")
  expect(decode[0].get("[netflow][netscalerConnectionId]")).to eq(14460661)
  expect(decode[1].get("[netflow][version]")).to eq(10)
  expect(decode[1].get("[netflow][observationPointId]")).to eq(167954698)
@@ -3215,7 +3244,6 @@ describe LogStash::Codecs::Netflow, 'configured with include_flowset_id for ipfi
  it "should decode raw data" do
  expect(decode.size).to eq(3)
  expect(decode[0].get("[netflow][version]")).to eq(10)
- expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
  expect(decode[0].get("[netflow][netscalerConnectionId]")).to eq(14460661)
  expect(decode[1].get("[netflow][version]")).to eq(10)
  expect(decode[1].get("[netflow][observationPointId]")).to eq(167954698)
@@ -3224,6 +3252,16 @@ describe LogStash::Codecs::Netflow, 'configured with include_flowset_id for ipfi
  expect(decode[2].get("[netflow][netscalerAppUnitNameAppId]")).to eq(239927296)
  end
 
+ if Gem::Requirement.create('>= 8.0').satisfied_by?(Gem::Version.create(LOGSTASH_CORE_VERSION))
+ it "should decode raw data decoding flowEndMicroseconds with nano precision" do
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000127768Z')
+ end
+ else
+ it "should decode raw data decoding flowEndMicroseconds with millis precision" do
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
+ end
+ end
+
  it "should include flowset_id" do
  expect(decode[0].get("[netflow][flowset_id]")).to eq(258)
  expect(decode[1].get("[netflow][flowset_id]")).to eq(257)
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-codec-netflow
  version: !ruby/object:Gem::Version
- version: 4.3.1
+ version: 4.3.2
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-12-14 00:00:00.000000000 Z
+ date: 2023-12-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement