logstash-codec-protobuf 1.2.2 → 1.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +9 -0
- data/Gemfile +1 -1
- data/LICENSE +2 -3
- data/docs/index.asciidoc +27 -3
- data/lib/logstash/codecs/protobuf.rb +61 -14
- data/logstash-codec-protobuf.gemspec +2 -2
- data/spec/codecs/pb2_spec.rb +3 -5
- data/spec/codecs/pb3_decode_spec.rb +114 -9
- data/spec/codecs/pb3_encode_spec.rb +34 -11
- data/spec/helpers/pb3/FantasyHorse_pb.rb +44 -0
- metadata +5 -5
- data/spec/helpers/pb3/ReservationEntry_pb.rb +0 -64
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: afe37c419ffac934a07356c24954ab9534d6438363a8cbc3a4a9a9f7b4214cd0
+  data.tar.gz: 775f81231ce4181e43900251e5fea9906027efc5cbff2d65cd82d122b28fd977
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 02cff8a64b2bf3324d456b173b8b52761fb6ba8501f87e0011cc02328d75322552c794206fafa574919dc04e7bfe30fbb0f59e804a00f4550c57730913ff80fa
+  data.tar.gz: a431bd7300fa3dae4c16c82fd1dab79eefe8b1ea3552d5b9f9c8af175862867d029bd2ac398c1af445ec67e59cf12d010e61d5f75a3c159cfa64e294f7c1e178
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,12 @@
+## 1.2.5
+- Encoder bugfix: avoid pipeline crash if encoding failed.
+
+## 1.2.4
+- Encoder bugfix: avoid pipeline crash if encoding failed.
+
+## 1.2.3
+- Add oneof information to @metadata (protobuf version 3 only).
+
 ## 1.2.2
 - Add type conversion feature to encoder
 
data/Gemfile
CHANGED
@@ -8,4 +8,4 @@ use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "
 if Dir.exist?(logstash_path) && use_logstash_source
   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
   gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
-end
+end
data/LICENSE
CHANGED
@@ -179,7 +179,7 @@
    APPENDIX: How to apply the Apache License to your work.
 
    To apply the Apache License to your work, attach the following
-   boilerplate notice, with the fields enclosed by brackets "
+   boilerplate notice, with the fields enclosed by brackets "[]"
    replaced with your own identifying information. (Don't include
    the brackets!) The text should be enclosed in the appropriate
    comment syntax for the file format. We also recommend that a
@@ -187,7 +187,7 @@
    same "printed page" as the copyright notice for easier
    identification within third-party archives.
 
-   Copyright
+   Copyright 2020 Elastic and contributors
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
@@ -200,4 +200,3 @@
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
-
data/docs/index.asciidoc
CHANGED
@@ -207,8 +207,32 @@ Stop entire pipeline when encountering a non decodable message.
 
 Convert data types to match the protobuf definition (if possible).
 The protobuf encoder library is very strict with regards to data types. Example: an event has an integer field but the protobuf definition expects a float. This would lead to an exception and the event would be lost.
+
 This feature tries to convert the datatypes to the expectations of the protobuf definitions, without modifying the data whatsoever. Examples of conversions it might attempt:
-
-
-
+
+`"true" :: string => true :: boolean`
+
+`17 :: int => 17.0 :: float`
+
+`12345 :: number => "12345" :: string`
+
+Available only for protobuf version 3.
+
+[id="plugins-{type}s-{plugin}-pb3_set_oneof_metainfo"]
+===== `pb3_set_oneof_metainfo`
+
+* Value type is <<boolean,boolean>>
+* Default value is false
+
+Add meta information to `[@metadata][pb_oneof]` about which classes were chosen for [oneof](https://developers.google.com/protocol-buffers/docs/proto3#oneof) fields. A new field of name `[@metadata][pb_oneof][FOO]` will be added, where `FOO` is the name of the `oneof` field.
+
+Example values: for the protobuf definition
+[source,ruby]
+oneof :horse_type do
+  optional :unicorn, :message, 2, "UnicornType"
+  optional :pegasus, :message, 3, "PegasusType"
+end
+
+the field `[@metadata][pb_oneof][horse_type]` will be set to either `pegasus` or `unicorn`.
+Available only for protobuf version 3.
 
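The new `pb3_set_oneof_metainfo` option can be exercised end to end roughly as follows. This is a minimal sketch, not part of the release: it mirrors the pb3decoder_test8a spec further down and assumes the FantasyHorse spec helper added in this version is reachable via `protobuf_root_directory` (the path below is an assumption).

require "logstash/codecs/protobuf"

codec = LogStash::Codecs::Protobuf.new(
  "class_name"              => "FantasyHorse",
  "class_file"              => "pb3/FantasyHorse_pb.rb",
  "protobuf_root_directory" => File.expand_path("spec/helpers"),   # assumed checkout location
  "protobuf_version"        => 3,
  "pb3_set_oneof_metainfo"  => true
)
codec.register   # loads FantasyHorse_pb.rb and defines FantasyHorse, FantasyPegasus, ...

horse = FantasyHorse.new(:name => "Reinhold", :pegasus => FantasyPegasus.new(:wings_length => 100))
codec.decode(FantasyHorse.encode(horse)) do |event|
  event.get("[@metadata][pb_oneof][horse_type]")   # => "pegasus"
end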
data/lib/logstash/codecs/protobuf.rb
CHANGED
@@ -134,12 +134,22 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
   # Recommendation: use the translate plugin to restore previous behaviour when upgrading.
   config :protobuf_version, :validate => [2,3], :default => 2, :required => true
 
-  # To tolerate faulty messages that cannot be decoded, set this to false. Otherwise the pipeline will stop upon encountering a non decipherable message.
+  # To tolerate faulty messages that cannot be en/decoded, set this to false. Otherwise the pipeline will stop upon encountering a non decipherable message.
   config :stop_on_error, :validate => :boolean, :default => false, :required => false
 
   # Instruct the encoder to attempt converting data types to match the protobuf definitions. Available only for protobuf version 3.
   config :pb3_encoder_autoconvert_types, :validate => :boolean, :default => true, :required => false
 
+  # Add meta information to `[@metadata][pb_oneof]` about which classes were chosen for [oneof](https://developers.google.com/protocol-buffers/docs/proto3#oneof) fields.
+  # Example values: for the protobuf definition
+  # ``` oneof :horse_type do
+  #        optional :unicorn, :message, 2, "FantasyUnicorn"
+  #        optional :pegasus, :message, 3, "FantasyPegasus"
+  #     end
+  # ```
+  # the field `[@metadata][pb_oneof][horse_type]` will be set to either `pegasus` or `unicorn`.
+  # Available only for protobuf version 3.
+  config :pb3_set_oneof_metainfo, :validate => :boolean, :default => false, :required => false
 
 
   attr_reader :execution_context
@@ -155,7 +165,6 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
     @metainfo_pb2_enumlist = []
     @pb3_typeconversion_tag = "_protobuf_type_converted"
 
-
     if @include_path.length > 0 and not class_file.strip.empty?
       raise LogStash::ConfigurationError, "Cannot use `include_path` and `class_file` at the same time"
     end
@@ -203,16 +212,23 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
   def decode(data)
     if @protobuf_version == 3
       decoded = @pb_builder.decode(data.to_s)
+      if @pb3_set_oneof_metainfo
+        meta = pb3_get_oneof_metainfo(decoded, @class_name)
+      end
       h = pb3_deep_to_hash(decoded)
     else
       decoded = @pb_builder.parse(data.to_s)
       h = decoded.to_hash
     end
-
-
-
+    e = LogStash::Event.new(h)
+    if @protobuf_version == 3 and @pb3_set_oneof_metainfo
+      e.set("[@metadata][pb_oneof]", meta)
+    end
+    yield e if block_given?
+  rescue => ex
+    @logger.warn("Couldn't decode protobuf: #{ex.inspect}.")
     if stop_on_error
-      raise
+      raise ex
     else # keep original message so that the user can debug it.
       yield LogStash::Event.new("message" => data, "tags" => ["_protobufdecodefailure"])
     end
@@ -225,7 +241,9 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
     else
       protobytes = pb2_encode(event)
     end
-
+    unless protobytes.nil? or protobytes.empty?
+      @on_event.call(event, protobytes)
+    end
   end # def encode
 
 
@@ -277,10 +295,13 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
     k = event.to_hash.keys.join(", ")
     @logger.warn("Protobuf encoding error 1: Argument error (#{e.inspect}). Reason: probably mismatching protobuf definition. \
       Required fields in the protobuf definition are: #{k} and fields must not begin with @ sign. The event has been discarded.")
+    nil
   rescue TypeError => e
     pb3_handle_type_errors(event, e, is_recursive_call, datahash)
+    nil
   rescue => e
     @logger.warn("Protobuf encoding error 3: #{e.inspect}. Event discarded. Input data: #{datahash}. The event has been discarded. Backtrace: #{e.backtrace}")
+    nil
   end
 
 
@@ -297,9 +318,6 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
       @logger.warn(msg)
       mismatches = pb3_get_type_mismatches(datahash, "", @class_name)
 
-      msg = "Protobuf encoding info 2.2: Type mismatches found: #{mismatches}." # TODO remove
-      @logger.warn(msg)
-
       event = pb3_convert_mismatched_types(event, mismatches)
       # Add a (temporary) tag to handle the recursion stop
       pb3_add_tag(event, @pb3_typeconversion_tag )
@@ -314,11 +332,20 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
       else
         @logger.warn("Protobuf encoding error 2.4.2: (#{e.inspect}). The event has been discarded.")
       end
+      if @stop_on_error
+        raise e
+      end
+      nil
     rescue => ex
       @logger.warn("Protobuf encoding error 2.5: (#{e.inspect}). The event has been discarded. Auto-typecasting was on: #{@pb3_encoder_autoconvert_types}")
+      if @stop_on_error
+        raise ex
+      end
+      nil
     end
   end # pb3_handle_type_errors
 
+
   def pb3_get_type_mismatches(data, key_prefix, pb_class)
     mismatches = []
     data.to_hash.each do |key, value|
@@ -354,17 +381,15 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
       else
         case value
         when ::Hash, Google::Protobuf::MessageExts
-
           is_mismatch = false
           descriptor = Google::Protobuf::DescriptorPool.generated_pool.lookup(pb_class).lookup(key)
-          if descriptor.subtype
+          if !descriptor.subtype.nil?
             class_of_nested_object = pb3_get_descriptorpool_name(descriptor.subtype.msgclass)
             new_prefix = "#{key}."
             recursive_mismatches = pb3_get_type_mismatches(value, new_prefix, class_of_nested_object)
             mismatches.concat(recursive_mismatches)
           end
         when ::Array
-
           expected_type = pb3_get_expected_type(key, pb_class)
           is_mismatch = (expected_type != Google::Protobuf::RepeatedField)
           child_type = Google::Protobuf::DescriptorPool.generated_pool.lookup(pb_class).lookup(key).type
@@ -510,6 +535,29 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
     datahash
   end
 
+  def pb3_get_oneof_metainfo(pb_object, pb_class_name)
+    meta = {}
+    pb_class = Google::Protobuf::DescriptorPool.generated_pool.lookup(pb_class_name).msgclass
+
+    pb_class.descriptor.each_oneof { |field|
+      field.each { | group_option |
+        if !pb_object.send(group_option.name).nil?
+          meta[field.name] = group_option.name
+        end
+      }
+    }
+
+    pb_class.descriptor.select{ |field| field.type == :message }.each { | field |
+      # recurse over nested protobuf classes
+      pb_sub_object = pb_object.send(field.name)
+      if !pb_sub_object.nil? and !field.subtype.nil?
+        pb_sub_class = pb3_get_descriptorpool_name(field.subtype.msgclass)
+        meta[field.name] = pb3_get_oneof_metainfo(pb_sub_object, pb_sub_class)
+      end
+    }
+
+    meta
+  end
 
 
   def pb2_encode(event)
@@ -525,7 +573,6 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
   end
 
 
-
   def pb2_prepare_for_encoding(datahash, class_name)
     if datahash.is_a?(::Hash)
       # Preparation: the data cannot be encoded until certain criteria are met:
data/logstash-codec-protobuf.gemspec
CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name          = 'logstash-codec-protobuf'
-  s.version       = '1.2.
+  s.version       = '1.2.5'
   s.licenses      = ['Apache License (2.0)']
   s.summary       = "Reads protobuf messages and converts to Logstash Events"
   s.description   = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -23,5 +23,5 @@ Gem::Specification.new do |s|
   s.add_runtime_dependency 'google-protobuf', '3.5.0.pre'
   s.add_runtime_dependency 'ruby-protocol-buffers' # for protobuf 2
   s.add_development_dependency 'logstash-devutils'
-end
 
+end
data/spec/codecs/pb2_spec.rb
CHANGED
@@ -2,8 +2,6 @@
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/codecs/protobuf"
 require "logstash/event"
-require "insist"
-
 
 require 'protocol_buffers' # https://github.com/codekitchen/ruby-protocol-buffers, for protobuf2
 
@@ -117,7 +115,7 @@ describe LogStash::Codecs::Protobuf do
 
     it "should return protobuf encoded data from a simple event" do
       subject.on_event do |event, data|
-
+        expect(data).to be_a(String)
         unicorn = Animal::UnicornEvent.parse(data)
 
         expect(unicorn.colour ).to eq(event.get("colour") )
@@ -148,7 +146,7 @@ describe LogStash::Codecs::Protobuf do
     it "should return protobuf encoded data from a complex event" do
 
       subject.on_event do |event, data|
-
+        expect(data).to be_a(String)
         jimmy = Animal::Human.parse(data)
 
         expect(jimmy.first_name ).to eq(event.get("first_name") )
@@ -190,7 +188,7 @@ describe LogStash::Codecs::Protobuf do
     it "should return protobuf encoded data from a complex event with enums" do
 
       subject.on_event do |event, data|
-
+        expect(data).to be_a(String)
 
         colpref = ColourProtoTest.parse(data)
 
data/spec/codecs/pb3_decode_spec.rb
CHANGED
@@ -2,7 +2,6 @@
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/codecs/protobuf"
 require "logstash/event"
-require "insist"
 
 require 'google/protobuf' # for protobuf3
 
@@ -74,7 +73,7 @@ describe LogStash::Codecs::Protobuf do
     end
   end # context
 
-  context "#
+  context "#pb3decoder_test1" do
 
 
   #### Test case 1: Decode simple protobuf ####################################################################################################################
@@ -102,7 +101,7 @@ describe LogStash::Codecs::Protobuf do
   end # it
   end # context
 
-  context "#
+  context "#pb3decoder_test2" do
 
   #### Test case 2: decode nested protobuf ####################################################################################################################
   let(:plugin_unicorn) { LogStash::Codecs::Protobuf.new("class_name" => "Unicorn", "include_path" => [pb_include_path + '/pb3/unicorn_pb.rb'], "protobuf_version" => 3) }
@@ -125,7 +124,7 @@ describe LogStash::Codecs::Protobuf do
 
   end # context
 
-  context "#
+  context "#pb3decoder_test3" do
 
   #### Test case 3: decode ProbeResult ####################################################################################################################
   let(:plugin_3) { LogStash::Codecs::Protobuf.new("class_name" => "ProbeResult", "include_path" => [pb_include_path + '/pb3/ProbeResult_pb.rb'], "protobuf_version" => 3) }
@@ -157,7 +156,7 @@ describe LogStash::Codecs::Protobuf do
   end # it
   end # context
 
-  context "#
+  context "#pb3decoder_test4" do
 
   #### Test case 4: decode PBDNSMessage ####################################################################################################################
   let(:plugin_4) { LogStash::Codecs::Protobuf.new("class_name" => "PBDNSMessage", "include_path" => [pb_include_path + '/pb3/dnsmessage_pb.rb'], "protobuf_version" => 3) }
@@ -238,7 +237,7 @@ describe LogStash::Codecs::Protobuf do
 
   end # context
 
-  context "#
+  context "#pb3decoder_test5" do
 
   #### Test case 5: decode test case for github issue 17 ####################################################################################################################
   let(:plugin_5) { LogStash::Codecs::Protobuf.new("class_name" => "com.foo.bar.IntegerTestMessage", "include_path" => [pb_include_path + '/pb3/integertest_pb.rb'], "protobuf_version" => 3) }
@@ -259,7 +258,7 @@ describe LogStash::Codecs::Protobuf do
 
   end # context
 
-  context "#
+  context "#pb3decoder_test6" do
 
 
   let(:execution_context) { double("execution_context")}
@@ -307,7 +306,7 @@ describe LogStash::Codecs::Protobuf do
 
 
 
-  context "#
+  context "#pb3decoder_test7" do
 
   #### Test case 6: decode test case for github issue 17 ####################################################################################################################
   let(:plugin_7) { LogStash::Codecs::Protobuf.new("class_name" => "RepeatedEvents", "include_path" => [pb_include_path + '/pb3/events_pb.rb'], "protobuf_version" => 3) }
@@ -334,7 +333,113 @@ describe LogStash::Codecs::Protobuf do
   end # it
 
 
-  end # context
+  end # context pb3decoder_test7
 
 
+  context "#pb3decoder_test8a" do
+
+  ########################################################################################################################
+    let(:plugin_8a) { LogStash::Codecs::Protobuf.new("class_name" => "FantasyHorse", "class_file" => 'pb3/FantasyHorse_pb.rb',
+      "protobuf_root_directory" => pb_include_path, "protobuf_version" => 3, "pb3_set_oneof_metainfo" => true) }
+    before do
+        plugin_8a.register
+    end
+
+    it "should add meta information on oneof fields" do
+      pegasus_data = {:wings_length => 100}
+      horsey = FantasyPegasus.new(pegasus_data)
+
+      braid_data = {:braid_thickness => 10, :braiding_style => "french"}
+      tail_data = {:tail_length => 80, :braided => BraidedHorseTail.new(braid_data) }
+      tail = FantasyHorseTail.new(tail_data)
+
+      data = {:name=>"Reinhold", :pegasus => horsey, :tail => tail}
+      pb_obj = FantasyHorse.new(data)
+      bin = FantasyHorse.encode(pb_obj)
+      plugin_8a.decode(bin) do |event|
+
+        expect(event.get("name") ).to eq(data[:name])
+        expect(event.get("pegasus")["wings_length"] ).to eq(pegasus_data[:wings_length])
+        expect(event.get("tail")['tail_length'] ).to eq(tail_data[:tail_length])
+        expect(event.get("tail")['braided']['braiding_style'] ).to eq(braid_data[:braiding_style])
+        expect(event.get("@metadata")["pb_oneof"]["horse_type"] ).to eq("pegasus")
+        expect(event.get("@metadata")["pb_oneof"]["tail"]["hair_type"] ).to eq("braided")
+
+      end
+    end # it
+
+
+  end # context pb3decoder_test8a
+
+
+
+
+  context "#pb3decoder_test8b" do
+
+  ########################################################################################################################
+    let(:plugin_8b) { LogStash::Codecs::Protobuf.new("class_name" => "FantasyHorse", "class_file" => 'pb3/FantasyHorse_pb.rb',
+      "protobuf_root_directory" => pb_include_path, "protobuf_version" => 3, "pb3_set_oneof_metainfo" => false) }
+    before do
+        plugin_8b.register
+    end
+
+    it "should not add meta information on oneof fields" do
+      pegasus_data = {:wings_length => 100}
+      horsey = FantasyPegasus.new(pegasus_data)
+
+      braid_data = {:braid_thickness => 10, :braiding_style => "french"}
+      tail_data = {:tail_length => 80, :braided => BraidedHorseTail.new(braid_data) }
+      tail = FantasyHorseTail.new(tail_data)
+
+      data = {:name=>"Winfried", :pegasus => horsey, :tail => tail}
+      pb_obj = FantasyHorse.new(data)
+      bin = FantasyHorse.encode(pb_obj)
+      plugin_8b.decode(bin) do |event|
+        expect(event.get("name") ).to eq(data[:name])
+        expect(event.get("pegasus")["wings_length"] ).to eq(pegasus_data[:wings_length])
+        expect(event.get("tail")['tail_length'] ).to eq(tail_data[:tail_length])
+        expect(event.get("tail")['braided']['braiding_style'] ).to eq(braid_data[:braiding_style])
+        expect(event.get("@metadata")["pb_oneof"]).to be_nil
+
+      end
+    end # it
+
+
+  end # context pb3decoder_test8b
+
+
+  context "#pb3decoder_test8c" do # same test as 8a just with different one_of options selected
+
+  ########################################################################################################################
+    let(:plugin_8c) { LogStash::Codecs::Protobuf.new("class_name" => "FantasyHorse", "class_file" => 'pb3/FantasyHorse_pb.rb',
+      "protobuf_root_directory" => pb_include_path, "protobuf_version" => 3, "pb3_set_oneof_metainfo" => true) }
+    before do
+        plugin_8c.register
+    end
+
+    it "should add meta information on oneof fields" do
+      unicorn_data = {:horn_length => 30}
+      horsey = FantasyUnicorn.new(unicorn_data)
+
+      natural_data = {:wavyness => "B"}
+      tail_data = {:tail_length => 80, :natural => NaturalHorseTail.new(natural_data) }
+      tail = FantasyHorseTail.new(tail_data)
+
+      data = {:name=>"Hubert", :unicorn => horsey, :tail => tail}
+      pb_obj = FantasyHorse.new(data)
+      bin = FantasyHorse.encode(pb_obj)
+      plugin_8c.decode(bin) do |event|
+        expect(event.get("name") ).to eq(data[:name])
+        expect(event.get("unicorn")["horn_length"] ).to eq(unicorn_data[:horn_length])
+        expect(event.get("tail")['tail_length'] ).to eq(tail_data[:tail_length])
+        expect(event.get("tail")['natural']['wavyness'] ).to eq(natural_data[:wavyness])
+        expect(event.get("@metadata")["pb_oneof"]["horse_type"] ).to eq("unicorn")
+        expect(event.get("@metadata")["pb_oneof"]["tail"]["hair_type"] ).to eq("natural")
+
+      end
+    end # it
+
+
+  end # context pb3decoder_test8c
+
 
 end # describe
data/spec/codecs/pb3_encode_spec.rb
CHANGED
@@ -1,12 +1,7 @@
-
-
-
-
 # encoding: utf-8
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/codecs/protobuf"
 require "logstash/event"
-require "insist"
 
 require 'google/protobuf' # for protobuf3
 
@@ -30,7 +25,7 @@ describe LogStash::Codecs::Protobuf do
   it "should return protobuf encoded data for testcase 1" do
 
     subject.on_event do |event, data|
-
+      expect(data).to be_a(String)
 
      pb_builder = Google::Protobuf::DescriptorPool.generated_pool.lookup("Unicorn").msgclass
      decoded_data = pb_builder.decode(data)
@@ -64,7 +59,7 @@ describe LogStash::Codecs::Protobuf do
   it "should return protobuf encoded data for testcase 2" do
 
     subject.on_event do |event, data|
-
+      expect(data).to be_a(String)
 
      pb_builder = Google::Protobuf::DescriptorPool.generated_pool.lookup("Unicorn").msgclass
      decoded_data = pb_builder.decode(data)
@@ -109,7 +104,7 @@ describe LogStash::Codecs::Protobuf do
   it "should return protobuf encoded data for testcase 3" do
 
     subject.on_event do |event, data|
-
+      expect(data).to be_a(String)
 
      pb_builder = Google::Protobuf::DescriptorPool.generated_pool.lookup("something.rum_akamai.ProtoAkamaiRum").msgclass
      decoded_data = pb_builder.decode(data)
@@ -160,7 +155,7 @@ describe LogStash::Codecs::Protobuf do
   it "should fix datatypes to match the protobuf definition" do
 
     subject.on_event do |event, data|
-
+      expect(data).to be_a(String)
 
      pb_builder = Google::Protobuf::DescriptorPool.generated_pool.lookup("something.rum_akamai.ProtoAkamai2Rum").msgclass
      decoded_data = pb_builder.decode(data)
@@ -199,7 +194,7 @@ context "encodePB3-e" do
   it "should ignore empty fields" do
 
     subject.on_event do |event, data|
-
+      expect(data).to be_a(String)
 
      pb_builder = Google::Protobuf::DescriptorPool.generated_pool.lookup("something.rum_akamai.ProtoAkamai3Rum").msgclass
      decoded_data = pb_builder.decode(data)
@@ -216,5 +211,33 @@ context "encodePB3-e" do
 
 
 
+context "encodePB3-f" do
+
+  #### Test case 5: handle additional fields (discard event without crashing pipeline) ####################################################################################################################
+
+  subject do
+    next LogStash::Codecs::Protobuf.new("class_name" => "something.rum_akamai.ProtoAkamai3Rum",
+      "pb3_encoder_autoconvert_types" => false,
+      "include_path" => [pb_include_path + '/pb3/rum3_pb.rb' ], "protobuf_version" => 3)
+  end
+
+  event = LogStash::Event.new(
+    "domain" => nil, "bot" => "This field does not exist in the protobuf definition",
+    "header" => {"sender_id" => "23"},
+    "geo"=>{"organisation"=>"Jio", "rg"=>"DL", "netspeed"=>nil, "city"=>nil, "cc"=>"IN", "ovr"=>false, "postalcode"=>"110012", "isp"=>"Jio"}
+  )
+
+  it "should not return data" do
+
+    subject.on_event do |event, data|
+      expect("the on_event method should not be called").to eq("so this code should never be reached")
+    end
+    subject.encode(event)
+  end # it
+
+end # context #encodePB3-f
+
+
 
-end # describe
+end # describe
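The encodePB3-f test above exercises the encoder bugfix shipped in 1.2.4/1.2.5: when encoding fails, the event is logged and dropped instead of crashing the pipeline, so `on_event` is never invoked (with `stop_on_error => true` the exception would be re-raised instead). A minimal sketch of that behaviour from the caller's side, reusing the FantasyHorse spec helper below; the root directory path is an assumption:

require "logstash/codecs/protobuf"
require "logstash/event"

codec = LogStash::Codecs::Protobuf.new(
  "class_name"                    => "FantasyHorse",
  "class_file"                    => "pb3/FantasyHorse_pb.rb",
  "protobuf_root_directory"       => File.expand_path("spec/helpers"),   # assumed location
  "protobuf_version"              => 3,
  "pb3_encoder_autoconvert_types" => false
)
codec.register
codec.on_event { |event, bytes| puts "encoded #{bytes.bytesize} bytes" }

# horn_length is an int32 in FantasyUnicorn; with autoconversion off the type error is
# caught inside the codec, the event is discarded, and the block above never fires.
codec.encode(LogStash::Event.new("name" => "Karlheinz", "unicorn" => {"horn_length" => "thirty"}))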
data/spec/helpers/pb3/FantasyHorse_pb.rb
@@ -0,0 +1,44 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+
+begin; require 'google/protobuf'; rescue LoadError; end
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+  add_message "FantasyHorse" do
+    optional :name, :string, 1
+    oneof :horse_type do
+      optional :unicorn, :message, 2, "FantasyUnicorn"
+      optional :pegasus, :message, 3, "FantasyPegasus"
+    end
+    optional :tail, :message, 4, "FantasyHorseTail"
+  end
+  add_message "FantasyUnicorn" do
+    optional :horn_length, :int32, 1
+    optional :horn_colour, :string, 2
+  end
+  add_message "FantasyPegasus" do
+    optional :wings_length, :int32, 1
+    optional :wings_width, :int32, 2
+    optional :wings_feathers, :string, 3
+  end
+  add_message "FantasyHorseTail" do
+    optional :tail_length, :int32, 1
+    oneof :hair_type do
+      optional :braided, :message, 2, "BraidedHorseTail"
+      optional :natural, :message, 3, "NaturalHorseTail"
+    end
+  end
+  add_message "BraidedHorseTail" do
+    optional :braiding_style, :string, 1
+    optional :braid_thickness, :int32, 2
+  end
+  add_message "NaturalHorseTail" do
+    optional :wavyness, :string, 1
+  end
+end
+
+FantasyHorse = Google::Protobuf::DescriptorPool.generated_pool.lookup("FantasyHorse").msgclass
+FantasyUnicorn = Google::Protobuf::DescriptorPool.generated_pool.lookup("FantasyUnicorn").msgclass
+FantasyPegasus = Google::Protobuf::DescriptorPool.generated_pool.lookup("FantasyPegasus").msgclass
+FantasyHorseTail = Google::Protobuf::DescriptorPool.generated_pool.lookup("FantasyHorseTail").msgclass
+BraidedHorseTail = Google::Protobuf::DescriptorPool.generated_pool.lookup("BraidedHorseTail").msgclass
+NaturalHorseTail = Google::Protobuf::DescriptorPool.generated_pool.lookup("NaturalHorseTail").msgclass
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-codec-protobuf
 version: !ruby/object:Gem::Version
-  version: 1.2.
+  version: 1.2.5
 platform: ruby
 authors:
 - Inga Feick
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-02-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -102,8 +102,8 @@ files:
 - spec/helpers/pb2/human.pb.rb
 - spec/helpers/pb2/unicorn.pb.rb
 - spec/helpers/pb2/unicorn_event.pb.rb
+- spec/helpers/pb3/FantasyHorse_pb.rb
 - spec/helpers/pb3/ProbeResult_pb.rb
-- spec/helpers/pb3/ReservationEntry_pb.rb
 - spec/helpers/pb3/dnsmessage_pb.rb
 - spec/helpers/pb3/events.proto3
 - spec/helpers/pb3/events_pb.rb
@@ -141,7 +141,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.7.
+rubygems_version: 2.7.10
 signing_key:
 specification_version: 4
 summary: Reads protobuf messages and converts to Logstash Events
@@ -158,8 +158,8 @@ test_files:
 - spec/helpers/pb2/human.pb.rb
 - spec/helpers/pb2/unicorn.pb.rb
 - spec/helpers/pb2/unicorn_event.pb.rb
+- spec/helpers/pb3/FantasyHorse_pb.rb
 - spec/helpers/pb3/ProbeResult_pb.rb
-- spec/helpers/pb3/ReservationEntry_pb.rb
 - spec/helpers/pb3/dnsmessage_pb.rb
 - spec/helpers/pb3/events.proto3
 - spec/helpers/pb3/events_pb.rb
data/spec/helpers/pb3/ReservationEntry_pb.rb
@@ -1,64 +0,0 @@
-require 'google/protobuf'
-
-Google::Protobuf::DescriptorPool.generated_pool.build do
-  add_message "bk.sk.pk.reservations.proto.types.v0.ReservationEntry" do
-    optional :id, :string, 1
-    optional :internalAccountId, :string, 2
-    optional :state, :enum, 3, "bk.sk.pk.reservations.proto.types.v0.ReservationState"
-    optional :instructedAmount, :message, 4, "bk.sk.pk.reservations.proto.types.v0.InstructedAmount"
-    optional :lifetime, :message, 5, "bk.sk.pk.reservations.proto.types.v0.Lifetime"
-    optional :requestor, :message, 6, "bk.sk.pk.reservations.proto.types.v0.Requestor"
-    optional :description, :message, 7, "bk.sk.pk.reservations.proto.types.v0.Description"
-    optional :forceMarker, :bool, 8
-    optional :creationTimestamp, :string, 9
-  end
-  add_message "bk.sk.pk.reservations.proto.types.v0.Requestor" do
-    optional :productCode, :string, 1
-    optional :systemCode, :string, 2
-    optional :init, :string, 3
-  end
-  add_message "bk.sk.pk.reservations.proto.types.v0.Lifetime" do
-    optional :startDateTime, :string, 1
-    optional :endDateTime, :string, 2
-  end
-  add_message "bk.sk.pk.reservations.proto.types.v0.InstructedAmount" do
-    optional :amount, :message, 1, "bk.sk.pk.reservations.proto.types.v0.DecimalNumber"
-    optional :currency, :string, 2
-  end
-  add_message "bk.sk.pk.reservations.proto.types.v0.DecimalNumber" do
-    optional :unscaledValue, :int64, 1
-    optional :scale, :int32, 2
-  end
-  add_message "bk.sk.pk.reservations.proto.types.v0.Description" do
-    optional :text1, :string, 1
-    optional :text2, :string, 2
-  end
-  add_enum "bk.sk.pk.reservations.proto.types.v0.ReservationState" do
-    value :RESERVED, 0
-    value :CANCELED, 1
-    value :CONSUMED, 2
-    value :EXPIRED, 3
-  end
-end
-
-module Bk
-  module Sk
-    module Pk
-      module Reservations
-        module Proto
-          module Types
-            module V0
-              ReservationEntry = Google::Protobuf::DescriptorPool.generated_pool.lookup("bk.sk.pk.reservations.proto.types.v0.ReservationEntry").msgclass
-              Requestor = Google::Protobuf::DescriptorPool.generated_pool.lookup("bk.sk.pk.reservations.proto.types.v0.Requestor").msgclass
-              Lifetime = Google::Protobuf::DescriptorPool.generated_pool.lookup("bk.sk.pk.reservations.proto.types.v0.Lifetime").msgclass
-              InstructedAmount = Google::Protobuf::DescriptorPool.generated_pool.lookup("bk.sk.pk.reservations.proto.types.v0.InstructedAmount").msgclass
-              DecimalNumber = Google::Protobuf::DescriptorPool.generated_pool.lookup("bk.sk.pk.reservations.proto.types.v0.DecimalNumber").msgclass
-              Description = Google::Protobuf::DescriptorPool.generated_pool.lookup("bk.sk.pk.reservations.proto.types.v0.Description").msgclass
-              ReservationState = Google::Protobuf::DescriptorPool.generated_pool.lookup("bk.sk.pk.reservations.proto.types.v0.ReservationState").enummodule
-            end
-          end
-        end
-      end
-    end
-  end
-end