logstash-codec-protobuf 1.0.3 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/LICENSE +1 -1
- data/README.md +1 -3
- data/lib/logstash/codecs/protobuf.rb +220 -63
- data/lib/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar +0 -0
- data/lib/org/apache/kafka/kafka-clients/0.11.0.0/kafka-clients-0.11.0.0.jar +0 -0
- data/lib/org/apache/logging/log4j/log4j-api/2.8.2/log4j-api-2.8.2.jar +0 -0
- data/lib/org/apache/logging/log4j/log4j-slf4j-impl/2.8.2/log4j-slf4j-impl-2.8.2.jar +0 -0
- data/lib/org/slf4j/slf4j-api/1.7.24/slf4j-api-1.7.24.jar +0 -0
- data/lib/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar +0 -0
- data/lib/org/xerial/snappy/snappy-java/1.1.2.6/snappy-java-1.1.2.6.jar +0 -0
- data/logstash-codec-protobuf.gemspec +4 -3
- data/spec/codecs/protobuf3_spec.rb +147 -0
- data/spec/codecs/protobuf_spec.rb +18 -14
- data/spec/helpers/{ColourTestcase.pb.rb → pb2/ColourTestcase.pb.rb} +0 -0
- data/spec/helpers/{ColourTestcase.proto → pb2/ColourTestcase.proto} +0 -0
- data/spec/helpers/{human.pb.rb → pb2/human.pb.rb} +0 -0
- data/spec/helpers/{unicorn.pb.rb → pb2/unicorn.pb.rb} +0 -0
- data/spec/helpers/{unicorn_event.pb.rb → pb2/unicorn_event.pb.rb} +0 -0
- data/spec/helpers/pb3/unicorn.proto3 +31 -0
- data/spec/helpers/pb3/unicorn_pb.rb +31 -0
- data/vendor/jar-dependencies/runtime-jars/kafka-clients-0.11.0.0.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/log4j-api-2.8.2.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.8.2.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lz4-1.3.0.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.24.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.25.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/snappy-java-1.1.2.6.jar +0 -0
- metadata +58 -24
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a623045ad79469eeefd501735a5fc3284c19eef3
+  data.tar.gz: 1a4b69f9ae131f5abe2b55270073c975edcbeab3
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0e152a7b9675dbd1c430da257a340364944f71b430bd093913705f376971650cc46933395a492f38732dc3665472ca56ebdc3e995d0c5023236b67466a7dc835
+  data.tar.gz: a55114ffbc512e1ee31748093f864c1683e33536f590ded503cbae5ca2ec4f61754e7d518efc722593c9af4040270438f40fa05d3fa03ae27e334d1a61ff520c
data/CHANGELOG.md
CHANGED
data/LICENSE
CHANGED
@@ -187,7 +187,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.
 
-   Copyright (c) 2012
+   Copyright (c) 2012-2018 Elasticsearch http://www.elastic.co
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
data/README.md
CHANGED
@@ -5,9 +5,7 @@ This is a codec plugin for [Logstash](https://github.com/elastic/logstash) to pa
 # Prerequisites and Installation
 
 * prepare your ruby versions of the protobuf definitions, for example using the ruby-protoc compiler from https://github.com/codekitchen/ruby-protocol-buffers
-*
-* Install the plugin. From within your logstash directory, do
-  bin/plugin install /path/to/logstash-codec-protobuf-$VERSION.gem
+* install the codec: `bin/logstash-plugin install logstash-codec-protobuf`
 * use the codec in your logstash config file. See details below.
 
 ## Configuration
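The README change above keeps the pipeline details out of this diff, but the new protobuf 3 spec further down shows the minimal way to drive the codec: point include_path at a generated *_pb.rb file, name the top-level message in class_name, and switch protobuf_version_3. A small sketch along those lines, using the Unicorn fixture shipped with this gem; protobuf_bytes and output are placeholders for whatever the surrounding code provides:

    # Sketch only, mirroring the usage in the new protobuf3_spec.rb below.
    require "logstash/codecs/protobuf"
    require "logstash/event"

    codec = LogStash::Codecs::Protobuf.new(
      "class_name"         => "Unicorn",                          # message class from the compiled definition
      "include_path"       => ["spec/helpers/pb3/unicorn_pb.rb"], # output of protoc --ruby_out=.
      "protobuf_version_3" => true                                # false (the default) selects the protobuf 2 path
    )
    codec.register

    # Decoding: raw protobuf bytes in, LogStash::Event out.
    codec.decode(protobuf_bytes) { |event| puts event.get("name") } # protobuf_bytes: placeholder for wire data

    # Encoding: the on_event callback receives the serialized bytes.
    codec.on_event { |event, bytes| output.write(bytes) }           # output: placeholder sink
    codec.encode(LogStash::Event.new("name" => "Pinkie", "age" => 18))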
data/lib/logstash/codecs/protobuf.rb
CHANGED
@@ -1,7 +1,8 @@
 # encoding: utf-8
 require 'logstash/codecs/base'
 require 'logstash/util/charset'
-require '
+require 'google/protobuf' # for protobuf3
+require 'protocol_buffers' # https://github.com/codekitchen/ruby-protocol-buffers, for protobuf2
 
 # This codec converts protobuf encoded messages into logstash events and vice versa.
 #
@@ -65,102 +66,255 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
   #
   config :include_path, :validate => :array, :required => true
 
+  # Protocol buffer version switch. Set to false (default) for version 2. Please note that the behaviour for enums varies between the versions.
+  # For protobuf 2 you will get integer representations for enums, for protobuf 3 you'll get string representations due to a different converter library.
+  # Recommendation: use the translate plugin to restore previous behaviour when upgrading.
+  config :protobuf_version_3, :validate => :boolean, :required => true, :default=>false
+
 
   def register
-    @
-
-
-    @
+    @metainfo_messageclasses = {}
+    @metainfo_enumclasses = {}
+    include_path.each { |path| load_protobuf_definition(path) }
+    if @protobuf_version_3
+      @pb_builder = Google::Protobuf::DescriptorPool.generated_pool.lookup(class_name).msgclass
+    else
+      @pb_builder = pb2_create_instance(class_name)
+    end
   end
 
 
   def decode(data)
     begin
-
-
+      if @protobuf_version_3
+        decoded = @pb_builder.decode(data.to_s)
+        h = pb3_deep_to_hash(decoded)
+      else
+        decoded = @pb_builder.parse(data.to_s)
+        h = decoded.to_hash
+      end
+      yield LogStash::Event.new(h) if block_given?
     rescue => e
       @logger.warn("Couldn't decode protobuf: #{e.inspect}.")
-
+      raise e
     end
   end # def decode
 
 
   def encode(event)
-
-
+    if @protobuf_version_3
+      protobytes = pb3_encode_wrapper(event)
+    else
+      protobytes = pb2_encode_wrapper(event)
+    end
+    @on_event.call(event, protobytes)
   end # def encode
 
 
   private
-  def
+  def pb3_deep_to_hash(input)
+    if input.class.ancestors.include? Google::Protobuf::MessageExts # it's a protobuf class
+      result = Hash.new
+      input.to_hash.each {|key, value|
+        result[key] = pb3_deep_to_hash(value) # the key is required for the class lookup of enums.
+      }
+    elsif input.kind_of?(Array)
+      result = []
+      input.each {|value|
+        result << pb3_deep_to_hash(value)
+      }
+    elsif input.kind_of?(::Hash)
+      result = {}
+      input.each {|key, value|
+        result[key] = pb3_deep_to_hash(value)
+      }
+    elsif input.instance_of? Symbol # is an Enum
+      result = input.to_s.sub(':','')
+    else
+      result = input
+    end
+    result
+  end
+
+  def pb3_encode_wrapper(event)
     begin
-      data =
-
-
-    rescue
-      @logger.debug("error 2
+      data = pb3_encode(event.to_hash, @class_name)
+      pb_obj = @pb_builder.new(data)
+      @pb_builder.encode(pb_obj)
+    rescue ArgumentError => e
+      @logger.debug("Encoding error 2. Probably mismatching protobuf definition. Required fields in the protobuf definition are: " + event.to_hash.keys.join(", ") + " and the timestamp field name must not include a @. ")
+      raise e
     rescue => e
       @logger.debug("Couldn't generate protobuf: ${e}")
+      raise e
     end
   end
 
 
-  def
-
-
-
-
-
-
-
+  def pb3_encode(datahash, class_name)
+    next unless datahash.is_a?(::Hash)
+
+    # Preparation: the data cannot be encoded until certain criteria are met:
+    # 1) remove @ signs from keys.
+    # 2) convert timestamps and other objects to strings
+    datahash = datahash.inject({}){|x,(k,v)| x[k.gsub(/@/,'').to_sym] = (should_convert_to_string?(v) ? v.to_s : v); x}
+
+    # Check if any of the fields in this hash are protobuf classes and if so, create a builder for them.
+    meta = @metainfo_messageclasses[class_name]
+    if meta
+      meta.map do | (field_name,class_name) |
+        key = field_name.to_sym
+        if datahash.include?(key)
+          original_value = datahash[key]
+          datahash[key] =
+            if original_value.is_a?(::Array)
+              # make this field an array/list of protobuf objects
+              # value is a list of hashed complex objects, each of which needs to be protobuffed and
+              # put back into the list.
+              original_value.map { |x| pb3_encode(x, class_name) }
+              original_value
+            else
+              r = pb3_encode(original_value, class_name)
+              builder = Google::Protobuf::DescriptorPool.generated_pool.lookup(class_name).msgclass
+              builder.new(r)
+            end # if is array
+        end # if datahash_include
+      end # do
+    end # if meta
+    # Check if any of the fields in this hash are enum classes and if so, create a builder for them.
+    meta = @metainfo_enumclasses[class_name]
+    if meta
+      meta.map do | (field_name,class_name) |
+        key = field_name.to_sym
+        if datahash.include?(key)
+          original_value = datahash[key]
+          datahash[key] =
             if original_value.is_a?(::Array)
-
-              # value is a list of hashed complex objects, each of which needs to be protobuffed and
-              # put back into the list.
-              original_value.map { |x| _encode(x, typeinfo) }
+              original_value.map { |x| pb3_encode(x, class_name) }
              original_value
-            else
-
-
+            else
+              if original_value.is_a?(Fixnum)
+                original_value # integers will be automatically converted into enum
+              else
+                # feature request: support for providing integers as strings or symbols.
+                # not fully tested yet:
+                # begin
+                #   enum_lookup_name = "#{class_name}::#{original_value}"
+                #   enum_lookup_name.split('::').inject(Object) do |mod, class_name|
+                #     mod.const_get(class_name)
+                #   end # do
+                # rescue => e
+                #   @logger.debug("Encoding error 3: could not translate #{original_value} into enum. ${e}")
+                #   raise e
+                # end
+              end # if is a fixnum
            end # if is array
-
-
-
+        end # if datahash_include
+      end # do
+    end # if meta
+    datahash
+  end
+
+  def pb2_encode_wrapper(event)
+    begin
+      data = pb2_encode(event.to_hash, @class_name)
+      msg = @pb_builder.new(data)
+      msg.serialize_to_string
+    rescue NoMethodError => e
+      @logger.debug("Encoding error 2. Probably mismatching protobuf definition. Required fields in the protobuf definition are: " + event.to_hash.keys.join(", ") + " and the timestamp field name must not include a @. ")
+      raise e
+    rescue => e
+      @logger.debug("Encoding error 1: ${e}")
+      raise e
+    end
   end
 
 
-  def
-
-
+  def pb2_encode(datahash, class_name)
+    next unless datahash.is_a?(::Hash)
+
+    # Preparation: the data cannot be encoded until certain criteria are met:
+    # 1) remove @ signs from keys.
     # 2) convert timestamps and other objects to strings
-
-
+    datahash = ::Hash[datahash.map{|(k,v)| [k.to_s.dup.gsub(/@/,''), (should_convert_to_string?(v) ? v.to_s : v)] }]
+
+    # Check if any of the fields in this hash are protobuf classes and if so, create a builder for them.
+    meta = @metainfo_messageclasses[class_name]
+    if meta
+      meta.map do | (k,class_name) |
+        if datahash.include?(k)
+          original_value = datahash[k]
+          p
+          datahash[k] =
+            if original_value.is_a?(::Array)
+              # make this field an array/list of protobuf objects
+              # value is a list of hashed complex objects, each of which needs to be protobuffed and
+              # put back into the list.
+              original_value.map { |x| pb2_encode(x, class_name) }
+              original_value
+            else
+              proto_obj = pb2_create_instance(class_name)
+              proto_obj.new(pb2_encode(original_value, class_name))
+            end # if is array
+        end # if datahash_include
+      end # do
+    end # if meta
+
+    datahash
   end
 
 
-  def
+  def should_convert_to_string?(v)
     !(v.is_a?(Fixnum) || v.is_a?(::Hash) || v.is_a?(::Array) || [true, false].include?(v))
   end
 
-
-  def remove_atchar(key) # necessary for @timestamp fields and the likes. Protobuf definition doesn't handle @ in field names well.
-    key.dup.gsub(/@/,'')
-  end
-
 
-  def
+  def pb2_create_instance(name)
     begin
       @logger.debug("Creating instance of " + name)
       name.split('::').inject(Object) { |n,c| n.const_get c }
     end
   end
 
-
-
+
+  def pb3_metadata_analyis(filename)
+    regex_class_name = /\s*add_message "(?<name>.+?)" do\s+/ # TODO optimize both regexes for speed (negative lookahead)
+    regex_pbdefs = /\s*(optional|repeated)(\s*):(?<name>.+),(\s*):(?<type>\w+),(\s*)(?<position>\d+)(, \"(?<enum_class>.*?)\")?/
+    # Example
+    # optional :father, :message, 10, "Unicorn"
+    # repeated :favourite_numbers, :int32, 5
+    begin
+      class_name = ""
+      type = ""
+      field_name = ""
+      File.readlines(filename).each do |line|
+        if ! (line =~ regex_class_name).nil?
+          class_name = $1
+          @metainfo_messageclasses[class_name] = {}
+          @metainfo_enumclasses[class_name] = {}
+        end
+        if ! (line =~ regex_pbdefs).nil?
+          field_name = $1
+          type = $2
+          field_class_name = $4
+          if type == "message"
+            @metainfo_messageclasses[class_name][field_name] = field_class_name
+          elsif type == "enum"
+            @metainfo_enumclasses[class_name][field_name] = field_class_name
+          end
+        end
+      end
+    rescue Exception => e
+      @logger.warn("Error 3: unable to read pb definition from file " + filename+ ". Reason: #{e.inspect}. Last settings were: class #{class_name} field #{field_name} type #{type}. Backtrace: " + e.backtrace.inspect.to_s)
+      raise e
+    end
+    if class_name.nil?
+      @logger.warn("Error 4: class name not found in file " + filename)
+      raise ArgumentError, "Invalid protobuf file: " + filename
+    end
   end
 
-  def
-    require filename
+  def pb2_metadata_analyis(filename)
     regex_class_name = /\s*class\s*(?<name>.+?)\s+/
     regex_module_name = /\s*module\s*(?<name>.+?)\s+/
     regex_pbdefs = /\s*(optional|repeated)(\s*):(?<type>.+),(\s*):(?<name>\w+),(\s*)(?<position>\d+)/
@@ -179,37 +333,40 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
         end
         if ! (line =~ regex_class_name).nil? && !classname_found # because it might be declared twice in the file
           class_name << $1
-          @
+          @metainfo_messageclasses[class_name] = {}
           classname_found = true
         end
         if ! (line =~ regex_pbdefs).nil?
           type = $1
           field_name = $2
           if type =~ /::/
-            @
+            @metainfo_messageclasses[class_name][field_name] = type.gsub!(/^:/,"")
 
           end
         end
       end
     rescue Exception => e
-      @logger.warn("
+      @logger.warn("Error 3: unable to read pb definition from file " + filename+ ". Reason: #{e.inspect}. Last settings were: class #{class_name} field #{field_name} type #{type}. Backtrace: " + e.backtrace.inspect.to_s)
+      raise e
     end
     if class_name.nil?
-      @logger.warn("
+      @logger.warn("Error 4: class name not found in file " + filename)
+      raise ArgumentError, "Invalid protobuf file: " + filename
    end
   end
 
-  def
-    f = dir_or_file.end_with? ('.rb')
+  def load_protobuf_definition(filename)
     begin
-      if
-        @logger.debug("Including protobuf file: " +
-
+      if filename.end_with? ('.rb')
+        @logger.debug("Including protobuf file: " + filename)
+        require filename
+        if @protobuf_version_3
+          pb3_metadata_analyis(filename)
+        else
+          pb2_metadata_analyis(filename)
+        end
       else
-
-        @logger.debug("Including protobuf path: " + dir_or_file + "/" + file)
-        require_with_metadata_analysis file
-      }
+        @logger.warn("Not a ruby file: " + filename)
      end
    end
  end
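The metadata analysis added above never inspects protobuf descriptors at run time; it scans each generated definition file line by line and records which fields are nested messages or enums, keyed by the enclosing message name. For the protobuf 3 variant, this is what regex_pbdefs captures for the example lines quoted in the code (an irb-style sketch, nothing more):

    # Sketch: what the pb3 field regex extracts from a generated *_pb.rb line.
    regex_pbdefs = /\s*(optional|repeated)(\s*):(?<name>.+),(\s*):(?<type>\w+),(\s*)(?<position>\d+)(, \"(?<enum_class>.*?)\")?/

    m = 'optional :father, :message, 10, "Unicorn"'.match(regex_pbdefs)
    m[:name]       # => "father"
    m[:type]       # => "message"
    m[:enum_class] # => "Unicorn"   recorded as @metainfo_messageclasses["Unicorn"]["father"] = "Unicorn"

    m = 'repeated :favourite_numbers, :int32, 5'.match(regex_pbdefs)
    m[:type]       # => "int32"     neither "message" nor "enum", so nothing is recorded for this field

pb3_encode later uses these entries to look up the msgclass for nested hashes before handing the whole structure to the top-level builder.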
Binary file (×6): added jar dependencies, no textual diff shown.
data/logstash-codec-protobuf.gemspec
CHANGED
@@ -1,9 +1,9 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-codec-protobuf'
-  s.version = '1.0.
+  s.version = '1.0.5'
   s.licenses = ['Apache License (2.0)']
-  s.summary = "
+  s.summary = "Reads protobuf messages and converts to Logstash Events"
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
   s.authors = ["Inga Feick"]
   s.email = 'inga.feick@trivago.com'
@@ -20,7 +20,8 @@ Gem::Specification.new do |s|
 
   # Gem dependencies
   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
-  s.add_runtime_dependency '
+  s.add_runtime_dependency 'google-protobuf', '3.1'
+  s.add_runtime_dependency 'ruby-protocol-buffers' # for protobuf 2
   s.add_development_dependency 'logstash-devutils'
 end
 
data/spec/codecs/protobuf3_spec.rb
ADDED
@@ -0,0 +1,147 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/codecs/protobuf"
+require "logstash/event"
+require "insist"
+
+
+require 'google/protobuf' # for protobuf3
+
+
+describe LogStash::Codecs::Protobuf do
+
+
+  context "#decodePB3" do
+
+
+    #### Test case 1: Decode simple protobuf ######################################
+    let(:plugin_unicorn) { LogStash::Codecs::Protobuf.new("class_name" => "Unicorn", "include_path" => ['spec/helpers/pb3/unicorn_pb.rb'], "protobuf_version_3" => true) }
+    before do
+      plugin_unicorn.register
+    end
+
+    it "should return an event from protobuf encoded data" do
+
+      unicorn_class = Google::Protobuf::DescriptorPool.generated_pool.lookup("Unicorn").msgclass
+      data = {:name => 'Pinkie', :age => 18, :is_pegasus => false, :favourite_numbers => [4711,23], :fur_colour => Colour::PINK,
+        :favourite_colours => [Colour::GREEN, Colour::BLUE]
+      }
+
+      unicorn_object = unicorn_class.new(data)
+      bin = unicorn_class.encode(unicorn_object)
+      plugin_unicorn.decode(bin) do |event|
+        expect(event.get("name") ).to eq(data[:name] )
+        expect(event.get("age") ).to eq(data[:age])
+        expect(event.get("fur_colour") ).to eq("PINK")
+        expect(event.get("favourite_numbers") ).to eq(data[:favourite_numbers])
+        expect(event.get("favourite_colours") ).to eq(["GREEN","BLUE"])
+        expect(event.get("is_pegasus") ).to eq(data[:is_pegasus] )
+      end
+    end # it
+
+
+
+
+
+    #### Test case 2: decode nested protobuf ######################################
+    let(:plugin_unicorn) { LogStash::Codecs::Protobuf.new("class_name" => "Unicorn", "include_path" => ['spec/helpers/pb3/unicorn_pb.rb'], "protobuf_version_3" => true) }
+    before do
+      plugin_unicorn.register
+    end
+
+    it "should return an event from protobuf encoded data with nested classes" do
+
+
+      unicorn_class = Google::Protobuf::DescriptorPool.generated_pool.lookup("Unicorn").msgclass
+
+      father = unicorn_class.new({:name=> "Sparkle", :age => 50, :fur_colour => 3 })
+      data = {:name => 'Glitter', :fur_colour => Colour::GLITTER, :father => father}
+
+      unicorn_object = unicorn_class.new(data)
+      bin = unicorn_class.encode(unicorn_object)
+      plugin_unicorn.decode(bin) do |event|
+        expect(event.get("name") ).to eq(data[:name] )
+        expect(event.get("fur_colour") ).to eq("GLITTER" )
+        expect(event.get("father")["name"] ).to eq(data[:father][:name] )
+        expect(event.get("father")["age"] ).to eq(data[:father][:age] )
+        expect(event.get("father")["fur_colour"] ).to eq("SILVER")
+
+      end
+    end # it
+
+
+  end # context #decodePB3
+
+
+  context "#encodePB3-a" do
+
+    #### Test case 3: encode simple protobuf ######################################
+
+    definitions_file = 'spec/helpers/pb3/unicorn_pb.rb'
+    require definitions_file
+
+    subject do
+      next LogStash::Codecs::Protobuf.new("class_name" => "Unicorn", "include_path" => [definitions_file], "protobuf_version_3" => true)
+    end
+
+    event3 = LogStash::Event.new("name" => "Pinkie", "age" => 18, "is_pegasus" => false, "favourite_numbers" => [1,2,3], "fur_colour" => Colour::PINK, "favourite_colours" => [1,5] )
+
+    it "should return protobuf encoded data for testcase 3" do
+
+      subject.on_event do |event, data|
+        insist { data.is_a? String }
+
+        pb_builder = Google::Protobuf::DescriptorPool.generated_pool.lookup("Unicorn").msgclass
+        decoded_data = pb_builder.decode(data)
+        expect(decoded_data.name ).to eq(event.get("name") )
+        expect(decoded_data.age ).to eq(event.get("age") )
+        expect(decoded_data.is_pegasus ).to eq(event.get("is_pegasus") )
+        expect(decoded_data.fur_colour ).to eq(:PINK)
+        expect(decoded_data.favourite_numbers ).to eq(event.get("favourite_numbers") )
+        expect(decoded_data.favourite_colours ).to eq([:BLUE,:WHITE] )
+      end # subject.on_event
+      subject.encode(event3)
+    end # it
+
+  end # context
+
+  context "#encodePB3-b" do
+
+    #### Test case 4: encode nested protobuf ######################################
+
+    definitions_file = 'spec/helpers/pb3/unicorn_pb.rb'
+    require definitions_file
+
+    subject do
+      next LogStash::Codecs::Protobuf.new("class_name" => "Unicorn", "include_path" => [definitions_file], "protobuf_version_3" => true)
+    end
+
+    event4 = LogStash::Event.new("name" => "Horst", "age" => 23, "is_pegasus" => true, "mother" => \
+      {"name" => "Mom", "age" => 47}, "father" => {"name"=> "Daddy", "age"=> 50, "fur_colour" => 3 } # 3 == SILVER
+    )
+
+    it "should return protobuf encoded data for testcase 4" do
+
+      subject.on_event do |event, data|
+        insist { data.is_a? String }
+
+        pb_builder = Google::Protobuf::DescriptorPool.generated_pool.lookup("Unicorn").msgclass
+        decoded_data = pb_builder.decode(data)
+
+        expect(decoded_data.name ).to eq(event.get("name") )
+        expect(decoded_data.age ).to eq(event.get("age") )
+        expect(decoded_data.is_pegasus ).to eq(event.get("is_pegasus") )
+        expect(decoded_data.mother.name ).to eq(event.get("mother")["name"] )
+        expect(decoded_data.mother.age ).to eq(event.get("mother")["age"] )
+        expect(decoded_data.father.name ).to eq(event.get("father")["name"] )
+        expect(decoded_data.father.age ).to eq(event.get("father")["age"] )
+        expect(decoded_data.father.fur_colour ).to eq(:SILVER)
+
+
+      end # subject4.on_event
+      subject.encode(event4)
+    end # it
+
+  end # context #encodePB3
+
+end # describe
data/spec/codecs/protobuf_spec.rb
CHANGED
@@ -2,17 +2,21 @@
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/codecs/protobuf"
 require "logstash/event"
-require 'protocol_buffers' # https://github.com/codekitchen/ruby-protocol-buffers
 require "insist"
 
+
+require 'protocol_buffers' # https://github.com/codekitchen/ruby-protocol-buffers, for protobuf2
+
+
 describe LogStash::Codecs::Protobuf do
 
 
-
+
+  context "#decodePB2" do
 
 
   #### Test case 1: Decode simple protobuf bytes for unicorn ######################
-  let(:plugin_unicorn) { LogStash::Codecs::Protobuf.new("class_name" => "Animal::Unicorn", "include_path" => ['spec/helpers/unicorn.pb.rb']) }
+  let(:plugin_unicorn) { LogStash::Codecs::Protobuf.new("class_name" => "Animal::Unicorn", "include_path" => ['spec/helpers/pb2/unicorn.pb.rb']) }
   before do
     plugin_unicorn.register
   end
@@ -37,7 +41,7 @@ describe LogStash::Codecs::Protobuf do
 
 
 
-  let(:plugin_human) { LogStash::Codecs::Protobuf.new("class_name" => "Animal::Human", "include_path" => ['spec/helpers/human.pb.rb']) }
+  let(:plugin_human) { LogStash::Codecs::Protobuf.new("class_name" => "Animal::Human", "include_path" => ['spec/helpers/pb2/human.pb.rb']) }
   before do
     plugin_human.register
   end
@@ -79,7 +83,7 @@ describe LogStash::Codecs::Protobuf do
 
 
 
-  let(:plugin_col) { LogStash::Codecs::Protobuf.new("class_name" => "ColourProtoTest", "include_path" => ['spec/helpers/ColourTestcase.pb.rb']) }
+  let(:plugin_col) { LogStash::Codecs::Protobuf.new("class_name" => "ColourProtoTest", "include_path" => ['spec/helpers/pb2/ColourTestcase.pb.rb']) }
   before do
     plugin_col.register
   end
@@ -98,7 +102,7 @@ describe LogStash::Codecs::Protobuf do
   end # it
 
 
-  end # context
+  end # context decodePB2
 
 
 
@@ -106,9 +110,9 @@ describe LogStash::Codecs::Protobuf do
 
   #### Test case 4: Encode simple protobuf bytes for unicorn ######################
 
-  context "#
+  context "#encodePB2-a" do
     subject do
-      next LogStash::Codecs::Protobuf.new("class_name" => "Animal::UnicornEvent", "include_path" => ['spec/helpers/unicorn_event.pb.rb'])
+      next LogStash::Codecs::Protobuf.new("class_name" => "Animal::UnicornEvent", "include_path" => ['spec/helpers/pb2/unicorn_event.pb.rb'])
     end
 
     event = LogStash::Event.new("colour" => "pink", "horn_length" => 12, "last_seen" => 1410081999, "has_wings" => true)
@@ -134,9 +138,9 @@ describe LogStash::Codecs::Protobuf do
   #### Test case 5: encode complex protobuf bytes for human #######################
 
 
-  context "#
+  context "#encodePB2-b" do
     subject do
-      next LogStash::Codecs::Protobuf.new("class_name" => "Animal::Human", "include_path" => ['spec/helpers/human.pb.rb'])
+      next LogStash::Codecs::Protobuf.new("class_name" => "Animal::Human", "include_path" => ['spec/helpers/pb2/human.pb.rb'])
     end
 
     event = LogStash::Event.new("first_name" => "Jimmy", "middle_names" => ["Bob", "James"], "last_name" => "Doe" \
@@ -174,12 +178,12 @@ describe LogStash::Codecs::Protobuf do
 
 
 
-  context "#
+  context "#encodePB2-c" do
     subject do
-      next LogStash::Codecs::Protobuf.new("class_name" => "ColourProtoTest", "include_path" => ['spec/helpers/ColourTestcase.pb.rb'])
+      next LogStash::Codecs::Protobuf.new("class_name" => "ColourProtoTest", "include_path" => ['spec/helpers/pb2/ColourTestcase.pb.rb'])
     end
 
-    require 'spec/helpers/ColourTestcase.pb.rb' # otherwise we cant use the colour enums in the next line
+    require 'spec/helpers/pb2/ColourTestcase.pb.rb' # otherwise we cant use the colour enums in the next line
     event = LogStash::Event.new("booleantest" => [false, false, true], "least_liked" => ColourProtoTest::Colour::YELLOW, "favourite_colours" => \
       [ColourProtoTest::Colour::BLACK, ColourProtoTest::Colour::BLUE] )
 
@@ -202,4 +206,4 @@ describe LogStash::Codecs::Protobuf do
 
 
 
-end
+end # describe
File without changes (×5): the spec/helpers protobuf 2 fixtures renamed into spec/helpers/pb2/, as listed above.
data/spec/helpers/pb3/unicorn.proto3
ADDED
@@ -0,0 +1,31 @@
+syntax = "proto3";
+
+// Compile: protoc --ruby_out=. spec/helpers/*proto3
+
+message Unicorn {
+
+  string name = 1;
+  uint32 age = 2;
+  Colour fur_colour = 3;
+  repeated Colour favourite_colours = 4;
+  repeated int32 favourite_numbers = 5;
+  bool is_pegasus = 6;
+  string timestamp = 7; # Logstash adds this to the event, needed for encoder test
+  string version = 8; # Logstash adds this to the event, needed for encoder test
+  Unicorn mother = 9;
+  Unicorn father = 10;
+}
+
+enum Colour {
+
+  BLUE = 1;
+  PINK = 2;
+  SILVER = 3;
+  GLITTER = 4;
+  WHITE = 5;
+  GREEN = 6;
+}
+
+// TODO test float datatypes aswell as single fields
+// TODO test class reference to other file as single field
+// TODO test a class as list
data/spec/helpers/pb3/unicorn_pb.rb
ADDED
@@ -0,0 +1,31 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: spec/helpers/unicorn.proto3
+
+require 'google/protobuf'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+  add_message "Unicorn" do
+    optional :name, :string, 1
+    optional :age, :uint32, 2
+    optional :fur_colour, :enum, 3, "Colour"
+    repeated :favourite_colours, :enum, 4, "Colour"
+    repeated :favourite_numbers, :int32, 5
+    optional :is_pegasus, :bool, 6
+    optional :timestamp, :string, 7
+    optional :version, :string, 8
+    optional :mother, :message, 9, "Unicorn"
+    optional :father, :message, 10, "Unicorn"
+  end
+  add_enum "Colour" do
+    value :UNDEFINED, 0
+    value :BLUE, 1
+    value :PINK, 2
+    value :SILVER, 3
+    value :GLITTER, 4
+    value :WHITE, 5
+    value :GREEN, 6
+  end
+end
+
+Unicorn = Google::Protobuf::DescriptorPool.generated_pool.lookup("Unicorn").msgclass
+Colour = Google::Protobuf::DescriptorPool.generated_pool.lookup("Colour").enummodule
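Requiring this generated file is enough to exercise the protobuf 3 fixtures by hand; the two constants at the bottom expose the message class and the enum module. A minimal round trip in the spirit of protobuf3_spec.rb, assuming it is run from the gem's source tree:

    require 'google/protobuf'
    require './spec/helpers/pb3/unicorn_pb'  # registers Unicorn and Colour in the DescriptorPool

    unicorn_class = Google::Protobuf::DescriptorPool.generated_pool.lookup("Unicorn").msgclass

    pinkie = unicorn_class.new(:name => "Pinkie", :age => 18,
                               :fur_colour => Colour::PINK, :favourite_numbers => [4711, 23])
    bytes = unicorn_class.encode(pinkie)    # binary payload, the same kind of data the codec's decode() receives
    unicorn_class.decode(bytes).fur_colour  # => :PINK, a Symbol; the codec's pb3_deep_to_hash turns it into "PINK"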
Binary file (×7): the jar dependencies under vendor/jar-dependencies/runtime-jars, no textual diff shown.
metadata
CHANGED
@@ -1,22 +1,22 @@
 --- !ruby/object:Gem::Specification
 name: logstash-codec-protobuf
 version: !ruby/object:Gem::Version
-  version: 1.0.
+  version: 1.0.5
 platform: ruby
 authors:
 - Inga Feick
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2018-02-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '1.60'
-    - -
+    - - <=
       - !ruby/object:Gem::Version
         version: '2.99'
   name: logstash-core-plugin-api
@@ -24,16 +24,30 @@ dependencies:
   type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '1.60'
-    - -
+    - - <=
       - !ruby/object:Gem::Version
         version: '2.99'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '='
+      - !ruby/object:Gem::Version
+        version: '3.1'
+  name: google-protobuf
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '='
+      - !ruby/object:Gem::Version
+        version: '3.1'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
   name: ruby-protocol-buffers
@@ -41,13 +55,13 @@ dependencies:
   type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
   name: logstash-devutils
@@ -55,7 +69,7 @@ dependencies:
   type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
 description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
@@ -73,13 +87,30 @@ files:
 - README.md
 - docs/index.asciidoc
 - lib/logstash/codecs/protobuf.rb
+- lib/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar
+- lib/org/apache/kafka/kafka-clients/0.11.0.0/kafka-clients-0.11.0.0.jar
+- lib/org/apache/logging/log4j/log4j-api/2.8.2/log4j-api-2.8.2.jar
+- lib/org/apache/logging/log4j/log4j-slf4j-impl/2.8.2/log4j-slf4j-impl-2.8.2.jar
+- lib/org/slf4j/slf4j-api/1.7.24/slf4j-api-1.7.24.jar
+- lib/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar
+- lib/org/xerial/snappy/snappy-java/1.1.2.6/snappy-java-1.1.2.6.jar
 - logstash-codec-protobuf.gemspec
+- spec/codecs/protobuf3_spec.rb
 - spec/codecs/protobuf_spec.rb
-- spec/helpers/ColourTestcase.pb.rb
-- spec/helpers/ColourTestcase.proto
-- spec/helpers/human.pb.rb
-- spec/helpers/unicorn.pb.rb
-- spec/helpers/unicorn_event.pb.rb
+- spec/helpers/pb2/ColourTestcase.pb.rb
+- spec/helpers/pb2/ColourTestcase.proto
+- spec/helpers/pb2/human.pb.rb
+- spec/helpers/pb2/unicorn.pb.rb
+- spec/helpers/pb2/unicorn_event.pb.rb
+- spec/helpers/pb3/unicorn.proto3
+- spec/helpers/pb3/unicorn_pb.rb
+- vendor/jar-dependencies/runtime-jars/kafka-clients-0.11.0.0.jar
+- vendor/jar-dependencies/runtime-jars/log4j-api-2.8.2.jar
+- vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.8.2.jar
+- vendor/jar-dependencies/runtime-jars/lz4-1.3.0.jar
+- vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.24.jar
+- vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.25.jar
+- vendor/jar-dependencies/runtime-jars/snappy-java-1.1.2.6.jar
 homepage:
 licenses:
 - Apache License (2.0)
@@ -92,24 +123,27 @@ require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - '>='
     - !ruby/object:Gem::Version
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - '>='
    - !ruby/object:Gem::Version
      version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.4.
+rubygems_version: 2.4.5
 signing_key:
 specification_version: 4
-summary:
+summary: Reads protobuf messages and converts to Logstash Events
 test_files:
+- spec/codecs/protobuf3_spec.rb
 - spec/codecs/protobuf_spec.rb
-- spec/helpers/ColourTestcase.pb.rb
-- spec/helpers/ColourTestcase.proto
-- spec/helpers/human.pb.rb
-- spec/helpers/unicorn.pb.rb
-- spec/helpers/unicorn_event.pb.rb
+- spec/helpers/pb2/ColourTestcase.pb.rb
+- spec/helpers/pb2/ColourTestcase.proto
+- spec/helpers/pb2/human.pb.rb
+- spec/helpers/pb2/unicorn.pb.rb
+- spec/helpers/pb2/unicorn_event.pb.rb
+- spec/helpers/pb3/unicorn.proto3
+- spec/helpers/pb3/unicorn_pb.rb