logstash-codec-protobuf 0.1.2 → 0.1.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 32ac341961786a035ed34e5e221de5e6944c2226
- data.tar.gz: 8bb727065267603b4d0d771e2ab1937e7eb83660
+ metadata.gz: 0571d20de470f854fce5c65a479811846d0c3223
+ data.tar.gz: 4d0447fadbd0d520ce33b299253712c7beae65d4
  SHA512:
- metadata.gz: ba83ef5a43fa82e25d6cb248fa8076982b43d833df1d2ee62ee69c045bedd2664ff04a5b112d880344ebaf470116a176ab008bbccad9f6aab368c30b513ce5d1
- data.tar.gz: 9699d09931ca8bac5d0242c4952b8496467d8c586032decdce6213ae956e43053b08039115a3deee7db7eaa74fd2d8a9700c32a12d794dcbaf338cbb01419c82
+ metadata.gz: defc91159982c93264d62fddfdbd4677dac242e2b6a49e310275c5cb7095e2b2be25f88cfd5e40f27381bce100711bab21ec9b96a76da6f5a7c838d481a438ee
+ data.tar.gz: 0923ffd9b10199f952ca12dad21e040a0b049b10129dd9332a3078a7269f73e9d8e7b163aaa7c8d205802ffe0c91aee861a47ea411c8d418e3cb8732cbf3f356
data/CHANGELOG.md CHANGED
@@ -0,0 +1,5 @@
+ ## 0.1.3
+ - Changes for compatibility with logstash 5: remove mechanism to guarantee string based hash keys.
+
+ ## 0.1.2
+ - First version of this plugin
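Background on the 0.1.3 entry: Logstash 5 replaced hash-style access to event fields with an explicit Event API that normalises keys itself, so the codec's own string-key conversion (the keys2strings step removed further down in this diff) became unnecessary. A minimal sketch of the API difference, with illustrative field names:

    # Logstash 2.x style: events could be read and written like a hash
    event["colour"]
    event["horn_length"] = 12

    # Logstash 5 style: explicit getters and setters
    event.get("colour")
    event.set("horn_length", 12)
    event.get("[mother][first_name]")   # nested fields via the field-reference syntax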
data/DEVELOPER.md CHANGED
@@ -1,2 +1,2 @@
- # logstash-codec-example
- Example codec plugin. This should help bootstrap your effort to write your own codec plugin!
+ # logstash-codec-protobuf
+ No special instructions!
data/LICENSE CHANGED
@@ -1,13 +1,203 @@
- Copyright (c) 2012–2015 Trivago (http://www.trivago.com/)

- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/

- http://www.apache.org/licenses/LICENSE-2.0
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright (c) 2012–2016 Elasticsearch http://www.elastic.co
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.

- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
data/NOTICE.TXT CHANGED
@@ -1,4 +1,4 @@
- Copyright Trivago http://www.trivago.com/
+ Copyright (c) 2012-2016 Elasticsearch

  This product includes software developed by The Apache Software
  Foundation (http://www.apache.org/).
data/README.md CHANGED
@@ -74,3 +74,4 @@ Maybe your protobuf definition does not fullfill the requirements and needs addi

  * maybe add support for setting undefined fields from default values in the decoder

+
@@ -1,164 +1,187 @@
  # encoding: utf-8
  require 'logstash/codecs/base'
  require 'logstash/util/charset'
- require 'protocol_buffers' # https://github.com/codekitchen/ruby-protocol-buffers
-
+ require 'protocol_buffers'
+
+ # This codec converts protobuf encoded messages into logstash events and vice versa.
+ #
+ # Requires the protobuf definitions as ruby files. You can create those using the [ruby-protoc compiler](https://github.com/codekitchen/ruby-protocol-buffers).
+ #
+ # The following shows a usage example for decoding events from a kafka stream:
+ # [source,ruby]
+ # kafka
+ # {
+ # zk_connect => "127.0.0.1"
+ # topic_id => "your_topic_goes_here"
+ # codec => protobuf
+ # {
+ # class_name => "Animal::Unicorn"
+ # include_path => ['/path/to/protobuf/definitions/UnicornProtobuf.pb.rb']
+ # }
+ # }
+ #
  class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
  config_name 'protobuf'

- # Required: list of strings containing directories or files with protobuf definitions
- config :include_path, :validate => :array, :required => true
-
- # Name of the class to decode
+ # Name of the class to decode.
+ # If your protobuf definition contains modules, prepend them to the class name with double colons like so:
+ # [source,ruby]
+ # class_name => "Foods::Dairy::Cheese"
+ #
+ # This corresponds to a protobuf definition starting as follows:
+ # [source,ruby]
+ # module Foods
+ # module Dairy
+ # class Cheese
+ # # here are your field definitions.
+ #
+ # If your class references other definitions: you only have to add the main class here.
  config :class_name, :validate => :string, :required => true

- # For benchmarking only, not intended for public use: change encoder strategy.
- # valid method names are: encoder_strategy_1 (the others are not implemented yet)
- config :encoder_method, :validate => :string, :default => "encoder_strategy_1"
+ # List of absolute paths to files with protobuf definitions.
+ # When using more than one file, make sure to arrange the files in reverse order of dependency so that each class is loaded before it is
+ # referred to by another.
+ #
+ # Example: a class _Cheese_ referencing another protobuf class _Milk_
+ # [source,ruby]
+ # module Foods
+ # module Dairy
+ # class Cheese
+ # set_fully_qualified_name "Foods.Dairy.Cheese"
+ # optional ::Foods::Cheese::Milk, :milk, 1
+ # optional :int64, :unique_id, 2
+ # # here be more field definitions
+ #
+ # would be configured as
+ # [source,ruby]
+ # include_path => ['/path/to/protobuf/definitions/Milk.pb.rb','/path/to/protobuf/definitions/Cheese.pb.rb']
+ #
+ # When using the codec in an output plugin:
+ # * make sure to include all the desired fields in the protobuf definition, including timestamp.
+ # Remove fields that are not part of the protobuf definition from the event by using the mutate filter.
+ # * the @ symbol is currently not supported in field names when loading the protobuf definitions for encoding. Make sure to call the timestamp field "timestamp"
+ # instead of "@timestamp" in the protobuf file. Logstash event fields will be stripped of the leading @ before conversion.
+ #
+ config :include_path, :validate => :array, :required => true

  def register
- @pb_metainfo = {}
- include_path.each { |path| require_pb_path(path) }
- @obj = create_object_from_name(class_name)
- @logger.debug("Protobuf files successfully loaded.")
-
+ @pb_class_references = {}
+ include_path.each { |path| load_protobuf_classfiles(path) }
+ @protobuf_class = create_protobuf_object(@class_name)
+
  end

  def decode(data)
- decoded = @obj.parse(data.to_s)
- results = keys2strings(decoded.to_hash)
- yield LogStash::Event.new(results) if block_given?
+ decoded = @protobuf_class.parse(data.to_s)
+ yield LogStash::Event.new(decoded.to_hash) if block_given?
  end # def decode

- def keys2strings(data)
- if data.is_a?(::Hash)
- new_hash = Hash.new
- data.each{|k,v| new_hash[k.to_s] = keys2strings(v)}
- new_hash
- else
- data
- end
- end
-

  def encode(event)
- protobytes = generate_protobuf(event)
- @on_event.call(event, protobytes)
- end # def encode
-
- private
- def generate_protobuf(event)
- meth = self.method(encoder_method)
- data = meth.call(event, @class_name)
  begin
- msg = @obj.new(data)
- msg.serialize_to_string
+ data = prepare_nested_objects(event.to_hash, @class_name)
+ pbo = @protobuf_class.new(data)
+ protobytes = pbo.serialize_to_string
+ @on_event.call(event, protobytes)
  rescue NoMethodError
- @logger.debug("error 2: NoMethodError. Maybe mismatching protobuf definition. Required fields are: " + event.to_hash.keys.join(", "))
+ @logger.warn("Error 2: NoMethodError. Maybe mismatching protobuf definition? Make sure that your protobuf definition has at least these fields: " + event.to_hash.keys.join(", "))
+ rescue => e
+ @logger.warn("Could not encode protobuf: " + e.message)
  end
- end
-
- def encoder_strategy_1(event, class_name)
- _encoder_strategy_1(event.to_hash, class_name)
-
- end
+ end # def encode

- def _encoder_strategy_1(datahash, class_name)
- fields = clean_hash_keys(datahash)
- fields = flatten_hash_values(fields) # TODO we could merge this and the above method back into one to save one iteration, but how are we going to name it?
- meta = get_complex_types(class_name) # returns a hash with member names and their protobuf class names
- meta.map do | (k,typeinfo) |
+ # Creates instances of nested protobuf references recursively, so that the top level protobuf object can be built from the event hash.
+ private
+ def prepare_nested_objects(fields, class_name)
+ fields = prepare_for_encoding(fields)
+ referenced_classes = @pb_class_references[class_name] # returns a hash with member names and their protobuf class names
+ referenced_classes.map do | (k,class_name) |
  if fields.include?(k)
- original_value = fields[k]
- proto_obj = create_object_from_name(typeinfo)
+ value = fields[k]
+ proto_obj = create_protobuf_object(class_name)
  fields[k] =
- if original_value.is_a?(::Array)
- ecs1_list_helper(original_value, proto_obj, typeinfo)
-
+ if value.is_a?(::Array)
+ # make this field an array/list of protobuf objects
+ # value is a list of hashed complex objects, each of which needs to be protobuffed and
+ # put back into the list.
+ value.map { |x| prepare_nested_objects(x, class_name) }
+ value
  else
- recursive_fix = _encoder_strategy_1(original_value, class_name)
- proto_obj.new(recursive_fix)
+ proto_obj.new( prepare_nested_objects(value, class_name) )
  end # if is array
  end
-
  end
-
  fields
  end

- def ecs1_list_helper(value, proto_obj, class_name)
- # make this field an array/list of protobuf objects
- # value is a list of hashed complex objects, each of which needs to be protobuffed and
- # put back into the list.
- next unless value.is_a?(::Array)
- value.map { |x| _encoder_strategy_1(x, class_name) }
- value
- end

- def flatten_hash_values(datahash)
- # 2) convert timestamps and other objects to strings
- next unless datahash.is_a?(::Hash)
-
- ::Hash[datahash.map{|(k,v)| [k, (convert_to_string?(v) ? v.to_s : v)] }]
- end

- def clean_hash_keys(datahash)
- # 1) remove @ signs from keys
+ # Removes @ characters from the member names of the event.
+ # Necessary for @timestamp fields and the likes. Otherwise we'd run into errors (no such method) upon creating the protobuf object.
+ # Then convert timestamps and other objects to strings so that they can be passed to the protobuf object constructor method.
+ def prepare_for_encoding(datahash)
  next unless datahash.is_a?(::Hash)
-
- ::Hash[datahash.map{|(k,v)| [remove_atchar(k.to_s), v] }]
- end #clean_hash_keys
+ ::Hash[datahash.map{|(k,v)| [k.to_s.gsub(/@/,'').to_sym, convert_value(v)] }]
+ end

- def convert_to_string?(v)
- !(v.is_a?(Fixnum) || v.is_a?(::Hash) || v.is_a?(::Array) || [true, false].include?(v))
+ def convert_value(v)
+ (convertable_to_string?(v) ? v.to_s : v)
  end

-
- def remove_atchar(key) # necessary for @timestamp fields and the likes. Protobuf definition doesn't handle @ in field names well.
- key.dup.gsub(/@/,'')
+ def convertable_to_string?(v)
+ !(v.is_a?(Fixnum) || v.is_a?(::Hash) || v.is_a?(::Array) || [true, false].include?(v))
  end

- private
- def create_object_from_name(name)
+ # Looks up the protobuf class for the given name. The class will be used later to call the decode and encode methods on.
+ def create_protobuf_object(name)
  begin
- @logger.debug("Creating instance of " + name)
+
  name.split('::').inject(Object) { |n,c| n.const_get c }
  end
  end

- def get_complex_types(class_name)
- @pb_metainfo[class_name]
- end

- def require_with_metadata_analysis(filename)
- require filename
+
+ # Analyses a protobuf definition to determine which other protobuf classes it uses.
+ # This is needed for the encoder section of the codec.
+ # When encoding an event into a pb class which uses other pb classes, we need to create the
+ # objects for those nested classes first, so that we can reference them when encoding the topmost
+ # class. In order to be able to do so, this method reads each protobuf class line by line and
+ # stores the information in the @pb_class_references member.
+ # Params:
+ # +filename+:: the absolute path to the protobuf definition.
+ def load_class_reference_information(filename)
  regex_class_name = /\s*class\s*(?<name>.+?)\s+/
  regex_module_name = /\s*module\s*(?<name>.+?)\s+/
  regex_pbdefs = /\s*(optional|repeated)(\s*):(?<type>.+),(\s*):(?<name>\w+),(\s*)(?<position>\d+)/
- # now we also need to find out which class it contains and the protobuf definitions in it.
- # We'll unfortunately need that later so that we can create nested objects.
  begin
  class_name = ""
  type = ""
  field_name = ""
  classname_found = false
  File.readlines(filename).each do |line|
- if ! (line =~ regex_module_name).nil? && !classname_found # because it might be declared twice in the file
+ # Check if the current line contains the module name (but only if the class name hasn't been found yet because it might be declared twice in the file)
+ if ! (line =~ regex_module_name).nil? && !classname_found
+ # Module name found, so we start to create the class name string which starts with the module.
  class_name << $1
  class_name << "::"
-
  end
- if ! (line =~ regex_class_name).nil? && !classname_found # because it might be declared twice in the file
+
+ # Check if the current line contains the class name (but only if it hasn't been found yet because it might be declared twice in the file)
+ if ! (line =~ regex_class_name).nil? && !classname_found
+ # class name found. Let's append it to the class name string, which might already contain the module name
  class_name << $1
- @pb_metainfo[class_name] = {}
+ # initialize the hash for the field specific information that we will collect in the next step
+ @pb_class_references[class_name] = {}
  classname_found = true
  end
+
  if ! (line =~ regex_pbdefs).nil?
  type = $1
  field_name = $2
  if type =~ /::/
- @pb_metainfo[class_name][field_name] = type.gsub!(/^:/,"")
-
+ # the line contains a field declaration which references another class. We need to store the name of that class.
+ @pb_class_references[class_name][field_name] = type.gsub!(/^:/,"")
  end
  end
  end
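As a rough illustration of what the scan above produces: for the Cheese/Milk example from the config documentation, @pb_class_references ends up mapping each class to the classes its fields reference. The exact keys depend on the generated .pb.rb files, so treat the values below as a sketch:

    # hypothetical result of load_class_reference_information for Milk.pb.rb and Cheese.pb.rb
    @pb_class_references = {
      "Foods::Dairy::Cheese" => { "milk" => "Foods::Cheese::Milk" },
      "Foods::Cheese::Milk"  => {}
    }
    # prepare_nested_objects consults this map so that the nested "milk" hash of an event
    # is turned into a Foods::Cheese::Milk object before the Cheese object is built.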
@@ -170,16 +193,21 @@ class LogStash::Codecs::Protobuf < LogStash::Codecs::Base
  end
  end

- def require_pb_path(dir_or_file)
- f = dir_or_file.end_with? ('.rb')
+ # This method calls 'require' for the protobuf class files listed in the 'include_path' section of the config.
+ # When given a directory instead of a file, it will require all files in the directory.
+ # Params:
+ # +dir_or_file+:: the absolute path to the file or directory that needs to be loaded
+ def load_protobuf_classfiles(dir_or_file)
  begin
- if f
- @logger.debug("Including protobuf file: " + dir_or_file)
- require_with_metadata_analysis dir_or_file
+ if dir_or_file.end_with? ('.rb')
+
+ require dir_or_file
+ load_class_reference_information dir_or_file
  else
  Dir[ dir_or_file + '/*.rb'].each { |file|
- @logger.debug("Including protobuf path: " + dir_or_file + "/" + file)
- require_with_metadata_analysis file
+
+ require file
+ load_class_reference_information file
  }
  end
  end
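For the encoding direction described in the codec's documentation above, a pipeline configuration could look roughly like this; the kafka output and the removed field are assumptions for illustration, only the codec options come from this plugin:

    filter {
      mutate {
        # drop fields that have no counterpart in the protobuf definition
        remove_field => [ "@version" ]
      }
    }
    output {
      kafka {
        topic_id => "your_topic_goes_here"
        codec => protobuf {
          class_name => "Animal::Unicorn"
          include_path => ['/path/to/protobuf/definitions/UnicornProtobuf.pb.rb']
        }
      }
    }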
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|

  s.name = 'logstash-codec-protobuf'
- s.version = '0.1.2'
+ s.version = '0.1.3'
  s.licenses = ['Apache License (2.0)']
  s.summary = "This codec may be used to decode (via inputs) and encode (via outputs) protobuf messages"
  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -19,8 +19,9 @@ Gem::Specification.new do |s|
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "codec" }

  # Gem dependencies
- s.add_runtime_dependency 'logstash-core', '>= 1.4.0', '< 3.0.0'
- s.add_runtime_dependency 'ruby-protocol-buffers' # used by the compiled version of our protobuf definition.
+ s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+ s.add_runtime_dependency 'ruby-protocol-buffers' # https://github.com/codekitchen/ruby-protocol-buffers
+ # s.add_runtime_dependency 'google-protobuf' # https://github.com/google/protobuf/tree/master/ruby
  s.add_development_dependency 'logstash-devutils'
  end

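The dependency change above declares the plugin against the versioned Logstash plugin API (logstash-core-plugin-api) instead of logstash-core directly. For testing the gem from source against a local Logstash installation, the usual route is a Gemfile entry along these lines (the path is a placeholder):

    # Gemfile of a local Logstash installation
    gem "logstash-codec-protobuf", :path => "/your/local/checkout/of/logstash-codec-protobuf"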
@@ -23,10 +23,11 @@ describe LogStash::Codecs::Protobuf do
  unicorn = Animal::Unicorn.new(data)

  plugin_unicorn.decode(unicorn.serialize_to_string) do |event|
- expect(event["colour"] ).to eq(data[:colour] )
- expect(event["horn_length"] ).to eq(data[:horn_length] )
- expect(event["last_seen"] ).to eq(data[:last_seen] )
- expect(event["has_wings"] ).to eq(data[:has_wings] )
+ expect(event.get("colour") ).to eq(data[:colour] )
+ expect(event.get("horn_length") ).to eq(data[:horn_length] )
+ expect(event.get("last_seen") ).to eq(data[:last_seen] )
+ expect(event.get("has_wings") ).to eq(data[:has_wings] )
+
  end
  end # it

@@ -54,18 +55,20 @@ describe LogStash::Codecs::Protobuf do
  hugo = Animal::Human.new(data)

  plugin_human.decode(hugo.serialize_to_string) do |event|
- expect(event["first_name"] ).to eq(data[:first_name] )
- expect(event["middle_names"] ).to eq(data[:middle_names] )
- expect(event["last_name"] ).to eq(data[:last_name] )
- expect(event["mother"]["first_name"] ).to eq(data_m[:first_name] )
- expect(event["father"]["first_name"] ).to eq(data_f[:first_name] )
- expect(event["mother"]["last_name"] ).to eq(data_m[:last_name] )
- expect(event["mother"]["mother"]["last_name"] ).to eq(data_gm[:last_name] )
- expect(event["mother"]["mother"]["first_name"] ).to eq(data_gm[:first_name] )
- expect(event["mother"]["mother"]["middle_names"] ).to eq(data_gm[:middle_names] )
- expect(event["mother"]["mother"]["vegetarian"] ).to eq(data_gm[:vegetarian] )
- expect(event["father"]["last_name"] ).to eq(data_f[:last_name] )
- expect(event["father"]["middle_names"] ).to eq(data_f[:middle_names] )
+ expect(event.get("first_name") ).to eq(data[:first_name] )
+ expect(event.get("middle_names") ).to eq(data[:middle_names] )
+ expect(event.get("middle_names").length ).to eq(data[:middle_names].length )
+ expect(event.get("[middle_names]")[1] ).to eq(data[:middle_names][1] )
+ expect(event.get("last_name") ).to eq(data[:last_name] )
+ expect(event.get("[mother][first_name]") ).to eq(data_m[:first_name] )
+ expect(event.get("[father][first_name]") ).to eq(data_f[:first_name] )
+ expect(event.get("[mother][last_name]") ).to eq(data_m[:last_name] )
+ expect(event.get("[mother][mother][last_name]") ).to eq(data_gm[:last_name] )
+ expect(event.get("[mother][mother][first_name]") ).to eq(data_gm[:first_name] )
+ expect(event.get("[mother][mother][middle_names]") ).to eq(data_gm[:middle_names] )
+ expect(event.get("[mother][mother][vegetarian]") ).to eq(data_gm[:vegetarian] )
+ expect(event.get("[father][last_name]") ).to eq(data_f[:last_name] )
+ expect(event.get("[father][middle_names]") ).to eq(data_f[:middle_names] )
  end
  end # it

@@ -91,9 +94,9 @@ describe LogStash::Codecs::Protobuf do
  pb = ColourProtoTest.new(data)

  plugin_col.decode(pb.serialize_to_string) do |event|
- expect(event["least_liked"] ).to eq(data[:least_liked] )
- expect(event["favourite_colours"] ).to eq(data[:favourite_colours] )
- expect(event["booleantest"] ).to eq(data[:booleantest] )
+ expect(event.get("least_liked") ).to eq(data[:least_liked] )
+ expect(event.get("favourite_colours") ).to eq(data[:favourite_colours] )
+ expect(event.get("booleantest") ).to eq(data[:booleantest] )
  end
  end # it

@@ -118,10 +121,10 @@ describe LogStash::Codecs::Protobuf do
  insist { data.is_a? String }
  unicorn = Animal::UnicornEvent.parse(data)

- expect(unicorn.colour ).to eq(event["colour"] )
- expect(unicorn.horn_length ).to eq(event["horn_length"] )
- expect(unicorn.last_seen ).to eq(event["last_seen"] )
- expect(unicorn.has_wings ).to eq(event["has_wings"] )
+ expect(unicorn.colour ).to eq(event.get("colour") )
+ expect(unicorn.horn_length ).to eq(event.get("horn_length") )
+ expect(unicorn.last_seen ).to eq(event.get("last_seen") )
+ expect(unicorn.has_wings ).to eq(event.get("has_wings") )

  end # subject.on_event
  subject.encode(event)
@@ -149,17 +152,17 @@ describe LogStash::Codecs::Protobuf do
  insist { data.is_a? String }
  jimmy = Animal::Human.parse(data)

- expect(jimmy.first_name ).to eq(event["first_name"] )
- expect(jimmy.middle_names ).to eq(event["middle_names"] )
- expect(jimmy.last_name ).to eq(event["last_name"] )
- expect(jimmy.mother.first_name ).to eq(event["mother"]["first_name"] )
- expect(jimmy.father.first_name ).to eq(event["father"]["first_name"] )
- expect(jimmy.mother.middle_names ).to eq(event["mother"]["middle_names"] )
- expect(jimmy.mother.age ).to eq(event["mother"]["age"] ) # recursion test for values
- expect(jimmy.mother.vegetarian ).to eq(event["mother"]["vegetarian"] ) # recursion test for values
- expect(jimmy.father.last_name ).to eq(event["father"]["last_name"] )
- expect(jimmy.father.email ).to eq(event["father"]["@email"] ) # recursion test for keys
- expect(jimmy.mother.last_name ).to eq(event["mother"]["last_name"] )
+ expect(jimmy.first_name ).to eq(event.get("first_name") )
+ expect(jimmy.middle_names ).to eq(event.get("middle_names") )
+ expect(jimmy.last_name ).to eq(event.get("last_name") )
+ expect(jimmy.mother.first_name ).to eq(event.get("[mother][first_name]") )
+ expect(jimmy.father.first_name ).to eq(event.get("[father][first_name]") )
+ expect(jimmy.mother.middle_names ).to eq(event.get("[mother][middle_names]") )
+ expect(jimmy.mother.age ).to eq(event.get("[mother][age]") ) # recursion test for values
+ expect(jimmy.mother.vegetarian ).to eq(event.get("[mother][vegetarian]") ) # recursion test for values
+ expect(jimmy.father.last_name ).to eq(event.get("[father][last_name]") )
+ expect(jimmy.father.email ).to eq(event.get("[father][@email]") ) # recursion test for keys
+ expect(jimmy.mother.last_name ).to eq(event.get("[mother][last_name]") )

  end # subject.on_event
  subject.encode(event)
@@ -190,9 +193,9 @@ describe LogStash::Codecs::Protobuf do

  colpref = ColourProtoTest.parse(data)

- expect(colpref.booleantest ).to eq(event["booleantest"] )
- expect(colpref.least_liked ).to eq(event["least_liked"] )
- expect(colpref.favourite_colours ).to eq(event["favourite_colours"] )
+ expect(colpref.booleantest ).to eq(event.get("booleantest") )
+ expect(colpref.least_liked ).to eq(event.get("least_liked") )
+ expect(colpref.favourite_colours ).to eq(event.get("favourite_colours") )


  end # subject.on_event
@@ -202,4 +205,30 @@ describe LogStash::Codecs::Protobuf do



+ context "#encode4" do
+ subject do
+ next LogStash::Codecs::Protobuf.new("class_name" => "ColourProtoTest", "include_path" => ['spec/helpers/ColourTestcase.pb.rb'])
+ end
+
+ require 'spec/helpers/ColourTestcase.pb.rb' # otherwise we can't use the colour enums in the next line
+ event = LogStash::Event.new("booleantest" => [false, false, true], "least_liked" => ColourProtoTest::Colour::YELLOW, "favourite_colours" => \
+ [ColourProtoTest::Colour::BLACK, ColourProtoTest::Colour::BLUE] )
+
+ it "should return protobuf encoded data from a complex event with enums" do
+
+ subject.on_event do |event, data|
+ insist { data.is_a? String }
+
+ colpref = ColourProtoTest.parse(data)
+
+ expect(colpref.booleantest ).to eq(event.get("booleantest") )
+ expect(colpref.least_liked ).to eq(event.get("least_liked") )
+ expect(colpref.favourite_colours ).to eq(event.get("favourite_colours") )
+
+
+ end # subject.on_event
+ subject.encode(event)
+ end # it
+ end # context
+
  end
@@ -14,6 +14,7 @@ module Animal
  optional :int32, :horn_length, 2
  optional :int32, :last_seen, 3
  optional :bool, :has_wings, 4
+ optional :float, :height, 5
  end

  end
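For orientation, the added generated line above corresponds to a proto2 field declaration of roughly this form in the message's .proto source (the surrounding message definition is not shown in this diff):

    # The generated ruby line
    #   optional :float, :height, 5
    # maps back to a proto2 declaration along the lines of:
    #   optional float height = 5;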
metadata CHANGED
@@ -1,63 +1,63 @@
  --- !ruby/object:Gem::Specification
  name: logstash-codec-protobuf
  version: !ruby/object:Gem::Version
- version: 0.1.2
+ version: 0.1.3
  platform: ruby
  authors:
  - Inga Feick
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-04-22 00:00:00.000000000 Z
+ date: 2016-11-28 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: logstash-core
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - '>='
- - !ruby/object:Gem::Version
- version: 1.4.0
- - - <
- - !ruby/object:Gem::Version
- version: 3.0.0
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
- version: 1.4.0
- - - <
+ version: '1.60'
+ - - <=
  - !ruby/object:Gem::Version
- version: 3.0.0
+ version: '2.99'
+ name: logstash-core-plugin-api
  prerelease: false
  type: :runtime
- - !ruby/object:Gem::Dependency
- name: ruby-protocol-buffers
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
- version: '0'
+ version: '1.60'
+ - - <=
+ - !ruby/object:Gem::Version
+ version: '2.99'
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
+ name: ruby-protocol-buffers
  prerelease: false
  type: :runtime
- - !ruby/object:Gem::Dependency
- name: logstash-devutils
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
  - !ruby/object:Gem::Version
  version: '0'
+ name: logstash-devutils
  prerelease: false
  type: :development
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
  email: inga.feick@trivago.com
  executables: []