logstash-codec-zylog 1.0.6

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+ metadata.gz: 8489c5a0265f37a193a04049f530a682af9fa867
+ data.tar.gz: 4635a4cd1eb6992bdb33e0f8a55106fd905ea0d2
+ SHA512:
+ metadata.gz: fb51523acf7a88105bbaee4ab8b4503f6c21e0e73650c334c1737b8cbb2d5a538b678d65a3f45ec03545c4d89f21040e5ce0d92ee7d47b279852c2987a1cc20b
+ data.tar.gz: 822b974b2f16d32d5ab2eb1e72dc8aa2cd8c825ad5e402a7c26c2feef5c04f2ffbddd10c1422c91e0f670d1387d90d887c1d0f91e5059daffb2217d46e931309
data/CHANGELOG.md ADDED
@@ -0,0 +1,11 @@
+ ## 1.0.3
+ - Fix some documentation issues
+
+ ## 1.0.1
+ - Speed improvement, better exception handling and code refactoring
+
+ ## 1.0.0
+ - Update to v5.0 API
+
+ ## 0.1.2
+ - First version of this plugin
data/CONTRIBUTORS ADDED
@@ -0,0 +1,12 @@
+ The following is a list of people who have contributed ideas, code, bug
+ reports, or in general have helped logstash along its way.
+
+ Contributors:
+ * Inga Feick (ingafeick)
+ * Nicolai Schulten (krakenfuss)
+
+
+ Note: If you've sent us patches, bug reports, or otherwise contributed to
+ Logstash, and you aren't on the list above and want to be, please let us know
+ and we'll make sure you're here. Contributions from folks like you are what make
+ open source awesome.
data/DEVELOPER.md ADDED
@@ -0,0 +1,2 @@
+ # logstash-codec-protobuf
+ No special instructions!
data/Gemfile ADDED
@@ -0,0 +1,11 @@
+ source 'https://rubygems.org'
+
+ gemspec
+
+ logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
+ use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+
+ if Dir.exist?(logstash_path) && use_logstash_source
+   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+   gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+ end
data/LICENSE ADDED
@@ -0,0 +1,203 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright (c) 2012-2016 Elasticsearch http://www.elastic.co
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
data/NOTICE.TXT ADDED
@@ -0,0 +1,4 @@
+ Copyright (c) 2012-2016 Elasticsearch
+
+ This product includes software developed by The Apache Software
+ Foundation (http://www.apache.org/).
data/README.md ADDED
@@ -0,0 +1,77 @@
+ # Logstash zylog codec
+
+ This is a codec plugin for [Logstash](https://github.com/elastic/logstash) to parse protobuf messages.
+
+ # Prerequisites and Installation
+
+ * prepare your ruby versions of the protobuf definitions, for example using the ruby-protoc compiler from https://github.com/codekitchen/ruby-protocol-buffers
+ * download the [gem file](https://rubygems.org/gems/logstash-codec-protobuf) to your computer.
+ * Install the plugin. From within your logstash directory, do
+
+       bin/plugin install /path/to/logstash-codec-protobuf-$VERSION.gem
+
+ * use the codec in your logstash config file. See details below.
+
+ ## Configuration
+
+ include_path (required): an array of strings with filenames or directory names where logstash can find your protobuf definitions. Please provide absolute paths. For directories it will only try to import files ending in .rb
+
+ class_name (required): the name of the protobuf class that is to be decoded or encoded.
+
+ ## Usage example: decoder
+
+ Use this as a codec in any logstash input. Just provide the name of the class that your incoming objects are encoded in, and specify the path to the compiled definition.
+ Here's an example for a kafka input:
+
+     kafka
+     {
+       zk_connect => "127.0.0.1"
+       topic_id => "unicorns_protobuffed"
+       codec => protobuf
+       {
+         class_name => "Unicorn"
+         include_path => ['/my/path/to/compiled/protobuf/definitions/UnicornProtobuf.pb.rb']
+       }
+     }
+
+ ### Example with referenced definitions
+
+ Imagine you have the following protobuf relationship: class Cheese lives in namespace Foods::Dairy and uses another class Milk.
+
+     module Foods
+       module Dairy
+         class Cheese
+           set_fully_qualified_name "Foods.Dairy.Cheese"
+           optional ::Foods::Cheese::Milk, :milk, 1
+           optional :int64, :unique_id, 2
+           # here be more field definitions
+
+ Make sure to put the referenced Milk class first in the include_path:
+
+     include_path => ['/path/to/protobuf/definitions/Milk.pb.rb','/path/to/protobuf/definitions/Cheese.pb.rb']
+
+ Set the class name to the parent class:
+
+     class_name => "Foods::Dairy::Cheese"
+
+ ## Usage example: encoder
+
+ The configuration of the codec for encoding logstash events for a protobuf output is pretty much the same as for the decoder input usage demonstrated above. There are some constraints, though, that you need to be aware of (see the example below):
+ * the protobuf definition needs to contain all the fields that logstash typically adds to an event, in the correct data type. Examples for this are @timestamp (string), @version (string), host and path, all of which depend on your input sources and filters as well. If you do not want to add those fields to your protobuf definition then please use a [mutate filter](https://www.elastic.co/guide/en/logstash/current/plugins-filters-mutate.html) to [remove](https://www.elastic.co/guide/en/logstash/current/plugins-filters-mutate.html#plugins-filters-mutate-remove_field) the undesired fields.
+ * object members starting with @ are somewhat problematic in protobuf definitions. Therefore those fields will automatically be renamed to remove the at character. This also affects the important @timestamp field. Please name it just "timestamp" in your definition.
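+
+ A minimal sketch of such a pipeline section, mirroring the decoder example above (the kafka output, topic name and removed fields are assumptions; adjust them to whatever your own inputs and filters actually add):
+
+     filter
+     {
+       mutate
+       {
+         # drop fields that are not part of the protobuf definition
+         remove_field => [ "@version", "host", "path" ]
+       }
+     }
+     output
+     {
+       kafka
+       {
+         topic_id => "unicorns_protobuffed"
+         codec => protobuf
+         {
+           class_name => "Unicorn"
+           include_path => ['/my/path/to/compiled/protobuf/definitions/UnicornProtobuf.pb.rb']
+         }
+       }
+     }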
+
+
+ ## Troubleshooting
+
+ ### "uninitialized constant SOME_CLASS_NAME"
+
+ If you include more than one definition class, consider the order of inclusion. This is especially relevant if you include whole directories. A definition might refer to another definition that is not loaded yet. In this case, please specify the files in the include_path variable in reverse order of reference. See 'Example with referenced definitions' above.
+
+ ### no protobuf output
+
+ Maybe your protobuf definition does not fulfill the requirements and needs additional fields. Run logstash with the --debug flag and grep for "error 2".
+
+
+ ## Limitations and roadmap
+
+ * maybe add support for setting undefined fields from default values in the decoder
+
+
data/docs/index.asciidoc ADDED
@@ -0,0 +1,106 @@
+ :plugin: protobuf
+ :type: codec
+
+ ///////////////////////////////////////////
+ START - GENERATED VARIABLES, DO NOT EDIT!
+ ///////////////////////////////////////////
+ :version: %VERSION%
+ :release_date: %RELEASE_DATE%
+ :changelog_url: %CHANGELOG_URL%
+ :include_path: ../../../../logstash/docs/include
+ ///////////////////////////////////////////
+ END - GENERATED VARIABLES, DO NOT EDIT!
+ ///////////////////////////////////////////
+
+ [id="plugins-{type}s-{plugin}"]
+
+ === Protobuf codec plugin
+
+ include::{include_path}/plugin_header.asciidoc[]
+
+ ==== Description
+
+ This codec converts protobuf encoded messages into logstash events and vice versa.
+
+ Requires the protobuf definitions as ruby files. You can create those using the [ruby-protoc compiler](https://github.com/codekitchen/ruby-protocol-buffers).
+
+ The following shows a usage example for decoding events from a kafka stream:
+ [source,ruby]
+ kafka
+ {
+   zk_connect => "127.0.0.1"
+   topic_id => "your_topic_goes_here"
+   codec => protobuf
+   {
+     class_name => "Animal::Unicorn"
+     include_path => ['/path/to/protobuf/definitions/UnicornProtobuf.pb.rb']
+   }
+ }
+
+
+ [id="plugins-{type}s-{plugin}-options"]
+ ==== Protobuf Codec Configuration Options
+
+ [cols="<,<,<",options="header",]
+ |=======================================================================
+ |Setting |Input type|Required
+ | <<plugins-{type}s-{plugin}-class_name>> |<<string,string>>|Yes
+ | <<plugins-{type}s-{plugin}-include_path>> |<<array,array>>|Yes
+ |=======================================================================
+
+ &nbsp;
+
+ [id="plugins-{type}s-{plugin}-class_name"]
+ ===== `class_name`
+
+ * This is a required setting.
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ Name of the class to decode.
+ If your protobuf definition contains modules, prepend them to the class name with double colons like so:
+ [source,ruby]
+ class_name => "Foods::Dairy::Cheese"
+
+ This corresponds to a protobuf definition starting as follows:
+ [source,ruby]
+ module Foods
+   module Dairy
+     class Cheese
+       # here are your field definitions.
+
+ If your class references other definitions: you only have to add the main class here.
+
+ [id="plugins-{type}s-{plugin}-include_path"]
+ ===== `include_path`
+
+ * This is a required setting.
+ * Value type is <<array,array>>
+ * There is no default value for this setting.
+
+ List of absolute paths to files with protobuf definitions.
+ When using more than one file, make sure to arrange the files in reverse order of dependency so that each class is loaded before it is
+ referred to by another.
+
+ Example: a class _Cheese_ referencing another protobuf class _Milk_
+ [source,ruby]
+ module Foods
+   module Dairy
+     class Cheese
+       set_fully_qualified_name "Foods.Dairy.Cheese"
+       optional ::Foods::Cheese::Milk, :milk, 1
+       optional :int64, :unique_id, 2
+       # here be more field definitions
+
+ would be configured as
+ [source,ruby]
+ include_path => ['/path/to/protobuf/definitions/Milk.pb.rb','/path/to/protobuf/definitions/Cheese.pb.rb']
+
+ When using the codec in an output plugin (see the sketch below):
+ * make sure to include all the desired fields in the protobuf definition, including timestamp.
+   Remove fields that are not part of the protobuf definition from the event by using the mutate filter.
+ * the @ symbol is currently not supported in field names when loading the protobuf definitions for encoding. Make sure to call the timestamp field "timestamp"
+   instead of "@timestamp" in the protobuf file. Logstash event fields will be stripped of the leading @ before conversion.
+
+
+
data/lib/logstash/codecs/zylog.rb ADDED
@@ -0,0 +1,235 @@
+ # encoding: utf-8
+ require 'logstash/codecs/base'
+ require 'logstash/util/charset'
+ require 'protocol_buffers' # https://github.com/codekitchen/ruby-protocol-buffers
+
+ # This codec converts protobuf encoded messages into logstash events and vice versa.
+ #
+ # Requires the protobuf definitions as ruby files. You can create those using the [ruby-protoc compiler](https://github.com/codekitchen/ruby-protocol-buffers).
+ #
+ # The following shows a usage example for decoding events from a kafka stream:
+ # [source,ruby]
+ # kafka
+ # {
+ #   zk_connect => "127.0.0.1"
+ #   topic_id => "your_topic_goes_here"
+ #   codec => protobuf
+ #   {
+ #     class_name => "Animal::Unicorn"
+ #     include_path => ['/path/to/protobuf/definitions/UnicornProtobuf.pb.rb']
+ #   }
+ # }
+ #
+
+ class LogStash::Codecs::Zylog < LogStash::Codecs::Base
+   config_name 'zylog'
+
+   # Name of the class to decode.
+   # If your protobuf definition contains modules, prepend them to the class name with double colons like so:
+   # [source,ruby]
+   # class_name => "Foods::Dairy::Cheese"
+   #
+   # This corresponds to a protobuf definition starting as follows:
+   # [source,ruby]
+   # module Foods
+   #   module Dairy
+   #     class Cheese
+   #       # here are your field definitions.
+   #
+   # If your class references other definitions: you only have to add the main class here.
+   config :class_name, :validate => :string, :required => true
+
+   # List of absolute paths to files with protobuf definitions.
+   # When using more than one file, make sure to arrange the files in reverse order of dependency so that each class is loaded before it is
+   # referred to by another.
+   #
+   # Example: a class _Cheese_ referencing another protobuf class _Milk_
+   # [source,ruby]
+   # module Foods
+   #   module Dairy
+   #     class Cheese
+   #       set_fully_qualified_name "Foods.Dairy.Cheese"
+   #       optional ::Foods::Cheese::Milk, :milk, 1
+   #       optional :int64, :unique_id, 2
+   #       # here be more field definitions
+   #
+   # would be configured as
+   # [source,ruby]
+   # include_path => ['/path/to/protobuf/definitions/Milk.pb.rb','/path/to/protobuf/definitions/Cheese.pb.rb']
+   #
+   # When using the codec in an output plugin:
+   # * make sure to include all the desired fields in the protobuf definition, including timestamp.
+   #   Remove fields that are not part of the protobuf definition from the event by using the mutate filter.
+   # * the @ symbol is currently not supported in field names when loading the protobuf definitions for encoding. Make sure to call the timestamp field "timestamp"
+   #   instead of "@timestamp" in the protobuf file. Logstash event fields will be stripped of the leading @ before conversion.
+   #
+   config :include_path, :validate => :array, :required => true
+
+
+   def register
+     @pb_metainfo = {}
+     include_path.each { |path| require_pb_path(path) }
+     @obj = create_object_from_name(class_name)
+     @logger.debug("Zylog files successfully loaded.")
+   end
+
+
+   def decode(data)
+     begin
+       @logger.debug("data class type: #{data.class}.")
+       @logger.debug("receive data length: #{data.length}.")
+       if data.length >= 5
+         payload = data.bytes.to_a
+         @logger.debug("receive data bytes 1st-5th: #{payload[0]} #{payload[1]} #{payload[2]} #{payload[3]} #{payload[4]}")
+         # bytes 2-5 carry the total packet length in big-endian order; the first byte is ignored
+         total_packet_length = (payload[4].ord) + (payload[3].ord << 8) + (payload[2].ord << 16) + (payload[1].ord << 24)
+         @logger.debug("total packet length: #{total_packet_length}.")
+         if data.length >= total_packet_length
+           # strip the 5-byte header and the trailing byte, keeping only the protobuf payload
+           data = data.slice(5, total_packet_length - 5 - 1)
+           @logger.debug("after extract receive data length: #{data.length}.")
+         else
+           raise(StandardError, "packet too small, length: #{data.length}.")
+         end
+       else
+         raise(StandardError, "packet too small, length: #{data.length}.")
+       end
+
+       decoded = @obj.parse(data.to_s)
+       yield LogStash::Event.new(decoded.to_hash) if block_given?
+     rescue => e
+       @logger.warn("Couldn't decode protobuf: #{e.inspect}.")
+       # raise e
+     end
+   end # def decode
+
+
+   def encode(event)
+     protobytes = generate_protobuf(event)
+     @on_event.call(event, protobytes)
+   end # def encode
+
+
+   private
+   def generate_protobuf(event)
+     begin
+       data = _encode(event, @class_name)
+       msg = @obj.new(data)
+       msg.serialize_to_string
+     rescue NoMethodError
+       @logger.debug("error 2: NoMethodError. Maybe mismatching protobuf definition. Required fields are: " + event.to_hash.keys.join(", "))
+     rescue => e
+       @logger.debug("Couldn't generate protobuf: #{e}")
+     end
+   end
+
+
+   def _encode(datahash, class_name)
+     fields = prepare_for_encoding(datahash)
+     meta = get_complex_types(class_name) # returns a hash with member names and their protobuf class names
+     meta.map do | (k,typeinfo) |
+       if fields.include?(k)
+         original_value = fields[k]
+         proto_obj = create_object_from_name(typeinfo)
+         fields[k] =
+           if original_value.is_a?(::Array)
+             # make this field an array/list of protobuf objects:
+             # the value is a list of hashed complex objects, each of which needs to be protobuffed and
+             # put back into the list.
+             original_value.map { |x| proto_obj.new(_encode(x, typeinfo)) }
+           else
+             recursive_fix = _encode(original_value, class_name)
+             proto_obj.new(recursive_fix)
+           end # if is array
+       end
+     end
+     fields
+   end
+
+
+   def prepare_for_encoding(datahash)
+     # the data cannot be encoded until certain criteria are met:
+     # 1) remove @ signs from keys
+     # 2) convert timestamps and other objects to strings
+     datahash = datahash.to_hash if datahash.respond_to?(:to_hash) && !datahash.is_a?(::Hash) # events need to be turned into hashes first
+     return unless datahash.is_a?(::Hash)
+     ::Hash[datahash.map{|(k,v)| [remove_atchar(k.to_s), (convert_to_string?(v) ? v.to_s : v)] }]
+   end
+
+
+   def convert_to_string?(v)
+     !(v.is_a?(Fixnum) || v.is_a?(::Hash) || v.is_a?(::Array) || [true, false].include?(v))
+   end
+
+
+   def remove_atchar(key) # necessary for @timestamp fields and the likes. Zylog definition doesn't handle @ in field names well.
+     key.dup.gsub(/@/,'')
+   end
+
+
+   def create_object_from_name(name)
+     begin
+       @logger.debug("Creating instance of " + name)
+       name.split('::').inject(Object) { |n,c| n.const_get c }
+     end
+   end
+
+   def get_complex_types(class_name)
+     @pb_metainfo[class_name]
+   end
+
+   def require_with_metadata_analysis(filename)
+     require filename
+     regex_class_name = /\s*class\s*(?<name>.+?)\s+/
+     regex_module_name = /\s*module\s*(?<name>.+?)\s+/
+     regex_pbdefs = /\s*(optional|repeated)(\s*):(?<type>.+),(\s*):(?<name>\w+),(\s*)(?<position>\d+)/
+     # now we also need to find out which class it contains and the protobuf definitions in it.
+     # We'll unfortunately need that later so that we can create nested objects.
+     begin
+       class_name = ""
+       type = ""
+       field_name = ""
+       classname_found = false
+       File.readlines(filename).each do |line|
+         if ! (line =~ regex_module_name).nil? && !classname_found # because it might be declared twice in the file
+           class_name << $1
+           class_name << "::"
+         end
+         if ! (line =~ regex_class_name).nil? && !classname_found # because it might be declared twice in the file
+           class_name << $1
+           @pb_metainfo[class_name] = {}
+           classname_found = true
+         end
+         if ! (line =~ regex_pbdefs).nil?
+           type = $1
+           field_name = $2
+           if type =~ /::/
+             @pb_metainfo[class_name][field_name] = type.gsub!(/^:/,"")
+           end
+         end
+       end
+     rescue Exception => e
+       @logger.warn("error 3: unable to read pb definition from file " + filename + ". Reason: #{e.inspect}. Last settings were: class #{class_name} field #{field_name} type #{type}. Backtrace: " + e.backtrace.inspect.to_s)
+     end
+     if class_name.nil?
+       @logger.warn("error 4: class name not found in file " + filename)
+     end
+   end
+
+   def require_pb_path(dir_or_file)
+     f = dir_or_file.end_with?('.rb')
+     begin
+       if f
+         @logger.debug("Including protobuf file: " + dir_or_file)
+         require_with_metadata_analysis dir_or_file
+       else
+         Dir[ dir_or_file + '/*.rb'].each { |file|
+           @logger.debug("Including protobuf file from path: " + file)
+           require_with_metadata_analysis file
+         }
+       end
+     end
+   end
+
+
+ end # class LogStash::Codecs::Zylog
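
Note on the wire format expected by the decoder above: `decode` only accepts frames that start with a 5-byte header, where bytes 2-5 hold the total frame length in big-endian order, and both the header and the final byte of the frame are discarded before parsing. A minimal sender-side sketch that produces such a frame (this helper is hypothetical and not part of the gem; the value of the first header byte is an assumption, since the decoder ignores it):

    # Hypothetical helper: wraps an already-serialized protobuf message in the
    # 5-byte-header frame that LogStash::Codecs::Zylog#decode expects.
    def frame_for_zylog(serialized)
      total = serialized.bytesize + 6               # 5 header bytes + payload + 1 trailing byte
      [0, total].pack('CN') + serialized + "\x00"   # 'C' = marker byte, 'N' = 32-bit big-endian length
    end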
data/logstash-codec-zylog.gemspec ADDED
@@ -0,0 +1,26 @@
+ Gem::Specification.new do |s|
+
+   s.name = 'logstash-codec-zylog'
+   s.version = '1.0.6'
+   s.licenses = ['Apache-2.0']
+   s.summary = "This codec may be used to decode (via inputs) and encode (via outputs) protobuf messages"
+   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
+   s.authors = ["Inga Feick"]
+   s.email = 'inga.feick@trivago.com'
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]
+
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "codec" }
+
+   # Gem dependencies
+   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+   s.add_runtime_dependency 'ruby-protocol-buffers' # used by the compiled version of our protobuf definition.
+   s.add_development_dependency 'logstash-devutils'
+ end
+
data/spec/codecs/zylog_spec.rb ADDED
@@ -0,0 +1,205 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/codecs/zylog"
+ require "logstash/event"
+ require 'protocol_buffers' # https://github.com/codekitchen/ruby-protocol-buffers
+ require "insist"
+
+ describe LogStash::Codecs::Zylog do
+
+
+   context "#decode" do
+
+
+     #### Test case 1: Decode simple protobuf bytes for unicorn ####################################################################
+     let(:plugin_unicorn) { LogStash::Codecs::Zylog.new("class_name" => "Animal::Unicorn", "include_path" => ['spec/helpers/unicorn.pb.rb']) }
+     before do
+       plugin_unicorn.register
+     end
+
+     it "should return an event from protobuf encoded data" do
+
+       data = {:colour => 'rainbow', :horn_length => 18, :last_seen => 1420081471, :has_wings => true}
+       unicorn = Animal::Unicorn.new(data)
+
+       plugin_unicorn.decode(unicorn.serialize_to_string) do |event|
+         expect(event.get("colour") ).to eq(data[:colour] )
+         expect(event.get("horn_length") ).to eq(data[:horn_length] )
+         expect(event.get("last_seen") ).to eq(data[:last_seen] )
+         expect(event.get("has_wings") ).to eq(data[:has_wings] )
+       end
+     end # it
+
+
+
+     #### Test case 2: Decode complex protobuf bytes for human #####################################################################
+
+
+     let(:plugin_human) { LogStash::Codecs::Zylog.new("class_name" => "Animal::Human", "include_path" => ['spec/helpers/human.pb.rb']) }
+     before do
+       plugin_human.register
+     end
+
+     it "should return an event from complex nested protobuf encoded data" do
+
+       data_gm = {:first_name => 'Elisabeth', :last_name => "Oliveoil", :middle_names => ["Maria","Johanna"], :vegetarian=>true}
+       grandmother = Animal::Human.new(data_gm)
+       data_m = {:first_name => 'Annemarie', :last_name => "Smørebrød", :mother => grandmother}
+       mother = Animal::Human.new(data_m)
+       data_f = {:first_name => 'Karl', :middle_names => ["Theodor-Augustin"], :last_name => "Falkenstein"}
+       father = Animal::Human.new(data_f)
+       data = {:first_name => 'Hugo', :middle_names => ["Heinz", "Peter"], :last_name => "Smørebrød",:father => father, :mother => mother}
+       hugo = Animal::Human.new(data)
+
+       plugin_human.decode(hugo.serialize_to_string) do |event|
+         expect(event.get("first_name") ).to eq(data[:first_name] )
+         expect(event.get("middle_names") ).to eq(data[:middle_names] )
+         expect(event.get("last_name") ).to eq(data[:last_name] )
+         expect(event.get("[mother][first_name]") ).to eq(data_m[:first_name] )
+         expect(event.get("[father][first_name]") ).to eq(data_f[:first_name] )
+         expect(event.get("[mother][last_name]") ).to eq(data_m[:last_name] )
+         expect(event.get("[mother][mother][last_name]") ).to eq(data_gm[:last_name] )
+         expect(event.get("[mother][mother][first_name]") ).to eq(data_gm[:first_name] )
+         expect(event.get("[mother][mother][middle_names]") ).to eq(data_gm[:middle_names] )
+         expect(event.get("[mother][mother][vegetarian]") ).to eq(data_gm[:vegetarian] )
+         expect(event.get("[father][last_name]") ).to eq(data_f[:last_name] )
+         expect(event.get("[father][middle_names]") ).to eq(data_f[:middle_names] )
+       end
+     end # it
+
+
+
+     #### Test case 3: Decoder test for enums ######################################################################################
+
+
+     let(:plugin_col) { LogStash::Codecs::Zylog.new("class_name" => "ColourProtoTest", "include_path" => ['spec/helpers/ColourTestcase.pb.rb']) }
+     before do
+       plugin_col.register
+     end
+
+     it "should return an event from protobuf encoded data with enums" do
+
+       data = {:least_liked => ColourProtoTest::Colour::YELLOW, :favourite_colours => \
+         [ColourProtoTest::Colour::BLACK, ColourProtoTest::Colour::BLUE], :booleantest => [true, false, true]}
+       pb = ColourProtoTest.new(data)
+
+       plugin_col.decode(pb.serialize_to_string) do |event|
+         expect(event.get("least_liked") ).to eq(data[:least_liked] )
+         expect(event.get("favourite_colours") ).to eq(data[:favourite_colours] )
+         expect(event.get("booleantest") ).to eq(data[:booleantest] )
+       end
+     end # it
+
+
+   end # context
+
+
+
+   #### Test case 4: Encode simple protobuf bytes for unicorn ####################################################################
+
+   context "#encode" do
+     subject do
+       next LogStash::Codecs::Zylog.new("class_name" => "Animal::UnicornEvent", "include_path" => ['spec/helpers/unicorn_event.pb.rb'])
+     end
+
+     event = LogStash::Event.new("colour" => "pink", "horn_length" => 12, "last_seen" => 1410081999, "has_wings" => true)
+
+     it "should return protobuf encoded data from a simple event" do
+       subject.on_event do |event, data|
+         insist { data.is_a? String }
+         unicorn = Animal::UnicornEvent.parse(data)
+
+         expect(unicorn.colour ).to eq(event.get("colour") )
+         expect(unicorn.horn_length ).to eq(event.get("horn_length") )
+         expect(unicorn.last_seen ).to eq(event.get("last_seen") )
+         expect(unicorn.has_wings ).to eq(event.get("has_wings") )
+
+       end # subject.on_event
+       subject.encode(event)
+     end # it
+   end # context
+
+
+
+   #### Test case 5: encode complex protobuf bytes for human #####################################################################
+
+
+   context "#encode2" do
+     subject do
+       next LogStash::Codecs::Zylog.new("class_name" => "Animal::Human", "include_path" => ['spec/helpers/human.pb.rb'])
+     end
+
+     event = LogStash::Event.new("first_name" => "Jimmy", "middle_names" => ["Bob", "James"], "last_name" => "Doe" \
+       , "mother" => {"first_name" => "Jane", "middle_names" => ["Elizabeth"], "last_name" => "Doe" , "age" => 83, "vegetarian"=> false} \
+       , "father" => {"first_name" => "John", "last_name" => "Doe", "@email" => "character_replacement_test@nothing" })
+
+     it "should return protobuf encoded data from a complex event" do
+
+       subject.on_event do |event, data|
+         insist { data.is_a? String }
+         jimmy = Animal::Human.parse(data)
+
+         expect(jimmy.first_name ).to eq(event.get("first_name") )
+         expect(jimmy.middle_names ).to eq(event.get("middle_names") )
+         expect(jimmy.last_name ).to eq(event.get("last_name") )
+         expect(jimmy.mother.first_name ).to eq(event.get("[mother][first_name]") )
+         expect(jimmy.father.first_name ).to eq(event.get("[father][first_name]") )
+         expect(jimmy.mother.middle_names ).to eq(event.get("[mother][middle_names]") )
+         expect(jimmy.mother.age ).to eq(event.get("[mother][age]") ) # recursion test for values
+         expect(jimmy.mother.vegetarian ).to eq(event.get("[mother][vegetarian]") ) # recursion test for values
+         expect(jimmy.father.last_name ).to eq(event.get("[father][last_name]") )
+         expect(jimmy.father.email ).to eq(event.get("[father][@email]") ) # recursion test for keys
+         expect(jimmy.mother.last_name ).to eq(event.get("[mother][last_name]") )
+
+       end # subject.on_event
+       subject.encode(event)
+     end # it
+   end # context
+
+
+
+   #### Test case 6: encode enums ################################################################################################
+
+
+   context "#encode3" do
+     subject do
+       next LogStash::Codecs::Zylog.new("class_name" => "ColourProtoTest", "include_path" => ['spec/helpers/ColourTestcase.pb.rb'])
+     end
+
+     require 'spec/helpers/ColourTestcase.pb.rb' # otherwise we can't use the colour enums in the next line
+     event = LogStash::Event.new("booleantest" => [false, false, true], "least_liked" => ColourProtoTest::Colour::YELLOW, "favourite_colours" => \
+       [ColourProtoTest::Colour::BLACK, ColourProtoTest::Colour::BLUE] )
+
+     it "should return protobuf encoded data from a complex event with enums" do
+
+       subject.on_event do |event, data|
+         insist { data.is_a? String }
+
+         colpref = ColourProtoTest.parse(data)
+
+         expect(colpref.booleantest ).to eq(event.get("booleantest") )
+         expect(colpref.least_liked ).to eq(event.get("least_liked") )
+         expect(colpref.favourite_colours ).to eq(event.get("favourite_colours") )
+
+
+       end # subject.on_event
+       subject.encode(event)
+     end # it
+   end # context
+
+
+
+ end
data/spec/helpers/ColourTestcase.pb.rb ADDED
@@ -0,0 +1,35 @@
+ #!/usr/bin/env ruby
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+
+ require 'protocol_buffers'
+
+ # forward declarations
+ class ColourProtoTest < ::ProtocolBuffers::Message; end
+
+ class ColourProtoTest < ::ProtocolBuffers::Message
+   # forward declarations
+
+   # enums
+   module Colour
+     include ::ProtocolBuffers::Enum
+
+     set_fully_qualified_name "ColourProtoTest.Colour"
+
+     BLACK = 0
+     BLUE = 1
+     WHITE = 2
+     GREEN = 3
+     RED = 4
+     YELLOW = 5
+     AQUA = 6
+   end
+
+   set_fully_qualified_name "ColourProtoTest"
+
+   repeated ::ColourProtoTest::Colour, :favourite_colours, 1
+   repeated :bool, :booleantest, 2
+   optional ::ColourProtoTest::Colour, :least_liked, 3
+   optional :string, :timestamp, 4
+   optional :string, :version, 5
+ end
+
data/spec/helpers/ColourTestcase.proto ADDED
@@ -0,0 +1,24 @@
+
+
+ message ColourProtoTest {
+
+   enum Colour {
+     BLACK = 0;
+     BLUE = 1;
+     WHITE = 2;
+     GREEN = 3;
+     RED = 4;
+     YELLOW = 5;
+     AQUA = 6;
+
+   }
+
+   // most liked colours; test enums in arrays.
+   repeated Colour favourite_colours = 1;
+
+   // why not also test booleans in arrays while we're at it.
+   repeated bool booleantest = 2;
+
+   // least liked colour
+   optional Colour least_liked = 3;
+ }
data/spec/helpers/human.pb.rb ADDED
@@ -0,0 +1,26 @@
+ #!/usr/bin/env ruby
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+
+ require 'protocol_buffers'
+
+ module Animal
+   # forward declarations
+   class Human < ::ProtocolBuffers::Message; end
+
+   class Human < ::ProtocolBuffers::Message
+     set_fully_qualified_name "animal.Human"
+
+     optional :string, :first_name, 1
+     repeated :string, :middle_names, 2
+     optional :string, :last_name, 3
+     optional ::Animal::Human, :mother, 4
+     optional ::Animal::Human, :father, 5
+     optional :string, :path, 6
+     optional :string, :version, 7
+     optional :string, :timestamp, 8
+     optional :string, :email, 9
+     optional :bool, :vegetarian, 10
+     optional :int32, :age, 11
+   end
+
+ end
data/spec/helpers/unicorn.pb.rb ADDED
@@ -0,0 +1,19 @@
+ #!/usr/bin/env ruby
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+
+ require 'protocol_buffers'
+
+ module Animal
+   # forward declarations
+   class Unicorn < ::ProtocolBuffers::Message; end
+
+   class Unicorn < ::ProtocolBuffers::Message
+     set_fully_qualified_name "animal.Unicorn"
+
+     optional :string, :colour, 1
+     optional :int32, :horn_length, 2
+     optional :int32, :last_seen, 3
+     optional :bool, :has_wings, 4
+   end
+
+ end
data/spec/helpers/unicorn_event.pb.rb ADDED
@@ -0,0 +1,24 @@
+ #!/usr/bin/env ruby
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+
+ require 'protocol_buffers'
+
+ module Animal
+   # forward declarations
+   class UnicornEvent < ::ProtocolBuffers::Message; end
+
+   class UnicornEvent < ::ProtocolBuffers::Message
+     set_fully_qualified_name "animal.UnicornEvent"
+
+     optional :string, :colour, 1
+     optional :int32, :horn_length, 2
+     optional :int32, :last_seen, 3
+     optional :string, :timestamp, 4
+     optional :string, :host, 5
+     optional :string, :path, 6
+     optional :string, :version, 7
+     optional :bool, :has_wings, 8
+
+   end
+
+ end
metadata ADDED
@@ -0,0 +1,118 @@
+ --- !ruby/object:Gem::Specification
+ name: logstash-codec-zylog
+ version: !ruby/object:Gem::Version
+   version: 1.0.6
+ platform: ruby
+ authors:
+ - Inga Feick
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2018-07-03 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+   name: logstash-core-plugin-api
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: ruby-protocol-buffers
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: logstash-devutils
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: This gem is a logstash plugin required to be installed on top of the
+   Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not
+   a stand-alone program
+ email: inga.feick@trivago.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - CHANGELOG.md
+ - CONTRIBUTORS
+ - DEVELOPER.md
+ - Gemfile
+ - LICENSE
+ - NOTICE.TXT
+ - README.md
+ - docs/index.asciidoc
+ - lib/logstash/codecs/zylog.rb
+ - logstash-codec-zylog.gemspec
+ - spec/codecs/zylog_spec.rb
+ - spec/helpers/ColourTestcase.pb.rb
+ - spec/helpers/ColourTestcase.proto
+ - spec/helpers/human.pb.rb
+ - spec/helpers/unicorn.pb.rb
+ - spec/helpers/unicorn_event.pb.rb
+ homepage:
+ licenses:
+ - Apache-2.0
+ metadata:
+   logstash_plugin: 'true'
+   logstash_group: codec
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.6.14.1
+ signing_key:
+ specification_version: 4
+ summary: This codec may be used to decode (via inputs) and encode (via outputs) protobuf
+   messages
+ test_files:
+ - spec/codecs/zylog_spec.rb
+ - spec/helpers/ColourTestcase.pb.rb
+ - spec/helpers/ColourTestcase.proto
+ - spec/helpers/human.pb.rb
+ - spec/helpers/unicorn.pb.rb
+ - spec/helpers/unicorn_event.pb.rb