logstash-codec-avro_schema_registry 0.9.1 → 1.0.0

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (57)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +4 -0
  3. data/DEVELOPER.md +1 -1
  4. data/README.md +68 -70
  5. data/lib/logstash/codecs/avro_schema_registry.rb +139 -12
  6. data/logstash-codec-avro_schema_registry.gemspec +4 -4
  7. data/spec/codecs/avro_schema_registry_spec.rb +3 -3
  8. data/vendor/cache/avro-1.8.2.gem +0 -0
  9. data/vendor/cache/chronic_duration-0.10.6.gem +0 -0
  10. data/vendor/cache/clamp-0.6.5.gem +0 -0
  11. data/vendor/cache/coderay-1.1.1.gem +0 -0
  12. data/vendor/cache/concurrent-ruby-1.0.0-java.gem +0 -0
  13. data/vendor/cache/diff-lcs-1.3.gem +0 -0
  14. data/vendor/cache/ffi-1.9.18-java.gem +0 -0
  15. data/vendor/cache/filesize-0.0.4.gem +0 -0
  16. data/vendor/cache/fivemat-1.3.5.gem +0 -0
  17. data/vendor/cache/gem_publisher-1.5.0.gem +0 -0
  18. data/vendor/cache/gems-0.8.3.gem +0 -0
  19. data/vendor/cache/i18n-0.6.9.gem +0 -0
  20. data/vendor/cache/insist-1.0.0.gem +0 -0
  21. data/vendor/cache/jar-dependencies-0.3.11.gem +0 -0
  22. data/vendor/cache/jrjackson-0.4.2-java.gem +0 -0
  23. data/vendor/cache/jrmonitor-0.4.2.gem +0 -0
  24. data/vendor/cache/jruby-openssl-0.9.16-java.gem +0 -0
  25. data/vendor/cache/kramdown-1.14.0.gem +0 -0
  26. data/vendor/cache/logstash-codec-line-3.0.3.gem +0 -0
  27. data/vendor/cache/logstash-core-5.4.0-java.gem +0 -0
  28. data/vendor/cache/logstash-core-plugin-api-2.1.24-java.gem +0 -0
  29. data/vendor/cache/logstash-devutils-1.3.3-java.gem +0 -0
  30. data/vendor/cache/method_source-0.8.2.gem +0 -0
  31. data/vendor/cache/minitar-0.5.4.gem +0 -0
  32. data/vendor/cache/multi_json-1.12.1.gem +0 -0
  33. data/vendor/cache/numerizer-0.1.1.gem +0 -0
  34. data/vendor/cache/polyglot-0.3.5.gem +0 -0
  35. data/vendor/cache/pry-0.10.4-java.gem +0 -0
  36. data/vendor/cache/puma-2.16.0-java.gem +0 -0
  37. data/vendor/cache/rack-1.6.8.gem +0 -0
  38. data/vendor/cache/rack-protection-1.5.3.gem +0 -0
  39. data/vendor/cache/rake-12.0.0.gem +0 -0
  40. data/vendor/cache/rspec-3.6.0.gem +0 -0
  41. data/vendor/cache/rspec-core-3.6.0.gem +0 -0
  42. data/vendor/cache/rspec-expectations-3.6.0.gem +0 -0
  43. data/vendor/cache/rspec-mocks-3.6.0.gem +0 -0
  44. data/vendor/cache/rspec-support-3.6.0.gem +0 -0
  45. data/vendor/cache/rspec-wait-0.0.9.gem +0 -0
  46. data/vendor/cache/ruby-maven-3.3.12.gem +0 -0
  47. data/vendor/cache/ruby-maven-libs-3.3.9.gem +0 -0
  48. data/vendor/cache/rubyzip-1.1.7.gem +0 -0
  49. data/vendor/cache/schema_registry-0.0.4.gem +0 -0
  50. data/vendor/cache/sinatra-1.4.8.gem +0 -0
  51. data/vendor/cache/slop-3.6.0.gem +0 -0
  52. data/vendor/cache/spoon-0.0.6.gem +0 -0
  53. data/vendor/cache/stud-0.0.22.gem +0 -0
  54. data/vendor/cache/thread_safe-0.3.6-java.gem +0 -0
  55. data/vendor/cache/tilt-2.0.7.gem +0 -0
  56. data/vendor/cache/treetop-1.4.15.gem +0 -0
  57. metadata +76 -21
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: e3deca295d01556563ba3b141432f7c99e6bfd86
-  data.tar.gz: 15b37b40e9b9a347b5a88bd794aec5fc390d5320
+  metadata.gz: 4b52cb641d3dcc1ab0e81aac04e073a609cf55e4
+  data.tar.gz: 26d12ac1f854fb677f7f8ecb23da35967e88a3b5
 SHA512:
-  metadata.gz: ae5dd705ee5835867fa040b37fed57d342bc6a58e5845b07b98349c3b524cd168863c83683fded460c366b3d12beb4c796af8ee7d0c147b84fc50417722e1bb0
-  data.tar.gz: 21828534180e3dc2527fd794fb6b3622b2f724d4cbfdc8dcbfc4e2f1b0eab682fb57315c24264bf617e65fe42ea7632b7dd7bdb70dc75191779e17f0180b2c42
+  metadata.gz: 3307ec8a0891959b5f1e9f6860463de2b108b70f9f0a9103106bfc58e8e81b8fc7fd0a463524c88f19e684e28cf209dc9a1f3c833218be252beb8c4dc7ae3a9a
+  data.tar.gz: d49ca291bc9b1e4ff75d3bbcd9feec6657d22cdd596e4523d409acda8c6b0d5af7d44736aa13f5f8c08fbfc539ea9b34c5a5e2f1df8dd33014f19f03856283ad
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+## 1.0.0
+  - Add encode capability
+  - Try base64 decode/encode on input/output
+
 ## 0.9.1
   - Upgrade to logstash 5.1
 
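The "try base64 decode/encode" entry refers to the new fallback in the decode path: input is first treated as strict base64 and, failing that, as raw bytes. A minimal standalone sketch of that pattern (`data` here stands in for a hypothetical incoming payload):

```ruby
require "base64"
require "stringio"

data = "plainly not base64!"   # hypothetical incoming payload

# Try strict base64 first; fall back to the raw bytes if it is not valid.
datum = begin
          StringIO.new(Base64.strict_decode64(data))
        rescue ArgumentError
          StringIO.new(data)
        end
```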
data/DEVELOPER.md CHANGED
@@ -5,7 +5,7 @@ How to Install
 --------------
 
 ```
-bin/plugin install logstash-avro_schema_registry-codec
+bin/plugin install logstash-codec-avro_schema_registry
 ```
 
 How to Use
data/README.md CHANGED
@@ -1,86 +1,84 @@
-# Logstash Plugin
+# Logstash Codec - Avro Schema Registry
 
-This is a plugin for [Logstash](https://github.com/elastic/logstash).
+This plugin is used to serialize Logstash events as
+Avro datums, as well as deserializing Avro datums into
+Logstash events.
 
-It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way.
+Decode/encode Avro records as Logstash events using the
+associated Avro schema from a Confluent schema registry.
+(https://github.com/confluentinc/schema-registry)
 
-## Documentation
+## Decoding (input)
 
-Logstash provides infrastructure to automatically generate documentation for this plugin. We use the asciidoc format to write documentation so any comments in the source code will be first converted into asciidoc and then into html. All plugin documentation are placed under one [central location](http://www.elastic.co/guide/en/logstash/current/).
+When this codec is used to decode the input, you may pass the following options:
+- ``endpoint`` - always required.
+- ``username`` - optional.
+- ``password`` - optional.
 
-- For formatting code or config example, you can use the asciidoc `[source,ruby]` directive
-- For more asciidoc formatting tips, see the excellent reference here https://github.com/elastic/docs#asciidoc-guide
+If the input stream is binary encoded, you should use the ``ByteArrayDeserializer``
+in the Kafka input config.
 
-## Need Help?
+## Encoding (output)
 
-Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/logstash discussion forum.
+This codec uses the Confluent schema registry to register a schema and
+encode the data in Avro using schema_id lookups.
 
-## Developing
+When this codec is used to encode, you may pass the following options:
+- ``endpoint`` - always required.
+- ``username`` - optional.
+- ``password`` - optional.
+- ``schema_id`` - when provided, no other options are required.
+- ``subject_name`` - required when there is no ``schema_id``.
+- ``schema_version`` - when provided, the schema will be looked up in the registry.
+- ``schema_uri`` - when provided, JSON schema is loaded from URL or file.
+- ``schema_string`` - required when there is no ``schema_id``, ``schema_version`` or ``schema_uri``
+- ``check_compatibility`` - will check schema compatibility before encoding.
+- ``register_schema`` - will register the JSON schema if it does not exist.
+- ``binary_encoded`` - will output the encoded event as a ByteArray.
+  Requires the ``ByteArraySerializer`` to be set in the Kafka output config.
 
-### 1. Plugin Developement and Testing
+## Usage
 
-#### Code
-- To get started, you'll need JRuby with the Bundler gem installed.
+### Basic usage with Kafka input and output.
 
-- Create a new plugin or clone and existing from the GitHub [logstash-plugins](https://github.com/logstash-plugins) organization. We also provide [example plugins](https://github.com/logstash-plugins?query=example).
-
-- Install dependencies
-```sh
-bundle install
-```
-
-#### Test
-
-- Update your dependencies
-
-```sh
-bundle install
-```
-
-- Run tests
-
-```sh
-bundle exec rspec
-```
-
-### 2. Running your unpublished Plugin in Logstash
-
-#### 2.1 Run in a local Logstash clone
-
-- Edit Logstash `Gemfile` and add the local plugin path, for example:
-```ruby
-gem "logstash-codec-awesome", :path => "/your/local/logstash-codec-awesome"
-```
-- Install plugin
-```sh
-bin/logstash-plugin install --no-verify
 ```
-- Run Logstash with your plugin
-```sh
-bin/logstash -e 'codec {awesome {}}'
+input {
+  kafka {
+    ...
+    codec => avro_schema_registry {
+      endpoint => "http://schemas.example.com"
+    }
+    value_deserializer_class => "org.apache.kafka.common.serialization.ByteArrayDeserializer"
+  }
+}
+filter {
+  ...
+}
+output {
+  kafka {
+    ...
+    codec => avro_schema_registry {
+      endpoint => "http://schemas.example.com"
+      subject_name => "my_kafka_subject_name"
+      schema_uri => "/app/my_kafka_subject.avsc"
+      register_schema => true
+    }
+  }
+}
 ```
-At this point any modifications to the plugin code will be applied to this local Logstash setup. After modifying the plugin, simply rerun Logstash.
 
-#### 2.2 Run in an installed Logstash
+### Binary encoded Kafka output
 
-You can use the same **2.1** method to run your plugin in an installed Logstash by editing its `Gemfile` and pointing the `:path` to your local plugin development directory or you can build the gem and install it using:
-
-- Build your plugin gem
-```sh
-gem build logstash-codec-awesome.gemspec
-```
-- Install the plugin from the Logstash home
-```sh
-bin/logstash-plugin install /your/local/plugin/logstash-codec-awesome.gem
 ```
-- Start Logstash and proceed to test the plugin
-
-## Contributing
-
-All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin.
-
-Programming is not a required skill. Whatever you've seen about open source and maintainers or community members saying "send patches or die" - you will not see that here.
-
-It is more important to the community that you are able to contribute.
-
-For more information about contributing, see the [CONTRIBUTING](https://github.com/elastic/logstash/blob/master/CONTRIBUTING.md) file.
+output {
+  kafka {
+    ...
+    codec => avro_schema_registry {
+      endpoint => "http://schemas.example.com"
+      schema_id => 47
+      binary_encoded => true
+    }
+    value_serializer => "org.apache.kafka.common.serialization.ByteArraySerializer"
+  }
+}
+```
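Both directions use the Confluent wire format: one magic byte (`0`), a 4-byte big-endian schema ID, then the Avro-encoded body. A minimal sketch of reading that framing (the message bytes below are fabricated for illustration):

```ruby
require "stringio"

# Fabricated framed message: magic byte 0, schema ID 47, then the Avro body
# ("\x0ahello" is the Avro binary encoding of the string "hello").
message_bytes = [0].pack("c") + [47].pack("I>") + "\x0ahello".b

framed = StringIO.new(message_bytes)
magic_byte, schema_id = framed.read(5).unpack("cI>")
raise "missing magic byte" unless magic_byte == 0
schema_id        # => 47; the rest of `framed` is the Avro datum
```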
data/lib/logstash/codecs/avro_schema_registry.rb CHANGED
@@ -1,5 +1,6 @@
 # encoding: utf-8
 require "avro"
+require "open-uri"
 require "schema_registry"
 require "schema_registry/client"
 require "logstash/codecs/base"
@@ -7,73 +8,185 @@ require "logstash/namespace"
 require "logstash/event"
 require "logstash/timestamp"
 require "logstash/util"
+require "base64"
 
 MAGIC_BYTE = 0
 
-# Read serialized Avro records as Logstash events
+# == Logstash Codec - Avro Schema Registry
 #
 # This plugin is used to serialize Logstash events as
 # Avro datums, as well as deserializing Avro datums into
 # Logstash events.
 #
-# ==== Encoding
+# Decode/encode Avro records as Logstash events using the
+# associated Avro schema from a Confluent schema registry.
+# (https://github.com/confluentinc/schema-registry)
 #
-# This codec currently does not encode. This might be added later.
 #
+# ==== Decoding (input)
 #
-# ==== Decoding
+# When this codec is used to decode the input, you may pass the following options:
+# - ``endpoint`` - always required.
+# - ``username`` - optional.
+# - ``password`` - optional.
 #
-# This codec is for deserializing individual Avro records. It looks up
-# the associated avro schema from a Confluent schema registry.
-# (https://github.com/confluentinc/schema-registry)
+# If the input stream is binary encoded, you should use the ``ByteArrayDeserializer``
+# in the Kafka input config.
+#
+# ==== Encoding (output)
 #
+# This codec uses the Confluent schema registry to register a schema and
+# encode the data in Avro using schema_id lookups.
+#
+# When this codec is used to encode, you may pass the following options:
+# - ``endpoint`` - always required.
+# - ``username`` - optional.
+# - ``password`` - optional.
+# - ``schema_id`` - when provided, no other options are required.
+# - ``subject_name`` - required when there is no ``schema_id``.
+# - ``schema_version`` - when provided, the schema will be looked up in the registry.
+# - ``schema_uri`` - when provided, JSON schema is loaded from URL or file.
+# - ``schema_string`` - required when there is no ``schema_id``, ``schema_version`` or ``schema_uri``
+# - ``check_compatibility`` - will check schema compatibility before encoding.
+# - ``register_schema`` - will register the JSON schema if it does not exist.
+# - ``binary_encoded`` - will output the encoded event as a ByteArray.
+#   Requires the ``ByteArraySerializer`` to be set in the Kafka output config.
 #
 # ==== Usage
-# Example usage with Kafka input.
+# Example usage with Kafka input and output.
 #
 # [source,ruby]
 # ----------------------------------
 # input {
 #   kafka {
+#     ...
 #     codec => avro_schema_registry {
 #       endpoint => "http://schemas.example.com"
 #     }
+#     value_deserializer_class => "org.apache.kafka.common.serialization.ByteArrayDeserializer"
 #   }
 # }
 # filter {
 #   ...
 # }
 # output {
-#   ...
+#   kafka {
+#     ...
+#     codec => avro_schema_registry {
+#       endpoint => "http://schemas.example.com"
+#       subject_name => "my_kafka_subject_name"
+#       schema_uri => "/app/my_kafka_subject.avsc"
+#       register_schema => true
+#     }
+#   }
+# }
+# ----------------------------------
+#
+# Binary encoded Kafka output
+#
+# [source,ruby]
+# ----------------------------------
+# output {
+#   kafka {
+#     ...
+#     codec => avro_schema_registry {
+#       endpoint => "http://schemas.example.com"
+#       schema_id => 47
+#       binary_encoded => true
+#     }
+#     value_serializer => "org.apache.kafka.common.serialization.ByteArraySerializer"
+#   }
 # }
 # ----------------------------------
+
 class LogStash::Codecs::AvroSchemaRegistry < LogStash::Codecs::Base
   config_name "avro_schema_registry"
+
+  EXCLUDE_ALWAYS = [ "@timestamp", "@version" ]
 
   # schema registry endpoint and credentials
   config :endpoint, :validate => :string, :required => true
   config :username, :validate => :string, :default => nil
   config :password, :validate => :string, :default => nil
 
+  config :schema_id, :validate => :number, :default => nil
+  config :subject_name, :validate => :string, :default => nil
+  config :schema_version, :validate => :number, :default => nil
+  config :schema_uri, :validate => :string, :default => nil
+  config :schema_string, :validate => :string, :default => nil
+  config :check_compatibility, :validate => :boolean, :default => false
+  config :register_schema, :validate => :boolean, :default => false
+  config :binary_encoded, :validate => :boolean, :default => false
+
   public
   def register
     @client = SchemaRegistry::Client.new(endpoint, username, password)
     @schemas = Hash.new
+    @write_schema_id = nil
   end
 
   def get_schema(schema_id)
-    if !@schemas.has_key?(schema_id)
+    unless @schemas.has_key?(schema_id)
       @schemas[schema_id] = Avro::Schema.parse(@client.schema(schema_id))
     end
     @schemas[schema_id]
   end
 
+  def load_schema_json()
+    if @schema_uri
+      open(@schema_uri).read
+    elsif @schema_string
+      @schema_string
+    else
+      @logger.error('you must supply a schema_uri or schema_string in the config')
+    end
+  end
+
+  def get_write_schema_id()
+    # If schema id is passed, just use that
+    if @schema_id
+      @schema_id
+
+    else
+      # subject_name is required
+      if @subject_name == nil
+        @logger.error('requires a subject_name')
+      else
+        subject = @client.subject(@subject_name)
+
+        # If schema_version, load from subject API
+        if @schema_version != nil
+          schema = subject.version(@schema_version)
+
+        # Otherwise, load schema json and check with registry
+        else
+          schema_json = load_schema_json
+
+          # If not compatible, raise error
+          if @check_compatibility
+            unless subject.compatible?(schema_json)
+              @logger.error('the schema json is not compatible with the subject. you should fix your schema or change the compatibility level.')
+            end
+          end
+
+          if @register_schema
+            subject.register_schema(schema_json) unless subject.schema_registered?(schema_json)
+          end
+
+          schema = subject.verify_schema(schema_json)
+        end
+
+        schema.id
+      end
+    end
  end
+
   public
   def decode(data)
     if data.length < 5
       @logger.error('message is too small to decode')
     else
-      datum = StringIO.new(data)
+      datum = StringIO.new(Base64.strict_decode64(data)) rescue StringIO.new(data)
       magic_byte, schema_id = datum.read(5).unpack("cI>")
       if magic_byte != MAGIC_BYTE
         @logger.error('message does not start with magic byte')
@@ -88,6 +201,20 @@ class LogStash::Codecs::AvroSchemaRegistry < LogStash::Codecs::Base
 
   public
   def encode(event)
-    @logger.error('Encode has not been implemented for this codec')
+    @write_schema_id ||= get_write_schema_id
+    schema = get_schema(@write_schema_id)
+    dw = Avro::IO::DatumWriter.new(schema)
+    buffer = StringIO.new
+    buffer.write(MAGIC_BYTE.chr)
+    buffer.write([@write_schema_id].pack("I>"))
+    encoder = Avro::IO::BinaryEncoder.new(buffer)
+    eh = event.to_hash
+    eh.delete_if { |key, _| EXCLUDE_ALWAYS.include? key }
+    dw.write(eh, encoder)
+    if @binary_encoded
+      @on_event.call(event, buffer.string.to_java_bytes)
+    else
+      @on_event.call(event, Base64.strict_encode64(buffer.string))
+    end
   end
 end
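For context, the new `encode` and the existing `decode` are inverses of each other around the avro gem. A round-trip sketch with no registry involved, under the assumption that schema ID 47 maps to the hypothetical record schema below:

```ruby
require "avro"
require "base64"
require "stringio"

# Hypothetical record schema; assume the registry knows it as schema ID 47.
schema = Avro::Schema.parse('{"type":"record","name":"Event","fields":' \
  '[{"name":"foo","type":"string"},{"name":"bar","type":"long"}]}')

# Encode roughly as encode() does (default, non-binary_encoded output).
buffer = StringIO.new
buffer.write(0.chr)                  # MAGIC_BYTE
buffer.write([47].pack("I>"))        # 4-byte big-endian schema ID
Avro::IO::DatumWriter.new(schema).write({ "foo" => "hello", "bar" => 10 },
                                        Avro::IO::BinaryEncoder.new(buffer))
payload = Base64.strict_encode64(buffer.string)

# Decode as decode() does: strip the 5-byte header, then read the datum.
datum = StringIO.new(Base64.strict_decode64(payload))
magic_byte, schema_id = datum.read(5).unpack("cI>")
event = Avro::IO::DatumReader.new(schema).read(Avro::IO::BinaryDecoder.new(datum))
puts "schema #{schema_id}: #{event.inspect}"   # schema 47: {"foo"=>"hello", "bar"=>10}
```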
data/logstash-codec-avro_schema_registry.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-codec-avro_schema_registry'
-  s.version = '0.9.1'
+  s.version = '1.0.0'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Encode and decode avro formatted data from a Confluent schema registry"
   s.description = "Encode and decode avro formatted data from a Confluent schema registry"
@@ -18,9 +18,9 @@ Gem::Specification.new do |s|
   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "codec" }
 
   # Gem dependencies
-  s.add_runtime_dependency 'logstash-core-plugin-api', "~> 2.0"
-  s.add_runtime_dependency 'logstash-codec-line'
+  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+  s.add_runtime_dependency "logstash-codec-line"
   s.add_runtime_dependency "avro" #(Apache 2.0 license)
   s.add_runtime_dependency "schema_registry" #(MIT license)
-  s.add_development_dependency 'logstash-devutils'
+  s.add_development_dependency "logstash-devutils"
 end
data/spec/codecs/avro_schema_registry_spec.rb CHANGED
@@ -1,7 +1,7 @@
 # encoding: utf-8
 require "logstash/devutils/rspec/spec_helper"
 require 'avro'
-require 'logstash/codecs/avro'
+require 'logstash/codecs/avro_schema_registry'
 require 'logstash/event'
 
 describe LogStash::Codecs::AvroSchemaRegistry do
@@ -9,8 +9,8 @@ describe LogStash::Codecs::AvroSchemaRegistry do
   let (:test_event) { LogStash::Event.new({"foo" => "hello", "bar" => 10}) }
 
   subject do
-    allow_any_instance_of(LogStash::Codecs::AvroSchemaRegistry).to \
-      receive(:open_and_read).and_return(avro_config['schema_uri'])
+    # allow_any_instance_of(LogStash::Codecs::AvroSchemaRegistry).to \
+    #   receive(:open_and_read).and_return(avro_config['schema_uri'])
     next LogStash::Codecs::AvroSchemaRegistry.new(avro_config)
   end
 
data/vendor/cache/*.gem CHANGED (binary files; no textual diff shown)
metadata CHANGED
@@ -1,80 +1,86 @@
 --- !ruby/object:Gem::Specification
 name: logstash-codec-avro_schema_registry
 version: !ruby/object:Gem::Version
-  version: 0.9.1
+  version: 1.0.0
 platform: ruby
 authors:
 - RevPoint Media
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-05-02 00:00:00.000000000 Z
+date: 2017-09-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name: logstash-core-plugin-api
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '2.0'
-  type: :runtime
+        version: '1.60'
+    - - "<="
+      - !ruby/object:Gem::Version
+        version: '2.99'
+  name: logstash-core-plugin-api
   prerelease: false
+  type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
       - !ruby/object:Gem::Version
-        version: '2.0'
+        version: '2.99'
 - !ruby/object:Gem::Dependency
-  name: logstash-codec-line
   requirement: !ruby/object:Gem::Requirement
     requirements:
    - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  type: :runtime
+  name: logstash-codec-line
   prerelease: false
+  type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name: avro
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  type: :runtime
+  name: avro
   prerelease: false
+  type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name: schema_registry
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  type: :runtime
+  name: schema_registry
   prerelease: false
+  type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name: logstash-devutils
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  type: :development
+  name: logstash-devutils
   prerelease: false
+  type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -95,13 +101,62 @@ files:
 - lib/logstash/codecs/avro_schema_registry.rb
 - logstash-codec-avro_schema_registry.gemspec
 - spec/codecs/avro_schema_registry_spec.rb
+- vendor/cache/avro-1.8.2.gem
+- vendor/cache/chronic_duration-0.10.6.gem
+- vendor/cache/clamp-0.6.5.gem
+- vendor/cache/coderay-1.1.1.gem
+- vendor/cache/concurrent-ruby-1.0.0-java.gem
+- vendor/cache/diff-lcs-1.3.gem
+- vendor/cache/ffi-1.9.18-java.gem
+- vendor/cache/filesize-0.0.4.gem
+- vendor/cache/fivemat-1.3.5.gem
+- vendor/cache/gem_publisher-1.5.0.gem
+- vendor/cache/gems-0.8.3.gem
+- vendor/cache/i18n-0.6.9.gem
+- vendor/cache/insist-1.0.0.gem
+- vendor/cache/jar-dependencies-0.3.11.gem
+- vendor/cache/jrjackson-0.4.2-java.gem
+- vendor/cache/jrmonitor-0.4.2.gem
+- vendor/cache/jruby-openssl-0.9.16-java.gem
+- vendor/cache/kramdown-1.14.0.gem
+- vendor/cache/logstash-codec-line-3.0.3.gem
+- vendor/cache/logstash-core-5.4.0-java.gem
+- vendor/cache/logstash-core-plugin-api-2.1.24-java.gem
+- vendor/cache/logstash-devutils-1.3.3-java.gem
+- vendor/cache/method_source-0.8.2.gem
+- vendor/cache/minitar-0.5.4.gem
+- vendor/cache/multi_json-1.12.1.gem
+- vendor/cache/numerizer-0.1.1.gem
+- vendor/cache/polyglot-0.3.5.gem
+- vendor/cache/pry-0.10.4-java.gem
+- vendor/cache/puma-2.16.0-java.gem
+- vendor/cache/rack-1.6.8.gem
+- vendor/cache/rack-protection-1.5.3.gem
+- vendor/cache/rake-12.0.0.gem
+- vendor/cache/rspec-3.6.0.gem
+- vendor/cache/rspec-core-3.6.0.gem
+- vendor/cache/rspec-expectations-3.6.0.gem
+- vendor/cache/rspec-mocks-3.6.0.gem
+- vendor/cache/rspec-support-3.6.0.gem
+- vendor/cache/rspec-wait-0.0.9.gem
+- vendor/cache/ruby-maven-3.3.12.gem
+- vendor/cache/ruby-maven-libs-3.3.9.gem
+- vendor/cache/rubyzip-1.1.7.gem
+- vendor/cache/schema_registry-0.0.4.gem
+- vendor/cache/sinatra-1.4.8.gem
+- vendor/cache/slop-3.6.0.gem
+- vendor/cache/spoon-0.0.6.gem
+- vendor/cache/stud-0.0.22.gem
+- vendor/cache/thread_safe-0.3.6-java.gem
+- vendor/cache/tilt-2.0.7.gem
+- vendor/cache/treetop-1.4.15.gem
 homepage: https://github.com/revpoint/logstash-codec-avro_schema_registry
 licenses:
 - Apache License (2.0)
 metadata:
   logstash_plugin: 'true'
   logstash_group: codec
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -116,9 +171,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.4.5
-signing_key:
+rubyforge_project:
+rubygems_version: 2.4.8
+signing_key:
 specification_version: 4
 summary: Encode and decode avro formatted data from a Confluent schema registry
 test_files: