logstash-codec-avro_schema_registry 1.1.0 → 1.1.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 96fba808f06019f95c4e7795b896f3d1cb583f02
-  data.tar.gz: d4dcf8c54b94c97bb591c26b6d07cc895063c5ef
+  metadata.gz: df24e437ee21154def83e8ad66bcbc70e58e3ecc
+  data.tar.gz: c7e44473770cecf6165bb38788872c45f33aa4e6
 SHA512:
-  metadata.gz: 942512a18cd5811e85621d71b7adf4a3734b1334e26ae4f8501c8a96028c57712674498c81de6ee5fcbae201c6d69a868526a643ddcf911d858e94be7f10ba3a
-  data.tar.gz: d1f85086d6d0ff6e23c8306164add731bae9ae4a2cfded29c5698a11dab6c24899c3fc19ee0613e648f9ce95c3bb64958261c4b1e292e8693a1de7683975aa13
+  metadata.gz: 56267b65a7295a4766ff67658ca2aa799609c5cfc7675bd64ec9d98ddb8848ac1eb7da8604bff67aa93cea1cc36fb435429c2addcbc77d9f3bf38e9ded7153e6
+  data.tar.gz: 210dfaf4d6eb1315d91727fb45fe1599d8f9537f0d018e0d2538a9bbae3938dab6e29da8a234dee04cbcdf0a95d137e4552ad8f4d037fbcc8cd4973d7ba0aab6
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+## 1.1.1
+- Add tag_on_failure for decode
+
 ## 1.1.0
 - Add support for client side TLS auth to schema registry
 
data/README.md CHANGED
@@ -1,6 +1,6 @@
 # Logstash Codec - Avro Schema Registry
 
-### v1.1.0
+### v1.1.1
 
 This plugin is used to serialize Logstash events as
 Avro datums, as well as deserializing Avro datums into
@@ -22,6 +22,7 @@ When this codec is used to decode the input, you may pass the following options:
 - ``endpoint`` - always required.
 - ``username`` - optional.
 - ``password`` - optional.
+- ``tag_on_failure`` - tag events with ``_avroparsefailure`` when decode fails
 
 If the input stream is binary encoded, you should use the ``ByteArrayDeserializer``
 in the Kafka input config.
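
A hypothetical spec-style sketch of the new option follows; the endpoint URL and input bytes are placeholders, and it assumes logstash-core and this plugin are on the load path. Option names match the README list above.

```ruby
require "logstash/codecs/avro_schema_registry"

codec = LogStash::Codecs::AvroSchemaRegistry.new(
  "endpoint"       => "http://schemas.example.com:8081",  # placeholder registry URL
  "tag_on_failure" => true
)
codec.register

# A payload that cannot be decoded (made-up bytes here) is yielded as a
# tagged event instead of raising out of the pipeline.
codec.decode("\x00\x00\x00\x00\x99garbage") do |event|
  event.get("tags")     # => ["_avroparsefailure"]
  event.get("message")  # => the raw input bytes
end
```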
data/lib/logstash/codecs/avro_schema_registry.rb CHANGED
@@ -29,6 +29,7 @@ MAGIC_BYTE = 0
 # - ``endpoint`` - always required.
 # - ``username`` - optional.
 # - ``password`` - optional.
+# - ``tag_on_failure`` - tag events with ``_avroparsefailure`` when decode fails
 #
 # If the input stream is binary encoded, you should use the ``ByteArrayDeserializer``
 # in the Kafka input config.
@@ -127,6 +128,9 @@ class LogStash::Codecs::AvroSchemaRegistry < LogStash::Codecs::Base
   config :register_schema, :validate => :boolean, :default => false
   config :binary_encoded, :validate => :boolean, :default => true
 
+  # tag events with `_avroparsefailure` when decode fails
+  config :tag_on_failure, :validate => :boolean, :default => false
+
   config :client_certificate, :validate => :string, :default => nil
   config :client_key, :validate => :string, :default => nil
   config :ca_certificate, :validate => :string, :default => nil
@@ -134,7 +138,6 @@ class LogStash::Codecs::AvroSchemaRegistry < LogStash::Codecs::Base
 
   public
   def register
-
     @client = if client_certificate != nil
       SchemaRegistry::Client.new(endpoint, username, password, SchemaRegistry::Client.connection_options(
         client_certificate: client_certificate,
@@ -200,12 +203,18 @@ class LogStash::Codecs::AvroSchemaRegistry < LogStash::Codecs::Base
 
           schema = subject.verify_schema(schema_json)
         end
-
+        # Return schema id
         schema.id
       end
     end
   end
 
+  def clean_event(event)
+    event_hash = event.to_hash
+    event_hash.delete_if { |key, _| EXCLUDE_ALWAYS.include? key }
+    event_hash
+  end
+
   public
   def decode(data)
     if data.length < 5
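
The extracted clean_event helper centralizes the field filtering that encode previously did inline (see the encode hunk further down). A standalone sketch of the idea; EXCLUDE_ALWAYS is defined elsewhere in the plugin, so the keys below are assumed stand-ins for Logstash bookkeeping fields, not the real list.

```ruby
# Assumed stand-in for the plugin's real exclusion list.
EXCLUDE_ALWAYS = ["@timestamp", "@version"]

# Drop bookkeeping keys from an event hash before Avro serialization.
def clean_hash(event_hash)
  event_hash.delete_if { |key, _| EXCLUDE_ALWAYS.include? key }
end

clean_hash("@timestamp" => "2018-10-03T00:00:00Z", "msg" => "hi")
# => {"msg" => "hi"}
```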
@@ -222,6 +231,13 @@ class LogStash::Codecs::AvroSchemaRegistry < LogStash::Codecs::Base
         yield LogStash::Event.new(datum_reader.read(decoder))
       end
     end
+  rescue => e
+    if tag_on_failure
+      @logger.error("Avro parse error, original data now in message field", :error => e)
+      yield LogStash::Event.new("message" => data, "tags" => ["_avroparsefailure"])
+    else
+      raise e
+    end
   end
 
   public
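
The new handler leans on Ruby's method-level rescue: a def body is an implicit begin block, so the rescue clause catches anything raised while reading the datum, without an explicit begin/end. A generic, standalone sketch of the same shape (all names here are made up):

```ruby
def parse_or_tag(data, tag_on_failure: false)
  Integer(data)  # stand-in for the real Avro decode; raises on bad input
rescue => e
  raise e unless tag_on_failure
  { "message" => data, "tags" => ["_parsefailure"], "error" => e.message }
end

parse_or_tag("42", tag_on_failure: true)   # => 42
parse_or_tag("oops", tag_on_failure: true) # => tagged hash, no exception
parse_or_tag("oops")                       # => raises ArgumentError, as before
```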
@@ -233,9 +249,7 @@ class LogStash::Codecs::AvroSchemaRegistry < LogStash::Codecs::Base
     buffer.write(MAGIC_BYTE.chr)
     buffer.write([@write_schema_id].pack("I>"))
     encoder = Avro::IO::BinaryEncoder.new(buffer)
-    eh = event.to_hash
-    eh.delete_if { |key, _| EXCLUDE_ALWAYS.include? key }
-    dw.write(eh, encoder)
+    dw.write(clean_event(event), encoder)
     if @binary_encoded
       @on_event.call(event, buffer.string.to_java_bytes)
     else
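
The unchanged context lines above show the Confluent wire format the codec writes: one zero magic byte, a 4-byte big-endian schema id, then the Avro datum. A minimal standalone sketch of that framing (the id and payload are made-up placeholders):

```ruby
require "stringio"

MAGIC_BYTE = 0
schema_id  = 42              # hypothetical registry id
payload    = "avro-bytes".b  # stand-in for the encoded Avro datum

buffer = StringIO.new
buffer.write(MAGIC_BYTE.chr)
buffer.write([schema_id].pack("I>"))  # unsigned 32-bit, big-endian
buffer.write(payload)

# Decode reverses the framing before any Avro work happens, which is
# why the codec rejects payloads shorter than 5 bytes.
magic, id = buffer.string.unpack("CI>")  # => [0, 42]
datum     = buffer.string[5..-1]         # => "avro-bytes"
```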
data/logstash-codec-avro_schema_registry.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-codec-avro_schema_registry'
-  s.version = '1.1.0'
+  s.version = '1.1.1'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Encode and decode avro formatted data from a Confluent schema registry"
   s.description = "Encode and decode avro formatted data from a Confluent schema registry"
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-codec-avro_schema_registry
 version: !ruby/object:Gem::Version
-  version: 1.1.0
+  version: 1.1.1
 platform: ruby
 authors:
 - RevPoint Media
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-05-14 00:00:00.000000000 Z
+date: 2018-10-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement