fluent-plugin-kafka 0.13.0 → 0.13.1

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: d2916b74ae31b54e70f789e5891e6cef47e34a714c1a76216ef0f7d8769c5f64
- data.tar.gz: b75ca1d9b41fb0dc2ec547a544185c4cecdfa8759587b2f456fa51ee2720bebd
+ metadata.gz: 05cbb3ff005fbf6f27ab1a87ebe799f176e53ebfa6cebadf2b4f4418dfb6cb7b
+ data.tar.gz: 45bec524fc7a727031cf65b98c4b1db41dc8f9a1bfe86696fe0e7e3df7f8a0fa
  SHA512:
- metadata.gz: fd9dfbae3b9b663ba4cdc502bd59138bf3713555cc736935b269ccb54e5c5228f24fb738be5ff6e2262759102fae9f98fe68bccbf76e4a579e1b47bb401f0843
- data.tar.gz: 785ff04d203d38d064a0b1673e91b94399e857e13db918debeed90435e0337a644c8c166971b0915b03123da998a29327deca993aa78e63587a46fc329947e5b
+ metadata.gz: fdee5e4b8d0e15f835d2b7d3bedb0d735e0b00b9e725fac19ed68c710a751c99279aa9966590cab0fef7c5b88a28cd7cefb80ee923423cc47e49a13ba88ac8f5
+ data.tar.gz: a33c14f6c4927e3cd91c8aabbf2edc35a34582aa5859b5369829721810234edcdad39a2b148d4bff3377177f707bae3e74909d1f8b553cb8ee3682d00e4d58d7
data/ChangeLog CHANGED
@@ -1,3 +1,8 @@
+ Release 0.13.1 - 2020/07/17
+
+ * in_kafka_group: Support ssl_verify_hostname parameter
+ * out_kafka2/out_rdkafka2: Support topic parameter with placeholders
+
  Release 0.13.0 - 2020/03/09

  * Accept ruby-kafka v1 or later
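Both 0.13.1 entries above are configuration-facing. For the first one, a minimal sketch, assuming the ssl_verify_hostname parameter name from the ChangeLog and placeholder broker, group, and topic values (the topic-placeholder feature is illustrated further down, next to the out_kafka2 hunk):

    <source>
      @type kafka_group
      brokers broker1:9092
      consumer_group my-consumer-group
      topics my-topic
      # new in 0.13.1: pass hostname verification through to ruby-kafka,
      # e.g. disable it when the certificate CN does not match the broker host
      ssl_verify_hostname false
    </source>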
data/README.md CHANGED
@@ -121,7 +121,8 @@ Consume events by kafka consumer group features..
  add_prefix <tag prefix (Optional)>
  add_suffix <tag suffix (Optional)>
  retry_emit_limit <Wait retry_emit_limit x 1s when BuffereQueueLimitError happens. The default is nil and it means waiting until BufferQueueLimitError is resolved>
- use_record_time <If true, replace event time with contents of 'time' field of fetched record>
+ use_record_time (Deprecated. Use 'time_source record' instead.) <If true, replace event time with contents of 'time' field of fetched record>
+ time_source <source for message timestamp (now|kafka|record)> :default => now
  time_format <string (Optional when use_record_time is used)>

  # ruby-kafka consumer options
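The time_source parameter documented above supersedes the deprecated use_record_time flag. A minimal, hedged sketch of how it might be set (brokers, group, topic, and the time format are placeholders):

    <source>
      @type kafka_group
      brokers broker1:9092
      consumer_group my-consumer-group
      topics my-topic
      # take the event time from the fetched record rather than the wall clock
      time_source record
      # optional strptime pattern for parsing the record's time field
      time_format %Y-%m-%dT%H:%M:%S.%N%z
    </source>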
@@ -443,7 +444,7 @@ See ruby-kafka README for more details: https://github.com/zendesk/ruby-kafka#co

  To avoid the problem, there are 2 approaches:

- - Upgrade your kafka cluster to latest version. This is better becase recent version is faster and robust.
+ - Upgrade your kafka cluster to latest version. This is better because recent version is faster and robust.
  - Downgrade ruby-kafka/fluent-plugin-kafka to work with your older kafka.

  ## Contributing
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = "fluent-plugin-kafka"
  gem.require_paths = ["lib"]
- gem.version = '0.13.0'
+ gem.version = '0.13.1'
  gem.required_ruby_version = ">= 2.1.0"

  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
@@ -39,6 +39,8 @@ class Fluent::KafkaInput < Fluent::Input
  :deprecated => "Use 'time_source record' instead."
  config_param :time_source, :enum, :list => [:now, :kafka, :record], :default => :now,
  :desc => "Source for message timestamp."
+ config_param :record_time_key, :string, :default => 'time',
+ :desc => "Time field when time_source is 'record'"
  config_param :get_kafka_client_log, :bool, :default => false
  config_param :time_format, :string, :default => nil,
  :desc => "Time format to be used to parse 'time' field."
@@ -292,9 +294,9 @@ class Fluent::KafkaInput < Fluent::Input
  record_time = Fluent::Engine.now
  when :record
  if @time_format
- record_time = @time_parser.parse(record['time'])
+ record_time = @time_parser.parse(record[@record_time_key])
  else
- record_time = record['time']
+ record_time = record[@record_time_key]
  end
  else
  $log.fatal "BUG: invalid time_source: #{@time_source}"
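The hunks above add a record_time_key option so that time_source record can read a field other than 'time'. A hedged sketch for the plain kafka input (the field and topic names are invented for illustration):

    <source>
      @type kafka
      brokers broker1:9092
      topics my-topic
      time_source record
      # read the event time from record['timestamp'] instead of the default record['time']
      record_time_key timestamp
    </source>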
@@ -29,6 +29,8 @@ class Fluent::KafkaGroupInput < Fluent::Input
  :deprecated => "Use 'time_source record' instead."
  config_param :time_source, :enum, :list => [:now, :kafka, :record], :default => :now,
  :desc => "Source for message timestamp."
+ config_param :record_time_key, :string, :default => 'time',
+ :desc => "Time field when time_source is 'record'"
  config_param :get_kafka_client_log, :bool, :default => false
  config_param :time_format, :string, :default => nil,
  :desc => "Time format to be used to parse 'time' field."
@@ -166,16 +168,17 @@ class Fluent::KafkaGroupInput < Fluent::Input
  @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
- sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl)
+ sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
  elsif @username != nil && @password != nil
  @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password,
- sasl_over_ssl: @sasl_over_ssl)
+ sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
  else
  @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
- ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+ ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab,
+ ssl_verify_hostname: @ssl_verify_hostname)
  end

  @consumer = setup_consumer
@@ -198,7 +201,14 @@ class Fluent::KafkaGroupInput < Fluent::Input
  def setup_consumer
  consumer = @kafka.consumer(@consumer_opts)
  @topics.each { |topic|
- consumer.subscribe(topic, start_from_beginning: @start_from_beginning, max_bytes_per_partition: @max_bytes)
+ if m = /^\/(.+)\/$/.match(topic)
+ topic_or_regex = Regexp.new(m[1])
+ $log.info "Subscribe to topics matching the regex #{topic}"
+ else
+ topic_or_regex = topic
+ $log.info "Subscribe to topic #{topic}"
+ end
+ consumer.subscribe(topic_or_regex, start_from_beginning: @start_from_beginning, max_bytes_per_partition: @max_bytes)
  }
  consumer
  end
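In the setup_consumer change above, a topic entry wrapped in slashes is compiled with Regexp.new and handed to ruby-kafka's subscribe, so one entry can match many topics. A hedged guess at the corresponding configuration (the pattern, brokers, and group are illustrative):

    <source>
      @type kafka_group
      brokers broker1:9092
      consumer_group my-consumer-group
      # an entry of the form /.../ is treated as a regular expression over topic names
      topics /^app\..+$/
    </source>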
@@ -243,9 +253,9 @@ class Fluent::KafkaGroupInput < Fluent::Input
  record_time = Fluent::Engine.now
  when :record
  if @time_format
- record_time = @time_parser.parse(record['time'].to_s)
+ record_time = @time_parser.parse(record[@record_time_key].to_s)
  else
- record_time = record['time']
+ record_time = record[@record_time_key]
  end
  else
  log.fatal "BUG: invalid time_source: #{@time_source}"
@@ -15,6 +15,7 @@ module Fluent::Plugin
  Set brokers directly:
  <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
  DESC
+ config_param :topic, :string, :default => nil, :desc => "kafka topic. Placeholders are supported"
  config_param :topic_key, :string, :default => 'topic', :desc => "Field for kafka topic"
  config_param :default_topic, :string, :default => nil,
  :desc => "Default output topic when record doesn't have topic field"
@@ -215,7 +216,11 @@ DESC
  # TODO: optimize write performance
  def write(chunk)
  tag = chunk.metadata.tag
- topic = (chunk.metadata.variables && chunk.metadata.variables[@topic_key_sym]) || @default_topic || tag
+ topic = if @topic
+ extract_placeholders(@topic, chunk)
+ else
+ (chunk.metadata.variables && chunk.metadata.variables[@topic_key_sym]) || @default_topic || tag
+ end

  messages = 0
  record_buf = nil
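The new topic parameter above is resolved per chunk via extract_placeholders, so a placeholder such as ${tag} only resolves when the buffer is chunked on the corresponding key. A hedged sketch (topic prefix and tag pattern are placeholders; the out_rdkafka2 hunks below receive the same change):

    <match app.**>
      @type kafka2
      brokers broker1:9092
      # ${tag} is filled in per chunk because the buffer is keyed on tag
      topic logs-${tag}
      <buffer tag>
        flush_interval 10s
      </buffer>
    </match>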
@@ -33,6 +33,7 @@ Set brokers directly:
  <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
  Brokers: you can choose to use either brokers or zookeeper.
  DESC
+ config_param :topic, :string, :default => nil, :desc => "kafka topic. Placeholders are supported"
  config_param :topic_key, :string, :default => 'topic', :desc => "Field for kafka topic"
  config_param :default_topic, :string, :default => nil,
  :desc => "Default output topic when record doesn't have topic field"
@@ -278,7 +279,11 @@ DESC

  def write(chunk)
  tag = chunk.metadata.tag
- topic = (chunk.metadata.variables && chunk.metadata.variables[@topic_key_sym]) || @default_topic || tag
+ topic = if @topic
+ extract_placeholders(@topic, chunk)
+ else
+ (chunk.metadata.variables && chunk.metadata.variables[@topic_key_sym]) || @default_topic || tag
+ end

  handlers = []
  record_buf = nil
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-kafka
  version: !ruby/object:Gem::Version
- version: 0.13.0
+ version: 0.13.1
  platform: ruby
  authors:
  - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-03-10 00:00:00.000000000 Z
+ date: 2020-07-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd