fluent-plugin-kafka 0.9.2 → 0.9.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 19ca96347c691e54c3e17b448f35cd854ce47631
- data.tar.gz: 71ffa3bf2f49fbc7b8e6524a35a42753d1fbb916
+ metadata.gz: 42992f4c6af7ffc02905273a11f9522d581d397e
+ data.tar.gz: e057965b640edc5b5aab0f2aa0250286effae66d
  SHA512:
- metadata.gz: b81ecd0cd9d0a8373df4de46165342b36caf67e214df542b6e790bc1363a9bd3f01662710c2076f027c618aacd2552fc1468716bf50f1e677f8e383d07b9e595
- data.tar.gz: dc60575fc8913313c68774b9148eeb7e6eab1bd6f2ac47570f068333ce597ad058f558c4e3540b4f27022c098ec824bb8c0cb8825b48592ac4d7a2c090363479
+ metadata.gz: 35880b0157f34ee5c0a8e2809c5338b417bef35d8c1e65c88964de1eb492279a4ec0c971fe0f7f760769787689067864f46fe26029954dc5b79c0e62908ecd21
+ data.tar.gz: e2c493ae1978845b9bb319ad8dceff26b5fb45ae1f9127e544841a9b4f1df4849a57d5abf688c322fae3a2e7366653f20c0de617ed0145028bb2e69b8f03444c
data/ChangeLog CHANGED
@@ -1,3 +1,7 @@
+ Release 0.9.3 - 2019/05/04
+
+ * in_kafka_group: Add connect_timeout and socket_timeout parameters
+
  Release 0.9.2 - 2019/03/26

  * out_kafka_buffered: Fix typo of partition_key usage
data/README.md CHANGED
@@ -20,7 +20,7 @@ Or install it yourself as:

  $ gem install fluent-plugin-kafka --no-document

- If you want to use zookeeper-related parameters, you also need to install the zookeeper gem. The zookeeper gem includes a native extension, so development tools are needed, e.g. gcc and make.
+ If you want to use zookeeper-related parameters, you also need to install the zookeeper gem. The zookeeper gem includes a native extension, so development tools are needed, e.g. ruby-devel, gcc and make.

  ## Requirements

@@ -142,7 +142,7 @@ Consuming topic name is used for event tag. So when the target topic name is `ap

  ### Buffered output plugin

- This plugin uses the ruby-kafka producer for writing data. This plugin works with recent Kafka versions.
+ This plugin uses the ruby-kafka producer for writing data. This plugin works with recent Kafka versions. This plugin is for Fluentd v0.12; if you use v1, see `kafka2`.

  <match app.**>
  @type kafka_buffered
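
For reference, a minimal v1-style counterpart using `kafka2` might look like the sketch below; the broker address and topic are placeholders, not values taken from this diff:

    <match app.**>
      @type kafka2
      brokers localhost:9092    # placeholder broker
      default_topic app         # placeholder topic
      <format>
        @type json
      </format>
      <buffer topic>
        flush_interval 10s
      </buffer>
    </match>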
@@ -254,6 +254,8 @@ This plugin is for fluentd v1.0 or later. This will be `out_kafka` plugin in the
  <format>
  @type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
  </format>
+
+ # Optional. See https://docs.fluentd.org/v1.0/articles/inject-section
  <inject>
  tag_key tag
  time_key time
@@ -312,6 +314,48 @@ You need to install rdkafka gem.
  # rdkafka is C extension library so need development tools like ruby-devel, gcc and etc
  $ gem install rdkafka --no-document

+ `rdkafka2` is for fluentd v1.0 or later.
+
+ <match app.**>
+ @type rdkafka2
+
+ brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,.. # Set brokers directly
+
+ topic_key (string) :default => 'topic'
+ default_topic (string) :default => nil
+ partition_key (string) :default => 'partition'
+ partition_key_key (string) :default => 'partition_key'
+ message_key_key (string) :default => 'message_key'
+ default_partition_key (string) :default => nil
+ default_message_key (string) :default => nil
+ exclude_topic_key (bool) :default => false
+ exclude_partition_key (bool) :default => false
+
+ <format>
+ @type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
+ </format>
+
+ # Optional. See https://docs.fluentd.org/v1.0/articles/inject-section
+ <inject>
+ tag_key tag
+ time_key time
+ </inject>
+
+ # See fluentd document for buffer section parameters: https://docs.fluentd.org/v1.0/articles/buffer-section
+ # Buffer chunk key should be the same as topic_key. If the value is not found in the record, default_topic is used.
+ <buffer topic>
+ flush_interval 10s
+ </buffer>
+
+ # You can set any rdkafka configuration via this parameter: https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
+ rdkafka_options {
+ "log_level" : 7
+ }
+ </match>
+
+ If you use v0.12, use `rdkafka` instead.
+
  <match kafka.**>
  @type rdkafka

data/fluent-plugin-kafka.gemspec CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = "fluent-plugin-kafka"
  gem.require_paths = ["lib"]
- gem.version = '0.9.2'
+ gem.version = '0.9.3'
  gem.required_ruby_version = ">= 2.1.0"

  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
data/lib/fluent/plugin/in_kafka_group.rb CHANGED
@@ -30,6 +30,10 @@ class Fluent::KafkaGroupInput < Fluent::Input
  :desc => "Time format to be used to parse 'time' filed."
  config_param :kafka_message_key, :string, :default => nil,
  :desc => "Set kafka's message key to this field"
+ config_param :connect_timeout, :integer, :default => nil,
+ :desc => "[Integer, nil] the timeout setting for connecting to brokers"
+ config_param :socket_timeout, :integer, :default => nil,
+ :desc => "[Integer, nil] the timeout setting for socket connection"

  config_param :retry_wait_seconds, :integer, :default => 30
  config_param :disable_retry_limit, :bool, :default => false,
@@ -152,16 +156,16 @@ class Fluent::KafkaGroupInput < Fluent::Input
  super

  if @scram_mechanism != nil && @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
  sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl)
  elsif @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password)
  else
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
  end
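
Both new parameters are passed straight through to `Kafka.new`, as the hunk above shows. A minimal sketch of an `in_kafka_group` source using them follows; the broker address, consumer group, and topic are placeholders, and the values are in seconds as in ruby-kafka:

    <source>
      @type kafka_group
      brokers localhost:9092          # placeholder broker
      consumer_group fluentd-group    # placeholder group
      topics app                      # placeholder topic
      format json
      connect_timeout 10              # new in 0.9.3; default nil (ruby-kafka default)
      socket_timeout 30               # new in 0.9.3; default nil (ruby-kafka default)
    </source>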
data/lib/fluent/plugin/out_rdkafka2.rb CHANGED
@@ -39,7 +39,6 @@ DESC
  config_param :default_message_key, :string, :default => nil
  config_param :partition_key, :string, :default => 'partition', :desc => "Field for kafka partition"
  config_param :default_partition, :integer, :default => nil
- config_param :client_id, :string, :default => 'kafka'
  config_param :output_data_type, :string, :default => 'json', :obsoleted => "Use <format> section instead"
  config_param :output_include_tag, :bool, :default => false, :obsoleted => "Use <inject> section instead"
  config_param :output_include_time, :bool, :default => false, :obsoleted => "Use <inject> section instead"
@@ -49,36 +48,36 @@ Set true to remove partition from data
  DESC
  config_param :exclude_message_key, :bool, :default => false,
  :desc => <<-DESC
- Set true to remove partition key from data
+ Set true to remove message_key from data
  DESC
  config_param :exclude_topic_key, :bool, :default => false,
  :desc => <<-DESC
- Set true to remove topic name key from data
+ Set true to remove topic key from data
  DESC
  config_param :max_send_retries, :integer, :default => 2,
- :desc => "Number of times to retry sending of messages to a leader."
+ :desc => "Number of times to retry sending of messages to a leader. Used for message.send.max.retries"
  config_param :required_acks, :integer, :default => -1,
- :desc => "The number of acks required per request."
+ :desc => "The number of acks required per request. Used for request.required.acks"
  config_param :ack_timeout, :time, :default => nil,
- :desc => "How long the producer waits for acks."
+ :desc => "How long the producer waits for acks. Used for request.timeout.ms"
  config_param :compression_codec, :string, :default => nil,
  :desc => <<-DESC
- The codec the producer uses to compress messages.
+ The codec the producer uses to compress messages. Used for compression.codec
  Supported codecs: (gzip|snappy)
  DESC

- config_param :rdkafka_buffering_max_ms, :integer, :default => nil
- config_param :rdkafka_buffering_max_messages, :integer, :default => nil
- config_param :rdkafka_message_max_bytes, :integer, :default => nil
- config_param :rdkafka_message_max_num, :integer, :default => nil
- config_param :rdkafka_delivery_handle_poll_timeout, :integer, :default => 30
- config_param :rdkafka_options, :hash, :default => {}
+ config_param :rdkafka_buffering_max_ms, :integer, :default => nil, :desc => 'Used for queue.buffering.max.ms'
+ config_param :rdkafka_buffering_max_messages, :integer, :default => nil, :desc => 'Used for queue.buffering.max.messages'
+ config_param :rdkafka_message_max_bytes, :integer, :default => nil, :desc => 'Used for message.max.bytes'
+ config_param :rdkafka_message_max_num, :integer, :default => nil, :desc => 'Used for batch.num.messages'
+ config_param :rdkafka_delivery_handle_poll_timeout, :integer, :default => 30, :desc => 'Timeout for polling message wait'
+ config_param :rdkafka_options, :hash, :default => {}, :desc => 'Set any rdkafka configuration. See https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md'

  config_param :max_enqueue_retries, :integer, :default => 3
  config_param :enqueue_retry_backoff, :integer, :default => 3

- config_param :service_name, :string, :default => nil
- config_param :ssl_client_cert_key_password, :string, :default => nil
+ config_param :service_name, :string, :default => nil, :desc => 'Used for sasl.kerberos.service.name'
+ config_param :ssl_client_cert_key_password, :string, :default => nil, :desc => 'Used for ssl.key.password'

  config_section :buffer do
  config_set_default :chunk_keys, ["topic"]
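
The new `:desc` strings name the librdkafka keys each parameter maps to. A hedged configuration sketch restating those mappings might look like this; the broker address and topic are placeholders, and the comments just repeat the mappings documented above:

    <match app.**>
      @type rdkafka2
      brokers localhost:9092             # placeholder broker
      default_topic app                  # placeholder topic
      max_send_retries 2                 # -> message.send.max.retries
      required_acks -1                   # -> request.required.acks
      compression_codec gzip             # -> compression.codec
      rdkafka_buffering_max_ms 100       # -> queue.buffering.max.ms
      rdkafka_message_max_bytes 1000000  # -> message.max.bytes
    </match>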
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-kafka
  version: !ruby/object:Gem::Version
- version: 0.9.2
+ version: 0.9.3
  platform: ruby
  authors:
  - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-03-26 00:00:00.000000000 Z
+ date: 2019-05-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd