fluent-plugin-kafka 0.15.1 → 0.15.2

Sign up to get free protection for your applications and access to all of the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 56e2a0fc884e2ef670d81e61b180de9ccc91eb6b67b9f0edb8015af4212e76ce
4
- data.tar.gz: 1fa73921dba4cc833e4032f5636287659e874ab98c210b20ee5a71a0194bc40a
3
+ metadata.gz: 866fb421d7097ccbac1bde2e279d9975bce4de086dab74609cadbe50429bb312
4
+ data.tar.gz: 0e763fc4276177949f6cec2b4839ad5168892cd4d346d633f81449fdb8df84d7
5
5
  SHA512:
6
- metadata.gz: f7ec3be524feb670cd1823a948dc37b600142107c9949a596cc5b4aa4948e1a83387113ccb5319d1df3484bbcdf7d0df8981f1fbcf7281053f1a0419f80cc5e8
7
- data.tar.gz: 3c28d90f1863317f3da9efde6fc217f8754bbea942f1d94714cb4cce3eac210691416f8aab9f1e76c50ab2a372365358390e283c53e83adc24ead06468b81542
6
+ metadata.gz: b41f5cb35d1c4dea3743e513b505e11f634dfcbc33e339f188f2ac4c2b710ed1357c00779e24873c5f4a0bdd5326f8c1a731b4f2a4c323dc6fde8b85bc78ef28
7
+ data.tar.gz: df4061316f692fbe264b2344fd74f7ba1d15174bb91a617c09f5d9d3de6d50a5f0b6c8aa702ce9322d438ee6ed83b09cf620e71537e96e13db73882f01e291cb
data/ChangeLog CHANGED
@@ -1,3 +1,7 @@
1
+ Release 0.15.2 - 2020/09/30
2
+
3
+ * input: Support 3rd party parser
4
+
1
5
  Release 0.15.1 - 2020/09/17
2
6
 
3
7
  * out_kafka2: Fix wrong class name for configuration error
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
13
13
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
14
14
  gem.name = "fluent-plugin-kafka"
15
15
  gem.require_paths = ["lib"]
16
- gem.version = '0.15.1'
16
+ gem.version = '0.15.2'
17
17
  gem.required_ruby_version = ">= 2.1.0"
18
18
 
19
19
  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
@@ -113,7 +113,7 @@ class Fluent::KafkaInput < Fluent::Input
113
113
 
114
114
  require 'zookeeper' if @offset_zookeeper
115
115
 
116
- @parser_proc = setup_parser
116
+ @parser_proc = setup_parser(conf)
117
117
 
118
118
  @time_source = :record if @use_record_time
119
119
 
@@ -126,7 +126,7 @@ class Fluent::KafkaInput < Fluent::Input
126
126
  end
127
127
  end
128
128
 
129
- def setup_parser
129
+ def setup_parser(conf)
130
130
  case @format
131
131
  when 'json'
132
132
  begin
@@ -165,6 +165,14 @@ class Fluent::KafkaInput < Fluent::Input
165
165
  add_offset_in_hash(r, te, msg.offset) if @add_offset_in_record
166
166
  r
167
167
  }
168
+ else
169
+ @custom_parser = Fluent::Plugin.new_parser(conf['format'])
170
+ @custom_parser.configure(conf)
171
+ Proc.new { |msg|
172
+ @custom_parser.parse(msg.value) {|_time, record|
173
+ record
174
+ }
175
+ }
168
176
  end
169
177
  end
170
178
 
@@ -117,7 +117,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
117
117
  @max_wait_time = conf['max_wait_ms'].to_i / 1000
118
118
  end
119
119
 
120
- @parser_proc = setup_parser
120
+ @parser_proc = setup_parser(conf)
121
121
 
122
122
  @consumer_opts = {:group_id => @consumer_group}
123
123
  @consumer_opts[:session_timeout] = @session_timeout if @session_timeout
@@ -140,7 +140,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
140
140
  end
141
141
  end
142
142
 
143
- def setup_parser
143
+ def setup_parser(conf)
144
144
  case @format
145
145
  when 'json'
146
146
  begin
@@ -159,6 +159,14 @@ class Fluent::KafkaGroupInput < Fluent::Input
159
159
  Proc.new { |msg| MessagePack.unpack(msg.value) }
160
160
  when 'text'
161
161
  Proc.new { |msg| {@message_key => msg.value} }
162
+ else
163
+ @custom_parser = Fluent::Plugin.new_parser(conf['format'])
164
+ @custom_parser.configure(conf)
165
+ Proc.new { |msg|
166
+ @custom_parser.parse(msg.value) {|_time, record|
167
+ record
168
+ }
169
+ }
162
170
  end
163
171
  end
164
172
 
@@ -7,7 +7,7 @@ require 'rdkafka'
7
7
  class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
8
8
  Fluent::Plugin.register_input('rdkafka_group', self)
9
9
 
10
- helpers :thread
10
+ helpers :thread, :parser, :compat_parameters
11
11
 
12
12
  config_param :topics, :string,
13
13
  :desc => "Listening topics(separate with comma',')."
@@ -41,15 +41,19 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
41
41
  :desc => "If set true, it disables retry_limit and make Fluentd retry indefinitely (default: false)"
42
42
  config_param :retry_limit, :integer, :default => 10,
43
43
  :desc => "The maximum number of retries for connecting kafka (default: 10)"
44
-
44
+
45
45
  config_param :max_wait_time_ms, :integer, :default => 250,
46
46
  :desc => "How long to block polls in milliseconds until the server sends us data."
47
47
  config_param :max_batch_size, :integer, :default => 10000,
48
48
  :desc => "Maximum number of log lines emitted in a single batch."
49
-
49
+
50
50
  config_param :kafka_configs, :hash, :default => {},
51
51
  :desc => "Kafka configuration properties as described in https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md"
52
52
 
53
+ config_section :parse do
54
+ config_set_default :@type, 'json'
55
+ end
56
+
53
57
  include Fluent::KafkaPluginUtil::SSLSettings
54
58
  include Fluent::KafkaPluginUtil::SaslSettings
55
59
 
@@ -80,6 +84,8 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
80
84
  private :_config_to_array
81
85
 
82
86
  def configure(conf)
87
+ compat_parameters_convert(conf, :parser)
88
+
83
89
  super
84
90
 
85
91
  log.warn "The in_rdkafka_group consumer was not yet tested under heavy production load. Use it at your own risk!"
@@ -89,7 +95,14 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
89
95
 
90
96
  @topics = _config_to_array(@topics)
91
97
 
92
- @parser_proc = setup_parser
98
+ parser_conf = conf.elements('parse').first
99
+ unless parser_conf
100
+ raise Fluent::ConfigError, "<parse> section or format parameter is required."
101
+ end
102
+ unless parser_conf["@type"]
103
+ raise Fluent::ConfigError, "parse/@type is required."
104
+ end
105
+ @parser_proc = setup_parser(parser_conf)
93
106
 
94
107
  @time_source = :record if @use_record_time
95
108
 
@@ -98,8 +111,9 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
98
111
  end
99
112
  end
100
113
 
101
- def setup_parser
102
- case @format
114
+ def setup_parser(parser_conf)
115
+ format = parser_conf["@type"]
116
+ case format
103
117
  when 'json'
104
118
  begin
105
119
  require 'oj'
@@ -117,6 +131,13 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
117
131
  Proc.new { |msg| MessagePack.unpack(msg.payload) }
118
132
  when 'text'
119
133
  Proc.new { |msg| {@message_key => msg.payload} }
134
+ else
135
+ @custom_parser = parser_create(usage: 'in-rdkafka-plugin', conf: parser_conf)
136
+ Proc.new { |msg|
137
+ @custom_parser.parse(msg.payload) {|_time, record|
138
+ record
139
+ }
140
+ }
120
141
  end
121
142
  end
122
143
 
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-kafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.15.1
4
+ version: 0.15.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2020-09-17 00:00:00.000000000 Z
12
+ date: 2020-09-30 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: fluentd