fluent-plugin-kafka 0.15.0 → 0.16.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries, and is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 43c3a759f4636168c932c33f45c38105ebb522b5ea8222f1b1a7eceb53343348
-  data.tar.gz: c64a103244e721fa2de124f466f2480c960daafc713fd16f685ea4dd4a545a3d
+  metadata.gz: 5f03efe1dee8bff74ba9e0588956bc919244ac6ddf24ff40821d072602dab916
+  data.tar.gz: b081913e347f5dd28d1339a1eca3f063661217fa4f1738787161cb825c3485fc
 SHA512:
-  metadata.gz: 707d92f2a23041b53daf6410d3fadb0e84053c4eb250b20c6dd3c72a15969273d2279b71950334187d156767bf6646a0af468a0f84e85ca683a34c127e47e363
-  data.tar.gz: 978883c8a72152bb6b9262ccea4e6b65b91bca1a3907ea43a7930cf7b4d414f1a9f47cb593d420738a48bd47d86451f376fe4cc6e7dec6b4f2c4e81ad5213d00
+  metadata.gz: 03e89037f5982ca8aa121c1c1672160e95fb488ee7e9ca6bd8038e8d9b0cc9d99348ab91a2e1896e174fed4fa2b4c25fb9e65ee72b9a4ba74062c631953ca345
+  data.tar.gz: 5ca6f6093a1f46ab89b4b268416c16d9758102b76fcdb2bba8d2f235daa7b88e9914e48a0e4d0397df13ea9159a21ba1bbe525186670fd7b7622cfdd3d2cac2f
data/.github/workflows/linux.yml ADDED
@@ -0,0 +1,26 @@
+name: linux
+on:
+  - push
+  - pull_request
+jobs:
+  build:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        ruby: [ '2.4', '2.5', '2.6', '2.7', '3.0' ]
+        os:
+          - ubuntu-latest
+    name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v2
+      - uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: ${{ matrix.ruby }}
+      - name: unit testing
+        env:
+          CI: true
+        run: |
+          gem install bundler rake
+          bundle install --jobs 4 --retry 3
+          bundle exec rake test
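
The new workflow's test job can be reproduced locally with the same commands it runs, assuming a checkout of the gem source and one of the Rubies from the matrix:

    gem install bundler rake
    bundle install --jobs 4 --retry 3
    bundle exec rake test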
data/ChangeLog CHANGED
@@ -1,3 +1,24 @@
+Release 0.16.1 - 2021/04/14
+* out_kafka/out_kafka_buffered: Support Ruby 3.0.0 keyword arguments interop
+* kafka_plugin_util: Treat empty string in read_ssl_file as nil
+
+Release 0.16.0 - 2021/01/25
+
+* input: Add `tag_source` and `record_tag_key` parameters for using record field as tag
+* in_kafka_group: Use NumericParser for floating point
+
+Release 0.15.3 - 2020/12/08
+
+* in_kafka: Fix `record_time_key` parameter not working
+
+Release 0.15.2 - 2020/09/30
+
+* input: Support 3rd party parser
+
+Release 0.15.1 - 2020/09/17
+
+* out_kafka2: Fix wrong class name for configuration error
+
 Release 0.15.0 - 2020/09/14
 
 * Add experimental `in_rdkafka_group`
@@ -20,6 +41,7 @@ Release 0.14.0 - 2020/08/07
 Release 0.13.1 - 2020/07/17
 
 * in_kafka_group: Support ssl_verify_hostname parameter
+* in_kafka_group: Support regex based topics
 * out_kafka2/out_rdkafka2: Support topic parameter with placeholders
 
 Release 0.13.0 - 2020/03/09
data/README.md CHANGED
@@ -139,6 +139,8 @@ Consume events by kafka consumer group features..
 
 See also [ruby-kafka README](https://github.com/zendesk/ruby-kafka#consuming-messages-from-kafka) for more detailed documentation about ruby-kafka options.
 
+`topics` supports regex pattern since v0.13.1. If you want to use regex pattern, use `/pattern/` like `/foo.*/`.
+
 Consuming topic name is used for event tag. So when the target topic name is `app_event`, the tag is `app_event`. If you want to modify tag, use `add_prefix` or `add_suffix` parameter. With `add_prefix kafka`, the tag is `kafka.app_event`.
 
 ### Input plugin (@type 'rdkafka_group', supports kafka consumer groups, uses rdkafka-ruby)
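
A minimal consumer-group source using the regex form described in the README addition above; a sketch, with the broker address and group name as placeholder values:

    <source>
      @type kafka_group
      brokers localhost:9092
      consumer_group fluentd
      topics /foo.*/
      format json
    </source>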
data/fluent-plugin-kafka.gemspec CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.15.0'
+  gem.version = '0.16.1'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
@@ -21,4 +21,5 @@ Gem::Specification.new do |gem|
   gem.add_dependency 'ruby-kafka', '>= 1.2.0', '< 2'
   gem.add_development_dependency "rake", ">= 0.9.2"
   gem.add_development_dependency "test-unit", ">= 3.0.8"
+  gem.add_development_dependency "webrick"
 end
data/lib/fluent/plugin/in_kafka.rb CHANGED
@@ -31,6 +31,10 @@ class Fluent::KafkaInput < Fluent::Input
   config_param :add_suffix, :string, :default => nil,
                :desc => "tag suffix"
   config_param :add_offset_in_record, :bool, :default => false
+  config_param :tag_source, :enum, :list => [:topic, :record], :default => :topic,
+               :desc => "Source for the fluentd event tag"
+  config_param :record_tag_key, :string, :default => 'tag',
+               :desc => "Tag field when tag_source is 'record'"
 
   config_param :offset_zookeeper, :string, :default => nil
   config_param :offset_zk_root_node, :string, :default => '/fluent-plugin-kafka'
@@ -113,7 +117,7 @@ class Fluent::KafkaInput < Fluent::Input
 
     require 'zookeeper' if @offset_zookeeper
 
-    @parser_proc = setup_parser
+    @parser_proc = setup_parser(conf)
 
     @time_source = :record if @use_record_time
 
@@ -126,7 +130,7 @@ class Fluent::KafkaInput < Fluent::Input
     end
   end
 
-  def setup_parser
+  def setup_parser(conf)
     case @format
     when 'json'
       begin
@@ -165,6 +169,14 @@ class Fluent::KafkaInput < Fluent::Input
         add_offset_in_hash(r, te, msg.offset) if @add_offset_in_record
         r
       }
+    else
+      @custom_parser = Fluent::Plugin.new_parser(conf['format'])
+      @custom_parser.configure(conf)
+      Proc.new { |msg|
+        @custom_parser.parse(msg.value) {|_time, record|
+          record
+        }
+      }
     end
   end
 
@@ -216,6 +228,9 @@ class Fluent::KafkaInput < Fluent::Input
           router,
           @kafka_message_key,
           @time_source,
+          @record_time_key,
+          @tag_source,
+          @record_tag_key,
           opt)
     }
     @topic_watchers.each {|tw|
@@ -240,7 +255,7 @@ class Fluent::KafkaInput < Fluent::Input
   end
 
   class TopicWatcher < Coolio::TimerWatcher
-    def initialize(topic_entry, kafka, interval, parser, add_prefix, add_suffix, offset_manager, router, kafka_message_key, time_source, options={})
+    def initialize(topic_entry, kafka, interval, parser, add_prefix, add_suffix, offset_manager, router, kafka_message_key, time_source, record_time_key, tag_source, record_tag_key, options={})
       @topic_entry = topic_entry
       @kafka = kafka
       @callback = method(:consume)
@@ -252,6 +267,9 @@ class Fluent::KafkaInput < Fluent::Input
       @router = router
       @kafka_message_key = kafka_message_key
       @time_source = time_source
+      @record_time_key = record_time_key
+      @tag_source = tag_source
+      @record_tag_key = record_tag_key
 
       @next_offset = @topic_entry.offset
       if @topic_entry.offset == -1 && offset_manager
@@ -288,6 +306,11 @@ class Fluent::KafkaInput < Fluent::Input
       messages.each { |msg|
         begin
           record = @parser.call(msg, @topic_entry)
+          if @tag_source == :record
+            tag = record[@record_tag_key]
+            tag = @add_prefix + "." + tag if @add_prefix
+            tag = tag + "." + @add_suffix if @add_suffix
+          end
           case @time_source
           when :kafka
             record_time = Fluent::EventTime.from_time(msg.create_time)
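
A sketch of how the new in_kafka parameters combine (values are placeholders): with `tag_source record`, the event tag is read from the record field named by `record_tag_key`, and `add_prefix`/`add_suffix` still apply around it, so a record like {"tag":"users", ...} is emitted with tag `kafka.users`:

    <source>
      @type kafka
      brokers localhost:9092
      topics app_event
      format json
      tag_source record
      record_tag_key tag
      add_prefix kafka
    </source>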
data/lib/fluent/plugin/in_kafka_group.rb CHANGED
@@ -36,6 +36,10 @@ class Fluent::KafkaGroupInput < Fluent::Input
   config_param :get_kafka_client_log, :bool, :default => false
   config_param :time_format, :string, :default => nil,
               :desc => "Time format to be used to parse 'time' field."
+  config_param :tag_source, :enum, :list => [:topic, :record], :default => :topic,
+              :desc => "Source for the fluentd event tag"
+  config_param :record_tag_key, :string, :default => 'tag',
+              :desc => "Tag field when tag_source is 'record'"
   config_param :kafka_message_key, :string, :default => nil,
               :desc => "Set kafka's message key to this field"
   config_param :connect_timeout, :integer, :default => nil,
@@ -117,7 +121,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
       @max_wait_time = conf['max_wait_ms'].to_i / 1000
     end
 
-    @parser_proc = setup_parser
+    @parser_proc = setup_parser(conf)
 
     @consumer_opts = {:group_id => @consumer_group}
     @consumer_opts[:session_timeout] = @session_timeout if @session_timeout
@@ -138,9 +142,13 @@ class Fluent::KafkaGroupInput < Fluent::Input
         @time_parser = Fluent::TextParser::TimeParser.new(@time_format)
       end
     end
+
+    if @time_source == :record && defined?(Fluent::NumericTimeParser)
+      @float_numeric_parse = Fluent::NumericTimeParser.new(:float)
+    end
   end
 
-  def setup_parser
+  def setup_parser(conf)
     case @format
     when 'json'
       begin
@@ -159,6 +167,14 @@ class Fluent::KafkaGroupInput < Fluent::Input
       Proc.new { |msg| MessagePack.unpack(msg.value) }
     when 'text'
       Proc.new { |msg| {@message_key => msg.value} }
+    else
+      @custom_parser = Fluent::Plugin.new_parser(conf['format'])
+      @custom_parser.configure(conf)
+      Proc.new { |msg|
+        @custom_parser.parse(msg.value) {|_time, record|
+          record
+        }
+      }
     end
   end
 
@@ -236,49 +252,104 @@ class Fluent::KafkaGroupInput < Fluent::Input
     end
   end
 
+  def process_batch_with_record_tag(batch)
+    es = {}
+    batch.messages.each { |msg|
+      begin
+        record = @parser_proc.call(msg)
+        tag = record[@record_tag_key]
+        tag = @add_prefix + "." + tag if @add_prefix
+        tag = tag + "." + @add_suffix if @add_suffix
+        es[tag] ||= Fluent::MultiEventStream.new
+        case @time_source
+        when :kafka
+          record_time = Fluent::EventTime.from_time(msg.create_time)
+        when :now
+          record_time = Fluent::Engine.now
+        when :record
+          if @time_format
+            record_time = @time_parser.parse(record[@record_time_key].to_s)
+          else
+            record_time = record[@record_time_key]
+          end
+        else
+          log.fatal "BUG: invalid time_source: #{@time_source}"
+        end
+        if @kafka_message_key
+          record[@kafka_message_key] = msg.key
+        end
+        if @add_headers
+          msg.headers.each_pair { |k, v|
+            record[k] = v
+          }
+        end
+        es[tag].add(record_time, record)
+      rescue => e
+        log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
+        log.debug_backtrace
+      end
+    }
+
+    unless es.empty?
+      es.each { |tag,es|
+        emit_events(tag, es)
+      }
+    end
+  end
+
+  def process_batch(batch)
+    es = Fluent::MultiEventStream.new
+    tag = batch.topic
+    tag = @add_prefix + "." + tag if @add_prefix
+    tag = tag + "." + @add_suffix if @add_suffix
+
+    batch.messages.each { |msg|
+      begin
+        record = @parser_proc.call(msg)
+        case @time_source
+        when :kafka
+          record_time = Fluent::EventTime.from_time(msg.create_time)
+        when :now
+          record_time = Fluent::Engine.now
+        when :record
+          record_time = record[@record_time_key]
+
+          if @time_format
+            record_time = @time_parser.parse(record_time.to_s)
+          elsif record_time.is_a?(Float) && @float_numeric_parse
+            record_time = @float_numeric_parse.parse(record_time)
+          end
+        else
+          log.fatal "BUG: invalid time_source: #{@time_source}"
+        end
+        if @kafka_message_key
+          record[@kafka_message_key] = msg.key
+        end
+        if @add_headers
+          msg.headers.each_pair { |k, v|
+            record[k] = v
+          }
+        end
+        es.add(record_time, record)
+      rescue => e
+        log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
+        log.debug_backtrace
+      end
+    }
+
+    unless es.empty?
+      emit_events(tag, es)
+    end
+  end
+
   def run
     while @consumer
       begin
         @consumer.each_batch(@fetch_opts) { |batch|
-          es = Fluent::MultiEventStream.new
-          tag = batch.topic
-          tag = @add_prefix + "." + tag if @add_prefix
-          tag = tag + "." + @add_suffix if @add_suffix
-
-          batch.messages.each { |msg|
-            begin
-              record = @parser_proc.call(msg)
-              case @time_source
-              when :kafka
-                record_time = Fluent::EventTime.from_time(msg.create_time)
-              when :now
-                record_time = Fluent::Engine.now
-              when :record
-                if @time_format
-                  record_time = @time_parser.parse(record[@record_time_key].to_s)
-                else
-                  record_time = record[@record_time_key]
-                end
-              else
-                log.fatal "BUG: invalid time_source: #{@time_source}"
-              end
-              if @kafka_message_key
-                record[@kafka_message_key] = msg.key
-              end
-              if @add_headers
-                msg.headers.each_pair { |k, v|
-                  record[k] = v
-                }
-              end
-              es.add(record_time, record)
-            rescue => e
-              log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
-              log.debug_backtrace
-            end
-          }
-
-          unless es.empty?
-            emit_events(tag, es)
+          if @tag_source == :record
+            process_batch_with_record_tag(batch)
+          else
+            process_batch(batch)
           end
         }
      rescue ForShutdown
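
The new `else` branch in setup_parser is the "3rd party parser" support from the 0.15.2 ChangeLog entry: any parser type registered with Fluentd can now be named in `format`, not just the built-in json/ltsv/msgpack/text cases. A hedged sketch, assuming the `syslog` parser bundled with Fluentd (broker and topic values are placeholders):

    <source>
      @type kafka_group
      brokers localhost:9092
      consumer_group fluentd
      topics syslog_topic
      format syslog
    </source>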
data/lib/fluent/plugin/in_rdkafka_group.rb CHANGED
@@ -7,7 +7,7 @@ require 'rdkafka'
 class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
   Fluent::Plugin.register_input('rdkafka_group', self)
 
-  helpers :thread
+  helpers :thread, :parser, :compat_parameters
 
   config_param :topics, :string,
                :desc => "Listening topics(separate with comma',')."
@@ -41,15 +41,19 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
                :desc => "If set true, it disables retry_limit and make Fluentd retry indefinitely (default: false)"
   config_param :retry_limit, :integer, :default => 10,
                :desc => "The maximum number of retries for connecting kafka (default: 10)"
-  
+
   config_param :max_wait_time_ms, :integer, :default => 250,
                :desc => "How long to block polls in milliseconds until the server sends us data."
   config_param :max_batch_size, :integer, :default => 10000,
                :desc => "Maximum number of log lines emitted in a single batch."
-  
+
   config_param :kafka_configs, :hash, :default => {},
                :desc => "Kafka configuration properties as desribed in https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md"
 
+  config_section :parse do
+    config_set_default :@type, 'json'
+  end
+
   include Fluent::KafkaPluginUtil::SSLSettings
   include Fluent::KafkaPluginUtil::SaslSettings
 
@@ -80,6 +84,8 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
   private :_config_to_array
 
   def configure(conf)
+    compat_parameters_convert(conf, :parser)
+
     super
 
     log.warn "The in_rdkafka_group consumer was not yet tested under heavy production load. Use it at your own risk!"
@@ -89,7 +95,14 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
 
     @topics = _config_to_array(@topics)
 
-    @parser_proc = setup_parser
+    parser_conf = conf.elements('parse').first
+    unless parser_conf
+      raise Fluent::ConfigError, "<parse> section or format parameter is required."
+    end
+    unless parser_conf["@type"]
+      raise Fluent::ConfigError, "parse/@type is required."
+    end
+    @parser_proc = setup_parser(parser_conf)
 
     @time_source = :record if @use_record_time
 
@@ -98,8 +111,9 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
     end
   end
 
-  def setup_parser
-    case @format
+  def setup_parser(parser_conf)
+    format = parser_conf["@type"]
+    case format
     when 'json'
       begin
         require 'oj'
@@ -117,6 +131,13 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
       Proc.new { |msg| MessagePack.unpack(msg.payload) }
     when 'text'
       Proc.new { |msg| {@message_key => msg.payload} }
+    else
+      @custom_parser = parser_create(usage: 'in-rdkafka-plugin', conf: parser_conf)
+      Proc.new { |msg|
+        @custom_parser.parse(msg.payload) {|_time, record|
+          record
+        }
+      }
     end
   end
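in_rdkafka_group now resolves its parser through Fluentd's parser plugin helper from a <parse> section (defaulting to `@type json`), with compat_parameters_convert mapping the old flat `format` parameter onto that section. A sketch of the new style; the kafka_configs values are placeholders:

    <source>
      @type rdkafka_group
      topics app_event
      kafka_configs {"bootstrap.servers":"localhost:9092","group.id":"fluentd"}
      <parse>
        @type ltsv
      </parse>
    </source>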
 
data/lib/fluent/plugin/kafka_plugin_util.rb CHANGED
@@ -33,7 +33,7 @@ module Fluent
     end
 
     def read_ssl_file(path)
-      return nil if path.nil?
+      return nil if path.nil? || path.respond_to?(:strip) && path.strip.empty?
 
       if path.is_a?(Array)
         path.map { |fp| File.read(fp) }
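
Note the operator precedence in the new guard: `&&` binds tighter than `||`, so it reads as `path.nil? || (path.respond_to?(:strip) && path.strip.empty?)`, returning nil for nil, empty, and whitespace-only paths. A standalone sketch of the predicate (the helper name is hypothetical, not part of the plugin):

    # Hypothetical standalone version of the guard used in read_ssl_file.
    def blank_path?(path)
      path.nil? || (path.respond_to?(:strip) && path.strip.empty?)
    end

    blank_path?(nil)    #=> true
    blank_path?("")     #=> true
    blank_path?("  ")   #=> true
    blank_path?("path") #=> false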
data/lib/fluent/plugin/out_kafka.rb CHANGED
@@ -215,7 +215,7 @@ DESC
     chain.next
 
     # out_kafka is mainly for testing so don't need the performance unlike out_kafka_buffered.
-    producer = @kafka.producer(@producer_opts)
+    producer = @kafka.producer(**@producer_opts)
 
     es.each do |time, record|
       if @output_include_time
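
The `**` splat is the Ruby 3.0 keyword-arguments fix from the 0.16.1 ChangeLog entry: ruby-kafka's producer method takes keyword parameters, and Ruby 3.0 no longer converts a trailing Hash into keyword arguments implicitly. A minimal sketch of the failure mode, independent of ruby-kafka:

    # On Ruby 3.0, a bare Hash argument is no longer promoted to keyword arguments.
    def producer(required_acks: -1, compression_codec: nil)
      [required_acks, compression_codec]
    end

    opts = { :required_acks => 1 }
    producer(**opts)   # works on Ruby 2.x and 3.x
    # producer(opts)   # ArgumentError on Ruby 3.0 (given 1, expected 0)

The same change is applied to out_kafka_buffered in the hunk after next.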
data/lib/fluent/plugin/out_kafka2.rb CHANGED
@@ -128,7 +128,7 @@ DESC
       @seed_brokers = @brokers
       log.info "brokers has been set: #{@seed_brokers}"
     else
-      raise Fluent::Config, 'No brokers specified. Need one broker at least.'
+      raise Fluent::ConfigError, 'No brokers specified. Need one broker at least.'
     end
 
     formatter_conf = conf.elements('format').first
data/lib/fluent/plugin/out_kafka_buffered.rb CHANGED
@@ -239,7 +239,7 @@ DESC
     @producers_mutex.synchronize {
       producer = @producers[Thread.current.object_id]
       unless producer
-        producer = @kafka.producer(@producer_opts)
+        producer = @kafka.producer(**@producer_opts)
         @producers[Thread.current.object_id] = producer
       end
       producer
data/test/plugin/test_kafka_plugin_util.rb ADDED
@@ -0,0 +1,38 @@
+require 'helper'
+require 'fluent/plugin/kafka_plugin_util'
+
+class File
+  def File::read(path)
+    path
+  end
+end
+
+class KafkaPluginUtilTest < Test::Unit::TestCase
+
+  def self.config_param(name, type, options)
+  end
+  include Fluent::KafkaPluginUtil::SSLSettings
+
+  def config_param
+  end
+  def setup
+    Fluent::Test.setup
+  end
+
+  def test_read_ssl_file_when_nil
+    assert_equal(nil, read_ssl_file(nil))
+  end
+
+  def test_read_ssl_file_when_empty_string
+    assert_equal(nil, read_ssl_file(""))
+  end
+
+  def test_read_ssl_file_when_non_empty_path
+    assert_equal("path", read_ssl_file("path"))
+  end
+
+  def test_read_ssl_file_when_non_empty_array
+    assert_equal(["a","b"], read_ssl_file(["a","b"]))
+  end
+
+end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.15.0
+  version: 0.16.1
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-09-14 00:00:00.000000000 Z
+date: 2021-04-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -93,6 +93,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: 3.0.8
+- !ruby/object:Gem::Dependency
+  name: webrick
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: Fluentd plugin for Apache Kafka > 0.8
 email:
 - togachiro@gmail.com
@@ -101,6 +115,7 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
+- ".github/workflows/linux.yml"
 - ".gitignore"
 - ".travis.yml"
 - ChangeLog
@@ -120,6 +135,7 @@ files:
 - lib/fluent/plugin/out_rdkafka.rb
 - lib/fluent/plugin/out_rdkafka2.rb
 - test/helper.rb
+- test/plugin/test_kafka_plugin_util.rb
 - test/plugin/test_out_kafka.rb
 homepage: https://github.com/fluent/fluent-plugin-kafka
 licenses:
@@ -140,10 +156,11 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.0.3
+rubygems_version: 3.1.2
 signing_key:
 specification_version: 4
 summary: Fluentd plugin for Apache Kafka > 0.8
 test_files:
 - test/helper.rb
+- test/plugin/test_kafka_plugin_util.rb
 - test/plugin/test_out_kafka.rb