fluent-plugin-kafka 0.15.1 → 0.16.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 56e2a0fc884e2ef670d81e61b180de9ccc91eb6b67b9f0edb8015af4212e76ce
- data.tar.gz: 1fa73921dba4cc833e4032f5636287659e874ab98c210b20ee5a71a0194bc40a
+ metadata.gz: 1f388f79dd4b9bf95418b71cd78a9e2f712fc01aa791dab8e0f5e22cd899c2b6
+ data.tar.gz: 00cee257a09cbfe3773d8e077b651668e9115d94f813b2fe575917789248a966
  SHA512:
- metadata.gz: f7ec3be524feb670cd1823a948dc37b600142107c9949a596cc5b4aa4948e1a83387113ccb5319d1df3484bbcdf7d0df8981f1fbcf7281053f1a0419f80cc5e8
- data.tar.gz: 3c28d90f1863317f3da9efde6fc217f8754bbea942f1d94714cb4cce3eac210691416f8aab9f1e76c50ab2a372365358390e283c53e83adc24ead06468b81542
+ metadata.gz: 5e23cb803cef76ac8446007b1a60e091868c99c62b839d040093232ae90456306e1eb6bef9630f4c8230e9662ac3a0039eca1728640bccd906f4ed099f5fed4a
+ data.tar.gz: bc2c9f55199a5227d4ce6d5ea56c91fc31ca03afc0d45b23966d81f3d3a9f1eeea356b2af161d851b5e800fe4492ca6d7df7c9d07c81942e1602768d64a21156
data/.github/workflows/linux.yml ADDED
@@ -0,0 +1,26 @@
+ name: linux
+ on:
+   - push
+   - pull_request
+ jobs:
+   build:
+     runs-on: ${{ matrix.os }}
+     strategy:
+       fail-fast: false
+       matrix:
+         ruby: [ '2.4', '2.5', '2.6', '2.7', '3.0' ]
+         os:
+           - ubuntu-latest
+     name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
+     steps:
+       - uses: actions/checkout@v2
+       - uses: ruby/setup-ruby@v1
+         with:
+           ruby-version: ${{ matrix.ruby }}
+       - name: unit testing
+         env:
+           CI: true
+         run: |
+           gem install bundler rake
+           bundle install --jobs 4 --retry 3
+           bundle exec rake test
data/ChangeLog CHANGED
@@ -1,3 +1,23 @@
+ Release 0.16.2 - 2021/05/17
+ * in_kafka, in_kafka_group: Support Ruby 3.0 keyword arguments interop
+
+ Release 0.16.1 - 2021/04/14
+ * out_kafka/out_kafka_buffered: Support Ruby 3.0.0 keyword arguments interop
+ * kafka_plugin_util: Treat empty string in read_ssl_file as nil
+
+ Release 0.16.0 - 2021/01/25
+
+ * input: Add `tag_source` and `record_tag_key` parameters for using record field as tag
+ * in_kafka_group: Use NumericParser for floating point
+
+ Release 0.15.3 - 2020/12/08
+
+ * in_kafka: Fix `record_time_key` parameter not working
+
+ Release 0.15.2 - 2020/09/30
+
+ * input: Support 3rd party parser
+
  Release 0.15.1 - 2020/09/17

  * out_kafka2: Fix wrong class name for configuration error
@@ -24,6 +44,7 @@ Release 0.14.0 - 2020/08/07
  Release 0.13.1 - 2020/07/17

  * in_kafka_group: Support ssl_verify_hostname parameter
+ * in_kafka_group: Support regex based topics
  * out_kafka2/out_rdkafka2: Support topic parameter with placeholders

  Release 0.13.0 - 2020/03/09
data/README.md CHANGED
@@ -1,6 +1,7 @@
  # fluent-plugin-kafka, a plugin for [Fluentd](http://fluentd.org)

- [![Build Status](https://travis-ci.org/fluent/fluent-plugin-kafka.svg?branch=master)](https://travis-ci.org/fluent/fluent-plugin-kafka)
+ [![GitHub Actions Status](https://github.com/fluent/fluent-plugin-kafka/actions/workflows/linux.yml/badge.svg)](https://github.com/fluent/fluent-plugin-kafka/actions/workflows/linux.yml)
+

  A fluentd plugin to both consume and produce data for Apache Kafka.

@@ -139,6 +140,8 @@ Consume events by kafka consumer group features..

  See also [ruby-kafka README](https://github.com/zendesk/ruby-kafka#consuming-messages-from-kafka) for more detailed documentation about ruby-kafka options.

+ `topics` supports regex pattern since v0.13.1. If you want to use regex pattern, use `/pattern/` like `/foo.*/`.
+
  Consuming topic name is used for event tag. So when the target topic name is `app_event`, the tag is `app_event`. If you want to modify tag, use `add_prefix` or `add_suffix` parameter. With `add_prefix kafka`, the tag is `kafka.app_event`.

  ### Input plugin (@type 'rdkafka_group', supports kafka consumer groups, uses rdkafka-ruby)
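As a rough illustration of the regex `topics` note above (not taken from the gem's documentation; broker address and group name are placeholders), a consumer-group source subscribing by pattern might look like this:

  <source>
    @type kafka_group
    brokers localhost:9092        # placeholder broker list
    consumer_group my_group       # placeholder consumer group id
    topics /app_.*/               # regex: subscribes to app_event, app_log, ...
    format json
  </source>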
data/fluent-plugin-kafka.gemspec CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = "fluent-plugin-kafka"
  gem.require_paths = ["lib"]
- gem.version = '0.15.1'
+ gem.version = '0.16.2'
  gem.required_ruby_version = ">= 2.1.0"

  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
@@ -21,4 +21,5 @@ Gem::Specification.new do |gem|
  gem.add_dependency 'ruby-kafka', '>= 1.2.0', '< 2'
  gem.add_development_dependency "rake", ">= 0.9.2"
  gem.add_development_dependency "test-unit", ">= 3.0.8"
+ gem.add_development_dependency "webrick"
  end
data/lib/fluent/plugin/in_kafka.rb CHANGED
@@ -31,6 +31,10 @@ class Fluent::KafkaInput < Fluent::Input
  config_param :add_suffix, :string, :default => nil,
  :desc => "tag suffix"
  config_param :add_offset_in_record, :bool, :default => false
+ config_param :tag_source, :enum, :list => [:topic, :record], :default => :topic,
+ :desc => "Source for the fluentd event tag"
+ config_param :record_tag_key, :string, :default => 'tag',
+ :desc => "Tag field when tag_source is 'record'"

  config_param :offset_zookeeper, :string, :default => nil
  config_param :offset_zk_root_node, :string, :default => '/fluent-plugin-kafka'
@@ -113,7 +117,7 @@ class Fluent::KafkaInput < Fluent::Input

  require 'zookeeper' if @offset_zookeeper

- @parser_proc = setup_parser
+ @parser_proc = setup_parser(conf)

  @time_source = :record if @use_record_time

@@ -126,7 +130,7 @@ class Fluent::KafkaInput < Fluent::Input
  end
  end

- def setup_parser
+ def setup_parser(conf)
  case @format
  when 'json'
  begin
@@ -165,6 +169,14 @@ class Fluent::KafkaInput < Fluent::Input
  add_offset_in_hash(r, te, msg.offset) if @add_offset_in_record
  r
  }
+ else
+ @custom_parser = Fluent::Plugin.new_parser(conf['format'])
+ @custom_parser.configure(conf)
+ Proc.new { |msg|
+ @custom_parser.parse(msg.value) {|_time, record|
+ record
+ }
+ }
  end
  end

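The new `else` branch above routes any `format` value not handled built-in through `Fluent::Plugin.new_parser`, so registered parser plugins can decode the message value ("input: Support 3rd party parser" in the ChangeLog). A hypothetical configuration sketch, with placeholder broker and topic, might be:

  <source>
    @type kafka
    brokers localhost:9092     # placeholder
    topics app_event           # placeholder
    format ltsv                # any parser type registered with Fluentd should work here
  </source>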
@@ -216,6 +228,9 @@ class Fluent::KafkaInput < Fluent::Input
  router,
  @kafka_message_key,
  @time_source,
+ @record_time_key,
+ @tag_source,
+ @record_tag_key,
  opt)
  }
  @topic_watchers.each {|tw|
@@ -240,7 +255,7 @@ class Fluent::KafkaInput < Fluent::Input
  end

  class TopicWatcher < Coolio::TimerWatcher
- def initialize(topic_entry, kafka, interval, parser, add_prefix, add_suffix, offset_manager, router, kafka_message_key, time_source, options={})
+ def initialize(topic_entry, kafka, interval, parser, add_prefix, add_suffix, offset_manager, router, kafka_message_key, time_source, record_time_key, tag_source, record_tag_key, options={})
  @topic_entry = topic_entry
  @kafka = kafka
  @callback = method(:consume)
@@ -252,6 +267,9 @@ class Fluent::KafkaInput < Fluent::Input
  @router = router
  @kafka_message_key = kafka_message_key
  @time_source = time_source
+ @record_time_key = record_time_key
+ @tag_source = tag_source
+ @record_tag_key = record_tag_key

  @next_offset = @topic_entry.offset
  if @topic_entry.offset == -1 && offset_manager
@@ -276,7 +294,7 @@ class Fluent::KafkaInput < Fluent::Input
  def consume
  offset = @next_offset
  @fetch_args[:offset] = offset
- messages = @kafka.fetch_messages(@fetch_args)
+ messages = @kafka.fetch_messages(**@fetch_args)

  return if messages.size.zero?

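The `**@fetch_args` change above (and the matching `**@consumer_opts` and `**@producer_opts` changes elsewhere in this diff) reflects Ruby 3.0's separation of positional and keyword arguments: a Hash argument is no longer implicitly converted into keyword arguments, so it must be splatted explicitly. A minimal standalone Ruby sketch of the behavior (not code from the gem):

  # A method that only accepts keyword arguments, like ruby-kafka's fetch/producer APIs.
  def fetch(min_bytes: 1, max_wait_time: 1)
    [min_bytes, max_wait_time]
  end

  args = { min_bytes: 1024, max_wait_time: 5 }
  fetch(args)    # Ruby 3.0: ArgumentError, the hash is treated as a positional argument
  fetch(**args)  # works on Ruby 2.x and 3.x: the hash is splatted into keyword arguments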
@@ -288,6 +306,11 @@ class Fluent::KafkaInput < Fluent::Input
  messages.each { |msg|
  begin
  record = @parser.call(msg, @topic_entry)
+ if @tag_source == :record
+ tag = record[@record_tag_key]
+ tag = @add_prefix + "." + tag if @add_prefix
+ tag = tag + "." + @add_suffix if @add_suffix
+ end
  case @time_source
  when :kafka
  record_time = Fluent::EventTime.from_time(msg.create_time)
data/lib/fluent/plugin/in_kafka_group.rb CHANGED
@@ -36,6 +36,10 @@ class Fluent::KafkaGroupInput < Fluent::Input
  config_param :get_kafka_client_log, :bool, :default => false
  config_param :time_format, :string, :default => nil,
  :desc => "Time format to be used to parse 'time' field."
+ config_param :tag_source, :enum, :list => [:topic, :record], :default => :topic,
+ :desc => "Source for the fluentd event tag"
+ config_param :record_tag_key, :string, :default => 'tag',
+ :desc => "Tag field when tag_source is 'record'"
  config_param :kafka_message_key, :string, :default => nil,
  :desc => "Set kafka's message key to this field"
  config_param :connect_timeout, :integer, :default => nil,
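Both in_kafka and in_kafka_group gain these two parameters; with `tag_source record`, the event tag is taken from a record field instead of the topic name. A hypothetical configuration sketch (broker, group, and field names are placeholders):

  <source>
    @type kafka_group
    brokers localhost:9092       # placeholder
    consumer_group my_group      # placeholder
    topics app_events            # placeholder
    format json
    tag_source record            # take the tag from the record...
    record_tag_key service_tag   # ...using this field (defaults to 'tag')
  </source>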
@@ -117,7 +121,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
  @max_wait_time = conf['max_wait_ms'].to_i / 1000
  end

- @parser_proc = setup_parser
+ @parser_proc = setup_parser(conf)

  @consumer_opts = {:group_id => @consumer_group}
  @consumer_opts[:session_timeout] = @session_timeout if @session_timeout
@@ -138,9 +142,13 @@ class Fluent::KafkaGroupInput < Fluent::Input
  @time_parser = Fluent::TextParser::TimeParser.new(@time_format)
  end
  end
+
+ if @time_source == :record && defined?(Fluent::NumericTimeParser)
+ @float_numeric_parse = Fluent::NumericTimeParser.new(:float)
+ end
  end

- def setup_parser
+ def setup_parser(conf)
  case @format
  when 'json'
  begin
@@ -159,6 +167,14 @@ class Fluent::KafkaGroupInput < Fluent::Input
  Proc.new { |msg| MessagePack.unpack(msg.value) }
  when 'text'
  Proc.new { |msg| {@message_key => msg.value} }
+ else
+ @custom_parser = Fluent::Plugin.new_parser(conf['format'])
+ @custom_parser.configure(conf)
+ Proc.new { |msg|
+ @custom_parser.parse(msg.value) {|_time, record|
+ record
+ }
+ }
  end
  end

@@ -201,7 +217,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
  end

  def setup_consumer
- consumer = @kafka.consumer(@consumer_opts)
+ consumer = @kafka.consumer(**@consumer_opts)
  @topics.each { |topic|
  if m = /^\/(.+)\/$/.match(topic)
  topic_or_regex = Regexp.new(m[1])
@@ -236,49 +252,104 @@ class Fluent::KafkaGroupInput < Fluent::Input
  end
  end

+ def process_batch_with_record_tag(batch)
+   es = {}
+   batch.messages.each { |msg|
+     begin
+       record = @parser_proc.call(msg)
+       tag = record[@record_tag_key]
+       tag = @add_prefix + "." + tag if @add_prefix
+       tag = tag + "." + @add_suffix if @add_suffix
+       es[tag] ||= Fluent::MultiEventStream.new
+       case @time_source
+       when :kafka
+         record_time = Fluent::EventTime.from_time(msg.create_time)
+       when :now
+         record_time = Fluent::Engine.now
+       when :record
+         if @time_format
+           record_time = @time_parser.parse(record[@record_time_key].to_s)
+         else
+           record_time = record[@record_time_key]
+         end
+       else
+         log.fatal "BUG: invalid time_source: #{@time_source}"
+       end
+       if @kafka_message_key
+         record[@kafka_message_key] = msg.key
+       end
+       if @add_headers
+         msg.headers.each_pair { |k, v|
+           record[k] = v
+         }
+       end
+       es[tag].add(record_time, record)
+     rescue => e
+       log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
+       log.debug_backtrace
+     end
+   }
+
+   unless es.empty?
+     es.each { |tag,es|
+       emit_events(tag, es)
+     }
+   end
+ end
+
+ def process_batch(batch)
+   es = Fluent::MultiEventStream.new
+   tag = batch.topic
+   tag = @add_prefix + "." + tag if @add_prefix
+   tag = tag + "." + @add_suffix if @add_suffix
+
+   batch.messages.each { |msg|
+     begin
+       record = @parser_proc.call(msg)
+       case @time_source
+       when :kafka
+         record_time = Fluent::EventTime.from_time(msg.create_time)
+       when :now
+         record_time = Fluent::Engine.now
+       when :record
+         record_time = record[@record_time_key]
+
+         if @time_format
+           record_time = @time_parser.parse(record_time.to_s)
+         elsif record_time.is_a?(Float) && @float_numeric_parse
+           record_time = @float_numeric_parse.parse(record_time)
+         end
+       else
+         log.fatal "BUG: invalid time_source: #{@time_source}"
+       end
+       if @kafka_message_key
+         record[@kafka_message_key] = msg.key
+       end
+       if @add_headers
+         msg.headers.each_pair { |k, v|
+           record[k] = v
+         }
+       end
+       es.add(record_time, record)
+     rescue => e
+       log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
+       log.debug_backtrace
+     end
+   }
+
+   unless es.empty?
+     emit_events(tag, es)
+   end
+ end
+
  def run
    while @consumer
      begin
        @consumer.each_batch(@fetch_opts) { |batch|
-         es = Fluent::MultiEventStream.new
-         tag = batch.topic
-         tag = @add_prefix + "." + tag if @add_prefix
-         tag = tag + "." + @add_suffix if @add_suffix
-
-         batch.messages.each { |msg|
-           begin
-             record = @parser_proc.call(msg)
-             case @time_source
-             when :kafka
-               record_time = Fluent::EventTime.from_time(msg.create_time)
-             when :now
-               record_time = Fluent::Engine.now
-             when :record
-               if @time_format
-                 record_time = @time_parser.parse(record[@record_time_key].to_s)
-               else
-                 record_time = record[@record_time_key]
-               end
-             else
-               log.fatal "BUG: invalid time_source: #{@time_source}"
-             end
-             if @kafka_message_key
-               record[@kafka_message_key] = msg.key
-             end
-             if @add_headers
-               msg.headers.each_pair { |k, v|
-                 record[k] = v
-               }
-             end
-             es.add(record_time, record)
-           rescue => e
-             log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
-             log.debug_backtrace
-           end
-         }
-
-         unless es.empty?
-           emit_events(tag, es)
+         if @tag_source == :record
+           process_batch_with_record_tag(batch)
+         else
+           process_batch(batch)
          end
        }
      rescue ForShutdown
data/lib/fluent/plugin/in_rdkafka_group.rb CHANGED
@@ -7,7 +7,7 @@ require 'rdkafka'
  class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
  Fluent::Plugin.register_input('rdkafka_group', self)

- helpers :thread
+ helpers :thread, :parser, :compat_parameters

  config_param :topics, :string,
  :desc => "Listening topics(separate with comma',')."
@@ -41,15 +41,19 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
  :desc => "If set true, it disables retry_limit and make Fluentd retry indefinitely (default: false)"
  config_param :retry_limit, :integer, :default => 10,
  :desc => "The maximum number of retries for connecting kafka (default: 10)"
-
+
  config_param :max_wait_time_ms, :integer, :default => 250,
  :desc => "How long to block polls in milliseconds until the server sends us data."
  config_param :max_batch_size, :integer, :default => 10000,
  :desc => "Maximum number of log lines emitted in a single batch."
-
+
  config_param :kafka_configs, :hash, :default => {},
  :desc => "Kafka configuration properties as desribed in https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md"

+ config_section :parse do
+ config_set_default :@type, 'json'
+ end
+
  include Fluent::KafkaPluginUtil::SSLSettings
  include Fluent::KafkaPluginUtil::SaslSettings

@@ -80,6 +84,8 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
  private :_config_to_array

  def configure(conf)
+ compat_parameters_convert(conf, :parser)
+
  super

  log.warn "The in_rdkafka_group consumer was not yet tested under heavy production load. Use it at your own risk!"
@@ -89,7 +95,14 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input

  @topics = _config_to_array(@topics)

- @parser_proc = setup_parser
+ parser_conf = conf.elements('parse').first
+ unless parser_conf
+ raise Fluent::ConfigError, "<parse> section or format parameter is required."
+ end
+ unless parser_conf["@type"]
+ raise Fluent::ConfigError, "parse/@type is required."
+ end
+ @parser_proc = setup_parser(parser_conf)

  @time_source = :record if @use_record_time

@@ -98,8 +111,9 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
  end
  end

- def setup_parser
- case @format
+ def setup_parser(parser_conf)
+ format = parser_conf["@type"]
+ case format
  when 'json'
  begin
  require 'oj'
@@ -117,6 +131,13 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
  Proc.new { |msg| MessagePack.unpack(msg.payload) }
  when 'text'
  Proc.new { |msg| {@message_key => msg.payload} }
+ else
+ @custom_parser = parser_create(usage: 'in-rdkafka-plugin', conf: parser_conf)
+ Proc.new { |msg|
+ @custom_parser.parse(msg.payload) {|_time, record|
+ record
+ }
+ }
  end
  end

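With the `parser` and `compat_parameters` helpers wired in above, in_rdkafka_group reads its parser from a `<parse>` section (defaulting to json), while the legacy `format` parameter is converted by compat_parameters_convert. A hypothetical configuration sketch using the bundled ltsv parser, with placeholder topic and broker settings:

  <source>
    @type rdkafka_group
    topics app_event                                                              # placeholder
    kafka_configs {"bootstrap.servers":"localhost:9092","group.id":"my_group"}    # placeholder
    <parse>
      @type ltsv
    </parse>
  </source>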
data/lib/fluent/plugin/kafka_plugin_util.rb CHANGED
@@ -33,7 +33,7 @@ module Fluent
  end

  def read_ssl_file(path)
- return nil if path.nil?
+ return nil if path.nil? || path.respond_to?(:strip) && path.strip.empty?

  if path.is_a?(Array)
  path.map { |fp| File.read(fp) }
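With this change, a blank SSL path setting behaves the same as an unset one. A rough sketch of the resulting behavior (calls shown standalone; in the gem, non-blank paths go on to be read with File.read), matching the new unit tests added below:

  read_ssl_file(nil)                 # => nil
  read_ssl_file("")                  # => nil (new in 0.16.1: blank values no longer reach File.read)
  read_ssl_file("  ")                # => nil (whitespace-only strips to empty)
  read_ssl_file("ca.crt")            # => File.read("ca.crt")
  read_ssl_file(["a.crt", "b.crt"])  # => [File.read("a.crt"), File.read("b.crt")]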
@@ -215,7 +215,7 @@ DESC
  chain.next

  # out_kafka is mainly for testing so don't need the performance unlike out_kafka_buffered.
- producer = @kafka.producer(@producer_opts)
+ producer = @kafka.producer(**@producer_opts)

  es.each do |time, record|
  if @output_include_time
@@ -239,7 +239,7 @@ DESC
  @producers_mutex.synchronize {
  producer = @producers[Thread.current.object_id]
  unless producer
- producer = @kafka.producer(@producer_opts)
+ producer = @kafka.producer(**@producer_opts)
  @producers[Thread.current.object_id] = producer
  end
  producer
data/test/plugin/test_kafka_plugin_util.rb ADDED
@@ -0,0 +1,38 @@
+ require 'helper'
+ require 'fluent/plugin/kafka_plugin_util'
+
+ class File
+   def File::read(path)
+     path
+   end
+ end
+
+ class KafkaPluginUtilTest < Test::Unit::TestCase
+
+   def self.config_param(name, type, options)
+   end
+   include Fluent::KafkaPluginUtil::SSLSettings
+
+   def config_param
+   end
+   def setup
+     Fluent::Test.setup
+   end
+
+   def test_read_ssl_file_when_nil
+     assert_equal(nil, read_ssl_file(nil))
+   end
+
+   def test_read_ssl_file_when_empty_string
+     assert_equal(nil, read_ssl_file(""))
+   end
+
+   def test_read_ssl_file_when_non_empty_path
+     assert_equal("path", read_ssl_file("path"))
+   end
+
+   def test_read_ssl_file_when_non_empty_array
+     assert_equal(["a","b"], read_ssl_file(["a","b"]))
+   end
+
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-kafka
  version: !ruby/object:Gem::Version
- version: 0.15.1
+ version: 0.16.2
  platform: ruby
  authors:
  - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-09-17 00:00:00.000000000 Z
+ date: 2021-05-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd
@@ -93,6 +93,20 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: 3.0.8
+ - !ruby/object:Gem::Dependency
+ name: webrick
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
  description: Fluentd plugin for Apache Kafka > 0.8
  email:
  - togachiro@gmail.com
@@ -101,8 +115,8 @@ executables: []
  extensions: []
  extra_rdoc_files: []
  files:
+ - ".github/workflows/linux.yml"
  - ".gitignore"
- - ".travis.yml"
  - ChangeLog
  - Gemfile
  - LICENSE
@@ -120,6 +134,7 @@ files:
  - lib/fluent/plugin/out_rdkafka.rb
  - lib/fluent/plugin/out_rdkafka2.rb
  - test/helper.rb
+ - test/plugin/test_kafka_plugin_util.rb
  - test/plugin/test_out_kafka.rb
  homepage: https://github.com/fluent/fluent-plugin-kafka
  licenses:
@@ -140,10 +155,11 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.0.3
+ rubygems_version: 3.2.5
  signing_key:
  specification_version: 4
  summary: Fluentd plugin for Apache Kafka > 0.8
  test_files:
  - test/helper.rb
+ - test/plugin/test_kafka_plugin_util.rb
  - test/plugin/test_out_kafka.rb
data/.travis.yml DELETED
@@ -1,21 +0,0 @@
- language: ruby
-
- rvm:
-   - 2.1
-   - 2.2
-   - 2.3.1
-   - 2.4.1
-   - 2.5.0
-   - ruby-head
-
- before_install:
-   - gem update --system=2.7.8
- script:
-   - bundle exec rake test
-
- sudo: false
-
- matrix:
-   allow_failures:
-     - rvm: ruby-head
-