fluent-plugin-deis-kafka 0.1.0 → 0.2.0

Files changed (4)
  1. checksums.yaml +4 -4
  2. data/README.md +2 -1
  3. data/lib/fluent/plugin/out_deis.rb +171 -179
  4. metadata +5 -47
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 52fe5165a1eafbb97ae6af21ce01e4ac2c7aaaa0
-  data.tar.gz: 8acc20b11e2e5c4d3d85b2ff528637259dc13925
+  metadata.gz: c414fda72d442beb154378e4c9eb6c1b5d87a648
+  data.tar.gz: 16bd58fb7bb0a822bb3574a0ec037d7cf10f33fd
 SHA512:
-  metadata.gz: 171b8ceea5f2fd90e48550d9a9f72daadb6b59dc3b770035c4841d3fe6cc678b559f3489f6b442a6d2e9f5cae4c24535bc57a620827b3e605be4244fa9cea830
-  data.tar.gz: d26124dca9d98030be9359460dbbb9c4eb7b593665cc59807a44b7cc0a6a9faab66766dc4f3f8fac0b25dca0573bb8fa37eaa0e8762c42dd401decedffbd7592
+  metadata.gz: 4fb84eaaae243d26775b3e20b0ca0a0725f586d914db8332ccb036ff07f56a0470cd90603572c262fb9335739a22c11b1aca6170a4459f9da6c8cb28fe24aec9
+  data.tar.gz: 0df1e72c2a52b1242018dee56bf75eeacd1fafba515594977384bfbcd7caa080c46b19ca9a30e2d57144a46ed794bc7abfee83b8868329d7ed21a6b8a22ae350
data/README.md CHANGED
@@ -1 +1,2 @@
-# fluentd-plugin-deis-kafka
+# fluentd-plugin-deis-kafka
+[![Build Status](https://travis-ci.org/topfreegames/fluent-plugin-deis-kafka.svg?branch=master)](https://travis-ci.org/topfreegames/fluent-plugin-deis-kafka) [![Dependency Status](https://gemnasium.com/badges/github.com/topfreegames/fluent-plugin-deis-kafka.svg)](https://gemnasium.com/github.com/topfreegames/fluent-plugin-deis-kafka) [![Gem Version](https://badge.fury.io/rb/fluentd.svg)](https://badge.fury.io/rb/fluentd)
data/lib/fluent/plugin/out_deis.rb CHANGED
@@ -1,214 +1,206 @@
 # frozen_string_literal: true

-require 'fluent/mixin/config_placeholders'
-require 'fluent/mixin/plaintextformatter'
-require 'fluent/mixin/rewrite_tag_name'
 require 'fluent/mixin/deis'
-require 'fluent/output'
+require 'fluent/plugin/output'
+require 'fluent/plugin_helper/compat_parameters'
 require 'influxdb'

 module Fluent
-  class DeisOutput < Output
-    Fluent::Plugin.register_output('deis', self)
-
-    include Fluent::Mixin::PlainTextFormatter
-    include Fluent::Mixin::ConfigPlaceholders
-    include Fluent::HandleTagNameMixin
-    include Fluent::Mixin::RewriteTagName
-    include Fluent::Mixin::Deis
-    config_param :brokers, :string, default: 'localhost:9092',
-                 desc: <<-DESC
-                   Set brokers directly:
-                   <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
-                 DESC
-    config_param :client_id, :string, default: 'fluentd'
-    config_param :metrics_topic, :string, default: 'metrics'
-    config_param :discard_kafka_delivery_failed, :bool, default: false
-
-    # ruby-kafka producer options
-    config_param :max_send_retries, :integer, default: 2,
-                 desc: 'Number of times to retry '\
-                       'sending of messages to a leader.'
-    config_param :required_acks, :integer, default: 1,
-                 desc: 'The number of acks required per request.'
-    config_param :ack_timeout, :time, default: nil,
-                 desc: 'How long the producer waits for acks.'
-    config_param :compression_codec, :string, default: nil,
-                 desc: <<~DESC
-                   The codec the producer uses to compress messages.
-                   Supported codecs: (gzip|snappy)
-                 DESC
-
-    config_param :max_send_limit_bytes, :size, default: nil
-    config_param :kafka_agg_max_bytes, :size, default: 4 * 1024 # 4k
-    config_param :kafka_agg_max_messages, :integer, default: nil
-
-    define_method('log') { $log } unless method_defined?(:log) # rubocop:disable GlobalVars
-
-    def initialize
-      super
-      require 'kafka'
-      require 'fluent/plugin/kafka_producer_ext'
-
-      @kafka = nil
-      @producers = {}
-      @producers_mutex = Mutex.new
-    end
-
-    def start
-      super
-      refresh_client
-    end
+  module Plugin
+    class DeisOutput < Output
+      Fluent::Plugin.register_output('deis', self)
+
+      include Fluent::Mixin::Deis
+      config_param :brokers, :string, default: 'localhost:9092',
+                   desc: <<-DESC
+                     Set brokers directly:
+                     <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
+                   DESC
+      config_param :client_id, :string, default: 'fluentd'
+      config_param :metrics_topic, :string, default: 'metrics'
+      config_param :discard_kafka_delivery_failed, :bool, default: false
+
+      # ruby-kafka producer options
+      config_param :max_send_retries, :integer, default: 2,
+                   desc: 'Number of times to retry '\
+                         'sending of messages to a leader.'
+      config_param :required_acks, :integer, default: 1,
+                   desc: 'The number of acks required per request.'
+      config_param :ack_timeout, :time, default: nil,
+                   desc: 'How long the producer waits for acks.'
+      config_param :compression_codec, :string, default: nil,
+                   desc: <<~DESC
+                     The codec the producer uses to compress messages.
+                     Supported codecs: (gzip|snappy)
+                   DESC
+
+      config_param :max_send_limit_bytes, :size, default: nil
+      config_param :kafka_agg_max_bytes, :size, default: 4 * 1024 # 4k
+      config_param :kafka_agg_max_messages, :integer, default: nil
+
+      define_method('log') { $log } unless method_defined?(:log) # rubocop:disable GlobalVars
+
+      def initialize
+        super
+        require 'kafka'
+        require 'fluent/plugin/kafka_producer_ext'
+
+        @kafka = nil
+        @producers = {}
+        @producers_mutex = Mutex.new
+      end

-    def shutdown
-      super
-      shutdown_producers
-      @kafka = nil
-    end
+      def start
+        super
+        refresh_client
+      end

-    def format_stream(_tag, es)
-      es.to_msgpack_stream
-    end
+      def shutdown
+        super
+        shutdown_producers
+        @kafka = nil
+      end

-    def shutdown_producers
-      @producers_mutex.synchronize do
-        @producers.each_value(&:shutdown)
-        @producers = {}
+      def shutdown_producers
+        @producers_mutex.synchronize do
+          @producers.each_value(&:shutdown)
+          @producers = {}
+        end
       end
-    end

-    def get_producer # rubocop:disable AccessorMethodName
-      @producers_mutex.synchronize do
-        producer = @producers[Thread.current.object_id]
-        unless producer
-          producer = @kafka.producer(@producer_opts)
-          @producers[Thread.current.object_id] = producer
+      def get_producer # rubocop:disable AccessorMethodName
+        @producers_mutex.synchronize do
+          producer = @producers[Thread.current.object_id]
+          unless producer
+            producer = @kafka.producer(@producer_opts)
+            @producers[Thread.current.object_id] = producer
+          end
+          producer
         end
-        producer
       end
-    end

-    def deliver_messages(producer, tag)
-      if @discard_kafka_delivery_failed
-        begin
+      def deliver_messages(producer, tag)
+        if @discard_kafka_delivery_failed
+          begin
+            producer.deliver_messages
+          rescue Kafka::DeliveryFailed => e
+            log.warn 'DeliveryFailed occurred. Discard broken event:',
+                     error: e.to_s, error_class: e.class.to_s, tag: tag
+            producer.clear_buffer
+          end
+        else
           producer.deliver_messages
-        rescue Kafka::DeliveryFailed => e
-          log.warn 'DeliveryFailed occurred. Discard broken event:',
-                   error: e.to_s, error_class: e.class.to_s, tag: tag
-          producer.clear_buffer
         end
-      else
-        producer.deliver_messages
       end
-    end

-    def refresh_client(raise_error = true)
-      @kafka = Kafka.new(seed_brokers: @brokers.split(','), client_id: @client_id)
-      log.info "initialized kafka producer: #{@client_id}"
-    rescue Exception => e # rubocop:disable RescueException
-      raise e if raise_error
-      log.error e
-    end
+      def refresh_client(raise_error = true)
+        @kafka = Kafka.new(seed_brokers: @brokers.split(','), client_id: @client_id)
+        log.info "initialized kafka producer: #{@client_id}"
+      rescue Exception => e # rubocop:disable RescueException
+        raise e if raise_error
+        log.error e
+      end

-    def configure(conf)
-      super
+      def configure(conf)
+        super

-      @producer_opts = { max_retries: @max_send_retries, required_acks: @required_acks }
-      @producer_opts[:ack_timeout] = @ack_timeout if @ack_timeout
-      @producer_opts[:compression_codec] = @compression_codec.to_sym if @compression_codec
+        @producer_opts = { max_retries: @max_send_retries, required_acks: @required_acks }
+        @producer_opts[:ack_timeout] = @ack_timeout if @ack_timeout
+        @producer_opts[:compression_codec] = @compression_codec.to_sym if @compression_codec

-      return unless @discard_kafka_delivery_failed
-      log.warn "'discard_kafka_delivery_failed' option discards events which "\
-               'cause delivery failure, e.g. invalid topic or something.'
-      log.warn 'If this is unexpected, you need to check your configuration or data.'
-    end
+        return unless @discard_kafka_delivery_failed
+        log.warn "'discard_kafka_delivery_failed' option discards events which "\
+                 'cause delivery failure, e.g. invalid topic or something.'
+        log.warn 'If this is unexpected, you need to check your configuration or data.'
+      end

-    # def emit(tag, es, chain)
-    #   super(tag, es, chain, tag)
-    # end
-
-    def filter_record(record)
-      return unless from_router?(record)
-      data = build_series(record)
-      return unless data
-      return data.map do |point|
-        InfluxDB::PointValue.new(point).dump
-      end.join("\n")
-    rescue Exception => e # rubocop:disable RescueException
-      puts "Error:#{e.backtrace}"
-    end
+      # def emit(tag, es, chain)
+      #   super(tag, es, chain, tag)
+      # end
+
+      def filter_record(record)
+        return unless from_router?(record)
+        data = build_series(record)
+        return unless data
+        return data.map do |point|
+          InfluxDB::PointValue.new(point).dump
+        end.join("\n")
+      rescue Exception => e # rubocop:disable RescueException
+        puts "Error:#{e.backtrace}"
+      end

-    def write(chunk)
-      tag = chunk.metadata.tag
-      producer = get_producer
-
-      records_by_topic = {}
-      bytes_by_topic = {}
-      messages = 0
-      messages_bytes = 0
-      record_buf = nil
-      record_buf_bytes = nil
-      begin
-        Fluent::Engine.msgpack_factory.unpacker(chunk.open).each do |time, record|
-          begin
-            topic = @metrics_topic
-            records_by_topic[topic] ||= 0
-            bytes_by_topic[topic] ||= 0
-            line = filter_record(record)
-
-            next unless line
-            record_buf_bytes = line.bytesize
-            if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
-              log.warn 'record size exceeds max_send_limit_bytes. Skip event:',
-                       time: time, record: record
+      def write(chunk)
+        tag = chunk.metadata.tag
+        producer = get_producer
+
+        records_by_topic = {}
+        bytes_by_topic = {}
+        messages = 0
+        messages_bytes = 0
+        record_buf = nil
+        record_buf_bytes = nil
+        begin
+          Fluent::Engine.msgpack_factory.unpacker(chunk.open).each do |time, record|
+            begin
+              topic = @metrics_topic
+              records_by_topic[topic] ||= 0
+              bytes_by_topic[topic] ||= 0
+              line = filter_record(record)
+
+              next unless line
+              record_buf_bytes = line.bytesize
+              if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
+                log.warn 'record size exceeds max_send_limit_bytes. Skip event:',
+                         time: time, record: record
+                next
+              end
+            rescue StandardError => e
+              log.warn 'unexpected error during format record. Skip broken event:',
+                       error: e.to_s, error_class: e.class.to_s, time: time, record: record
               next
             end
-          rescue StandardError => e
-            log.warn 'unexpected error during format record. Skip broken event:',
-                     error: e.to_s, error_class: e.class.to_s, time: time, record: record
-            next
-          end

-          if messages.positive? &&
-             (messages_bytes + record_buf_bytes > @kafka_agg_max_bytes) ||
-             (@kafka_agg_max_messages && messages >= @kafka_agg_max_messages)
-            log.debug do
-              "#{messages} messages send because reaches the limit of batch transmission."
+            if messages.positive? &&
+               (messages_bytes + record_buf_bytes > @kafka_agg_max_bytes) ||
+               (@kafka_agg_max_messages && messages >= @kafka_agg_max_messages)
+              log.debug do
+                "#{messages} messages send because reaches the limit of batch transmission."
+              end
+              deliver_messages(producer, tag)
+              messages = 0
+              messages_bytes = 0
             end
-            deliver_messages(producer, tag)
-            messages = 0
-            messages_bytes = 0
-          end

-          log.trace do
-            "message will send to #{topic} with partition_key: #{partition_key},"\
-            "partition: #{partition}, message_key: #{message_key} and value: #{record_buf}."
-          end
+            log.trace do
+              "message will send to #{topic} with partition_key: #{partition_key},"\
+              "partition: #{partition}, message_key: #{message_key} and value: #{record_buf}."
+            end

-          messages += 1
-          producer.produce2(
-            line,
-            topic: topic
-          )
-          messages_bytes += record_buf_bytes
+            messages += 1
+            producer.produce2(
+              line,
+              topic: topic
+            )
+            messages_bytes += record_buf_bytes

-          records_by_topic[topic] += 1
-          bytes_by_topic[topic] += record_buf_bytes
-        end
-        if messages.positive?
-          log.debug { "#{messages} messages send." }
-          deliver_messages(producer, tag)
+            records_by_topic[topic] += 1
+            bytes_by_topic[topic] += record_buf_bytes
+          end
+          if messages.positive?
+            log.debug { "#{messages} messages send." }
+            deliver_messages(producer, tag)
+          end
+          log.debug { "(records|bytes) (#{records_by_topic}|#{bytes_by_topic})" }
         end
-        log.debug { "(records|bytes) (#{records_by_topic}|#{bytes_by_topic})" }
+      rescue Exception => e # rubocop:disable RescueException
+        log.warn "Send exception occurred: #{e}"
+        log.warn "Exception Backtrace : #{e.backtrace.join("\n")}"
+        # For safety, refresh client and its producers
+        shutdown_producers
+        refresh_client(false)
+        # Raise exception to retry sendind messages
+        raise e
       end
-    rescue Exception => e # rubocop:disable RescueException
-      log.warn "Send exception occurred: #{e}"
-      log.warn "Exception Backtrace : #{e.backtrace.join("\n")}"
-      # For safety, refresh client and its producers
-      shutdown_producers
-      refresh_client(false)
-      # Raise exception to retry sendind messages
-      raise e
     end
   end
 end
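For reference, a minimal fluentd match block exercising the parameters declared above might look like the sketch below. This is illustrative only: the tag pattern, broker addresses, and codec choice are assumptions, not values documented by the gem; only the parameter names come from the plugin's config_param declarations.

  <match deis.router.**>
    @type deis
    brokers kafka-0:9092,kafka-1:9092
    client_id fluentd
    metrics_topic metrics
    compression_codec snappy
    required_acks 1
    # enable only if dropping undeliverable batches is acceptable
    discard_kafka_delivery_failed false
  </match>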
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-deis-kafka
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.2.0
 platform: ruby
 authors:
 - Guilherme Souza
@@ -24,48 +24,6 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '0.14'
-- !ruby/object:Gem::Dependency
-  name: fluent-mixin-plaintextformatter
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-- !ruby/object:Gem::Dependency
-  name: fluent-mixin-config-placeholders
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-- !ruby/object:Gem::Dependency
-  name: fluent-mixin-rewrite-tag-name
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
 - !ruby/object:Gem::Dependency
   name: influxdb
   requirement: !ruby/object:Gem::Requirement
@@ -114,28 +72,28 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '10.0'
+        version: '12.0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '10.0'
+        version: '12.0'
 - !ruby/object:Gem::Dependency
   name: test-unit
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 3.1.7
+        version: '3.2'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 3.1.7
+        version: '3.2'
 - !ruby/object:Gem::Dependency
   name: rubocop
   requirement: !ruby/object:Gem::Requirement