fluent-plugin-deis-kafka 0.1.0

checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 52fe5165a1eafbb97ae6af21ce01e4ac2c7aaaa0
+   data.tar.gz: 8acc20b11e2e5c4d3d85b2ff528637259dc13925
+ SHA512:
+   metadata.gz: 171b8ceea5f2fd90e48550d9a9f72daadb6b59dc3b770035c4841d3fe6cc678b559f3489f6b442a6d2e9f5cae4c24535bc57a620827b3e605be4244fa9cea830
+   data.tar.gz: d26124dca9d98030be9359460dbbb9c4eb7b593665cc59807a44b7cc0a6a9faab66766dc4f3f8fac0b25dca0573bb8fa37eaa0e8762c42dd401decedffbd7592
data/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+ 
+ Copyright (c) 2017 Top Free Games
+ 
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+ 
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+ 
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
data/README.md ADDED
@@ -0,0 +1 @@
+ # fluentd-plugin-deis-kafka
data/lib/fluent/mixin/deis.rb ADDED
@@ -0,0 +1,67 @@
+ # frozen_string_literal: true
+ 
+ module Fluent
+   module Mixin
+     module Deis
+       def kubernetes?(message)
+         !message['kubernetes'].nil?
+       end
+ 
+       def from_router?(message)
+         from_container?(message, 'deis-router')
+       end
+ 
+       def from_container?(message, regex)
+         if kubernetes? message
+           return true unless Regexp.new(regex).match(message['kubernetes']['container_name']).nil?
+         end
+         false
+       end
+ 
+       def build_series(message)
+         metric = parse_router_log(message['log'], message['kubernetes']['host'])
+         if metric
+           tags = { app: metric['app'], status_code: metric['status_code'], host: metric['host'] }
+           data = [
+             {
+               series: 'deis_router_request_time_ms',
+               values: { value: metric['request_time'] },
+               tags: tags
+             },
+             {
+               series: 'deis_router_response_time_ms',
+               values: { value: metric['response_time'] },
+               tags: tags
+             },
+             {
+               series: 'deis_router_bytes_sent',
+               values: { value: metric['bytes_sent'] },
+               tags: tags
+             }
+           ]
+           return data
+         end
+         nil
+       end
+ 
+       # {"log"=>"[2016-05-31T16:56:12+00:00] - foo - 10.164.1.1 - - - 200 - \"GET / HTTP/1.0\" - 211 - \"-\" - \"ApacheBench/2.3\" - \"~^foo\\x5C.(?<domain>.+)$\" - 10.167.243.4:80 - foo.devart.io - 0.002 - 0.046\n"}
+       # request_time - request processing time in seconds with a milliseconds resolution; time elapsed between the first bytes were read from the client and the log write after the last bytes were sent to the client
+       # response_time - keeps time spent on receiving the response from the upstream server; the time is kept in seconds with millisecond resolution.
+       def parse_router_log(message, host)
+         split_message = message.split(' - ')
+         return nil if split_message.length < 14
+         metric = {}
+         metric['app'] = split_message[1].strip
+         metric['status_code'] = split_message[4].strip
+         metric['bytes_sent'] = split_message[6].strip.to_f
+         metric['response_time'] = split_message[12].strip.to_f
+         metric['request_time'] = split_message[13].strip.to_f
+         metric['host'] = host
+         return metric
+       rescue Exception => e # rubocop:disable RescueException
+         puts "Error:#{e.message}"
+         return nil
+       end
+     end
+   end
+ end
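For orientation, here is a minimal sketch (not part of the gem) of what parse_router_log returns for the sample router log line quoted in the comment above; the Probe wrapper class and the 'node-1' host are hypothetical.

require 'fluent/mixin/deis'

# Hypothetical harness, only to reach the mixin's instance methods.
class Probe
  include Fluent::Mixin::Deis
end

log_line = '[2016-05-31T16:56:12+00:00] - foo - 10.164.1.1 - - - 200 - ' \
           '"GET / HTTP/1.0" - 211 - "-" - "ApacheBench/2.3" - ' \
           '"~^foo\x5C.(?<domain>.+)$" - 10.167.243.4:80 - foo.devart.io - 0.002 - 0.046'

metric = Probe.new.parse_router_log(log_line, 'node-1')
# => {"app"=>"foo", "status_code"=>"200", "bytes_sent"=>211.0,
#     "response_time"=>0.002, "request_time"=>0.046, "host"=>"node-1"}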
data/lib/fluent/plugin/kafka_producer_ext.rb ADDED
@@ -0,0 +1,28 @@
+ # frozen_string_literal: true
+ 
+ require 'kafka/pending_message'
+ require 'kafka/compressor'
+ require 'kafka/producer'
+ 
+ # for out_kafka_buffered
+ module Kafka
+   class Producer
+     def produce2(value, topic:)
+       create_time = Time.now
+ 
+       message = PendingMessage.new(
+         value,
+         nil,
+         topic,
+         nil,
+         nil,
+         create_time
+       )
+ 
+       @target_topics.add(topic)
+       @pending_message_queue.write(message)
+ 
+       nil
+     end
+   end
+ end
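Note that produce2 above only enqueues the payload (no key, no explicit partition); nothing reaches Kafka until deliver_messages is called on the same producer, which is what out_deis.rb does in write. A rough usage sketch, assuming a broker reachable at localhost:9092 (hypothetical values):

require 'kafka'
require 'fluent/plugin/kafka_producer_ext'

kafka = Kafka.new(seed_brokers: ['localhost:9092'], client_id: 'fluentd')
producer = kafka.producer(max_retries: 2, required_acks: 1)

# Enqueue one line-protocol payload for the 'metrics' topic, then flush it.
producer.produce2('deis_router_request_time_ms,app=foo value=0.046', topic: 'metrics')
producer.deliver_messages
producer.shutdown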
data/lib/fluent/plugin/out_deis.rb ADDED
@@ -0,0 +1,214 @@
+ # frozen_string_literal: true
+ 
+ require 'fluent/mixin/config_placeholders'
+ require 'fluent/mixin/plaintextformatter'
+ require 'fluent/mixin/rewrite_tag_name'
+ require 'fluent/mixin/deis'
+ require 'fluent/output'
+ require 'influxdb'
+ 
+ module Fluent
+   class DeisOutput < Output
+     Fluent::Plugin.register_output('deis', self)
+ 
+     include Fluent::Mixin::PlainTextFormatter
+     include Fluent::Mixin::ConfigPlaceholders
+     include Fluent::HandleTagNameMixin
+     include Fluent::Mixin::RewriteTagName
+     include Fluent::Mixin::Deis
+     config_param :brokers, :string, default: 'localhost:9092',
+                  desc: <<-DESC
+ Set brokers directly:
+ <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
+ DESC
+     config_param :client_id, :string, default: 'fluentd'
+     config_param :metrics_topic, :string, default: 'metrics'
+     config_param :discard_kafka_delivery_failed, :bool, default: false
+ 
+     # ruby-kafka producer options
+     config_param :max_send_retries, :integer, default: 2,
+                  desc: 'Number of times to retry '\
+                        'sending of messages to a leader.'
+     config_param :required_acks, :integer, default: 1,
+                  desc: 'The number of acks required per request.'
+     config_param :ack_timeout, :time, default: nil,
+                  desc: 'How long the producer waits for acks.'
+     config_param :compression_codec, :string, default: nil,
+                  desc: <<~DESC
+                    The codec the producer uses to compress messages.
+                    Supported codecs: (gzip|snappy)
+                  DESC
+ 
+     config_param :max_send_limit_bytes, :size, default: nil
+     config_param :kafka_agg_max_bytes, :size, default: 4 * 1024 # 4k
+     config_param :kafka_agg_max_messages, :integer, default: nil
+ 
+     define_method('log') { $log } unless method_defined?(:log) # rubocop:disable GlobalVars
+ 
+     def initialize
+       super
+       require 'kafka'
+       require 'fluent/plugin/kafka_producer_ext'
+ 
+       @kafka = nil
+       @producers = {}
+       @producers_mutex = Mutex.new
+     end
+ 
+     def start
+       super
+       refresh_client
+     end
+ 
+     def shutdown
+       super
+       shutdown_producers
+       @kafka = nil
+     end
+ 
+     def format_stream(_tag, es)
+       es.to_msgpack_stream
+     end
+ 
+     def shutdown_producers
+       @producers_mutex.synchronize do
+         @producers.each_value(&:shutdown)
+         @producers = {}
+       end
+     end
+ 
+     def get_producer # rubocop:disable AccessorMethodName
+       @producers_mutex.synchronize do
+         producer = @producers[Thread.current.object_id]
+         unless producer
+           producer = @kafka.producer(@producer_opts)
+           @producers[Thread.current.object_id] = producer
+         end
+         producer
+       end
+     end
+ 
+     def deliver_messages(producer, tag)
+       if @discard_kafka_delivery_failed
+         begin
+           producer.deliver_messages
+         rescue Kafka::DeliveryFailed => e
+           log.warn 'DeliveryFailed occurred. Discard broken event:',
+                    error: e.to_s, error_class: e.class.to_s, tag: tag
+           producer.clear_buffer
+         end
+       else
+         producer.deliver_messages
+       end
+     end
+ 
+     def refresh_client(raise_error = true)
+       @kafka = Kafka.new(seed_brokers: @brokers.split(','), client_id: @client_id)
+       log.info "initialized kafka producer: #{@client_id}"
+     rescue Exception => e # rubocop:disable RescueException
+       raise e if raise_error
+       log.error e
+     end
+ 
+     def configure(conf)
+       super
+ 
+       @producer_opts = { max_retries: @max_send_retries, required_acks: @required_acks }
+       @producer_opts[:ack_timeout] = @ack_timeout if @ack_timeout
+       @producer_opts[:compression_codec] = @compression_codec.to_sym if @compression_codec
+ 
+       return unless @discard_kafka_delivery_failed
+       log.warn "'discard_kafka_delivery_failed' option discards events which "\
+                'cause delivery failure, e.g. invalid topic or something.'
+       log.warn 'If this is unexpected, you need to check your configuration or data.'
+     end
+ 
+     # def emit(tag, es, chain)
+     #   super(tag, es, chain, tag)
+     # end
+ 
+     def filter_record(record)
+       return unless from_router?(record)
+       data = build_series(record)
+       return unless data
+       return data.map do |point|
+         InfluxDB::PointValue.new(point).dump
+       end.join("\n")
+     rescue Exception => e # rubocop:disable RescueException
+       puts "Error:#{e.backtrace}"
+     end
+ 
+     def write(chunk)
+       tag = chunk.metadata.tag
+       producer = get_producer
+ 
+       records_by_topic = {}
+       bytes_by_topic = {}
+       messages = 0
+       messages_bytes = 0
+       record_buf = nil
+       record_buf_bytes = nil
+       begin
+         Fluent::Engine.msgpack_factory.unpacker(chunk.open).each do |time, record|
+           begin
+             topic = @metrics_topic
+             records_by_topic[topic] ||= 0
+             bytes_by_topic[topic] ||= 0
+             line = filter_record(record)
+ 
+             next unless line
+             record_buf_bytes = line.bytesize
+             if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
+               log.warn 'record size exceeds max_send_limit_bytes. Skip event:',
+                        time: time, record: record
+               next
+             end
+           rescue StandardError => e
+             log.warn 'unexpected error during format record. Skip broken event:',
+                      error: e.to_s, error_class: e.class.to_s, time: time, record: record
+             next
+           end
+ 
+           if messages.positive? &&
+              (messages_bytes + record_buf_bytes > @kafka_agg_max_bytes) ||
+              (@kafka_agg_max_messages && messages >= @kafka_agg_max_messages)
+             log.debug do
+               "#{messages} messages will be sent because the batch transmission limit was reached."
+             end
+             deliver_messages(producer, tag)
+             messages = 0
+             messages_bytes = 0
+           end
+ 
+           log.trace do
+             "message will be sent to #{topic} with value: #{line}."
+           end
+ 
+           messages += 1
+           producer.produce2(
+             line,
+             topic: topic
+           )
+           messages_bytes += record_buf_bytes
+ 
+           records_by_topic[topic] += 1
+           bytes_by_topic[topic] += record_buf_bytes
+         end
+         if messages.positive?
+ log.debug { "#{messages} messages send." }
200
+           deliver_messages(producer, tag)
+         end
+         log.debug { "(records|bytes) (#{records_by_topic}|#{bytes_by_topic})" }
+       end
+     rescue Exception => e # rubocop:disable RescueException
+       log.warn "Send exception occurred: #{e}"
+       log.warn "Exception Backtrace : #{e.backtrace.join("\n")}"
+       # For safety, refresh client and its producers
+       shutdown_producers
+       refresh_client(false)
+       # Raise exception to retry sending messages
+       raise e
+     end
+   end
+ end
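filter_record above turns each matching router record into InfluxDB line protocol (one line per series returned by build_series) before queueing it to the metrics topic. A small sketch of that serialization step, using the same InfluxDB::PointValue call with hypothetical values:

require 'influxdb'

# One point from build_series: series name, field value, and tag set.
point = {
  series: 'deis_router_request_time_ms',
  values: { value: 0.046 },
  tags: { app: 'foo', status_code: '200', host: 'node-1' }
}

line = InfluxDB::PointValue.new(point).dump
# => something like "deis_router_request_time_ms,app=foo,status_code=200,host=node-1 value=0.046"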
metadata ADDED
@@ -0,0 +1,218 @@
+ --- !ruby/object:Gem::Specification
+ name: fluent-plugin-deis-kafka
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Guilherme Souza
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2017-09-19 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: fluentd
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.14'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.14'
+ - !ruby/object:Gem::Dependency
+   name: fluent-mixin-plaintextformatter
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: fluent-mixin-config-placeholders
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: fluent-mixin-rewrite-tag-name
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: influxdb
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.4'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.4'
+ - !ruby/object:Gem::Dependency
+   name: ruby-kafka
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.4.2
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.4.2
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.3'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.3'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.0'
+ - !ruby/object:Gem::Dependency
+   name: test-unit
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 3.1.7
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 3.1.7
+ - !ruby/object:Gem::Dependency
+   name: rubocop
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: pry
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: simplecov
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: Fluentd plugin to send deis-router metrics to influxdb through kafka
+ email:
+ - backend@tfgco.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - LICENSE
+ - README.md
+ - lib/fluent/mixin/deis.rb
+ - lib/fluent/plugin/kafka_producer_ext.rb
+ - lib/fluent/plugin/out_deis.rb
+ homepage: https://github.com/topfreegames/fluent-plugin-deis-kafka
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: 2.0.0
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.6.13
+ signing_key:
+ specification_version: 4
+ summary: 'Fluentd plugin to send deis-router metrics to influxdb through kafka, based
+   on: https://github.com/deis/fluentd and https://github.com/fluent/fluent-plugin-kafka'
+ test_files: []