fluent-plugin-kafka 0.1.1 → 0.1.2
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/fluent-plugin-kafka.gemspec +1 -1
- data/lib/fluent/plugin/out_kafka_buffered.rb +27 -40
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: d1413ec34a22790b9d9190a1c89500674dd24b99
|
4
|
+
data.tar.gz: 0d63aa90d9ac7420d46baee550eae4b27419d0bf
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 305f5754a3eaf8220e4b07e1899b7ec2002eeca0b8554b7a998ed72b5ee834ee6cb16f38fe650653bab8a4b3ac4a575f37789a0a5bb81595f147fe1bc810cb2c
|
7
|
+
data.tar.gz: 2093bbb9df3cf5c2d2f7a186b60daf55712d409abaddf10586f8ff486e43187cc715de09558bd6a3c8b6af571f97ccc21b5222a159c9580f9c6541315e4d5e08
|
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -12,7 +12,7 @@ Gem::Specification.new do |gem|
|
|
12
12
|
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
|
13
13
|
gem.name = "fluent-plugin-kafka"
|
14
14
|
gem.require_paths = ["lib"]
|
15
|
-
gem.version = '0.1.1'
|
15
|
+
gem.version = '0.1.2'
|
16
16
|
gem.add_dependency 'fluentd'
|
17
17
|
gem.add_dependency 'poseidon_cluster'
|
18
18
|
gem.add_dependency 'ltsv'
|
@@ -66,14 +66,6 @@ class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
|
|
66
66
|
if @compression_codec == 'snappy'
|
67
67
|
require 'snappy'
|
68
68
|
end
|
69
|
-
case @output_data_type
|
70
|
-
when 'json'
|
71
|
-
require 'yajl'
|
72
|
-
when 'ltsv'
|
73
|
-
require 'ltsv'
|
74
|
-
when 'msgpack'
|
75
|
-
require 'msgpack'
|
76
|
-
end
|
77
69
|
|
78
70
|
@f_separator = case @field_separator
|
79
71
|
when /SPACE/i then ' '
|
@@ -82,19 +74,7 @@ class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
|
|
82
74
|
else "\t"
|
83
75
|
end
|
84
76
|
|
85
|
-
@
|
86
|
-
nil
|
87
|
-
elsif @output_data_type == 'ltsv'
|
88
|
-
nil
|
89
|
-
elsif @output_data_type == 'msgpack'
|
90
|
-
nil
|
91
|
-
elsif @output_data_type =~ /^attr:(.*)$/
|
92
|
-
$1.split(',').map(&:strip).reject(&:empty?)
|
93
|
-
else
|
94
|
-
@formatter = Fluent::Plugin.new_formatter(@output_data_type)
|
95
|
-
@formatter.configure(conf)
|
96
|
-
nil
|
97
|
-
end
|
77
|
+
@formatter_proc = setup_formatter(conf)
|
98
78
|
end
|
99
79
|
|
100
80
|
def start
|
@@ -110,24 +90,31 @@ class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
|
|
110
90
|
[tag, time, record].to_msgpack
|
111
91
|
end
|
112
92
|
|
113
|
-
def
|
114
|
-
if @
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
|
125
|
-
else
|
93
|
+
def setup_formatter(conf)
|
94
|
+
if @output_data_type == 'json'
|
95
|
+
require 'yajl'
|
96
|
+
Proc.new { |tag, time, record| Yajl::Encoder.encode(record) }
|
97
|
+
elsif @output_data_type == 'ltsv'
|
98
|
+
require 'ltsv'
|
99
|
+
Proc.new { |tag, time, record| LTSV.dump(record) }
|
100
|
+
elsif @output_data_type == 'msgpack'
|
101
|
+
require 'msgpack'
|
102
|
+
Proc.new { |tag, time, record| record.to_msgpack }
|
103
|
+
elsif @output_data_type =~ /^attr:(.*)$/
|
104
|
+
@custom_attributes = $1.split(',').map(&:strip).reject(&:empty?)
|
126
105
|
@custom_attributes.unshift('time') if @output_include_time
|
127
106
|
@custom_attributes.unshift('tag') if @output_include_tag
|
128
|
-
|
129
|
-
|
130
|
-
|
107
|
+
Proc.new { |tag, time, record|
|
108
|
+
@custom_attributes.map { |attr|
|
109
|
+
record[attr].nil? ? '' : record[attr].to_s
|
110
|
+
}.join(@f_separator)
|
111
|
+
}
|
112
|
+
else
|
113
|
+
@formatter = Fluent::Plugin.new_formatter(@output_data_type)
|
114
|
+
@formatter.configure(conf)
|
115
|
+
Proc.new { |tag, time, record|
|
116
|
+
@formatter.format(tag, time, record)
|
117
|
+
}
|
131
118
|
end
|
132
119
|
end
|
133
120
|
|
@@ -146,15 +133,15 @@ class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
|
|
146
133
|
records_by_topic[topic] ||= 0
|
147
134
|
bytes_by_topic[topic] ||= 0
|
148
135
|
|
149
|
-
record_buf = @
|
136
|
+
record_buf = @formatter_proc.call(tag, time, record)
|
150
137
|
record_buf_bytes = record_buf.bytesize
|
151
138
|
if messages.length > 0 and messages_bytes + record_buf_bytes > @kafka_agg_max_bytes
|
152
|
-
log.trace("#{messages.length} messages send.")
|
139
|
+
log.on_trace { log.trace("#{messages.length} messages send.") }
|
153
140
|
@producer.send_messages(messages)
|
154
141
|
messages = []
|
155
142
|
messages_bytes = 0
|
156
143
|
end
|
157
|
-
log.trace("message will send to #{topic} with key: #{partition_key} and value: #{record_buf}.")
|
144
|
+
log.on_trace { log.trace("message will send to #{topic} with key: #{partition_key} and value: #{record_buf}.") }
|
158
145
|
messages << Poseidon::MessageToSend.new(topic, record_buf, partition_key)
|
159
146
|
messages_bytes += record_buf_bytes
|
160
147
|
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: fluent-plugin-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.1.1
|
4
|
+
version: 0.1.2
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Hidemasa Togashi
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2015-12-
|
11
|
+
date: 2015-12-11 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: fluentd
|