fluent-plugin-kafka-custom-ruby-version 0.9.3 → 0.9.4.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.project +1 -1
- data/ChangeLog +9 -0
- data/Gemfile +4 -4
- data/README.md +334 -333
- data/buildclean_gem.sh +4 -0
- data/fluent-plugin-kafka.gemspec +24 -24
- data/lib/fluent/plugin/in_kafka.rb +1 -1
- data/lib/fluent/plugin/kafka_plugin_util.rb +13 -0
- data/lib/fluent/plugin/out_kafka_buffered.rb +1 -1
- data/test/helper.rb +1 -1
- data/test/plugin/test_in_kafka.rb +37 -0
- metadata +11 -8
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 75ad0a6363c5f682fc8d5298acbd6e0ac1fe8e36
+  data.tar.gz: 3f296115f6a57bbfebf554ee333ff31b3a46ba18
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1784282891033222f44c903ac6f18bbfdfb0750fa2605dea54f332afb41cfe18976356fb636f47324ba185120bf899ee8f5edd80747f5185936465bfbc30d515
+  data.tar.gz: 5a5abe1bd6281f1911796e703a46ef646d471ef20e40515b28045cd9ac9e8c7d02370a46091b07358b1e02ed9286587774d85bb256490507f5e22790155be8c0
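These are the SHA1 and SHA512 digests of metadata.gz and data.tar.gz, the two archives packed inside the .gem file. To check them locally, something like the following works; the commands are illustrative, relying only on the fact that a .gem is a plain tar archive:

    $ gem fetch fluent-plugin-kafka-custom-ruby-version -v 0.9.4.32
    $ tar -xf fluent-plugin-kafka-custom-ruby-version-0.9.4.32.gem
    $ sha1sum metadata.gz data.tar.gz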
data/.project
CHANGED
data/ChangeLog
CHANGED
@@ -1,3 +1,12 @@
+Release 0.9.2 - 2019/03/26
+
+	* out_kafka_buffered: Fix typo of partition_key usage
+
+Release 0.9.1 - 2019/03/25
+
+	* output: Support sasl_over_ssl parameter
+	* Support ruby-kafka 0.7.6
+
 Release 0.9.0 - 2019/02/22
 
 	* Add v1 API based rdkafka2 output plugin
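For context on the `sasl_over_ssl` entry above: in ruby-kafka the flag controls whether SASL authentication must run over a TLS connection. A minimal client-side sketch, assuming ruby-kafka >= 0.7.6; the broker address and credentials are placeholders, not values from this gem:

    # Sketch only: SASL/SCRAM over a plaintext connection.
    require "kafka"

    kafka = Kafka.new(
      ["broker1:9092"],
      sasl_scram_username: "user",
      sasl_scram_password: "secret",
      sasl_scram_mechanism: "sha256",
      sasl_over_ssl: false # the parameter the 0.9.1 release exposes
    )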
data/Gemfile
CHANGED
@@ -1,4 +1,4 @@
-source 'https://rubygems.org'
-
-# Specify your gem's dependencies in fluent-plugin-kafka-custom-ruby-version.gemspec
-gemspec
+source 'https://rubygems.org'
+
+# Specify your gem's dependencies in fluent-plugin-kafka-custom-ruby-version.gemspec
+gemspec
data/README.md
CHANGED
@@ -1,333 +1,334 @@
-[previous 333-line README not captured in this diff view]
+# For kafka 1.x support
+# fluent-plugin-kafka, a plugin for [Fluentd](http://fluentd.org)
+
+[](https://travis-ci.org/fluent/fluent-plugin-kafka)
+
+A fluentd plugin to both consume and produce data for Apache Kafka.
+
+TODO: Also, I need to write tests
+
+## Installation
+
+Add this line to your application's Gemfile:
+
+    gem 'fluent-plugin-kafka'
+
+And then execute:
+
+    $ bundle
+
+Or install it yourself as:
+
+    $ gem install fluent-plugin-kafka --no-document
+
+If you want to use zookeeper-related parameters, you also need to install the zookeeper gem. The zookeeper gem includes a native extension, so development tools such as gcc and make are needed.
+
+## Requirements
+
+- Ruby 2.1 or later
+- Input plugins work with kafka v0.9 or later
+- Output plugins work with kafka v0.8 or later
+
+## Usage
+
+### Common parameters
+
+#### SSL authentication
+
+- ssl_ca_cert
+- ssl_client_cert
+- ssl_client_cert_key
+- ssl_ca_certs_from_system
+
+Set the paths to SSL-related files. See [Encryption and Authentication using SSL](https://github.com/zendesk/ruby-kafka#encryption-and-authentication-using-ssl) for more detail.
+
+#### SASL authentication
+
+##### with GSSAPI
+
+- principal
+- keytab
+
+Set the principal and the path to the keytab for SASL/GSSAPI authentication.
+See [Authentication using SASL](https://github.com/zendesk/ruby-kafka#authentication-using-sasl) for more details.
+
+##### with Plain/SCRAM
+
+- username
+- password
+- scram_mechanism
+- sasl_over_ssl
+
+Set username, password, scram_mechanism and sasl_over_ssl for SASL/Plain or SCRAM authentication.
+See [Authentication using SASL](https://github.com/zendesk/ruby-kafka#authentication-using-sasl) for more details.
+
+### Input plugin (@type 'kafka')
+
+Consume events with a single consumer.
+
+    <source>
+      @type kafka
+
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
+      topics <listening topics(separate with comma',')>
+      format <input text type (text|json|ltsv|msgpack)> :default => json
+      message_key <key (Optional, for text format only, default is message)>
+      add_prefix <tag prefix (Optional)>
+      add_suffix <tag suffix (Optional)>
+
+      # Optionally, you can manage topic offset by using zookeeper
+      offset_zookeeper <zookeeper node list (<zookeeper1_host>:<zookeeper1_port>,<zookeeper2_host>:<zookeeper2_port>,..)>
+      offset_zk_root_node <offset path in zookeeper> default => '/fluent-plugin-kafka'
+
+      # ruby-kafka consumer options
+      max_bytes (integer) :default => nil (Use default of ruby-kafka)
+      max_wait_time (integer) :default => nil (Use default of ruby-kafka)
+      min_bytes (integer) :default => nil (Use default of ruby-kafka)
+    </source>
+
+Supports starting processing from an assigned offset for specific topics.
+
+    <source>
+      @type kafka
+
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
+      format <input text type (text|json|ltsv|msgpack)>
+      <topic>
+        topic <listening topic>
+        partition <listening partition: default=0>
+        offset <listening start offset: default=-1>
+      </topic>
+      <topic>
+        topic <listening topic>
+        partition <listening partition: default=0>
+        offset <listening start offset: default=-1>
+      </topic>
+    </source>
+
+See also the [ruby-kafka README](https://github.com/zendesk/ruby-kafka#consuming-messages-from-kafka) for more detailed documentation about ruby-kafka.
+
+The consumed topic name is used as the event tag. So when the target topic name is `app_event`, the tag is `app_event`. If you want to modify the tag, use the `add_prefix` or `add_suffix` parameters. With `add_prefix kafka`, the tag is `kafka.app_event`.
+
+### Input plugin (@type 'kafka_group', supports kafka group)
+
+Consume events using kafka consumer group features.
+
+    <source>
+      @type kafka_group
+
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
+      consumer_group <consumer group name, must set>
+      topics <listening topics(separate with comma',')>
+      format <input text type (text|json|ltsv|msgpack)> :default => json
+      message_key <key (Optional, for text format only, default is message)>
+      add_prefix <tag prefix (Optional)>
+      add_suffix <tag suffix (Optional)>
+      retry_emit_limit <Wait retry_emit_limit x 1s when BufferQueueLimitError happens. The default is nil and it means waiting until BufferQueueLimitError is resolved>
+      use_record_time <If true, replace event time with contents of 'time' field of fetched record>
+      time_format <string (Optional when use_record_time is used)>
+
+      # ruby-kafka consumer options
+      max_bytes (integer) :default => 1048576
+      max_wait_time (integer) :default => nil (Use default of ruby-kafka)
+      min_bytes (integer) :default => nil (Use default of ruby-kafka)
+      offset_commit_interval (integer) :default => nil (Use default of ruby-kafka)
+      offset_commit_threshold (integer) :default => nil (Use default of ruby-kafka)
+      fetcher_max_queue_size (integer) :default => nil (Use default of ruby-kafka)
+      start_from_beginning (bool) :default => true
+    </source>
+
+See also the [ruby-kafka README](https://github.com/zendesk/ruby-kafka#consuming-messages-from-kafka) for more detailed documentation about ruby-kafka options.
+
+The consumed topic name is used as the event tag. So when the target topic name is `app_event`, the tag is `app_event`. If you want to modify the tag, use the `add_prefix` or `add_suffix` parameter. With `add_prefix kafka`, the tag is `kafka.app_event`.
+
+### Buffered output plugin
+
+This plugin uses the ruby-kafka producer for writing data. This plugin works with recent kafka versions.
+
+    <match app.**>
+      @type kafka_buffered
+
+      # Brokers: you can choose either brokers or zookeeper. If you are not familiar with zookeeper, use the brokers parameter.
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,.. # Set brokers directly
+      zookeeper <zookeeper_host>:<zookeeper_port> # Set brokers via Zookeeper
+      zookeeper_path <broker path in zookeeper> :default => /brokers/ids # Set path in zookeeper for kafka
+
+      topic_key (string) :default => 'topic'
+      partition_key (string) :default => 'partition'
+      partition_key_key (string) :default => 'partition_key'
+      message_key_key (string) :default => 'message_key'
+      default_topic (string) :default => nil
+      default_partition_key (string) :default => nil
+      default_message_key (string) :default => nil
+      output_data_type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
+      output_include_tag (bool) :default => false
+      output_include_time (bool) :default => false
+      exclude_topic_key (bool) :default => false
+      exclude_partition_key (bool) :default => false
+      get_kafka_client_log (bool) :default => false
+
+      # See fluentd document for buffer related parameters: http://docs.fluentd.org/articles/buffer-plugin-overview
+
+      # ruby-kafka producer options
+      max_send_retries (integer) :default => 1
+      required_acks (integer) :default => -1
+      ack_timeout (integer) :default => nil (Use default of ruby-kafka)
+      compression_codec (gzip|snappy) :default => nil (No compression)
+      kafka_agg_max_bytes (integer) :default => 4096
+      kafka_agg_max_messages (integer) :default => nil (No limit)
+      max_send_limit_bytes (integer) :default => nil (No drop)
+      discard_kafka_delivery_failed (bool) :default => false (No discard)
+      monitoring_list (array) :default => []
+    </match>
+
+`<formatter name>` of `output_data_type` uses fluentd's formatter plugins. See the [formatter article](http://docs.fluentd.org/articles/formatter-plugin-overview).
+
+ruby-kafka sometimes returns a `Kafka::DeliveryFailed` error without good information.
+In this case, `get_kafka_client_log` is useful for identifying the error cause.
+ruby-kafka's log is routed to the fluentd log, so you can see ruby-kafka's log in fluentd logs.
+
+Supports the following ruby-kafka producer options.
+
+- max_send_retries - default: 1 - Number of times to retry sending of messages to a leader.
+- required_acks - default: -1 - The number of acks required per request. If you need flush performance, set a lower value, e.g. 1, 2.
+- ack_timeout - default: nil - How long the producer waits for acks. The unit is seconds.
+- compression_codec - default: nil - The codec the producer uses to compress messages.
+- kafka_agg_max_bytes - default: 4096 - Maximum total message size to be included in one batch transmission.
+- kafka_agg_max_messages - default: nil - Maximum number of messages to include in one batch transmission.
+- max_send_limit_bytes - default: nil - Max byte size of messages to send, to avoid MessageSizeTooLarge. For example, if you set 1000000 (message.max.bytes in kafka), messages larger than 1000000 bytes will be dropped.
+- discard_kafka_delivery_failed - default: false - discard the record where [Kafka::DeliveryFailed](http://www.rubydoc.info/gems/ruby-kafka/Kafka/DeliveryFailed) occurred
+- monitoring_list - default: [] - library to be used for monitoring; statsd and datadog are supported
+
+For more detail on monitoring, see also https://github.com/zendesk/ruby-kafka#monitoring
+
+See also [Kafka::Client](http://www.rubydoc.info/gems/ruby-kafka/Kafka/Client) for more detailed documentation about ruby-kafka.
+
+This plugin also supports the "snappy" compression codec.
+Install the snappy gem before you use snappy compression.
+
+    $ gem install snappy --no-document
+
+The snappy gem uses a native extension, so you need to install several packages first.
+On Ubuntu, the development packages and the snappy library are needed.
+
+    $ sudo apt-get install build-essential autoconf automake libtool libsnappy-dev
+
+On CentOS 7, a similar installation is also necessary.
+
+    $ sudo yum install gcc autoconf automake libtool snappy-devel
+
+#### Load balancing
+
+By default, ruby-kafka assigns messages to partitions at random, but messages with the same partition key are always assigned to the same partition; set `default_partition_key` in the config file to use this. If the key `partition_key` exists in a message, this plugin uses its value as the partition key.
+
+|default_partition_key|partition_key| behavior |
+| --- | --- | --- |
+|Not set|Not exists| All messages are assigned a partition at random |
+|Set| Not exists| All messages are assigned to the specific partition |
+|Not set| Exists | Messages which have a partition_key record are assigned to the specific partition, others are assigned a partition at random |
+|Set| Exists | Messages which have a partition_key record are assigned to the specific partition with partition_key, others are assigned to the specific partition with default_partition_key |
+
+If the key `message_key` exists in a message, this plugin publishes the value of message_key to kafka, where it can be read by consumers. The same message key will be assigned to all messages by setting `default_message_key` in the config file. If message_key exists and partition_key is not set explicitly, message_key will be used for partitioning.
+
+### Output plugin
+
+This plugin is for fluentd v1.0 or later. This will be the `out_kafka` plugin in the future.
+
+    <match app.**>
+      @type kafka2
+
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,.. # Set brokers directly
+
+      topic_key (string) :default => 'topic'
+      partition_key (string) :default => 'partition'
+      partition_key_key (string) :default => 'partition_key'
+      message_key_key (string) :default => 'message_key'
+      default_topic (string) :default => nil
+      default_partition_key (string) :default => nil
+      default_message_key (string) :default => nil
+      exclude_topic_key (bool) :default => false
+      exclude_partition_key (bool) :default => false
+      get_kafka_client_log (bool) :default => false
+      use_default_for_unknown_topic (bool) :default => false
+
+      <format>
+        @type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
+      </format>
+      <inject>
+        tag_key tag
+        time_key time
+      </inject>
+
+      # See fluentd document for buffer related parameters: http://docs.fluentd.org/articles/buffer-plugin-overview
+      # Buffer chunk key should be same with topic_key. If value is not found in the record, default_topic is used.
+      <buffer topic>
+        flush_interval 10s
+      </buffer>
+
+      # ruby-kafka producer options
+      max_send_retries (integer) :default => 1
+      required_acks (integer) :default => -1
+      ack_timeout (integer) :default => nil (Use default of ruby-kafka)
+      compression_codec (gzip|snappy) :default => nil (No compression)
+    </match>
+
+### Non-buffered output plugin
+
+This plugin uses the ruby-kafka producer for writing data. For performance and reliability concerns, use the `kafka_buffered` output instead. This is mainly for testing.
+
+    <match app.**>
+      @type kafka
+
+      # Brokers: you can choose either brokers or zookeeper.
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,.. # Set brokers directly
+      zookeeper <zookeeper_host>:<zookeeper_port> # Set brokers via Zookeeper
+      zookeeper_path <broker path in zookeeper> :default => /brokers/ids # Set path in zookeeper for kafka
+
+      default_topic (string) :default => nil
+      default_partition_key (string) :default => nil
+      default_message_key (string) :default => nil
+      output_data_type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
+      output_include_tag (bool) :default => false
+      output_include_time (bool) :default => false
+      exclude_topic_key (bool) :default => false
+      exclude_partition_key (bool) :default => false
+
+      # ruby-kafka producer options
+      max_send_retries (integer) :default => 1
+      required_acks (integer) :default => -1
+      ack_timeout (integer) :default => nil (Use default of ruby-kafka)
+      compression_codec (gzip|snappy) :default => nil
+      max_buffer_size (integer) :default => nil (Use default of ruby-kafka)
+      max_buffer_bytesize (integer) :default => nil (Use default of ruby-kafka)
+    </match>
+
+This plugin also supports ruby-kafka related parameters. See the Buffered output plugin section.
+
+### rdkafka based output plugin
+
+This plugin uses `rdkafka` instead of `ruby-kafka` as the ruby client.
+You need to install the rdkafka gem.
+
+    # rdkafka is a C extension library, so development tools like ruby-devel and gcc are needed
+    $ gem install rdkafka --no-document
+
+    <match kafka.**>
+      @type rdkafka
+
+      default_topic kafka
+      flush_interval 1s
+      output_data_type json
+
+      rdkafka_options {
+        "log_level" : 7
+      }
+    </match>
+
+## Contributing
+
+1. Fork it
+2. Create your feature branch (`git checkout -b my-new-feature`)
+3. Commit your changes (`git commit -am 'Added some feature'`)
+4. Push to the branch (`git push origin my-new-feature`)
+5. Create a new Pull Request
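To make the load-balancing table in the README above concrete, here is a standalone sketch of the selection rule for plain Hash records; `effective_partition_key` is an illustrative name, not a method of this plugin:

    # nil means ruby-kafka picks a partition at random.
    def effective_partition_key(record, default_partition_key)
      record["partition_key"] || default_partition_key
    end

    effective_partition_key({ "msg" => "a" }, nil)                 # => nil (random)
    effective_partition_key({ "msg" => "a" }, "pinned")            # => "pinned"
    effective_partition_key({ "partition_key" => "k2" }, "pinned") # => "k2"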
data/buildclean_gem.sh
ADDED
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -1,24 +1,24 @@
-# -*- encoding: utf-8 -*-
-
-Gem::Specification.new do |gem|
-  gem.authors = ["Hidemasa Togashi", "Masahiro Nakagawa"]
-  gem.email = ["togachiro@gmail.com", "repeatedly@gmail.com"]
-  gem.description = %q{Fluentd plugin for Apache Kafka > 0.8}
-  gem.summary = %q{Fluentd plugin for Apache Kafka > 0.8}
-  gem.homepage = "https://github.com/gozzip2009/fluent-plugin-kafka-custom
-  gem.license = "Apache-2.0"
-
-  gem.files = `git ls-files`.split($\)
-  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
-  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
-  gem.name = "fluent-plugin-kafka-custom-ruby-version"
-  gem.require_paths = ["lib"]
-  gem.version = '0.9.
-  gem.required_ruby_version = ">= 2.1.0"
-
-  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
-  gem.add_dependency 'ltsv'
-  gem.add_dependency
-  gem.add_development_dependency "rake", ">= 0.9.2"
-  gem.add_development_dependency "test-unit", ">= 3.0.8"
-end
+# -*- encoding: utf-8 -*-
+
+Gem::Specification.new do |gem|
+  gem.authors = ["Hidemasa Togashi", "Masahiro Nakagawa"]
+  gem.email = ["togachiro@gmail.com", "repeatedly@gmail.com"]
+  gem.description = %q{Fluentd plugin for Apache Kafka > 0.8}
+  gem.summary = %q{Fluentd plugin for Apache Kafka > 0.8}
+  gem.homepage = "https://github.com/gozzip2009/fluent-plugin-kafka-custom"
+  gem.license = "Apache-2.0"
+
+  gem.files = `git ls-files`.split($\)
+  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
+  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
+  gem.name = "fluent-plugin-kafka-custom-ruby-version"
+  gem.require_paths = ["lib"]
+  gem.version = '0.9.4.32'
+  gem.required_ruby_version = ">= 2.1.0"
+
+  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
+  gem.add_dependency 'ltsv'
+  gem.add_dependency "ruby-kafka-custom"
+  gem.add_development_dependency "rake", ">= 0.9.2"
+  gem.add_development_dependency "test-unit", ">= 3.0.8"
+end
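For an application depending on this fork directly, a Gemfile entry would look like the following sketch; the pin simply mirrors `gem.version` above and is not taken from any file in this diff:

    source 'https://rubygems.org'

    gem 'fluent-plugin-kafka-custom-ruby-version', '0.9.4.32'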
data/lib/fluent/plugin/in_kafka.rb
CHANGED
@@ -33,7 +33,7 @@ class Fluent::KafkaInput < Fluent::Input
   config_param :add_offset_in_record, :bool, :default => false
 
   config_param :offset_zookeeper, :string, :default => nil
-  config_param :offset_zk_root_node, :string, :default => '/fluent-plugin-kafka
+  config_param :offset_zk_root_node, :string, :default => '/fluent-plugin-kafka'
   config_param :use_record_time, :bool, :default => false,
                :desc => "Replace message timestamp with contents of 'time' field."
   config_param :time_format, :string, :default => nil,
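For context, `config_param` is fluentd's DSL for declaring typed plugin parameters; each declaration becomes an instance variable once the plugin is configured. A minimal sketch using the modern plugin API; the plugin name 'example' and the class are illustrative, not part of this gem:

    require 'fluent/plugin/input'

    class ExampleInput < Fluent::Plugin::Input
      Fluent::Plugin.register_input('example', self)

      # Same declaration style as the fixed line above.
      config_param :offset_zk_root_node, :string, :default => '/fluent-plugin-kafka'

      def start
        super
        log.info "zookeeper root node: #{@offset_zk_root_node}"
      end
    end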
data/lib/fluent/plugin/kafka_plugin_util.rb
CHANGED
@@ -17,6 +17,19 @@ module Fluent
       }
     end
 
+    DummyFormatter = Object.new
+
+    def start
+      super
+
+      # This is bad point here but easy to fix for all kafka plugins
+      unless log.respond_to?(:formatter)
+        def log.formatter
+          Fluent::KafkaPluginUtil::SSLSettings::DummyFormatter
+        end
+      end
+    end
+
     def read_ssl_file(path)
       return nil if path.nil?
 
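The added `start` hook uses a Ruby singleton method: `def log.formatter` defines `formatter` on that one `log` object without touching its class, and the `respond_to?` guard ensures an existing formatter is never overridden. A standalone sketch of the same pattern, with an illustrative method name and the standard Logger standing in for fluentd's logger:

    require "logger"

    log = Logger.new($stdout)

    unless log.respond_to?(:dummy_formatter)
      # Defined on this single instance; other Logger objects are unaffected.
      def log.dummy_formatter
        :dummy
      end
    end

    log.dummy_formatter # => :dummy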
data/lib/fluent/plugin/out_kafka_buffered.rb
CHANGED
@@ -313,7 +313,7 @@ DESC
       record['tag'] = tag if @output_include_tag
       topic = (@exclude_topic_key ? record.delete(@topic_key) : record[@topic_key]) || def_topic
       partition_key = (@exclude_partition_key ? record.delete(@partition_key_key) : record[@partition_key_key]) || @default_partition_key
-      partition = (@exclude_partition ? record.delete(@
+      partition = (@exclude_partition ? record.delete(@partition_key) : record[@partition_key]) || @default_partition
       message_key = (@exclude_message_key ? record.delete(@message_key_key) : record[@message_key_key]) || @default_message_key
 
       records_by_topic[topic] ||= 0
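The corrected line follows the same extract-or-fallback pattern as the neighboring topic, partition_key, and message_key lookups: optionally delete the routing field from the record, otherwise read it in place, then fall back to a configured default. A standalone sketch of that pattern with illustrative names:

    # exclude: true strips the routing field from the emitted record.
    def fetch_field(record, key, exclude:, default: nil)
      (exclude ? record.delete(key) : record[key]) || default
    end

    record = { "message" => "hi", "partition" => 3 }
    fetch_field(record, "partition", exclude: true, default: 0) # => 3
    record                                                      # => { "message" => "hi" }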
data/test/helper.rb
CHANGED
data/test/plugin/test_in_kafka.rb
ADDED
@@ -0,0 +1,37 @@
+require 'fluent/input'
+require 'fluent/plugin/in_kafka_group'
+require 'test/unit'
+
+class KafkaInputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+  end
+
+  CONFIG = %[
+    brokers 172.16.2.114:9092,172.16.2.115:9092,172.16.2.116:9092
+    format json
+    consumer_group journey-playground
+    topics journey-playground
+    kafka_message_key message_key
+    start_from_beginning true
+
+    principal journey@KAFKA.SECURE
+    keytab E:\\doc_true\\kafka_client\\journey.user.service.keytab
+    sasl_over_ssl false
+
+    ssl_ca_cert E:\\doc_true\\kafka_client\\kafka.client.cert.pem
+  ]
+
+  def create_driver(conf = CONFIG)
+    Fluent::Test::Driver::Input.new(Fluent::Plugin::MyInput).configure(conf)
+  end
+
+  def test_read
+    d = create_driver(CONFIG)
+    d.run(timeout: 10)
+
+    d.events.each do |tag, time, record|
+      print record
+    end
+  end
+end
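Running the added test presumably follows the usual rake flow for a gem like this; the task name is an assumption, since the Rakefile is not shown in this diff:

    $ bundle install
    $ bundle exec rake test

Note that the test as shipped drives `Fluent::Plugin::MyInput`, which no file in this gem defines, and points at hard-coded broker addresses and Windows keytab paths, so it reads as a local integration harness rather than a portable unit test.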
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka-custom-ruby-version
 version: !ruby/object:Gem::Version
-  version: 0.9.
+  version: 0.9.4.32
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-
+date: 2019-04-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -46,19 +46,19 @@ dependencies:
     - !ruby/object:Gem::Version
       version: '0'
 - !ruby/object:Gem::Dependency
-  name: ruby-kafka
+  name: ruby-kafka-custom
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - ">="
     - !ruby/object:Gem::Version
-      version: 0
+      version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - ">="
     - !ruby/object:Gem::Version
-      version: 0
+      version: '0'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -103,6 +103,7 @@ files:
 - LICENSE
 - README.md
 - Rakefile
+- buildclean_gem.sh
 - fluent-plugin-kafka.gemspec
 - lib/fluent/plugin/in_kafka.rb
 - lib/fluent/plugin/in_kafka_group.rb
@@ -114,8 +115,9 @@ files:
 - lib/fluent/plugin/out_rdkafka.rb
 - lib/fluent/plugin/out_rdkafka2.rb
 - test/helper.rb
+- test/plugin/test_in_kafka.rb
 - test/plugin/test_out_kafka.rb
-homepage: https://github.com/gozzip2009/fluent-plugin-kafka-custom
+homepage: https://github.com/gozzip2009/fluent-plugin-kafka-custom
 licenses:
 - Apache-2.0
 metadata: {}
@@ -141,4 +143,5 @@ specification_version: 4
 summary: Fluentd plugin for Apache Kafka > 0.8
 test_files:
 - test/helper.rb
+- test/plugin/test_in_kafka.rb
 - test/plugin/test_out_kafka.rb