fluent-plugin-kafka 0.2.1 → 0.2.2
- checksums.yaml +4 -4
- data/.gitignore +2 -0
- data/.travis.yml +17 -0
- data/README.md +5 -2
- data/fluent-plugin-kafka.gemspec +1 -1
- data/lib/fluent/plugin/kafka_producer_ext.rb +24 -0
- data/lib/fluent/plugin/out_kafka_buffered.rb +3 -3
- data/test/plugin/test_out_kafka.rb +20 -1
- metadata +5 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 95430357052a2b7305214ffa1f5c0ad426e52914
+  data.tar.gz: 57736e74a0bc9b65441bc41086f7b9875e72a590
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a7f89337ddc1f28e8762de822738d467d16ef454a0bf64570f8bfbd13420dbbc53a2477b4cce836c8234d7aee96726a13d0fbf12ac9e33216f6cf82eea1ee174
+  data.tar.gz: 5568aec86102a4c40aae4e1e8b43c5e1829811e9014589e45981491fc2b1fece473f0042f1b7188e3b09d678eff8cb77e0ac63de7a00d09fe34f7b9bfe80b12f
data/.gitignore
ADDED
data/.travis.yml
ADDED
data/README.md
CHANGED
@@ -1,6 +1,9 @@
-#
+# fluent-plugin-kafka, a plugin for [Fluentd](http://fluentd.org)
+
+[![Build Status](https://travis-ci.org/htgc/fluent-plugin-kafka.svg?branch=master)](https://travis-ci.org/htgc/fluent-plugin-kafka)
+
+A fluentd plugin to both consume and produce data for Apache Kafka.
 
-TODO: Write a gem description
 TODO: Also, I need to write tests
 
 ## Installation
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -12,7 +12,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.2.1'
+  gem.version = '0.2.2'
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
   gem.add_dependency 'poseidon_cluster'
   gem.add_dependency 'ltsv'
data/lib/fluent/plugin/kafka_producer_ext.rb
ADDED
@@ -0,0 +1,24 @@
+require 'kafka/producer'
+
+module Kafka
+  class Producer
+    def produce2(value, key: nil, topic:, partition: nil, partition_key: nil)
+      create_time = Time.now
+
+      message = PendingMessage.new(
+        value,
+        key,
+        topic,
+        partition,
+        partition_key,
+        create_time,
+        key.to_s.bytesize + value.to_s.bytesize
+      )
+
+      @target_topics.add(topic)
+      @pending_message_queue.write(message)
+
+      nil
+    end
+  end
+end
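`produce2` is a trimmed-down variant of ruby-kafka's `Producer#produce`: it builds the same `PendingMessage` and enqueues it, but skips the gem's `max_buffer_size`/`max_buffer_bytesize` guard, leaving back-pressure entirely to the caller. A minimal sketch of driving the patched producer directly, assuming a reachable broker at localhost:9092 and a placeholder topic (`deliver_messages` and `shutdown` are ruby-kafka's standard flush and cleanup calls):

require 'kafka'
require 'fluent/plugin/kafka_producer_ext'

# Placeholder broker and topic; any reachable Kafka broker would do.
kafka = Kafka.new(seed_brokers: ['localhost:9092'])
producer = kafka.producer(max_retries: 3, required_acks: -1)

# produce2 enqueues without checking ruby-kafka's internal buffer limits;
# the fluentd plugin relies on its own chunking to bound what it sends.
producer.produce2('hello kafka', topic: 'test', partition_key: 'k1')
producer.deliver_messages  # actually send the buffered message
producer.shutdown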
data/lib/fluent/plugin/out_kafka_buffered.rb
CHANGED
@@ -8,6 +8,7 @@ class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
     super
 
     require 'kafka'
+    require 'fluent/plugin/kafka_producer_ext'
 
     @kafka = nil
     @producers = {}
@@ -126,8 +127,7 @@ DESC
 
     @formatter_proc = setup_formatter(conf)
 
-    @producer_opts = {max_retries: @max_send_retries, required_acks: @required_acks,
-                      max_buffer_size: @buffer.buffer_chunk_limit / 10, max_buffer_bytesize: @buffer.buffer_chunk_limit * 2}
+    @producer_opts = {max_retries: @max_send_retries, required_acks: @required_acks}
     @producer_opts[:ack_timeout] = @ack_timeout if @ack_timeout
     @producer_opts[:compression_codec] = @compression_codec.to_sym if @compression_codec
   end
@@ -233,7 +233,7 @@ DESC
       end
       log.on_trace { log.trace("message will send to #{topic} with key: #{partition_key} and value: #{record_buf}.") }
      messages += 1
-      producer.produce(record_buf, topic: topic, partition_key: partition_key)
+      producer.produce2(record_buf, topic: topic, partition_key: partition_key)
      messages_bytes += record_buf_bytes
 
      records_by_topic[topic] += 1
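Together, these hunks stop deriving ruby-kafka's buffer caps from `buffer_chunk_limit` and route sends through `produce2` instead, presumably so that the producer's overflow guard cannot fire partway through writing a fluentd chunk. For contrast, stock `Producer#produce` in the 2016-era gem performs roughly the following check before enqueuing (a paraphrased sketch, not verbatim source):

# Approximation of the guard that produce2 omits; names follow ruby-kafka,
# but treat the exact messages and comparisons as assumptions.
if buffer_size >= @max_buffer_size
  raise Kafka::BufferOverflow, "Max buffer size (#{@max_buffer_size} messages) exceeded"
end

if buffer_bytesize + message.bytesize >= @max_buffer_bytesize
  raise Kafka::BufferOverflow, "Max buffer bytesize (#{@max_buffer_bytesize} bytes) exceeded"
end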
data/test/plugin/test_out_kafka.rb
CHANGED
@@ -1,11 +1,16 @@
 require 'helper'
+require 'fluent/output'
 
 class KafkaOutputTest < Test::Unit::TestCase
   def setup
     Fluent::Test.setup
   end
 
-  CONFIG = %[
+  BASE_CONFIG = %[
+    type kafka_buffered
+  ]
+
+  CONFIG = BASE_CONFIG + %[
     default_topic kitagawakeiko
     brokers localhost:9092
   ]
@@ -15,6 +20,20 @@ class KafkaOutputTest < Test::Unit::TestCase
   end
 
   def test_configure
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(BASE_CONFIG)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(CONFIG)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(CONFIG + %[
+        buffer_type memory
+      ])
+    }
+
     d = create_driver
     assert_equal 'kitagawakeiko', d.instance.default_topic
     assert_equal 'localhost:9092', d.instance.brokers
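The new assertions only verify that configuration succeeds or raises `Fluent::ConfigError`. A natural follow-on, sketched here rather than taken from the gem, would check that producer-related parameters reach the plugin instance; `required_acks` and `max_send_retries` are existing config_params (the diff above reads them as `@required_acks` and `@max_send_retries`), while the literal values below are placeholders:

# Hypothetical extra test, not part of the 0.2.2 suite: check that
# producer options from the config land on the plugin instance.
def test_configure_producer_opts
  d = create_driver(CONFIG + %[
    required_acks -1
    max_send_retries 2
  ])
  assert_equal(-1, d.instance.required_acks)
  assert_equal(2, d.instance.max_send_retries)
end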
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.2.1
+  version: 0.2.2
 platform: ruby
 authors:
 - Hidemasa Togashi
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-07-
+date: 2016-07-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -121,6 +121,8 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
+- ".gitignore"
+- ".travis.yml"
 - Gemfile
 - LICENSE
 - README.md
@@ -128,6 +130,7 @@ files:
 - fluent-plugin-kafka.gemspec
 - lib/fluent/plugin/in_kafka.rb
 - lib/fluent/plugin/in_kafka_group.rb
+- lib/fluent/plugin/kafka_producer_ext.rb
 - lib/fluent/plugin/out_kafka.rb
 - lib/fluent/plugin/out_kafka_buffered.rb
 - test/helper.rb