rdkafka 0.1.11 → 0.2.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/.travis.yml +22 -0
- data/.yardopts +2 -0
- data/CHANGELOG.md +5 -0
- data/README.md +40 -1
- data/Rakefile +7 -2
- data/lib/rdkafka.rb +3 -1
- data/lib/rdkafka/config.rb +47 -1
- data/lib/rdkafka/consumer.rb +38 -1
- data/lib/rdkafka/consumer/message.rb +51 -0
- data/lib/rdkafka/error.rb +10 -0
- data/lib/rdkafka/ffi.rb +2 -1
- data/lib/rdkafka/producer.rb +17 -44
- data/lib/rdkafka/producer/delivery_handle.rb +54 -0
- data/lib/rdkafka/producer/delivery_report.rb +21 -0
- data/lib/rdkafka/version.rb +1 -1
- data/spec/rdkafka/config_spec.rb +46 -25
- data/spec/rdkafka/{message_spec.rb → consumer/message_spec.rb} +7 -3
- data/spec/rdkafka/producer/delivery_handle_spec.rb +63 -0
- data/spec/rdkafka/producer/delivery_report_spec.rb +13 -0
- data/spec/rdkafka/producer_spec.rb +4 -2
- metadata +14 -5
- data/lib/rdkafka/message.rb +0 -24
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 33e13eee28a6f2356635744a0490e06d236afd9d
+  data.tar.gz: 23a74b8693241447df18042c3cf0482a03ec9c82
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2c130f0bfa4a968186cf51490e7453e81ac879c2ff3fa082707332235a6d98fc94e25c6278bac249f05a62afbea2cc70534bd05fd24ee07e1754418cf909d156
+  data.tar.gz: 5c5d186816373f5cd28a5d7cc583a79cfded06522d4707da855373e06ce57ff90903c639a1232e535abfa692f27a395e699d4f6b47792a97b9777ada224c8e44
data/.gitignore
CHANGED
data/.travis.yml
ADDED
@@ -0,0 +1,22 @@
+language: ruby
+
+sudo: false
+
+rvm:
+- 2.1
+- 2.2
+- 2.3
+- 2.4
+
+before_install:
+- wget http://www.us.apache.org/dist/kafka/0.11.0.1/kafka_2.12-0.11.0.1.tgz -O kafka.tgz
+- mkdir -p kafka && tar xzf kafka.tgz -C kafka --strip-components 1
+- nohup bash -c "cd kafka && bin/zookeeper-server-start.sh config/zookeeper.properties &"
+- nohup bash -c "cd kafka && bin/kafka-server-start.sh config/server.properties &"
+
+before_script:
+- cd ext && bundle exec rake && cd ..
+- bundle exec rake create_topics
+
+script:
+- bundle exec rspec
data/.yardopts
ADDED
data/CHANGELOG.md
ADDED
data/README.md
CHANGED
@@ -1,5 +1,6 @@
 # Rdkafka
 
+[![Build Status](https://travis-ci.org/thijsc/rdkafka-ruby.svg?branch=master)](https://travis-ci.org/thijsc/rdkafka-ruby)
 [![Gem Version](https://badge.fury.io/rb/rdkafka.svg)](https://badge.fury.io/rb/rdkafka)
 
 The `rdkafka` gem is a modern Kafka client library for Ruby based on
@@ -11,7 +12,45 @@ This gem only provides a high-level Kafka consumer. If you are running
 an older version of Kafka and/or need the legacy simple consumer we
 suggest using the [Hermann](https://github.com/reiseburo/hermann) gem.
 
-
+## Installation
+
+This gem downloads and compiles librdkafka when it is installed. If you
+have any problems installing the gem please open an issue.
+
+## Usage
+
+See the [documentation](http://www.rubydoc.info/github/thijsc/rdkafka-ruby/master) for full details on how to use this gem. Two quick examples:
+
+### Consuming messages
+
+```ruby
+config = {
+  :"bootstrap.servers" => "localhost:9092",
+  :"group.id" => "ruby-test"
+}
+consumer = Rdkafka::Config.new(config).consumer
+consumer.subscribe("ruby-test-topic")
+
+consumer.each do |message|
+  puts "Message received: #{message}"
+end
+```
+
+### Producing messages
+
+```ruby
+config = {:"bootstrap.servers" => "localhost:9092"}
+producer = Rdkafka::Config.new(config).producer
+
+100.times do |i|
+  puts "Producing message #{i}"
+  producer.produce(
+    topic: "ruby-test-topic",
+    payload: "Payload #{i}",
+    key: "Key #{i}"
+  ).wait
+end
+```
 
 ## Development
 
data/Rakefile
CHANGED
@@ -2,8 +2,13 @@ require "./lib/rdkafka"
 
 task :create_topics do
   puts "Creating test topics"
-
-
+  kafka_topics = if ENV['TRAVIS']
+    'kafka/bin/kafka-topics.sh'
+  else
+    'kafka-topics'
+  end
+  `#{kafka_topics} --create --topic=produce_test_topic --zookeeper=127.0.0.1:2181 --partitions=3 --replication-factor=1`
+  `#{kafka_topics} --create --topic=rake_test_topic --zookeeper=127.0.0.1:2181 --partitions=3 --replication-factor=1`
 end
 
 task :produce_messages do
data/lib/rdkafka.rb
CHANGED
@@ -2,7 +2,9 @@ require "rdkafka/version"
 
 require "rdkafka/config"
 require "rdkafka/consumer"
+require "rdkafka/consumer/message"
 require "rdkafka/error"
 require "rdkafka/ffi"
-require "rdkafka/message"
 require "rdkafka/producer"
+require "rdkafka/producer/delivery_handle"
+require "rdkafka/producer/delivery_report"
data/lib/rdkafka/config.rb
CHANGED
@@ -5,38 +5,72 @@ module Rdkafka
   # the consumer and producer methods to create a client. Documentation of the available
   # configuration options is available on https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md.
   class Config
+    # @private
     @@logger = Logger.new(STDOUT)
 
+    # Returns the current logger, by default this is a logger to stdout.
+    #
+    # @return [Logger]
     def self.logger
       @@logger
     end
 
+    # Set the logger that will be used for all logging output by this library.
+    #
+    # @param logger [Logger] The logger to be used
+    #
+    # @return [nil]
     def self.logger=(logger)
+      raise NoLoggerError if logger.nil?
       @@logger=logger
     end
 
+    # Default config that can be overwritten.
     DEFAULT_CONFIG = {
       # Request api version so advanced features work
       :"api.version.request" => true
     }.freeze
 
+    # Required config that cannot be overwritten.
     REQUIRED_CONFIG = {
-      # Enable log queues so we get callbacks in our own threads
+      # Enable log queues so we get callbacks in our own Ruby threads
       :"log.queue" => true
     }.freeze
 
+    # Returns a new config with the provided options which are merged with {DEFAULT_CONFIG}.
+    #
+    # @param config_hash [Hash<String,Symbol => String>] The config options for rdkafka
+    #
+    # @return [Config]
     def initialize(config_hash = {})
       @config_hash = DEFAULT_CONFIG.merge(config_hash)
     end
 
+    # Set a config option.
+    #
+    # @param key [String] The config option's key
+    # @param value [String] The config option's value
+    #
+    # @return [nil]
     def []=(key, value)
       @config_hash[key] = value
     end
 
+    # Get a config option with the specified key
+    #
+    # @param key [String] The config option's key
+    #
+    # @return [String, nil] The config option or `nil` if it is not present
     def [](key)
       @config_hash[key]
     end
 
+    # Create a consumer with this configuration.
+    #
+    # @raise [ConfigError] When the configuration contains invalid options
+    # @raise [ClientCreationError] When the native client cannot be created
+    #
+    # @return [Consumer] The created consumer
     def consumer
       kafka = native_kafka(native_config, :rd_kafka_consumer)
       # Redirect the main queue to the consumer
@@ -45,6 +79,12 @@ module Rdkafka
       Rdkafka::Consumer.new(kafka)
     end
 
+    # Create a producer with this configuration.
+    #
+    # @raise [ConfigError] When the configuration contains invalid options
+    # @raise [ClientCreationError] When the native client cannot be created
+    #
+    # @return [Producer] The created producer
     def producer
       # Create Kafka config
       config = native_config
@@ -54,9 +94,15 @@ module Rdkafka
       Rdkafka::Producer.new(native_kafka(config, :rd_kafka_producer))
     end
 
+    # Error that is returned by the underlying rdkafka library if an invalid configuration option is present.
     class ConfigError < RuntimeError; end
+
+    # Error that is returned by the underlying rdkafka library if the client cannot be created.
     class ClientCreationError < RuntimeError; end
 
+    # Error that is raised when trying to set a nil logger
+    class NoLoggerError < RuntimeError; end
+
     private
 
     # This method is only intended to be used to create a client,
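Taken together, the documented logger and option accessors above define the whole configuration surface of this release. A minimal sketch of using them (the log file name and option values are illustrative, not from this diff):

```ruby
require "rdkafka"
require "logger"

# Replace the default STDOUT logger; passing nil now raises
# Rdkafka::Config::NoLoggerError instead of silently breaking logging.
Rdkafka::Config.logger = Logger.new("rdkafka.log")

config = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092")
config[:"group.id"] = "example-group"  # []= stores an option
config[:"api.version.request"]         # => true, merged in from DEFAULT_CONFIG

consumer = config.consumer             # raises ConfigError for invalid options
```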
data/lib/rdkafka/consumer.rb
CHANGED
@@ -1,15 +1,31 @@
 module Rdkafka
+  # A consumer of Kafka messages. It uses the high-level consumer approach where the Kafka
+  # brokers automatically assign partitions and load balance partitions over consumers that
+  # have the same `:"group.id"` set in their configuration.
+  #
+  # To create a consumer set up a {Config} and call {Config#consumer consumer} on that. It is
+  # mandatory to set `:"group.id"` in the configuration.
   class Consumer
     include Enumerable
 
+    # @private
     def initialize(native_kafka)
       @native_kafka = native_kafka
     end
 
+    # Close this consumer
+    # @return [nil]
     def close
       Rdkafka::FFI.rd_kafka_consumer_close(@native_kafka)
     end
 
+    # Subscribe to one or more topics
+    #
+    # @param topics [Array<String>] One or more topic names
+    #
+    # @raise [RdkafkaError] When subscribing fails
+    #
+    # @return [nil]
     def subscribe(*topics)
       # Create topic partition list with topics and no partition set
       tpl = Rdkafka::FFI.rd_kafka_topic_partition_list_new(topics.length)
@@ -30,6 +46,13 @@ module Rdkafka
       Rdkafka::FFI.rd_kafka_topic_partition_list_destroy(tpl)
     end
 
+    # Commit the current offsets of this consumer
+    #
+    # @param async [Boolean] Whether to commit async or wait for the commit to finish
+    #
+    # @raise [RdkafkaError] When committing fails
+    #
+    # @return [nil]
     def commit(async=false)
       response = Rdkafka::FFI.rd_kafka_commit(@native_kafka, nil, async)
       if response != 0
@@ -37,6 +60,13 @@ module Rdkafka
       end
     end
 
+    # Poll for the next message on one of the subscribed topics
+    #
+    # @param timeout_ms [Integer] Timeout of this poll
+    #
+    # @raise [RdkafkaError] When polling fails
+    #
+    # @return [Message, nil] A message or nil if there was no new message within the timeout
     def poll(timeout_ms)
      message_ptr = Rdkafka::FFI.rd_kafka_consumer_poll(@native_kafka, timeout_ms)
      if message_ptr.null?
@@ -49,7 +79,7 @@ module Rdkafka
         raise Rdkafka::RdkafkaError.new(native_message[:err])
       end
       # Create a message to pass out
-      Rdkafka::Message.new(native_message)
+      Rdkafka::Consumer::Message.new(native_message)
     end
   ensure
     # Clean up rdkafka message if there is one
@@ -58,6 +88,13 @@ module Rdkafka
       end
     end
 
+    # Poll for new messages and yield for each received one
+    #
+    # @raise [RdkafkaError] When polling fails
+    #
+    # @yieldparam message [Message] Received message
+    #
+    # @return [nil]
     def each(&block)
       loop do
         message = poll(250)
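The methods documented in this file combine into a manual poll loop as an alternative to `each`. A minimal sketch, assuming `config` is an `Rdkafka::Config` with `:"group.id"` set (topic names are made up):

```ruby
consumer = config.consumer
consumer.subscribe("events", "audit_log")  # varargs: one or more topics

loop do
  begin
    message = consumer.poll(250)  # nil when no message arrived within 250 ms
    next if message.nil?
    puts "Received: #{message}"
    consumer.commit(true)         # async commit; commit(false) waits
  rescue Rdkafka::RdkafkaError => e
    break if e.is_partition_eof?  # EOF just means we caught up
    raise
  end
end

consumer.close
```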
data/lib/rdkafka/consumer/message.rb
ADDED
@@ -0,0 +1,51 @@
+module Rdkafka
+  class Consumer
+    # A message that was consumed from a topic.
+    class Message
+      # The topic this message was consumed from
+      # @return [String]
+      attr_reader :topic
+
+      # The partition this message was consumed from
+      # @return [Integer]
+      attr_reader :partition
+
+      # This message's payload
+      # @return [String, nil]
+      attr_reader :payload
+
+      # This message's key
+      # @return [String, nil]
+      attr_reader :key
+
+      # This message's offset in its partition
+      # @return [Integer]
+      attr_reader :offset
+
+      # This message's timestamp, if provided by the broker
+      # @return [Integer, nil]
+      attr_reader :timestamp
+
+      # @private
+      def initialize(native_message)
+        unless native_message[:rkt].null?
+          @topic = FFI.rd_kafka_topic_name(native_message[:rkt])
+        end
+        @partition = native_message[:partition]
+        unless native_message[:payload].null?
+          @payload = native_message[:payload].read_string(native_message[:len])
+        end
+        unless native_message[:key].null?
+          @key = native_message[:key].read_string(native_message[:key_len])
+        end
+        @offset = native_message[:offset]
+        @timestamp = FFI.rd_kafka_message_timestamp(native_message, nil)
+      end
+
+      # @return [String]
+      def to_s
+        "Message in '#{topic}' with key '#{key}', payload '#{payload}', partition #{partition}, offset #{offset}, timestamp #{timestamp}"
+      end
+    end
+  end
+end
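For reference, the readers on the relocated class map directly onto the native message struct; a hypothetical message returned by `Consumer#poll` would look like this:

```ruby
message = consumer.poll(250)
unless message.nil?
  message.topic      # => "events" (nil only if the native topic handle was null)
  message.partition  # => 0
  message.key        # => "user-42", or nil for keyless messages
  message.payload    # => the raw payload string, or nil
  message.offset     # => position within the partition
  message.timestamp  # => broker-provided timestamp, or nil
end
```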
data/lib/rdkafka/error.rb
CHANGED
@@ -1,12 +1,18 @@
 module Rdkafka
+  # Error returned by the underlying rdkafka library.
   class RdkafkaError < RuntimeError
+    # The underlying raw error response
+    # @return [Integer]
     attr_reader :rdkafka_response
 
+    # @private
     def initialize(response)
       raise TypeError.new("Response has to be an integer") unless response.is_a? Integer
       @rdkafka_response = response
     end
 
+    # This error's code, for example `:partition_eof`, `:msg_size_too_large`.
+    # @return [Symbol]
     def code
       code = Rdkafka::FFI.rd_kafka_err2name(@rdkafka_response).downcase
       if code[0] == "_"
@@ -16,10 +22,14 @@ module Rdkafka
       end
     end
 
+    # Human readable representation of this error.
+    # @return [String]
     def to_s
       "#{Rdkafka::FFI.rd_kafka_err2str(@rdkafka_response)} (#{code})"
     end
 
+    # Whether this error indicates the partition is EOF.
+    # @return [Boolean]
     def is_partition_eof?
       code == :partition_eof
     end
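The newly documented error API makes failures inspectable rather than opaque. A sketch of how a caller might branch on it (the handling itself is illustrative):

```ruby
begin
  consumer.poll(250)
rescue Rdkafka::RdkafkaError => e
  e.rdkafka_response  # the raw integer librdkafka returned
  e.code              # symbol form, e.g. :msg_size_too_large
  if e.is_partition_eof?
    # Reached the end of a partition; usually safe to keep polling.
  else
    raise
  end
end
```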
data/lib/rdkafka/ffi.rb
CHANGED
@@ -2,6 +2,7 @@ require "ffi"
|
|
2
2
|
require "logger"
|
3
3
|
|
4
4
|
module Rdkafka
|
5
|
+
# @private
|
5
6
|
module FFI
|
6
7
|
extend ::FFI::Library
|
7
8
|
|
@@ -144,7 +145,7 @@ module Rdkafka
|
|
144
145
|
:void, [:pointer, :pointer, :pointer]
|
145
146
|
) do |client_ptr, message_ptr, opaque_ptr|
|
146
147
|
message = Message.new(message_ptr)
|
147
|
-
delivery_handle = Rdkafka::DeliveryHandle.new(message[:_private])
|
148
|
+
delivery_handle = Rdkafka::Producer::DeliveryHandle.new(message[:_private])
|
148
149
|
delivery_handle[:pending] = false
|
149
150
|
delivery_handle[:response] = message[:err]
|
150
151
|
delivery_handle[:partition] = message[:partition]
|
data/lib/rdkafka/producer.rb
CHANGED
@@ -1,5 +1,7 @@
 module Rdkafka
+  # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
+    # @private
     def initialize(native_kafka)
       @closing = false
       @native_kafka = native_kafka
@@ -16,6 +18,7 @@ module Rdkafka
       @polling_thread.abort_on_exception = true
     end
 
+    # Close this producer and wait for the internal poll queue to empty.
     def close
       # Indicate to polling thread that we're closing
       @closing = true
@@ -23,6 +26,20 @@ module Rdkafka
       @polling_thread.join
     end
 
+    # Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
+    #
+    # When no partition is specified the underlying Kafka library picks a partition based on the key. If no key is specified, a random partition will be used.
+    # When a timestamp is provided this is used instead of the autogenerated timestamp.
+    #
+    # @param topic [String] The topic to produce to
+    # @param payload [String] The message's payload
+    # @param key [String] The message's key
+    # @param partition [Integer] Optional partition to produce to
+    # @param timestamp [Integer] Optional timestamp of this message
+    #
+    # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
+    #
+    # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
     def produce(topic:, payload: nil, key: nil, partition: nil, timestamp: nil)
       # Start by checking and converting the input
 
@@ -74,48 +91,4 @@ module Rdkafka
       delivery_handle
     end
   end
-
-  class WaitTimeoutError < RuntimeError; end
-
-  class DeliveryHandle < ::FFI::Struct
-    layout :pending, :bool,
-           :response, :int,
-           :partition, :int,
-           :offset, :int64
-
-    def pending?
-      self[:pending]
-    end
-
-    # Wait for the delivery report
-    def wait(timeout_in_seconds=60)
-      timeout = if timeout_in_seconds
-        Time.now.to_i + timeout_in_seconds
-      else
-        nil
-      end
-      loop do
-        if pending?
-          if timeout && timeout <= Time.now.to_i
-            raise WaitTimeoutError.new("Waiting for delivery timed out after #{timeout_in_seconds} seconds")
-          end
-          sleep 0.1
-          next
-        elsif self[:response] != 0
-          raise RdkafkaError.new(self[:response])
-        else
-          return DeliveryReport.new(self[:partition], self[:offset])
-        end
-      end
-    end
-  end
-
-  class DeliveryReport
-    attr_reader :partition, :offset
-
-    def initialize(partition, offset)
-      @partition = partition
-      @offset = offset
-    end
-  end
 end
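The `produce` documentation above spells out the partitioning rules. A sketch exercising the optional arguments (topic, payload, and key are illustrative; the timestamp format follows the producer spec's example value):

```ruby
producer = config.producer

handle = producer.produce(
  topic:     "events",
  payload:   "hello world",
  key:       "user-42",      # picks the partition when partition: is omitted
  partition: 1,              # explicitly overrides key-based partitioning
  timestamp: 1505069646000   # optional; replaces the autogenerated timestamp
)

report = handle.wait  # block until the delivery report arrives
report.partition      # => 1
producer.close        # waits for the internal poll queue to empty
```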
data/lib/rdkafka/producer/delivery_handle.rb
ADDED
@@ -0,0 +1,54 @@
+module Rdkafka
+  class Producer
+    # Handle to wait for a delivery report which is returned when
+    # producing a message.
+    class DeliveryHandle < ::FFI::Struct
+      layout :pending, :bool,
+             :response, :int,
+             :partition, :int,
+             :offset, :int64
+
+      # Whether the delivery handle is still pending.
+      #
+      # @return [Boolean]
+      def pending?
+        self[:pending]
+      end
+
+      # Wait for the delivery report or raise an error if this takes longer than the timeout.
+      # If there is a timeout this does not mean the message is not delivered, rdkafka might still be working on delivering the message.
+      # In this case it is possible to call wait again.
+      #
+      # @param timeout_in_seconds [Integer, nil] Number of seconds to wait before timing out. If this is nil it does not time out.
+      #
+      # @raise [RdkafkaError] When delivering the message failed
+      # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
+      #
+      # @return [DeliveryReport]
+      def wait(timeout_in_seconds=60)
+        timeout = if timeout_in_seconds
+          Time.now.to_i + timeout_in_seconds
+        else
+          nil
+        end
+        loop do
+          if pending?
+            if timeout && timeout <= Time.now.to_i
+              raise WaitTimeoutError.new("Waiting for delivery timed out after #{timeout_in_seconds} seconds")
+            end
+            sleep 0.1
+            next
+          elsif self[:response] != 0
+            raise RdkafkaError.new(self[:response])
+          else
+            return DeliveryReport.new(self[:partition], self[:offset])
+          end
+        end
+      end
+
+      # Error that is raised when waiting for a delivery handle to complete
+      # takes longer than the specified timeout.
+      class WaitTimeoutError < RuntimeError; end
+    end
+  end
+end
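As the `wait` documentation notes, a timeout is not a delivery failure: the handle stays valid and can be waited on again. A sketch of that retry path (topic and payload are illustrative):

```ruby
handle = producer.produce(topic: "events", payload: "hello")

begin
  report = handle.wait(5)    # give up waiting (not delivering) after 5 seconds
rescue Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
  # rdkafka may still deliver the message; the handle can be waited on again.
  report = handle.wait(nil)  # nil disables the timeout entirely
rescue Rdkafka::RdkafkaError => e
  warn "Delivery failed: #{e}"
end
```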
data/lib/rdkafka/producer/delivery_report.rb
ADDED
@@ -0,0 +1,21 @@
+module Rdkafka
+  class Producer
+    # Delivery report for a successfully produced message.
+    class DeliveryReport
+      # The partition this message was produced to.
+      # @return [Integer]
+      attr_reader :partition
+
+      # The offset of the produced message.
+      # @return [Integer]
+      attr_reader :offset
+
+      private
+
+      def initialize(partition, offset)
+        @partition = partition
+        @offset = offset
+      end
+    end
+  end
+end
data/lib/rdkafka/version.rb
CHANGED
data/spec/rdkafka/config_spec.rb
CHANGED
@@ -1,36 +1,57 @@
 require "spec_helper"
 
 describe Rdkafka::Config do
-
-
-
-
-  end
+  context "logger" do
+    it "should have a default logger" do
+      expect(Rdkafka::Config.logger).to be_a Logger
+    end
 
-
-
-
+    it "should set the logger" do
+      logger = Logger.new(STDOUT)
+      expect(Rdkafka::Config.logger).not_to eq logger
+      Rdkafka::Config.logger = logger
+      expect(Rdkafka::Config.logger).to eq logger
+    end
 
-
-
+    it "should not accept a nil logger" do
+      expect {
+        Rdkafka::Config.logger = nil
+      }.to raise_error(Rdkafka::Config::NoLoggerError)
+    end
   end
 
-
-
-
-  config
-
-
+  context "configuration" do
+    it "should store configuration" do
+      config = Rdkafka::Config.new
+      config[:"key"] = 'value'
+      expect(config[:"key"]).to eq 'value'
+    end
 
-
-
-
+    it "should use default configuration" do
+      config = Rdkafka::Config.new
+      expect(config[:"api.version.request"]).to eq true
+    end
+
+    it "should create a consumer with valid config" do
+      expect(rdkafka_config.consumer).to be_a Rdkafka::Consumer
+    end
+
+    it "should raise an error when creating a consumer with invalid config" do
+      config = Rdkafka::Config.new('invalid.key' => 'value')
+      expect {
+        config.consumer
+      }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
+    end
+
+    it "should create a producer with valid config" do
+      expect(rdkafka_config.producer).to be_a Rdkafka::Producer
+    end
 
-
-
-
-
-
+    it "should raise an error when creating a producer with invalid config" do
+      config = Rdkafka::Config.new('invalid.key' => 'value')
+      expect {
+        config.producer
+      }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
+    end
   end
 end
data/spec/rdkafka/{message_spec.rb → consumer/message_spec.rb}
RENAMED
@@ -1,6 +1,6 @@
 require "spec_helper"
 
-describe Rdkafka::Message do
+describe Rdkafka::Consumer::Message do
   let(:native_topic) do
     Rdkafka::FFI.rd_kafka_topic_new(
       native_client,
@@ -29,7 +29,7 @@ describe Rdkafka::Message do
       end
     end
   end
-  subject { Rdkafka::Message.new(native_message) }
+  subject { Rdkafka::Consumer::Message.new(native_message) }
 
   it "should have a topic" do
     expect(subject.topic).to eq "topic_name"
@@ -72,6 +72,10 @@ describe Rdkafka::Message do
   end
 
   it "should have a timestamp" do
-
+    # There is no effective way to mock this, just
+    # make sure it doesn't crash.
+    expect {
+      subject.timestamp
+    }.not_to raise_error
   end
 end
data/spec/rdkafka/producer/delivery_handle_spec.rb
ADDED
@@ -0,0 +1,63 @@
+require "spec_helper"
+
+describe Rdkafka::Producer::DeliveryHandle do
+  let(:response) { 0 }
+  subject do
+    Rdkafka::Producer::DeliveryHandle.new.tap do |handle|
+      handle[:pending] = pending
+      handle[:response] = response
+      handle[:partition] = 2
+      handle[:offset] = 100
+    end
+  end
+
+  describe "#pending?" do
+    context "when true" do
+      let(:pending) { true }
+
+      it "should be true" do
+        expect(subject.pending?).to be true
+      end
+    end
+
+    context "when not true" do
+      let(:pending) { false }
+
+      it "should be false" do
+        expect(subject.pending?).to be false
+      end
+    end
+  end
+
+  describe "#wait" do
+    let(:pending) { true }
+
+    it "should wait until the timeout and then raise an error" do
+      expect {
+        subject.wait(0.1)
+      }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
+    end
+
+    context "when not pending anymore and no error" do
+      let(:pending) { false }
+
+      it "should return a delivery report" do
+        report = subject.wait
+
+        expect(report.partition).to eq(2)
+        expect(report.offset).to eq(100)
+      end
+    end
+
+    context "when not pending anymore and there was an error" do
+      let(:pending) { false }
+      let(:response) { 20 }
+
+      it "should raise an rdkafka error" do
+        expect {
+          subject.wait
+        }.to raise_error Rdkafka::RdkafkaError
+      end
+    end
+  end
+end
data/spec/rdkafka/producer/delivery_report_spec.rb
ADDED
@@ -0,0 +1,13 @@
+require "spec_helper"
+
+describe Rdkafka::Producer::DeliveryReport do
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100) }
+
+  it "should get the partition" do
+    expect(subject.partition).to eq 2
+  end
+
+  it "should get the offset" do
+    expect(subject.offset).to eq 100
+  end
+end
data/spec/rdkafka/producer_spec.rb
CHANGED
@@ -63,6 +63,7 @@ describe Rdkafka::Producer do
       delivery_report: report
     )
 
+    expect(message.partition).to eq 1
     expect(message.payload.force_encoding("utf-8")).to eq "Τη γλώσσα μου έδωσαν ελληνική"
     expect(message.key).to eq "key utf8"
   end
@@ -70,7 +71,7 @@ describe Rdkafka::Producer do
   it "should produce a message with a timestamp" do
     handle = producer.produce(
       topic: "produce_test_topic",
-      payload: "Payload
+      payload: "Payload timestamp",
       key: "key timestamp",
       timestamp: 1505069646000
     )
@@ -88,6 +89,7 @@ describe Rdkafka::Producer do
       delivery_report: report
     )
 
+    expect(message.partition).to eq 2
     expect(message.key).to eq "key timestamp"
     expect(message.timestamp).to eq 1505069646000
   end
@@ -100,6 +102,6 @@ describe Rdkafka::Producer do
     )
     expect {
       handle.wait(0)
-    }.to raise_error Rdkafka::WaitTimeoutError
+    }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
   end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.1.11
+  version: 0.2.0
 platform: ruby
 authors:
 - Thijs Cadier
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-
+date: 2017-10-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -89,6 +89,9 @@ extensions:
 extra_rdoc_files: []
 files:
 - ".gitignore"
+- ".travis.yml"
+- ".yardopts"
+- CHANGELOG.md
 - Gemfile
 - LICENSE
 - README.md
@@ -97,17 +100,21 @@ files:
 - lib/rdkafka.rb
 - lib/rdkafka/config.rb
 - lib/rdkafka/consumer.rb
+- lib/rdkafka/consumer/message.rb
 - lib/rdkafka/error.rb
 - lib/rdkafka/ffi.rb
-- lib/rdkafka/message.rb
 - lib/rdkafka/producer.rb
+- lib/rdkafka/producer/delivery_handle.rb
+- lib/rdkafka/producer/delivery_report.rb
 - lib/rdkafka/version.rb
 - rdkafka.gemspec
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/ffi_spec.rb
-- spec/rdkafka/
+- spec/rdkafka/producer/delivery_handle_spec.rb
+- spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
 - spec/spec_helper.rb
 homepage: https://github.com/thijsc/rdkafka-ruby
@@ -137,9 +144,11 @@ summary: Kafka client library wrapping librdkafka using the ffi gem and futures
   concurrent-ruby for Kafka 0.10+
 test_files:
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/ffi_spec.rb
-- spec/rdkafka/
+- spec/rdkafka/producer/delivery_handle_spec.rb
+- spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
 - spec/spec_helper.rb
data/lib/rdkafka/message.rb
DELETED
@@ -1,24 +0,0 @@
-module Rdkafka
-  class Message
-    attr_reader :topic, :partition, :payload, :key, :offset, :timestamp
-
-    def initialize(native_message)
-      unless native_message[:rkt].null?
-        @topic = FFI.rd_kafka_topic_name(native_message[:rkt])
-      end
-      @partition = native_message[:partition]
-      unless native_message[:payload].null?
-        @payload = native_message[:payload].read_string(native_message[:len])
-      end
-      unless native_message[:key].null?
-        @key = native_message[:key].read_string(native_message[:key_len])
-      end
-      @offset = native_message[:offset]
-      @timestamp = FFI.rd_kafka_message_timestamp(native_message, nil)
-    end
-
-    def to_s
-      "Message in '#{topic}' with key '#{key}', payload '#{payload}', partition '#{partition}', offset '#{offset}'"
-    end
-  end
-end