rdkafka 0.0.1

checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: c019ec1ebf89bf81ce40340c8866e589d86826e3
+   data.tar.gz: cf1fdaadf5527542dbc1847587f43d8497d7f3d6
+ SHA512:
+   metadata.gz: b58a95c81f0ae2b09d347acf12dbaf71057c4c26c25a6411eb5212723eee33d0b107db61fef385a39bb05079db586ba2d9406f6f1f097f5bb49e8c6852206a33
+   data.tar.gz: 3f5b7bd2277397df7acc096b3145a2e65e33df4d9ee1c7646b5d33d20992daecdd78fffb996abaccbb6b3304d131a7123a8fb94b2d7fdef4e8ad43b6403cdac5
data/.gitignore ADDED
@@ -0,0 +1,4 @@
+ Gemfile.lock
+ ext/ports
+ ext/tmp
+ *.gem
data/Gemfile ADDED
@@ -0,0 +1,3 @@
+ source "https://rubygems.org"
+
+ gemspec
data/LICENSE ADDED
@@ -0,0 +1,21 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2017 Thijs Cadier
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,8 @@
+ # Modern Kafka client library for Ruby based on librdkafka
+
+ Kafka client library wrapping `librdkafka` using the FFI gem for Kafka 0.10+ and Ruby 2.1+.
+
+ ## Development
+
+ Run `bundle` and `cd ext && bundle exec rake compile && cd ..`. You can then run
+ `bundle exec rspec` to run the tests.
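
For reference, a minimal usage sketch based on the API introduced in this release (Rdkafka::Config#producer, Rdkafka::Producer#produce and DeliveryHandle#wait); the broker address and topic name below are example values, not part of the gem:

    require "rdkafka"

    # Example broker address; point this at your own cluster
    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
    producer = config.producer

    # produce returns a DeliveryHandle; wait blocks until the delivery report arrives
    handle = producer.produce(topic: "example_topic", payload: "hello world", key: "key 1")
    report = handle.wait
    puts "Delivered to partition #{report.partition} at offset #{report.offset}"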
data/ext/Rakefile ADDED
@@ -0,0 +1,15 @@
+ require File.expand_path('../../lib/rdkafka/version', __FILE__)
+ require "mini_portile2"
+ require "fileutils"
+
+ task :default do
+   recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
+   recipe.files = ["https://github.com/edenhill/librdkafka/archive/v#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz"]
+   recipe.configure_options = ["--host=#{recipe.host}"]
+   recipe.cook
+ end
+
+ task :clean do
+   FileUtils.rm_rf "ports"
+   FileUtils.rm_rf "tmp"
+ end
data/lib/rdkafka.rb ADDED
@@ -0,0 +1,7 @@
+ require "rdkafka/version"
+
+ require "rdkafka/config"
+ require "rdkafka/consumer"
+ require "rdkafka/error"
+ require "rdkafka/ffi"
+ require "rdkafka/producer"
data/lib/rdkafka/config.rb ADDED
@@ -0,0 +1,78 @@
+ module Rdkafka
+   class Config
+     DEFAULT_CONFIG = {
+       "api.version.request" => "true"
+     }
+
+     def initialize(config_hash = {})
+       @config_hash = DEFAULT_CONFIG.merge(config_hash)
+     end
+
+     def []=(key, value)
+       @config_hash[key] = value
+     end
+
+     def [](key)
+       @config_hash[key]
+     end
+
+     def consumer
+       Rdkafka::Consumer.new(native_kafka(native_config, :rd_kafka_consumer))
+     end
+
+     def producer
+       # Create Kafka config
+       config = native_config
+       # Set callback to receive delivery reports on config
+       Rdkafka::FFI.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::FFI::DeliveryCallback)
+       # Return producer with Kafka client
+       Rdkafka::Producer.new(native_kafka(config, :rd_kafka_producer))
+     end
+
+     class ConfigError < RuntimeError; end
+     class ClientCreationError < RuntimeError; end
+
+     private
+
+     # This method is only intended to be used to create a client,
+     # using it in another way will leak memory.
+     def native_config
+       config = Rdkafka::FFI.rd_kafka_conf_new
+
+       @config_hash.each do |key, value|
+         error_buffer = ::FFI::MemoryPointer.from_string(" " * 256)
+         result = Rdkafka::FFI.rd_kafka_conf_set(
+           config,
+           key,
+           value,
+           error_buffer,
+           256
+         )
+         unless result == :config_ok
+           raise ConfigError.new(error_buffer.read_string)
+         end
+       end
+
+       config
+     end
+
+     def native_kafka(config, type)
+       error_buffer = ::FFI::MemoryPointer.from_string(" " * 256)
+       handle = Rdkafka::FFI.rd_kafka_new(
+         type,
+         config,
+         error_buffer,
+         256
+       )
+
+       if handle.nil?
+         raise ClientCreationError.new(error_buffer.read_string)
+       end
+
+       ::FFI::AutoPointer.new(
+         handle,
+         Rdkafka::FFI.method(:rd_kafka_destroy)
+       )
+     end
+   end
+ end
data/lib/rdkafka/consumer.rb ADDED
@@ -0,0 +1,7 @@
+ module Rdkafka
+   class Consumer
+     def initialize(native_kafka)
+       @native_kafka = native_kafka
+     end
+   end
+ end
data/lib/rdkafka/error.rb ADDED
@@ -0,0 +1,25 @@
+ module Rdkafka
+   class RdkafkaError < RuntimeError
+     attr_reader :rdkafka_response
+
+     def initialize(response)
+       @rdkafka_response = response
+     end
+
+     def code
+       if @rdkafka_response.nil?
+         :unknown_error
+       else
+         Rdkafka::FFI.rd_kafka_err2name(@rdkafka_response).downcase.to_sym
+       end
+     end
+
+     def to_s
+       if @rdkafka_response.nil?
+         "Unknown error: Response code is nil"
+       else
+         Rdkafka::FFI.rd_kafka_err2str(@rdkafka_response)
+       end
+     end
+   end
+ end
data/lib/rdkafka/ffi.rb ADDED
@@ -0,0 +1,79 @@
+ require "ffi"
+ require "mini_portile2"
+
+ module Rdkafka
+   module FFI
+     extend ::FFI::Library
+     ffi_lib "ext/ports/#{MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION).host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.dylib"
+
+     # Polling
+     attach_function :rd_kafka_poll, [:pointer, :int], :void
+
+     # Message struct
+
+     class Message < ::FFI::Struct
+       layout :err, :int,
+              :rkt, :pointer,
+              :partition, :int32,
+              :payload, :pointer,
+              :len, :size_t,
+              :key, :pointer,
+              :key_len, :size_t,
+              :offset, :int64,
+              :_private, :pointer
+     end
+
+     # Errors
+
+     attach_function :rd_kafka_err2name, [:int], :string
+     attach_function :rd_kafka_err2str, [:int], :string
+
+     # Configuration
+
+     enum :kafka_config_response, [
+       :config_unknown, -2,
+       :config_invalid, -1,
+       :config_ok, 0
+     ]
+
+     attach_function :rd_kafka_conf_new, [], :pointer
+     attach_function :rd_kafka_conf_set, [:pointer, :string, :string, :pointer, :int], :kafka_config_response
+
+     # Handle
+
+     enum :kafka_type, [
+       :rd_kafka_producer,
+       :rd_kafka_consumer
+     ]
+
+     attach_function :rd_kafka_new, [:kafka_type, :pointer, :pointer, :int], :pointer
+     attach_function :rd_kafka_destroy, [:pointer], :void
+
+     # Producing
+
+     RD_KAFKA_VTYPE_END = 0
+     RD_KAFKA_VTYPE_TOPIC = 1
+     RD_KAFKA_VTYPE_RKT = 2
+     RD_KAFKA_VTYPE_PARTITION = 3
+     RD_KAFKA_VTYPE_VALUE = 4
+     RD_KAFKA_VTYPE_KEY = 5
+     RD_KAFKA_VTYPE_OPAQUE = 6
+     RD_KAFKA_VTYPE_MSGFLAGS = 7
+     RD_KAFKA_VTYPE_TIMESTAMP = 8
+
+     RD_KAFKA_MSG_F_COPY = 0x2
+
+     attach_function :rd_kafka_producev, [:pointer, :varargs], :int
+     callback :delivery_cb, [:pointer, :pointer, :pointer], :void
+     attach_function :rd_kafka_conf_set_dr_msg_cb, [:pointer, :delivery_cb], :void
+
+     DeliveryCallback = Proc.new do |client_ptr, message_ptr, opaque_ptr|
+       message = Message.new(message_ptr)
+       delivery_handle = Rdkafka::DeliveryHandle.new(message[:_private])
+       delivery_handle[:pending] = false
+       delivery_handle[:response] = message[:err]
+       delivery_handle[:partition] = message[:partition]
+       delivery_handle[:offset] = message[:offset]
+     end
+   end
+ end
data/lib/rdkafka/producer.rb ADDED
@@ -0,0 +1,98 @@
+ module Rdkafka
+   class Producer
+     def initialize(native_kafka)
+       @native_kafka = native_kafka
+       # Start thread to poll client for delivery callbacks
+       @thread = Thread.new do
+         loop do
+           Rdkafka::FFI.rd_kafka_poll(@native_kafka, 100)
+         end
+       end.abort_on_exception = true
+     end
+
+     def produce(topic:, payload: nil, key: nil, partition: nil, timestamp: nil)
+       # Start by checking and converting the input
+
+       # Get payload length
+       payload_size = if payload.nil?
+                        0
+                      else
+                        payload.bytesize
+                      end
+
+       # Get key length
+       key_size = if key.nil?
+                    0
+                  else
+                    key.bytesize
+                  end
+
+       # If partition is nil use -1 to let Kafka set the partition based
+       # on the key/randomly if there is no key
+       partition = -1 if partition.nil?
+
+       # If timestamp is nil use 0 and let Kafka set one
+       timestamp = 0 if timestamp.nil?
+
+       delivery_handle = DeliveryHandle.new
+       delivery_handle[:pending] = true
+       delivery_handle[:response] = 0
+       delivery_handle[:partition] = 0
+       delivery_handle[:offset] = 0
+
+       # Produce the message
+       response = Rdkafka::FFI.rd_kafka_producev(
+         @native_kafka,
+         :int, Rdkafka::FFI::RD_KAFKA_VTYPE_TOPIC, :string, topic,
+         :int, Rdkafka::FFI::RD_KAFKA_VTYPE_MSGFLAGS, :int, Rdkafka::FFI::RD_KAFKA_MSG_F_COPY,
+         :int, Rdkafka::FFI::RD_KAFKA_VTYPE_VALUE, :buffer_in, payload, :size_t, payload_size,
+         :int, Rdkafka::FFI::RD_KAFKA_VTYPE_KEY, :buffer_in, key, :size_t, key_size,
+         :int, Rdkafka::FFI::RD_KAFKA_VTYPE_PARTITION, :int32, partition,
+         :int, Rdkafka::FFI::RD_KAFKA_VTYPE_TIMESTAMP, :int64, timestamp,
+         :int, Rdkafka::FFI::RD_KAFKA_VTYPE_OPAQUE, :pointer, delivery_handle,
+         :int, Rdkafka::FFI::RD_KAFKA_VTYPE_END
+       )
+
+       # Raise error if the produce call was not successful
+       if response != 0
+         raise RdkafkaError.new(response)
+       end
+
+       delivery_handle
+     end
+   end
+
+   class DeliveryHandle < ::FFI::Struct
+     layout :pending, :bool,
+            :response, :int,
+            :partition, :int,
+            :offset, :int64
+
+     def pending?
+       self[:pending]
+     end
+
+     # Wait for the delivery report
+     def wait
+       loop do
+         if pending?
+           sleep 0.05
+           next
+         elsif self[:response] != 0
+           raise RdkafkaError.new(self[:response])
+         else
+           return DeliveryReport.new(self[:partition], self[:offset])
+         end
+       end
+     end
+   end
+
+   class DeliveryReport
+     attr_reader :partition, :offset
+
+     def initialize(partition, offset)
+       @partition = partition
+       @offset = offset
+     end
+   end
+ end
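
As a companion to producer.rb above, a brief sketch of how DeliveryHandle#wait and RdkafkaError interact (the producer and topic name are assumed example values):

    handle = producer.produce(topic: "example_topic", payload: "hello")
    begin
      report = handle.wait
      puts "Stored at offset #{report.offset}"
    rescue Rdkafka::RdkafkaError => e
      # code maps the librdkafka response to a symbol, e.g. :msg_size_too_large
      puts "Delivery failed: #{e.code} (#{e})"
    end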
data/lib/rdkafka/version.rb ADDED
@@ -0,0 +1,4 @@
+ module Rdkafka
+   VERSION = "0.0.1"
+   LIBRDKAFKA_VERSION = "0.11.0"
+ end
data/rdkafka.gemspec ADDED
@@ -0,0 +1,26 @@
+ require File.expand_path('../lib/rdkafka/version', __FILE__)
+
+ Gem::Specification.new do |gem|
+   gem.authors = ['Thijs Cadier']
+   gem.email = ["thijs@appsignal.com"]
+   gem.description = "Modern Kafka client library for Ruby based on librdkafka"
+   gem.summary = "Kafka client library wrapping librdkafka using the ffi gem and futures from concurrent-ruby for Kafka 0.10+"
+   gem.license = 'MIT'
+   gem.homepage = 'https://github.com/thijsc/rdkafka-ruby'
+
+   gem.files = `git ls-files`.split($\)
+   gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
+   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
+   gem.name = 'rdkafka'
+   gem.require_paths = ['lib']
+   gem.version = Rdkafka::VERSION
+   gem.required_ruby_version = '>= 2.0'
+   gem.extensions = %w(ext/Rakefile)
+
+   gem.add_dependency 'ffi', '~> 1.9'
+   gem.add_dependency 'mini_portile2', '~> 2.2'
+
+   gem.add_development_dependency 'pry', '~> 0.10'
+   gem.add_development_dependency 'rspec', '~> 3.5'
+   gem.add_development_dependency 'rake', '~> 12.0'
+ end
data/spec/rdkafka/config_spec.rb ADDED
@@ -0,0 +1,36 @@
+ require "spec_helper"
+
+ describe Rdkafka::Config do
+   it "should store configuration" do
+     config = Rdkafka::Config.new
+     config['key'] = 'value'
+     expect(config['key']).to eq 'value'
+   end
+
+   it "should use default configuration" do
+     config = Rdkafka::Config.new
+     expect(config['api.version.request']).to eq 'true'
+   end
+
+   it "should create a consumer with valid config" do
+     expect(rdkafka_config.consumer).to be_a Rdkafka::Consumer
+   end
+
+   it "should raise an error when creating a consumer with invalid config" do
+     config = Rdkafka::Config.new('invalid.key' => 'value')
+     expect {
+       config.consumer
+     }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
+   end
+
+   it "should create a producer with valid config" do
+     expect(rdkafka_config.producer).to be_a Rdkafka::Producer
+   end
+
+   it "should raise an error when creating a producer with invalid config" do
+     config = Rdkafka::Config.new('invalid.key' => 'value')
+     expect {
+       config.producer
+     }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
+   end
+ end
data/spec/rdkafka/consumer_spec.rb ADDED
@@ -0,0 +1,5 @@
+ require "spec_helper"
+
+ describe Rdkafka::Consumer do
+
+ end
data/spec/rdkafka/error_spec.rb ADDED
@@ -0,0 +1,31 @@
+ require "spec_helper"
+
+ describe Rdkafka::RdkafkaError do
+   describe "#code" do
+     it "should handle a nil response" do
+       expect(Rdkafka::RdkafkaError.new(nil).code).to eq :unknown_error
+     end
+
+     it "should handle an invalid response" do
+       expect(Rdkafka::RdkafkaError.new(933975).code).to eq :err_933975?
+     end
+
+     it "should return error messages from rdkafka" do
+       expect(Rdkafka::RdkafkaError.new(10).code).to eq :msg_size_too_large
+     end
+   end
+
+   describe "#to_s" do
+     it "should handle a nil response" do
+       expect(Rdkafka::RdkafkaError.new(nil).to_s).to eq "Unknown error: Response code is nil"
+     end
+
+     it "should handle an invalid response" do
+       expect(Rdkafka::RdkafkaError.new(933975).to_s).to eq "Err-933975?"
+     end
+
+     it "should return error messages from rdkafka" do
+       expect(Rdkafka::RdkafkaError.new(10).to_s).to eq "Broker: Message size too large"
+     end
+   end
+ end
data/spec/rdkafka/ffi_spec.rb ADDED
@@ -0,0 +1,7 @@
+ require "spec_helper"
+
+ describe Rdkafka::FFI do
+   it "should successfully call librdkafka" do
+     Rdkafka::FFI.rd_kafka_conf_new
+   end
+ end
data/spec/rdkafka/producer_spec.rb ADDED
@@ -0,0 +1,31 @@
+ require "spec_helper"
+
+ describe Rdkafka::Producer do
+   let(:producer) do
+     rdkafka_config.producer
+   end
+
+   it "should require a topic" do
+     expect {
+       producer.produce(
+         payload: "payload",
+         key: "key"
+       )
+     }.to raise_error ArgumentError, "missing keyword: topic"
+   end
+
+   it "should produce a message" do
+     handle = producer.produce(
+       topic: "produce_test_topic",
+       payload: "payload 1",
+       key: "key 1"
+     )
+     expect(handle.pending?).to be true
+
+     report = handle.wait
+     expect(handle.pending?).to be false
+     expect(report).not_to be_nil
+     expect(report.partition).to eq 0
+     expect(report.offset).to be > 0
+   end
+ end
data/spec/rdkafka_spec.rb ADDED
@@ -0,0 +1,4 @@
+ require "spec_helper"
+
+ describe Rdkafka do
+ end
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,7 @@
+ require "pry"
+ require "rspec"
+ require "rdkafka"
+
+ def rdkafka_config
+   Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
+ end
metadata ADDED
@@ -0,0 +1,143 @@
+ --- !ruby/object:Gem::Specification
+ name: rdkafka
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ platform: ruby
+ authors:
+ - Thijs Cadier
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2017-08-28 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: ffi
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.9'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.9'
+ - !ruby/object:Gem::Dependency
+   name: mini_portile2
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.2'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.2'
+ - !ruby/object:Gem::Dependency
+   name: pry
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.10'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.10'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.5'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.5'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '12.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '12.0'
+ description: Modern Kafka client library for Ruby based on librdkafka
+ email:
+ - thijs@appsignal.com
+ executables: []
+ extensions:
+ - ext/Rakefile
+ extra_rdoc_files: []
+ files:
+ - ".gitignore"
+ - Gemfile
+ - LICENSE
+ - README.md
+ - ext/Rakefile
+ - lib/rdkafka.rb
+ - lib/rdkafka/config.rb
+ - lib/rdkafka/consumer.rb
+ - lib/rdkafka/error.rb
+ - lib/rdkafka/ffi.rb
+ - lib/rdkafka/producer.rb
+ - lib/rdkafka/version.rb
+ - rdkafka.gemspec
+ - spec/rdkafka/config_spec.rb
+ - spec/rdkafka/consumer_spec.rb
+ - spec/rdkafka/error_spec.rb
+ - spec/rdkafka/ffi_spec.rb
+ - spec/rdkafka/producer_spec.rb
+ - spec/rdkafka_spec.rb
+ - spec/spec_helper.rb
+ homepage: https://github.com/thijsc/rdkafka-ruby
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '2.0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.6.11
+ signing_key:
+ specification_version: 4
+ summary: Kafka client library wrapping librdkafka using the ffi gem and futures from
+   concurrent-ruby for Kafka 0.10+
+ test_files:
+ - spec/rdkafka/config_spec.rb
+ - spec/rdkafka/consumer_spec.rb
+ - spec/rdkafka/error_spec.rb
+ - spec/rdkafka/ffi_spec.rb
+ - spec/rdkafka/producer_spec.rb
+ - spec/rdkafka_spec.rb
+ - spec/spec_helper.rb
+ - spec/spec_helper.rb