rdkafka 0.1.9 → 0.1.10

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 5ca27dbc2b4226938c9295a8d2a483f414fd161e
-  data.tar.gz: 2b0e7a4333d47af7b41d5d3e5da5242549740165
+  metadata.gz: 47ed8b2e877ccd9b0d4f79e6c252651f497b3aad
+  data.tar.gz: 274091ba3bf02e06444c871c7ed4af50e417be08
 SHA512:
-  metadata.gz: d97ac0211a5a6a2bd52b13d4779c8cac55c46207aaf8b0bb02d59fe3cffe50746b9be8c352015f218d2607c675c3374f3931d4aad54478f7880cb09b2947bda6
-  data.tar.gz: 4316bd794f7a83723dcd2184a6a27c0ca2b45380d1989eaac539058f0a47fe68620ba851dc3f4b93d9d610826029a55f96566326ff83128f1e5ae5849d4fa846
+  metadata.gz: ea30c37519e22b12c8ffb38fc902ccb9cb4c2fb2ef5432aa91df5aa116c5abab5965cd0787c6de76a4f664dad7d7b97753895d5ae27852374cbae1b889245d96
+  data.tar.gz: e49e57f8ccd45d62250f2eef6e2a6da3f9ea90b3393239f2b2d82222adfb3bbecd6927fce42adaae6e16bca9ee1dc25e69d7a3b25942edb6f19aa5594524b710
data/.gitignore CHANGED
@@ -3,3 +3,4 @@ ext/ports
 ext/tmp
 ext/librdkafka.*
 *.gem
+.yardoc
data/README.md CHANGED
@@ -1,5 +1,7 @@
 # Rdkafka
 
+[![Gem Version](https://badge.fury.io/rb/rdkafka.svg)](https://badge.fury.io/rb/rdkafka)
+
 The `rdkafka` gem is a modern Kafka client library for Ruby based on
 [librdkafka](https://github.com/edenhill/librdkafka/).
 It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
@@ -9,6 +11,8 @@ This gem only provides a high-level Kafka consumer. If you are running
 an older version of Kafka and/or need the legacy simple consumer we
 suggest using the [Hermann](https://github.com/reiseburo/hermann) gem.
 
+Documentation is available on [RubyDoc](http://www.rubydoc.info/github/thijsc/rdkafka-ruby/master).
+
 ## Development
 
 Run `bundle` and `cd ext && bundle exec rake compile && cd ..`. Then
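
A minimal quick-start sketch of the API the README describes, assembled from the Rakefile tasks later in this diff; the broker address, group id, and topic name are illustrative assumptions:

    require "rdkafka"

    # Produce a message; the broker address is assumed to be a local Kafka.
    producer = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").producer
    producer.produce(topic: "example_topic", payload: "hello", key: "greeting").wait

    # Consume from the same (assumed) topic.
    consumer = Rdkafka::Config.new(
      :"bootstrap.servers" => "localhost:9092",
      :"group.id" => "example_group"
    ).consumer
    consumer.subscribe("example_topic")
    consumer.each do |message|
      puts message
    end
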
data/Rakefile CHANGED
@@ -1,14 +1,17 @@
 require "./lib/rdkafka"
 
 task :create_topics do
-  `kafka-topics --create --topic=produce_test_topic --zookeeper=127.0.0.1:2181 --partitions=1 --replication-factor=1`
-  `kafka-topics --create --topic=rake_test_topic --zookeeper=127.0.0.1:2181 --partitions=1 --replication-factor=1`
+  puts "Creating test topics"
+  `kafka-topics --create --topic=produce_test_topic --zookeeper=127.0.0.1:2181 --partitions=3 --replication-factor=1`
+  `kafka-topics --create --topic=rake_test_topic --zookeeper=127.0.0.1:2181 --partitions=3 --replication-factor=1`
 end
 
 task :produce_messages do
-  producer = Rdkafka::Config.new(
-    :"bootstrap.servers" => "localhost:9092"
-  ).producer
+  config = {:"bootstrap.servers" => "localhost:9092"}
+  if ENV["DEBUG"]
+    config[:debug] = "broker,topic,msg"
+  end
+  producer = Rdkafka::Config.new(config).producer
   100.times do |i|
     puts "Producing message #{i}"
     producer.produce(
@@ -20,14 +23,18 @@ task :produce_messages do
 end
 
 task :consume_messages do
-  consumer = Rdkafka::Config.new(
+  config = {
     :"bootstrap.servers" => "localhost:9092",
     :"group.id" => "rake_test",
     :"enable.partition.eof" => false,
     :"auto.offset.reset" => "earliest"
-  ).consumer
+  }
+  if ENV["DEBUG"]
+    config[:debug] = "cgrp,topic,fetch"
+  end
+  consumer = Rdkafka::Config.new(config).consumer
   consumer.subscribe("rake_test_topic")
   consumer.each do |message|
-    puts message
+    puts "Message received: #{message}"
   end
 end
data/lib/rdkafka.rb CHANGED
@@ -4,4 +4,5 @@ require "rdkafka/config"
 require "rdkafka/consumer"
 require "rdkafka/error"
 require "rdkafka/ffi"
+require "rdkafka/message"
 require "rdkafka/producer"
data/lib/rdkafka/config.rb CHANGED
@@ -1,6 +1,9 @@
 require "logger"
 
 module Rdkafka
+  # Configuration for a Kafka consumer or producer. You can create an instance and use
+  # the consumer and producer methods to create a client. Documentation of the available
+  # configuration options is available on https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md.
   class Config
     @@logger = Logger.new(STDOUT)
 
@@ -13,8 +16,14 @@ module Rdkafka
     end
 
     DEFAULT_CONFIG = {
+      # Request api version so advanced features work
       :"api.version.request" => true
-    }
+    }.freeze
+
+    REQUIRED_CONFIG = {
+      # Enable log queues so we get callbacks in our own threads
+      :"log.queue" => true
+    }.freeze
 
     def initialize(config_hash = {})
       @config_hash = DEFAULT_CONFIG.merge(config_hash)
@@ -30,7 +39,9 @@ module Rdkafka
 
     def consumer
       kafka = native_kafka(native_config, :rd_kafka_consumer)
+      # Redirect the main queue to the consumer
       Rdkafka::FFI.rd_kafka_poll_set_consumer(kafka)
+      # Return consumer with Kafka client
       Rdkafka::Consumer.new(kafka)
     end
 
@@ -52,7 +63,7 @@ module Rdkafka
     # using it in another way will leak memory.
     def native_config
       Rdkafka::FFI.rd_kafka_conf_new.tap do |config|
-        @config_hash.each do |key, value|
+        @config_hash.merge(REQUIRED_CONFIG).each do |key, value|
           error_buffer = ::FFI::MemoryPointer.from_string(" " * 256)
           result = Rdkafka::FFI.rd_kafka_conf_set(
             config,
@@ -65,6 +76,7 @@ module Rdkafka
             raise ConfigError.new(error_buffer.read_string)
           end
         end
+        # Set log callback
        Rdkafka::FFI.rd_kafka_conf_set_log_cb(config, Rdkafka::FFI::LogCallback)
       end
     end
@@ -82,6 +94,12 @@ module Rdkafka
         raise ClientCreationError.new(error_buffer.read_string)
       end
 
+      # Redirect log to handle's queue
+      Rdkafka::FFI.rd_kafka_set_log_queue(
+        handle,
+        Rdkafka::FFI.rd_kafka_queue_get_main(handle)
+      )
+
       ::FFI::AutoPointer.new(
         handle,
         Rdkafka::FFI.method(:rd_kafka_destroy)
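
The merge order in native_config is deliberate: REQUIRED_CONFIG is merged last, so a caller cannot accidentally disable log.queue. A minimal illustration of Ruby's Hash#merge precedence (the hash values here are illustrative):

    user_config = { :"log.queue" => false, :"bootstrap.servers" => "localhost:9092" }
    required    = { :"log.queue" => true }

    # On key collisions the argument's values win over the receiver's,
    # so the required setting always takes effect.
    user_config.merge(required)
    # => {:"log.queue"=>true, :"bootstrap.servers"=>"localhost:9092"}
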
data/lib/rdkafka/consumer.rb CHANGED
@@ -6,6 +6,10 @@ module Rdkafka
       @native_kafka = native_kafka
     end
 
+    def close
+      Rdkafka::FFI.rd_kafka_consumer_close(@native_kafka)
+    end
+
     def subscribe(*topics)
       # Create topic partition list with topics and no partition set
       tpl = Rdkafka::FFI.rd_kafka_topic_partition_list_new(topics.length)
@@ -38,17 +42,25 @@ module Rdkafka
       if message_ptr.null?
         nil
       else
-        message = Rdkafka::FFI::Message.new(message_ptr)
-        if message.err != 0
-          raise Rdkafka::RdkafkaError.new(message.err)
+        # Create struct wrapper
+        native_message = Rdkafka::FFI::Message.new(message_ptr)
+        # Raise error if needed
+        if native_message[:err] != 0
+          raise Rdkafka::RdkafkaError.new(native_message[:err])
         end
-        message
+        # Create a message to pass out
+        Rdkafka::Message.new(native_message)
+      end
+    ensure
+      # Clean up rdkafka message if there is one
+      unless message_ptr.null?
+        Rdkafka::FFI.rd_kafka_message_destroy(message_ptr)
       end
     end
 
     def each(&block)
       loop do
-        message = poll(1000)
+        message = poll(250)
         if message
           block.call(message)
         else
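
With this change, poll copies every field out of the native struct into an Rdkafka::Message before the ensure block frees the native message, so no native pointer escapes to callers. Pairing each with the new close method might look like this minimal sketch (config values and topic name are assumptions):

    consumer = Rdkafka::Config.new(
      :"bootstrap.servers" => "localhost:9092",
      :"group.id" => "example_group"
    ).consumer
    consumer.subscribe("example_topic")
    begin
      consumer.each { |message| puts message }
    ensure
      # New in 0.1.10: leaves the consumer group via rd_kafka_consumer_close.
      consumer.close
    end
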
data/lib/rdkafka/ffi.rb CHANGED
@@ -18,10 +18,11 @@ module Rdkafka
     # Polling
 
     attach_function :rd_kafka_poll, [:pointer, :int], :void, blocking: true
+    attach_function :rd_kafka_outq_len, [:pointer], :int, blocking: true
 
     # Message struct
 
-    class Message < ::FFI::ManagedStruct
+    class Message < ::FFI::Struct
       layout :err, :int,
              :rkt, :pointer,
              :partition, :int32,
@@ -31,49 +32,10 @@ module Rdkafka
              :key_len, :size_t,
             :offset, :int64,
             :_private, :pointer
-
-      def err
-        self[:err]
-      end
-
-      def topic
-        FFI.rd_kafka_topic_name(self[:rkt])
-      end
-
-      def partition
-        self[:partition]
-      end
-
-      def payload
-        if self[:payload].null?
-          nil
-        else
-          self[:payload].read_string(self[:len])
-        end
-      end
-
-      def key
-        if self[:key].null?
-          nil
-        else
-          self[:key].read_string(self[:key_len])
-        end
-      end
-
-      def offset
-        self[:offset]
-      end
-
-      def to_s
-        "Message in '#{topic}' with key '#{key}', payload '#{payload}', partition '#{partition}', offset '#{offset}'"
-      end
-
-      def self.release(ptr)
-        rd_kafka_message_destroy(ptr)
-      end
     end
 
     attach_function :rd_kafka_message_destroy, [:pointer], :void
+    attach_function :rd_kafka_topic_new, [:pointer, :string, :pointer], :pointer
     attach_function :rd_kafka_topic_name, [:pointer], :string
 
     # TopicPartition ad TopicPartitionList structs
@@ -117,7 +79,13 @@ module Rdkafka
     callback :log_cb, [:pointer, :int, :string, :string], :void
     attach_function :rd_kafka_conf_set_log_cb, [:pointer, :log_cb], :void
 
-    LogCallback = Proc.new do |client_ptr, level, level_string, line|
+    # Log queue
+    attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
+    attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
+
+    LogCallback = ::FFI::Function.new(
+      :void, [:pointer, :int, :string, :string]
+    ) do |_client_ptr, level, _level_string, line|
       severity = case level
                  when 0 || 1 || 2
                    Logger::FATAL
@@ -148,9 +116,10 @@ module Rdkafka
     # Consumer
 
     attach_function :rd_kafka_subscribe, [:pointer, :pointer], :int
-    attach_function :rd_kafka_commit, [:pointer, :pointer, :bool], :int
+    attach_function :rd_kafka_commit, [:pointer, :pointer, :bool], :int, blocking: true
     attach_function :rd_kafka_poll_set_consumer, [:pointer], :void
     attach_function :rd_kafka_consumer_poll, [:pointer, :int], :pointer, blocking: true
+    attach_function :rd_kafka_consumer_close, [:pointer], :void, blocking: true
 
     # Producer
 
@@ -170,7 +139,9 @@ module Rdkafka
     callback :delivery_cb, [:pointer, :pointer, :pointer], :void
     attach_function :rd_kafka_conf_set_dr_msg_cb, [:pointer, :delivery_cb], :void
 
-    DeliveryCallback = Proc.new do |client_ptr, message_ptr, opaque_ptr|
+    DeliveryCallback = ::FFI::Function.new(
+      :void, [:pointer, :pointer, :pointer]
+    ) do |client_ptr, message_ptr, opaque_ptr|
       message = Message.new(message_ptr)
       delivery_handle = Rdkafka::DeliveryHandle.new(message[:_private])
       delivery_handle[:pending] = false
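
Both callback constants move from Proc.new to ::FFI::Function, which builds a real native function pointer with an explicit signature instead of relying on implicit Proc conversion, and keeping it in a constant prevents it from being garbage collected while C still holds the pointer. A standalone sketch of the constructor (the types and block here are illustrative, not part of this gem):

    require "ffi"

    # FFI::Function.new(return_type, param_types) { ... } wraps the block
    # in a native callable that can be handed to C code.
    Adder = ::FFI::Function.new(:int, [:int, :int]) do |a, b|
      a + b
    end
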
data/lib/rdkafka/message.rb ADDED
@@ -0,0 +1,23 @@
+module Rdkafka
+  class Message
+    attr_reader :topic, :partition, :payload, :key, :offset
+
+    def initialize(native_message)
+      unless native_message[:rkt].null?
+        @topic = FFI.rd_kafka_topic_name(native_message[:rkt])
+      end
+      @partition = native_message[:partition]
+      unless native_message[:payload].null?
+        @payload = native_message[:payload].read_string(native_message[:len])
+      end
+      unless native_message[:key].null?
+        @key = native_message[:key].read_string(native_message[:key_len])
+      end
+      @offset = native_message[:offset]
+    end
+
+    def to_s
+      "Message in '#{topic}' with key '#{key}', payload '#{payload}', partition '#{partition}', offset '#{offset}'"
+    end
+  end
+end
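
Rdkafka::Message is a plain Ruby value object: every field is read out of the native struct at construction time, so the message stays usable after the native memory is destroyed in Consumer#poll. Reading a consumed message might look like this sketch (the poll timeout is an assumption):

    message = consumer.poll(250)
    if message
      puts message.topic     # resolved via rd_kafka_topic_name at construction
      puts message.partition
      puts message.payload   # nil when the message carries no payload
      puts message.key       # nil when the message carries no key
      puts message.offset
    end
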
data/lib/rdkafka/producer.rb CHANGED
@@ -1,13 +1,26 @@
 module Rdkafka
   class Producer
     def initialize(native_kafka)
+      @closing = false
       @native_kafka = native_kafka
       # Start thread to poll client for delivery callbacks
-      @thread = Thread.new do
+      @polling_thread = Thread.new do
         loop do
-          Rdkafka::FFI.rd_kafka_poll(@native_kafka, 1000)
+          Rdkafka::FFI.rd_kafka_poll(@native_kafka, 250)
+          # Exit thread if closing and the poll queue is empty
+          if @closing && Rdkafka::FFI.rd_kafka_outq_len(@native_kafka) == 0
+            break
+          end
         end
-      end.abort_on_exception = true
+      end
+      @polling_thread.abort_on_exception = true
+    end
+
+    def close
+      # Indicate to polling thread that we're closing
+      @closing = true
+      # Wait for the polling thread to finish up
+      @polling_thread.join
     end
 
     def produce(topic:, payload: nil, key: nil, partition: nil, timestamp: nil)
@@ -75,7 +88,7 @@ module Rdkafka
     end
 
     # Wait for the delivery report
-    def wait(timeout_in_seconds=10)
+    def wait(timeout_in_seconds=60)
       timeout = if timeout_in_seconds
                   Time.now.to_i + timeout_in_seconds
                 else
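
Producer#close sets @closing and joins the polling thread, which only breaks out of its loop once rd_kafka_outq_len reports an empty queue, so outstanding delivery callbacks are drained before shutdown. A minimal end-to-end sketch (broker address and topic name are assumptions):

    producer = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").producer
    handle = producer.produce(topic: "example_topic", payload: "hello", key: "k1")
    report = handle.wait(10) # the default timeout is now 60 seconds
    puts "Delivered to partition #{report.partition} at offset #{report.offset}"
    # Drains pending delivery reports, then stops the polling thread.
    producer.close
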
data/lib/rdkafka/version.rb CHANGED
@@ -1,4 +1,4 @@
 module Rdkafka
-  VERSION = "0.1.9"
+  VERSION = "0.1.10"
   LIBRDKAFKA_VERSION = "0.11.0"
 end
data/spec/rdkafka/ffi_spec.rb CHANGED
@@ -1,7 +1,13 @@
 require "spec_helper"
 
 describe Rdkafka::FFI do
+  it "should load librdkafka" do
+    expect(Rdkafka::FFI.ffi_libraries.map(&:name).first).to include "librdkafka"
+  end
+
   it "should successfully call librdkafka" do
-    Rdkafka::FFI.rd_kafka_conf_new
+    expect {
+      Rdkafka::FFI.rd_kafka_conf_new
+    }.not_to raise_error
   end
 end
data/spec/rdkafka/message_spec.rb ADDED
@@ -0,0 +1,73 @@
+require "spec_helper"
+
+describe Rdkafka::Message do
+  let(:native_topic) do
+    Rdkafka::FFI.rd_kafka_topic_new(
+      native_client,
+      "topic_name",
+      nil
+    )
+  end
+  let(:payload) { nil }
+  let(:key) { nil }
+  let(:native_message) do
+    Rdkafka::FFI::Message.new.tap do |message|
+      message[:rkt] = native_topic
+      message[:partition] = 3
+      message[:offset] = 100
+      if payload
+        ptr = ::FFI::MemoryPointer.new(:char, payload.bytesize)
+        ptr.put_bytes(0, payload)
+        message[:payload] = ptr
+        message[:len] = payload.bytesize
+      end
+      if key
+        ptr = ::FFI::MemoryPointer.new(:char, key.bytesize)
+        ptr.put_bytes(0, key)
+        message[:key] = ptr
+        message[:key_len] = key.bytesize
+      end
+    end
+  end
+  subject { Rdkafka::Message.new(native_message) }
+
+  it "should have a topic" do
+    expect(subject.topic).to eq "topic_name"
+  end
+
+  it "should have a partition" do
+    expect(subject.partition).to eq 3
+  end
+
+  context "payload" do
+    it "should have a nil payload when none is present" do
+      expect(subject.payload).to be_nil
+    end
+
+    context "present payload" do
+      let(:payload) { "payload content" }
+
+      it "should have a payload" do
+        expect(subject.payload).to eq "payload content"
+      end
+    end
+  end
+
+  context "key" do
+    it "should have a nil key when none is present" do
+      expect(subject.key).to be_nil
+    end
+
+    context "present key" do
+      let(:key) { "key content" }
+
+      it "should have a key" do
+        expect(subject.key).to eq "key content"
+      end
+    end
+  end
+
+  it "should have an offset" do
+    expect(subject.offset).to eq 100
+  end
+end
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -2,7 +2,6 @@ require "spec_helper"
 
 describe Rdkafka::Producer do
   let(:producer) { rdkafka_config.producer }
-  let(:consumer) { rdkafka_config.consumer }
 
   it "should require a topic" do
     expect {
@@ -14,12 +13,6 @@ describe Rdkafka::Producer do
   end
 
   it "should produce a message" do
-    consumer.subscribe("produce_test_topic")
-    # Make sure the consumer is running before we produce
-    5.times do
-      consumer.poll
-    end
-
     handle = producer.produce(
       topic: "produce_test_topic",
       payload: "payload 1",
@@ -28,20 +21,33 @@ describe Rdkafka::Producer do
     expect(handle.pending?).to be true
 
     # Check delivery handle and report
-    report = handle.wait
+    report = handle.wait(5)
     expect(handle.pending?).to be false
     expect(report).not_to be_nil
     expect(report.partition).to eq 0
     expect(report.offset).to be > 0
 
+    # Close producer
+    producer.close
+
     # Consume message and verify it's content
-    message = consumer.first
-    expect(message).not_to be_nil
+    message = wait_for_message(
+      topic: "produce_test_topic",
+      delivery_report: report
+    )
     expect(message.partition).to eq 0
-    expect(message.offset).to eq report.offset
     expect(message.payload).to eq "payload 1"
     expect(message.key).to eq "key 1"
+  end
 
-    consumer.commit
+  it "should raise a timeout error when waiting too long" do
+    handle = producer.produce(
+      topic: "produce_test_topic",
+      payload: "payload 1",
+      key: "key 1"
+    )
+    expect {
+      handle.wait(0)
+    }.to raise_error Rdkafka::WaitTimeoutError
   end
 end
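
The new timeout spec depends on wait raising Rdkafka::WaitTimeoutError once the deadline passes without a delivery report; wait(0) makes that deadline immediate. Handling the error in application code might look like this sketch:

    begin
      report = handle.wait(5)
    rescue Rdkafka::WaitTimeoutError
      # No delivery report within 5 seconds; the message may still arrive later.
    end
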
data/spec/spec_helper.rb CHANGED
@@ -6,6 +6,8 @@ def rdkafka_config
   config = {
     :"bootstrap.servers" => "localhost:9092",
     :"group.id" => "ruby_test",
+    :"client.id" => "test",
+    :"auto.offset.reset" => "earliest",
     :"enable.partition.eof" => false
   }
   if ENV["DEBUG_PRODUCER"]
@@ -15,3 +17,26 @@ def rdkafka_config
   end
   Rdkafka::Config.new(config)
 end
+
+def native_client
+  config = rdkafka_config
+  config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
+end
+
+def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30)
+  offset = delivery_report.offset - 1
+  consumer = rdkafka_config.consumer
+  consumer.subscribe(topic)
+  timeout = Time.now.to_i + timeout_in_seconds
+  loop do
+    if timeout <= Time.now.to_i
+      raise "Timeout of #{timeout_in_seconds} seconds reached in wait_for_message"
+    end
+    message = consumer.poll(100)
+    if message && message.offset == offset
+      return message
+    end
+  end
+ensure
+  consumer.close
+end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.1.9
+  version: 0.1.10
 platform: ruby
 authors:
 - Thijs Cadier
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-08-31 00:00:00.000000000 Z
+date: 2017-09-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -99,6 +99,7 @@ files:
 - lib/rdkafka/consumer.rb
 - lib/rdkafka/error.rb
 - lib/rdkafka/ffi.rb
+- lib/rdkafka/message.rb
 - lib/rdkafka/producer.rb
 - lib/rdkafka/version.rb
 - rdkafka.gemspec
@@ -106,8 +107,8 @@ files:
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/ffi_spec.rb
+- spec/rdkafka/message_spec.rb
 - spec/rdkafka/producer_spec.rb
-- spec/rdkafka_spec.rb
 - spec/spec_helper.rb
 homepage: https://github.com/thijsc/rdkafka-ruby
 licenses:
@@ -139,6 +140,6 @@ test_files:
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/ffi_spec.rb
+- spec/rdkafka/message_spec.rb
 - spec/rdkafka/producer_spec.rb
-- spec/rdkafka_spec.rb
 - spec/spec_helper.rb
data/spec/rdkafka_spec.rb DELETED
@@ -1,4 +0,0 @@
-require "spec_helper"
-
-describe Rdkafka do
-end