karafka-rdkafka 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data/.gitignore +8 -0
  4. data/.rspec +1 -0
  5. data/.semaphore/semaphore.yml +23 -0
  6. data/.yardopts +2 -0
  7. data/CHANGELOG.md +104 -0
  8. data/Gemfile +3 -0
  9. data/Guardfile +19 -0
  10. data/LICENSE +21 -0
  11. data/README.md +114 -0
  12. data/Rakefile +96 -0
  13. data/bin/console +11 -0
  14. data/docker-compose.yml +24 -0
  15. data/ext/README.md +18 -0
  16. data/ext/Rakefile +62 -0
  17. data/lib/rdkafka/abstract_handle.rb +82 -0
  18. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  19. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  20. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  21. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  22. data/lib/rdkafka/admin.rb +155 -0
  23. data/lib/rdkafka/bindings.rb +312 -0
  24. data/lib/rdkafka/callbacks.rb +106 -0
  25. data/lib/rdkafka/config.rb +299 -0
  26. data/lib/rdkafka/consumer/headers.rb +63 -0
  27. data/lib/rdkafka/consumer/message.rb +84 -0
  28. data/lib/rdkafka/consumer/partition.rb +49 -0
  29. data/lib/rdkafka/consumer/topic_partition_list.rb +164 -0
  30. data/lib/rdkafka/consumer.rb +565 -0
  31. data/lib/rdkafka/error.rb +86 -0
  32. data/lib/rdkafka/metadata.rb +92 -0
  33. data/lib/rdkafka/producer/client.rb +47 -0
  34. data/lib/rdkafka/producer/delivery_handle.rb +22 -0
  35. data/lib/rdkafka/producer/delivery_report.rb +26 -0
  36. data/lib/rdkafka/producer.rb +178 -0
  37. data/lib/rdkafka/version.rb +5 -0
  38. data/lib/rdkafka.rb +22 -0
  39. data/rdkafka.gemspec +36 -0
  40. data/spec/rdkafka/abstract_handle_spec.rb +113 -0
  41. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  42. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  43. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  44. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  45. data/spec/rdkafka/admin_spec.rb +203 -0
  46. data/spec/rdkafka/bindings_spec.rb +134 -0
  47. data/spec/rdkafka/callbacks_spec.rb +20 -0
  48. data/spec/rdkafka/config_spec.rb +182 -0
  49. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  50. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  51. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +223 -0
  52. data/spec/rdkafka/consumer_spec.rb +1008 -0
  53. data/spec/rdkafka/error_spec.rb +89 -0
  54. data/spec/rdkafka/metadata_spec.rb +78 -0
  55. data/spec/rdkafka/producer/client_spec.rb +145 -0
  56. data/spec/rdkafka/producer/delivery_handle_spec.rb +42 -0
  57. data/spec/rdkafka/producer/delivery_report_spec.rb +17 -0
  58. data/spec/rdkafka/producer_spec.rb +525 -0
  59. data/spec/spec_helper.rb +139 -0
  60. data.tar.gz.sig +0 -0
  61. metadata +277 -0
  62. metadata.gz.sig +0 -0
@@ -0,0 +1,92 @@
1
+ module Rdkafka
2
+ class Metadata
3
+ attr_reader :brokers, :topics
4
+
5
+ def initialize(native_client, topic_name = nil)
6
+ native_topic = if topic_name
7
+ Rdkafka::Bindings.rd_kafka_topic_new(native_client, topic_name, nil)
8
+ end
9
+
10
+ ptr = FFI::MemoryPointer.new(:pointer)
11
+
12
+ # If topic_flag is 1, we request info about *all* topics in the cluster. If topic_flag is 0,
13
+ # we only request info about locally known topics (or a single topic if one is passed in).
14
+ topic_flag = topic_name.nil? ? 1 : 0
15
+
16
+ # Retrieve the Metadata
17
+ result = Rdkafka::Bindings.rd_kafka_metadata(native_client, topic_flag, native_topic, ptr, 250)
18
+
19
+ # Error Handling
20
+ raise Rdkafka::RdkafkaError.new(result) unless result.zero?
21
+
22
+ metadata_from_native(ptr.read_pointer)
23
+ ensure
24
+ Rdkafka::Bindings.rd_kafka_topic_destroy(native_topic) if topic_name
25
+ Rdkafka::Bindings.rd_kafka_metadata_destroy(ptr.read_pointer)
26
+ end
27
+
28
+ private
29
+
30
+ def metadata_from_native(ptr)
31
+ metadata = Metadata.new(ptr)
32
+ @brokers = Array.new(metadata[:brokers_count]) do |i|
33
+ BrokerMetadata.new(metadata[:brokers_metadata] + (i * BrokerMetadata.size)).to_h
34
+ end
35
+
36
+ @topics = Array.new(metadata[:topics_count]) do |i|
37
+ topic = TopicMetadata.new(metadata[:topics_metadata] + (i * TopicMetadata.size))
38
+ raise Rdkafka::RdkafkaError.new(topic[:rd_kafka_resp_err]) unless topic[:rd_kafka_resp_err].zero?
39
+
40
+ partitions = Array.new(topic[:partition_count]) do |j|
41
+ partition = PartitionMetadata.new(topic[:partitions_metadata] + (j * PartitionMetadata.size))
42
+ raise Rdkafka::RdkafkaError.new(partition[:rd_kafka_resp_err]) unless partition[:rd_kafka_resp_err].zero?
43
+ partition.to_h
44
+ end
45
+ topic.to_h.merge!(partitions: partitions)
46
+ end
47
+ end
48
+
49
+ class CustomFFIStruct < FFI::Struct
50
+ def to_h
51
+ members.each_with_object({}) do |mem, hsh|
52
+ val = self.[](mem)
53
+ next if val.is_a?(FFI::Pointer) || mem == :rd_kafka_resp_err
54
+
55
+ hsh[mem] = self.[](mem)
56
+ end
57
+ end
58
+ end
59
+
60
+ class Metadata < CustomFFIStruct
61
+ layout :brokers_count, :int,
62
+ :brokers_metadata, :pointer,
63
+ :topics_count, :int,
64
+ :topics_metadata, :pointer,
65
+ :broker_id, :int32,
66
+ :broker_name, :string
67
+ end
68
+
69
+ class BrokerMetadata < CustomFFIStruct
70
+ layout :broker_id, :int32,
71
+ :broker_name, :string,
72
+ :broker_port, :int
73
+ end
74
+
75
+ class TopicMetadata < CustomFFIStruct
76
+ layout :topic_name, :string,
77
+ :partition_count, :int,
78
+ :partitions_metadata, :pointer,
79
+ :rd_kafka_resp_err, :int
80
+ end
81
+
82
+ class PartitionMetadata < CustomFFIStruct
83
+ layout :partition_id, :int32,
84
+ :rd_kafka_resp_err, :int,
85
+ :leader, :int32,
86
+ :replica_count, :int,
87
+ :replicas, :pointer,
88
+ :in_sync_replica_brokers, :int,
89
+ :isrs, :pointer
90
+ end
91
+ end
92
+ end
@@ -0,0 +1,47 @@
1
module Rdkafka
  class Producer
    # Owns the native producer handle and runs a background thread that polls
    # librdkafka so queued delivery callbacks get dispatched.
    class Client
      # @return [FFI::Pointer] the native rd_kafka_t handle
      attr_reader :native

      def initialize(native)
        @native = native

        # Background polling loop; keeps firing delivery callbacks until close
        # has been requested *and* librdkafka's out-queue has drained.
        @polling_thread = Thread.new do
          loop do
            Rdkafka::Bindings.rd_kafka_poll(native, 250)
            break if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(native).zero?
          end
        end
        @polling_thread.abort_on_exception = true
        @polling_thread[:closing] = false
      end

      # Finalizer proc suitable for ObjectSpace.define_finalizer; closes the
      # client when the owning producer is garbage collected.
      def finalizer
        ->(_) { close }
      end

      # @return [Boolean] whether the native handle has already been destroyed
      def closed?
        @native.nil?
      end

      # Signals the polling thread to wind down, waits for remaining delivery
      # callbacks to flush and destroys the native handle. Idempotent.
      def close(object_id = nil)
        return if closed?

        # Ask the polling thread to stop, then wait until it has drained.
        @polling_thread[:closing] = true
        @polling_thread.join

        Rdkafka::Bindings.rd_kafka_destroy(@native)

        @native = nil
      end
    end
  end
end
@@ -0,0 +1,22 @@
1
module Rdkafka
  class Producer
    # Handle returned when producing a message; call {AbstractHandle#wait wait}
    # on it to block until the broker has acknowledged delivery.
    class DeliveryHandle < Rdkafka::AbstractHandle
      layout :pending, :bool,
             :response, :int,
             :partition, :int,
             :offset, :int64

      # Human-readable operation name used in timeout error messages.
      # @return [String]
      def operation_name
        "delivery"
      end

      # Builds the result object for a completed delivery.
      # @return [DeliveryReport] report with the final partition and offset
      def create_result
        DeliveryReport.new(self[:partition], self[:offset])
      end
    end
  end
end
@@ -0,0 +1,26 @@
1
module Rdkafka
  class Producer
    # Delivery report for a successfully produced message.
    class DeliveryReport
      # @return [Integer] the partition this message was produced to
      attr_reader :partition

      # @return [Integer] the offset of the produced message
      attr_reader :offset

      # @return [String, nil] error that occurred while producing, if any
      attr_reader :error

      private

      # Instances are built internally (via .new) when a delivery finishes.
      def initialize(partition, offset, error = nil)
        @partition = partition
        @offset = offset
        @error = error
      end
    end
  end
end
@@ -0,0 +1,178 @@
1
require "objspace"

module Rdkafka
  # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
  class Producer
    # @private
    # Returns the current delivery callback, by default this is nil.
    #
    # @return [Proc, nil]
    attr_reader :delivery_callback

    # @private
    # Returns the number of arguments accepted by the callback, by default this is nil.
    #
    # @return [Integer, nil]
    attr_reader :delivery_callback_arity

    # @private
    # @param client [Client] wrapper around the native producer handle
    # @param partitioner_name [String, nil] librdkafka partitioner used for
    #   partition_key based assignment; defaults to "consistent_random"
    def initialize(client, partitioner_name)
      @client = client
      @partitioner_name = partitioner_name || "consistent_random"

      # Makes sure, that the producer gets closed before it gets GCed by Ruby
      ObjectSpace.define_finalizer(self, client.finalizer)
    end

    # Set a callback that will be called every time a message is successfully produced.
    # The callback is called with a {DeliveryReport} and {DeliveryHandle}
    #
    # @param callback [Proc, #call] The callback
    #
    # @raise [TypeError] when the argument does not respond to #call
    # @return [nil]
    def delivery_callback=(callback)
      raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
      @delivery_callback = callback
      @delivery_callback_arity = arity(callback)
    end

    # Close this producer and wait for the internal poll queue to empty.
    def close
      ObjectSpace.undefine_finalizer(self)

      @client.close
    end

    # Partition count for a given topic.
    # NOTE: If 'allow.auto.create.topics' is set to true in the broker, the topic will be auto-created after returning nil.
    #
    # @param topic [String] The topic name.
    #
    # @return [Integer, nil] partition count, or nil when the topic is unknown
    def partition_count(topic)
      closed_producer_check(__method__)
      # Bug fix: `topics&.first[:partition_count]` only guarded `topics` being
      # nil — with an empty topics list, `first` returned nil and indexing it
      # raised NoMethodError. Guard both steps explicitly.
      topic_metadata = Rdkafka::Metadata.new(@client.native, topic).topics&.first
      topic_metadata && topic_metadata[:partition_count]
    end

    # Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
    #
    # When no partition is specified the underlying Kafka library picks a partition based on the key. If no key is specified, a random partition will be used.
    # When a timestamp is provided this is used instead of the auto-generated timestamp.
    #
    # @param topic [String] The topic to produce to
    # @param payload [String,nil] The message's payload
    # @param key [String, nil] The message's key
    # @param partition [Integer,nil] Optional partition to produce to
    # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
    # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
    # @param headers [Hash<String,String>] Optional message headers
    #
    # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
    #
    # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
    def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil)
      closed_producer_check(__method__)

      # Start by checking and converting the input

      # Get payload length
      payload_size = payload.nil? ? 0 : payload.bytesize

      # Get key length
      key_size = key.nil? ? 0 : key.bytesize

      if partition_key
        partition_count = partition_count(topic)
        # If the topic is not present, set to -1
        partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count
      end

      # If partition is nil, use -1 to let librdkafka set the partition randomly or
      # based on the key when present.
      partition ||= -1

      # If timestamp is nil use 0 and let Kafka set one. If an integer or time
      # use it.
      raw_timestamp = if timestamp.nil?
                        0
                      elsif timestamp.is_a?(Integer)
                        timestamp
                      elsif timestamp.is_a?(Time)
                        (timestamp.to_i * 1000) + (timestamp.usec / 1000)
                      else
                        raise TypeError.new("Timestamp has to be nil, an Integer or a Time")
                      end

      delivery_handle = DeliveryHandle.new
      delivery_handle[:pending] = true
      delivery_handle[:response] = -1
      delivery_handle[:partition] = -1
      delivery_handle[:offset] = -1
      # Register so the delivery callback can find this handle by address.
      DeliveryHandle.register(delivery_handle)

      args = [
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_TOPIC, :string, topic,
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_MSGFLAGS, :int, Rdkafka::Bindings::RD_KAFKA_MSG_F_COPY,
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_VALUE, :buffer_in, payload, :size_t, payload_size,
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_KEY, :buffer_in, key, :size_t, key_size,
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_PARTITION, :int32, partition,
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_TIMESTAMP, :int64, raw_timestamp,
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_OPAQUE, :pointer, delivery_handle,
      ]

      if headers
        headers.each do |key0, value0|
          key = key0.to_s
          value = value0.to_s
          args << :int << Rdkafka::Bindings::RD_KAFKA_VTYPE_HEADER
          args << :string << key
          args << :pointer << value
          # bytesize is O(1); value.bytes.size materialized a full byte Array
          # per header just to count it.
          args << :size_t << value.bytesize
        end
      end

      args << :int << Rdkafka::Bindings::RD_KAFKA_VTYPE_END

      # Produce the message
      response = Rdkafka::Bindings.rd_kafka_producev(
        @client.native,
        *args
      )

      # Raise error if the produce call was not successful
      if response != 0
        DeliveryHandle.remove(delivery_handle.to_ptr.address)
        raise RdkafkaError.new(response)
      end

      delivery_handle
    end

    # @private
    # Invokes the user delivery callback (if any) with as many arguments as it
    # declared (1 or 2).
    def call_delivery_callback(delivery_report, delivery_handle)
      return unless @delivery_callback

      args = [delivery_report, delivery_handle].take(@delivery_callback_arity)
      @delivery_callback.call(*args)
    end

    # @private
    # Number of positional arguments the given callable accepts.
    def arity(callback)
      return callback.arity if callback.respond_to?(:arity)

      callback.method(:call).arity
    end

    # @private
    # @raise [Rdkafka::ClosedProducerError] when the underlying client is closed
    def closed_producer_check(method)
      raise Rdkafka::ClosedProducerError.new(method) if @client.closed?
    end
  end
end
@@ -0,0 +1,5 @@
1
module Rdkafka
  # Gem version.
  VERSION = "0.12.0"
  # Version of librdkafka this gem downloads and builds against.
  LIBRDKAFKA_VERSION = "1.9.0"
  # SHA256 checksum used to verify the librdkafka source tarball.
  LIBRDKAFKA_SOURCE_SHA256 = "59b6088b69ca6cf278c3f9de5cd6b7f3fd604212cd1c59870bc531c54147e889"
end
data/lib/rdkafka.rb ADDED
@@ -0,0 +1,22 @@
1
# Entry point for the rdkafka gem. Require order is load-bearing: version and
# the abstract handle come first because later files reference them at load time.
require "rdkafka/version"

require "rdkafka/abstract_handle"
require "rdkafka/admin"
require "rdkafka/admin/create_topic_handle"
require "rdkafka/admin/create_topic_report"
require "rdkafka/admin/delete_topic_handle"
require "rdkafka/admin/delete_topic_report"
require "rdkafka/bindings"
require "rdkafka/callbacks"
require "rdkafka/config"
require "rdkafka/consumer"
require "rdkafka/consumer/headers"
require "rdkafka/consumer/message"
require "rdkafka/consumer/partition"
require "rdkafka/consumer/topic_partition_list"
require "rdkafka/error"
require "rdkafka/metadata"
require "rdkafka/producer"
require "rdkafka/producer/client"
require "rdkafka/producer/delivery_handle"
require "rdkafka/producer/delivery_report"
data/rdkafka.gemspec ADDED
@@ -0,0 +1,36 @@
1
require File.expand_path('lib/rdkafka/version', __dir__)

Gem::Specification.new do |gem|
  gem.authors = ['Thijs Cadier']
  gem.email = ["thijs@appsignal.com"]
  gem.description = "Modern Kafka client library for Ruby based on librdkafka"
  gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.4+."
  gem.license = 'MIT'
  gem.homepage = 'https://github.com/thijsc/rdkafka-ruby'

  # Package every git-tracked file; $\ is the output record separator.
  gem.files = `git ls-files`.split($\)
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = 'karafka-rdkafka'
  gem.require_paths = ['lib']
  gem.version = Rdkafka::VERSION
  gem.required_ruby_version = '>= 2.6'
  # Native extension: ext/Rakefile downloads and builds librdkafka on install.
  gem.extensions = %w(ext/Rakefile)

  # Only sign when invoked through the `gem` executable (i.e. when packaging).
  if $PROGRAM_NAME.end_with?('gem')
    gem.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
  end

  gem.cert_chain = %w[certs/cert_chain.pem]

  gem.add_dependency 'ffi', '~> 1.15'
  gem.add_dependency 'mini_portile2', '~> 2.6'
  gem.add_dependency 'rake', '> 12'

  gem.add_development_dependency 'pry'
  gem.add_development_dependency 'rspec', '~> 3.5'
  gem.add_development_dependency 'rake'
  gem.add_development_dependency 'simplecov'
  gem.add_development_dependency 'guard'
  gem.add_development_dependency 'guard-rspec'
end
@@ -0,0 +1,113 @@
1
require "spec_helper"

# Specs for the waitable-handle base class shared by delivery and admin handles.
describe Rdkafka::AbstractHandle do
  let(:response) { 0 }
  let(:result) { -1 }

  context "A subclass that does not implement the required methods" do

    # Deliberately incomplete subclass: no operation_name / create_result.
    class BadTestHandle < Rdkafka::AbstractHandle
      layout :pending, :bool,
             :response, :int
    end

    it "raises an exception if operation_name is called" do
      expect {
        BadTestHandle.new.operation_name
      }.to raise_exception(RuntimeError, /Must be implemented by subclass!/)
    end

    it "raises an exception if create_result is called" do
      expect {
        BadTestHandle.new.create_result
      }.to raise_exception(RuntimeError, /Must be implemented by subclass!/)
    end
  end

  # Fully implemented subclass used by the remaining examples.
  class TestHandle < Rdkafka::AbstractHandle
    layout :pending, :bool,
           :response, :int,
           :result, :int

    def operation_name
      "test_operation"
    end

    def create_result
      self[:result]
    end
  end

  subject do
    TestHandle.new.tap do |handle|
      handle[:pending] = pending_handle
      handle[:response] = response
      handle[:result] = result
    end
  end

  describe ".register and .remove" do
    let(:pending_handle) { true }

    it "should register and remove a delivery handle" do
      Rdkafka::AbstractHandle.register(subject)
      removed = Rdkafka::AbstractHandle.remove(subject.to_ptr.address)
      expect(removed).to eq subject
      expect(Rdkafka::AbstractHandle::REGISTRY).to be_empty
    end
  end

  describe "#pending?" do
    context "when true" do
      let(:pending_handle) { true }

      it "should be true" do
        expect(subject.pending?).to be true
      end
    end

    context "when not true" do
      let(:pending_handle) { false }

      it "should be false" do
        expect(subject.pending?).to be false
      end
    end
  end

  describe "#wait" do
    let(:pending_handle) { true }

    it "should wait until the timeout and then raise an error" do
      expect {
        subject.wait(max_wait_timeout: 0.1)
      }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
    end

    context "when not pending anymore and no error" do
      let(:pending_handle) { false }
      let(:result) { 1 }

      it "should return a result" do
        wait_result = subject.wait
        expect(wait_result).to eq(result)
      end

      it "should wait without a timeout" do
        wait_result = subject.wait(max_wait_timeout: nil)
        expect(wait_result).to eq(result)
      end
    end

    context "when not pending anymore and there was an error" do
      let(:pending_handle) { false }
      # 20 is a non-zero librdkafka response code, so #wait raises.
      let(:response) { 20 }

      it "should raise an rdkafka error" do
        expect {
          subject.wait
        }.to raise_error Rdkafka::RdkafkaError
      end
    end
  end
end
@@ -0,0 +1,52 @@
1
require "spec_helper"

# Specs for the waitable handle returned by Admin#create_topic.
describe Rdkafka::Admin::CreateTopicHandle do
  let(:response) { 0 }

  subject do
    Rdkafka::Admin::CreateTopicHandle.new.tap do |handle|
      handle[:pending] = pending_handle
      handle[:response] = response
      # NULL error string means "no error"; result_name is the created topic.
      handle[:error_string] = FFI::Pointer::NULL
      handle[:result_name] = FFI::MemoryPointer.from_string("my-test-topic")
    end
  end

  describe "#wait" do
    let(:pending_handle) { true }

    it "should wait until the timeout and then raise an error" do
      expect {
        subject.wait(max_wait_timeout: 0.1)
      }.to raise_error Rdkafka::Admin::CreateTopicHandle::WaitTimeoutError, /create topic/
    end

    context "when not pending anymore and no error" do
      let(:pending_handle) { false }

      it "should return a create topic report" do
        report = subject.wait

        expect(report.error_string).to eq(nil)
        expect(report.result_name).to eq("my-test-topic")
      end

      it "should wait without a timeout" do
        report = subject.wait(max_wait_timeout: nil)

        expect(report.error_string).to eq(nil)
        expect(report.result_name).to eq("my-test-topic")
      end
    end
  end

  describe "#raise_error" do
    let(:pending_handle) { false }

    it "should raise the appropriate error" do
      expect {
        subject.raise_error
      }.to raise_exception(Rdkafka::RdkafkaError, /Success \(no_error\)/)
    end
  end
end
@@ -0,0 +1,16 @@
1
require "spec_helper"

# Specs for the value object built from a finished create-topic operation.
describe Rdkafka::Admin::CreateTopicReport do
  # The report reads both fields from native C strings.
  subject { Rdkafka::Admin::CreateTopicReport.new(
    FFI::MemoryPointer.from_string("error string"),
    FFI::MemoryPointer.from_string("result name")
  )}

  it "should get the error string" do
    expect(subject.error_string).to eq("error string")
  end

  it "should get the result name" do
    expect(subject.result_name).to eq("result name")
  end
end
@@ -0,0 +1,52 @@
1
require "spec_helper"

# Specs for the waitable handle returned by Admin#delete_topic.
describe Rdkafka::Admin::DeleteTopicHandle do
  let(:response) { 0 }

  subject do
    Rdkafka::Admin::DeleteTopicHandle.new.tap do |handle|
      handle[:pending] = pending_handle
      handle[:response] = response
      # NULL error string means "no error"; result_name is the deleted topic.
      handle[:error_string] = FFI::Pointer::NULL
      handle[:result_name] = FFI::MemoryPointer.from_string("my-test-topic")
    end
  end

  describe "#wait" do
    let(:pending_handle) { true }

    it "should wait until the timeout and then raise an error" do
      expect {
        subject.wait(max_wait_timeout: 0.1)
      }.to raise_error Rdkafka::Admin::DeleteTopicHandle::WaitTimeoutError, /delete topic/
    end

    context "when not pending anymore and no error" do
      let(:pending_handle) { false }

      it "should return a delete topic report" do
        report = subject.wait

        expect(report.error_string).to eq(nil)
        expect(report.result_name).to eq("my-test-topic")
      end

      it "should wait without a timeout" do
        report = subject.wait(max_wait_timeout: nil)

        expect(report.error_string).to eq(nil)
        expect(report.result_name).to eq("my-test-topic")
      end
    end
  end

  describe "#raise_error" do
    let(:pending_handle) { false }

    it "should raise the appropriate error" do
      expect {
        subject.raise_error
      }.to raise_exception(Rdkafka::RdkafkaError, /Success \(no_error\)/)
    end
  end
end
@@ -0,0 +1,16 @@
1
require "spec_helper"

# Specs for the value object built from a finished delete-topic operation.
describe Rdkafka::Admin::DeleteTopicReport do
  # The report reads both fields from native C strings.
  subject { Rdkafka::Admin::DeleteTopicReport.new(
    FFI::MemoryPointer.from_string("error string"),
    FFI::MemoryPointer.from_string("result name")
  )}

  it "should get the error string" do
    expect(subject.error_string).to eq("error string")
  end

  it "should get the result name" do
    expect(subject.result_name).to eq("result name")
  end
end