rdkafka 0.8.0 → 0.8.1

Files changed (38)
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +23 -0
  3. data/CHANGELOG.md +6 -0
  4. data/README.md +5 -2
  5. data/docker-compose.yml +2 -0
  6. data/ext/Rakefile +1 -1
  7. data/lib/rdkafka.rb +7 -0
  8. data/lib/rdkafka/abstract_handle.rb +82 -0
  9. data/lib/rdkafka/admin.rb +144 -0
  10. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  11. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  12. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  13. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  14. data/lib/rdkafka/bindings.rb +44 -17
  15. data/lib/rdkafka/callbacks.rb +106 -0
  16. data/lib/rdkafka/config.rb +14 -1
  17. data/lib/rdkafka/consumer.rb +35 -5
  18. data/lib/rdkafka/error.rb +29 -3
  19. data/lib/rdkafka/metadata.rb +6 -5
  20. data/lib/rdkafka/producer.rb +13 -2
  21. data/lib/rdkafka/producer/delivery_handle.rb +7 -53
  22. data/lib/rdkafka/version.rb +1 -1
  23. data/spec/rdkafka/abstract_handle_spec.rb +114 -0
  24. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  25. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  26. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  27. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  28. data/spec/rdkafka/admin_spec.rb +192 -0
  29. data/spec/rdkafka/callbacks_spec.rb +20 -0
  30. data/spec/rdkafka/config_spec.rb +11 -0
  31. data/spec/rdkafka/consumer_spec.rb +34 -2
  32. data/spec/rdkafka/error_spec.rb +4 -0
  33. data/spec/rdkafka/metadata_spec.rb +78 -0
  34. data/spec/rdkafka/producer/delivery_handle_spec.rb +1 -41
  35. data/spec/rdkafka/producer_spec.rb +22 -0
  36. data/spec/spec_helper.rb +28 -11
  37. metadata +26 -3
  38. data/.travis.yml +0 -48
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 8bf97d10412b4f3c0801f657796fd05c94ceede3caafe6a45719c840b534752a
-   data.tar.gz: 8cc93bffb8119cf9c97aa60cf1f6d634e66a99686e0785150ae51283a6514e8c
+   metadata.gz: d09f2751b883400c550fee28fca85ffdd3e9f8090a9ba8e7bb22eadcba5c05c6
+   data.tar.gz: 107a5b723b40ec43b10f02b1993f42ff4b30a4ba16efc21611efb860de798a3a
  SHA512:
-   metadata.gz: e8e085b3dd9d80d77003d2c7f0fbda69371c233b121eac2594556d7d62da99a4bed4aace93e9489783c789e64bafd732faaad3de2ffb5d3a5af8568c5b43676b
-   data.tar.gz: 5a267a9814e87e876544e04e6657829c298ff01b03cb5ff41a0205795ea2fb4f6a2147ee1a0949e52f7a069f5dc73d5ed2072867194032e8ea913ac5d6871e23
+   metadata.gz: 373bd71c9e1f3c1fadd4deb9e67bec7aa5821e0a7835c851f1bb64fb1ad142692145e01efa62ac357d333eebb76cef7d676ddfa53ef3a9cd08720060b4bc0937
+   data.tar.gz: ec6d8932a2987e419f6fd731ff038cb88d9d898eab75d7eff82a6e606fcb71c249dd20f6d200bda44629f4bb81550c06ef4e8083ac1b4736c57932ab2486fdbc
data/.semaphore/semaphore.yml ADDED
@@ -0,0 +1,23 @@
+ version: v1.0
+ name: Rdkafka Ruby
+
+ agent:
+   machine:
+     type: e1-standard-2
+     os_image: ubuntu1804
+
+ blocks:
+   - name: Run specs
+     task:
+       jobs:
+       - name: bundle exec rspec
+         matrix:
+         - env_var: RUBY_VERSION
+           values: [ "2.5.8", "2.6.6", "2.7.2", "jruby-9.2.13.0" ]
+         commands:
+           - sem-version ruby $RUBY_VERSION
+           - checkout
+           - bundle install --path vendor/bundle
+           - cd ext && bundle exec rake && cd ..
+           - docker-compose up -d --no-recreate
+           - bundle exec rspec
data/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+ # 0.8.1
+ * Fix topic_flag behaviour and add tests for Metadata (geoff2k)
+ * Add topic admin interface (geoff2k)
+ * Raise an exception if @native_kafka is nil (geoff2k)
+ * Option to use zstd compression (jasonmartens)
+
  # 0.8.0
  * Upgrade librdkafka to 1.4.0
  * Integrate librdkafka metadata API and add partition_key (by Adithya-copart)
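One of the 0.8.1 entries above, zstd compression support, is backed by the new librdkafka build flags in `ext/Rakefile` further down. As a hedged illustration (the broker must also support zstd, `compression.codec` is a standard librdkafka configuration key, and the broker address below is a placeholder), a producer could opt in like this:

```ruby
require "rdkafka"

# Illustrative only: enable zstd compression for produced messages.
config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",  # placeholder broker address
  "compression.codec" => "zstd"             # requires librdkafka built with --enable-zstd
)

producer = config.producer
producer.produce(topic: "events", payload: "hello", key: "k1").wait
producer.close
```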
data/README.md CHANGED
@@ -1,9 +1,8 @@
  # Rdkafka

- [![Build Status](https://travis-ci.org/appsignal/rdkafka-ruby.svg?branch=master)](https://travis-ci.org/appsignal/rdkafka-ruby)
+ [![Build Status](https://appsignal.semaphoreci.com/badges/rdkafka-ruby/branches/master.svg?style=shields)](https://appsignal.semaphoreci.com/projects/rdkafka-ruby)
  [![Gem Version](https://badge.fury.io/rb/rdkafka.svg)](https://badge.fury.io/rb/rdkafka)
  [![Maintainability](https://api.codeclimate.com/v1/badges/ecb1765f81571cccdb0e/maintainability)](https://codeclimate.com/github/appsignal/rdkafka-ruby/maintainability)
- [![Test Coverage](https://api.codeclimate.com/v1/badges/ecb1765f81571cccdb0e/test_coverage)](https://codeclimate.com/github/appsignal/rdkafka-ruby/test_coverage)

  The `rdkafka` gem is a modern Kafka client library for Ruby based on
  [librdkafka](https://github.com/edenhill/librdkafka/).
@@ -72,6 +71,10 @@ end
  delivery_handles.each(&:wait)
  ```

+ Note that creating a producer consumes some resources that will not be
+ released until `#close` is explicitly called, so be sure to call
+ `Config#producer` only as necessary.
+
  ## Development

  A Docker Compose file is included to run Kafka and Zookeeper. To run
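As a short usage sketch for the producer note above (broker address and topic name are placeholders), a single producer can be reused for many messages and then released explicitly:

```ruby
require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

# Reuse one producer rather than creating a new one per message.
handles = 10.times.map do |i|
  producer.produce(topic: "ruby-test-topic", payload: "Payload #{i}", key: "Key #{i}")
end
handles.each(&:wait)

# Free the underlying librdkafka resources when done.
producer.close
```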
data/docker-compose.yml CHANGED
@@ -1,5 +1,7 @@
+ ---

  version: '2'
+
  services:
    zookeeper:
      image: confluentinc/cp-zookeeper:latest
data/ext/Rakefile CHANGED
@@ -65,7 +65,7 @@ namespace :build do

    recipe = MiniPortile.new("librdkafka", version)
    recipe.files << "https://github.com/edenhill/librdkafka/archive/#{ref}.tar.gz"
-   recipe.configure_options = ["--host=#{recipe.host}"]
+   recipe.configure_options = ["--host=#{recipe.host}", "--enable-static", "--enable-zstd"]
    recipe.cook

    ext = recipe.host.include?("darwin") ? "dylib" : "so"
data/lib/rdkafka.rb CHANGED
@@ -1,6 +1,13 @@
  require "rdkafka/version"

+ require "rdkafka/abstract_handle"
+ require "rdkafka/admin"
+ require "rdkafka/admin/create_topic_handle"
+ require "rdkafka/admin/create_topic_report"
+ require "rdkafka/admin/delete_topic_handle"
+ require "rdkafka/admin/delete_topic_report"
  require "rdkafka/bindings"
+ require "rdkafka/callbacks"
  require "rdkafka/config"
  require "rdkafka/consumer"
  require "rdkafka/consumer/headers"
data/lib/rdkafka/abstract_handle.rb ADDED
@@ -0,0 +1,82 @@
+ require "ffi"
+
+ module Rdkafka
+   class AbstractHandle < FFI::Struct
+     # Subclasses must define their own layout, and the layout must start with:
+     #
+     # layout :pending, :bool,
+     #        :response, :int
+
+     REGISTRY = {}
+
+     CURRENT_TIME = -> { Process.clock_gettime(Process::CLOCK_MONOTONIC) }.freeze
+
+     private_constant :CURRENT_TIME
+
+     def self.register(handle)
+       address = handle.to_ptr.address
+       REGISTRY[address] = handle
+     end
+
+     def self.remove(address)
+       REGISTRY.delete(address)
+     end
+
+     # Whether the handle is still pending.
+     #
+     # @return [Boolean]
+     def pending?
+       self[:pending]
+     end
+
+     # Wait for the operation to complete or raise an error if this takes longer than the timeout.
+     # If there is a timeout this does not mean the operation failed, rdkafka might still be working on the operation.
+     # In this case it is possible to call wait again.
+     #
+     # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out. If this is nil it does not time out.
+     # @param wait_timeout [Numeric] Amount of time we should wait before we recheck if the operation has completed
+     #
+     # @raise [RdkafkaError] When the operation failed
+     # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
+     #
+     # @return [Object] Operation-specific result
+     def wait(max_wait_timeout: 60, wait_timeout: 0.1)
+       timeout = if max_wait_timeout
+                   CURRENT_TIME.call + max_wait_timeout
+                 else
+                   nil
+                 end
+       loop do
+         if pending?
+           if timeout && timeout <= CURRENT_TIME.call
+             raise WaitTimeoutError.new("Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds")
+           end
+           sleep wait_timeout
+         elsif self[:response] != 0
+           raise_error
+         else
+           return create_result
+         end
+       end
+     end
+
+     # @return [String] the name of the operation (e.g. "delivery")
+     def operation_name
+       raise "Must be implemented by subclass!"
+     end
+
+     # @return [Object] operation-specific result
+     def create_result
+       raise "Must be implemented by subclass!"
+     end
+
+     # Allow subclasses to override
+     def raise_error
+       raise RdkafkaError.new(self[:response])
+     end
+
+     # Error that is raised when waiting for the handle to complete
+     # takes longer than the specified timeout.
+     class WaitTimeoutError < RuntimeError; end
+   end
+ end
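As a hedged sketch of the wait semantics documented above, a concrete handle such as the producer's delivery handle can be polled with explicit timeouts; timing out does not mean the operation failed, so it is valid to wait again:

```ruby
# Illustrative only; `producer` is assumed to exist (see the README example above).
handle = producer.produce(topic: "ruby-test-topic", payload: "hello")

begin
  # Recheck every 50 ms, give up after 10 seconds.
  report = handle.wait(max_wait_timeout: 10, wait_timeout: 0.05)
  puts "Delivered to partition #{report.partition} at offset #{report.offset}"
rescue Rdkafka::AbstractHandle::WaitTimeoutError
  # rdkafka may still complete the delivery; wait once more without a time limit.
  report = handle.wait(max_wait_timeout: nil)
end
```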
data/lib/rdkafka/admin.rb ADDED
@@ -0,0 +1,144 @@
+ module Rdkafka
+   class Admin
+     # @private
+     def initialize(native_kafka)
+       @native_kafka = native_kafka
+       @closing = false
+
+       # Start thread to poll client for callbacks
+       @polling_thread = Thread.new do
+         loop do
+           Rdkafka::Bindings.rd_kafka_poll(@native_kafka, 250)
+           # Exit thread if closing and the poll queue is empty
+           if @closing && Rdkafka::Bindings.rd_kafka_outq_len(@native_kafka) == 0
+             break
+           end
+         end
+       end
+       @polling_thread.abort_on_exception = true
+     end
+
+     # Close this admin instance
+     def close
+       return unless @native_kafka
+
+       # Indicate to polling thread that we're closing
+       @closing = true
+       # Wait for the polling thread to finish up
+       @polling_thread.join
+       Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
+       @native_kafka = nil
+     end
+
+     # Create a topic with the given partition count and replication factor
+     #
+     # @raise [ConfigError] When the partition count or replication factor are out of valid range
+     # @raise [RdkafkaError] When the topic name is invalid or the topic already exists
+     #
+     # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
+     def create_topic(topic_name, partition_count, replication_factor)
+
+       # Create a rd_kafka_NewTopic_t representing the new topic
+       error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+       new_topic_ptr = Rdkafka::Bindings.rd_kafka_NewTopic_new(
+         FFI::MemoryPointer.from_string(topic_name),
+         partition_count,
+         replication_factor,
+         error_buffer,
+         256
+       )
+       if new_topic_ptr.null?
+         raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+       end
+
+       # Note that rd_kafka_CreateTopics can create more than one topic at a time
+       pointer_array = [new_topic_ptr]
+       topics_array_ptr = FFI::MemoryPointer.new(:pointer)
+       topics_array_ptr.write_array_of_pointer(pointer_array)
+
+       # Get a pointer to the queue that our request will be enqueued on
+       queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+       if queue_ptr.null?
+         Rdkafka::Bindings.rd_kafka_NewTopic_destroy(new_topic_ptr)
+         raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+       end
+
+       # Create and register the handle we will return to the caller
+       create_topic_handle = CreateTopicHandle.new
+       create_topic_handle[:pending] = true
+       create_topic_handle[:response] = -1
+       CreateTopicHandle.register(create_topic_handle)
+       admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATETOPICS)
+       Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, create_topic_handle.to_ptr)
+
+       begin
+         Rdkafka::Bindings.rd_kafka_CreateTopics(
+           @native_kafka,
+           topics_array_ptr,
+           1,
+           admin_options_ptr,
+           queue_ptr
+         )
+       rescue Exception => err
+         CreateTopicHandle.remove(create_topic_handle.to_ptr.address)
+         raise
+       ensure
+         Rdkafka::Bindings.rd_kafka_AdminOptions_destroy(admin_options_ptr)
+         Rdkafka::Bindings.rd_kafka_queue_destroy(queue_ptr)
+         Rdkafka::Bindings.rd_kafka_NewTopic_destroy(new_topic_ptr)
+       end
+
+       create_topic_handle
+     end
+
+     # Delete the named topic
+     #
+     # @raise [RdkafkaError] When the topic name is invalid or the topic does not exist
+     #
+     # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
+     def delete_topic(topic_name)
+
+       # Create a rd_kafka_DeleteTopic_t representing the topic to be deleted
+       delete_topic_ptr = Rdkafka::Bindings.rd_kafka_DeleteTopic_new(FFI::MemoryPointer.from_string(topic_name))
+
+       # Note that rd_kafka_DeleteTopics can delete more than one topic at a time
+       pointer_array = [delete_topic_ptr]
+       topics_array_ptr = FFI::MemoryPointer.new(:pointer)
+       topics_array_ptr.write_array_of_pointer(pointer_array)
+
+       # Get a pointer to the queue that our request will be enqueued on
+       queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+       if queue_ptr.null?
+         Rdkafka::Bindings.rd_kafka_DeleteTopic_destroy(delete_topic_ptr)
+         raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+       end
+
+       # Create and register the handle we will return to the caller
+       delete_topic_handle = DeleteTopicHandle.new
+       delete_topic_handle[:pending] = true
+       delete_topic_handle[:response] = -1
+       DeleteTopicHandle.register(delete_topic_handle)
+       admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DELETETOPICS)
+       Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, delete_topic_handle.to_ptr)
+
+       begin
+         Rdkafka::Bindings.rd_kafka_DeleteTopics(
+           @native_kafka,
+           topics_array_ptr,
+           1,
+           admin_options_ptr,
+           queue_ptr
+         )
+       rescue Exception => err
+         DeleteTopicHandle.remove(delete_topic_handle.to_ptr.address)
+         raise
+       ensure
+         Rdkafka::Bindings.rd_kafka_AdminOptions_destroy(admin_options_ptr)
+         Rdkafka::Bindings.rd_kafka_queue_destroy(queue_ptr)
+         Rdkafka::Bindings.rd_kafka_DeleteTopic_destroy(delete_topic_ptr)
+       end
+
+       delete_topic_handle
+     end
+   end
+ end
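A hedged usage sketch of this new admin interface (assuming `Config#admin`, added to `config.rb` in this release, constructs the `Admin` instance; broker address and topic name are placeholders):

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

# create_topic returns a CreateTopicHandle; wait for the broker's answer.
create_report = admin.create_topic("example-topic", 3, 1).wait(max_wait_timeout: 15)
puts "Created topic #{create_report.result_name}"

# delete_topic works the same way through a DeleteTopicHandle.
admin.delete_topic("example-topic").wait(max_wait_timeout: 15)

# Shut down the polling thread and destroy the native client.
admin.close
```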
data/lib/rdkafka/admin/create_topic_handle.rb ADDED
@@ -0,0 +1,27 @@
+ module Rdkafka
+   class Admin
+     class CreateTopicHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :error_string, :pointer,
+              :result_name, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "create topic"
+       end
+
+       # @return [CreateTopicReport] the report for the create topic operation
+       def create_result
+         CreateTopicReport.new(self[:error_string], self[:result_name])
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
+         )
+       end
+     end
+   end
+ end
data/lib/rdkafka/admin/create_topic_report.rb ADDED
@@ -0,0 +1,22 @@
+ module Rdkafka
+   class Admin
+     class CreateTopicReport
+       # Any error message generated from the CreateTopic
+       # @return [String]
+       attr_reader :error_string
+
+       # The name of the topic created
+       # @return [String]
+       attr_reader :result_name
+
+       def initialize(error_string, result_name)
+         if error_string != FFI::Pointer::NULL
+           @error_string = error_string.read_string
+         end
+         if result_name != FFI::Pointer::NULL
+           @result_name = result_name.read_string
+         end
+       end
+     end
+   end
+ end
data/lib/rdkafka/admin/delete_topic_handle.rb ADDED
@@ -0,0 +1,27 @@
+ module Rdkafka
+   class Admin
+     class DeleteTopicHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :error_string, :pointer,
+              :result_name, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "delete topic"
+       end
+
+       # @return [DeleteTopicReport] the report for the delete topic operation
+       def create_result
+         DeleteTopicReport.new(self[:error_string], self[:result_name])
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: DeleteTopicReport.new(self[:error_string], self[:result_name]).error_string
+         )
+       end
+     end
+   end
+ end
data/lib/rdkafka/admin/delete_topic_report.rb ADDED
@@ -0,0 +1,22 @@
+ module Rdkafka
+   class Admin
+     class DeleteTopicReport
+       # Any error message generated from the DeleteTopic
+       # @return [String]
+       attr_reader :error_string
+
+       # The name of the topic deleted
+       # @return [String]
+       attr_reader :result_name
+
+       def initialize(error_string, result_name)
+         if error_string != FFI::Pointer::NULL
+           @error_string = error_string.read_string
+         end
+         if result_name != FFI::Pointer::NULL
+           @result_name = result_name.read_string
+         end
+       end
+     end
+   end
+ end
data/lib/rdkafka/bindings.rb CHANGED
@@ -245,22 +245,49 @@ module Rdkafka
      rd_kafka_msg_partitioner_consistent_random(nil, str_ptr, str.size, partition_count, nil, nil)
    end

-   DeliveryCallback = FFI::Function.new(
-     :void, [:pointer, :pointer, :pointer]
-   ) do |client_ptr, message_ptr, opaque_ptr|
-     message = Message.new(message_ptr)
-     delivery_handle_ptr_address = message[:_private].address
-     if delivery_handle = Rdkafka::Producer::DeliveryHandle.remove(delivery_handle_ptr_address)
-       # Update delivery handle
-       delivery_handle[:pending] = false
-       delivery_handle[:response] = message[:err]
-       delivery_handle[:partition] = message[:partition]
-       delivery_handle[:offset] = message[:offset]
-       # Call delivery callback on opaque
-       if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
-         opaque.call_delivery_callback(Rdkafka::Producer::DeliveryReport.new(message[:partition], message[:offset], message[:err]))
-       end
-     end
-   end
+   # Create Topics
+
+   RD_KAFKA_ADMIN_OP_CREATETOPICS = 1 # rd_kafka_admin_op_t
+   RD_KAFKA_EVENT_CREATETOPICS_RESULT = 100 # rd_kafka_event_type_t
+
+   attach_function :rd_kafka_CreateTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :void
+   attach_function :rd_kafka_NewTopic_new, [:pointer, :size_t, :size_t, :pointer, :size_t], :pointer
+   attach_function :rd_kafka_NewTopic_destroy, [:pointer], :void
+   attach_function :rd_kafka_event_CreateTopics_result, [:pointer], :pointer
+   attach_function :rd_kafka_CreateTopics_result_topics, [:pointer, :pointer], :pointer
+
+   # Delete Topics
+
+   RD_KAFKA_ADMIN_OP_DELETETOPICS = 2 # rd_kafka_admin_op_t
+   RD_KAFKA_EVENT_DELETETOPICS_RESULT = 101 # rd_kafka_event_type_t
+
+   attach_function :rd_kafka_DeleteTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :int32
+   attach_function :rd_kafka_DeleteTopic_new, [:pointer], :pointer
+   attach_function :rd_kafka_DeleteTopic_destroy, [:pointer], :void
+   attach_function :rd_kafka_event_DeleteTopics_result, [:pointer], :pointer
+   attach_function :rd_kafka_DeleteTopics_result_topics, [:pointer, :pointer], :pointer
+
+   # Background Queue and Callback
+
+   attach_function :rd_kafka_queue_get_background, [:pointer], :pointer
+   attach_function :rd_kafka_conf_set_background_event_cb, [:pointer, :pointer], :void
+   attach_function :rd_kafka_queue_destroy, [:pointer], :void
+
+   # Admin Options
+
+   attach_function :rd_kafka_AdminOptions_new, [:pointer, :int32], :pointer
+   attach_function :rd_kafka_AdminOptions_set_opaque, [:pointer, :pointer], :void
+   attach_function :rd_kafka_AdminOptions_destroy, [:pointer], :void
+
+   # Extracting data from event types
+
+   attach_function :rd_kafka_event_type, [:pointer], :int32
+   attach_function :rd_kafka_event_opaque, [:pointer], :pointer
+
+   # Extracting data from topic results
+
+   attach_function :rd_kafka_topic_result_error, [:pointer], :int32
+   attach_function :rd_kafka_topic_result_error_string, [:pointer], :pointer
+   attach_function :rd_kafka_topic_result_name, [:pointer], :pointer
    end
  end
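These bindings are consumed by the new `callbacks.rb` (added in this release but not shown in this excerpt). As a minimal sketch, not the gem's actual implementation, of how a background event callback could use them to complete a registered `CreateTopicHandle`:

```ruby
# Hedged sketch only; the real dispatch lives in lib/rdkafka/callbacks.rb.
BackgroundEventCallback = FFI::Function.new(
  :void, [:pointer, :pointer, :pointer]
) do |_client_ptr, event_ptr, _opaque_ptr|
  if Rdkafka::Bindings.rd_kafka_event_type(event_ptr) == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
    # The opaque set via rd_kafka_AdminOptions_set_opaque is the handle's address.
    handle_address = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr).address

    if handle = Rdkafka::Admin::CreateTopicHandle::REGISTRY[handle_address]
      # Read the first (and only) topic result out of the CreateTopics result event.
      result_ptr = Rdkafka::Bindings.rd_kafka_event_CreateTopics_result(event_ptr)
      count_ptr = FFI::MemoryPointer.new(:size_t)
      topic_results = Rdkafka::Bindings.rd_kafka_CreateTopics_result_topics(result_ptr, count_ptr)
      topic_result = topic_results.read_pointer

      # Fill the handle's struct fields so a thread blocked in #wait can finish.
      handle[:response] = Rdkafka::Bindings.rd_kafka_topic_result_error(topic_result)
      handle[:error_string] = Rdkafka::Bindings.rd_kafka_topic_result_error_string(topic_result)
      handle[:result_name] = Rdkafka::Bindings.rd_kafka_topic_result_name(topic_result)
      handle[:pending] = false
    end
  end
end
```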