rdkafka 0.15.1 → 0.16.0.rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +2 -5
  4. data/.gitignore +2 -0
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +16 -1
  7. data/README.md +19 -9
  8. data/docker-compose.yml +1 -1
  9. data/ext/Rakefile +8 -0
  10. data/lib/rdkafka/abstract_handle.rb +44 -20
  11. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  12. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  13. data/lib/rdkafka/admin/create_topic_report.rb +1 -1
  14. data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
  15. data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
  16. data/lib/rdkafka/admin/describe_acl_report.rb +1 -0
  17. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  18. data/lib/rdkafka/admin/describe_configs_report.rb +54 -0
  19. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  20. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +54 -0
  21. data/lib/rdkafka/admin.rb +219 -0
  22. data/lib/rdkafka/bindings.rb +86 -3
  23. data/lib/rdkafka/callbacks.rb +103 -19
  24. data/lib/rdkafka/config.rb +69 -15
  25. data/lib/rdkafka/consumer.rb +7 -0
  26. data/lib/rdkafka/helpers/oauth.rb +58 -0
  27. data/lib/rdkafka/native_kafka.rb +32 -19
  28. data/lib/rdkafka/producer.rb +101 -4
  29. data/lib/rdkafka/version.rb +1 -1
  30. data/lib/rdkafka.rb +7 -0
  31. data/rdkafka.gemspec +1 -1
  32. data/spec/rdkafka/abstract_handle_spec.rb +34 -21
  33. data/spec/rdkafka/admin_spec.rb +336 -3
  34. data/spec/rdkafka/bindings_spec.rb +97 -0
  35. data/spec/rdkafka/config_spec.rb +53 -0
  36. data/spec/rdkafka/consumer_spec.rb +54 -0
  37. data/spec/rdkafka/native_kafka_spec.rb +8 -1
  38. data/spec/rdkafka/producer_spec.rb +85 -0
  39. data/spec/spec_helper.rb +16 -1
  40. data.tar.gz.sig +0 -0
  41. metadata +11 -4
  42. metadata.gz.sig +0 -0
data/lib/rdkafka/admin.rb CHANGED
@@ -2,6 +2,52 @@

  module Rdkafka
  class Admin
+ include Helpers::OAuth
+
+ class << self
+ # Allows us to retrieve librdkafka errors with descriptions
+ # Useful for debugging and building UIs, etc.
+ #
+ # @return [Hash<Integer, Hash>] hash with errors mapped by code
+ def describe_errors
+ # Memory pointers for the array of structures and count
+ p_error_descs = FFI::MemoryPointer.new(:pointer)
+ p_count = FFI::MemoryPointer.new(:size_t)
+
+ # Call the attached function
+ Bindings.rd_kafka_get_err_descs(p_error_descs, p_count)
+
+ # Retrieve the number of items in the array
+ count = p_count.read_uint
+
+ # Get the pointer to the array of error descriptions
+ array_of_errors = FFI::Pointer.new(Bindings::NativeErrorDesc, p_error_descs.read_pointer)
+
+ errors = {}
+
+ count.times do |i|
+ # Get the pointer to each struct
+ error_ptr = array_of_errors[i]
+
+ # Create a new instance of NativeErrorDesc for each item
+ error_desc = Bindings::NativeErrorDesc.new(error_ptr)
+
+ # Read values from the struct
+ code = error_desc[:code]
+
+ name = ''
+ desc = ''
+
+ name = error_desc[:name].read_string unless error_desc[:name].null?
+ desc = error_desc[:desc].read_string unless error_desc[:desc].null?
+
+ errors[code] = { code: code, name: name, description: desc }
+ end
+
+ errors
+ end
+ end
+
  # @private
  def initialize(native_kafka)
  @native_kafka = native_kafka
@@ -10,6 +56,19 @@ module Rdkafka
  ObjectSpace.define_finalizer(self, native_kafka.finalizer)
  end

+ # Starts the native Kafka polling thread and kicks off the init polling
+ # @note Not needed to run unless explicit start was disabled
+ def start
+ @native_kafka.start
+ end
+
+ # @return [String] admin name
+ def name
+ @name ||= @native_kafka.with_inner do |inner|
+ ::Rdkafka::Bindings.rd_kafka_name(inner)
+ end
+ end
+
  def finalizer
  ->(_) { close }
  end
@@ -605,6 +664,166 @@ module Rdkafka
  describe_acl_handle
  end

+
+ # Describe configs
+ #
+ # @param resources [Array<Hash>] Array where elements are hashes with two keys:
+ # - `:resource_type` - numerical resource type based on Kafka API
+ # - `:resource_name` - string with resource name
+ # @return [DescribeConfigsHandle] Describe config handle that can be used to wait for the
+ # result of fetching resources with their appropriate configs
+ #
+ # @raise [RdkafkaError]
+ #
+ # @note Several resources can be requested at one go, but only one broker at a time
+ def describe_configs(resources)
+ closed_admin_check(__method__)
+
+ handle = DescribeConfigsHandle.new
+ handle[:pending] = true
+ handle[:response] = -1
+
+ queue_ptr = @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_queue_get_background(inner)
+ end
+
+ if queue_ptr.null?
+ raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+ end
+
+ admin_options_ptr = @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_AdminOptions_new(
+ inner,
+ Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DESCRIBECONFIGS
+ )
+ end
+
+ DescribeConfigsHandle.register(handle)
+ Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, handle.to_ptr)
+
+ pointer_array = resources.map do |resource_details|
+ Rdkafka::Bindings.rd_kafka_ConfigResource_new(
+ resource_details.fetch(:resource_type),
+ FFI::MemoryPointer.from_string(
+ resource_details.fetch(:resource_name)
+ )
+ )
+ end
+
+ configs_array_ptr = FFI::MemoryPointer.new(:pointer, pointer_array.size)
+ configs_array_ptr.write_array_of_pointer(pointer_array)
+
+ begin
+ @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_DescribeConfigs(
+ inner,
+ configs_array_ptr,
+ pointer_array.size,
+ admin_options_ptr,
+ queue_ptr
+ )
+ end
+ rescue Exception
+ DescribeConfigsHandle.remove(handle.to_ptr.address)
+
+ raise
+ ensure
+ Rdkafka::Bindings.rd_kafka_ConfigResource_destroy_array(
+ configs_array_ptr,
+ pointer_array.size
+ ) if configs_array_ptr
+ end
+
+ handle
+ end
+
+ # Alters in an incremental way all the configs provided for given resources
+ #
+ # @param resources_with_configs [Array<Hash>] resources with the configs key that contains
+ # name, value and the proper op_type to perform on this value.
+ #
+ # @return [IncrementalAlterConfigsHandle] Incremental alter configs handle that can be used to
+ # wait for the result of altering resources with their appropriate configs
+ #
+ # @raise [RdkafkaError]
+ #
+ # @note Several resources can be requested at one go, but only one broker at a time
+ # @note The results won't contain altered values but only the altered resources
+ def incremental_alter_configs(resources_with_configs)
+ closed_admin_check(__method__)
+
+ handle = IncrementalAlterConfigsHandle.new
+ handle[:pending] = true
+ handle[:response] = -1
+
+ queue_ptr = @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_queue_get_background(inner)
+ end
+
+ if queue_ptr.null?
+ raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+ end
+
+ admin_options_ptr = @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_AdminOptions_new(
+ inner,
+ Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_INCREMENTALALTERCONFIGS
+ )
+ end
+
+ IncrementalAlterConfigsHandle.register(handle)
+ Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, handle.to_ptr)
+
+ # Build each resource representation correctly here
+ pointer_array = resources_with_configs.map do |resource_details|
+ # First build the appropriate resource representation
+ resource_ptr = Rdkafka::Bindings.rd_kafka_ConfigResource_new(
+ resource_details.fetch(:resource_type),
+ FFI::MemoryPointer.from_string(
+ resource_details.fetch(:resource_name)
+ )
+ )
+
+ resource_details.fetch(:configs).each do |config|
+ Bindings.rd_kafka_ConfigResource_add_incremental_config(
+ resource_ptr,
+ config.fetch(:name),
+ config.fetch(:op_type),
+ config.fetch(:value)
+ )
+ end
+
+ resource_ptr
+ end
+
+ configs_array_ptr = FFI::MemoryPointer.new(:pointer, pointer_array.size)
+ configs_array_ptr.write_array_of_pointer(pointer_array)
+
+
+ begin
+ @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_IncrementalAlterConfigs(
+ inner,
+ configs_array_ptr,
+ pointer_array.size,
+ admin_options_ptr,
+ queue_ptr
+ )
+ end
+ rescue Exception
+ IncrementalAlterConfigsHandle.remove(handle.to_ptr.address)
+
+ raise
+ ensure
+ Rdkafka::Bindings.rd_kafka_ConfigResource_destroy_array(
+ configs_array_ptr,
+ pointer_array.size
+ ) if configs_array_ptr
+ end
+
+ handle
+ end
+
  private

  def closed_admin_check(method)
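The admin changes above are the user-facing part of this release. As a quick orientation, here is a minimal, hedged usage sketch of the new class-level `Rdkafka::Admin.describe_errors` catalogue (it only assumes librdkafka is installed and the gem is loadable):

```ruby
require "rdkafka"

# Hash of { code => { code:, name:, description: } } entries built from
# librdkafka's rd_kafka_get_err_descs, as implemented in the diff above.
errors = Rdkafka::Admin.describe_errors

errors.each_value do |error|
  puts "#{error[:code]} #{error[:name]}: #{error[:description]}"
end
```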
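A sketch of the new `#describe_configs` API follows. The broker address and topic name are illustrative, and `resource_type: 2` assumes the Kafka admin API's numeric code for topic resources (the diff itself only says the value is the numerical resource type):

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").admin

handle = admin.describe_configs(
  [
    { resource_type: 2, resource_name: "example-topic" } # 2 assumed to mean "topic"
  ]
)

# Block until the background queue delivers the result; the report class lives in
# data/lib/rdkafka/admin/describe_configs_report.rb listed above.
report = handle.wait(max_wait_timeout: 15)

admin.close
```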
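And a matching sketch for `#incremental_alter_configs`, using the op-type constants added to `Rdkafka::Bindings` further down in this diff; topic and config names are again illustrative:

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").admin

handle = admin.incremental_alter_configs(
  [
    {
      resource_type: 2, # assumed topic resource type, as in the sketch above
      resource_name: "example-topic",
      configs: [
        {
          name: "delete.retention.ms",
          value: "50000",
          op_type: Rdkafka::Bindings::RD_KAFKA_ALTER_CONFIG_OP_TYPE_SET
        }
      ]
    }
  ]
)

# Per the @note above, the result lists the altered resources, not the new values.
handle.wait(max_wait_timeout: 15)

admin.close
```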
data/lib/rdkafka/bindings.rb CHANGED
@@ -17,6 +17,7 @@ module Rdkafka

  RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS = -175
  RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS = -174
+ RD_KAFKA_RESP_ERR__STATE = -172
  RD_KAFKA_RESP_ERR__NOENT = -156
  RD_KAFKA_RESP_ERR_NO_ERROR = 0

@@ -88,10 +89,58 @@ module Rdkafka
  attach_function :rd_kafka_topic_partition_list_destroy, [:pointer], :void
  attach_function :rd_kafka_topic_partition_list_copy, [:pointer], :pointer

+ # Configs management
+ #
+ # Structs for management of configurations
+ # Each configuration is attached to a resource and one resource can have many configuration
+ # details. Each resource will also have separate errors results if obtaining configuration
+ # was not possible for any reason
+ class ConfigResource < FFI::Struct
+ layout :type, :int,
+ :name, :string
+ end
+
+ attach_function :rd_kafka_DescribeConfigs, [:pointer, :pointer, :size_t, :pointer, :pointer], :void, blocking: true
+ attach_function :rd_kafka_ConfigResource_new, [:int32, :pointer], :pointer
+ attach_function :rd_kafka_ConfigResource_destroy_array, [:pointer, :int32], :void
+ attach_function :rd_kafka_event_DescribeConfigs_result, [:pointer], :pointer
+ attach_function :rd_kafka_DescribeConfigs_result_resources, [:pointer, :pointer], :pointer
+ attach_function :rd_kafka_ConfigResource_configs, [:pointer, :pointer], :pointer
+ attach_function :rd_kafka_ConfigEntry_name, [:pointer], :string
+ attach_function :rd_kafka_ConfigEntry_value, [:pointer], :string
+ attach_function :rd_kafka_ConfigEntry_is_read_only, [:pointer], :int
+ attach_function :rd_kafka_ConfigEntry_is_default, [:pointer], :int
+ attach_function :rd_kafka_ConfigEntry_is_sensitive, [:pointer], :int
+ attach_function :rd_kafka_ConfigEntry_is_synonym, [:pointer], :int
+ attach_function :rd_kafka_ConfigEntry_synonyms, [:pointer, :pointer], :pointer
+ attach_function :rd_kafka_ConfigResource_error, [:pointer], :int
+ attach_function :rd_kafka_ConfigResource_error_string, [:pointer], :string
+ attach_function :rd_kafka_IncrementalAlterConfigs, [:pointer, :pointer, :size_t, :pointer, :pointer], :void, blocking: true
+ attach_function :rd_kafka_IncrementalAlterConfigs_result_resources, [:pointer, :pointer], :pointer
+ attach_function :rd_kafka_ConfigResource_add_incremental_config, [:pointer, :string, :int32, :string], :pointer
+ attach_function :rd_kafka_event_IncrementalAlterConfigs_result, [:pointer], :pointer
+
+ RD_KAFKA_ADMIN_OP_DESCRIBECONFIGS = 5
+ RD_KAFKA_EVENT_DESCRIBECONFIGS_RESULT = 104
+
+ RD_KAFKA_ADMIN_OP_INCREMENTALALTERCONFIGS = 16
+ RD_KAFKA_EVENT_INCREMENTALALTERCONFIGS_RESULT = 131072
+
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_SET = 0
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_DELETE = 1
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_APPEND = 2
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_SUBTRACT = 3
+
  # Errors
+ class NativeErrorDesc < FFI::Struct
+ layout :code, :int,
+ :name, :pointer,
+ :desc, :pointer
+ end

  attach_function :rd_kafka_err2name, [:int], :string
  attach_function :rd_kafka_err2str, [:int], :string
+ attach_function :rd_kafka_get_err_descs, [:pointer, :pointer], :void

  # Configuration

@@ -111,28 +160,36 @@ module Rdkafka
  callback :error_cb, [:pointer, :int, :string, :pointer], :void
  attach_function :rd_kafka_conf_set_error_cb, [:pointer, :error_cb], :void
  attach_function :rd_kafka_rebalance_protocol, [:pointer], :string
-
+ callback :oauthbearer_token_refresh_cb, [:pointer, :string, :pointer], :void
+ attach_function :rd_kafka_conf_set_oauthbearer_token_refresh_cb, [:pointer, :oauthbearer_token_refresh_cb], :void
+ attach_function :rd_kafka_oauthbearer_set_token, [:pointer, :string, :int64, :pointer, :pointer, :int, :pointer, :int], :int
+ attach_function :rd_kafka_oauthbearer_set_token_failure, [:pointer, :string], :int
  # Log queue
  attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
  attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
+ # Per topic configs
+ attach_function :rd_kafka_topic_conf_new, [], :pointer
+ attach_function :rd_kafka_topic_conf_set, [:pointer, :string, :string, :pointer, :int], :kafka_config_response

  LogCallback = FFI::Function.new(
  :void, [:pointer, :int, :string, :string]
  ) do |_client_ptr, level, _level_string, line|
  severity = case level
- when 0 || 1 || 2
+ when 0, 1, 2
  Logger::FATAL
  when 3
  Logger::ERROR
  when 4
  Logger::WARN
- when 5 || 6
+ when 5, 6
  Logger::INFO
  when 7
  Logger::DEBUG
  else
  Logger::UNKNOWN
  end
+
+ Rdkafka::Config.ensure_log_thread
  Rdkafka::Config.log_queue << [severity, "rdkafka: #{line}"]
  end

@@ -159,6 +216,32 @@
  end
  end

+ # The OAuth callback is currently global and contextless.
+ # This means that the callback will be called for all instances, and the callback must be able to determine to which instance it is associated.
+ # The instance name will be provided in the callback, allowing the callback to reference the correct instance.
+ #
+ # An example of how to use the instance name in the callback is given below.
+ # The `refresh_token` is configured as the `oauthbearer_token_refresh_callback`.
+ # `instances` is a map of client names to client instances, maintained by the user.
+ #
+ # ```
+ # def refresh_token(config, client_name)
+ # client = instances[client_name]
+ # client.oauthbearer_set_token(
+ # token: 'new-token-value',
+ # lifetime_ms: token-lifetime-ms,
+ # principal_name: 'principal-name'
+ # )
+ # end
+ # ```
+ OAuthbearerTokenRefreshCallback = FFI::Function.new(
+ :void, [:pointer, :string, :pointer]
+ ) do |client_ptr, config, _opaque|
+ if Rdkafka::Config.oauthbearer_token_refresh_callback
+ Rdkafka::Config.oauthbearer_token_refresh_callback.call(config, Rdkafka::Bindings.rd_kafka_name(client_ptr))
+ end
+ end
+
  # Handle

  enum :kafka_type, [
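To make the new OAUTHBEARER bindings concrete, here is a hedged wiring sketch. It assumes a class-level `Rdkafka::Config.oauthbearer_token_refresh_callback=` setter matching the reader used by `OAuthbearerTokenRefreshCallback` above, and that `#oauthbearer_set_token` comes from the new `Helpers::OAuth` module included by `Admin`; the broker address, SASL settings and token values are illustrative:

```ruby
require "rdkafka"

# Registry of client instances keyed by client name, maintained by the
# application, so the global callback can look up the instance it belongs to.
instances = {}

Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |_config, client_name|
  client = instances[client_name]

  # Hand a freshly obtained token to librdkafka via Helpers::OAuth.
  client&.oauthbearer_set_token(
    token: "new-token-value",   # illustrative; fetch this from your identity provider
    lifetime_ms: 60_000,
    principal_name: "principal-name"
  )
end

admin = Rdkafka::Config.new(
  "bootstrap.servers": "localhost:9092",
  "security.protocol": "sasl_ssl",
  "sasl.mechanisms": "OAUTHBEARER"
).admin

instances[admin.name] = admin
```

The explicit `#start` added to `Admin` above suggests that a client can be created without starting it, registered in such a map, and only then started, so the callback can always resolve its instance.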
data/lib/rdkafka/callbacks.rb CHANGED
@@ -113,6 +113,42 @@ module Rdkafka
  end
  end

+ class DescribeConfigsResult
+ attr_reader :result_error, :error_string, :results, :results_count
+
+ def initialize(event_ptr)
+ @results=[]
+ @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
+ @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
+
+ if @result_error == 0
+ configs_describe_result = Rdkafka::Bindings.rd_kafka_event_DescribeConfigs_result(event_ptr)
+ # Get the number of matching acls
+ pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+ @results = Rdkafka::Bindings.rd_kafka_DescribeConfigs_result_resources(configs_describe_result, pointer_to_size_t)
+ @results_count = pointer_to_size_t.read_int
+ end
+ end
+ end
+
+ class IncrementalAlterConfigsResult
+ attr_reader :result_error, :error_string, :results, :results_count
+
+ def initialize(event_ptr)
+ @results=[]
+ @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
+ @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
+
+ if @result_error == 0
+ incremental_alter_result = Rdkafka::Bindings.rd_kafka_event_IncrementalAlterConfigs_result(event_ptr)
+ # Get the number of matching acls
+ pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+ @results = Rdkafka::Bindings.rd_kafka_IncrementalAlterConfigs_result_resources(incremental_alter_result, pointer_to_size_t)
+ @results_count = pointer_to_size_t.read_int
+ end
+ end
+ end
+
  # FFI Function used for Create Topic and Delete Topic callbacks
  BackgroundEventCallbackFunction = FFI::Function.new(
  :void, [:pointer, :pointer, :pointer]
@@ -123,20 +159,24 @@ module Rdkafka
  # @private
  class BackgroundEventCallback
  def self.call(_, event_ptr, _)
- event_type = Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
- if event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
+ case Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
  process_create_topic(event_ptr)
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBECONFIGS_RESULT
+ process_describe_configs(event_ptr)
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_INCREMENTALALTERCONFIGS_RESULT
+ process_incremental_alter_configs(event_ptr)
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
  process_delete_topic(event_ptr)
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS_RESULT
+ when Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS_RESULT
  process_create_partitions(event_ptr)
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATEACLS_RESULT
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATEACLS_RESULT
  process_create_acl(event_ptr)
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEACLS_RESULT
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEACLS_RESULT
  process_delete_acl(event_ptr)
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBEACLS_RESULT
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBEACLS_RESULT
  process_describe_acl(event_ptr)
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEGROUPS_RESULT
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEGROUPS_RESULT
  process_delete_groups(event_ptr)
  end
  end
@@ -156,7 +196,44 @@ module Rdkafka
  create_topic_handle[:response] = create_topic_results[0].result_error
  create_topic_handle[:error_string] = create_topic_results[0].error_string
  create_topic_handle[:result_name] = create_topic_results[0].result_name
- create_topic_handle[:pending] = false
+
+ create_topic_handle.unlock
+ end
+ end
+
+ def self.process_describe_configs(event_ptr)
+ describe_configs = DescribeConfigsResult.new(event_ptr)
+ describe_configs_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
+
+ if describe_configs_handle = Rdkafka::Admin::DescribeConfigsHandle.remove(describe_configs_handle_ptr.address)
+ describe_configs_handle[:response] = describe_configs.result_error
+ describe_configs_handle[:response_string] = describe_configs.error_string
+ describe_configs_handle[:pending] = false
+
+ if describe_configs.result_error == 0
+ describe_configs_handle[:config_entries] = describe_configs.results
+ describe_configs_handle[:entry_count] = describe_configs.results_count
+ end
+
+ describe_configs_handle.unlock
+ end
+ end
+
+ def self.process_incremental_alter_configs(event_ptr)
+ incremental_alter = IncrementalAlterConfigsResult.new(event_ptr)
+ incremental_alter_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
+
+ if incremental_alter_handle = Rdkafka::Admin::IncrementalAlterConfigsHandle.remove(incremental_alter_handle_ptr.address)
+ incremental_alter_handle[:response] = incremental_alter.result_error
+ incremental_alter_handle[:response_string] = incremental_alter.error_string
+ incremental_alter_handle[:pending] = false
+
+ if incremental_alter.result_error == 0
+ incremental_alter_handle[:config_entries] = incremental_alter.results
+ incremental_alter_handle[:entry_count] = incremental_alter.results_count
+ end
+
+ incremental_alter_handle.unlock
  end
  end

@@ -173,7 +250,8 @@ module Rdkafka
  delete_group_handle[:response] = delete_group_results[0].result_error
  delete_group_handle[:error_string] = delete_group_results[0].error_string
  delete_group_handle[:result_name] = delete_group_results[0].result_name
- delete_group_handle[:pending] = false
+
+ delete_group_handle.unlock
  end
  end

@@ -190,7 +268,8 @@ module Rdkafka
  delete_topic_handle[:response] = delete_topic_results[0].result_error
  delete_topic_handle[:error_string] = delete_topic_results[0].error_string
  delete_topic_handle[:result_name] = delete_topic_results[0].result_name
- delete_topic_handle[:pending] = false
+
+ delete_topic_handle.unlock
  end
  end

@@ -207,7 +286,8 @@ module Rdkafka
  create_partitions_handle[:response] = create_partitions_results[0].result_error
  create_partitions_handle[:error_string] = create_partitions_results[0].error_string
  create_partitions_handle[:result_name] = create_partitions_results[0].result_name
- create_partitions_handle[:pending] = false
+
+ create_partitions_handle.unlock
  end
  end

@@ -223,7 +303,8 @@ module Rdkafka
  if create_acl_handle = Rdkafka::Admin::CreateAclHandle.remove(create_acl_handle_ptr.address)
  create_acl_handle[:response] = create_acl_results[0].result_error
  create_acl_handle[:response_string] = create_acl_results[0].error_string
- create_acl_handle[:pending] = false
+
+ create_acl_handle.unlock
  end
  end

@@ -239,11 +320,13 @@ module Rdkafka
  if delete_acl_handle = Rdkafka::Admin::DeleteAclHandle.remove(delete_acl_handle_ptr.address)
  delete_acl_handle[:response] = delete_acl_results[0].result_error
  delete_acl_handle[:response_string] = delete_acl_results[0].error_string
- delete_acl_handle[:pending] = false
+
  if delete_acl_results[0].result_error == 0
  delete_acl_handle[:matching_acls] = delete_acl_results[0].matching_acls
  delete_acl_handle[:matching_acls_count] = delete_acl_results[0].matching_acls_count
  end
+
+ delete_acl_handle.unlock
  end
  end

@@ -254,17 +337,18 @@ module Rdkafka
  if describe_acl_handle = Rdkafka::Admin::DescribeAclHandle.remove(describe_acl_handle_ptr.address)
  describe_acl_handle[:response] = describe_acl.result_error
  describe_acl_handle[:response_string] = describe_acl.error_string
- describe_acl_handle[:pending] = false
+
  if describe_acl.result_error == 0
- describe_acl_handle[:acls] = describe_acl.matching_acls
+ describe_acl_handle[:acls] = describe_acl.matching_acls
  describe_acl_handle[:acls_count] = describe_acl.matching_acls_count
  end
+
+ describe_acl_handle.unlock
  end
  end
  end

  # FFI Function used for Message Delivery callbacks
-
  DeliveryCallbackFunction = FFI::Function.new(
  :void, [:pointer, :pointer, :pointer]
  ) do |client_ptr, message_ptr, opaque_ptr|
@@ -284,7 +368,6 @@ module Rdkafka
  delivery_handle[:partition] = message[:partition]
  delivery_handle[:offset] = message[:offset]
  delivery_handle[:topic_name] = FFI::MemoryPointer.from_string(topic_name)
- delivery_handle[:pending] = false

  # Call delivery callback on opaque
  if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
@@ -299,9 +382,10 @@ module Rdkafka
  delivery_handle
  )
  end
+
+ delivery_handle.unlock
  end
  end
  end
-
  end
  end