karafka-rdkafka 0.14.10 → 0.15.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (41) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +2 -4
  4. data/.gitignore +2 -0
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +11 -0
  7. data/README.md +19 -9
  8. data/docker-compose.yml +1 -1
  9. data/ext/Rakefile +8 -0
  10. data/lib/rdkafka/abstract_handle.rb +44 -20
  11. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  12. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  13. data/lib/rdkafka/admin/create_topic_report.rb +1 -1
  14. data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
  15. data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
  16. data/lib/rdkafka/admin/describe_acl_report.rb +1 -0
  17. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  18. data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
  19. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  20. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
  21. data/lib/rdkafka/admin.rb +174 -0
  22. data/lib/rdkafka/bindings.rb +75 -3
  23. data/lib/rdkafka/callbacks.rb +103 -19
  24. data/lib/rdkafka/config.rb +46 -9
  25. data/lib/rdkafka/consumer.rb +7 -0
  26. data/lib/rdkafka/helpers/oauth.rb +58 -0
  27. data/lib/rdkafka/native_kafka.rb +32 -19
  28. data/lib/rdkafka/producer.rb +7 -0
  29. data/lib/rdkafka/version.rb +1 -1
  30. data/lib/rdkafka.rb +7 -0
  31. data/spec/rdkafka/abstract_handle_spec.rb +34 -21
  32. data/spec/rdkafka/admin_spec.rb +328 -3
  33. data/spec/rdkafka/bindings_spec.rb +97 -0
  34. data/spec/rdkafka/config_spec.rb +33 -0
  35. data/spec/rdkafka/consumer_spec.rb +50 -1
  36. data/spec/rdkafka/native_kafka_spec.rb +8 -1
  37. data/spec/rdkafka/producer_spec.rb +43 -0
  38. data/spec/spec_helper.rb +16 -1
  39. data.tar.gz.sig +0 -0
  40. metadata +10 -3
  41. metadata.gz.sig +0 -0
data/lib/rdkafka/admin.rb CHANGED
@@ -2,6 +2,8 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ include Helpers::OAuth
6
+
5
7
  # @private
6
8
  def initialize(native_kafka)
7
9
  @native_kafka = native_kafka
@@ -10,6 +12,19 @@ module Rdkafka
10
12
  ObjectSpace.define_finalizer(self, native_kafka.finalizer)
11
13
  end
12
14
 
15
+ # Starts the native Kafka polling thread and kicks off the init polling
16
+ # @note Not needed to run unless explicit start was disabled
17
+ def start
18
+ @native_kafka.start
19
+ end
20
+
21
+ # @return [String] admin name
22
+ def name
23
+ @name ||= @native_kafka.with_inner do |inner|
24
+ ::Rdkafka::Bindings.rd_kafka_name(inner)
25
+ end
26
+ end
27
+
13
28
  def finalizer
14
29
  ->(_) { close }
15
30
  end
@@ -605,6 +620,165 @@ module Rdkafka
605
620
  describe_acl_handle
606
621
  end
607
622
 
623
+ # Describe configs
624
+ #
625
+ # @param resources [Array<Hash>] Array where elements are hashes with two keys:
626
+ # - `:resource_type` - numerical resource type based on Kafka API
627
+ # - `:resource_name` - string with resource name
628
+ # @return [DescribeConfigsHandle] Describe config handle that can be used to wait for the
629
+ # result of fetching resources with their appropriate configs
630
+ #
631
+ # @raise [RdkafkaError]
632
+ #
633
+ # @note Several resources can be requested at one go, but only one broker at a time
634
+ def describe_configs(resources)
635
+ closed_admin_check(__method__)
636
+
637
+ handle = DescribeConfigsHandle.new
638
+ handle[:pending] = true
639
+ handle[:response] = -1
640
+
641
+ queue_ptr = @native_kafka.with_inner do |inner|
642
+ Rdkafka::Bindings.rd_kafka_queue_get_background(inner)
643
+ end
644
+
645
+ if queue_ptr.null?
646
+ raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
647
+ end
648
+
649
+ admin_options_ptr = @native_kafka.with_inner do |inner|
650
+ Rdkafka::Bindings.rd_kafka_AdminOptions_new(
651
+ inner,
652
+ Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DESCRIBECONFIGS
653
+ )
654
+ end
655
+
656
+ DescribeConfigsHandle.register(handle)
657
+ Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, handle.to_ptr)
658
+
659
+ pointer_array = resources.map do |resource_details|
660
+ Rdkafka::Bindings.rd_kafka_ConfigResource_new(
661
+ resource_details.fetch(:resource_type),
662
+ FFI::MemoryPointer.from_string(
663
+ resource_details.fetch(:resource_name)
664
+ )
665
+ )
666
+ end
667
+
668
+ configs_array_ptr = FFI::MemoryPointer.new(:pointer, pointer_array.size)
669
+ configs_array_ptr.write_array_of_pointer(pointer_array)
670
+
671
+ begin
672
+ @native_kafka.with_inner do |inner|
673
+ Rdkafka::Bindings.rd_kafka_DescribeConfigs(
674
+ inner,
675
+ configs_array_ptr,
676
+ pointer_array.size,
677
+ admin_options_ptr,
678
+ queue_ptr
679
+ )
680
+ end
681
+ rescue Exception
682
+ DescribeConfigsHandle.remove(handle.to_ptr.address)
683
+
684
+ raise
685
+ ensure
686
+ Rdkafka::Bindings.rd_kafka_ConfigResource_destroy_array(
687
+ configs_array_ptr,
688
+ pointer_array.size
689
+ ) if configs_array_ptr
690
+ end
691
+
692
+ handle
693
+ end
694
+
695
+ # Alters in an incremental way all the configs provided for given resources
696
+ #
697
+ # @param resources_with_configs [Array<Hash>] resources with the configs key that contains
698
+ # name, value and the proper op_type to perform on this value.
699
+ #
700
+ # @return [IncrementalAlterConfigsHandle] Incremental alter configs handle that can be used to
701
+ # wait for the result of altering resources with their appropriate configs
702
+ #
703
+ # @raise [RdkafkaError]
704
+ #
705
+ # @note Several resources can be requested at one go, but only one broker at a time
706
+ # @note The results won't contain altered values but only the altered resources
707
+ def incremental_alter_configs(resources_with_configs)
708
+ closed_admin_check(__method__)
709
+
710
+ handle = IncrementalAlterConfigsHandle.new
711
+ handle[:pending] = true
712
+ handle[:response] = -1
713
+
714
+ queue_ptr = @native_kafka.with_inner do |inner|
715
+ Rdkafka::Bindings.rd_kafka_queue_get_background(inner)
716
+ end
717
+
718
+ if queue_ptr.null?
719
+ raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
720
+ end
721
+
722
+ admin_options_ptr = @native_kafka.with_inner do |inner|
723
+ Rdkafka::Bindings.rd_kafka_AdminOptions_new(
724
+ inner,
725
+ Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_INCREMENTALALTERCONFIGS
726
+ )
727
+ end
728
+
729
+ IncrementalAlterConfigsHandle.register(handle)
730
+ Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, handle.to_ptr)
731
+
732
+ # Build each resource representation together with its incremental config changes
733
+ pointer_array = resources_with_configs.map do |resource_details|
734
+ # First build the appropriate resource representation
735
+ resource_ptr = Rdkafka::Bindings.rd_kafka_ConfigResource_new(
736
+ resource_details.fetch(:resource_type),
737
+ FFI::MemoryPointer.from_string(
738
+ resource_details.fetch(:resource_name)
739
+ )
740
+ )
741
+
742
+ resource_details.fetch(:configs).each do |config|
743
+ Bindings.rd_kafka_ConfigResource_add_incremental_config(
744
+ resource_ptr,
745
+ config.fetch(:name),
746
+ config.fetch(:op_type),
747
+ config.fetch(:value)
748
+ )
749
+ end
750
+
751
+ resource_ptr
752
+ end
753
+
754
+ configs_array_ptr = FFI::MemoryPointer.new(:pointer, pointer_array.size)
755
+ configs_array_ptr.write_array_of_pointer(pointer_array)
756
+
757
+
758
+ begin
759
+ @native_kafka.with_inner do |inner|
760
+ Rdkafka::Bindings.rd_kafka_IncrementalAlterConfigs(
761
+ inner,
762
+ configs_array_ptr,
763
+ pointer_array.size,
764
+ admin_options_ptr,
765
+ queue_ptr
766
+ )
767
+ end
768
+ rescue Exception
769
+ IncrementalAlterConfigsHandle.remove(handle.to_ptr.address)
770
+
771
+ raise
772
+ ensure
773
+ Rdkafka::Bindings.rd_kafka_ConfigResource_destroy_array(
774
+ configs_array_ptr,
775
+ pointer_array.size
776
+ ) if configs_array_ptr
777
+ end
778
+
779
+ handle
780
+ end
781
+
608
782
  private
609
783
 
610
784
  def closed_admin_check(method)
@@ -26,6 +26,7 @@ module Rdkafka
26
26
 
27
27
  RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS = -175
28
28
  RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS = -174
29
+ RD_KAFKA_RESP_ERR__STATE = -172
29
30
  RD_KAFKA_RESP_ERR__NOENT = -156
30
31
  RD_KAFKA_RESP_ERR_NO_ERROR = 0
31
32
 
@@ -97,6 +98,48 @@ module Rdkafka
97
98
  attach_function :rd_kafka_topic_partition_list_destroy, [:pointer], :void
98
99
  attach_function :rd_kafka_topic_partition_list_copy, [:pointer], :pointer
99
100
 
101
+ # Configs management
102
+ #
103
+ # Structs for management of configurations
104
+ # Each configuration is attached to a resource and one resource can have many configuration
105
+ # details. Each resource will also have separate errors results if obtaining configuration
106
+ # was not possible for any reason
107
+ class ConfigResource < FFI::Struct
108
+ layout :type, :int,
109
+ :name, :string
110
+ end
111
+
112
+ attach_function :rd_kafka_DescribeConfigs, [:pointer, :pointer, :size_t, :pointer, :pointer], :void, blocking: true
113
+ attach_function :rd_kafka_ConfigResource_new, [:int32, :pointer], :pointer
114
+ attach_function :rd_kafka_ConfigResource_destroy_array, [:pointer, :int32], :void
115
+ attach_function :rd_kafka_event_DescribeConfigs_result, [:pointer], :pointer
116
+ attach_function :rd_kafka_DescribeConfigs_result_resources, [:pointer, :pointer], :pointer
117
+ attach_function :rd_kafka_ConfigResource_configs, [:pointer, :pointer], :pointer
118
+ attach_function :rd_kafka_ConfigEntry_name, [:pointer], :string
119
+ attach_function :rd_kafka_ConfigEntry_value, [:pointer], :string
120
+ attach_function :rd_kafka_ConfigEntry_is_read_only, [:pointer], :int
121
+ attach_function :rd_kafka_ConfigEntry_is_default, [:pointer], :int
122
+ attach_function :rd_kafka_ConfigEntry_is_sensitive, [:pointer], :int
123
+ attach_function :rd_kafka_ConfigEntry_is_synonym, [:pointer], :int
124
+ attach_function :rd_kafka_ConfigEntry_synonyms, [:pointer, :pointer], :pointer
125
+ attach_function :rd_kafka_ConfigResource_error, [:pointer], :int
126
+ attach_function :rd_kafka_ConfigResource_error_string, [:pointer], :string
127
+ attach_function :rd_kafka_IncrementalAlterConfigs, [:pointer, :pointer, :size_t, :pointer, :pointer], :void, blocking: true
128
+ attach_function :rd_kafka_IncrementalAlterConfigs_result_resources, [:pointer, :pointer], :pointer
129
+ attach_function :rd_kafka_ConfigResource_add_incremental_config, [:pointer, :string, :int32, :string], :pointer
130
+ attach_function :rd_kafka_event_IncrementalAlterConfigs_result, [:pointer], :pointer
131
+
132
+ RD_KAFKA_ADMIN_OP_DESCRIBECONFIGS = 5
133
+ RD_KAFKA_EVENT_DESCRIBECONFIGS_RESULT = 104
134
+
135
+ RD_KAFKA_ADMIN_OP_INCREMENTALALTERCONFIGS = 16
136
+ RD_KAFKA_EVENT_INCREMENTALALTERCONFIGS_RESULT = 131072
137
+
138
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_SET = 0
139
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_DELETE = 1
140
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_APPEND = 2
141
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_SUBTRACT = 3
142
+
100
143
  # Errors
101
144
 
102
145
  attach_function :rd_kafka_err2name, [:int], :string
@@ -125,7 +168,10 @@ module Rdkafka
125
168
  callback :error_cb, [:pointer, :int, :string, :pointer], :void
126
169
  attach_function :rd_kafka_conf_set_error_cb, [:pointer, :error_cb], :void
127
170
  attach_function :rd_kafka_rebalance_protocol, [:pointer], :string
128
-
171
+ callback :oauthbearer_token_refresh_cb, [:pointer, :string, :pointer], :void
172
+ attach_function :rd_kafka_conf_set_oauthbearer_token_refresh_cb, [:pointer, :oauthbearer_token_refresh_cb], :void
173
+ attach_function :rd_kafka_oauthbearer_set_token, [:pointer, :string, :int64, :pointer, :pointer, :int, :pointer, :int], :int
174
+ attach_function :rd_kafka_oauthbearer_set_token_failure, [:pointer, :string], :int
129
175
  # Log queue
130
176
  attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
131
177
  attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
@@ -134,13 +180,13 @@ module Rdkafka
134
180
  :void, [:pointer, :int, :string, :string]
135
181
  ) do |_client_ptr, level, _level_string, line|
136
182
  severity = case level
137
- when 0 || 1 || 2
183
+ when 0, 1, 2
138
184
  Logger::FATAL
139
185
  when 3
140
186
  Logger::ERROR
141
187
  when 4
142
188
  Logger::WARN
143
- when 5 || 6
189
+ when 5, 6
144
190
  Logger::INFO
145
191
  when 7
146
192
  Logger::DEBUG
@@ -175,6 +221,32 @@ module Rdkafka
175
221
  end
176
222
  end
177
223
 
224
+ # The OAuth callback is currently global and contextless.
225
+ # This means that the callback will be called for all instances, and the callback must be able to determine to which instance it is associated.
226
+ # The instance name will be provided in the callback, allowing the callback to reference the correct instance.
227
+ #
228
+ # An example of how to use the instance name in the callback is given below.
229
+ # The `refresh_token` is configured as the `oauthbearer_token_refresh_callback`.
230
+ # `instances` is a map of client names to client instances, maintained by the user.
231
+ #
232
+ # ```
233
+ # def refresh_token(config, client_name)
234
+ # client = instances[client_name]
235
+ # client.oauthbearer_set_token(
236
+ # token: 'new-token-value',
237
+ # lifetime_ms: token-lifetime-ms,
238
+ # principal_name: 'principal-name'
239
+ # )
240
+ # end
241
+ # ```
242
+ OAuthbearerTokenRefreshCallback = FFI::Function.new(
243
+ :void, [:pointer, :string, :pointer]
244
+ ) do |client_ptr, config, _opaque|
245
+ if Rdkafka::Config.oauthbearer_token_refresh_callback
246
+ Rdkafka::Config.oauthbearer_token_refresh_callback.call(config, Rdkafka::Bindings.rd_kafka_name(client_ptr))
247
+ end
248
+ end
249
+
178
250
  # Handle
179
251
 
180
252
  enum :kafka_type, [
@@ -113,6 +113,42 @@ module Rdkafka
113
113
  end
114
114
  end
115
115
 
116
+ class DescribeConfigsResult
117
+ attr_reader :result_error, :error_string, :results, :results_count
118
+
119
+ def initialize(event_ptr)
120
+ @results=[]
121
+ @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
122
+ @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
123
+
124
+ if @result_error == 0
125
+ configs_describe_result = Rdkafka::Bindings.rd_kafka_event_DescribeConfigs_result(event_ptr)
126
+ # Get the number of described config resources
127
+ pointer_to_size_t = FFI::MemoryPointer.new(:int32)
128
+ @results = Rdkafka::Bindings.rd_kafka_DescribeConfigs_result_resources(configs_describe_result, pointer_to_size_t)
129
+ @results_count = pointer_to_size_t.read_int
130
+ end
131
+ end
132
+ end
133
+
134
+ class IncrementalAlterConfigsResult
135
+ attr_reader :result_error, :error_string, :results, :results_count
136
+
137
+ def initialize(event_ptr)
138
+ @results=[]
139
+ @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
140
+ @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
141
+
142
+ if @result_error == 0
143
+ incremental_alter_result = Rdkafka::Bindings.rd_kafka_event_IncrementalAlterConfigs_result(event_ptr)
144
+ # Get the number of altered config resources
145
+ pointer_to_size_t = FFI::MemoryPointer.new(:int32)
146
+ @results = Rdkafka::Bindings.rd_kafka_IncrementalAlterConfigs_result_resources(incremental_alter_result, pointer_to_size_t)
147
+ @results_count = pointer_to_size_t.read_int
148
+ end
149
+ end
150
+ end
151
+
116
152
  # FFI Function used for Create Topic and Delete Topic callbacks
117
153
  BackgroundEventCallbackFunction = FFI::Function.new(
118
154
  :void, [:pointer, :pointer, :pointer]
@@ -123,20 +159,24 @@ module Rdkafka
123
159
  # @private
124
160
  class BackgroundEventCallback
125
161
  def self.call(_, event_ptr, _)
126
- event_type = Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
127
- if event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
162
+ case Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
163
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
128
164
  process_create_topic(event_ptr)
129
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
165
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBECONFIGS_RESULT
166
+ process_describe_configs(event_ptr)
167
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_INCREMENTALALTERCONFIGS_RESULT
168
+ process_incremental_alter_configs(event_ptr)
169
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
130
170
  process_delete_topic(event_ptr)
131
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS_RESULT
171
+ when Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS_RESULT
132
172
  process_create_partitions(event_ptr)
133
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATEACLS_RESULT
173
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATEACLS_RESULT
134
174
  process_create_acl(event_ptr)
135
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEACLS_RESULT
175
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEACLS_RESULT
136
176
  process_delete_acl(event_ptr)
137
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBEACLS_RESULT
177
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBEACLS_RESULT
138
178
  process_describe_acl(event_ptr)
139
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEGROUPS_RESULT
179
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEGROUPS_RESULT
140
180
  process_delete_groups(event_ptr)
141
181
  end
142
182
  end
@@ -156,7 +196,44 @@ module Rdkafka
156
196
  create_topic_handle[:response] = create_topic_results[0].result_error
157
197
  create_topic_handle[:error_string] = create_topic_results[0].error_string
158
198
  create_topic_handle[:result_name] = create_topic_results[0].result_name
159
- create_topic_handle[:pending] = false
199
+
200
+ create_topic_handle.unlock
201
+ end
202
+ end
203
+
204
+ def self.process_describe_configs(event_ptr)
205
+ describe_configs = DescribeConfigsResult.new(event_ptr)
206
+ describe_configs_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
207
+
208
+ if describe_configs_handle = Rdkafka::Admin::DescribeConfigsHandle.remove(describe_configs_handle_ptr.address)
209
+ describe_configs_handle[:response] = describe_configs.result_error
210
+ describe_configs_handle[:response_string] = describe_configs.error_string
211
+ describe_configs_handle[:pending] = false
212
+
213
+ if describe_configs.result_error == 0
214
+ describe_configs_handle[:config_entries] = describe_configs.results
215
+ describe_configs_handle[:entry_count] = describe_configs.results_count
216
+ end
217
+
218
+ describe_configs_handle.unlock
219
+ end
220
+ end
221
+
222
+ def self.process_incremental_alter_configs(event_ptr)
223
+ incremental_alter = IncrementalAlterConfigsResult.new(event_ptr)
224
+ incremental_alter_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
225
+
226
+ if incremental_alter_handle = Rdkafka::Admin::IncrementalAlterConfigsHandle.remove(incremental_alter_handle_ptr.address)
227
+ incremental_alter_handle[:response] = incremental_alter.result_error
228
+ incremental_alter_handle[:response_string] = incremental_alter.error_string
229
+ incremental_alter_handle[:pending] = false
230
+
231
+ if incremental_alter.result_error == 0
232
+ incremental_alter_handle[:config_entries] = incremental_alter.results
233
+ incremental_alter_handle[:entry_count] = incremental_alter.results_count
234
+ end
235
+
236
+ incremental_alter_handle.unlock
160
237
  end
161
238
  end
162
239
 
@@ -173,7 +250,8 @@ module Rdkafka
173
250
  delete_group_handle[:response] = delete_group_results[0].result_error
174
251
  delete_group_handle[:error_string] = delete_group_results[0].error_string
175
252
  delete_group_handle[:result_name] = delete_group_results[0].result_name
176
- delete_group_handle[:pending] = false
253
+
254
+ delete_group_handle.unlock
177
255
  end
178
256
  end
179
257
 
@@ -190,7 +268,8 @@ module Rdkafka
190
268
  delete_topic_handle[:response] = delete_topic_results[0].result_error
191
269
  delete_topic_handle[:error_string] = delete_topic_results[0].error_string
192
270
  delete_topic_handle[:result_name] = delete_topic_results[0].result_name
193
- delete_topic_handle[:pending] = false
271
+
272
+ delete_topic_handle.unlock
194
273
  end
195
274
  end
196
275
 
@@ -207,7 +286,8 @@ module Rdkafka
207
286
  create_partitions_handle[:response] = create_partitions_results[0].result_error
208
287
  create_partitions_handle[:error_string] = create_partitions_results[0].error_string
209
288
  create_partitions_handle[:result_name] = create_partitions_results[0].result_name
210
- create_partitions_handle[:pending] = false
289
+
290
+ create_partitions_handle.unlock
211
291
  end
212
292
  end
213
293
 
@@ -223,7 +303,8 @@ module Rdkafka
223
303
  if create_acl_handle = Rdkafka::Admin::CreateAclHandle.remove(create_acl_handle_ptr.address)
224
304
  create_acl_handle[:response] = create_acl_results[0].result_error
225
305
  create_acl_handle[:response_string] = create_acl_results[0].error_string
226
- create_acl_handle[:pending] = false
306
+
307
+ create_acl_handle.unlock
227
308
  end
228
309
  end
229
310
 
@@ -239,11 +320,13 @@ module Rdkafka
239
320
  if delete_acl_handle = Rdkafka::Admin::DeleteAclHandle.remove(delete_acl_handle_ptr.address)
240
321
  delete_acl_handle[:response] = delete_acl_results[0].result_error
241
322
  delete_acl_handle[:response_string] = delete_acl_results[0].error_string
242
- delete_acl_handle[:pending] = false
323
+
243
324
  if delete_acl_results[0].result_error == 0
244
325
  delete_acl_handle[:matching_acls] = delete_acl_results[0].matching_acls
245
326
  delete_acl_handle[:matching_acls_count] = delete_acl_results[0].matching_acls_count
246
327
  end
328
+
329
+ delete_acl_handle.unlock
247
330
  end
248
331
  end
249
332
 
@@ -254,17 +337,18 @@ module Rdkafka
254
337
  if describe_acl_handle = Rdkafka::Admin::DescribeAclHandle.remove(describe_acl_handle_ptr.address)
255
338
  describe_acl_handle[:response] = describe_acl.result_error
256
339
  describe_acl_handle[:response_string] = describe_acl.error_string
257
- describe_acl_handle[:pending] = false
340
+
258
341
  if describe_acl.result_error == 0
259
- describe_acl_handle[:acls] = describe_acl.matching_acls
342
+ describe_acl_handle[:acls] = describe_acl.matching_acls
260
343
  describe_acl_handle[:acls_count] = describe_acl.matching_acls_count
261
344
  end
345
+
346
+ describe_acl_handle.unlock
262
347
  end
263
348
  end
264
349
  end
265
350
 
266
351
  # FFI Function used for Message Delivery callbacks
267
-
268
352
  DeliveryCallbackFunction = FFI::Function.new(
269
353
  :void, [:pointer, :pointer, :pointer]
270
354
  ) do |client_ptr, message_ptr, opaque_ptr|
@@ -284,7 +368,6 @@ module Rdkafka
284
368
  delivery_handle[:partition] = message[:partition]
285
369
  delivery_handle[:offset] = message[:offset]
286
370
  delivery_handle[:topic_name] = FFI::MemoryPointer.from_string(topic_name)
287
- delivery_handle[:pending] = false
288
371
 
289
372
  # Call delivery callback on opaque
290
373
  if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
@@ -299,9 +382,10 @@ module Rdkafka
299
382
  delivery_handle
300
383
  )
301
384
  end
385
+
386
+ delivery_handle.unlock
302
387
  end
303
388
  end
304
389
  end
305
-
306
390
  end
307
391
  end
@@ -15,12 +15,13 @@ module Rdkafka
15
15
  @@opaques = ObjectSpace::WeakMap.new
16
16
  # @private
17
17
  @@log_queue = Queue.new
18
- # @private
19
18
  # We memoize thread on the first log flush
20
19
  # This allows us also to restart logger thread on forks
21
20
  @@log_thread = nil
22
21
  # @private
23
22
  @@log_mutex = Mutex.new
23
+ # @private
24
+ @@oauthbearer_token_refresh_callback = nil
24
25
 
25
26
  # Returns the current logger, by default this is a logger to stdout.
26
27
  #
@@ -104,6 +105,24 @@ module Rdkafka
104
105
  @@error_callback
105
106
  end
106
107
 
108
+ # Sets the SASL/OAUTHBEARER token refresh callback.
109
+ # This callback will be triggered when it is time to refresh the client's OAUTHBEARER token
110
+ #
111
+ # @param callback [Proc, #call] The callback
112
+ #
113
+ # @return [nil]
114
+ def self.oauthbearer_token_refresh_callback=(callback)
115
+ raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
116
+ @@oauthbearer_token_refresh_callback = callback
117
+ end
118
+
119
+ # Returns the current oauthbearer_token_refresh_callback callback, by default this is nil.
120
+ #
121
+ # @return [Proc, nil]
122
+ def self.oauthbearer_token_refresh_callback
123
+ @@oauthbearer_token_refresh_callback
124
+ end
125
+
107
126
  # @private
108
127
  def self.opaques
109
128
  @@opaques
@@ -176,11 +195,13 @@ module Rdkafka
176
195
 
177
196
  # Creates a consumer with this configuration.
178
197
  #
198
+ # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
199
+ # automatically. Defaults to true. Set to false only when doing complex initialization.
179
200
  # @return [Consumer] The created consumer
180
201
  #
181
202
  # @raise [ConfigError] When the configuration contains invalid options
182
203
  # @raise [ClientCreationError] When the native client cannot be created
183
- def consumer
204
+ def consumer(native_kafka_auto_start: true)
184
205
  opaque = Opaque.new
185
206
  config = native_config(opaque)
186
207
 
@@ -200,18 +221,21 @@ module Rdkafka
200
221
  Rdkafka::NativeKafka.new(
201
222
  kafka,
202
223
  run_polling_thread: false,
203
- opaque: opaque
224
+ opaque: opaque,
225
+ auto_start: native_kafka_auto_start
204
226
  )
205
227
  )
206
228
  end
207
229
 
208
230
  # Create a producer with this configuration.
209
231
  #
232
+ # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
233
+ # automatically. Defaults to true. Set to false only when doing complex initialization.
210
234
  # @return [Producer] The created producer
211
235
  #
212
236
  # @raise [ConfigError] When the configuration contains invalid options
213
237
  # @raise [ClientCreationError] When the native client cannot be created
214
- def producer
238
+ def producer(native_kafka_auto_start: true)
215
239
  # Create opaque
216
240
  opaque = Opaque.new
217
241
  # Create Kafka config
@@ -220,11 +244,15 @@ module Rdkafka
220
244
  Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
221
245
  # Return producer with Kafka client
222
246
  partitioner_name = self[:partitioner] || self["partitioner"]
247
+
248
+ kafka = native_kafka(config, :rd_kafka_producer)
249
+
223
250
  Rdkafka::Producer.new(
224
251
  Rdkafka::NativeKafka.new(
225
- native_kafka(config, :rd_kafka_producer),
252
+ kafka,
226
253
  run_polling_thread: true,
227
- opaque: opaque
254
+ opaque: opaque,
255
+ auto_start: native_kafka_auto_start
228
256
  ),
229
257
  partitioner_name
230
258
  ).tap do |producer|
@@ -234,19 +262,25 @@ module Rdkafka
234
262
 
235
263
  # Creates an admin instance with this configuration.
236
264
  #
265
+ # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
266
+ # automatically. Defaults to true. Set to false only when doing complex initialization.
237
267
  # @return [Admin] The created admin instance
238
268
  #
239
269
  # @raise [ConfigError] When the configuration contains invalid options
240
270
  # @raise [ClientCreationError] When the native client cannot be created
241
- def admin
271
+ def admin(native_kafka_auto_start: true)
242
272
  opaque = Opaque.new
243
273
  config = native_config(opaque)
244
274
  Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
275
+
276
+ kafka = native_kafka(config, :rd_kafka_producer)
277
+
245
278
  Rdkafka::Admin.new(
246
279
  Rdkafka::NativeKafka.new(
247
- native_kafka(config, :rd_kafka_producer),
280
+ kafka,
248
281
  run_polling_thread: true,
249
- opaque: opaque
282
+ opaque: opaque,
283
+ auto_start: native_kafka_auto_start
250
284
  )
251
285
  )
252
286
  end
@@ -300,6 +334,9 @@ module Rdkafka
300
334
 
301
335
  # Set error callback
302
336
  Rdkafka::Bindings.rd_kafka_conf_set_error_cb(config, Rdkafka::Bindings::ErrorCallback)
337
+
338
+ # Set oauth callback
339
+ Rdkafka::Bindings.rd_kafka_conf_set_oauthbearer_token_refresh_cb(config, Rdkafka::Bindings::OAuthbearerTokenRefreshCallback)
303
340
  end
304
341
  end
305
342
 
@@ -13,12 +13,19 @@ module Rdkafka
13
13
  class Consumer
14
14
  include Enumerable
15
15
  include Helpers::Time
16
+ include Helpers::OAuth
16
17
 
17
18
  # @private
18
19
  def initialize(native_kafka)
19
20
  @native_kafka = native_kafka
20
21
  end
21
22
 
23
+ # Starts the native Kafka polling thread and kicks off the init polling
24
+ # @note Not needed to run unless explicit start was disabled
25
+ def start
26
+ @native_kafka.start
27
+ end
28
+
22
29
  # @return [String] consumer name
23
30
  def name
24
31
  @name ||= @native_kafka.with_inner do |inner|