rdkafka 0.15.1 → 0.16.0.beta1
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +2 -4
- data/.gitignore +2 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +11 -1
- data/README.md +19 -9
- data/docker-compose.yml +1 -1
- data/ext/Rakefile +8 -0
- data/lib/rdkafka/abstract_handle.rb +44 -20
- data/lib/rdkafka/admin/create_topic_report.rb +1 -1
- data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
- data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
- data/lib/rdkafka/admin.rb +15 -0
- data/lib/rdkafka/bindings.rb +35 -3
- data/lib/rdkafka/callbacks.rb +18 -10
- data/lib/rdkafka/config.rb +69 -15
- data/lib/rdkafka/consumer.rb +7 -0
- data/lib/rdkafka/helpers/oauth.rb +58 -0
- data/lib/rdkafka/native_kafka.rb +32 -19
- data/lib/rdkafka/producer.rb +7 -0
- data/lib/rdkafka/version.rb +1 -1
- data/lib/rdkafka.rb +1 -0
- data/spec/rdkafka/abstract_handle_spec.rb +34 -21
- data/spec/rdkafka/admin_spec.rb +53 -0
- data/spec/rdkafka/bindings_spec.rb +97 -0
- data/spec/rdkafka/config_spec.rb +53 -0
- data/spec/rdkafka/consumer_spec.rb +54 -0
- data/spec/rdkafka/native_kafka_spec.rb +8 -1
- data/spec/rdkafka/producer_spec.rb +43 -0
- data/spec/spec_helper.rb +16 -1
- data.tar.gz.sig +0 -0
- metadata +4 -3
- metadata.gz.sig +0 -0
    
        data/lib/rdkafka/config.rb
    CHANGED
    
```diff
@@ -15,13 +15,13 @@ module Rdkafka
     @@opaques = ObjectSpace::WeakMap.new
     # @private
     @@log_queue = Queue.new
-
-
-
-
-
-
-
+    # We memoize thread on the first log flush
+    # This allows us also to restart logger thread on forks
+    @@log_thread = nil
+    # @private
+    @@log_mutex = Mutex.new
+    # @private
+    @@oauthbearer_token_refresh_callback = nil
 
     # Returns the current logger, by default this is a logger to stdout.
     #
@@ -30,6 +30,24 @@ module Rdkafka
       @@logger
     end
 
+    # Makes sure that there is a thread for consuming logs
+    # We do not spawn thread immediately and we need to check if it operates to support forking
+    def self.ensure_log_thread
+      return if @@log_thread && @@log_thread.alive?
+
+      @@log_mutex.synchronize do
+        # Restart if dead (fork, crash)
+        @@log_thread = nil if @@log_thread && !@@log_thread.alive?
+
+        @@log_thread ||= Thread.start do
+          loop do
+            severity, msg = @@log_queue.pop
+            @@logger.add(severity, msg)
+          end
+        end
+      end
+    end
+
     # Returns a queue whose contents will be passed to the configured logger. Each entry
     # should follow the format [Logger::Severity, String]. The benefit over calling the
     # logger directly is that this is safe to use from trap contexts.
@@ -87,6 +105,24 @@ module Rdkafka
       @@error_callback
     end
 
+    # Sets the SASL/OAUTHBEARER token refresh callback.
+    # This callback will be triggered when it is time to refresh the client's OAUTHBEARER token
+    #
+    # @param callback [Proc, #call] The callback
+    #
+    # @return [nil]
+    def self.oauthbearer_token_refresh_callback=(callback)
+      raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
+      @@oauthbearer_token_refresh_callback = callback
+    end
+
+    # Returns the current oauthbearer_token_refresh_callback callback, by default this is nil.
+    #
+    # @return [Proc, nil]
+    def self.oauthbearer_token_refresh_callback
+      @@oauthbearer_token_refresh_callback
+    end
+
     # @private
     def self.opaques
       @@opaques
@@ -159,11 +195,13 @@ module Rdkafka
 
     # Creates a consumer with this configuration.
     #
+    # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
+    #   automatically. Defaults to true. Set to false only when doing complex initialization.
     # @return [Consumer] The created consumer
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    def consumer
+    def consumer(native_kafka_auto_start: true)
      opaque = Opaque.new
      config = native_config(opaque)
 
@@ -183,18 +221,21 @@ module Rdkafka
         Rdkafka::NativeKafka.new(
           kafka,
           run_polling_thread: false,
-          opaque: opaque
+          opaque: opaque,
+          auto_start: native_kafka_auto_start
         )
       )
     end
 
     # Create a producer with this configuration.
     #
+    # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
+    #   automatically. Defaults to true. Set to false only when doing complex initialization.
     # @return [Producer] The created producer
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    def producer
+    def producer(native_kafka_auto_start: true)
       # Create opaque
       opaque = Opaque.new
       # Create Kafka config
@@ -203,11 +244,15 @@ module Rdkafka
       Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
       # Return producer with Kafka client
       partitioner_name = self[:partitioner] || self["partitioner"]
+
+      kafka = native_kafka(config, :rd_kafka_producer)
+
       Rdkafka::Producer.new(
         Rdkafka::NativeKafka.new(
-
+          kafka,
           run_polling_thread: true,
-          opaque: opaque
+          opaque: opaque,
+          auto_start: native_kafka_auto_start
         ),
         partitioner_name
       ).tap do |producer|
@@ -217,19 +262,25 @@ module Rdkafka
 
     # Creates an admin instance with this configuration.
     #
+    # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
+    #   automatically. Defaults to true. Set to false only when doing complex initialization.
     # @return [Admin] The created admin instance
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    def admin
+    def admin(native_kafka_auto_start: true)
       opaque = Opaque.new
       config = native_config(opaque)
       Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
+
+      kafka = native_kafka(config, :rd_kafka_producer)
+
       Rdkafka::Admin.new(
         Rdkafka::NativeKafka.new(
-
+          kafka,
           run_polling_thread: true,
-          opaque: opaque
+          opaque: opaque,
+          auto_start: native_kafka_auto_start
         )
       )
     end
@@ -283,6 +334,9 @@ module Rdkafka
 
         # Set error callback
         Rdkafka::Bindings.rd_kafka_conf_set_error_cb(config, Rdkafka::Bindings::ErrorCallback)
+
+        # Set oauth callback
+        Rdkafka::Bindings.rd_kafka_conf_set_oauthbearer_token_refresh_cb(config, Rdkafka::Bindings::OAuthbearerTokenRefreshCallback)
       end
     end
 
```
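Taken together, these hunks add two public knobs on Config: a process-wide SASL/OAUTHBEARER token refresh callback, and a `native_kafka_auto_start:` flag on `#consumer`, `#producer` and `#admin`. A minimal sketch of registering the callback follows; the callback body is illustrative, and only the setter, its TypeError guard and the `(config, client_name)` signature come from this diff and the specs further down:

```ruby
require "rdkafka"

# Anything responding to #call is accepted; assigning a non-callable raises TypeError.
Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
  # `config` is the client's configuration string and `client_name` its librdkafka name,
  # as asserted in the bindings spec below. Token acquisition and the call to
  # oauthbearer_set_token (see the Helpers::OAuth section) would go here.
  puts "OAUTHBEARER token refresh requested for #{client_name}"
end

Rdkafka::Config.oauthbearer_token_refresh_callback.respond_to?(:call) # => true
```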
    
        data/lib/rdkafka/consumer.rb
    CHANGED
    
```diff
@@ -13,12 +13,19 @@ module Rdkafka
   class Consumer
     include Enumerable
     include Helpers::Time
+    include Helpers::OAuth
 
     # @private
     def initialize(native_kafka)
       @native_kafka = native_kafka
     end
 
+    # Starts the native Kafka polling thread and kicks off the init polling
+    # @note Not needed to run unless explicit start was disabled
+    def start
+      @native_kafka.start
+    end
+
     # @return [String] consumer name
     def name
       @name ||= @native_kafka.with_inner do |inner|
```
    data/lib/rdkafka/helpers/oauth.rb
ADDED

```ruby
module Rdkafka
  module Helpers

    module OAuth

      # Set the OAuthBearer token
      #
      # @param token [String] the mandatory token value to set, often (but not necessarily) a JWS compact serialization as per https://tools.ietf.org/html/rfc7515#section-3.1.
      # @param lifetime_ms [Integer] when the token expires, in terms of the number of milliseconds since the epoch. See https://currentmillis.com/.
      # @param principal_name [String] the mandatory Kafka principal name associated with the token.
      # @param extensions [Hash] optional SASL extensions key-value pairs to be communicated to the broker as additional key-value pairs during the initial client response as per https://tools.ietf.org/html/rfc7628#section-3.1.
      # @return [Integer] 0 on success
      def oauthbearer_set_token(token:, lifetime_ms:, principal_name:, extensions: nil)
        error_buffer = FFI::MemoryPointer.from_string(" " * 256)

        response = @native_kafka.with_inner do |inner|
          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(
            inner, token, lifetime_ms, principal_name,
            flatten_extensions(extensions), extension_size(extensions), error_buffer, 256
          )
        end

        return response if response.zero?

        oauthbearer_set_token_failure("Failed to set token: #{error_buffer.read_string}")

        response
      end

      # Marks failed oauth token acquire in librdkafka
      #
      # @param reason [String] human readable error reason for failing to acquire token
      def oauthbearer_set_token_failure(reason)
        @native_kafka.with_inner do |inner|
          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(
            inner,
            reason
          )
        end
      end

      private

      # Flatten the extensions hash into a string according to the spec, https://datatracker.ietf.org/doc/html/rfc7628#section-3.1
      def flatten_extensions(extensions)
        return nil unless extensions
        "\x01#{extensions.map { |e| e.join("=") }.join("\x01")}"
      end

      # extension_size is the number of keys + values which should be a non-negative even number
      # https://github.com/confluentinc/librdkafka/blob/master/src/rdkafka_sasl_oauthbearer.c#L327-L347
      def extension_size(extensions)
        return 0 unless extensions
        extensions.size * 2
      end
    end
  end
end
```
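Because Helpers::OAuth is mixed into Producer, Consumer and Admin (see the consumer.rb hunk above and the producer.rb hunk below), tokens are set directly on a client instance. A minimal sketch, assuming a client built with `security.protocol: sasl_ssl` and `sasl.mechanisms: OAUTHBEARER` and a `token` string obtained from an identity provider; the extension values are placeholders:

```ruby
response = producer.oauthbearer_set_token(
  token: token,                                   # JWS compact serialization from your IdP
  lifetime_ms: Time.now.to_i * 1000 + 900 * 1000, # expires roughly 15 minutes from now
  principal_name: "kafka-cluster",
  extensions: { "traceId" => "abc123" }           # optional SASL extensions (placeholder values)
)

# 0 means librdkafka accepted the token; on any other code the helper has already
# reported the problem via oauthbearer_set_token_failure and returns that code.
raise "token rejected: #{response}" unless response.zero?
```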
    
        data/lib/rdkafka/native_kafka.rb
    CHANGED
    
```diff
@@ -4,7 +4,7 @@ module Rdkafka
   # @private
   # A wrapper around a native kafka that polls and cleanly exits
   class NativeKafka
-    def initialize(inner, run_polling_thread:, opaque:)
+    def initialize(inner, run_polling_thread:, opaque:, auto_start: true)
       @inner = inner
       @opaque = opaque
       # Lock around external access
@@ -28,30 +28,43 @@ module Rdkafka
       # counter for operations in progress using inner
       @operations_in_progress = 0
 
-
-      Rdkafka::Bindings.rd_kafka_poll(inner, 0)
+      @run_polling_thread = run_polling_thread
 
-      if 
-        # Start thread to poll client for delivery callbacks,
-        # not used in consumer.
-        @polling_thread = Thread.new do
-          loop do
-            @poll_mutex.synchronize do
-              Rdkafka::Bindings.rd_kafka_poll(inner, 100)
-            end
+      start if auto_start
 
-
-
-
+      @closing = false
+    end
+
+    def start
+      synchronize do
+        return if @started
+
+        @started = true
+
+        # Trigger initial poll to make sure oauthbearer cb and other initial cb are handled
+        Rdkafka::Bindings.rd_kafka_poll(@inner, 0)
+
+        if @run_polling_thread
+          # Start thread to poll client for delivery callbacks,
+          # not used in consumer.
+          @polling_thread = Thread.new do
+            loop do
+              @poll_mutex.synchronize do
+                Rdkafka::Bindings.rd_kafka_poll(@inner, 100)
+              end
+
+              # Exit thread if closing and the poll queue is empty
+              if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(@inner) == 0
+                break
+              end
             end
           end
-        end
 
-
-
+          @polling_thread.name = "rdkafka.native_kafka##{Rdkafka::Bindings.rd_kafka_name(@inner).gsub('rdkafka', '')}"
+          @polling_thread.abort_on_exception = true
+          @polling_thread[:closing] = false
+        end
       end
-
-      @closing = false
     end
 
     def with_inner
```
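The `auto_start:` flag is what `native_kafka_auto_start:` in Config plumbs down to: with it disabled, no initial `rd_kafka_poll` happens and no polling thread is spawned until `start` is called, and `start` is a no-op once `@started` has been set. A minimal sketch of the deferred flow through the public API, with placeholder connection settings:

```ruby
config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",  # placeholder broker
  "security.protocol" => "sasl_ssl",
  "sasl.mechanisms"   => "OAUTHBEARER"
)

# The client object exists, but librdkafka has not been polled yet and no
# background polling thread is running.
producer = config.producer(native_kafka_auto_start: false)

# Complex initialization can happen here, e.g. setting the first OAUTHBEARER token
# before any broker traffic:
# producer.oauthbearer_set_token(token: token, lifetime_ms: ..., principal_name: "kafka-cluster")

# Kicks off the initial poll and, for producers/admins, the polling thread.
producer.start
```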
    
        data/lib/rdkafka/producer.rb
    CHANGED
    
```diff
@@ -4,6 +4,7 @@ module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
     include Helpers::Time
+    include Helpers::OAuth
 
     # Cache partitions count for 30 seconds
     PARTITIONS_COUNT_TTL = 30
@@ -53,6 +54,12 @@ module Rdkafka
       end
     end
 
+    # Starts the native Kafka polling thread and kicks off the init polling
+    # @note Not needed to run unless explicit start was disabled
+    def start
+      @native_kafka.start
+    end
+
     # @return [String] producer name
     def name
       @name ||= @native_kafka.with_inner do |inner|
```
    
        data/lib/rdkafka/version.rb
    CHANGED
    
    
    
        data/lib/rdkafka.rb
    CHANGED
    
    
    data/spec/rdkafka/abstract_handle_spec.rb
CHANGED

```diff
@@ -76,37 +76,50 @@ describe Rdkafka::AbstractHandle do
   end
 
   describe "#wait" do
-
+    context 'when pending_handle true' do
+      let(:pending_handle) { true }
 
-
-
-
-
+      it "should wait until the timeout and then raise an error" do
+        expect(Kernel).not_to receive(:warn)
+        expect {
+          subject.wait(max_wait_timeout: 0.1)
+        }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
+      end
     end
 
-    context 
+    context 'when pending_handle false' do
       let(:pending_handle) { false }
-      let(:result) { 1 }
 
-      it 
-
-
+      it 'should show a deprecation warning when wait_timeout is set' do
+        expect(Kernel).to receive(:warn).with(Rdkafka::AbstractHandle::WAIT_TIMEOUT_DEPRECATION_MESSAGE)
+        subject.wait(wait_timeout: 0.1)
       end
 
-
-
-
+      context "without error" do
+        let(:result) { 1 }
+
+        it "should return a result" do
+          expect(Kernel).not_to receive(:warn)
+          wait_result = subject.wait
+          expect(wait_result).to eq(result)
+        end
+
+        it "should wait without a timeout" do
+          expect(Kernel).not_to receive(:warn)
+          wait_result = subject.wait(max_wait_timeout: nil)
+          expect(wait_result).to eq(result)
+        end
       end
-    end
 
-
-
-      let(:response) { 20 }
+      context "with error" do
+        let(:response) { 20 }
 
-
-
-
-
+        it "should raise an rdkafka error" do
+          expect(Kernel).not_to receive(:warn)
+          expect {
+            subject.wait
+          }.to raise_error Rdkafka::RdkafkaError
+        end
       end
     end
   end
```
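The restructured spec pins down the new wait semantics: `max_wait_timeout:` bounds the wait and raises `WaitTimeoutError`, an unbounded wait is requested with `max_wait_timeout: nil`, and the legacy `wait_timeout:` argument now only triggers a deprecation warning. A short sketch against a delivery handle; topic and payload are illustrative:

```ruby
handle = producer.produce(topic: "events", payload: "hello")

begin
  report = handle.wait(max_wait_timeout: 5) # raises WaitTimeoutError if still pending after 5s
rescue Rdkafka::AbstractHandle::WaitTimeoutError
  # operation still pending; wait again or give up
end

# Deprecated as of this release: passing wait_timeout: emits
# Rdkafka::AbstractHandle::WAIT_TIMEOUT_DEPRECATION_MESSAGE via Kernel.warn.
# handle.wait(wait_timeout: 0.1)
```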
    
        data/spec/rdkafka/admin_spec.rb
    CHANGED
    
```diff
@@ -31,6 +31,19 @@ describe Rdkafka::Admin do
   let(:operation)             {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
   let(:permission_type)       {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
 
+  describe 'admin without auto-start' do
+    let(:admin) { config.admin(native_kafka_auto_start: false) }
+
+    it 'expect to be able to start it later and close' do
+      admin.start
+      admin.close
+    end
+
+    it 'expect to be able to close it without starting' do
+      admin.close
+    end
+  end
+
   describe "#create_topic" do
     describe "called with invalid input" do
       describe "with an invalid topic name" do
@@ -275,6 +288,9 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
           expect(create_acl_report.rdkafka_response).to eq(0)
           expect(create_acl_report.rdkafka_response_string).to eq("")
 
+          # Since we create and immediately check, this is slow on loaded CIs, hence we wait
+          sleep(2)
+
           #describe_acl
           describe_acl_handle = admin.describe_acl(resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_ANY, resource_name: nil, resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_ANY, principal: nil, host: nil, operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_ANY, permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ANY)
           describe_acl_report = describe_acl_handle.wait(max_wait_timeout: 15.0)
@@ -404,4 +420,41 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
       end
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = admin.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      before do
+        config_sasl = rdkafka_config(
+          "security.protocol": "sasl_ssl",
+          "sasl.mechanisms": 'OAUTHBEARER'
+        )
+        $admin_sasl = config_sasl.admin
+      end
+
+      after do
+        $admin_sasl.close
+      end
+
+      it 'should succeed' do
+
+        response = $admin_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
```
    data/spec/rdkafka/bindings_spec.rb
CHANGED

```diff
@@ -36,6 +36,16 @@ describe Rdkafka::Bindings do
       expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
     end
 
+    it "should log fatal messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 1, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+    end
+
+    it "should log fatal messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 2, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+    end
+
     it "should log error messages" do
       Rdkafka::Bindings::LogCallback.call(nil, 3, nil, "log line")
       expect(log_queue).to have_received(:<<).with([Logger::ERROR, "rdkafka: log line"])
@@ -51,6 +61,11 @@ describe Rdkafka::Bindings do
       expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
     end
 
+    it "should log info messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 6, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
+    end
+
     it "should log debug messages" do
       Rdkafka::Bindings::LogCallback.call(nil, 7, nil, "log line")
       expect(log_queue).to have_received(:<<).with([Logger::DEBUG, "rdkafka: log line"])
@@ -132,4 +147,86 @@ describe Rdkafka::Bindings do
       end
     end
   end
+
+  describe "oauthbearer set token" do
+
+    context "without args" do
+      it "should raise argument error" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      before do
+        DEFAULT_TOKEN_EXPIRY_SECONDS = 900
+        $token_value = "token"
+        $md_lifetime_ms = Time.now.to_i*1000 + DEFAULT_TOKEN_EXPIRY_SECONDS * 1000
+        $md_principal_name = "kafka-cluster"
+        $extensions = nil
+        $extension_size = 0
+        $error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+      end
+
+      it "should set token or capture failure" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(consumer_ptr, $token_value, $md_lifetime_ms, $md_principal_name, $extensions, $extension_size, $error_buffer, 256)
+          expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+          expect($error_buffer.read_string).to eq("SASL/OAUTHBEARER is not the configured authentication mechanism")
+        end
+      end
+    end
+  end
+
+  describe "oauthbearer set token failure" do
+
+    context "without args" do
+
+      it "should fail" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      it "should succeed" do
+        expect {
+          errstr = "error"
+          RdKafkaTestConsumer.with do |consumer_ptr|
+            Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, errstr)
+          end
+        }.to_not raise_error
+      end
+    end
+  end
+
+  describe "oauthbearer callback" do
+
+    context "without an oauthbearer callback" do
+      it "should do nothing" do
+        expect {
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(nil, "", nil)
+        }.not_to raise_error
+      end
+    end
+
+    context "with an oauthbearer callback" do
+      before do
+        Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+          $received_config = config
+          $received_client_name = client_name
+        end
+      end
+
+      it "should call the oauth bearer callback and receive config and client name" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(consumer_ptr, "{}", nil)
+          expect($received_config).to eq("{}")
+          expect($received_client_name).to match(/consumer/)
+        end
+      end
+    end
+  end
 end
```
    
        data/spec/rdkafka/config_spec.rb
    CHANGED
    
```diff
@@ -22,6 +22,7 @@ describe Rdkafka::Config do
     it "supports logging queue" do
       log = StringIO.new
       Rdkafka::Config.logger = Logger.new(log)
+      Rdkafka::Config.ensure_log_thread
 
       Rdkafka::Config.log_queue << [Logger::FATAL, "I love testing"]
       20.times do
@@ -31,6 +32,25 @@ describe Rdkafka::Config do
 
       expect(log.string).to include "FATAL -- : I love testing"
     end
+
+    it "expect to start new logger thread after fork and work" do
+      reader, writer = IO.pipe
+
+      pid = fork do
+        $stdout.reopen(writer)
+        Rdkafka::Config.logger = Logger.new($stdout)
+        reader.close
+        producer = rdkafka_producer_config(debug: 'all').producer
+        producer.close
+        writer.close
+        sleep(1)
+      end
+
+      writer.close
+      Process.wait(pid)
+      output = reader.read
+      expect(output.split("\n").size).to be >= 20
+    end
   end
 
   context "statistics callback" do
@@ -95,6 +115,39 @@ describe Rdkafka::Config do
     end
   end
 
+  context "oauthbearer calllback" do
+    context "with a proc/lambda" do
+      it "should set the callback" do
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+            puts config
+            puts client_name
+          end
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    context "with a callable object" do
+      it "should set the callback" do
+        callback = Class.new do
+          def call(config, client_name); end
+        end
+
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = callback.new
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    it "should not accept a callback that's not callable" do
+      expect {
+        Rdkafka::Config.oauthbearer_token_refresh_callback = 'not a callback'
+      }.to raise_error(TypeError)
+    end
+  end
+
   context "configuration" do
     it "should store configuration" do
       config = Rdkafka::Config.new
```