rdkafka 0.24.2 → 0.25.0

Files changed (50)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +10 -0
  3. data/Gemfile +9 -0
  4. data/README.md +2 -1
  5. data/dist/{librdkafka-2.11.1.tar.gz → librdkafka-2.12.1.tar.gz} +0 -0
  6. data/docker-compose-ssl.yml +1 -1
  7. data/docker-compose.yml +1 -1
  8. data/lib/rdkafka/abstract_handle.rb +23 -5
  9. data/lib/rdkafka/admin/acl_binding_result.rb +1 -1
  10. data/lib/rdkafka/admin/config_resource_binding_result.rb +1 -0
  11. data/lib/rdkafka/admin/create_acl_handle.rb +3 -0
  12. data/lib/rdkafka/admin/create_acl_report.rb +3 -0
  13. data/lib/rdkafka/admin/create_partitions_handle.rb +3 -0
  14. data/lib/rdkafka/admin/create_partitions_report.rb +1 -0
  15. data/lib/rdkafka/admin/create_topic_handle.rb +3 -0
  16. data/lib/rdkafka/admin/create_topic_report.rb +3 -0
  17. data/lib/rdkafka/admin/delete_acl_handle.rb +3 -0
  18. data/lib/rdkafka/admin/delete_acl_report.rb +3 -0
  19. data/lib/rdkafka/admin/delete_groups_handle.rb +5 -0
  20. data/lib/rdkafka/admin/delete_groups_report.rb +3 -0
  21. data/lib/rdkafka/admin/delete_topic_handle.rb +3 -0
  22. data/lib/rdkafka/admin/delete_topic_report.rb +3 -0
  23. data/lib/rdkafka/admin/describe_acl_handle.rb +3 -0
  24. data/lib/rdkafka/admin/describe_acl_report.rb +3 -0
  25. data/lib/rdkafka/admin/describe_configs_handle.rb +3 -0
  26. data/lib/rdkafka/admin/describe_configs_report.rb +6 -0
  27. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +3 -0
  28. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +6 -0
  29. data/lib/rdkafka/admin.rb +108 -112
  30. data/lib/rdkafka/bindings.rb +62 -29
  31. data/lib/rdkafka/callbacks.rb +71 -11
  32. data/lib/rdkafka/config.rb +20 -8
  33. data/lib/rdkafka/consumer/headers.rb +3 -2
  34. data/lib/rdkafka/consumer/message.rb +7 -3
  35. data/lib/rdkafka/consumer/partition.rb +6 -2
  36. data/lib/rdkafka/consumer/topic_partition_list.rb +8 -8
  37. data/lib/rdkafka/consumer.rb +40 -28
  38. data/lib/rdkafka/defaults.rb +106 -0
  39. data/lib/rdkafka/error.rb +16 -1
  40. data/lib/rdkafka/helpers/oauth.rb +11 -5
  41. data/lib/rdkafka/metadata.rb +29 -5
  42. data/lib/rdkafka/native_kafka.rb +26 -2
  43. data/lib/rdkafka/producer/delivery_report.rb +6 -2
  44. data/lib/rdkafka/producer/partitions_count_cache.rb +24 -14
  45. data/lib/rdkafka/producer.rb +49 -23
  46. data/lib/rdkafka/version.rb +6 -3
  47. data/lib/rdkafka.rb +1 -0
  48. data/rdkafka.gemspec +0 -7
  49. data/renovate.json +1 -8
  50. metadata +4 -87
data/lib/rdkafka/producer/partitions_count_cache.rb CHANGED
@@ -17,9 +17,9 @@ module Rdkafka
     #
     # 2. Edge case handling
     # If a user configures `statistics.interval.ms` much higher than the default cache TTL
-    # (30 seconds), the cache will still function correctly. When statistics updates don't
-    # occur frequently enough, the cache entries will expire naturally, triggering a
-    # blocking refresh when needed.
+    # ({Defaults::PARTITIONS_COUNT_CACHE_TTL_MS}ms), the cache will still function correctly.
+    # When statistics updates don't occur frequently enough, the cache entries will expire
+    # naturally, triggering a blocking refresh when needed.
     #
     # 3. User configuration awareness
     # The cache respects user-defined settings. If `topic.metadata.refresh.interval.ms` is
@@ -46,22 +46,32 @@ module Rdkafka
     class PartitionsCountCache
       include Helpers::Time

-      # Default time-to-live for cached partition counts in seconds
-      #
-      # @note This default was chosen to balance freshness of metadata with performance
-      #   optimization. Most Kafka cluster topology changes are planned operations, making 30
-      #   seconds a reasonable compromise.
-      DEFAULT_TTL = 30
-
       # Creates a new partition count cache
       #
-      # @param ttl [Integer] Time-to-live in seconds for cached values
-      def initialize(ttl = DEFAULT_TTL)
+      # @param ttl [Integer, nil] DEPRECATED: Use ttl_ms instead.
+      #   Time-to-live in seconds for cached values. Will be removed in v1.0.0.
+      # @param ttl_ms [Integer, nil] Time-to-live in milliseconds for cached values.
+      #   Defaults to {Defaults::PARTITIONS_COUNT_CACHE_TTL_MS}.
+      def initialize(ttl = :not_provided, ttl_ms: :not_provided)
         @counts = {}
         @mutex_hash = {}
         # Used only for @mutex_hash access to ensure thread-safety when creating new mutexes
         @mutex_for_hash = Mutex.new
-        @ttl = ttl
+
+        # Determine which TTL value to use
+        if ttl != :not_provided && ttl_ms != :not_provided
+          warn "DEPRECATION WARNING: Both ttl and ttl_ms were provided to PartitionsCountCache. " \
+               "Using ttl_ms. The ttl parameter is deprecated and will be removed in v1.0.0."
+          @ttl_ms = ttl_ms
+        elsif ttl != :not_provided
+          warn "DEPRECATION WARNING: ttl (seconds) parameter for PartitionsCountCache is deprecated. " \
+               "Use ttl_ms (milliseconds) instead. This parameter will be removed in v1.0.0."
+          @ttl_ms = (ttl * 1000).to_i
+        elsif ttl_ms == :not_provided
+          @ttl_ms = Defaults::PARTITIONS_COUNT_CACHE_TTL_MS
+        else
+          @ttl_ms = ttl_ms
+        end
       end

       # Reads partition count for a topic with automatic refresh when expired
@@ -209,7 +219,7 @@ module Rdkafka
       # @param timestamp [Float] Monotonic timestamp to check
       # @return [Boolean] true if expired, false otherwise
       def expired?(timestamp)
-        monotonic_now - timestamp > @ttl
+        (monotonic_now - timestamp) * 1_000 > @ttl_ms
       end
     end
 end
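The shim above keeps the legacy positional `ttl` (seconds) working while steering callers toward the new `ttl_ms` keyword. A minimal usage sketch based only on the signature shown in this diff (the 60-second values are illustrative):

```ruby
require "rdkafka"

# New style: TTL passed in milliseconds via the ttl_ms keyword
cache = Rdkafka::Producer::PartitionsCountCache.new(ttl_ms: 60_000)

# Legacy style: still accepted, but warns and converts via (ttl * 1000).to_i
legacy = Rdkafka::Producer::PartitionsCountCache.new(60)

# Omitting both falls back to Defaults::PARTITIONS_COUNT_CACHE_TTL_MS
default_cache = Rdkafka::Producer::PartitionsCountCache.new
```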
data/lib/rdkafka/producer.rb CHANGED
@@ -14,10 +14,9 @@ module Rdkafka
     # then. Since the partitions count can only grow and should be same for all consumers and
     # producers, we can use a global cache as long as we ensure that updates only move up.
     #
+    # @return [Rdkafka::Producer::PartitionsCountCache]
     # @note It is critical to remember, that not all users may have statistics callbacks enabled,
     #   hence we should not make assumption that this cache is always updated from the stats.
-    #
-    # @return [Rdkafka::Producer::PartitionsCountCache]
     def self.partitions_count_cache
       @@partitions_count_cache
     end
@@ -64,12 +63,13 @@ module Rdkafka
     end

     # Sets alternative set of configuration details that can be set per topic
-    # @note It is not allowed to re-set the same topic config twice because of the underlying
-    #   librdkafka caching
+    #
     # @param topic [String] The topic name
     # @param config [Hash] config we want to use per topic basis
     # @param config_hash [Integer] hash of the config. We expect it here instead of computing it,
     #   because it is already computed during the retrieval attempt in the `#produce` flow.
+    # @note It is not allowed to re-set the same topic config twice because of the underlying
+    #   librdkafka caching
     def set_topic_config(topic, config, config_hash)
       # Ensure lock on topic reference just in case
       @native_kafka.with_inner do |inner|
@@ -125,8 +125,7 @@ module Rdkafka
     # Set a callback that will be called every time a message is successfully produced.
     # The callback is called with a {DeliveryReport} and {DeliveryHandle}
     #
-    # @param callback [Proc, #call] The callback
-    #
+    # @param callback [Proc, #call] callable object to handle delivery reports
     # @return [nil]
     def delivery_callback=(callback)
       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
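For context, a sketch of wiring up this callback (the broker address is a placeholder; the `error`, `partition` and `offset` readers on the yielded {DeliveryReport} are assumed from the gem's public API):

```ruby
require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

# Invoked once per message with its DeliveryReport
producer.delivery_callback = lambda do |report|
  if report.error
    warn "delivery failed: #{report.error}"
  else
    puts "delivered to partition #{report.partition} at offset #{report.offset}"
  end
end
```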
@@ -168,7 +167,7 @@ module Rdkafka
     #   should be no other errors.
     #
     # @note For `timed_out` we do not raise an error to keep it backwards compatible
-    def flush(timeout_ms=5_000)
+    def flush(timeout_ms = Defaults::PRODUCER_FLUSH_TIMEOUT_MS)
       closed_producer_check(__method__)

       code = nil
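Call sites keep the same semantics: `flush` blocks up to the timeout and returns whether the queue drained, without raising on `timed_out`. A short sketch (the 10-second value is illustrative):

```ruby
# Wait up to 10 seconds for queued messages; false means the flush timed out
warn "flush timed out with messages still queued" unless producer.flush(10_000)
```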
@@ -208,11 +207,37 @@ module Rdkafka
       code.zero? || raise(Rdkafka::RdkafkaError.new(code))

       # Wait for the purge to affect everything
-      sleep(0.001) until flush(100)
+      sleep(Defaults::PRODUCER_PURGE_SLEEP_INTERVAL_MS / 1000.0) until flush(Defaults::PRODUCER_PURGE_FLUSH_TIMEOUT_MS)

       true
     end

+    # Returns the number of messages and requests waiting to be sent to the broker as well as
+    # delivery reports queued for the application.
+    #
+    # This provides visibility into the producer's internal queue depth, useful for:
+    # - Monitoring producer backpressure
+    # - Implementing custom flow control
+    # - Debugging message delivery issues
+    # - Graceful shutdown logic (wait until queue is empty)
+    #
+    # @return [Integer] the number of messages in the queue
+    # @raise [Rdkafka::ClosedProducerError] if called on a closed producer
+    #
+    # @note This method is thread-safe as it uses the @native_kafka.with_inner synchronization
+    #
+    # @example
+    #   producer.queue_size #=> 42
+    def queue_size
+      closed_producer_check(__method__)
+
+      @native_kafka.with_inner do |inner|
+        Rdkafka::Bindings.rd_kafka_outq_len(inner)
+      end
+    end
+
+    alias queue_length queue_size
+
     # Partition count for a given topic.
     #
     # @param topic [String] The topic name.
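Together with `flush`, the new `queue_size` (aliased `queue_length`) supports the graceful-shutdown pattern listed in its docstring. A sketch (the 60-second deadline is an arbitrary illustration):

```ruby
# Drain the producer before closing: the queue counts in-flight messages,
# broker requests and pending delivery reports
deadline = Time.now + 60

until producer.queue_size.zero? || Time.now > deadline
  producer.flush(1_000)
end

producer.close
```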
@@ -237,13 +262,13 @@ module Rdkafka
           topic_metadata = ::Rdkafka::Metadata.new(inner, topic).topics&.first
         end

-        topic_metadata ? topic_metadata[:partition_count] : -1
+        topic_metadata ? topic_metadata[:partition_count] : Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
       end
     rescue Rdkafka::RdkafkaError => e
       # If the topic does not exist, it will be created or if not allowed another error will be
-      # raised. We here return -1 so this can happen without early error happening on metadata
-      # discovery.
-      return -1 if e.code == :unknown_topic_or_part
+      # raised. We here return RD_KAFKA_PARTITION_UA so this can happen without early error
+      # happening on metadata discovery.
+      return Rdkafka::Bindings::RD_KAFKA_PARTITION_UA if e.code == :unknown_topic_or_part

       raise(e)
     end
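`RD_KAFKA_PARTITION_UA` ("unassigned") carries the same -1 value librdkafka has always used, so existing comparisons keep working while new code can use the named constant ("events" is an illustrative topic name):

```ruby
count = producer.partition_count("events")

if count == Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
  puts "partition count unknown (topic missing or metadata unavailable)"
else
  puts "events has #{count} partitions"
end
```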
@@ -254,14 +279,15 @@ module Rdkafka
     # When a timestamp is provided this is used instead of the auto-generated timestamp.
     #
     # @param topic [String] The topic to produce to
-    # @param payload [String,nil] The message's payload
-    # @param key [String, nil] The message's key
-    # @param partition [Integer,nil] Optional partition to produce to
+    # @param payload [String, nil]
+    # @param key [String, nil]
+    # @param partition [Integer, nil] Optional partition to produce to
     # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
-    # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
-    # @param headers [Hash<String,String|Array<String>>] Optional message headers. Values can be either a single string or an array of strings to support duplicate headers per KIP-82
+    # @param timestamp [Time, Integer, nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
+    # @param headers [Hash{String => String, Array<String>}] Optional message headers. Values can be either a single string or an array of strings to support duplicate headers per KIP-82
     # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
     # @param topic_config [Hash] topic config for given message dispatch. Allows to send messages to topics with different configuration
+    # @param partitioner [String] name of the partitioner to use
     #
     # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
     #
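A sketch of a `produce` call exercising the documented parameters, including an array header value for KIP-82 duplicate headers (topic, key and header names are illustrative):

```ruby
handle = producer.produce(
  topic: "events",
  payload: "body",
  key: "user-42",
  # An Array value emits one header per element (KIP-82 duplicate headers)
  headers: { "trace-id" => "abc123", "tags" => ["alpha", "beta"] },
  label: :signup
)

# Block until the broker acknowledges (or the wait times out)
report = handle.wait(max_wait_timeout: 5)
puts "delivered to #{report.partition}@#{report.offset}"
```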
@@ -318,9 +344,9 @@ module Rdkafka
                             selected_partitioner) if partition_count.positive?
       end

-      # If partition is nil, use -1 to let librdafka set the partition randomly or
+      # If partition is nil, use RD_KAFKA_PARTITION_UA to let librdafka set the partition randomly or
       # based on the key when present.
-      partition ||= -1
+      partition ||= Rdkafka::Bindings::RD_KAFKA_PARTITION_UA

       # If timestamp is nil use 0 and let Kafka set one. If an integer or time
       # use it.
@@ -338,9 +364,9 @@ module Rdkafka
       delivery_handle.label = label
       delivery_handle.topic = topic
       delivery_handle[:pending] = true
-      delivery_handle[:response] = -1
-      delivery_handle[:partition] = -1
-      delivery_handle[:offset] = -1
+      delivery_handle[:response] = Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
+      delivery_handle[:partition] = Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
+      delivery_handle[:offset] = Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
       DeliveryHandle.register(delivery_handle)

       args = [
@@ -387,7 +413,7 @@ module Rdkafka
       end

       # Raise error if the produce call was not successful
-      if response != 0
+      if response != Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
         DeliveryHandle.remove(delivery_handle.to_ptr.address)
         raise RdkafkaError.new(response)
       end
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,10 @@
 # frozen_string_literal: true

 module Rdkafka
-  VERSION = "0.24.2"
-  LIBRDKAFKA_VERSION = "2.11.1"
-  LIBRDKAFKA_SOURCE_SHA256 = "a2c87186b081e2705bb7d5338d5a01bc88d43273619b372ccb7bb0d264d0ca9f"
+  # Current rdkafka-ruby gem version
+  VERSION = "0.25.0"
+  # Target librdkafka version to be used
+  LIBRDKAFKA_VERSION = "2.12.1"
+  # SHA256 hash of the librdkafka source tarball for verification
+  LIBRDKAFKA_SOURCE_SHA256 = "ec103fa05cb0f251e375f6ea0b6112cfc9d0acd977dc5b69fdc54242ba38a16f"
 end
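The checksum constant makes it possible to verify the vendored tarball. A minimal sketch using Ruby's standard Digest library (the relative path mirrors the dist/ entry in this diff and is an assumption about the working directory):

```ruby
require "digest"
require "rdkafka"

# Assumed location, matching the dist/ entry in this diff
tarball = "dist/librdkafka-#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz"

actual = Digest::SHA256.file(tarball).hexdigest
unless actual == Rdkafka::LIBRDKAFKA_SOURCE_SHA256
  raise "librdkafka tarball checksum mismatch: got #{actual}"
end
```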
data/lib/rdkafka.rb CHANGED
@@ -8,6 +8,7 @@ require "json"
8
8
  require "rdkafka/version"
9
9
  require "rdkafka/helpers/time"
10
10
  require "rdkafka/helpers/oauth"
11
+ require "rdkafka/defaults"
11
12
  require "rdkafka/abstract_handle"
12
13
  require "rdkafka/admin"
13
14
  require "rdkafka/admin/create_topic_handle"
data/rdkafka.gemspec CHANGED
@@ -73,13 +73,6 @@ Gem::Specification.new do |gem|
   gem.add_dependency 'mini_portile2', '~> 2.6'
   gem.add_dependency 'rake', '> 12'

-  gem.add_development_dependency 'ostruct'
-  gem.add_development_dependency 'pry'
-  gem.add_development_dependency 'rspec', '~> 3.5'
-  gem.add_development_dependency 'rake'
-  gem.add_development_dependency 'simplecov'
-  gem.add_development_dependency 'warning'
-
   gem.metadata = {
     'funding_uri' => 'https://karafka.io/#become-pro',
     'homepage_uri' => 'https://karafka.io',
data/renovate.json CHANGED
@@ -3,18 +3,11 @@
   "extends": [
     "config:recommended"
   ],
+  "minimumReleaseAge": "7 days",
   "github-actions": {
     "enabled": true,
     "pinDigests": true
   },
-  "packageRules": [
-    {
-      "matchManagers": [
-        "github-actions"
-      ],
-      "minimumReleaseAge": "7 days"
-    }
-  ],
   "customManagers": [
     {
       "customType": "regex",
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.24.2
+  version: 0.25.0
 platform: ruby
 authors:
 - Thijs Cadier
@@ -80,90 +80,6 @@ dependencies:
   - - ">"
     - !ruby/object:Gem::Version
       version: '12'
-- !ruby/object:Gem::Dependency
-  name: ostruct
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-- !ruby/object:Gem::Dependency
-  name: pry
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-- !ruby/object:Gem::Dependency
-  name: rspec
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '3.5'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '3.5'
-- !ruby/object:Gem::Dependency
-  name: rake
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-- !ruby/object:Gem::Dependency
-  name: simplecov
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-- !ruby/object:Gem::Dependency
-  name: warning
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
 description: Modern Kafka client library for Ruby based on librdkafka
 email:
 - contact@karafka.io
@@ -178,7 +94,7 @@ files:
 - README.md
 - Rakefile
 - bin/verify_kafka_warnings
-- dist/librdkafka-2.11.1.tar.gz
+- dist/librdkafka-2.12.1.tar.gz
 - dist/patches/rdkafka_global_init.patch
 - docker-compose-ssl.yml
 - docker-compose.yml
@@ -215,6 +131,7 @@ files:
 - lib/rdkafka/consumer/message.rb
 - lib/rdkafka/consumer/partition.rb
 - lib/rdkafka/consumer/topic_partition_list.rb
+- lib/rdkafka/defaults.rb
 - lib/rdkafka/error.rb
 - lib/rdkafka/helpers/oauth.rb
 - lib/rdkafka/helpers/time.rb
@@ -251,7 +168,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.6.9
+rubygems_version: 4.0.3
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
   It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+