karafka-rdkafka 0.13.2 → 0.13.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +9 -4
  4. data/.gitignore +4 -0
  5. data/.rspec +1 -0
  6. data/.ruby-gemset +1 -0
  7. data/.ruby-version +1 -0
  8. data/CHANGELOG.md +54 -26
  9. data/{LICENSE → MIT-LICENSE} +2 -1
  10. data/README.md +19 -20
  11. data/certs/cert_chain.pem +21 -21
  12. data/docker-compose.yml +16 -15
  13. data/ext/README.md +1 -1
  14. data/ext/Rakefile +1 -1
  15. data/karafka-rdkafka.gemspec +2 -2
  16. data/lib/rdkafka/abstract_handle.rb +41 -27
  17. data/lib/rdkafka/admin/create_partitions_handle.rb +6 -3
  18. data/lib/rdkafka/admin/create_topic_handle.rb +6 -3
  19. data/lib/rdkafka/admin/delete_topic_handle.rb +6 -3
  20. data/lib/rdkafka/admin.rb +6 -7
  21. data/lib/rdkafka/bindings.rb +24 -6
  22. data/lib/rdkafka/config.rb +53 -19
  23. data/lib/rdkafka/consumer/headers.rb +2 -4
  24. data/lib/rdkafka/consumer.rb +119 -93
  25. data/lib/rdkafka/error.rb +60 -1
  26. data/lib/rdkafka/helpers/time.rb +14 -0
  27. data/lib/rdkafka/metadata.rb +4 -4
  28. data/lib/rdkafka/native_kafka.rb +6 -1
  29. data/lib/rdkafka/producer/delivery_handle.rb +16 -1
  30. data/lib/rdkafka/producer/delivery_report.rb +3 -2
  31. data/lib/rdkafka/producer.rb +89 -17
  32. data/lib/rdkafka/version.rb +3 -3
  33. data/lib/rdkafka.rb +10 -1
  34. data/renovate.json +6 -0
  35. data/spec/rdkafka/abstract_handle_spec.rb +0 -2
  36. data/spec/rdkafka/admin/create_topic_handle_spec.rb +4 -4
  37. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
  38. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +3 -3
  39. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
  40. data/spec/rdkafka/admin_spec.rb +1 -2
  41. data/spec/rdkafka/bindings_spec.rb +0 -1
  42. data/spec/rdkafka/callbacks_spec.rb +0 -2
  43. data/spec/rdkafka/config_spec.rb +8 -2
  44. data/spec/rdkafka/consumer/headers_spec.rb +0 -2
  45. data/spec/rdkafka/consumer/message_spec.rb +0 -2
  46. data/spec/rdkafka/consumer/partition_spec.rb +0 -2
  47. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -2
  48. data/spec/rdkafka/consumer_spec.rb +122 -38
  49. data/spec/rdkafka/error_spec.rb +0 -2
  50. data/spec/rdkafka/metadata_spec.rb +2 -3
  51. data/spec/rdkafka/native_kafka_spec.rb +2 -3
  52. data/spec/rdkafka/producer/delivery_handle_spec.rb +15 -2
  53. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
  54. data/spec/rdkafka/producer_spec.rb +293 -1
  55. data/spec/spec_helper.rb +7 -1
  56. data.tar.gz.sig +0 -0
  57. metadata +31 -28
  58. metadata.gz.sig +0 -0
  59. data/certs/karafka-pro.pem +0 -11
data/lib/rdkafka/admin.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "objspace"
-
  module Rdkafka
  class Admin
  # @private
@@ -30,11 +28,12 @@ module Rdkafka

  # Create a topic with the given partition count and replication factor
  #
+ # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of
+ # creating the topic
+ #
  # @raise [ConfigError] When the partition count or replication factor are out of valid range
  # @raise [RdkafkaError] When the topic name is invalid or the topic already exists
  # @raise [RdkafkaError] When the topic configuration is invalid
- #
- # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
  def create_topic(topic_name, partition_count, replication_factor, topic_config={})
  closed_admin_check(__method__)

@@ -107,11 +106,11 @@ module Rdkafka
  create_topic_handle
  end

- # Delete the named topic
+ # Deletes the named topic
  #
+ # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of
+ # deleting the topic
  # @raise [RdkafkaError] When the topic name is invalid or the topic does not exist
- #
- # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
  def delete_topic(topic_name)
  closed_admin_check(__method__)

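For reference, a minimal usage sketch of the two admin calls documented above. The broker address and topic name are assumptions; both calls return handles whose #wait blocks until the operation completes and raises RdkafkaError on failure:

require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

# CreateTopicHandle: wait for the topic to be created (3 partitions, replication factor 1)
admin.create_topic("example-topic", 3, 1).wait(max_wait_timeout: 15)

# DeleteTopicHandle: wait for the topic to be removed again
admin.delete_topic("example-topic").wait(max_wait_timeout: 15)

admin.close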
data/lib/rdkafka/bindings.rb CHANGED
@@ -1,11 +1,16 @@
  # frozen_string_literal: true

- require "ffi"
- require "json"
- require "logger"
-
  module Rdkafka
  # @private
+ #
+ # @note
+ # There are two types of responses related to errors:
+ # - rd_kafka_error_t - a C object that we need to remap into an error or null when no error
+ # - rd_kafka_resp_err_t - response error code (numeric) that we can use directly
+ #
+ # It is critical to ensure, that we handle them correctly. The result type should be:
+ # - rd_kafka_error_t - :pointer
+ # - rd_kafka_resp_err_t - :int
  module Bindings
  extend FFI::Library

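A hedged illustration of the note above, using bindings attached in this file; client_ptr stands for a hypothetical raw librdkafka handle, which the gem does not normally expose:

# rd_kafka_resp_err_t results arrive as plain integers; 0 means success
err = Rdkafka::Bindings.rd_kafka_flush(client_ptr, 5_000)
raise Rdkafka::Bindings.rd_kafka_err2str(err) unless err.zero?

# rd_kafka_error_t results arrive as pointers; NULL means success, and a
# non-NULL error object must be inspected and then destroyed by the caller
error_ptr = Rdkafka::Bindings.rd_kafka_begin_transaction(client_ptr)
unless error_ptr.null?
  code = Rdkafka::Bindings.rd_kafka_error_code(error_ptr)
  Rdkafka::Bindings.rd_kafka_error_destroy(error_ptr)
  raise Rdkafka::Bindings.rd_kafka_err2str(code)
end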
@@ -35,7 +40,7 @@ module Rdkafka

  # Polling

- attach_function :rd_kafka_flush, [:pointer, :int], :void, blocking: true
+ attach_function :rd_kafka_flush, [:pointer, :int], :int, blocking: true
  attach_function :rd_kafka_poll, [:pointer, :int], :void, blocking: true
  attach_function :rd_kafka_outq_len, [:pointer], :int, blocking: true

@@ -96,6 +101,11 @@ module Rdkafka

  attach_function :rd_kafka_err2name, [:int], :string
  attach_function :rd_kafka_err2str, [:int], :string
+ attach_function :rd_kafka_error_is_fatal, [:pointer], :int
+ attach_function :rd_kafka_error_is_retriable, [:pointer], :int
+ attach_function :rd_kafka_error_txn_requires_abort, [:pointer], :int
+ attach_function :rd_kafka_error_destroy, [:pointer], :void
+ attach_function :rd_kafka_error_code, [:pointer], :int

  # Configuration

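A sketch of how the newly attached error helpers can be combined; error_ptr stands for a hypothetical non-NULL rd_kafka_error_t pointer returned by one of the transactional calls attached further below:

def describe_rd_kafka_error(error_ptr)
  details = {
    code:           Rdkafka::Bindings.rd_kafka_error_code(error_ptr),
    fatal:          Rdkafka::Bindings.rd_kafka_error_is_fatal(error_ptr) == 1,
    retriable:      Rdkafka::Bindings.rd_kafka_error_is_retriable(error_ptr) == 1,
    requires_abort: Rdkafka::Bindings.rd_kafka_error_txn_requires_abort(error_ptr) == 1
  }
  # the caller owns the error object and must free it
  Rdkafka::Bindings.rd_kafka_error_destroy(error_ptr)
  details
end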
@@ -157,7 +167,7 @@ module Rdkafka
  :void, [:pointer, :int, :string, :pointer]
  ) do |_client_prr, err_code, reason, _opaque|
  if Rdkafka::Config.error_callback
- error = Rdkafka::RdkafkaError.new(err_code, broker_message: reason)
+ error = Rdkafka::RdkafkaError.build(err_code, broker_message: reason)
  error.set_backtrace(caller)
  Rdkafka::Config.error_callback.call(error)
  end
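The error callback path now wraps broker errors via RdkafkaError.build; registering a callback is unchanged. A small sketch using existing RdkafkaError readers:

Rdkafka::Config.error_callback = ->(error) do
  # error is an Rdkafka::RdkafkaError carrying the librdkafka reason
  warn "rdkafka error #{error.code}: #{error.broker_message}"
end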
@@ -194,6 +204,7 @@ module Rdkafka
  attach_function :rd_kafka_resume_partitions, [:pointer, :pointer], :int, blocking: true
  attach_function :rd_kafka_seek, [:pointer, :int32, :int64, :int], :int, blocking: true
  attach_function :rd_kafka_offsets_for_times, [:pointer, :pointer, :int], :int, blocking: true
+ attach_function :rd_kafka_position, [:pointer, :pointer], :int, blocking: true

  # Headers
  attach_function :rd_kafka_header_get_all, [:pointer, :size_t, :pointer, :pointer, SizePtr], :int
@@ -255,12 +266,19 @@ module Rdkafka
  RD_KAFKA_VTYPE_TIMESTAMP = 8
  RD_KAFKA_VTYPE_HEADER = 9
  RD_KAFKA_VTYPE_HEADERS = 10
+ RD_KAFKA_PURGE_F_QUEUE = 1
+ RD_KAFKA_PURGE_F_INFLIGHT = 2

  RD_KAFKA_MSG_F_COPY = 0x2

  attach_function :rd_kafka_producev, [:pointer, :varargs], :int, blocking: true
+ attach_function :rd_kafka_purge, [:pointer, :int], :int, blocking: true
  callback :delivery_cb, [:pointer, :pointer, :pointer], :void
  attach_function :rd_kafka_conf_set_dr_msg_cb, [:pointer, :delivery_cb], :void
+ attach_function :rd_kafka_init_transactions, [:pointer, :int], :pointer, blocking: true
+ attach_function :rd_kafka_begin_transaction, [:pointer], :pointer, blocking: true
+ attach_function :rd_kafka_abort_transaction, [:pointer, :int], :pointer, blocking: true
+ attach_function :rd_kafka_commit_transaction, [:pointer, :int], :pointer, blocking: true

  # Partitioner
  PARTITIONERS = %w(random consistent consistent_random murmur2 murmur2_random fnv1a fnv1a_random).each_with_object({}) do |name, hsh|
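A hedged, low-level sketch of driving the transactional bindings attached above; producer_ptr is a hypothetical raw producer handle configured with transactional.id. Each transactional call returns an rd_kafka_error_t pointer that is NULL on success and must be destroyed otherwise:

def txn_check!(error_ptr)
  return if error_ptr.null?

  code = Rdkafka::Bindings.rd_kafka_error_code(error_ptr)
  Rdkafka::Bindings.rd_kafka_error_destroy(error_ptr)
  raise "transactional call failed: #{Rdkafka::Bindings.rd_kafka_err2str(code)}"
end

txn_check!(Rdkafka::Bindings.rd_kafka_init_transactions(producer_ptr, 10_000))
txn_check!(Rdkafka::Bindings.rd_kafka_begin_transaction(producer_ptr))

begin
  # ... produce messages here ...
  txn_check!(Rdkafka::Bindings.rd_kafka_commit_transaction(producer_ptr, 10_000))
rescue StandardError
  # drop queued and in-flight messages, then abort the transaction
  Rdkafka::Bindings.rd_kafka_purge(
    producer_ptr,
    Rdkafka::Bindings::RD_KAFKA_PURGE_F_QUEUE | Rdkafka::Bindings::RD_KAFKA_PURGE_F_INFLIGHT
  )
  txn_check!(Rdkafka::Bindings.rd_kafka_abort_transaction(producer_ptr, 10_000))
end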
data/lib/rdkafka/config.rb CHANGED
@@ -1,11 +1,9 @@
  # frozen_string_literal: true

- require "logger"
-
  module Rdkafka
  # Configuration for a Kafka consumer or producer. You can create an instance and use
  # the consumer and producer methods to create a client. Documentation of the available
- # configuration options is available on https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md.
+ # configuration options is available on https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md.
  class Config
  # @private
  @@logger = Logger.new(STDOUT)
@@ -14,7 +12,7 @@ module Rdkafka
  # @private
  @@error_callback = nil
  # @private
- @@opaques = {}
+ @@opaques = ObjectSpace::WeakMap.new
  # @private
  @@log_queue = Queue.new

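Switching @@opaques from a Hash to ObjectSpace::WeakMap means registered opaques no longer keep their entries alive for the life of the process. A tiny illustration of the WeakMap behaviour this relies on:

map = ObjectSpace::WeakMap.new
key = Object.new
map[key] = +"some opaque state"

map.key?(key) # => true while `key` is still referenced elsewhere

key = nil
# once the key has been garbage collected, the entry is dropped automatically,
# so stale registrations do not accumulate as they would in a plain Hash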
@@ -53,13 +51,13 @@ module Rdkafka

  # Set a callback that will be called every time the underlying client emits statistics.
  # You can configure if and how often this happens using `statistics.interval.ms`.
- # The callback is called with a hash that's documented here: https://github.com/edenhill/librdkafka/blob/master/STATISTICS.md
+ # The callback is called with a hash that's documented here: https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
  #
  # @param callback [Proc, #call] The callback
  #
  # @return [nil]
  def self.statistics_callback=(callback)
- raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
+ raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
  @@statistics_callback = callback
  end

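With the relaxed check, nil can now be assigned to clear a previously registered callback. A short sketch (msg_cnt is one of the top-level keys in librdkafka's statistics payload, and statistics.interval.ms must be set for anything to be emitted):

Rdkafka::Config.statistics_callback = ->(stats) do
  puts "messages waiting in internal queues: #{stats['msg_cnt']}"
end

# later: stop handling statistics again
Rdkafka::Config.statistics_callback = nil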
@@ -114,6 +112,7 @@ module Rdkafka
  def initialize(config_hash = {})
  @config_hash = DEFAULT_CONFIG.merge(config_hash)
  @consumer_rebalance_listener = nil
+ @consumer_poll_set = true
  end

  # Set a config option.
@@ -142,12 +141,28 @@ module Rdkafka
  @consumer_rebalance_listener = listener
  end

- # Create a consumer with this configuration.
+ # Should we use a single queue for the underlying consumer and events.
  #
- # @raise [ConfigError] When the configuration contains invalid options
- # @raise [ClientCreationError] When the native client cannot be created
+ # This is an advanced API that allows for more granular control of the polling process.
+ # When this value is set to `false` (`true` by defualt), there will be two queues that need to
+ # be polled:
+ # - main librdkafka queue for events
+ # - consumer queue with messages and rebalances
+ #
+ # It is recommended to use the defaults and only set it to `false` in advance multi-threaded
+ # and complex cases where granular events handling control is needed.
+ #
+ # @param poll_set [Boolean]
+ def consumer_poll_set=(poll_set)
+ @consumer_poll_set = poll_set
+ end
+
+ # Creates a consumer with this configuration.
  #
  # @return [Consumer] The created consumer
+ #
+ # @raise [ConfigError] When the configuration contains invalid options
+ # @raise [ClientCreationError] When the native client cannot be created
  def consumer
  opaque = Opaque.new
  config = native_config(opaque)
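A usage sketch for the new consumer_poll_set= option described above; the broker address and group id are assumptions, and the default (true) remains the recommended path:

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",
  "group.id" => "example-group"
)

# keep librdkafka's main event queue separate from the consumer queue
config.consumer_poll_set = false

consumer = config.consumer
# consumer.poll now only serves messages and rebalances; main-queue events
# have to be polled separately by the application (that wrapper is outside
# this excerpt)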
@@ -160,19 +175,25 @@ module Rdkafka
  # Create native client
  kafka = native_kafka(config, :rd_kafka_consumer)

- # Redirect the main queue to the consumer
- Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka)
+ # Redirect the main queue to the consumer queue
+ Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka) if @consumer_poll_set

  # Return consumer with Kafka client
- Rdkafka::Consumer.new(Rdkafka::NativeKafka.new(kafka, run_polling_thread: false))
+ Rdkafka::Consumer.new(
+ Rdkafka::NativeKafka.new(
+ kafka,
+ run_polling_thread: false,
+ opaque: opaque
+ )
+ )
  end

  # Create a producer with this configuration.
  #
+ # @return [Producer] The created producer
+ #
  # @raise [ConfigError] When the configuration contains invalid options
  # @raise [ClientCreationError] When the native client cannot be created
- #
- # @return [Producer] The created producer
  def producer
  # Create opaque
  opaque = Opaque.new
@@ -182,22 +203,35 @@ module Rdkafka
  Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
  # Return producer with Kafka client
  partitioner_name = self[:partitioner] || self["partitioner"]
- Rdkafka::Producer.new(Rdkafka::NativeKafka.new(native_kafka(config, :rd_kafka_producer), run_polling_thread: true), partitioner_name).tap do |producer|
+ Rdkafka::Producer.new(
+ Rdkafka::NativeKafka.new(
+ native_kafka(config, :rd_kafka_producer),
+ run_polling_thread: true,
+ opaque: opaque
+ ),
+ partitioner_name
+ ).tap do |producer|
  opaque.producer = producer
  end
  end

- # Create an admin instance with this configuration.
+ # Creates an admin instance with this configuration.
+ #
+ # @return [Admin] The created admin instance
  #
  # @raise [ConfigError] When the configuration contains invalid options
  # @raise [ClientCreationError] When the native client cannot be created
- #
- # @return [Admin] The created admin instance
  def admin
  opaque = Opaque.new
  config = native_config(opaque)
  Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
- Rdkafka::Admin.new(Rdkafka::NativeKafka.new(native_kafka(config, :rd_kafka_producer), run_polling_thread: true))
+ Rdkafka::Admin.new(
+ Rdkafka::NativeKafka.new(
+ native_kafka(config, :rd_kafka_producer),
+ run_polling_thread: true,
+ opaque: opaque
+ )
+ )
  end

  # Error that is returned by the underlying rdkafka error if an invalid configuration option is present.
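End to end, the factory methods above are used the same way as before; a minimal producer round trip, with the broker address and topic name as assumptions:

producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

handle = producer.produce(topic: "example-topic", payload: "hello", key: "k1")
report = handle.wait(max_wait_timeout: 10) # => Rdkafka::Producer::DeliveryReport
puts "delivered to partition #{report.partition} at offset #{report.offset}"

producer.close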
data/lib/rdkafka/consumer/headers.rb CHANGED
@@ -18,13 +18,11 @@

  # Reads a librdkafka native message's headers and returns them as a Ruby Hash
  #
- # @param [librdkakfa message] native_message
+ # @private
  #
+ # @param [librdkakfa message] native_message
  # @return [Hash<String, String>] headers Hash for the native_message
- #
  # @raise [Rdkafka::RdkafkaError] when fail to read headers
- #
- # @private
  def self.from_native(native_message)
  headers_ptrptr = FFI::MemoryPointer.new(:pointer)
  err = Rdkafka::Bindings.rd_kafka_message_headers(native_message, headers_ptrptr)