karafka-rdkafka 0.13.8 → 0.13.10

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (45)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.gitignore +4 -0
  4. data/.rspec +1 -0
  5. data/.ruby-gemset +1 -0
  6. data/.ruby-version +1 -0
  7. data/CHANGELOG.md +41 -32
  8. data/{LICENSE → MIT-LICENSE} +2 -1
  9. data/README.md +11 -11
  10. data/dist/librdkafka_2.2.0.tar.gz +0 -0
  11. data/ext/README.md +1 -1
  12. data/ext/Rakefile +53 -26
  13. data/lib/rdkafka/abstract_handle.rb +37 -24
  14. data/lib/rdkafka/admin.rb +6 -7
  15. data/lib/rdkafka/bindings.rb +0 -4
  16. data/lib/rdkafka/config.rb +30 -15
  17. data/lib/rdkafka/consumer/headers.rb +2 -4
  18. data/lib/rdkafka/consumer.rb +50 -53
  19. data/lib/rdkafka/helpers/time.rb +14 -0
  20. data/lib/rdkafka/producer.rb +8 -15
  21. data/lib/rdkafka/version.rb +1 -1
  22. data/lib/rdkafka.rb +10 -1
  23. data/spec/rdkafka/abstract_handle_spec.rb +0 -2
  24. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
  25. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
  26. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
  27. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
  28. data/spec/rdkafka/admin_spec.rb +0 -1
  29. data/spec/rdkafka/bindings_spec.rb +0 -1
  30. data/spec/rdkafka/callbacks_spec.rb +0 -2
  31. data/spec/rdkafka/config_spec.rb +8 -2
  32. data/spec/rdkafka/consumer/headers_spec.rb +0 -2
  33. data/spec/rdkafka/consumer/message_spec.rb +0 -2
  34. data/spec/rdkafka/consumer/partition_spec.rb +0 -2
  35. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -2
  36. data/spec/rdkafka/consumer_spec.rb +47 -1
  37. data/spec/rdkafka/error_spec.rb +0 -2
  38. data/spec/rdkafka/metadata_spec.rb +0 -1
  39. data/spec/rdkafka/native_kafka_spec.rb +0 -2
  40. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
  41. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
  42. data/spec/rdkafka/producer_spec.rb +0 -1
  43. data.tar.gz.sig +3 -2
  44. metadata +8 -4
  45. metadata.gz.sig +0 -0
data/lib/rdkafka/config.rb CHANGED
@@ -1,11 +1,9 @@
 # frozen_string_literal: true

-require "logger"
-
 module Rdkafka
   # Configuration for a Kafka consumer or producer. You can create an instance and use
   # the consumer and producer methods to create a client. Documentation of the available
-  # configuration options is available on https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md.
+  # configuration options is available on https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md.
   class Config
     # @private
     @@logger = Logger.new(STDOUT)
@@ -53,13 +51,13 @@ module Rdkafka

     # Set a callback that will be called every time the underlying client emits statistics.
     # You can configure if and how often this happens using `statistics.interval.ms`.
-    # The callback is called with a hash that's documented here: https://github.com/edenhill/librdkafka/blob/master/STATISTICS.md
+    # The callback is called with a hash that's documented here: https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
     #
     # @param callback [Proc, #call] The callback
     #
     # @return [nil]
     def self.statistics_callback=(callback)
-      raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
+      raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
       @@statistics_callback = callback
     end

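With the relaxed guard above, `Rdkafka::Config.statistics_callback=` accepts nil as well as a callable, so statistics reporting can be switched off again after it has been enabled. A minimal sketch (the callback body is illustrative):

    # Install a callback; the emitted hash follows librdkafka's STATISTICS.md layout
    Rdkafka::Config.statistics_callback = ->(stats) { puts stats["name"] }

    # Assigning nil, which previously raised TypeError, now clears the callback
    Rdkafka::Config.statistics_callback = nil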
@@ -114,6 +112,7 @@ module Rdkafka
     def initialize(config_hash = {})
       @config_hash = DEFAULT_CONFIG.merge(config_hash)
       @consumer_rebalance_listener = nil
+      @consumer_poll_set = true
     end

     # Set a config option.
@@ -142,12 +141,28 @@
       @consumer_rebalance_listener = listener
     end

-    # Create a consumer with this configuration.
+    # Should we use a single queue for the underlying consumer and events.
     #
-    # @raise [ConfigError] When the configuration contains invalid options
-    # @raise [ClientCreationError] When the native client cannot be created
+    # This is an advanced API that allows for more granular control of the polling process.
+    # When this value is set to `false` (`true` by default), there will be two queues that need to
+    # be polled:
+    # - main librdkafka queue for events
+    # - consumer queue with messages and rebalances
+    #
+    # It is recommended to use the defaults and only set it to `false` in advanced multi-threaded
+    # and complex cases where granular events handling control is needed.
+    #
+    # @param poll_set [Boolean]
+    def consumer_poll_set=(poll_set)
+      @consumer_poll_set = poll_set
+    end
+
+    # Creates a consumer with this configuration.
     #
     # @return [Consumer] The created consumer
+    #
+    # @raise [ConfigError] When the configuration contains invalid options
+    # @raise [ClientCreationError] When the native client cannot be created
     def consumer
       opaque = Opaque.new
       config = native_config(opaque)
@@ -160,8 +175,8 @@ module Rdkafka
       # Create native client
       kafka = native_kafka(config, :rd_kafka_consumer)

-      # Redirect the main queue to the consumer
-      Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka)
+      # Redirect the main queue to the consumer queue
+      Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka) if @consumer_poll_set

       # Return consumer with Kafka client
       Rdkafka::Consumer.new(
@@ -175,10 +190,10 @@

     # Create a producer with this configuration.
     #
+    # @return [Producer] The created producer
+    #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    #
-    # @return [Producer] The created producer
     def producer
       # Create opaque
       opaque = Opaque.new
@@ -200,12 +215,12 @@
       end
     end

-    # Create an admin instance with this configuration.
+    # Creates an admin instance with this configuration.
+    #
+    # @return [Admin] The created admin instance
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    #
-    # @return [Admin] The created admin instance
     def admin
       opaque = Opaque.new
       config = native_config(opaque)
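The new `consumer_poll_set` setting controls whether the main librdkafka event queue is redirected into the consumer queue (the default, matching previous behavior). A minimal sketch of opting out, with an illustrative broker address and group id:

    config = Rdkafka::Config.new(
      "bootstrap.servers" => "localhost:9092", # illustrative
      "group.id" => "example-group"            # illustrative
    )
    # Keep the main event queue separate from the consumer queue
    config.consumer_poll_set = false
    consumer = config.consumer

With separate queues, both `#poll` and the new `#events_poll` have to be served, as shown with the consumer diff below.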
data/lib/rdkafka/consumer/headers.rb CHANGED
@@ -18,13 +18,11 @@ module Rdkafka

       # Reads a librdkafka native message's headers and returns them as a Ruby Hash
       #
-      # @param [librdkakfa message] native_message
+      # @private
       #
+      # @param [librdkafka message] native_message
       # @return [Hash<String, String>] headers Hash for the native_message
-      #
       # @raise [Rdkafka::RdkafkaError] when fail to read headers
-      #
-      # @private
       def self.from_native(native_message)
         headers_ptrptr = FFI::MemoryPointer.new(:pointer)
         err = Rdkafka::Bindings.rd_kafka_message_headers(native_message, headers_ptrptr)
data/lib/rdkafka/consumer.rb CHANGED
@@ -12,6 +12,7 @@ module Rdkafka
   # `each_slice` to consume batches of messages.
   class Consumer
     include Enumerable
+    include Helpers::Time

     # @private
     def initialize(native_kafka)
@@ -54,13 +55,11 @@ module Rdkafka
       @native_kafka.closed?
     end

-    # Subscribe to one or more topics letting Kafka handle partition assignments.
+    # Subscribes to one or more topics letting Kafka handle partition assignments.
     #
     # @param topics [Array<String>] One or more topic names
-    #
-    # @raise [RdkafkaError] When subscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When subscribing fails
     def subscribe(*topics)
       closed_consumer_check(__method__)

@@ -83,9 +82,8 @@

     # Unsubscribe from all subscribed topics.
     #
-    # @raise [RdkafkaError] When unsubscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When unsubscribing fails
     def unsubscribe
       closed_consumer_check(__method__)

@@ -101,10 +99,8 @@
     # Pause producing or consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
     def pause(list)
       closed_consumer_check(__method__)

@@ -128,13 +124,11 @@
       end
     end

-    # Resume producing consumption for the provided list of partitions
+    # Resumes producing consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaError] When resume subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When resume subscription fails.
     def resume(list)
       closed_consumer_check(__method__)

@@ -157,11 +151,10 @@
       end
     end

-    # Return the current subscription to topics and partitions
-    #
-    # @raise [RdkafkaError] When getting the subscription fails.
+    # Returns the current subscription to topics and partitions
     #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the subscription fails.
     def subscription
       closed_consumer_check(__method__)

@@ -184,7 +177,6 @@
     # Atomic assignment of partitions to consume
     #
     # @param list [TopicPartitionList] The topic with partitions to assign
-    #
     # @raise [RdkafkaError] When assigning fails
     def assign(list)
       closed_consumer_check(__method__)
@@ -208,9 +200,8 @@

     # Returns the current partition assignment.
     #
-    # @raise [RdkafkaError] When getting the assignment fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the assignment fails.
     def assignment
       closed_consumer_check(__method__)

@@ -244,14 +235,14 @@
     end

     # Return the current committed offset per partition for this consumer group.
-    # The offset field of each requested partition will either be set to stored offset or to -1001 in case there was no stored offset for that partition.
+    # The offset field of each requested partition will either be set to stored offset or to -1001
+    # in case there was no stored offset for that partition.
     #
-    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil to use the current subscription.
+    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil
+    #   to use the current subscription.
     # @param timeout_ms [Integer] The timeout for fetching this information.
-    #
-    # @raise [RdkafkaError] When getting the committed positions fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the committed positions fails.
     def committed(list=nil, timeout_ms=1200)
       closed_consumer_check(__method__)

@@ -307,10 +298,8 @@
     # @param topic [String] The topic to query
     # @param partition [Integer] The partition to query
     # @param timeout_ms [Integer] The timeout for querying the broker
-    #
-    # @raise [RdkafkaError] When querying the broker fails.
-    #
     # @return [Integer] The low and high watermark
+    # @raise [RdkafkaError] When querying the broker fails.
     def query_watermark_offsets(topic, partition, timeout_ms=200)
       closed_consumer_check(__method__)

@@ -343,10 +332,9 @@
     #
     # @param topic_partition_list [TopicPartitionList] The list to calculate lag for.
     # @param watermark_timeout_ms [Integer] The timeout for each query watermark call.
-    #
+    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag
+    #   per partition
     # @raise [RdkafkaError] When querying the broker fails.
-    #
-    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag per partition
     def lag(topic_partition_list, watermark_timeout_ms=100)
       out = {}

@@ -395,10 +383,8 @@
     # When using this `enable.auto.offset.store` should be set to `false` in the config.
     #
     # @param message [Rdkafka::Consumer::Message] The message which offset will be stored
-    #
-    # @raise [RdkafkaError] When storing the offset fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When storing the offset fails
     def store_offset(message)
       closed_consumer_check(__method__)

@@ -430,10 +416,8 @@
     # message at the given offset.
     #
     # @param message [Rdkafka::Consumer::Message] The message to which to seek
-    #
-    # @raise [RdkafkaError] When seeking fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When seeking fails
     def seek(message)
       closed_consumer_check(__method__)

@@ -503,10 +487,8 @@
     #
     # @param list [TopicPartitionList,nil] The topic with partitions to commit
     # @param async [Boolean] Whether to commit async or wait for the commit to finish
-    #
-    # @raise [RdkafkaError] When committing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When committing fails
     def commit(list=nil, async=false)
       closed_consumer_check(__method__)

@@ -532,10 +514,8 @@
     # Poll for the next message on one of the subscribed topics
     #
     # @param timeout_ms [Integer] Timeout of this poll
-    #
-    # @raise [RdkafkaError] When polling fails
-    #
     # @return [Message, nil] A message or nil if there was no new message within the timeout
+    # @raise [RdkafkaError] When polling fails
     def poll(timeout_ms)
       closed_consumer_check(__method__)

@@ -561,17 +541,40 @@
       end
     end

+    # Polls the main rdkafka queue (not the consumer one). Do **NOT** use it if `consumer_poll_set`
+    # was set to `true`.
+    #
+    # Events will cause application-provided callbacks to be called.
+    #
+    # Events (in the context of the consumer):
+    # - error callbacks
+    # - stats callbacks
+    # - any other callbacks supported by librdkafka that are not part of the consumer_poll, that
+    #   would have a callback configured and activated.
+    #
+    # This method needs to be called at regular intervals to serve any queued callbacks waiting to
+    # be called. When in use, does **NOT** replace `#poll` but needs to run complementary with it.
+    #
+    # @param timeout_ms [Integer] poll timeout. If set to 0 will run async, when set to -1 will
+    #   block until any events available.
+    #
+    # @note This method technically should be called `#poll` and the current `#poll` should be
+    #   called `#consumer_poll` though we keep the current naming convention to make it backward
+    #   compatible.
+    def events_poll(timeout_ms = 0)
+      @native_kafka.with_inner do |inner|
+        Rdkafka::Bindings.rd_kafka_poll(inner, timeout_ms)
+      end
+    end
+
     # Poll for new messages and yield for each received one. Iteration
     # will end when the consumer is closed.
     #
-    # If `enable.partition.eof` is turned on in the config this will raise an
-    # error when an eof is reached, so you probably want to disable that when
-    # using this method of iteration.
+    # If `enable.partition.eof` is turned on in the config this will raise an error when an eof is
+    # reached, so you probably want to disable that when using this method of iteration.
     #
     # @raise [RdkafkaError] When polling fails
-    #
     # @yieldparam message [Message] Received message
-    #
     # @return [nil]
     def each
       loop do
@@ -624,9 +627,7 @@
     # that you may or may not see again.
     #
     # @param max_items [Integer] Maximum size of the yielded array of messages
-    #
     # @param bytes_threshold [Integer] Threshold number of total message bytes in the yielded array of messages
-    #
     # @param timeout_ms [Integer] max time to wait for up to max_items
     #
     # @raise [RdkafkaError] When polling fails
@@ -673,10 +674,6 @@
     end

     private
-    def monotonic_now
-      # needed because Time.now can go backwards
-      Process.clock_gettime(Process::CLOCK_MONOTONIC)
-    end

     def closed_consumer_check(method)
       raise Rdkafka::ClosedConsumerError.new(method) if closed?
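When a consumer was created with `consumer_poll_set = false`, error and statistics callbacks no longer piggyback on `#poll`, so the new `#events_poll` has to run alongside it. A sketch of such a loop (topic name and message handling are illustrative):

    consumer.subscribe("example_topic")

    loop do
      # Consumer queue: messages and rebalances
      message = consumer.poll(250)
      # Main queue: error/stats callbacks; timeout 0 returns immediately
      consumer.events_poll(0)
      puts message.payload if message
    end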
data/lib/rdkafka/helpers/time.rb ADDED
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  # Namespace for some small utilities used in multiple components
+  module Helpers
+    # Time related methods used across Karafka
+    module Time
+      # @return [Float] current monotonic time in seconds with microsecond precision
+      def monotonic_now
+        ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+      end
+    end
+  end
+end
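`Helpers::Time` centralizes the monotonic clock that `Consumer` and `Producer` previously each defined privately. Any object can mix it in the same way; a small sketch (the `ElapsedTimer` class is hypothetical):

    class ElapsedTimer
      include Rdkafka::Helpers::Time

      # Duration of the block in seconds, immune to wall-clock jumps
      def measure
        started_at = monotonic_now
        yield
        monotonic_now - started_at
      end
    end

    ElapsedTimer.new.measure { sleep(0.1) } # => ~0.1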
data/lib/rdkafka/producer.rb CHANGED
@@ -1,10 +1,10 @@
 # frozen_string_literal: true

-require "objspace"
-
 module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
+    include Helpers::Time
+
     # Cache partitions count for 30 seconds
     PARTITIONS_COUNT_TTL = 30

@@ -167,18 +167,16 @@ module Rdkafka
     end

     # Partition count for a given topic.
-    # NOTE: If 'allow.auto.create.topics' is set to true in the broker, the topic will be auto-created after returning nil.
     #
     # @param topic [String] The topic name.
+    # @return [Integer] partition count for a given topic
     #
-    # @return partition count [Integer,nil]
-    #
-    # We cache the partition count for a given topic for given time
-    # This prevents us in case someone uses `partition_key` from querying for the count with
-    # each message. Instead we query once every 30 seconds at most
+    # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
+    #   auto-created after returning nil.
     #
-    # @param topic [String] topic name
-    # @return [Integer] partition count for a given topic
+    # @note We cache the partition count for a given topic for given time.
+    #   This prevents us in case someone uses `partition_key` from querying for the count with
+    #   each message. Instead we query once every 30 seconds at most
     def partition_count(topic)
       closed_producer_check(__method__)

@@ -308,11 +306,6 @@

     private

-    def monotonic_now
-      # needed because Time.now can go backwards
-      Process.clock_gettime(Process::CLOCK_MONOTONIC)
-    end
-
     def closed_producer_check(method)
       raise Rdkafka::ClosedProducerError.new(method) if closed?
     end
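The reworked `partition_count` docs describe the caching that backs `partition_key`-based produces: a topic's count is reused for `PARTITIONS_COUNT_TTL` (30) seconds instead of being fetched per message. Illustrative usage (broker address and topic name assumed):

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
    # First call queries the cluster; repeated calls within 30 seconds hit the cache
    count = producer.partition_count("example_topic")
    puts "example_topic has #{count} partitions"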
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
 # frozen_string_literal: true

 module Rdkafka
-  VERSION = "0.13.8"
+  VERSION = "0.13.10"
   LIBRDKAFKA_VERSION = "2.2.0"
   LIBRDKAFKA_SOURCE_SHA256 = "af9a820cbecbc64115629471df7c7cecd40403b6c34bfdbb9223152677a47226"
 end
data/lib/rdkafka.rb CHANGED
@@ -1,7 +1,12 @@
 # frozen_string_literal: true

-require "rdkafka/version"
+require "logger"
+require "objspace"
+require "ffi"
+require "json"

+require "rdkafka/version"
+require "rdkafka/helpers/time"
 require "rdkafka/abstract_handle"
 require "rdkafka/admin"
 require "rdkafka/admin/create_topic_handle"
@@ -24,3 +29,7 @@ require "rdkafka/native_kafka"
 require "rdkafka/producer"
 require "rdkafka/producer/delivery_handle"
 require "rdkafka/producer/delivery_report"
+
+# Main Rdkafka namespace of this gem
+module Rdkafka
+end
data/spec/rdkafka/abstract_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::AbstractHandle do
   let(:response) { 0 }
   let(:result) { -1 }
data/spec/rdkafka/admin/create_topic_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Admin::CreateTopicHandle do
   let(:response) { 0 }

data/spec/rdkafka/admin/create_topic_report_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Admin::CreateTopicReport do
   subject { Rdkafka::Admin::CreateTopicReport.new(
     FFI::MemoryPointer.from_string("error string"),
data/spec/rdkafka/admin/delete_topic_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Admin::DeleteTopicHandle do
   let(:response) { 0 }

data/spec/rdkafka/admin/delete_topic_report_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Admin::DeleteTopicReport do
   subject { Rdkafka::Admin::DeleteTopicReport.new(
     FFI::MemoryPointer.from_string("error string"),
data/spec/rdkafka/admin_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
 require "ostruct"

 describe Rdkafka::Admin do
data/spec/rdkafka/bindings_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
 require 'zlib'

 describe Rdkafka::Bindings do
data/spec/rdkafka/callbacks_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Callbacks do

   # The code in the call back functions is 100% covered by other specs. Due to
data/spec/rdkafka/config_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Config do
   context "logger" do
     it "should have a default logger" do
@@ -115,6 +113,14 @@ describe Rdkafka::Config do
     consumer.close
   end

+  it "should create a consumer with consumer_poll_set set to false" do
+    config = rdkafka_consumer_config
+    config.consumer_poll_set = false
+    consumer = config.consumer
+    expect(consumer).to be_a Rdkafka::Consumer
+    consumer.close
+  end
+
   it "should raise an error when creating a consumer with invalid config" do
     config = Rdkafka::Config.new('invalid.key' => 'value')
     expect {
data/spec/rdkafka/consumer/headers_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Consumer::Headers do
   let(:headers) do
     { # Note String keys!
data/spec/rdkafka/consumer/message_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Consumer::Message do
   let(:native_client) { new_native_client }
   let(:native_topic) { new_native_topic(native_client: native_client) }
data/spec/rdkafka/consumer/partition_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Consumer::Partition do
   let(:offset) { 100 }
   let(:err) { 0 }
data/spec/rdkafka/consumer/topic_partition_list_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::Consumer::TopicPartitionList do
   it "should create a new list and add unassigned topics" do
     list = Rdkafka::Consumer::TopicPartitionList.new
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
 require "ostruct"
 require 'securerandom'

@@ -55,6 +54,30 @@ describe Rdkafka::Consumer do
         consumer.subscription
       }.to raise_error(Rdkafka::RdkafkaError)
     end
+
+    context "when using consumer without the poll set" do
+      let(:consumer) do
+        config = rdkafka_consumer_config
+        config.consumer_poll_set = false
+        config.consumer
+      end
+
+      it "should subscribe, unsubscribe and return the subscription" do
+        expect(consumer.subscription).to be_empty
+
+        consumer.subscribe("consume_test_topic")
+
+        expect(consumer.subscription).not_to be_empty
+        expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+          list.add_topic("consume_test_topic")
+        end
+        expect(consumer.subscription).to eq expected_subscription
+
+        consumer.unsubscribe
+
+        expect(consumer.subscription).to be_empty
+      end
+    end
   end

   describe "#pause and #resume" do
@@ -1077,6 +1100,29 @@ describe Rdkafka::Consumer do
     end
   end

+  # Only relevant in case of a consumer with separate queues
+  describe '#events_poll' do
+    let(:stats) { [] }
+
+    before { Rdkafka::Config.statistics_callback = ->(published) { stats << published } }
+
+    after { Rdkafka::Config.statistics_callback = nil }
+
+    let(:consumer) do
+      config = rdkafka_consumer_config('statistics.interval.ms': 100)
+      config.consumer_poll_set = false
+      config.consumer
+    end
+
+    it "expect to run events_poll, operate and propagate stats on events_poll and not poll" do
+      consumer.subscribe("consume_test_topic")
+      consumer.poll(1_000)
+      expect(stats).to be_empty
+      consumer.events_poll(-1)
+      expect(stats).not_to be_empty
+    end
+  end
+
   describe "a rebalance listener" do
     let(:consumer) do
       config = rdkafka_consumer_config
data/spec/rdkafka/error_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "spec_helper"
-
 describe Rdkafka::RdkafkaError do
   it "should raise a type error for a nil response" do
     expect {