karafka-rdkafka 0.13.7 → 0.13.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.gitignore +4 -0
  4. data/.rspec +1 -0
  5. data/.ruby-gemset +1 -0
  6. data/.ruby-version +1 -0
  7. data/CHANGELOG.md +40 -31
  8. data/{LICENSE → MIT-LICENSE} +2 -1
  9. data/README.md +11 -11
  10. data/ext/README.md +1 -1
  11. data/ext/Rakefile +1 -1
  12. data/lib/rdkafka/abstract_handle.rb +37 -24
  13. data/lib/rdkafka/admin.rb +6 -7
  14. data/lib/rdkafka/bindings.rb +1 -4
  15. data/lib/rdkafka/config.rb +30 -15
  16. data/lib/rdkafka/consumer/headers.rb +2 -4
  17. data/lib/rdkafka/consumer.rb +83 -53
  18. data/lib/rdkafka/helpers/time.rb +14 -0
  19. data/lib/rdkafka/producer.rb +8 -15
  20. data/lib/rdkafka/version.rb +1 -1
  21. data/lib/rdkafka.rb +10 -1
  22. data/spec/rdkafka/abstract_handle_spec.rb +0 -2
  23. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
  24. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
  25. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
  26. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
  27. data/spec/rdkafka/admin_spec.rb +0 -1
  28. data/spec/rdkafka/bindings_spec.rb +0 -1
  29. data/spec/rdkafka/callbacks_spec.rb +0 -2
  30. data/spec/rdkafka/config_spec.rb +8 -2
  31. data/spec/rdkafka/consumer/headers_spec.rb +0 -2
  32. data/spec/rdkafka/consumer/message_spec.rb +0 -2
  33. data/spec/rdkafka/consumer/partition_spec.rb +0 -2
  34. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -2
  35. data/spec/rdkafka/consumer_spec.rb +122 -38
  36. data/spec/rdkafka/error_spec.rb +0 -2
  37. data/spec/rdkafka/metadata_spec.rb +0 -1
  38. data/spec/rdkafka/native_kafka_spec.rb +0 -2
  39. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
  40. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
  41. data/spec/rdkafka/producer_spec.rb +0 -1
  42. data/spec/spec_helper.rb +1 -1
  43. data.tar.gz.sig +0 -0
  44. metadata +7 -4
  45. metadata.gz.sig +0 -0

data/lib/rdkafka/consumer.rb CHANGED

```diff
@@ -12,6 +12,7 @@ module Rdkafka
   # `each_slice` to consume batches of messages.
   class Consumer
     include Enumerable
+    include Helpers::Time
 
     # @private
     def initialize(native_kafka)
@@ -29,6 +30,13 @@ module Rdkafka
       ->(_) { close }
     end
 
+    # @return [String] consumer name
+    def name
+      @name ||= @native_kafka.with_inner do |inner|
+        ::Rdkafka::Bindings.rd_kafka_name(inner)
+      end
+    end
+
     # Close this consumer
     # @return [nil]
     def close
```
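
The new `#name` accessor simply proxies librdkafka's `rd_kafka_name` and memoizes the result. A minimal usage sketch (broker address and group id are illustrative):

```ruby
require "rdkafka"

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092", # illustrative broker
  "group.id"          => "example-group"
)

consumer = config.consumer
# Memoized on first call; resolved through the native handle, typically
# something like "rdkafka#consumer-1".
puts consumer.name
consumer.close
```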

```diff
@@ -47,13 +55,11 @@ module Rdkafka
       @native_kafka.closed?
     end
 
-    # Subscribe to one or more topics letting Kafka handle partition assignments.
+    # Subscribes to one or more topics letting Kafka handle partition assignments.
     #
     # @param topics [Array<String>] One or more topic names
-    #
-    # @raise [RdkafkaError] When subscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When subscribing fails
     def subscribe(*topics)
       closed_consumer_check(__method__)
 
@@ -76,9 +82,8 @@ module Rdkafka
 
     # Unsubscribe from all subscribed topics.
    #
-    # @raise [RdkafkaError] When unsubscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When unsubscribing fails
     def unsubscribe
       closed_consumer_check(__method__)
 
@@ -94,10 +99,8 @@ module Rdkafka
     # Pause producing or consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
     def pause(list)
       closed_consumer_check(__method__)
 
@@ -121,13 +124,11 @@ module Rdkafka
       end
     end
 
-    # Resume producing consumption for the provided list of partitions
+    # Resumes producing consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaError] When resume subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When resume subscription fails.
     def resume(list)
       closed_consumer_check(__method__)
 
```
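
Both `pause` and `resume` hunks only reorder the YARD tags; behavior is unchanged. For context, a hedged sketch of how the pair is used together (`throttle_downstream` is a hypothetical slow step):

```ruby
# Pause everything currently assigned, do slow work, then resume.
assignment = consumer.assignment

consumer.pause(assignment)  # raises RdkafkaTopicPartitionListError on failure
throttle_downstream         # hypothetical backpressure handling
consumer.resume(assignment) # raises RdkafkaError on failure
```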

```diff
@@ -150,11 +151,10 @@ module Rdkafka
       end
     end
 
-    # Return the current subscription to topics and partitions
-    #
-    # @raise [RdkafkaError] When getting the subscription fails.
+    # Returns the current subscription to topics and partitions
     #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the subscription fails.
     def subscription
       closed_consumer_check(__method__)
 
@@ -177,7 +177,6 @@ module Rdkafka
     # Atomic assignment of partitions to consume
     #
     # @param list [TopicPartitionList] The topic with partitions to assign
-    #
     # @raise [RdkafkaError] When assigning fails
     def assign(list)
       closed_consumer_check(__method__)
@@ -201,9 +200,8 @@ module Rdkafka
 
     # Returns the current partition assignment.
     #
-    # @raise [RdkafkaError] When getting the assignment fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the assignment fails.
     def assignment
       closed_consumer_check(__method__)
 
@@ -237,14 +235,14 @@ module Rdkafka
     end
 
     # Return the current committed offset per partition for this consumer group.
-    # The offset field of each requested partition will either be set to stored offset or to -1001 in case there was no stored offset for that partition.
+    # The offset field of each requested partition will either be set to stored offset or to -1001
+    # in case there was no stored offset for that partition.
     #
-    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil to use the current subscription.
+    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil
+    #   to use the current subscription.
     # @param timeout_ms [Integer] The timeout for fetching this information.
-    #
-    # @raise [RdkafkaError] When getting the committed positions fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the committed positions fails.
     def committed(list=nil, timeout_ms=1200)
       closed_consumer_check(__method__)
 
```
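
As the reflowed docs state, `#committed` defaults to the current subscription and marks partitions without a stored offset with -1001. An illustrative read-out (assuming `TopicPartitionList#to_h`, which the gem provides):

```ruby
committed = consumer.committed # nil list => current subscription, 1200 ms timeout

committed.to_h.each do |topic, partitions|
  partitions.each do |partition|
    # partition.offset is the stored offset, or -1001 when nothing is stored
    puts "#{topic}/#{partition.partition}: #{partition.offset}"
  end
end
```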

```diff
@@ -269,15 +267,39 @@ module Rdkafka
       end
     end
 
+    # Return the current positions (offsets) for topics and partitions.
+    # The offset field of each requested partition will be set to the offset of the last consumed message + 1, or nil in case there was no previous message.
+    #
+    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil to use the current subscription.
+    #
+    # @raise [RdkafkaError] When getting the positions fails.
+    #
+    # @return [TopicPartitionList]
+    def position(list=nil)
+      if list.nil?
+        list = assignment
+      elsif !list.is_a?(TopicPartitionList)
+        raise TypeError.new("list has to be nil or a TopicPartitionList")
+      end
+
+      tpl = list.to_native_tpl
+
+      response = @native_kafka.with_inner do |inner|
+        Rdkafka::Bindings.rd_kafka_position(inner, tpl)
+      end
+
+      Rdkafka::RdkafkaError.validate!(response)
+
+      TopicPartitionList.from_native_tpl(tpl)
+    end
+
     # Query broker for low (oldest/beginning) and high (newest/end) offsets for a partition.
     #
     # @param topic [String] The topic to query
     # @param partition [Integer] The partition to query
     # @param timeout_ms [Integer] The timeout for querying the broker
-    #
-    # @raise [RdkafkaError] When querying the broker fails.
-    #
     # @return [Integer] The low and high watermark
+    # @raise [RdkafkaError] When querying the broker fails.
     def query_watermark_offsets(topic, partition, timeout_ms=200)
       closed_consumer_check(__method__)
 
```
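
The newly added `#position` wraps librdkafka's `rd_kafka_position`: with no argument it reports on the current assignment, and each offset is last-consumed + 1 (nil before any message). A hedged sketch:

```ruby
positions = consumer.position # nil => use the current assignment

positions.to_h.each do |topic, partitions|
  partitions.each do |partition|
    # nil until at least one message was consumed from this partition
    puts "#{topic}/#{partition.partition} position: #{partition.offset.inspect}"
  end
end

consumer.position(42) # => TypeError: list has to be nil or a TopicPartitionList
```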

```diff
@@ -310,10 +332,9 @@ module Rdkafka
     #
     # @param topic_partition_list [TopicPartitionList] The list to calculate lag for.
     # @param watermark_timeout_ms [Integer] The timeout for each query watermark call.
-    #
+    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag
+    #   per partition
     # @raise [RdkafkaError] When querying the broker fails.
-    #
-    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag per partition
     def lag(topic_partition_list, watermark_timeout_ms=100)
       out = {}
 
```
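
`#lag` now documents the return shape up front: topic => partition => lag. An illustrative call that feeds it the committed offsets:

```ruby
# Shape: { "topic-name" => { 0 => 12, 1 => 0 } } (values illustrative)
lag = consumer.lag(consumer.committed)

lag.each do |topic, partitions|
  partitions.each do |partition, behind|
    puts "#{topic}[#{partition}] is #{behind} messages behind"
  end
end
```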

```diff
@@ -362,10 +383,8 @@ module Rdkafka
     # When using this `enable.auto.offset.store` should be set to `false` in the config.
     #
     # @param message [Rdkafka::Consumer::Message] The message which offset will be stored
-    #
-    # @raise [RdkafkaError] When storing the offset fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When storing the offset fails
     def store_offset(message)
       closed_consumer_check(__method__)
 
```
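
`#store_offset` belongs to the manual-store workflow: disable `enable.auto.offset.store`, process, then store. A sketch with illustrative settings and a hypothetical `process` step:

```ruby
config = Rdkafka::Config.new(
  "bootstrap.servers"        => "localhost:9092", # illustrative
  "group.id"                 => "example-group",
  "enable.auto.offset.store" => false # as the docs above require
)

consumer = config.consumer
consumer.subscribe("events") # hypothetical topic

consumer.each do |message|
  process(message)               # hypothetical processing step
  consumer.store_offset(message) # mark done only after processing succeeded
end
```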

```diff
@@ -397,10 +416,8 @@ module Rdkafka
     # message at the given offset.
     #
     # @param message [Rdkafka::Consumer::Message] The message to which to seek
-    #
-    # @raise [RdkafkaError] When seeking fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When seeking fails
     def seek(message)
       closed_consumer_check(__method__)
 
```
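
`#seek` moves only the fetch position; committed offsets stay where they were until the re-fetched message is committed again. A hedged sketch:

```ruby
# `message` is an Rdkafka::Consumer::Message from an earlier poll that we
# want to reprocess:
consumer.seek(message)

redelivered = consumer.poll(1_000) # fetches from message's offset again
```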

```diff
@@ -470,10 +487,8 @@ module Rdkafka
     #
     # @param list [TopicPartitionList,nil] The topic with partitions to commit
     # @param async [Boolean] Whether to commit async or wait for the commit to finish
-    #
-    # @raise [RdkafkaError] When committing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When committing fails
     def commit(list=nil, async=false)
       closed_consumer_check(__method__)
 
@@ -499,10 +514,8 @@ module Rdkafka
     # Poll for the next message on one of the subscribed topics
     #
     # @param timeout_ms [Integer] Timeout of this poll
-    #
-    # @raise [RdkafkaError] When polling fails
-    #
     # @return [Message, nil] A message or nil if there was no new message within the timeout
+    # @raise [RdkafkaError] When polling fails
     def poll(timeout_ms)
       closed_consumer_check(__method__)
 
```
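
`#poll` is the single-message primitive that `#each` builds on. A minimal hand-rolled loop, equivalent in spirit to `#each` (timeout illustrative):

```ruby
loop do
  begin
    message = consumer.poll(250) # nil when nothing arrived within 250 ms
    next unless message

    puts "#{message.topic}/#{message.partition}@#{message.offset}"
  rescue Rdkafka::RdkafkaError => e
    # partition EOF is informational when `enable.partition.eof` is enabled
    raise unless e.is_partition_eof?
  end
end
```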

```diff
@@ -528,17 +541,40 @@ module Rdkafka
       end
     end
 
+    # Polls the main rdkafka queue (not the consumer one). Do **NOT** use it if `consumer_poll_set`
+    # was set to `true`.
+    #
+    # Events will cause application-provided callbacks to be called.
+    #
+    # Events (in the context of the consumer):
+    #   - error callbacks
+    #   - stats callbacks
+    #   - any other callbacks supported by librdkafka that are not part of the consumer_poll, that
+    #     would have a callback configured and activated.
+    #
+    # This method needs to be called at regular intervals to serve any queued callbacks waiting to
+    # be called. When in use, does **NOT** replace `#poll` but needs to run complementary with it.
+    #
+    # @param timeout_ms [Integer] poll timeout. If set to 0 will run async, when set to -1 will
+    #   block until any events available.
+    #
+    # @note This method technically should be called `#poll` and the current `#poll` should be
+    #   called `#consumer_poll` though we keep the current naming convention to make it backward
+    #   compatible.
+    def events_poll(timeout_ms = 0)
+      @native_kafka.with_inner do |inner|
+        Rdkafka::Bindings.rd_kafka_poll(inner, timeout_ms)
+      end
+    end
+
     # Poll for new messages and yield for each received one. Iteration
     # will end when the consumer is closed.
     #
-    # If `enable.partition.eof` is turned on in the config this will raise an
-    # error when an eof is reached, so you probably want to disable that when
-    # using this method of iteration.
+    # If `enable.partition.eof` is turned on in the config this will raise an error when an eof is
+    # reached, so you probably want to disable that when using this method of iteration.
     #
     # @raise [RdkafkaError] When polling fails
-    #
     # @yieldparam message [Message] Received message
-    #
     # @return [nil]
     def each
       loop do
```
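
The new `#events_poll` serves error/stats callbacks from the main queue and only applies when `consumer_poll_set` is disabled; it complements `#poll` rather than replacing it. A hedged sketch of the intended interplay (settings and `handle` are illustrative):

```ruby
config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092", # illustrative
  "group.id"          => "example-group"
)
config.consumer_poll_set = false # keep the main queue separate

consumer = config.consumer
consumer.subscribe("events") # hypothetical topic

loop do
  message = consumer.poll(100) # messages, as before
  handle(message) if message   # hypothetical handler

  consumer.events_poll(0)      # 0 => non-blocking sweep of queued callbacks
end
```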

```diff
@@ -591,9 +627,7 @@ module Rdkafka
     # that you may or may not see again.
     #
     # @param max_items [Integer] Maximum size of the yielded array of messages
-    #
     # @param bytes_threshold [Integer] Threshold number of total message bytes in the yielded array of messages
-    #
     # @param timeout_ms [Integer] max time to wait for up to max_items
     #
     # @raise [RdkafkaError] When polling fails
@@ -640,10 +674,6 @@ module Rdkafka
     end
 
     private
-    def monotonic_now
-      # needed because Time.now can go backwards
-      Process.clock_gettime(Process::CLOCK_MONOTONIC)
-    end
 
     def closed_consumer_check(method)
       raise Rdkafka::ClosedConsumerError.new(method) if closed?
```

data/lib/rdkafka/helpers/time.rb ADDED

```diff
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  # Namespace for some small utilities used in multiple components
+  module Helpers
+    # Time related methods used across Karafka
+    module Time
+      # @return [Float] current monotonic time in seconds with microsecond precision
+      def monotonic_now
+        ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+      end
+    end
+  end
+end
```
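
This extracted helper exists because wall-clock time (`Time.now`) can jump backwards under NTP adjustments, which would corrupt elapsed-time math such as the partition-count TTL below. A small self-contained demonstration:

```ruby
require "rdkafka"

class Stopwatch
  # The gem mixes this module into Consumer and Producer; any class can
  # include it the same way.
  include Rdkafka::Helpers::Time

  def measure
    started_at = monotonic_now
    yield
    monotonic_now - started_at # stable even if the system clock is adjusted
  end
end

puts format("took %.3fs", Stopwatch.new.measure { sleep(0.1) })
```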

data/lib/rdkafka/producer.rb CHANGED

```diff
@@ -1,10 +1,10 @@
 # frozen_string_literal: true
 
-require "objspace"
-
 module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
+    include Helpers::Time
+
     # Cache partitions count for 30 seconds
     PARTITIONS_COUNT_TTL = 30
 
```

```diff
@@ -167,18 +167,16 @@ module Rdkafka
     end
 
     # Partition count for a given topic.
-    # NOTE: If 'allow.auto.create.topics' is set to true in the broker, the topic will be auto-created after returning nil.
     #
     # @param topic [String] The topic name.
+    # @return [Integer] partition count for a given topic
     #
-    # @return partition count [Integer,nil]
-    #
-    # We cache the partition count for a given topic for given time
-    # This prevents us in case someone uses `partition_key` from querying for the count with
-    # each message. Instead we query once every 30 seconds at most
+    # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
+    #   auto-created after returning nil.
     #
-    # @param topic [String] topic name
-    # @return [Integer] partition count for a given topic
+    # @note We cache the partition count for a given topic for given time.
+    #   This prevents us in case someone uses `partition_key` from querying for the count with
+    #   each message. Instead we query once every 30 seconds at most
     def partition_count(topic)
       closed_producer_check(__method__)
 
```
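
The rewritten notes make the caching contract explicit: at most one metadata lookup per topic every 30 seconds (`PARTITIONS_COUNT_TTL`), which matters because `partition_key` needs the count for every message. Illustrative sketch (`config` and topic as in the earlier examples):

```ruby
producer = config.producer

producer.partition_count("events") # first call queries the cluster

# Calls within the next 30 seconds hit the cache, so producing with
# partition_key does not flood the brokers with metadata requests:
100.times do |i|
  producer.produce(
    topic:         "events",        # hypothetical topic
    payload:       "payload #{i}",
    partition_key: "user-#{i % 10}"
  )
end
```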

```diff
@@ -308,11 +306,6 @@ module Rdkafka
 
     private
 
-    def monotonic_now
-      # needed because Time.now can go backwards
-      Process.clock_gettime(Process::CLOCK_MONOTONIC)
-    end
-
     def closed_producer_check(method)
       raise Rdkafka::ClosedProducerError.new(method) if closed?
     end
```

data/lib/rdkafka/version.rb CHANGED

```diff
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.13.7"
+  VERSION = "0.13.9"
   LIBRDKAFKA_VERSION = "2.2.0"
   LIBRDKAFKA_SOURCE_SHA256 = "af9a820cbecbc64115629471df7c7cecd40403b6c34bfdbb9223152677a47226"
 end
```

data/lib/rdkafka.rb CHANGED

```diff
@@ -1,7 +1,12 @@
 # frozen_string_literal: true
 
-require "rdkafka/version"
+require "logger"
+require "objspace"
+require "ffi"
+require "json"
 
+require "rdkafka/version"
+require "rdkafka/helpers/time"
 require "rdkafka/abstract_handle"
 require "rdkafka/admin"
 require "rdkafka/admin/create_topic_handle"
@@ -24,3 +29,7 @@ require "rdkafka/native_kafka"
 require "rdkafka/producer"
 require "rdkafka/producer/delivery_handle"
 require "rdkafka/producer/delivery_report"
+
+# Main Rdkafka namespace of this gem
+module Rdkafka
+end
```

data/spec/rdkafka/abstract_handle_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::AbstractHandle do
   let(:response) { 0 }
   let(:result) { -1 }
```

data/spec/rdkafka/admin/create_topic_handle_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Admin::CreateTopicHandle do
   let(:response) { 0 }
 
```

data/spec/rdkafka/admin/create_topic_report_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Admin::CreateTopicReport do
   subject { Rdkafka::Admin::CreateTopicReport.new(
     FFI::MemoryPointer.from_string("error string"),
```

data/spec/rdkafka/admin/delete_topic_handle_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Admin::DeleteTopicHandle do
   let(:response) { 0 }
 
```

data/spec/rdkafka/admin/delete_topic_report_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Admin::DeleteTopicReport do
   subject { Rdkafka::Admin::DeleteTopicReport.new(
     FFI::MemoryPointer.from_string("error string"),
```

data/spec/rdkafka/admin_spec.rb CHANGED

```diff
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require "ostruct"
 
 describe Rdkafka::Admin do
```

data/spec/rdkafka/bindings_spec.rb CHANGED

```diff
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require 'zlib'
 
 describe Rdkafka::Bindings do
```

data/spec/rdkafka/callbacks_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Callbacks do
 
   # The code in the call back functions is 100% covered by other specs. Due to
```

data/spec/rdkafka/config_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Config do
   context "logger" do
     it "should have a default logger" do
@@ -115,6 +113,14 @@ describe Rdkafka::Config do
     consumer.close
   end
 
+  it "should create a consumer with consumer_poll_set set to false" do
+    config = rdkafka_consumer_config
+    config.consumer_poll_set = false
+    consumer = config.consumer
+    expect(consumer).to be_a Rdkafka::Consumer
+    consumer.close
+  end
+
   it "should raise an error when creating a consumer with invalid config" do
     config = Rdkafka::Config.new('invalid.key' => 'value')
     expect {
```

data/spec/rdkafka/consumer/headers_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Consumer::Headers do
   let(:headers) do
     { # Note String keys!
```

data/spec/rdkafka/consumer/message_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Consumer::Message do
   let(:native_client) { new_native_client }
   let(:native_topic) { new_native_topic(native_client: native_client) }
```

data/spec/rdkafka/consumer/partition_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Consumer::Partition do
   let(:offset) { 100 }
   let(:err) { 0 }
```

data/spec/rdkafka/consumer/topic_partition_list_spec.rb CHANGED

```diff
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Consumer::TopicPartitionList do
   it "should create a new list and add unassigned topics" do
     list = Rdkafka::Consumer::TopicPartitionList.new
```