karafka-rdkafka 0.13.7 → 0.13.8

This diff shows the contents of publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 4fe6b47d334265ef793c32b215dca1d97adfa42a51215b9492f1d36b58a84403
-  data.tar.gz: 4fc125147d796f981314640779adf5234376b0d0357c0321683002820fcb3301
+  metadata.gz: 4082c381a9d131273cc005b61b06f0e4c73b27044213d730fa5c7faeec606e07
+  data.tar.gz: 06754bdba16fc3feaf648670e766cdbbc60342b1a43902ef08e42a5acfd4b2ac
 SHA512:
-  metadata.gz: f7fc5d7dc99653f117b4bef3baa0d6e772537f4a7df6375a6a33692d4a9728f81308e11c3cd873fbb5686fb83cf2021a5866eb252e10661b3fe512639e3e3349
-  data.tar.gz: 9d23217c18093187f1fecc5afbbdbe05d2a6be3cb99adb0dc20ed7458da19c7035cc51eda664e09c3acaa5ac3dd3539b0c58c91cd6d6faed3c1d21edc9662dde
+  metadata.gz: 89c0b5078f97c31d0e209ebbf13279a2d079aef35f7b4c2a4bf92cac17a69ccfed1769f84d1d06ca734c9a95174b0b5fa4fa9b6933f63e30fe71c8493bda4ed1
+  data.tar.gz: 490a160689be7c4e261b2b8ff1ffdbcb5aec53cfe1dd0acabd0da33eff708cb7a1d09afe3c3a6752fc9a6345c005f1f0919604035a97240c5f9ed120870e802c
checksums.yaml.gz.sig CHANGED
Binary file
data/CHANGELOG.md CHANGED
@@ -1,4 +1,7 @@
-# 0.13.7 (Unreleased)
+# 0.13.8 (2023-10-31)
+- [Enhancement] Get consumer position (thijsc & mensfeld)
+
+# 0.13.7 (2023-10-31)
 - [Change] Drop support for Ruby 2.6 due to incompatibilities in usage of `ObjectSpace::WeakMap`
 - [Fix] Fix dangling Opaque references.
 
data/lib/rdkafka/bindings.rb CHANGED
@@ -208,6 +208,7 @@ module Rdkafka
     attach_function :rd_kafka_resume_partitions, [:pointer, :pointer], :int, blocking: true
     attach_function :rd_kafka_seek, [:pointer, :int32, :int64, :int], :int, blocking: true
     attach_function :rd_kafka_offsets_for_times, [:pointer, :pointer, :int], :int, blocking: true
+    attach_function :rd_kafka_position, [:pointer, :pointer], :int, blocking: true
 
     # Headers
     attach_function :rd_kafka_header_get_all, [:pointer, :size_t, :pointer, :pointer, SizePtr], :int
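For orientation (not part of the diff itself): the new binding maps librdkafka's rd_kafka_position(rk, partitions) call, which fills the offset field of every partition in the passed list with the consumer's current position and returns an error code. A minimal sketch of driving the binding directly, where native_consumer and native_tpl are placeholder names for pointers obtained elsewhere (for example via NativeKafka#with_inner and TopicPartitionList#to_native_tpl):

    # Sketch only: ask librdkafka for the current positions, check the error
    # code, then rehydrate the native list into a Ruby TopicPartitionList.
    err = Rdkafka::Bindings.rd_kafka_position(native_consumer, native_tpl)
    Rdkafka::RdkafkaError.validate!(err)
    positions = Rdkafka::Consumer::TopicPartitionList.from_native_tpl(native_tpl)

The higher-level Consumer#position method added further down in this diff wraps exactly these steps.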
data/lib/rdkafka/consumer.rb CHANGED
@@ -29,6 +29,13 @@ module Rdkafka
       ->(_) { close }
     end
 
+    # @return [String] consumer name
+    def name
+      @name ||= @native_kafka.with_inner do |inner|
+        ::Rdkafka::Bindings.rd_kafka_name(inner)
+      end
+    end
+
     # Close this consumer
     # @return [nil]
     def close
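A quick usage sketch of the new accessor (the broker address and group id below are placeholders; the returned string is whatever client instance name librdkafka assigned, typically along the lines of "rdkafka#consumer-1"):

    require "rdkafka"

    config = Rdkafka::Config.new(
      :"bootstrap.servers" => "localhost:9092",
      :"group.id" => "example-group"
    )
    consumer = config.consumer

    puts consumer.name # e.g. "rdkafka#consumer-1"
    consumer.close

The value is memoized, so repeated calls do not cross the FFI boundary again.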
@@ -269,6 +276,32 @@ module Rdkafka
       end
     end
 
+    # Return the current positions (offsets) for topics and partitions.
+    # The offset field of each requested partition will be set to the offset of the last consumed message + 1, or nil in case there was no previous message.
+    #
+    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil to use the current subscription.
+    #
+    # @raise [RdkafkaError] When getting the positions fails.
+    #
+    # @return [TopicPartitionList]
+    def position(list=nil)
+      if list.nil?
+        list = assignment
+      elsif !list.is_a?(TopicPartitionList)
+        raise TypeError.new("list has to be nil or a TopicPartitionList")
+      end
+
+      tpl = list.to_native_tpl
+
+      response = @native_kafka.with_inner do |inner|
+        Rdkafka::Bindings.rd_kafka_position(inner, tpl)
+      end
+
+      Rdkafka::RdkafkaError.validate!(response)
+
+      TopicPartitionList.from_native_tpl(tpl)
+    end
+
     # Query broker for low (oldest/beginning) and high (newest/end) offsets for a partition.
     #
     # @param topic [String] The topic to query
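As a rough usage sketch (topic name and timeout are illustrative, not taken from the gem's docs), the new method pairs naturally with polling: once a message has been consumed, the reported position for its partition is the message offset plus one:

    consumer.subscribe("consume_test_topic")
    message = consumer.poll(5_000)

    if message
      partitions = consumer.position.to_h["consume_test_topic"]
      current = partitions.find { |p| p.partition == message.partition }
      current.offset # => message.offset + 1
    end

Passing nil (the default) uses the current assignment, so the consumer needs partitions assigned before the call returns anything useful.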
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.13.7"
+  VERSION = "0.13.8"
   LIBRDKAFKA_VERSION = "2.2.0"
   LIBRDKAFKA_SOURCE_SHA256 = "af9a820cbecbc64115629471df7c7cecd40403b6c34bfdbb9223152677a47226"
 end
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -337,8 +337,9 @@ describe Rdkafka::Consumer do
     end
   end
 
-  describe "#commit, #committed and #store_offset" do
-    # Make sure there's a stored offset
+
+  describe "#position, #commit, #committed and #store_offset" do
+    # Make sure there are messages to work with
     let!(:report) do
       producer.produce(
         topic: "consume_test_topic",
@@ -356,29 +357,33 @@ describe Rdkafka::Consumer do
       )
     end
 
-    it "should only accept a topic partition list in committed" do
-      expect {
-        consumer.committed("list")
-      }.to raise_error TypeError
+    describe "#position" do
+      it "should only accept a topic partition list in position if not nil" do
+        expect {
+          consumer.position("list")
+        }.to raise_error TypeError
+      end
     end
 
-    it "should commit in sync mode" do
-      expect {
-        consumer.commit(nil, true)
-      }.not_to raise_error
-    end
+    describe "#committed" do
+      it "should only accept a topic partition list in commit if not nil" do
+        expect {
+          consumer.commit("list")
+        }.to raise_error TypeError
+      end
 
-    it "should only accept a topic partition list in commit if not nil" do
-      expect {
-        consumer.commit("list")
-      }.to raise_error TypeError
+      it "should commit in sync mode" do
+        expect {
+          consumer.commit(nil, true)
+        }.not_to raise_error
+      end
     end
 
     context "with a committed consumer" do
       before :all do
         # Make sure there are some messages.
         handles = []
-        producer = rdkafka_producer_config.producer
+        producer = rdkafka_config.producer
         10.times do
           (0..2).each do |i|
             handles << producer.produce(
@@ -422,31 +427,33 @@ describe Rdkafka::Consumer do
         }.to raise_error(Rdkafka::RdkafkaError)
       end
 
-      it "should fetch the committed offsets for the current assignment" do
-        partitions = consumer.committed.to_h["consume_test_topic"]
-        expect(partitions).not_to be_nil
-        expect(partitions[0].offset).to eq 1
-      end
+      describe "#committed" do
+        it "should fetch the committed offsets for the current assignment" do
+          partitions = consumer.committed.to_h["consume_test_topic"]
+          expect(partitions).not_to be_nil
+          expect(partitions[0].offset).to eq 1
+        end
 
-      it "should fetch the committed offsets for a specified topic partition list" do
-        list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
-          list.add_topic("consume_test_topic", [0, 1, 2])
+        it "should fetch the committed offsets for a specified topic partition list" do
+          list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+            list.add_topic("consume_test_topic", [0, 1, 2])
+          end
+          partitions = consumer.committed(list).to_h["consume_test_topic"]
+          expect(partitions).not_to be_nil
+          expect(partitions[0].offset).to eq 1
+          expect(partitions[1].offset).to eq 1
+          expect(partitions[2].offset).to eq 1
         end
-        partitions = consumer.committed(list).to_h["consume_test_topic"]
-        expect(partitions).not_to be_nil
-        expect(partitions[0].offset).to eq 1
-        expect(partitions[1].offset).to eq 1
-        expect(partitions[2].offset).to eq 1
-      end
 
-      it "should raise an error when getting committed fails" do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_committed).and_return(20)
-        list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
-          list.add_topic("consume_test_topic", [0, 1, 2])
+        it "should raise an error when getting committed fails" do
+          expect(Rdkafka::Bindings).to receive(:rd_kafka_committed).and_return(20)
+          list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+            list.add_topic("consume_test_topic", [0, 1, 2])
+          end
+          expect {
+            consumer.committed(list)
+          }.to raise_error Rdkafka::RdkafkaError
         end
-        expect {
-          consumer.committed(list)
-        }.to raise_error Rdkafka::RdkafkaError
       end
 
       describe "#store_offset" do
@@ -467,6 +474,8 @@ describe Rdkafka::Consumer do
           @new_consumer.store_offset(message)
           @new_consumer.commit
 
+          # TODO use position here, should be at offset
+
           list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
             list.add_topic("consume_test_topic", [0, 1, 2])
           end
@@ -481,6 +490,35 @@ describe Rdkafka::Consumer do
             @new_consumer.store_offset(message)
           }.to raise_error Rdkafka::RdkafkaError
         end
+
+        describe "#position" do
+          it "should fetch the positions for the current assignment" do
+            consumer.store_offset(message)
+
+            partitions = consumer.position.to_h["consume_test_topic"]
+            expect(partitions).not_to be_nil
+            expect(partitions[0].offset).to eq message.offset + 1
+          end
+
+          it "should fetch the positions for a specified assignment" do
+            consumer.store_offset(message)
+
+            list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+              list.add_topic_and_partitions_with_offsets("consume_test_topic", 0 => nil, 1 => nil, 2 => nil)
+            end
+            partitions = consumer.position(list).to_h["consume_test_topic"]
+            expect(partitions).not_to be_nil
+            expect(partitions[0].offset).to eq message.offset + 1
+          end
+
+          it "should raise an error when getting the position fails" do
+            expect(Rdkafka::Bindings).to receive(:rd_kafka_position).and_return(20)
+
+            expect {
+              consumer.position
+            }.to raise_error(Rdkafka::RdkafkaError)
+          end
+        end
       end
     end
   end
data/spec/spec_helper.rb CHANGED
@@ -36,7 +36,7 @@ def rdkafka_consumer_config(config_overrides={})
   # Add consumer specific fields to it
   config[:"auto.offset.reset"] = "earliest"
   config[:"enable.partition.eof"] = false
-  config[:"group.id"] = "ruby-test-#{Random.new.rand(0..1_000_000)}"
+  config[:"group.id"] = "ruby-test-#{SecureRandom.uuid}"
   # Enable debug mode if required
   if ENV["DEBUG_CONSUMER"]
     config[:debug] = "cgrp,topic,fetch"
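One note on this spec-helper change, as an assumption rather than something stated in the diff: SecureRandom ships with Ruby's standard library (require "securerandom" if it is not already loaded), and a UUID makes consumer-group collisions between repeated or parallel spec runs practically impossible, unlike a random integer capped at one million:

    require "securerandom"

    # Each evaluation produces a fresh, effectively unique consumer group id.
    "ruby-test-#{SecureRandom.uuid}" # e.g. "ruby-test-2f1c9e7a-5d7e-4f38-9a35-0c3a8a1d2b64"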
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.13.7
+  version: 0.13.8
 platform: ruby
 authors:
 - Thijs Cadier
metadata.gz.sig CHANGED
Binary file