karafka-rdkafka 0.13.2 → 0.13.9

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
Files changed (59)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +9 -4
  4. data/.gitignore +4 -0
  5. data/.rspec +1 -0
  6. data/.ruby-gemset +1 -0
  7. data/.ruby-version +1 -0
  8. data/CHANGELOG.md +54 -26
  9. data/{LICENSE → MIT-LICENSE} +2 -1
  10. data/README.md +19 -20
  11. data/certs/cert_chain.pem +21 -21
  12. data/docker-compose.yml +16 -15
  13. data/ext/README.md +1 -1
  14. data/ext/Rakefile +1 -1
  15. data/karafka-rdkafka.gemspec +2 -2
  16. data/lib/rdkafka/abstract_handle.rb +41 -27
  17. data/lib/rdkafka/admin/create_partitions_handle.rb +6 -3
  18. data/lib/rdkafka/admin/create_topic_handle.rb +6 -3
  19. data/lib/rdkafka/admin/delete_topic_handle.rb +6 -3
  20. data/lib/rdkafka/admin.rb +6 -7
  21. data/lib/rdkafka/bindings.rb +24 -6
  22. data/lib/rdkafka/config.rb +53 -19
  23. data/lib/rdkafka/consumer/headers.rb +2 -4
  24. data/lib/rdkafka/consumer.rb +119 -93
  25. data/lib/rdkafka/error.rb +60 -1
  26. data/lib/rdkafka/helpers/time.rb +14 -0
  27. data/lib/rdkafka/metadata.rb +4 -4
  28. data/lib/rdkafka/native_kafka.rb +6 -1
  29. data/lib/rdkafka/producer/delivery_handle.rb +16 -1
  30. data/lib/rdkafka/producer/delivery_report.rb +3 -2
  31. data/lib/rdkafka/producer.rb +89 -17
  32. data/lib/rdkafka/version.rb +3 -3
  33. data/lib/rdkafka.rb +10 -1
  34. data/renovate.json +6 -0
  35. data/spec/rdkafka/abstract_handle_spec.rb +0 -2
  36. data/spec/rdkafka/admin/create_topic_handle_spec.rb +4 -4
  37. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
  38. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +3 -3
  39. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
  40. data/spec/rdkafka/admin_spec.rb +1 -2
  41. data/spec/rdkafka/bindings_spec.rb +0 -1
  42. data/spec/rdkafka/callbacks_spec.rb +0 -2
  43. data/spec/rdkafka/config_spec.rb +8 -2
  44. data/spec/rdkafka/consumer/headers_spec.rb +0 -2
  45. data/spec/rdkafka/consumer/message_spec.rb +0 -2
  46. data/spec/rdkafka/consumer/partition_spec.rb +0 -2
  47. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -2
  48. data/spec/rdkafka/consumer_spec.rb +122 -38
  49. data/spec/rdkafka/error_spec.rb +0 -2
  50. data/spec/rdkafka/metadata_spec.rb +2 -3
  51. data/spec/rdkafka/native_kafka_spec.rb +2 -3
  52. data/spec/rdkafka/producer/delivery_handle_spec.rb +15 -2
  53. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
  54. data/spec/rdkafka/producer_spec.rb +293 -1
  55. data/spec/spec_helper.rb +7 -1
  56. data.tar.gz.sig +0 -0
  57. metadata +31 -28
  58. metadata.gz.sig +0 -0
  59. data/certs/karafka-pro.pem +0 -11
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,6 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
  require "zlib"

  describe Rdkafka::Producer do
@@ -559,6 +558,22 @@ describe Rdkafka::Producer do
      end
    end

+   context "when not being able to deliver the message" do
+     let(:producer) do
+       rdkafka_producer_config(
+         "bootstrap.servers": "localhost:9093",
+         "message.timeout.ms": 100
+       ).producer
+     end
+
+     it "should contain the error in the response when not deliverable" do
+       handler = producer.produce(topic: 'produce_test_topic', payload: nil)
+       # Wait for the async callbacks and delivery registry to update
+       sleep(2)
+       expect(handler.create_result.error).to be_a(Rdkafka::RdkafkaError)
+     end
+   end
+
    describe '#partition_count' do
      it { expect(producer.partition_count('example_topic')).to eq(1) }

@@ -627,4 +642,281 @@ describe Rdkafka::Producer do
        end
      end
    end
+
+   describe '#flush' do
+     it "should return flush when it can flush all outstanding messages or when no messages" do
+       producer.produce(
+         topic: "produce_test_topic",
+         payload: "payload headers",
+         key: "key headers",
+         headers: {}
+       )
+
+       expect(producer.flush(5_000)).to eq(true)
+     end
+
+     context 'when it cannot flush due to a timeout' do
+       let(:producer) do
+         rdkafka_producer_config(
+           "bootstrap.servers": "localhost:9093",
+           "message.timeout.ms": 2_000
+         ).producer
+       end
+
+       after do
+         # Allow rdkafka to evict message preventing memory-leak
+         sleep(2)
+       end
+
+       it "should return false on flush when cannot deliver and beyond timeout" do
+         producer.produce(
+           topic: "produce_test_topic",
+           payload: "payload headers",
+           key: "key headers",
+           headers: {}
+         )
+
+         expect(producer.flush(1_000)).to eq(false)
+       end
+     end
+
+     context 'when there is a different error' do
+       before { allow(Rdkafka::Bindings).to receive(:rd_kafka_flush).and_return(-199) }
+
+       it 'should raise it' do
+         expect { producer.flush }.to raise_error(Rdkafka::RdkafkaError)
+       end
+     end
+   end
+
+   describe '#purge' do
+     context 'when no outgoing messages' do
+       it { expect(producer.purge).to eq(true) }
+     end
+
+     context 'when librdkafka purge returns an error' do
+       before { expect(Rdkafka::Bindings).to receive(:rd_kafka_purge).and_return(-153) }
+
+       it 'expect to raise an error' do
+         expect { producer.purge }.to raise_error(Rdkafka::RdkafkaError, /retry/)
+       end
+     end
+
+     context 'when there are outgoing things in the queue' do
+       let(:producer) do
+         rdkafka_producer_config(
+           "bootstrap.servers": "localhost:9093",
+           "message.timeout.ms": 2_000
+         ).producer
+       end
+
+       it "should purge and move forward" do
+         producer.produce(
+           topic: "produce_test_topic",
+           payload: "payload headers"
+         )
+
+         expect(producer.purge).to eq(true)
+         expect(producer.flush(1_000)).to eq(true)
+       end
+
+       it "should materialize the delivery handles" do
+         handle = producer.produce(
+           topic: "produce_test_topic",
+           payload: "payload headers"
+         )
+
+         expect(producer.purge).to eq(true)
+
+         expect { handle.wait }.to raise_error(Rdkafka::RdkafkaError, /purge_queue/)
+       end
+
+       context "when using delivery_callback" do
+         let(:delivery_reports) { [] }
+
+         let(:delivery_callback) do
+           ->(delivery_report) { delivery_reports << delivery_report }
+         end
+
+         before { producer.delivery_callback = delivery_callback }
+
+         it "should run the callback" do
+           handle = producer.produce(
+             topic: "produce_test_topic",
+             payload: "payload headers"
+           )
+
+           expect(producer.purge).to eq(true)
+           # queue purge
+           expect(delivery_reports[0].error).to eq(-152)
+         end
+       end
+     end
+   end
+
+   context 'when working with transactions' do
+     let(:producer) do
+       rdkafka_producer_config(
+         'transactional.id': SecureRandom.uuid,
+         'transaction.timeout.ms': 5_000
+       ).producer
+     end
+
+     it 'expect not to allow to produce without transaction init' do
+       expect do
+         producer.produce(topic: 'produce_test_topic', payload: 'data')
+       end.to raise_error(Rdkafka::RdkafkaError, /Erroneous state \(state\)/)
+     end
+
+     it 'expect to raise error when transactions are initialized but producing not in one' do
+       producer.init_transactions
+
+       expect do
+         producer.produce(topic: 'produce_test_topic', payload: 'data')
+       end.to raise_error(Rdkafka::RdkafkaError, /Erroneous state \(state\)/)
+     end
+
+     it 'expect to allow to produce within a transaction, finalize and ship data' do
+       producer.init_transactions
+       producer.begin_transaction
+       handle1 = producer.produce(topic: 'produce_test_topic', payload: 'data1', partition: 1)
+       handle2 = producer.produce(topic: 'example_topic', payload: 'data2', partition: 0)
+       producer.commit_transaction
+
+       report1 = handle1.wait(max_wait_timeout: 15)
+       report2 = handle2.wait(max_wait_timeout: 15)
+
+       message1 = wait_for_message(
+         topic: "produce_test_topic",
+         delivery_report: report1,
+         consumer: consumer
+       )
+
+       expect(message1.partition).to eq 1
+       expect(message1.payload).to eq "data1"
+       expect(message1.timestamp).to be_within(10).of(Time.now)
+
+       message2 = wait_for_message(
+         topic: "example_topic",
+         delivery_report: report2,
+         consumer: consumer
+       )
+
+       expect(message2.partition).to eq 0
+       expect(message2.payload).to eq "data2"
+       expect(message2.timestamp).to be_within(10).of(Time.now)
+     end
+
+     it 'expect not to send data and propagate purge queue error on abort' do
+       producer.init_transactions
+       producer.begin_transaction
+       handle1 = producer.produce(topic: 'produce_test_topic', payload: 'data1', partition: 1)
+       handle2 = producer.produce(topic: 'example_topic', payload: 'data2', partition: 0)
+       producer.abort_transaction
+
+       expect { handle1.wait(max_wait_timeout: 15) }
+         .to raise_error(Rdkafka::RdkafkaError, /Purged in queue \(purge_queue\)/)
+       expect { handle2.wait(max_wait_timeout: 15) }
+         .to raise_error(Rdkafka::RdkafkaError, /Purged in queue \(purge_queue\)/)
+     end
+
+     it 'expect to have non retryable, non abortable and not fatal error on abort' do
+       producer.init_transactions
+       producer.begin_transaction
+       handle = producer.produce(topic: 'produce_test_topic', payload: 'data1', partition: 1)
+       producer.abort_transaction
+
+       response = handle.wait(raise_response_error: false)
+
+       expect(response.error).to be_a(Rdkafka::RdkafkaError)
+       expect(response.error.retryable?).to eq(false)
+       expect(response.error.fatal?).to eq(false)
+       expect(response.error.abortable?).to eq(false)
+     end
+
+     # This may not always crash, depends on load but no other way to check it
+     context 'when timeout is too short and error occurs and we can abort' do
+       let(:producer) do
+         rdkafka_producer_config(
+           'transactional.id': SecureRandom.uuid,
+           'transaction.timeout.ms': 1_000
+         ).producer
+       end
+
+       it 'expect to allow to produce within a transaction, finalize and not ship data' do
+         producer.init_transactions
+         producer.begin_transaction
+
+         sleep(5)
+
+         handle1 = producer.produce(topic: 'produce_test_topic', payload: 'data1', partition: 1)
+         handle2 = producer.produce(topic: 'example_topic', payload: 'data2', partition: 0)
+
+         begin
+           producer.commit_transaction(15_000)
+         rescue Rdkafka::RdkafkaError => e
+           next unless e.abortable?
+
+           begin
+             producer.abort_transaction(15_000)
+           rescue Rdkafka::RdkafkaError => e
+             nil
+           end
+
+           expect { handle1.wait(max_wait_timeout: 15) }.to raise_error(Rdkafka::RdkafkaError)
+           expect { handle2.wait(max_wait_timeout: 15) }.to raise_error(Rdkafka::RdkafkaError)
+         end
+       end
+     end
+
+     context 'fencing against previous active producer with same transactional id' do
+       let(:transactional_id) { SecureRandom.uuid }
+
+       let(:producer1) do
+         rdkafka_producer_config(
+           'transactional.id': transactional_id,
+           'transaction.timeout.ms': 10_000
+         ).producer
+       end
+
+       let(:producer2) do
+         rdkafka_producer_config(
+           'transactional.id': transactional_id,
+           'transaction.timeout.ms': 10_000
+         ).producer
+       end
+
+       after do
+         producer1.close
+         producer2.close
+       end
+
+       it 'expect older producer not to be able to commit when fenced out' do
+         producer1.init_transactions
+         producer1.begin_transaction
+         producer1.produce(topic: 'produce_test_topic', payload: 'data1', partition: 1)
+
+         producer2.init_transactions
+         producer2.begin_transaction
+         producer2.produce(topic: 'produce_test_topic', payload: 'data1', partition: 1)
+
+         expect { producer1.commit_transaction }
+           .to raise_error(Rdkafka::RdkafkaError, /This instance has been fenced/)
+
+         error = false
+
+         begin
+           producer1.commit_transaction
+         rescue Rdkafka::RdkafkaError => e
+           error = e
+         end
+
+         expect(error.fatal?).to eq(true)
+         expect(error.abortable?).to eq(false)
+         expect(error.retryable?).to eq(false)
+
+         expect { producer2.commit_transaction }.not_to raise_error
+       end
+     end
+   end
  end
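
The producer specs added above exercise the `#flush`, `#purge`, and transactional APIs introduced in this release. A minimal usage sketch outside the test suite, assuming a broker reachable on localhost:9092; the topic name is taken from the specs, and the behaviour noted in the comments mirrors what the specs assert:

```ruby
require "rdkafka"
require "securerandom"

# Plain producer: flush and purge.
producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

handle = producer.produce(topic: "produce_test_topic", payload: "payload")
producer.flush(5_000)        # true once everything queued is delivered, false on timeout
report = handle.wait(max_wait_timeout: 15)
puts "delivered to partition #{report.partition} at offset #{report.offset}"

producer.purge               # drops queued messages; their handles fail with purge_queue
producer.close

# Transactional producer: messages between begin and commit ship atomically.
tx = Rdkafka::Config.new(
  "bootstrap.servers"      => "localhost:9092",
  "transactional.id"       => SecureRandom.uuid,
  "transaction.timeout.ms" => 10_000
).producer

tx.init_transactions
tx.begin_transaction
tx.produce(topic: "produce_test_topic", payload: "data1", partition: 1)
tx.commit_transaction        # abort_transaction instead purges the queued messages
tx.close
```

As the specs above assert, producing on a transactional producer outside a `begin_transaction`/`commit_transaction` pair raises an `Rdkafka::RdkafkaError` with an "Erroneous state" code.
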
data/spec/spec_helper.rb CHANGED
@@ -11,6 +11,7 @@ require "pry"
  require "rspec"
  require "rdkafka"
  require "timeout"
+ require 'securerandom'

  def rdkafka_base_config
    {
@@ -35,7 +36,7 @@ def rdkafka_consumer_config(config_overrides={})
    # Add consumer specific fields to it
    config[:"auto.offset.reset"] = "earliest"
    config[:"enable.partition.eof"] = false
-   config[:"group.id"] = "ruby-test-#{Random.new.rand(0..1_000_000)}"
+   config[:"group.id"] = "ruby-test-#{SecureRandom.uuid}"
    # Enable debug mode if required
    if ENV["DEBUG_CONSUMER"]
      config[:debug] = "cgrp,topic,fetch"
@@ -106,6 +107,10 @@ def wait_for_unassignment(consumer)
    end
  end

+ def objects_of_type_count(type)
+   ObjectSpace.each_object(type).count
+ end
+
  def notify_listener(listener, &block)
    # 1. subscribe and poll
    consumer.subscribe("consume_test_topic")
@@ -134,6 +139,7 @@ RSpec.configure do |config|
        rake_test_topic: 3,
        watermarks_test_topic: 3,
        partitioner_test_topic: 25,
+       example_topic: 1
      }.each do |topic, partitions|
        create_topic_handle = admin.create_topic(topic.to_s, partitions, 1)
        begin
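
The helper change above replaces the bounded random integer in `group.id` with `SecureRandom.uuid`, so every example runs in a fresh consumer group and never inherits offsets committed by a previous run. A minimal sketch of the same pattern outside the suite, assuming a broker on localhost:9092; the topic name comes from the helper itself:

```ruby
require "rdkafka"
require "securerandom"

# A unique group.id per run means previously committed offsets never
# influence what this consumer sees.
consumer = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",
  "group.id"          => "ruby-test-#{SecureRandom.uuid}",
  "auto.offset.reset" => "earliest"
).consumer

consumer.subscribe("consume_test_topic")
message = consumer.poll(1_000) # nil if nothing arrives within the timeout
puts message.payload if message
consumer.close
```
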
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka-rdkafka
  version: !ruby/object:Gem::Version
-   version: 0.13.2
+   version: 0.13.9
  platform: ruby
  authors:
  - Thijs Cadier
@@ -12,30 +12,30 @@ cert_chain:
    -----BEGIN CERTIFICATE-----
    MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
    YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
-   MB4XDTIyMDgxOTE3MjEzN1oXDTIzMDgxOTE3MjEzN1owPzEQMA4GA1UEAwwHY29u
+   MB4XDTIzMDgyMTA3MjU1NFoXDTI0MDgyMDA3MjU1NFowPzEQMA4GA1UEAwwHY29u
    dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
-   bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAODzeO3L6lxdATzMHKNW
-   jFA/GGunoPuylO/BMzy8RiQHh7VIvysAKs0tHhTx3g2D0STDpF+hcQcPELFikiT2
-   F+1wOHj/SsrK7VKqfA8+gq04hKc5sQoX2Egf9k3V0YJ3eZ6R/koHkQ8A0TVt0w6F
-   ZQckoV4MqnEAx0g/FZN3mnHTlJ3VFLSBqJEIe+S6FZMl92mSv+hTrlUG8VaYxSfN
-   lTCvnKk284F6QZq5XIENLRmcDd/3aPBLnLwNnyMyhB+6gK8cUO+CFlDO5tjo/aBA
-   rUnl++wGG0JooF1ed0v+evOn9KoMBG6rHewcf79qJbVOscbD8qSAmo+sCXtcFryr
-   KRMTB8gNbowJkFRJDEe8tfRy11u1fYzFg/qNO82FJd62rKAw2wN0C29yCeQOPRb1
-   Cw9Y4ZwK9VFNEcV9L+3pHTHn2XfuZHtDaG198VweiF6raFO4yiEYccodH/USP0L5
-   cbcCFtmu/4HDSxL1ByQXO84A0ybJuk3/+aPUSXe9C9U8fwIDAQABo3cwdTAJBgNV
-   HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUSlcEakb7gfn/5E2WY6z73BF/
-   iZkwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
-   bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEA1aS+E7RXJ1w9g9mJ
-   G0NzFxe64OEuENosNlvYQCbRKGCXAU1qqelYkBQHseRgRKxLICrnypRo9IEobyHa
-   vDnJ4r7Tsb34dleqQW2zY/obG+cia3Ym2JsegXWF7dDOzCXJ4FN8MFoT2jHlqLLw
-   yrap0YO5zx0GSQ0Dwy8h2n2v2vanMEeCx7iNm3ERgR5WuN5sjzWoz2A/JLEEcK0C
-   EnAGKCWAd1fuG8IemDjT1edsd5FyYR4bIX0m+99oDuFZyPiiIbalmyYiSBBp59Yb
-   Q0P8zeBi4OfwCZNcxqz0KONmw9JLNv6DgyEAH5xe/4JzhMEgvIRiPj0pHfA7oqQF
-   KUNqvD1KlxbEC+bZfE5IZhnqYLdld/Ksqd22FI1RBhiS1Ejfsj99LVIm9cBuZEY2
-   Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
-   MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
+   bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAOuZpyQKEwsTG9plLat7
+   8bUaNuNBEnouTsNMr6X+XTgvyrAxTuocdsyP1sNCjdS1B8RiiDH1/Nt9qpvlBWon
+   sdJ1SYhaWNVfqiYStTDnCx3PRMmHRdD4KqUWKpN6VpZ1O/Zu+9Mw0COmvXgZuuO9
+   wMSJkXRo6dTCfMedLAIxjMeBIxtoLR2e6Jm6MR8+8WYYVWrO9kSOOt5eKQLBY7aK
+   b/Dc40EcJKPg3Z30Pia1M9ZyRlb6SOj6SKpHRqc7vbVQxjEw6Jjal1lZ49m3YZMd
+   ArMAs9lQZNdSw5/UX6HWWURLowg6k10RnhTUtYyzO9BFev0JFJftHnmuk8vtb+SD
+   5VPmjFXg2VOcw0B7FtG75Vackk8QKfgVe3nSPhVpew2CSPlbJzH80wChbr19+e3+
+   YGr1tOiaJrL6c+PNmb0F31NXMKpj/r+n15HwlTMRxQrzFcgjBlxf2XFGnPQXHhBm
+   kp1OFnEq4GG9sON4glRldkwzi/f/fGcZmo5fm3d+0ZdNgwIDAQABo3cwdTAJBgNV
+   HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUPVH5+dLA80A1kJ2Uz5iGwfOa
+   1+swHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
+   bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAnpa0jcN7JzREHMTQ
+   bfZ+xcvlrzuROMY6A3zIZmQgbnoZZNuX4cMRrT1p1HuwXpxdpHPw7dDjYqWw3+1h
+   3mXLeMuk7amjQpYoSWU/OIZMhIsARra22UN8qkkUlUj3AwTaChVKN/bPJOM2DzfU
+   kz9vUgLeYYFfQbZqeI6SsM7ltilRV4W8D9yNUQQvOxCFxtLOetJ00fC/E7zMUzbK
+   IBwYFQYsbI6XQzgAIPW6nGSYKgRhkfpmquXSNKZRIQ4V6bFrufa+DzD0bt2ZA3ah
+   fMmJguyb5L2Gf1zpDXzFSPMG7YQFLzwYz1zZZvOU7/UCpQsHpID/YxqDp4+Dgb+Y
+   qma0whX8UG/gXFV2pYWpYOfpatvahwi+A1TwPQsuZwkkhi1OyF1At3RY+hjSXyav
+   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
+   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
    -----END CERTIFICATE-----
- date: 2023-06-28 00:00:00.000000000 Z
+ date: 2023-11-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: ffi
@@ -165,7 +165,7 @@ dependencies:
        version: '0'
  description: Modern Kafka client library for Ruby based on librdkafka
  email:
- - thijs@appsignal.com
+ - contact@karafka.io
  executables: []
  extensions:
  - ext/Rakefile
@@ -174,15 +174,16 @@ files:
  - ".github/workflows/ci.yml"
  - ".gitignore"
  - ".rspec"
+ - ".ruby-gemset"
+ - ".ruby-version"
  - ".yardopts"
  - CHANGELOG.md
  - Gemfile
  - Guardfile
- - LICENSE
+ - MIT-LICENSE
  - README.md
  - Rakefile
  - certs/cert_chain.pem
- - certs/karafka-pro.pem
  - docker-compose.yml
  - ext/README.md
  - ext/Rakefile
@@ -205,12 +206,14 @@ files:
  - lib/rdkafka/consumer/partition.rb
  - lib/rdkafka/consumer/topic_partition_list.rb
  - lib/rdkafka/error.rb
+ - lib/rdkafka/helpers/time.rb
  - lib/rdkafka/metadata.rb
  - lib/rdkafka/native_kafka.rb
  - lib/rdkafka/producer.rb
  - lib/rdkafka/producer/delivery_handle.rb
  - lib/rdkafka/producer/delivery_report.rb
  - lib/rdkafka/version.rb
+ - renovate.json
  - spec/rdkafka/abstract_handle_spec.rb
  - spec/rdkafka/admin/create_topic_handle_spec.rb
  - spec/rdkafka/admin/create_topic_report_spec.rb
@@ -251,14 +254,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-       version: '2.6'
+       version: '2.7'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  requirements: []
- rubygems_version: 3.3.4
+ rubygems_version: 3.4.19
  signing_key:
  specification_version: 4
  summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
metadata.gz.sig CHANGED
Binary file
data/certs/karafka-pro.pem DELETED
@@ -1,11 +0,0 @@
- -----BEGIN RSA PUBLIC KEY-----
- MIIBigKCAYEApcd6ybskiNs9WUvBGVUE8GdWDehjZ9TyjSj/fDl/UcMYqY0R5YX9
- tnYxEwZZRMdVltKWxr88Qmshh1IQz6CpJVbcfYjt/158pSGPm+AUua6tkLqIvZDM
- ocFOMafmroI+BMuL+Zu5QH7HC2tkT16jclGYfMQkJjXVUQTk2UZr+94+8RlUz/CH
- Y6hPA7xPgIyPfyPCxz1VWzAwXwT++NCJQPBr5MqT84LNSEzUSlR9pFNShf3UCUT+
- 8LWOvjFSNGmMMSsbo2T7/+dz9/FM02YG00EO0x04qteggwcaEYLFrigDN6/fM0ih
- BXZILnMUqC/qrfW2YFg4ZqKZJuxaALqqkPxrkBDYqoqcAloqn36jBSke6tc/2I/J
- 2Afq3r53UoAbUH7h5I/L8YeaiA4MYjAuq724lHlrOmIr4D6yjYC0a1LGlPjLk869
- 2nsVXNgomhVb071E6amR+rJJnfvkdZgCmEBFnqnBV5A1u4qgNsa2rVcD+gJRvb2T
- aQtjlQWKPx5xAgMBAAE=
- -----END RSA PUBLIC KEY-----