rdkafka 0.10.0 → 0.12.0.beta.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 9f954490d858591f12e5a7fc1fed82e1b5c6b42049c5ed66d2d76bb352e03a7f
-  data.tar.gz: 0ae8cbac09963d52a3075cece9e0bf6c5895e31f87995d7997ee7f04e52a10c8
+  metadata.gz: a58b7e8b03dec32027bf3d9ec513a87e6bb0a8abf2f6598cd34a965bebce2dbb
+  data.tar.gz: 603c985bd19a2213671b600680a748279e21d7b4aeb3f23a7f1efa16a5e1229c
 SHA512:
-  metadata.gz: 33d43862b6b7cee1a3284221f75fae0c6bdfa6df4faa4396378c96e34a8f1a2395202dd01d6a5f2baac5557010505f548e7862bdd14202de6b3f85c77ca95309
-  data.tar.gz: 22d6243a853bfd630497eab13a8b6823f1a1d4a358aee53a98e68e5a0255f5ca68932db6d261cbad34dadeca5854522d8fceb15b43004ce8719a1d5d9aa3e9bd
+  metadata.gz: 9e5a5bd4c911f93572d0f8be2e5f576b08ee0ff86ab773e9e7d9db1182002e62c1c57b03cfe6e7b686c9890c35f6fca811fe10daad3a0d4d57bd67a5340c7fa1
+  data.tar.gz: 5fd3c57d9797f3b1f7c636f8190cee0b8cfc56f14daa03a2be21da6ff11fff1b7e00e233ab10a37b280b06ac2a197c155ebf1fdd6dfc8a97af9ce8bd38a341e1
data/.rspec ADDED
@@ -0,0 +1 @@
+--format documentation
data/.semaphore/semaphore.yml CHANGED
@@ -13,7 +13,7 @@ blocks:
   - name: bundle exec rspec
     matrix:
     - env_var: RUBY_VERSION
-      values: [ "2.5.8", "2.6.6", "2.7.2", "3.0.0", "jruby-9.2.13.0" ]
+      values: [ "2.6.8", "2.7.4", "3.0.2", "jruby-9.3.1.0"]
     commands:
     - sem-version ruby $RUBY_VERSION
     - checkout
data/CHANGELOG.md CHANGED
@@ -1,3 +1,8 @@
+# 0.11.0
+* Upgrade librdkafka to 1.8.2
+* Bump supported minimum Ruby version to 2.6
+* Better homebrew path detection
+
 # 0.10.0
 * Upgrade librdkafka to 1.5.0
 * Add error callback config
@@ -45,7 +50,7 @@
 * Use default Homebrew openssl location if present
 * Consumer lag handles empty topics
 * End iteration in consumer when it is closed
-* Add suport for storing message offsets
+* Add support for storing message offsets
 * Add missing runtime dependency to rake

 # 0.4.1
data/Guardfile ADDED
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+logger level: :error
+
+guard :rspec, cmd: "bundle exec rspec --format #{ENV.fetch("FORMAT", "documentation")}" do
+  require "guard/rspec/dsl"
+  dsl = Guard::RSpec::Dsl.new(self)
+
+  # Ruby files
+  ruby = dsl.ruby
+  dsl.watch_spec_files_for(ruby.lib_files)
+  watch(%r{^lib/(.+)\.rb}) { |m| "spec/#{m[1]}_spec.rb" }
+
+  # RSpec files
+  rspec = dsl.rspec
+  watch(rspec.spec_helper) { rspec.spec_dir }
+  watch(rspec.spec_support) { rspec.spec_dir }
+  watch(rspec.spec_files)
+end
data/README.md CHANGED
@@ -7,7 +7,9 @@
 The `rdkafka` gem is a modern Kafka client library for Ruby based on
 [librdkafka](https://github.com/edenhill/librdkafka/).
 It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
-gem and targets Kafka 1.0+ and Ruby 2.4+.
+gem and targets Kafka 1.0+ and Ruby versions that are under security or
+active maintenance. We remove Ruby versions from our CI builds when they
+become EOL.

 `rdkafka` was written because we needed a reliable Ruby client for
 Kafka that supports modern Kafka at [AppSignal](https://appsignal.com).
data/bin/console ADDED
@@ -0,0 +1,11 @@
+#!/usr/bin/env ruby
+
+# frozen_string_literal: true
+
+ENV["IRBRC"] = File.join(File.dirname(__FILE__), ".irbrc")
+
+require "bundler/setup"
+require "rdkafka"
+
+require "irb"
+IRB.start(__FILE__)
data/docker-compose.yml CHANGED
@@ -4,13 +4,13 @@ version: '2'

 services:
   zookeeper:
-    image: confluentinc/cp-zookeeper:latest
+    image: confluentinc/cp-zookeeper:5.2.6
     environment:
       ZOOKEEPER_CLIENT_PORT: 2181
       ZOOKEEPER_TICK_TIME: 2000

   kafka:
-    image: confluentinc/cp-kafka:latest
+    image: confluentinc/cp-kafka:5.2.5-10
     depends_on:
     - zookeeper
     ports:
@@ -18,7 +18,7 @@ services:
     environment:
       KAFKA_BROKER_ID: 1
       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
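
The images are now pinned and the broker advertises itself on localhost, which is what the gem's test helpers point at. A minimal hedged sketch of producing to this stack once `docker-compose up` is running (the topic name is a placeholder):

  require "rdkafka"

  config = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092")
  producer = config.producer
  handle = producer.produce(topic: "test-topic", payload: "hello", key: "k1")
  handle.wait # blocks until the delivery report arrives
  producer.close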
data/ext/README.md CHANGED
@@ -1,6 +1,6 @@
 # Ext

-This gem dependes on the `librdkafka` C library. It is downloaded when
+This gem depends on the `librdkafka` C library. It is downloaded when
 this gem is installed.

 To update the `librdkafka` version, follow these steps:
data/ext/Rakefile CHANGED
@@ -4,30 +4,14 @@ require "fileutils"
 require "open-uri"

 task :default => :clean do
-  # MiniPortile#download_file_http is a monkey patch that removes the download
-  # progress indicator. This indicator relies on the 'Content Length' response
-  # headers, which is not set by GitHub
-  class MiniPortile
-    def download_file_http(url, full_path, _count)
-      filename = File.basename(full_path)
-      with_tempfile(filename, full_path) do |temp_file|
-        params = { 'Accept-Encoding' => 'identity' }
-        OpenURI.open_uri(url, 'rb', params) do |io|
-          temp_file.write(io.read)
-        end
-        output
-      end
-    end
-  end
-
   # Download and compile librdkafka
   recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)

   # Use default homebrew openssl if we're on mac and the directory exists
   # and each of flags is not empty
-  if recipe.host&.include?("darwin") && Dir.exist?("/usr/local/opt/openssl")
-    ENV["CPPFLAGS"] = "-I/usr/local/opt/openssl/include" unless ENV["CPPFLAGS"]
-    ENV["LDFLAGS"] = "-L/usr/local/opt/openssl/lib" unless ENV["LDFLAGS"]
+  if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+    ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+    ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
   end

   recipe.files << {
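
The new detection above locates OpenSSL by asking Homebrew instead of hard-coding /usr/local/opt/openssl, which breaks on Apple Silicon where the prefix is /opt/homebrew. A standalone sketch of the same check, assuming `brew` is on the PATH:

  # Returns Homebrew's openssl prefix, or nil when brew is missing or the
  # reported directory does not exist. Mirrors the Rakefile logic above.
  def homebrew_openssl_prefix
    return nil unless system("which brew > /dev/null 2>&1")
    prefix = %x(brew --prefix openssl).strip
    Dir.exist?(prefix) ? prefix : nil
  end

  if (prefix = homebrew_openssl_prefix)
    ENV["CPPFLAGS"] ||= "-I#{prefix}/include"
    ENV["LDFLAGS"]  ||= "-L#{prefix}/lib"
  end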
data/lib/rdkafka/config.rb CHANGED
@@ -179,7 +179,7 @@ module Rdkafka
       # Set callback to receive delivery reports on config
       Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
       # Return producer with Kafka client
-      Rdkafka::Producer.new(native_kafka(config, :rd_kafka_producer)).tap do |producer|
+      Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer))).tap do |producer|
         opaque.producer = producer
       end
     end
data/lib/rdkafka/consumer.rb CHANGED
@@ -498,11 +498,11 @@ module Rdkafka
     # Exception behavior is more complicated than with `each`, in that if
     # :yield_on_error is true, and an exception is raised during the
     # poll, and messages have already been received, they will be yielded to
-    # the caller before the exception is allowed to propogate.
+    # the caller before the exception is allowed to propagate.
     #
     # If you are setting either auto.commit or auto.offset.store to false in
     # the consumer configuration, then you should let yield_on_error keep its
-    # default value of false because you are gauranteed to see these messages
+    # default value of false because you are guaranteed to see these messages
     # again. However, if both auto.commit and auto.offset.store are set to
     # true, you should set yield_on_error to true so you can process messages
     # that you may or may not see again.
@@ -518,7 +518,7 @@ module Rdkafka
     # @yield [messages, pending_exception]
     # @yieldparam messages [Array] An array of received Message
     # @yieldparam pending_exception [Exception] normally nil, or an exception
-    # which will be propogated after processing of the partial batch is complete.
+    # which will be propagated after processing of the partial batch is complete.
     #
     # @return [nil]
     def each_batch(max_items: 100, bytes_threshold: Float::INFINITY, timeout_ms: 250, yield_on_error: false, &block)
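
To make the documentation above concrete, a minimal sketch of the recommended pattern when both auto.commit and auto.offset.store are left enabled (`process` is a hypothetical per-message handler):

  consumer.each_batch(max_items: 100, yield_on_error: true) do |messages, pending_exception|
    messages.each { |message| process(message) }
    # Surface the buffered poll error once the partial batch is handled.
    raise pending_exception if pending_exception
  end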
data/lib/rdkafka/producer/client.rb ADDED
@@ -0,0 +1,47 @@
+module Rdkafka
+  class Producer
+    class Client
+      def initialize(native)
+        @native = native
+
+        # Start thread to poll client for delivery callbacks
+        @polling_thread = Thread.new do
+          loop do
+            Rdkafka::Bindings.rd_kafka_poll(native, 250)
+            # Exit thread if closing and the poll queue is empty
+            if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(native) == 0
+              break
+            end
+          end
+        end
+        @polling_thread.abort_on_exception = true
+        @polling_thread[:closing] = false
+      end
+
+      def native
+        @native
+      end
+
+      def finalizer
+        ->(_) { close }
+      end
+
+      def closed?
+        @native.nil?
+      end
+
+      def close(object_id=nil)
+        return unless @native
+
+        # Indicate to polling thread that we're closing
+        @polling_thread[:closing] = true
+        # Wait for the polling thread to finish up
+        @polling_thread.join
+
+        Rdkafka::Bindings.rd_kafka_destroy(@native)
+
+        @native = nil
+      end
+    end
+  end
+end
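
The class above moves native-handle ownership and the delivery-callback polling loop out of `Rdkafka::Producer`. A minimal sketch of its lifecycle, assuming a `native_handle` obtained from `Config#native_kafka` (a private API; `Config#producer` normally wires this up for you):

  client = Rdkafka::Producer::Client.new(native_handle)
  client.closed?  # => false
  client.native   # the raw handle, used for rd_kafka_producev calls
  client.close    # flags the polling thread, joins it, destroys the handle
  client.closed?  # => true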
data/lib/rdkafka/producer/delivery_report.rb CHANGED
@@ -11,7 +11,7 @@ module Rdkafka
     attr_reader :offset

     # Error in case happen during produce.
-    # @return [string]
+    # @return [String]
     attr_reader :error

     private
data/lib/rdkafka/producer.rb CHANGED
@@ -1,4 +1,4 @@
-require "securerandom"
+require "objspace"

 module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
@@ -10,25 +10,11 @@ module Rdkafka
     attr_reader :delivery_callback

     # @private
-    def initialize(native_kafka)
-      @id = SecureRandom.uuid
-      @closing = false
-      @native_kafka = native_kafka
+    def initialize(client)
+      @client = client

       # Makes sure, that the producer gets closed before it gets GCed by Ruby
-      ObjectSpace.define_finalizer(@id, proc { close })
-
-      # Start thread to poll client for delivery callbacks
-      @polling_thread = Thread.new do
-        loop do
-          Rdkafka::Bindings.rd_kafka_poll(@native_kafka, 250)
-          # Exit thread if closing and the poll queue is empty
-          if @closing && Rdkafka::Bindings.rd_kafka_outq_len(@native_kafka) == 0
-            break
-          end
-        end
-      end
-      @polling_thread.abort_on_exception = true
+      ObjectSpace.define_finalizer(self, client.finalizer)
     end

     # Set a callback that will be called every time a message is successfully produced.
@@ -44,16 +30,9 @@ module Rdkafka

     # Close this producer and wait for the internal poll queue to empty.
     def close
-      ObjectSpace.undefine_finalizer(@id)
-
-      return unless @native_kafka
+      ObjectSpace.undefine_finalizer(self)

-      # Indicate to polling thread that we're closing
-      @closing = true
-      # Wait for the polling thread to finish up
-      @polling_thread.join
-      Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
-      @native_kafka = nil
+      @client.close
     end

     # Partition count for a given topic.
@@ -65,7 +44,7 @@ module Rdkafka
     #
     def partition_count(topic)
       closed_producer_check(__method__)
-      Rdkafka::Metadata.new(@native_kafka, topic).topics&.first[:partition_count]
+      Rdkafka::Metadata.new(@client.native, topic).topics&.first[:partition_count]
     end

     # Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
@@ -75,8 +54,9 @@ module Rdkafka
     #
     # @param topic [String] The topic to produce to
     # @param payload [String,nil] The message's payload
-    # @param key [String] The message's key
+    # @param key [String, nil] The message's key
     # @param partition [Integer,nil] Optional partition to produce to
+    # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
     # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
     # @param headers [Hash<String,String>] Optional message headers
     #
@@ -156,7 +136,7 @@ module Rdkafka

       # Produce the message
       response = Rdkafka::Bindings.rd_kafka_producev(
-        @native_kafka,
+        @client.native,
         *args
       )

@@ -175,7 +155,7 @@ module Rdkafka
     end

     def closed_producer_check(method)
-      raise Rdkafka::ClosedProducerError.new(method) if @native_kafka.nil?
+      raise Rdkafka::ClosedProducerError.new(method) if @client.closed?
     end
   end
 end
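
The rewritten initializer attaches the finalizer to `self` but builds the proc on the client, whose `->(_) { close }` lambda captures the client rather than the producer. That matters because a Ruby finalizer proc that closes over the object it is attached to keeps that object reachable, so it can never be collected; the old `@id`-based indirection was a workaround for exactly that pitfall. A minimal sketch of the safe pattern, with hypothetical names:

  class Handle
    # Built outside instance scope so the lambda never captures `self`.
    def self.releaser(resource)
      ->(_object_id) { resource.close }
    end

    def initialize(resource) # `resource` is a hypothetical IO-like object
      @resource = resource
      ObjectSpace.define_finalizer(self, self.class.releaser(resource))
    end
  end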
data/lib/rdkafka/version.rb CHANGED
@@ -1,5 +1,5 @@
 module Rdkafka
-  VERSION = "0.10.0"
-  LIBRDKAFKA_VERSION = "1.5.0"
-  LIBRDKAFKA_SOURCE_SHA256 = "f7fee59fdbf1286ec23ef0b35b2dfb41031c8727c90ced6435b8cf576f23a656"
+  VERSION = "0.12.0.beta.0"
+  LIBRDKAFKA_VERSION = "1.8.2"
+  LIBRDKAFKA_SOURCE_SHA256 = "6a747d293a7a4613bd2897e28e8791476fbe1ae7361f2530a876e0fd483482a6"
 end
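
`LIBRDKAFKA_SOURCE_SHA256` pins the source tarball that the `ext/Rakefile` build downloads. A hedged sketch of verifying a downloaded archive against it (the file path is hypothetical; the build pins this digest itself):

  require "digest"
  require "rdkafka/version"

  sha = Digest::SHA256.file("librdkafka-#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz").hexdigest
  abort "librdkafka checksum mismatch" unless sha == Rdkafka::LIBRDKAFKA_SOURCE_SHA256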
data/lib/rdkafka.rb CHANGED
@@ -17,5 +17,6 @@ require "rdkafka/consumer/topic_partition_list"
 require "rdkafka/error"
 require "rdkafka/metadata"
 require "rdkafka/producer"
+require "rdkafka/producer/client"
 require "rdkafka/producer/delivery_handle"
 require "rdkafka/producer/delivery_report"
data/rdkafka.gemspec CHANGED
@@ -14,15 +14,17 @@ Gem::Specification.new do |gem|
   gem.name = 'rdkafka'
   gem.require_paths = ['lib']
   gem.version = Rdkafka::VERSION
-  gem.required_ruby_version = '>= 2.4'
+  gem.required_ruby_version = '>= 2.6'
   gem.extensions = %w(ext/Rakefile)

-  gem.add_dependency 'ffi', '~> 1.9'
-  gem.add_dependency 'mini_portile2', '~> 2.1'
-  gem.add_dependency 'rake', '>= 12.3'
+  gem.add_dependency 'ffi', '~> 1.15'
+  gem.add_dependency 'mini_portile2', '~> 2.6'
+  gem.add_dependency 'rake', '> 12'

-  gem.add_development_dependency 'pry', '~> 0.10'
+  gem.add_development_dependency 'pry'
   gem.add_development_dependency 'rspec', '~> 3.5'
-  gem.add_development_dependency 'rake', '~> 12.0'
-  gem.add_development_dependency 'simplecov', '~> 0.15'
+  gem.add_development_dependency 'rake'
+  gem.add_development_dependency 'simplecov'
+  gem.add_development_dependency 'guard'
+  gem.add_development_dependency 'guard-rspec'
 end
data/spec/rdkafka/abstract_handle_spec.rb CHANGED
@@ -111,4 +111,3 @@ describe Rdkafka::AbstractHandle do
     end
   end
 end
-
data/spec/rdkafka/admin_spec.rb CHANGED
@@ -52,7 +52,7 @@ describe Rdkafka::Admin do
   end

   describe "with an invalid partition count" do
-    let(:topic_partition_count) { -1 }
+    let(:topic_partition_count) { -999 }

     it "raises an exception" do
       expect {
data/spec/rdkafka/config_spec.rb CHANGED
@@ -108,7 +108,7 @@ describe Rdkafka::Config do
   end

   it "should create a consumer with valid config" do
-    consumer = rdkafka_config.consumer
+    consumer = rdkafka_consumer_config.consumer
     expect(consumer).to be_a Rdkafka::Consumer
     consumer.close
   end
@@ -136,7 +136,7 @@ describe Rdkafka::Config do
   end

   it "should create a producer with valid config" do
-    producer = rdkafka_config.producer
+    producer = rdkafka_consumer_config.producer
     expect(producer).to be_a Rdkafka::Producer
     producer.close
   end
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -3,9 +3,8 @@ require "ostruct"
 require 'securerandom'

 describe Rdkafka::Consumer do
-  let(:config) { rdkafka_config }
-  let(:consumer) { config.consumer }
-  let(:producer) { config.producer }
+  let(:consumer) { rdkafka_consumer_config.consumer }
+  let(:producer) { rdkafka_producer_config.producer }

   after { consumer.close }
   after { producer.close }
@@ -328,7 +327,7 @@ describe Rdkafka::Consumer do
     before :all do
       # Make sure there are some messages.
       handles = []
-      producer = rdkafka_config.producer
+      producer = rdkafka_producer_config.producer
       10.times do
         (0..2).each do |i|
           handles << producer.produce(
@@ -404,7 +403,7 @@ describe Rdkafka::Consumer do
       config = {}
      config[:'enable.auto.offset.store'] = false
      config[:'enable.auto.commit'] = false
-      @new_consumer = rdkafka_config(config).consumer
+      @new_consumer = rdkafka_consumer_config(config).consumer
       @new_consumer.subscribe("consume_test_topic")
       wait_for_assignment(@new_consumer)
     end
@@ -459,13 +458,13 @@ describe Rdkafka::Consumer do
   end

   describe "#lag" do
-    let(:config) { rdkafka_config(:"enable.partition.eof" => true) }
+    let(:consumer) { rdkafka_consumer_config(:"enable.partition.eof" => true).consumer }

     it "should calculate the consumer lag" do
       # Make sure there's a message in every partition and
       # wait for the message to make sure everything is committed.
       (0..2).each do |i|
-        report = producer.produce(
+        producer.produce(
           topic: "consume_test_topic",
           key: "key lag #{i}",
           partition: i
@@ -508,7 +507,7 @@ describe Rdkafka::Consumer do

       # Produce message on every topic again
       (0..2).each do |i|
-        report = producer.produce(
+        producer.produce(
           topic: "consume_test_topic",
           key: "key lag #{i}",
           partition: i
@@ -824,8 +823,12 @@ describe Rdkafka::Consumer do

     context "error raised from poll and yield_on_error is true" do
       it "should yield buffered exceptions on rebalance, then break" do
-        config = rdkafka_config({:"enable.auto.commit" => false,
-                                 :"enable.auto.offset.store" => false })
+        config = rdkafka_consumer_config(
+          {
+            :"enable.auto.commit" => false,
+            :"enable.auto.offset.store" => false
+          }
+        )
         consumer = config.consumer
         consumer.subscribe(topic_name)
         loop_count = 0
@@ -864,8 +867,12 @@ describe Rdkafka::Consumer do

     context "error raised from poll and yield_on_error is false" do
       it "should yield buffered exceptions on rebalance, then break" do
-        config = rdkafka_config({:"enable.auto.commit" => false,
-                                 :"enable.auto.offset.store" => false })
+        config = rdkafka_consumer_config(
+          {
+            :"enable.auto.commit" => false,
+            :"enable.auto.offset.store" => false
+          }
+        )
         consumer = config.consumer
         consumer.subscribe(topic_name)
         loop_count = 0
@@ -902,51 +909,64 @@ describe Rdkafka::Consumer do
   end

   describe "a rebalance listener" do
-    it "should get notifications" do
-      listener = Struct.new(:queue) do
-        def on_partitions_assigned(consumer, list)
-          collect(:assign, list)
-        end
+    let(:consumer) do
+      config = rdkafka_consumer_config
+      config.consumer_rebalance_listener = listener
+      config.consumer
+    end

-        def on_partitions_revoked(consumer, list)
-          collect(:revoke, list)
-        end
+    context "with a working listener" do
+      let(:listener) do
+        Struct.new(:queue) do
+          def on_partitions_assigned(consumer, list)
+            collect(:assign, list)
+          end

-        def collect(name, list)
-          partitions = list.to_h.map { |key, values| [key, values.map(&:partition)] }.flatten
-          queue << ([name] + partitions)
-        end
-      end.new([])
+          def on_partitions_revoked(consumer, list)
+            collect(:revoke, list)
+          end
+
+          def collect(name, list)
+            partitions = list.to_h.map { |key, values| [key, values.map(&:partition)] }.flatten
+            queue << ([name] + partitions)
+          end
+        end.new([])
+      end

-      notify_listener(listener)
+      it "should get notifications" do
+        notify_listener(listener)

-      expect(listener.queue).to eq([
-        [:assign, "consume_test_topic", 0, 1, 2],
-        [:revoke, "consume_test_topic", 0, 1, 2]
-      ])
+        expect(listener.queue).to eq([
+          [:assign, "consume_test_topic", 0, 1, 2],
+          [:revoke, "consume_test_topic", 0, 1, 2]
+        ])
+      end
     end

-    it 'should handle callback exceptions' do
-      listener = Struct.new(:queue) do
-        def on_partitions_assigned(consumer, list)
-          queue << :assigned
-          raise 'boom'
-        end
+    context "with a broken listener" do
+      let(:listener) do
+        Struct.new(:queue) do
+          def on_partitions_assigned(consumer, list)
+            queue << :assigned
+            raise 'boom'
+          end

-        def on_partitions_revoked(consumer, list)
-          queue << :revoked
-          raise 'boom'
-        end
-      end.new([])
+          def on_partitions_revoked(consumer, list)
+            queue << :revoked
+            raise 'boom'
+          end
+        end.new([])
+      end

-      notify_listener(listener)
+      it 'should handle callback exceptions' do
+        notify_listener(listener)

-      expect(listener.queue).to eq([:assigned, :revoked])
+        expect(listener.queue).to eq([:assigned, :revoked])
+      end
     end

     def notify_listener(listener)
       # 1. subscribe and poll
-      config.consumer_rebalance_listener = listener
       consumer.subscribe("consume_test_topic")
       wait_for_assignment(consumer)
       consumer.poll(100)
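
The restructured specs above exercise the same public hook an application would use: any object responding to `on_partitions_assigned` and `on_partitions_revoked` can be assigned to `Config#consumer_rebalance_listener`. A hedged sketch outside the test suite (broker, group, and topic names are placeholders):

  listener = Object.new
  def listener.on_partitions_assigned(consumer, list)
    puts "assigned: #{list.to_h.keys}"
  end
  def listener.on_partitions_revoked(consumer, list)
    puts "revoked: #{list.to_h.keys}"
  end

  config = Rdkafka::Config.new(
    :"bootstrap.servers" => "localhost:9092",
    :"group.id" => "example-group"
  )
  config.consumer_rebalance_listener = listener
  consumer = config.consumer
  consumer.subscribe("consume_test_topic")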
data/spec/rdkafka/metadata_spec.rb CHANGED
@@ -2,7 +2,7 @@ require "spec_helper"
 require "securerandom"

 describe Rdkafka::Metadata do
-  let(:config) { rdkafka_config }
+  let(:config) { rdkafka_consumer_config }
   let(:native_config) { config.send(:native_config) }
   let(:native_kafka) { config.send(:native_kafka, native_config, :rd_kafka_consumer) }
data/spec/rdkafka/producer/client_spec.rb ADDED
@@ -0,0 +1,144 @@
+require "spec_helper"
+
+describe Rdkafka::Producer::Client do
+  let(:native) { double }
+  let(:closing) { false }
+  let(:thread) { double(Thread) }
+
+  subject(:client) { described_class.new(native) }
+
+  before do
+    allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(native, 250)
+    allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).and_return(0)
+    allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
+    allow(Thread).to receive(:new).and_return(thread)
+
+    allow(thread).to receive(:[]=).with(:closing, anything)
+    allow(thread).to receive(:join)
+    allow(thread).to receive(:abort_on_exception=).with(anything)
+  end
+
+  context "defaults" do
+    it "sets the thread to abort on exception" do
+      expect(thread).to receive(:abort_on_exception=).with(true)
+
+      client
+    end
+
+    it "sets the thread `closing` flag to false" do
+      expect(thread).to receive(:[]=).with(:closing, false)
+
+      client
+    end
+  end
+
+  context "the polling thread" do
+    it "is created" do
+      expect(Thread).to receive(:new)
+
+      client
+    end
+
+    it "polls the native with default 250ms timeout" do
+      polling_loop_expects do
+        expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(native, 250)
+      end
+    end
+
+    it "check the out queue of native client" do
+      polling_loop_expects do
+        expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native)
+      end
+    end
+  end
+
+  def polling_loop_expects(&block)
+    Thread.current[:closing] = true # this forces the loop break with line #12
+
+    allow(Thread).to receive(:new).and_yield do |_|
+      block.call
+    end.and_return(thread)
+
+    client
+  end
+
+  it "exposes `native` client" do
+    expect(client.native).to eq(native)
+  end
+
+  context "when client was not yet closed (`nil`)" do
+    it "is not closed" do
+      expect(client.closed?).to eq(false)
+    end
+
+    context "and attempt to close" do
+      it "calls the `destroy` binding" do
+        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native)
+
+        client.close
+      end
+
+      it "indicates to the polling thread that it is closing" do
+        expect(thread).to receive(:[]=).with(:closing, true)
+
+        client.close
+      end
+
+      it "joins the polling thread" do
+        expect(thread).to receive(:join)
+
+        client.close
+      end
+
+      it "closes and unassign the native client" do
+        client.close
+
+        expect(client.native).to eq(nil)
+        expect(client.closed?).to eq(true)
+      end
+    end
+  end
+
+  context "when client was already closed" do
+    before { client.close }
+
+    it "is closed" do
+      expect(client.closed?).to eq(true)
+    end
+
+    context "and attempt to close again" do
+      it "does not call the `destroy` binding" do
+        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy)
+
+        client.close
+      end
+
+      it "does not indicate to the polling thread that it is closing" do
+        expect(thread).not_to receive(:[]=).with(:closing, true)
+
+        client.close
+      end
+
+      it "does not join the polling thread" do
+        expect(thread).not_to receive(:join)
+
+        client.close
+      end
+
+      it "does not close and unassign the native client again" do
+        client.close
+
+        expect(client.native).to eq(nil)
+        expect(client.closed?).to eq(true)
+      end
+    end
+  end
+
+  it "provide a finalizer Proc that closes the `native` client" do
+    expect(client.closed?).to eq(false)
+
+    client.finalizer.call("some-ignored-object-id")
+
+    expect(client.closed?).to eq(true)
+  end
+end
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,8 +1,9 @@
 require "spec_helper"
+require "zlib"

 describe Rdkafka::Producer do
-  let(:producer) { rdkafka_config.producer }
-  let(:consumer) { rdkafka_config.consumer }
+  let(:producer) { rdkafka_producer_config.producer }
+  let(:consumer) { rdkafka_consumer_config.consumer }

   after do
     # Registry should always end up being empty
@@ -388,7 +389,7 @@ describe Rdkafka::Producer do
     reader.close

     # Avoids sharing the socket between processes.
-    producer = rdkafka_config.producer
+    producer = rdkafka_producer_config.producer

     handle = producer.produce(
       topic: "produce_test_topic",
@@ -8,27 +8,57 @@ end
8
8
  require "pry"
9
9
  require "rspec"
10
10
  require "rdkafka"
11
+ require "timeout"
11
12
 
12
- def rdkafka_config(config_overrides={})
13
- config = {
13
+ def rdkafka_base_config
14
+ {
14
15
  :"api.version.request" => false,
15
16
  :"broker.version.fallback" => "1.0",
16
17
  :"bootstrap.servers" => "localhost:9092",
17
- :"group.id" => "ruby-test-#{Random.new.rand(0..1_000_000)}",
18
- :"auto.offset.reset" => "earliest",
19
- :"enable.partition.eof" => false
20
18
  }
19
+ end
20
+
21
+ def rdkafka_config(config_overrides={})
22
+ # Generate the base config
23
+ config = rdkafka_base_config
24
+ # Merge overrides
25
+ config.merge!(config_overrides)
26
+ # Return it
27
+ Rdkafka::Config.new(config)
28
+ end
29
+
30
+ def rdkafka_consumer_config(config_overrides={})
31
+ # Generate the base config
32
+ config = rdkafka_base_config
33
+ # Add consumer specific fields to it
34
+ config[:"auto.offset.reset"] = "earliest"
35
+ config[:"enable.partition.eof"] = false
36
+ config[:"group.id"] = "ruby-test-#{Random.new.rand(0..1_000_000)}"
37
+ # Enable debug mode if required
38
+ if ENV["DEBUG_CONSUMER"]
39
+ config[:debug] = "cgrp,topic,fetch"
40
+ end
41
+ # Merge overrides
42
+ config.merge!(config_overrides)
43
+ # Return it
44
+ Rdkafka::Config.new(config)
45
+ end
46
+
47
+ def rdkafka_producer_config(config_overrides={})
48
+ # Generate the base config
49
+ config = rdkafka_base_config
50
+ # Enable debug mode if required
21
51
  if ENV["DEBUG_PRODUCER"]
22
52
  config[:debug] = "broker,topic,msg"
23
- elsif ENV["DEBUG_CONSUMER"]
24
- config[:debug] = "cgrp,topic,fetch"
25
53
  end
54
+ # Merge overrides
26
55
  config.merge!(config_overrides)
56
+ # Return it
27
57
  Rdkafka::Config.new(config)
28
58
  end
29
59
 
30
60
  def new_native_client
31
- config = rdkafka_config
61
+ config = rdkafka_consumer_config
32
62
  config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
33
63
  end
34
64
 
@@ -42,7 +72,7 @@ end
42
72
 
43
73
  def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
44
74
  new_consumer = !!consumer
45
- consumer ||= rdkafka_config.consumer
75
+ consumer ||= rdkafka_consumer_config.consumer
46
76
  consumer.subscribe(topic)
47
77
  timeout = Time.now.to_i + timeout_in_seconds
48
78
  loop do
@@ -75,6 +105,9 @@ def wait_for_unassignment(consumer)
75
105
  end
76
106
 
77
107
  RSpec.configure do |config|
108
+ config.filter_run focus: true
109
+ config.run_all_when_everything_filtered = true
110
+
78
111
  config.before(:suite) do
79
112
  admin = rdkafka_config.admin
80
113
  {
@@ -95,4 +128,12 @@ RSpec.configure do |config|
95
128
  end
96
129
  admin.close
97
130
  end
131
+
132
+ config.around(:each) do |example|
133
+ # Timeout specs after a minute. If they take longer
134
+ # they are probably stuck
135
+ Timeout::timeout(60) do
136
+ example.run
137
+ end
138
+ end
98
139
  end
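
With the helpers split by role, a spec grabs the matching config and can still layer overrides on top; a short sketch (the override key is just one valid librdkafka setting used as an example):

  # Consumer configs get their own random group.id, earliest offset reset,
  # and partition-EOF disabled by default.
  consumer = rdkafka_consumer_config(:"auto.commit.interval.ms" => 100).consumer
  # Producer configs stay lean; DEBUG_PRODUCER=1 turns on broker/topic/msg logs.
  producer = rdkafka_producer_config.producer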
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.10.0
+  version: 0.12.0.beta.0
 platform: ruby
 authors:
 - Thijs Cadier
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-09-07 00:00:00.000000000 Z
+date: 2022-03-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -16,56 +16,56 @@ dependencies:
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-       version: '1.9'
+       version: '1.15'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
    - - "~>"
      - !ruby/object:Gem::Version
-       version: '1.9'
+       version: '1.15'
 - !ruby/object:Gem::Dependency
   name: mini_portile2
   requirement: !ruby/object:Gem::Requirement
     requirements:
    - - "~>"
      - !ruby/object:Gem::Version
-       version: '2.1'
+       version: '2.6'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
    - - "~>"
      - !ruby/object:Gem::Version
-       version: '2.1'
+       version: '2.6'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - ">="
+    - - ">"
      - !ruby/object:Gem::Version
-       version: '12.3'
+       version: '12'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - ">="
+    - - ">"
      - !ruby/object:Gem::Version
-       version: '12.3'
+       version: '12'
 - !ruby/object:Gem::Dependency
   name: pry
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
      - !ruby/object:Gem::Version
-       version: '0.10'
+       version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
      - !ruby/object:Gem::Version
-       version: '0.10'
+       version: '0'
 - !ruby/object:Gem::Dependency
   name: rspec
   requirement: !ruby/object:Gem::Requirement
@@ -84,46 +84,78 @@ dependencies:
   name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
      - !ruby/object:Gem::Version
-       version: '12.0'
+       version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
      - !ruby/object:Gem::Version
-       version: '12.0'
+       version: '0'
 - !ruby/object:Gem::Dependency
   name: simplecov
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
      - !ruby/object:Gem::Version
-       version: '0.15'
+       version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+- !ruby/object:Gem::Dependency
+  name: guard
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
      - !ruby/object:Gem::Version
-       version: '0.15'
+      version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+- !ruby/object:Gem::Dependency
+  name: guard-rspec
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
 description: Modern Kafka client library for Ruby based on librdkafka
 email:
 - thijs@appsignal.com
-executables: []
+executables:
+- console
 extensions:
 - ext/Rakefile
 extra_rdoc_files: []
 files:
 - ".gitignore"
+- ".rspec"
 - ".semaphore/semaphore.yml"
 - ".yardopts"
 - CHANGELOG.md
 - Gemfile
+- Guardfile
 - LICENSE
 - README.md
 - Rakefile
+- bin/console
 - docker-compose.yml
 - ext/README.md
 - ext/Rakefile
@@ -145,6 +177,7 @@ files:
 - lib/rdkafka/error.rb
 - lib/rdkafka/metadata.rb
 - lib/rdkafka/producer.rb
+- lib/rdkafka/producer/client.rb
 - lib/rdkafka/producer/delivery_handle.rb
 - lib/rdkafka/producer/delivery_report.rb
 - lib/rdkafka/version.rb
@@ -164,6 +197,7 @@ files:
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
+- spec/rdkafka/producer/client_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
@@ -180,12 +214,12 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
    - !ruby/object:Gem::Version
-     version: '2.4'
+     version: '2.6'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
    - !ruby/object:Gem::Version
-     version: '0'
+     version: 1.3.1
 requirements: []
 rubygems_version: 3.1.4
 signing_key:
@@ -209,6 +243,7 @@ test_files:
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
+- spec/rdkafka/producer/client_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb