rdkafka 0.13.0 → 0.14.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (53) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +58 -0
  4. data/.gitignore +4 -0
  5. data/.rspec +1 -0
  6. data/.ruby-gemset +1 -0
  7. data/.ruby-version +1 -0
  8. data/CHANGELOG.md +130 -111
  9. data/{LICENSE → MIT-LICENSE} +2 -1
  10. data/README.md +19 -20
  11. data/certs/cert_chain.pem +26 -0
  12. data/docker-compose.yml +16 -15
  13. data/ext/README.md +1 -1
  14. data/ext/Rakefile +1 -1
  15. data/lib/rdkafka/abstract_handle.rb +40 -26
  16. data/lib/rdkafka/admin.rb +6 -7
  17. data/lib/rdkafka/bindings.rb +8 -5
  18. data/lib/rdkafka/config.rb +30 -17
  19. data/lib/rdkafka/consumer/headers.rb +2 -4
  20. data/lib/rdkafka/consumer/topic_partition_list.rb +3 -1
  21. data/lib/rdkafka/consumer.rb +92 -53
  22. data/lib/rdkafka/helpers/time.rb +14 -0
  23. data/lib/rdkafka/metadata.rb +22 -1
  24. data/lib/rdkafka/native_kafka.rb +6 -1
  25. data/lib/rdkafka/producer.rb +85 -7
  26. data/lib/rdkafka/version.rb +3 -3
  27. data/lib/rdkafka.rb +10 -1
  28. data/rdkafka.gemspec +17 -3
  29. data/renovate.json +6 -0
  30. data/spec/rdkafka/abstract_handle_spec.rb +0 -2
  31. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
  32. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
  33. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
  34. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
  35. data/spec/rdkafka/admin_spec.rb +1 -2
  36. data/spec/rdkafka/bindings_spec.rb +0 -1
  37. data/spec/rdkafka/callbacks_spec.rb +0 -2
  38. data/spec/rdkafka/config_spec.rb +0 -2
  39. data/spec/rdkafka/consumer/headers_spec.rb +0 -2
  40. data/spec/rdkafka/consumer/message_spec.rb +0 -2
  41. data/spec/rdkafka/consumer/partition_spec.rb +0 -2
  42. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +19 -2
  43. data/spec/rdkafka/consumer_spec.rb +143 -39
  44. data/spec/rdkafka/error_spec.rb +0 -2
  45. data/spec/rdkafka/metadata_spec.rb +2 -3
  46. data/spec/rdkafka/native_kafka_spec.rb +2 -3
  47. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
  48. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
  49. data/spec/rdkafka/producer_spec.rb +157 -1
  50. data.tar.gz.sig +0 -0
  51. metadata +52 -13
  52. metadata.gz.sig +0 -0
  53. data/.semaphore/semaphore.yml +0 -27
@@ -1,10 +1,15 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "objspace"
4
-
5
3
  module Rdkafka
6
4
  # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
7
5
  class Producer
6
+ include Helpers::Time
7
+
8
+ # Cache partitions count for 30 seconds
9
+ PARTITIONS_COUNT_TTL = 30
10
+
11
+ private_constant :PARTITIONS_COUNT_TTL
12
+
8
13
  # @private
9
14
  # Returns the current delivery callback, by default this is nil.
10
15
  #
@@ -24,6 +29,26 @@ module Rdkafka
24
29
 
25
30
  # Makes sure, that native kafka gets closed before it gets GCed by Ruby
26
31
  ObjectSpace.define_finalizer(self, native_kafka.finalizer)
32
+
33
+ @_partitions_count_cache = Hash.new do |cache, topic|
34
+ topic_metadata = nil
35
+
36
+ @native_kafka.with_inner do |inner|
37
+ topic_metadata = ::Rdkafka::Metadata.new(inner, topic).topics&.first
38
+ end
39
+
40
+ cache[topic] = [
41
+ monotonic_now,
42
+ topic_metadata ? topic_metadata[:partition_count] : nil
43
+ ]
44
+ end
45
+ end
46
+
47
+ # @return [String] producer name
48
+ def name
49
+ @name ||= @native_kafka.with_inner do |inner|
50
+ ::Rdkafka::Bindings.rd_kafka_name(inner)
51
+ end
27
52
  end
28
53
 
29
54
  # Set a callback that will be called every time a message is successfully produced.
@@ -54,25 +79,77 @@ module Rdkafka
54
79
  # in seconds. Call this before closing a producer to ensure delivery of all messages.
55
80
  #
56
81
  # @param timeout_ms [Integer] how long should we wait for flush of all messages
82
+ # @return [Boolean] true if no more data and all was flushed, false in case there are still
83
+ # outgoing messages after the timeout
84
+ #
85
+ # @note We raise an exception for other errors because based on the librdkafka docs, there
86
+ # should be no other errors.
87
+ #
88
+ # @note For `timed_out` we do not raise an error to keep it backwards compatible
57
89
  def flush(timeout_ms=5_000)
58
90
  closed_producer_check(__method__)
59
91
 
92
+ code = nil
93
+
60
94
  @native_kafka.with_inner do |inner|
61
- Rdkafka::Bindings.rd_kafka_flush(inner, timeout_ms)
95
+ code = Rdkafka::Bindings.rd_kafka_flush(inner, timeout_ms)
62
96
  end
97
+
98
+ # Early skip not to build the error message
99
+ return true if code.zero?
100
+
101
+ error = Rdkafka::RdkafkaError.new(code)
102
+
103
+ return false if error.code == :timed_out
104
+
105
+ raise(error)
106
+ end
107
+
108
+ # Purges the outgoing queue and releases all resources.
109
+ #
110
+ # Useful when closing the producer with outgoing messages to unstable clusters or when for
111
+ # any other reasons waiting cannot go on anymore. This purges both the queue and all the
112
+ # inflight requests + updates the delivery handles statuses so they can be materialized into
113
+ # `purge_queue` errors.
114
+ def purge
115
+ closed_producer_check(__method__)
116
+
117
+ code = nil
118
+
119
+ @native_kafka.with_inner do |inner|
120
+ code = Bindings.rd_kafka_purge(
121
+ inner,
122
+ Bindings::RD_KAFKA_PURGE_F_QUEUE | Bindings::RD_KAFKA_PURGE_F_INFLIGHT
123
+ )
124
+ end
125
+
126
+ code.zero? || raise(Rdkafka::RdkafkaError.new(code))
127
+
128
+ # Wait for the purge to affect everything
129
+ sleep(0.001) until flush(100)
130
+
131
+ true
63
132
  end
64
133
 
65
134
  # Partition count for a given topic.
66
- # NOTE: If 'allow.auto.create.topics' is set to true in the broker, the topic will be auto-created after returning nil.
67
135
  #
68
136
  # @param topic [String] The topic name.
137
+ # @return [Integer] partition count for a given topic
138
+ #
139
+ # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
140
+ # auto-created after returning nil.
69
141
  #
70
- # @return partition count [Integer,nil]
142
+ # @note We cache the partition count for a given topic for given time.
143
+ # This prevents us in case someone uses `partition_key` from querying for the count with
144
+ # each message. Instead we query once every 30 seconds at most
71
145
  def partition_count(topic)
72
146
  closed_producer_check(__method__)
73
- @native_kafka.with_inner do |inner|
74
- Rdkafka::Metadata.new(inner, topic).topics&.first[:partition_count]
147
+
148
+ @_partitions_count_cache.delete_if do |_, cached|
149
+ monotonic_now - cached.first > PARTITIONS_COUNT_TTL
75
150
  end
151
+
152
+ @_partitions_count_cache[topic].last
76
153
  end
77
154
 
78
155
  # Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
@@ -193,6 +270,7 @@ module Rdkafka
193
270
  end
194
271
 
195
272
  private
273
+
196
274
  def closed_producer_check(method)
197
275
  raise Rdkafka::ClosedProducerError.new(method) if closed?
198
276
  end
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Rdkafka
4
- VERSION = "0.13.0"
5
- LIBRDKAFKA_VERSION = "2.0.2"
6
- LIBRDKAFKA_SOURCE_SHA256 = "f321bcb1e015a34114c83cf1aa7b99ee260236aab096b85c003170c90a47ca9d"
4
+ VERSION = "0.14.0"
5
+ LIBRDKAFKA_VERSION = "2.2.0"
6
+ LIBRDKAFKA_SOURCE_SHA256 = "af9a820cbecbc64115629471df7c7cecd40403b6c34bfdbb9223152677a47226"
7
7
  end
data/lib/rdkafka.rb CHANGED
@@ -1,7 +1,12 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "rdkafka/version"
3
+ require "logger"
4
+ require "objspace"
5
+ require "ffi"
6
+ require "json"
4
7
 
8
+ require "rdkafka/version"
9
+ require "rdkafka/helpers/time"
5
10
  require "rdkafka/abstract_handle"
6
11
  require "rdkafka/admin"
7
12
  require "rdkafka/admin/create_topic_handle"
@@ -22,3 +27,7 @@ require "rdkafka/native_kafka"
22
27
  require "rdkafka/producer"
23
28
  require "rdkafka/producer/delivery_handle"
24
29
  require "rdkafka/producer/delivery_report"
30
+
31
+ # Main Rdkafka namespace of this gem
32
+ module Rdkafka
33
+ end
data/rdkafka.gemspec CHANGED
@@ -4,11 +4,10 @@ require File.expand_path('lib/rdkafka/version', __dir__)
4
4
 
5
5
  Gem::Specification.new do |gem|
6
6
  gem.authors = ['Thijs Cadier']
7
- gem.email = ["thijs@appsignal.com"]
7
+ gem.email = ["contact@karafka.io"]
8
8
  gem.description = "Modern Kafka client library for Ruby based on librdkafka"
9
9
  gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.4+."
10
10
  gem.license = 'MIT'
11
- gem.homepage = 'https://github.com/thijsc/rdkafka-ruby'
12
11
 
13
12
  gem.files = `git ls-files`.split($\)
14
13
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
@@ -16,8 +15,13 @@ Gem::Specification.new do |gem|
16
15
  gem.name = 'rdkafka'
17
16
  gem.require_paths = ['lib']
18
17
  gem.version = Rdkafka::VERSION
19
- gem.required_ruby_version = '>= 2.6'
18
+ gem.required_ruby_version = '>= 2.7'
20
19
  gem.extensions = %w(ext/Rakefile)
20
+ gem.cert_chain = %w[certs/cert_chain.pem]
21
+
22
+ if $PROGRAM_NAME.end_with?('gem')
23
+ gem.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
24
+ end
21
25
 
22
26
  gem.add_dependency 'ffi', '~> 1.15'
23
27
  gem.add_dependency 'mini_portile2', '~> 2.6'
@@ -29,4 +33,14 @@ Gem::Specification.new do |gem|
29
33
  gem.add_development_dependency 'simplecov'
30
34
  gem.add_development_dependency 'guard'
31
35
  gem.add_development_dependency 'guard-rspec'
36
+
37
+ gem.metadata = {
38
+ 'funding_uri' => 'https://karafka.io/#become-pro',
39
+ 'homepage_uri' => 'https://karafka.io',
40
+ 'changelog_uri' => 'https://github.com/karafka/rdkafka-ruby/blob/main/CHANGELOG.md',
41
+ 'bug_tracker_uri' => 'https://github.com/karafka/rdkafka-ruby/issues',
42
+ 'source_code_uri' => 'https://github.com/karafka/rdkafka-ruby',
43
+ 'documentation_uri' => 'https://github.com/karafka/rdkafka-ruby/blob/main/README.md',
44
+ 'rubygems_mfa_required' => 'true'
45
+ }
32
46
  end
data/renovate.json ADDED
@@ -0,0 +1,6 @@
1
+ {
2
+ "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3
+ "extends": [
4
+ "config:base"
5
+ ]
6
+ }
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::AbstractHandle do
6
4
  let(:response) { 0 }
7
5
  let(:result) { -1 }
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Admin::CreateTopicHandle do
6
4
  let(:response) { 0 }
7
5
 
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Admin::CreateTopicReport do
6
4
  subject { Rdkafka::Admin::CreateTopicReport.new(
7
5
  FFI::MemoryPointer.from_string("error string"),
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Admin::DeleteTopicHandle do
6
4
  let(:response) { 0 }
7
5
 
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Admin::DeleteTopicReport do
6
4
  subject { Rdkafka::Admin::DeleteTopicReport.new(
7
5
  FFI::MemoryPointer.from_string("error string"),
@@ -1,6 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
3
  require "ostruct"
5
4
 
6
5
  describe Rdkafka::Admin do
@@ -33,7 +32,7 @@ describe Rdkafka::Admin do
33
32
  }.to raise_exception { |ex|
34
33
  expect(ex).to be_a(Rdkafka::RdkafkaError)
35
34
  expect(ex.message).to match(/Broker: Invalid topic \(topic_exception\)/)
36
- expect(ex.broker_message).to match(/Topic name.*is illegal, it contains a character other than ASCII alphanumerics/)
35
+ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or more characters other than ASCII alphanumerics, '.', '_' and '-'/)
37
36
  }
38
37
  end
39
38
  end
@@ -1,6 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
3
  require 'zlib'
5
4
 
6
5
  describe Rdkafka::Bindings do
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Callbacks do
6
4
 
7
5
  # The code in the call back functions is 100% covered by other specs. Due to
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Config do
6
4
  context "logger" do
7
5
  it "should have a default logger" do
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Consumer::Headers do
6
4
  let(:headers) do
7
5
  { # Note String keys!
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Consumer::Message do
6
4
  let(:native_client) { new_native_client }
7
5
  let(:native_topic) { new_native_topic(native_client: native_client) }
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Consumer::Partition do
6
4
  let(:offset) { 100 }
7
5
  let(:err) { 0 }
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::Consumer::TopicPartitionList do
6
4
  it "should create a new list and add unassigned topics" do
7
5
  list = Rdkafka::Consumer::TopicPartitionList.new
@@ -221,5 +219,24 @@ describe Rdkafka::Consumer::TopicPartitionList do
221
219
 
222
220
  expect(list).to eq other
223
221
  end
222
+
223
+ it "should create a native list with timestamp offsets if offsets are Time" do
224
+ list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
225
+ list.add_topic_and_partitions_with_offsets("topic", 0 => Time.at(1505069646, 250_000))
226
+ end
227
+
228
+ tpl = list.to_native_tpl
229
+
230
+ compare_list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
231
+ list.add_topic_and_partitions_with_offsets(
232
+ "topic",
233
+ 0 => (Time.at(1505069646, 250_000).to_f * 1000).floor
234
+ )
235
+ end
236
+
237
+ native_list = Rdkafka::Consumer::TopicPartitionList.from_native_tpl(tpl)
238
+
239
+ expect(native_list).to eq compare_list
240
+ end
224
241
  end
225
242
  end
@@ -1,6 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
3
  require "ostruct"
5
4
  require 'securerandom'
6
5
 
@@ -11,6 +10,10 @@ describe Rdkafka::Consumer do
11
10
  after { consumer.close }
12
11
  after { producer.close }
13
12
 
13
+ describe '#name' do
14
+ it { expect(consumer.name).to include('rdkafka#consumer-') }
15
+ end
16
+
14
17
  describe "#subscribe, #unsubscribe and #subscription" do
15
18
  it "should subscribe, unsubscribe and return the subscription" do
16
19
  expect(consumer.subscription).to be_empty
@@ -311,8 +314,9 @@ describe Rdkafka::Consumer do
311
314
  end
312
315
  end
313
316
 
314
- describe "#commit, #committed and #store_offset" do
315
- # Make sure there's a stored offset
317
+
318
+ describe "#position, #commit, #committed and #store_offset" do
319
+ # Make sure there are messages to work with
316
320
  let!(:report) do
317
321
  producer.produce(
318
322
  topic: "consume_test_topic",
@@ -330,29 +334,33 @@ describe Rdkafka::Consumer do
330
334
  )
331
335
  end
332
336
 
333
- it "should only accept a topic partition list in committed" do
334
- expect {
335
- consumer.committed("list")
336
- }.to raise_error TypeError
337
+ describe "#position" do
338
+ it "should only accept a topic partition list in position if not nil" do
339
+ expect {
340
+ consumer.position("list")
341
+ }.to raise_error TypeError
342
+ end
337
343
  end
338
344
 
339
- it "should commit in sync mode" do
340
- expect {
341
- consumer.commit(nil, true)
342
- }.not_to raise_error
343
- end
345
+ describe "#committed" do
346
+ it "should only accept a topic partition list in commit if not nil" do
347
+ expect {
348
+ consumer.commit("list")
349
+ }.to raise_error TypeError
350
+ end
344
351
 
345
- it "should only accept a topic partition list in commit if not nil" do
346
- expect {
347
- consumer.commit("list")
348
- }.to raise_error TypeError
352
+ it "should commit in sync mode" do
353
+ expect {
354
+ consumer.commit(nil, true)
355
+ }.not_to raise_error
356
+ end
349
357
  end
350
358
 
351
359
  context "with a committed consumer" do
352
360
  before :all do
353
361
  # Make sure there are some messages.
354
362
  handles = []
355
- producer = rdkafka_producer_config.producer
363
+ producer = rdkafka_config.producer
356
364
  10.times do
357
365
  (0..2).each do |i|
358
366
  handles << producer.produce(
@@ -396,31 +404,33 @@ describe Rdkafka::Consumer do
396
404
  }.to raise_error(Rdkafka::RdkafkaError)
397
405
  end
398
406
 
399
- it "should fetch the committed offsets for the current assignment" do
400
- partitions = consumer.committed.to_h["consume_test_topic"]
401
- expect(partitions).not_to be_nil
402
- expect(partitions[0].offset).to eq 1
403
- end
407
+ describe "#committed" do
408
+ it "should fetch the committed offsets for the current assignment" do
409
+ partitions = consumer.committed.to_h["consume_test_topic"]
410
+ expect(partitions).not_to be_nil
411
+ expect(partitions[0].offset).to eq 1
412
+ end
404
413
 
405
- it "should fetch the committed offsets for a specified topic partition list" do
406
- list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
407
- list.add_topic("consume_test_topic", [0, 1, 2])
414
+ it "should fetch the committed offsets for a specified topic partition list" do
415
+ list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
416
+ list.add_topic("consume_test_topic", [0, 1, 2])
417
+ end
418
+ partitions = consumer.committed(list).to_h["consume_test_topic"]
419
+ expect(partitions).not_to be_nil
420
+ expect(partitions[0].offset).to eq 1
421
+ expect(partitions[1].offset).to eq 1
422
+ expect(partitions[2].offset).to eq 1
408
423
  end
409
- partitions = consumer.committed(list).to_h["consume_test_topic"]
410
- expect(partitions).not_to be_nil
411
- expect(partitions[0].offset).to eq 1
412
- expect(partitions[1].offset).to eq 1
413
- expect(partitions[2].offset).to eq 1
414
- end
415
424
 
416
- it "should raise an error when getting committed fails" do
417
- expect(Rdkafka::Bindings).to receive(:rd_kafka_committed).and_return(20)
418
- list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
419
- list.add_topic("consume_test_topic", [0, 1, 2])
425
+ it "should raise an error when getting committed fails" do
426
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_committed).and_return(20)
427
+ list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
428
+ list.add_topic("consume_test_topic", [0, 1, 2])
429
+ end
430
+ expect {
431
+ consumer.committed(list)
432
+ }.to raise_error Rdkafka::RdkafkaError
420
433
  end
421
- expect {
422
- consumer.committed(list)
423
- }.to raise_error Rdkafka::RdkafkaError
424
434
  end
425
435
 
426
436
  describe "#store_offset" do
@@ -441,6 +451,8 @@ describe Rdkafka::Consumer do
441
451
  @new_consumer.store_offset(message)
442
452
  @new_consumer.commit
443
453
 
454
+ # TODO use position here, should be at offset
455
+
444
456
  list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
445
457
  list.add_topic("consume_test_topic", [0, 1, 2])
446
458
  end
@@ -455,6 +467,35 @@ describe Rdkafka::Consumer do
455
467
  @new_consumer.store_offset(message)
456
468
  }.to raise_error Rdkafka::RdkafkaError
457
469
  end
470
+
471
+ describe "#position" do
472
+ it "should fetch the positions for the current assignment" do
473
+ consumer.store_offset(message)
474
+
475
+ partitions = consumer.position.to_h["consume_test_topic"]
476
+ expect(partitions).not_to be_nil
477
+ expect(partitions[0].offset).to eq message.offset + 1
478
+ end
479
+
480
+ it "should fetch the positions for a specified assignment" do
481
+ consumer.store_offset(message)
482
+
483
+ list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
484
+ list.add_topic_and_partitions_with_offsets("consume_test_topic", 0 => nil, 1 => nil, 2 => nil)
485
+ end
486
+ partitions = consumer.position(list).to_h["consume_test_topic"]
487
+ expect(partitions).not_to be_nil
488
+ expect(partitions[0].offset).to eq message.offset + 1
489
+ end
490
+
491
+ it "should raise an error when getting the position fails" do
492
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_position).and_return(20)
493
+
494
+ expect {
495
+ consumer.position
496
+ }.to raise_error(Rdkafka::RdkafkaError)
497
+ end
498
+ end
458
499
  end
459
500
  end
460
501
  end
@@ -950,6 +991,69 @@ describe Rdkafka::Consumer do
950
991
  end
951
992
  end
952
993
 
994
+ describe "#offsets_for_times" do
995
+ it "should raise when not TopicPartitionList" do
996
+ expect { consumer.offsets_for_times([]) }.to raise_error(TypeError)
997
+ end
998
+
999
+ it "should raise an error when offsets_for_times fails" do
1000
+ tpl = Rdkafka::Consumer::TopicPartitionList.new
1001
+
1002
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_offsets_for_times).and_return(7)
1003
+
1004
+ expect { consumer.offsets_for_times(tpl) }.to raise_error(Rdkafka::RdkafkaError)
1005
+ end
1006
+
1007
+ context "when subscribed" do
1008
+ let(:timeout) { 1000 }
1009
+
1010
+ before do
1011
+ consumer.subscribe("consume_test_topic")
1012
+
1013
+ # 1. partitions are assigned
1014
+ wait_for_assignment(consumer)
1015
+ expect(consumer.assignment).not_to be_empty
1016
+
1017
+ # 2. eat unrelated messages
1018
+ while(consumer.poll(timeout)) do; end
1019
+ end
1020
+
1021
+ after { consumer.unsubscribe }
1022
+
1023
+ def send_one_message(val)
1024
+ producer.produce(
1025
+ topic: "consume_test_topic",
1026
+ payload: "payload #{val}",
1027
+ key: "key 0",
1028
+ partition: 0
1029
+ ).wait
1030
+ end
1031
+
1032
+ it "returns a TopicParticionList with updated offsets" do
1033
+ send_one_message("a")
1034
+ send_one_message("b")
1035
+ send_one_message("c")
1036
+
1037
+ consumer.poll(timeout)
1038
+ message = consumer.poll(timeout)
1039
+ consumer.poll(timeout)
1040
+
1041
+ tpl = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
1042
+ list.add_topic_and_partitions_with_offsets(
1043
+ "consume_test_topic",
1044
+ [
1045
+ [0, message.timestamp]
1046
+ ]
1047
+ )
1048
+ end
1049
+
1050
+ tpl_response = consumer.offsets_for_times(tpl)
1051
+
1052
+ expect(tpl_response.to_h["consume_test_topic"][0].offset).to eq message.offset
1053
+ end
1054
+ end
1055
+ end
1056
+
953
1057
  describe "a rebalance listener" do
954
1058
  let(:consumer) do
955
1059
  config = rdkafka_consumer_config
@@ -1024,7 +1128,7 @@ describe Rdkafka::Consumer do
1024
1128
  :assign => [ nil ],
1025
1129
  :assignment => nil,
1026
1130
  :committed => [],
1027
- :query_watermark_offsets => [ nil, nil ],
1131
+ :query_watermark_offsets => [ nil, nil ]
1028
1132
  }.each do |method, args|
1029
1133
  it "raises an exception if #{method} is called" do
1030
1134
  expect {
@@ -1,7 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::RdkafkaError do
6
4
  it "should raise a type error for a nil response" do
7
5
  expect {
@@ -1,6 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
3
  require "securerandom"
5
4
 
6
5
  describe Rdkafka::Metadata do
@@ -31,7 +30,7 @@ describe Rdkafka::Metadata do
31
30
  it "#brokers returns our single broker" do
32
31
  expect(subject.brokers.length).to eq(1)
33
32
  expect(subject.brokers[0][:broker_id]).to eq(1)
34
- expect(subject.brokers[0][:broker_name]).to eq("localhost")
33
+ expect(subject.brokers[0][:broker_name]).to eq("127.0.0.1")
35
34
  expect(subject.brokers[0][:broker_port]).to eq(9092)
36
35
  end
37
36
 
@@ -54,7 +53,7 @@ describe Rdkafka::Metadata do
54
53
  it "#brokers returns our single broker" do
55
54
  expect(subject.brokers.length).to eq(1)
56
55
  expect(subject.brokers[0][:broker_id]).to eq(1)
57
- expect(subject.brokers[0][:broker_name]).to eq("localhost")
56
+ expect(subject.brokers[0][:broker_name]).to eq("127.0.0.1")
58
57
  expect(subject.brokers[0][:broker_port]).to eq(9092)
59
58
  end
60
59
 
@@ -1,14 +1,13 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "spec_helper"
4
-
5
3
  describe Rdkafka::NativeKafka do
6
4
  let(:config) { rdkafka_producer_config }
7
5
  let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
8
6
  let(:closing) { false }
9
7
  let(:thread) { double(Thread) }
8
+ let(:opaque) { Rdkafka::Opaque.new }
10
9
 
11
- subject(:client) { described_class.new(native, run_polling_thread: true) }
10
+ subject(:client) { described_class.new(native, run_polling_thread: true, opaque: opaque) }
12
11
 
13
12
  before do
14
13
  allow(Thread).to receive(:new).and_return(thread)