rdkafka 0.5.0 → 0.8.1

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (47):
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +23 -0
  3. data/CHANGELOG.md +23 -0
  4. data/README.md +9 -9
  5. data/docker-compose.yml +17 -11
  6. data/ext/README.md +3 -15
  7. data/ext/Rakefile +23 -3
  8. data/lib/rdkafka.rb +8 -0
  9. data/lib/rdkafka/abstract_handle.rb +82 -0
  10. data/lib/rdkafka/admin.rb +144 -0
  11. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  12. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  13. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  14. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  15. data/lib/rdkafka/bindings.rb +63 -17
  16. data/lib/rdkafka/callbacks.rb +106 -0
  17. data/lib/rdkafka/config.rb +18 -7
  18. data/lib/rdkafka/consumer.rb +162 -46
  19. data/lib/rdkafka/consumer/headers.rb +7 -5
  20. data/lib/rdkafka/consumer/partition.rb +1 -1
  21. data/lib/rdkafka/consumer/topic_partition_list.rb +6 -16
  22. data/lib/rdkafka/error.rb +35 -4
  23. data/lib/rdkafka/metadata.rb +92 -0
  24. data/lib/rdkafka/producer.rb +43 -15
  25. data/lib/rdkafka/producer/delivery_handle.rb +7 -49
  26. data/lib/rdkafka/producer/delivery_report.rb +7 -2
  27. data/lib/rdkafka/version.rb +3 -3
  28. data/rdkafka.gemspec +3 -3
  29. data/spec/rdkafka/abstract_handle_spec.rb +114 -0
  30. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  31. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  32. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  33. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  34. data/spec/rdkafka/admin_spec.rb +192 -0
  35. data/spec/rdkafka/bindings_spec.rb +20 -2
  36. data/spec/rdkafka/callbacks_spec.rb +20 -0
  37. data/spec/rdkafka/config_spec.rb +17 -2
  38. data/spec/rdkafka/consumer/message_spec.rb +6 -1
  39. data/spec/rdkafka/consumer_spec.rb +145 -19
  40. data/spec/rdkafka/error_spec.rb +7 -3
  41. data/spec/rdkafka/metadata_spec.rb +78 -0
  42. data/spec/rdkafka/producer/delivery_handle_spec.rb +3 -43
  43. data/spec/rdkafka/producer/delivery_report_spec.rb +5 -1
  44. data/spec/rdkafka/producer_spec.rb +147 -72
  45. data/spec/spec_helper.rb +34 -6
  46. metadata +34 -10
  47. data/.travis.yml +0 -34
data/spec/rdkafka/bindings_spec.rb

@@ -1,4 +1,5 @@
 require "spec_helper"
+require 'zlib'
 
 describe Rdkafka::Bindings do
   it "should load librdkafka" do
@@ -7,12 +8,12 @@ describe Rdkafka::Bindings do
 
   describe ".lib_extension" do
     it "should know the lib extension for darwin" do
-      expect(Gem::Platform.local).to receive(:os).and_return("darwin-aaa")
+      stub_const('RbConfig::CONFIG', 'host_os' => 'darwin')
       expect(Rdkafka::Bindings.lib_extension).to eq "dylib"
     end
 
     it "should know the lib extension for linux" do
-      expect(Gem::Platform.local).to receive(:os).and_return("linux")
+      stub_const('RbConfig::CONFIG', 'host_os' => 'linux')
      expect(Rdkafka::Bindings.lib_extension).to eq "so"
     end
   end
@@ -60,6 +61,23 @@ describe Rdkafka::Bindings do
     end
   end
 
+  describe "partitioner" do
+    let(:partition_key) { ('a'..'z').to_a.shuffle.take(15).join('') }
+    let(:partition_count) { rand(50) + 1 }
+
+    it "should return the same partition for a similar string and the same partition count" do
+      result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
+      result_2 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
+      expect(result_1).to eq(result_2)
+    end
+
+    it "should match the old partitioner" do
+      result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
+      result_2 = (Zlib.crc32(partition_key) % partition_count)
+      expect(result_1).to eq(result_2)
+    end
+  end
+
   describe "stats callback" do
     context "without a stats callback" do
       it "should do nothing" do
data/spec/rdkafka/callbacks_spec.rb (new file)

@@ -0,0 +1,20 @@
+require "spec_helper"
+
+describe Rdkafka::Callbacks do
+
+  # The code in the call back functions is 100% covered by other specs. Due to
+  # the large number of collaborators, and the fact that FFI does not play
+  # nicely with doubles, it was very difficult to construct tests that were
+  # not over-mocked.
+
+  # For debugging purposes, if you suspect that you are running into trouble in
+  # one of the callback functions, it may be helpful to surround the inner body
+  # of the method with something like:
+  #
+  #   begin
+  #     <method body>
+  #   rescue => ex; puts ex.inspect; puts ex.backtrace; end;
+  #
+  # This will output to STDOUT any exceptions that are being raised in the callback.
+
+end
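For illustration, here is the debugging pattern described in those comments applied to a hypothetical FFI callback (the name and signature below are assumptions, not taken from the gem):

    require "ffi"

    # Exceptions raised inside an FFI callback never propagate to the Ruby
    # caller, so the rescue prints them before they are silently dropped.
    DebuggableCallback = FFI::Function.new(
      :void, [:pointer, :pointer, :pointer]
    ) do |_client_ptr, _message_ptr, _opaque_ptr|
      begin
        # <actual callback body goes here>
      rescue => ex
        puts ex.inspect
        puts ex.backtrace
      end
    end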
data/spec/rdkafka/config_spec.rb

@@ -50,7 +50,9 @@ describe Rdkafka::Config do
   end
 
   it "should create a consumer with valid config" do
-    expect(rdkafka_config.consumer).to be_a Rdkafka::Consumer
+    consumer = rdkafka_config.consumer
+    expect(consumer).to be_a Rdkafka::Consumer
+    consumer.close
   end
 
   it "should raise an error when creating a consumer with invalid config" do
@@ -76,7 +78,9 @@ describe Rdkafka::Config do
   end
 
   it "should create a producer with valid config" do
-    expect(rdkafka_config.producer).to be_a Rdkafka::Producer
+    producer = rdkafka_config.producer
+    expect(producer).to be_a Rdkafka::Producer
+    producer.close
   end
 
   it "should raise an error when creating a producer with invalid config" do
@@ -86,6 +90,17 @@ describe Rdkafka::Config do
     }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
   end
 
+  it "should allow configuring zstd compression" do
+    config = Rdkafka::Config.new('compression.codec' => 'zstd')
+    begin
+      expect(config.producer).to be_a Rdkafka::Producer
+      config.producer.close
+    rescue Rdkafka::Config::ConfigError => ex
+      pending "Zstd compression not supported on this machine"
+      raise ex
+    end
+  end
+
   it "should raise an error when client creation fails for a consumer" do
     config = Rdkafka::Config.new(
       "security.protocol" => "SSL",
data/spec/rdkafka/consumer/message_spec.rb

@@ -1,7 +1,8 @@
 require "spec_helper"
 
 describe Rdkafka::Consumer::Message do
-  let(:native_topic) { new_native_topic }
+  let(:native_client) { new_native_client }
+  let(:native_topic) { new_native_topic(native_client: native_client) }
   let(:payload) { nil }
   let(:key) { nil }
   let(:native_message) do
@@ -24,6 +25,10 @@ describe Rdkafka::Consumer::Message do
     end
   end
 
+  after(:each) do
+    Rdkafka::Bindings.rd_kafka_destroy(native_client)
+  end
+
   subject { Rdkafka::Consumer::Message.new(native_message) }
 
   before do
data/spec/rdkafka/consumer_spec.rb

@@ -1,11 +1,15 @@
 require "spec_helper"
+require "ostruct"
 
 describe Rdkafka::Consumer do
   let(:config) { rdkafka_config }
   let(:consumer) { config.consumer }
   let(:producer) { config.producer }
 
-  describe "#subscripe, #unsubscribe and #subscription" do
+  after { consumer.close }
+  after { producer.close }
+
+  describe "#subscribe, #unsubscribe and #subscription" do
     it "should subscribe, unsubscribe and return the subscription" do
       expect(consumer.subscription).to be_empty
 
@@ -75,7 +79,7 @@ describe Rdkafka::Consumer do
       tpl.add_topic("consume_test_topic", (0..2))
       consumer.pause(tpl)
 
-      # 6. unsure that messages are not available
+      # 6. ensure that messages are not available
       records = consumer.poll(timeout)
       expect(records).to be_nil
 
@@ -84,10 +88,9 @@ describe Rdkafka::Consumer do
       tpl.add_topic("consume_test_topic", (0..2))
       consumer.resume(tpl)
 
-      # 8. ensure that message is successfuly consumed
+      # 8. ensure that message is successfully consumed
       records = consumer.poll(timeout)
       expect(records).not_to be_nil
-      consumer.commit
     end
   end
 
@@ -124,6 +127,90 @@ describe Rdkafka::Consumer do
     end
   end
 
+  describe "#seek" do
+    it "should raise an error when seeking fails" do
+      fake_msg = OpenStruct.new(topic: "consume_test_topic", partition: 0, offset: 0)
+
+      expect(Rdkafka::Bindings).to receive(:rd_kafka_seek).and_return(20)
+      expect {
+        consumer.seek(fake_msg)
+      }.to raise_error Rdkafka::RdkafkaError
+    end
+
+    context "subscription" do
+      let(:timeout) { 1000 }
+
+      before do
+        consumer.subscribe("consume_test_topic")
+
+        # 1. partitions are assigned
+        wait_for_assignment(consumer)
+        expect(consumer.assignment).not_to be_empty
+
+        # 2. eat unrelated messages
+        while(consumer.poll(timeout)) do; end
+      end
+      after { consumer.unsubscribe }
+
+      def send_one_message(val)
+        producer.produce(
+          topic: "consume_test_topic",
+          payload: "payload #{val}",
+          key: "key 1",
+          partition: 0
+        ).wait
+      end
+
+      it "works when a partition is paused" do
+        # 3. get reference message
+        send_one_message(:a)
+        message1 = consumer.poll(timeout)
+        expect(message1&.payload).to eq "payload a"
+
+        # 4. pause the subscription
+        tpl = Rdkafka::Consumer::TopicPartitionList.new
+        tpl.add_topic("consume_test_topic", 1)
+        consumer.pause(tpl)
+
+        # 5. seek to previous message
+        consumer.seek(message1)
+
+        # 6. resume the subscription
+        tpl = Rdkafka::Consumer::TopicPartitionList.new
+        tpl.add_topic("consume_test_topic", 1)
+        consumer.resume(tpl)
+
+        # 7. ensure same message is read again
+        message2 = consumer.poll(timeout)
+        consumer.commit
+        expect(message1.offset).to eq message2.offset
+        expect(message1.payload).to eq message2.payload
+      end
+
+      it "allows skipping messages" do
+        # 3. send messages
+        send_one_message(:a)
+        send_one_message(:b)
+        send_one_message(:c)
+
+        # 4. get reference message
+        message = consumer.poll(timeout)
+        expect(message&.payload).to eq "payload a"
+
+        # 5. seek over one message
+        fake_msg = message.dup
+        fake_msg.instance_variable_set(:@offset, fake_msg.offset + 2)
+        consumer.seek(fake_msg)
+
+        # 6. ensure that only one message is available
+        records = consumer.poll(timeout)
+        expect(records&.payload).to eq "payload c"
+        records = consumer.poll(timeout)
+        expect(records).to be_nil
+      end
+    end
+  end
+
   describe "#assign and #assignment" do
     it "should return an empty assignment if nothing is assigned" do
       expect(consumer.assignment).to be_empty
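As the first #seek example shows, seek only reads topic, partition, and offset from whatever object it is given, which is why an OpenStruct (or a dup'ed message with a patched @offset) works. A minimal usage sketch for an already-assigned consumer (the offset value here is hypothetical):

    require "ostruct"

    # Rewind partition 0 of the topic to offset 42; the next poll on that
    # partition resumes from there.
    target = OpenStruct.new(topic: "consume_test_topic", partition: 0, offset: 42)
    consumer.seek(target)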
@@ -185,7 +272,9 @@ describe Rdkafka::Consumer do
     it "should close a consumer" do
       consumer.subscribe("consume_test_topic")
       consumer.close
-      expect(consumer.poll(100)).to be_nil
+      expect {
+        consumer.poll(100)
+      }.to raise_error(Rdkafka::ClosedConsumerError, /poll/)
     end
   end
 
@@ -226,11 +315,11 @@ describe Rdkafka::Consumer do
       }.to raise_error TypeError
     end
 
-    context "with a commited consumer" do
+    context "with a committed consumer" do
       before :all do
-        # Make sure there are some message
-        producer = rdkafka_config.producer
+        # Make sure there are some messages.
         handles = []
+        producer = rdkafka_config.producer
         10.times do
           (0..2).each do |i|
             handles << producer.produce(
@@ -242,6 +331,7 @@ describe Rdkafka::Consumer do
           end
         end
         handles.each(&:wait)
+        producer.close
       end
 
       before do
@@ -302,20 +392,26 @@ describe Rdkafka::Consumer do
 
   describe "#store_offset" do
     before do
+      config = {}
      config[:'enable.auto.offset.store'] = false
      config[:'enable.auto.commit'] = false
-      consumer.subscribe("consume_test_topic")
-      wait_for_assignment(consumer)
+      @new_consumer = rdkafka_config(config).consumer
+      @new_consumer.subscribe("consume_test_topic")
+      wait_for_assignment(@new_consumer)
+    end
+
+    after do
+      @new_consumer.close
    end
 
    it "should store the offset for a message" do
-      consumer.store_offset(message)
-      consumer.commit
+      @new_consumer.store_offset(message)
+      @new_consumer.commit
 
      list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
        list.add_topic("consume_test_topic", [0, 1, 2])
      end
-      partitions = consumer.committed(list).to_h["consume_test_topic"]
+      partitions = @new_consumer.committed(list).to_h["consume_test_topic"]
      expect(partitions).not_to be_nil
      expect(partitions[message.partition].offset).to eq(message.offset + 1)
    end
@@ -323,7 +419,7 @@ describe Rdkafka::Consumer do
     it "should raise an error with invalid input" do
       allow(message).to receive(:partition).and_return(9999)
       expect {
-        consumer.store_offset(message)
+        @new_consumer.store_offset(message)
       }.to raise_error Rdkafka::RdkafkaError
     end
   end
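The reworked #store_offset specs isolate a consumer with automatic offset handling switched off. A sketch of that manual flow, using the suite's rdkafka_config helper (offsets advance only when stored and committed explicitly):

    config = rdkafka_config(
      :'enable.auto.offset.store' => false,
      :'enable.auto.commit'       => false
    )
    consumer = config.consumer
    consumer.subscribe("consume_test_topic")
    if message = consumer.poll(1000)
      # ... process the message ...
      consumer.store_offset(message) # mark this message as processed
      consumer.commit                # persist stored offsets to the broker
    end
    consumer.close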
@@ -467,12 +563,12 @@ describe Rdkafka::Consumer do
         payload: "payload 1",
         key: "key 1"
       ).wait
-
       consumer.subscribe("consume_test_topic")
-      message = consumer.poll(5000)
-      expect(message).to be_a Rdkafka::Consumer::Message
+      message = consumer.each {|m| break m}
 
-      # Message content is tested in producer spec
+      expect(message).to be_a Rdkafka::Consumer::Message
+      expect(message.payload).to eq('payload 1')
+      expect(message.key).to eq('key 1')
     end
 
     it "should raise an error when polling fails" do
@@ -568,8 +664,9 @@ describe Rdkafka::Consumer do
       # should break the each loop.
       consumer.each_with_index do |message, i|
         expect(message).to be_a Rdkafka::Consumer::Message
-        consumer.close if i == 10
+        break if i == 10
       end
+      consumer.close
     end
   end
 
@@ -629,4 +726,33 @@ describe Rdkafka::Consumer do
       consumer.close
     end
   end
+
+  context "methods that should not be called after a consumer has been closed" do
+    before do
+      consumer.close
+    end
+
+    # Affected methods and a non-invalid set of parameters for the method
+    {
+      :subscribe => [ nil ],
+      :unsubscribe => nil,
+      :pause => [ nil ],
+      :resume => [ nil ],
+      :subscription => nil,
+      :assign => [ nil ],
+      :assignment => nil,
+      :committed => [],
+      :query_watermark_offsets => [ nil, nil ],
+    }.each do |method, args|
+      it "raises an exception if #{method} is called" do
+        expect {
+          if args.nil?
+            consumer.public_send(method)
+          else
+            consumer.public_send(method, *args)
+          end
+        }.to raise_exception(Rdkafka::ClosedConsumerError, /#{method.to_s}/)
+      end
+    end
+  end
 end
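These parameterized examples imply a guard at the top of every affected consumer method. A minimal sketch of how such a check could look (the helper name is an assumption, not taken from the diff):

    # Raise if the underlying native client has already been destroyed.
    def closed_consumer_check(method)
      raise Rdkafka::ClosedConsumerError.new(method) if @native_kafka.nil?
    end

    def subscription
      closed_consumer_check(__method__)
      # ... continue into the librdkafka call ...
    end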
data/spec/rdkafka/error_spec.rb

@@ -11,6 +11,10 @@ describe Rdkafka::RdkafkaError do
     expect(Rdkafka::RdkafkaError.new(10, "message prefix").message_prefix).to eq "message prefix"
   end
 
+  it "should create an error with a broker message" do
+    expect(Rdkafka::RdkafkaError.new(10, broker_message: "broker message").broker_message).to eq "broker message"
+  end
+
   describe "#code" do
     it "should handle an invalid response" do
       expect(Rdkafka::RdkafkaError.new(933975).code).to eq :err_933975?
@@ -71,15 +75,15 @@ describe Rdkafka::RdkafkaError do
    end
 
    it "should not equal another error with a different error code" do
-      expect(subject).to eq Rdkafka::RdkafkaError.new(20, "Error explanation")
+      expect(subject).not_to eq Rdkafka::RdkafkaError.new(20, "Error explanation")
    end
 
    it "should not equal another error with a different message" do
-      expect(subject).to eq Rdkafka::RdkafkaError.new(10, "Different error explanation")
+      expect(subject).not_to eq Rdkafka::RdkafkaError.new(10, "Different error explanation")
    end
 
    it "should not equal another error with no message" do
-      expect(subject).to eq Rdkafka::RdkafkaError.new(10)
+      expect(subject).not_to eq Rdkafka::RdkafkaError.new(10)
    end
  end
 end
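The corrected expectations pin down value-style equality for errors. A sketch of semantics consistent with these specs (an assumption about the implementation, not a copy of it):

    # Two errors compare equal only when class, error code, and rendered
    # message all match; any difference makes them unequal.
    def ==(another_error)
      another_error.is_a?(self.class) &&
        code == another_error.code &&
        message == another_error.message
    end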
data/spec/rdkafka/metadata_spec.rb (new file)

@@ -0,0 +1,78 @@
+require "spec_helper"
+require "securerandom"
+
+describe Rdkafka::Metadata do
+  let(:config) { rdkafka_config }
+  let(:native_config) { config.send(:native_config) }
+  let(:native_kafka) { config.send(:native_kafka, native_config, :rd_kafka_consumer) }
+
+  after do
+    Rdkafka::Bindings.rd_kafka_consumer_close(native_kafka)
+    Rdkafka::Bindings.rd_kafka_destroy(native_kafka)
+  end
+
+  context "passing in a topic name" do
+    context "that is non-existent topic" do
+      let(:topic_name) { SecureRandom.uuid.to_s }
+
+      it "raises an appropriate exception" do
+        expect {
+          described_class.new(native_kafka, topic_name)
+        }.to raise_exception(Rdkafka::RdkafkaError, "Broker: Leader not available (leader_not_available)")
+      end
+    end
+
+    context "that is one of our test topics" do
+      subject { described_class.new(native_kafka, topic_name) }
+      let(:topic_name) { "partitioner_test_topic" }
+
+      it "#brokers returns our single broker" do
+        expect(subject.brokers.length).to eq(1)
+        expect(subject.brokers[0][:broker_id]).to eq(1)
+        expect(subject.brokers[0][:broker_name]).to eq("localhost")
+        expect(subject.brokers[0][:broker_port]).to eq(9092)
+      end
+
+      it "#topics returns data on our test topic" do
+        expect(subject.topics.length).to eq(1)
+        expect(subject.topics[0][:partition_count]).to eq(25)
+        expect(subject.topics[0][:partitions].length).to eq(25)
+        expect(subject.topics[0][:topic_name]).to eq(topic_name)
+      end
+    end
+  end
+
+  context "not passing in a topic name" do
+    subject { described_class.new(native_kafka, topic_name) }
+    let(:topic_name) { nil }
+    let(:test_topics) {
+      %w(consume_test_topic empty_test_topic load_test_topic produce_test_topic rake_test_topic watermarks_test_topic partitioner_test_topic)
+    } # Test topics created in spec_helper.rb
+
+    it "#brokers returns our single broker" do
+      expect(subject.brokers.length).to eq(1)
+      expect(subject.brokers[0][:broker_id]).to eq(1)
+      expect(subject.brokers[0][:broker_name]).to eq("localhost")
+      expect(subject.brokers[0][:broker_port]).to eq(9092)
+    end
+
+    it "#topics returns data about all of our test topics" do
+      result = subject.topics.map { |topic| topic[:topic_name] }
+      expect(result).to include(*test_topics)
+    end
+  end
+
+  context "when a non-zero error code is returned" do
+    let(:topic_name) { SecureRandom.uuid.to_s }
+
+    before do
+      allow(Rdkafka::Bindings).to receive(:rd_kafka_metadata).and_return(-165)
+    end
+
+    it "creating the instance raises an exception" do
+      expect {
+        described_class.new(native_kafka, topic_name)
+      }.to raise_error(Rdkafka::RdkafkaError, /Local: Required feature not supported by broker \(unsupported_feature\)/)
+    end
+  end
+end
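Taken together, these examples show the shape of the new metadata API. A minimal usage sketch mirroring the spec's own setup (the private native_kafka plumbing and the broker address come straight from the specs above):

    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
    native = config.send(:native_kafka, config.send(:native_config), :rd_kafka_consumer)

    metadata = Rdkafka::Metadata.new(native, "partitioner_test_topic")
    metadata.brokers.each { |b| puts "broker #{b[:broker_id]}: #{b[:broker_name]}:#{b[:broker_port]}" }
    metadata.topics.each { |t| puts "#{t[:topic_name]}: #{t[:partition_count]} partitions" }

    Rdkafka::Bindings.rd_kafka_consumer_close(native)
    Rdkafka::Bindings.rd_kafka_destroy(native)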