karafka-rdkafka 0.13.7 → 0.13.9

Files changed (45)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.gitignore +4 -0
  4. data/.rspec +1 -0
  5. data/.ruby-gemset +1 -0
  6. data/.ruby-version +1 -0
  7. data/CHANGELOG.md +40 -31
  8. data/{LICENSE → MIT-LICENSE} +2 -1
  9. data/README.md +11 -11
  10. data/ext/README.md +1 -1
  11. data/ext/Rakefile +1 -1
  12. data/lib/rdkafka/abstract_handle.rb +37 -24
  13. data/lib/rdkafka/admin.rb +6 -7
  14. data/lib/rdkafka/bindings.rb +1 -4
  15. data/lib/rdkafka/config.rb +30 -15
  16. data/lib/rdkafka/consumer/headers.rb +2 -4
  17. data/lib/rdkafka/consumer.rb +83 -53
  18. data/lib/rdkafka/helpers/time.rb +14 -0
  19. data/lib/rdkafka/producer.rb +8 -15
  20. data/lib/rdkafka/version.rb +1 -1
  21. data/lib/rdkafka.rb +10 -1
  22. data/spec/rdkafka/abstract_handle_spec.rb +0 -2
  23. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
  24. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
  25. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
  26. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
  27. data/spec/rdkafka/admin_spec.rb +0 -1
  28. data/spec/rdkafka/bindings_spec.rb +0 -1
  29. data/spec/rdkafka/callbacks_spec.rb +0 -2
  30. data/spec/rdkafka/config_spec.rb +8 -2
  31. data/spec/rdkafka/consumer/headers_spec.rb +0 -2
  32. data/spec/rdkafka/consumer/message_spec.rb +0 -2
  33. data/spec/rdkafka/consumer/partition_spec.rb +0 -2
  34. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -2
  35. data/spec/rdkafka/consumer_spec.rb +122 -38
  36. data/spec/rdkafka/error_spec.rb +0 -2
  37. data/spec/rdkafka/metadata_spec.rb +0 -1
  38. data/spec/rdkafka/native_kafka_spec.rb +0 -2
  39. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
  40. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
  41. data/spec/rdkafka/producer_spec.rb +0 -1
  42. data/spec/spec_helper.rb +1 -1
  43. data.tar.gz.sig +0 -0
  44. metadata +7 -4
  45. metadata.gz.sig +0 -0
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require "ostruct"
 require 'securerandom'
 
@@ -55,6 +54,30 @@ describe Rdkafka::Consumer do
         consumer.subscription
       }.to raise_error(Rdkafka::RdkafkaError)
     end
+
+    context "when using consumer without the poll set" do
+      let(:consumer) do
+        config = rdkafka_consumer_config
+        config.consumer_poll_set = false
+        config.consumer
+      end
+
+      it "should subscribe, unsubscribe and return the subscription" do
+        expect(consumer.subscription).to be_empty
+
+        consumer.subscribe("consume_test_topic")
+
+        expect(consumer.subscription).not_to be_empty
+        expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+          list.add_topic("consume_test_topic")
+        end
+        expect(consumer.subscription).to eq expected_subscription
+
+        consumer.unsubscribe
+
+        expect(consumer.subscription).to be_empty
+      end
+    end
   end
 
   describe "#pause and #resume" do
@@ -337,8 +360,9 @@ describe Rdkafka::Consumer do
     end
   end
 
-  describe "#commit, #committed and #store_offset" do
-    # Make sure there's a stored offset
+
+  describe "#position, #commit, #committed and #store_offset" do
+    # Make sure there are messages to work with
     let!(:report) do
       producer.produce(
         topic: "consume_test_topic",
@@ -356,29 +380,33 @@ describe Rdkafka::Consumer do
       )
     end
 
-    it "should only accept a topic partition list in committed" do
-      expect {
-        consumer.committed("list")
-      }.to raise_error TypeError
+    describe "#position" do
+      it "should only accept a topic partition list in position if not nil" do
+        expect {
+          consumer.position("list")
+        }.to raise_error TypeError
+      end
     end
 
-    it "should commit in sync mode" do
-      expect {
-        consumer.commit(nil, true)
-      }.not_to raise_error
-    end
+    describe "#committed" do
+      it "should only accept a topic partition list in commit if not nil" do
+        expect {
+          consumer.commit("list")
+        }.to raise_error TypeError
+      end
 
-    it "should only accept a topic partition list in commit if not nil" do
-      expect {
-        consumer.commit("list")
-      }.to raise_error TypeError
+      it "should commit in sync mode" do
+        expect {
+          consumer.commit(nil, true)
+        }.not_to raise_error
+      end
     end
 
     context "with a committed consumer" do
       before :all do
         # Make sure there are some messages.
         handles = []
-        producer = rdkafka_producer_config.producer
+        producer = rdkafka_config.producer
         10.times do
           (0..2).each do |i|
             handles << producer.produce(
@@ -422,31 +450,33 @@ describe Rdkafka::Consumer do
         }.to raise_error(Rdkafka::RdkafkaError)
       end
 
-      it "should fetch the committed offsets for the current assignment" do
-        partitions = consumer.committed.to_h["consume_test_topic"]
-        expect(partitions).not_to be_nil
-        expect(partitions[0].offset).to eq 1
-      end
+      describe "#committed" do
+        it "should fetch the committed offsets for the current assignment" do
+          partitions = consumer.committed.to_h["consume_test_topic"]
+          expect(partitions).not_to be_nil
+          expect(partitions[0].offset).to eq 1
+        end
 
-      it "should fetch the committed offsets for a specified topic partition list" do
-        list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
-          list.add_topic("consume_test_topic", [0, 1, 2])
+        it "should fetch the committed offsets for a specified topic partition list" do
+          list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+            list.add_topic("consume_test_topic", [0, 1, 2])
+          end
+          partitions = consumer.committed(list).to_h["consume_test_topic"]
+          expect(partitions).not_to be_nil
+          expect(partitions[0].offset).to eq 1
+          expect(partitions[1].offset).to eq 1
+          expect(partitions[2].offset).to eq 1
        end
-        partitions = consumer.committed(list).to_h["consume_test_topic"]
-        expect(partitions).not_to be_nil
-        expect(partitions[0].offset).to eq 1
-        expect(partitions[1].offset).to eq 1
-        expect(partitions[2].offset).to eq 1
-      end
 
-      it "should raise an error when getting committed fails" do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_committed).and_return(20)
-        list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
-          list.add_topic("consume_test_topic", [0, 1, 2])
+        it "should raise an error when getting committed fails" do
+          expect(Rdkafka::Bindings).to receive(:rd_kafka_committed).and_return(20)
+          list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+            list.add_topic("consume_test_topic", [0, 1, 2])
+          end
+          expect {
+            consumer.committed(list)
+          }.to raise_error Rdkafka::RdkafkaError
        end
-        expect {
-          consumer.committed(list)
-        }.to raise_error Rdkafka::RdkafkaError
      end
 
      describe "#store_offset" do
@@ -467,6 +497,8 @@ describe Rdkafka::Consumer do
           @new_consumer.store_offset(message)
           @new_consumer.commit
 
+          # TODO use position here, should be at offset
+
           list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
             list.add_topic("consume_test_topic", [0, 1, 2])
           end
@@ -481,6 +513,35 @@ describe Rdkafka::Consumer do
             @new_consumer.store_offset(message)
           }.to raise_error Rdkafka::RdkafkaError
         end
+
+        describe "#position" do
+          it "should fetch the positions for the current assignment" do
+            consumer.store_offset(message)
+
+            partitions = consumer.position.to_h["consume_test_topic"]
+            expect(partitions).not_to be_nil
+            expect(partitions[0].offset).to eq message.offset + 1
+          end
+
+          it "should fetch the positions for a specified assignment" do
+            consumer.store_offset(message)
+
+            list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+              list.add_topic_and_partitions_with_offsets("consume_test_topic", 0 => nil, 1 => nil, 2 => nil)
+            end
+            partitions = consumer.position(list).to_h["consume_test_topic"]
+            expect(partitions).not_to be_nil
+            expect(partitions[0].offset).to eq message.offset + 1
+          end
+
+          it "should raise an error when getting the position fails" do
+            expect(Rdkafka::Bindings).to receive(:rd_kafka_position).and_return(20)
+
+            expect {
+              consumer.position
+            }.to raise_error(Rdkafka::RdkafkaError)
+          end
+        end
       end
     end
   end
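`Consumer#position` is new in this release and is what the TODO in the `#store_offset` spec refers to. A hedged sketch of how it pairs with polling; the broker, group id, topic and timeout values are assumptions:

```ruby
require "rdkafka"

config = Rdkafka::Config.new(
  "bootstrap.servers": "localhost:9092",   # assumed
  "group.id": "example-group",             # assumed
  "enable.auto.offset.store": false
)
consumer = config.consumer
consumer.subscribe("consume_test_topic")   # assumed topic

if (message = consumer.poll(5_000))
  consumer.store_offset(message)

  # The position is the next offset to fetch, i.e. message.offset + 1
  # once that message has been delivered by #poll.
  partitions = consumer.position.to_h[message.topic]
  puts partitions.map(&:offset).inspect
end

consumer.close
```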
@@ -1039,6 +1100,29 @@ describe Rdkafka::Consumer do
     end
   end
 
+  # Only relevant in case of a consumer with separate queues
+  describe '#events_poll' do
+    let(:stats) { [] }
+
+    before { Rdkafka::Config.statistics_callback = ->(published) { stats << published } }
+
+    after { Rdkafka::Config.statistics_callback = nil }
+
+    let(:consumer) do
+      config = rdkafka_consumer_config('statistics.interval.ms': 100)
+      config.consumer_poll_set = false
+      config.consumer
+    end
+
+    it "expect to run events_poll, operate and propagate stats on events_poll and not poll" do
+      consumer.subscribe("consume_test_topic")
+      consumer.poll(1_000)
+      expect(stats).to be_empty
+      consumer.events_poll(-1)
+      expect(stats).not_to be_empty
+    end
+  end
+
   describe "a rebalance listener" do
     let(:consumer) do
       config = rdkafka_consumer_config
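`#events_poll` (exercised above) services the separate events queue — statistics, errors and other callbacks — that exists when `consumer_poll_set` is disabled, while `#poll` keeps delivering only messages. A rough sketch under the same assumptions as before (broker, group id, topic); the timeout argument follows the spec's `events_poll(-1)` usage and is assumed to be milliseconds:

```ruby
require "rdkafka"

# Statistics arrive as a Hash decoded from librdkafka's JSON payload.
Rdkafka::Config.statistics_callback = ->(stats) { puts "stats tick for #{stats['name']}" }

config = Rdkafka::Config.new(
  "bootstrap.servers": "localhost:9092",   # assumed
  "group.id": "example-group",             # assumed
  "statistics.interval.ms": 100
)
config.consumer_poll_set = false
consumer = config.consumer
consumer.subscribe("consume_test_topic")   # assumed topic

5.times do
  message = consumer.poll(250)  # messages only; stats callbacks do not fire here
  consumer.events_poll(100)     # drain the events queue (timeout assumed to be in ms)
  puts message.payload if message
end

consumer.close
```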
data/spec/rdkafka/error_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::RdkafkaError do
   it "should raise a type error for a nil response" do
     expect {
data/spec/rdkafka/metadata_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require "securerandom"
 
 describe Rdkafka::Metadata do
data/spec/rdkafka/native_kafka_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::NativeKafka do
   let(:config) { rdkafka_producer_config }
   let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Producer::DeliveryHandle do
   let(:response) { 0 }
 
data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Producer::DeliveryReport do
   subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
 
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require "zlib"
 
 describe Rdkafka::Producer do
data/spec/spec_helper.rb CHANGED
@@ -36,7 +36,7 @@ def rdkafka_consumer_config(config_overrides={})
   # Add consumer specific fields to it
   config[:"auto.offset.reset"] = "earliest"
   config[:"enable.partition.eof"] = false
-  config[:"group.id"] = "ruby-test-#{Random.new.rand(0..1_000_000)}"
+  config[:"group.id"] = "ruby-test-#{SecureRandom.uuid}"
   # Enable debug mode if required
   if ENV["DEBUG_CONSUMER"]
     config[:debug] = "cgrp,topic,fetch"
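The test `group.id` switches from a bounded random integer to `SecureRandom.uuid`, which removes any realistic chance of two test runs sharing a consumer group. Illustration (plain Ruby stdlib):

```ruby
require "securerandom"

# Before: only 1_000_001 possible ids, so parallel runs could collide.
old_style = "ruby-test-#{Random.new.rand(0..1_000_000)}"

# After: a v4 UUID carries 122 random bits.
new_style = "ruby-test-#{SecureRandom.uuid}"

puts old_style # e.g. "ruby-test-73524"
puts new_style # e.g. "ruby-test-2f1b6a9e-9a1d-4c11-bb2d-0f6f3f3f5a10"
```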
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.13.7
+  version: 0.13.9
 platform: ruby
 authors:
 - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2023-10-31 00:00:00.000000000 Z
+date: 2023-11-07 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -174,11 +174,13 @@ files:
 - ".github/workflows/ci.yml"
 - ".gitignore"
 - ".rspec"
+- ".ruby-gemset"
+- ".ruby-version"
 - ".yardopts"
 - CHANGELOG.md
 - Gemfile
 - Guardfile
-- LICENSE
+- MIT-LICENSE
 - README.md
 - Rakefile
 - certs/cert_chain.pem
@@ -204,6 +206,7 @@ files:
 - lib/rdkafka/consumer/partition.rb
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
+- lib/rdkafka/helpers/time.rb
 - lib/rdkafka/metadata.rb
 - lib/rdkafka/native_kafka.rb
 - lib/rdkafka/producer.rb
@@ -258,7 +261,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.3.4
+rubygems_version: 3.4.19
 signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
metadata.gz.sig CHANGED
Binary file