karafka-rdkafka 0.13.8 → 0.14.0.beta1

Files changed (45)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.gitignore +4 -0
  4. data/.rspec +1 -0
  5. data/.ruby-gemset +1 -0
  6. data/.ruby-version +1 -0
  7. data/CHANGELOG.md +42 -32
  8. data/{LICENSE → MIT-LICENSE} +2 -1
  9. data/README.md +11 -11
  10. data/docker-compose.yml +1 -1
  11. data/ext/README.md +1 -1
  12. data/ext/Rakefile +1 -1
  13. data/lib/rdkafka/abstract_handle.rb +37 -24
  14. data/lib/rdkafka/admin.rb +6 -7
  15. data/lib/rdkafka/bindings.rb +0 -4
  16. data/lib/rdkafka/config.rb +30 -15
  17. data/lib/rdkafka/consumer/headers.rb +2 -4
  18. data/lib/rdkafka/consumer.rb +52 -55
  19. data/lib/rdkafka/helpers/time.rb +14 -0
  20. data/lib/rdkafka/producer.rb +8 -15
  21. data/lib/rdkafka/version.rb +3 -3
  22. data/lib/rdkafka.rb +10 -1
  23. data/spec/rdkafka/abstract_handle_spec.rb +0 -2
  24. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
  25. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
  26. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
  27. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
  28. data/spec/rdkafka/admin_spec.rb +0 -1
  29. data/spec/rdkafka/bindings_spec.rb +0 -1
  30. data/spec/rdkafka/callbacks_spec.rb +0 -2
  31. data/spec/rdkafka/config_spec.rb +8 -2
  32. data/spec/rdkafka/consumer/headers_spec.rb +0 -2
  33. data/spec/rdkafka/consumer/message_spec.rb +0 -2
  34. data/spec/rdkafka/consumer/partition_spec.rb +0 -2
  35. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -2
  36. data/spec/rdkafka/consumer_spec.rb +47 -1
  37. data/spec/rdkafka/error_spec.rb +0 -2
  38. data/spec/rdkafka/metadata_spec.rb +0 -1
  39. data/spec/rdkafka/native_kafka_spec.rb +0 -2
  40. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
  41. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
  42. data/spec/rdkafka/producer_spec.rb +0 -1
  43. data.tar.gz.sig +3 -2
  44. metadata +9 -6
  45. metadata.gz.sig +0 -0
data/lib/rdkafka/consumer.rb CHANGED
@@ -12,6 +12,7 @@ module Rdkafka
   # `each_slice` to consume batches of messages.
   class Consumer
     include Enumerable
+    include Helpers::Time
 
     # @private
     def initialize(native_kafka)
@@ -54,13 +55,11 @@ module Rdkafka
       @native_kafka.closed?
     end
 
-    # Subscribe to one or more topics letting Kafka handle partition assignments.
+    # Subscribes to one or more topics letting Kafka handle partition assignments.
     #
     # @param topics [Array<String>] One or more topic names
-    #
-    # @raise [RdkafkaError] When subscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When subscribing fails
     def subscribe(*topics)
       closed_consumer_check(__method__)
 
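
For orientation, a minimal usage sketch of the subscription flow (illustrative, not part of the diff; topic names are hypothetical):

    consumer.subscribe("example_topic_a", "example_topic_b")
    consumer.subscription # => TopicPartitionList covering both topics
    consumer.unsubscribe
    consumer.subscription # => empty TopicPartitionList
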
@@ -83,9 +82,8 @@ module Rdkafka
 
     # Unsubscribe from all subscribed topics.
     #
-    # @raise [RdkafkaError] When unsubscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When unsubscribing fails
     def unsubscribe
       closed_consumer_check(__method__)
 
@@ -101,10 +99,8 @@ module Rdkafka
     # Pause producing or consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
     def pause(list)
       closed_consumer_check(__method__)
 
@@ -128,13 +124,11 @@ module Rdkafka
       end
     end
 
-    # Resume producing consumption for the provided list of partitions
+    # Resumes producing consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaError] When resume subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When resume subscription fails.
     def resume(list)
       closed_consumer_check(__method__)
 
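
Pause and resume operate on an explicit TopicPartitionList. A hedged sketch (topic name and partition numbers are hypothetical):

    list = Rdkafka::Consumer::TopicPartitionList.new.tap do |tpl|
      tpl.add_topic("example_topic", [0, 1]) # only partitions 0 and 1
    end

    consumer.pause(list)  # fetching from these partitions stops
    # ... drain or retry in-flight work ...
    consumer.resume(list) # fetching continues from the paused positions
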
@@ -157,11 +151,10 @@ module Rdkafka
       end
     end
 
-    # Return the current subscription to topics and partitions
-    #
-    # @raise [RdkafkaError] When getting the subscription fails.
+    # Returns the current subscription to topics and partitions
     #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the subscription fails.
     def subscription
       closed_consumer_check(__method__)
 
@@ -184,7 +177,6 @@ module Rdkafka
     # Atomic assignment of partitions to consume
     #
     # @param list [TopicPartitionList] The topic with partitions to assign
-    #
     # @raise [RdkafkaError] When assigning fails
     def assign(list)
       closed_consumer_check(__method__)
@@ -208,9 +200,8 @@ module Rdkafka
 
     # Returns the current partition assignment.
     #
-    # @raise [RdkafkaError] When getting the assignment fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the assignment fails.
     def assignment
       closed_consumer_check(__method__)
 
@@ -244,14 +235,14 @@ module Rdkafka
     end
 
     # Return the current committed offset per partition for this consumer group.
-    # The offset field of each requested partition will either be set to stored offset or to -1001 in case there was no stored offset for that partition.
+    # The offset field of each requested partition will either be set to stored offset or to -1001
+    # in case there was no stored offset for that partition.
     #
-    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil to use the current subscription.
+    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil
+    #   to use the current subscription.
     # @param timeout_ms [Integer] The timeout for fetching this information.
-    #
-    # @raise [RdkafkaError] When getting the committed positions fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the committed positions fails.
     def committed(list=nil, timeout_ms=1200)
       closed_consumer_check(__method__)
 
@@ -307,11 +298,9 @@ module Rdkafka
     # @param topic [String] The topic to query
     # @param partition [Integer] The partition to query
     # @param timeout_ms [Integer] The timeout for querying the broker
-    #
-    # @raise [RdkafkaError] When querying the broker fails.
-    #
     # @return [Integer] The low and high watermark
-    def query_watermark_offsets(topic, partition, timeout_ms=200)
+    # @raise [RdkafkaError] When querying the broker fails.
+    def query_watermark_offsets(topic, partition, timeout_ms=1000)
       closed_consumer_check(__method__)
 
       low = FFI::MemoryPointer.new(:int64, 1)
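
Note the default timeout here grows from 200 ms to 1000 ms. A minimal call sketch (topic and partition hypothetical); the method returns the two watermarks:

    low, high = consumer.query_watermark_offsets("example_topic", 0)
    puts "partition 0 spans offsets #{low}..#{high}"
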
@@ -343,11 +332,10 @@ module Rdkafka
     #
     # @param topic_partition_list [TopicPartitionList] The list to calculate lag for.
    # @param watermark_timeout_ms [Integer] The timeout for each query watermark call.
-    #
+    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag
+    #   per partition
     # @raise [RdkafkaError] When querying the broker fails.
-    #
-    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag per partition
-    def lag(topic_partition_list, watermark_timeout_ms=100)
+    def lag(topic_partition_list, watermark_timeout_ms=1000)
       out = {}
 
       topic_partition_list.to_h.each do |topic, partitions|
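
The lag default timeout likewise grows from 100 ms to 1000 ms. A sketch of computing lag from committed offsets, assuming offsets have been committed for the subscribed topics:

    committed = consumer.committed  # TopicPartitionList with stored offsets
    lag = consumer.lag(committed)   # => e.g. { "example_topic" => { 0 => 42 } }
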
@@ -395,10 +383,8 @@ module Rdkafka
     # When using this `enable.auto.offset.store` should be set to `false` in the config.
     #
     # @param message [Rdkafka::Consumer::Message] The message which offset will be stored
-    #
-    # @raise [RdkafkaError] When storing the offset fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When storing the offset fails
     def store_offset(message)
       closed_consumer_check(__method__)
 
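
A sketch of manual offset storage, assuming `enable.auto.offset.store` is set to `false` in the consumer config (`process` is a hypothetical application method):

    consumer.each do |message|
      process(message)               # hypothetical business logic
      consumer.store_offset(message) # offset is then committed by auto-commit
    end
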
@@ -430,10 +416,8 @@ module Rdkafka
     # message at the given offset.
     #
     # @param message [Rdkafka::Consumer::Message] The message to which to seek
-    #
-    # @raise [RdkafkaError] When seeking fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When seeking fails
     def seek(message)
       closed_consumer_check(__method__)
 
@@ -503,10 +487,8 @@ module Rdkafka
     #
     # @param list [TopicPartitionList,nil] The topic with partitions to commit
     # @param async [Boolean] Whether to commit async or wait for the commit to finish
-    #
-    # @raise [RdkafkaError] When committing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When committing fails
     def commit(list=nil, async=false)
       closed_consumer_check(__method__)
 
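
Both commit modes in one short sketch; passing `nil` commits the offsets stored for the current assignment:

    consumer.commit             # synchronous, raises RdkafkaError on failure
    consumer.commit(nil, true)  # asynchronous, returns before the broker confirms
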
@@ -532,10 +514,8 @@ module Rdkafka
     # Poll for the next message on one of the subscribed topics
     #
     # @param timeout_ms [Integer] Timeout of this poll
-    #
-    # @raise [RdkafkaError] When polling fails
-    #
     # @return [Message, nil] A message or nil if there was no new message within the timeout
+    # @raise [RdkafkaError] When polling fails
     def poll(timeout_ms)
       closed_consumer_check(__method__)
 
@@ -561,17 +541,40 @@ module Rdkafka
       end
     end
 
+    # Polls the main rdkafka queue (not the consumer one). Do **NOT** use it if `consumer_poll_set`
+    # was set to `true`.
+    #
+    # Events will cause application-provided callbacks to be called.
+    #
+    # Events (in the context of the consumer):
+    #   - error callbacks
+    #   - stats callbacks
+    #   - any other callbacks supported by librdkafka that are not part of the consumer_poll, that
+    #     would have a callback configured and activated.
+    #
+    # This method needs to be called at regular intervals to serve any queued callbacks waiting to
+    # be called. When in use, does **NOT** replace `#poll` but needs to run complementary with it.
+    #
+    # @param timeout_ms [Integer] poll timeout. If set to 0 will run async, when set to -1 will
+    #   block until any events available.
+    #
+    # @note This method technically should be called `#poll` and the current `#poll` should be
+    #   called `#consumer_poll` though we keep the current naming convention to make it backward
+    #   compatible.
+    def events_poll(timeout_ms = 0)
+      @native_kafka.with_inner do |inner|
+        Rdkafka::Bindings.rd_kafka_poll(inner, timeout_ms)
+      end
+    end
+
     # Poll for new messages and yield for each received one. Iteration
     # will end when the consumer is closed.
     #
-    # If `enable.partition.eof` is turned on in the config this will raise an
-    # error when an eof is reached, so you probably want to disable that when
-    # using this method of iteration.
+    # If `enable.partition.eof` is turned on in the config this will raise an error when an eof is
+    # reached, so you probably want to disable that when using this method of iteration.
     #
     # @raise [RdkafkaError] When polling fails
-    #
     # @yieldparam message [Message] Received message
-    #
     # @return [nil]
     def each
       loop do
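
A sketch of the intended split between the two queues when `consumer_poll_set` is disabled (broker address, group and topic names are hypothetical, not from the diff):

    config = Rdkafka::Config.new(
      "bootstrap.servers" => "localhost:9092", # hypothetical broker
      "group.id" => "example-group"            # hypothetical group
    )
    config.consumer_poll_set = false
    consumer = config.consumer
    consumer.subscribe("example_topic")

    loop do
      consumer.events_poll(0)      # serve error/stats callbacks, non-blocking
      message = consumer.poll(250) # fetch the next message, if any
      puts message.payload if message
    end
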
@@ -624,9 +627,7 @@ module Rdkafka
     # that you may or may not see again.
     #
     # @param max_items [Integer] Maximum size of the yielded array of messages
-    #
     # @param bytes_threshold [Integer] Threshold number of total message bytes in the yielded array of messages
-    #
     # @param timeout_ms [Integer] max time to wait for up to max_items
     #
     # @raise [RdkafkaError] When polling fails
@@ -673,10 +674,6 @@ module Rdkafka
     end
 
     private
-    def monotonic_now
-      # needed because Time.now can go backwards
-      Process.clock_gettime(Process::CLOCK_MONOTONIC)
-    end
 
     def closed_consumer_check(method)
       raise Rdkafka::ClosedConsumerError.new(method) if closed?
data/lib/rdkafka/helpers/time.rb ADDED
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  # Namespace for some small utilities used in multiple components
+  module Helpers
+    # Time related methods used across Karafka
+    module Time
+      # @return [Float] current monotonic time in seconds with microsecond precision
+      def monotonic_now
+        ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+      end
+    end
+  end
+end
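
This helper consolidates the `monotonic_now` copies previously kept private in Consumer and Producer. A sketch of the elapsed-time pattern it supports (the `Example` class is illustrative, not from the gem):

    class Example
      include Rdkafka::Helpers::Time

      def timed
        started = monotonic_now
        yield
        monotonic_now - started # elapsed seconds, immune to wall-clock jumps
      end
    end
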
data/lib/rdkafka/producer.rb CHANGED
@@ -1,10 +1,10 @@
 # frozen_string_literal: true
 
-require "objspace"
-
 module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
+    include Helpers::Time
+
     # Cache partitions count for 30 seconds
     PARTITIONS_COUNT_TTL = 30
 
@@ -167,18 +167,16 @@ module Rdkafka
     end
 
     # Partition count for a given topic.
-    # NOTE: If 'allow.auto.create.topics' is set to true in the broker, the topic will be auto-created after returning nil.
     #
     # @param topic [String] The topic name.
+    # @return [Integer] partition count for a given topic
     #
-    # @return partition count [Integer,nil]
-    #
-    # We cache the partition count for a given topic for given time
-    # This prevents us in case someone uses `partition_key` from querying for the count with
-    # each message. Instead we query once every 30 seconds at most
+    # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
+    #   auto-created after returning nil.
     #
-    # @param topic [String] topic name
-    # @return [Integer] partition count for a given topic
+    # @note We cache the partition count for a given topic for given time.
+    #   This prevents us in case someone uses `partition_key` from querying for the count with
+    #   each message. Instead we query once every 30 seconds at most
     def partition_count(topic)
       closed_producer_check(__method__)
 
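
A minimal call sketch (topic name hypothetical); repeated calls within the 30-second TTL are served from the cache rather than querying the cluster:

    producer.partition_count("example_topic") # => e.g. 3
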
@@ -308,11 +306,6 @@ module Rdkafka
 
     private
 
-    def monotonic_now
-      # needed because Time.now can go backwards
-      Process.clock_gettime(Process::CLOCK_MONOTONIC)
-    end
-
     def closed_producer_check(method)
       raise Rdkafka::ClosedProducerError.new(method) if closed?
     end
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.13.8"
-  LIBRDKAFKA_VERSION = "2.2.0"
-  LIBRDKAFKA_SOURCE_SHA256 = "af9a820cbecbc64115629471df7c7cecd40403b6c34bfdbb9223152677a47226"
+  VERSION = "0.14.0.beta1"
+  LIBRDKAFKA_VERSION = "2.3.0"
+  LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
 end
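
The pinned librdkafka moves from 2.2.0 to 2.3.0 with a new source checksum. A sketch of checking a downloaded tarball against the pinned digest (the local file path is hypothetical):

    require "digest"

    digest = Digest::SHA256.file("librdkafka-2.3.0.tar.gz").hexdigest
    abort "librdkafka checksum mismatch" unless digest == Rdkafka::LIBRDKAFKA_SOURCE_SHA256
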
data/lib/rdkafka.rb CHANGED
@@ -1,7 +1,12 @@
 # frozen_string_literal: true
 
-require "rdkafka/version"
+require "logger"
+require "objspace"
+require "ffi"
+require "json"
 
+require "rdkafka/version"
+require "rdkafka/helpers/time"
 require "rdkafka/abstract_handle"
 require "rdkafka/admin"
 require "rdkafka/admin/create_topic_handle"
@@ -24,3 +29,7 @@ require "rdkafka/native_kafka"
 require "rdkafka/producer"
 require "rdkafka/producer/delivery_handle"
 require "rdkafka/producer/delivery_report"
+
+# Main Rdkafka namespace of this gem
+module Rdkafka
+end
data/spec/rdkafka/abstract_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::AbstractHandle do
   let(:response) { 0 }
   let(:result) { -1 }
data/spec/rdkafka/admin/create_topic_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Admin::CreateTopicHandle do
   let(:response) { 0 }
 
data/spec/rdkafka/admin/create_topic_report_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Admin::CreateTopicReport do
   subject { Rdkafka::Admin::CreateTopicReport.new(
     FFI::MemoryPointer.from_string("error string"),
data/spec/rdkafka/admin/delete_topic_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Admin::DeleteTopicHandle do
   let(:response) { 0 }
 
data/spec/rdkafka/admin/delete_topic_report_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Admin::DeleteTopicReport do
   subject { Rdkafka::Admin::DeleteTopicReport.new(
     FFI::MemoryPointer.from_string("error string"),
data/spec/rdkafka/admin_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require "ostruct"
 
 describe Rdkafka::Admin do
data/spec/rdkafka/bindings_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require 'zlib'
 
 describe Rdkafka::Bindings do
data/spec/rdkafka/callbacks_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Callbacks do
 
   # The code in the call back functions is 100% covered by other specs. Due to
data/spec/rdkafka/config_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Config do
   context "logger" do
     it "should have a default logger" do
@@ -115,6 +113,14 @@ describe Rdkafka::Config do
     consumer.close
   end
 
+  it "should create a consumer with consumer_poll_set set to false" do
+    config = rdkafka_consumer_config
+    config.consumer_poll_set = false
+    consumer = config.consumer
+    expect(consumer).to be_a Rdkafka::Consumer
+    consumer.close
+  end
+
   it "should raise an error when creating a consumer with invalid config" do
     config = Rdkafka::Config.new('invalid.key' => 'value')
     expect {
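
The flag under test maps to the events queue split described above; in application code it amounts to this hedged sketch (config values hypothetical):

    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092", "group.id" => "g1")
    config.consumer_poll_set = false # caller must then also drive #events_poll
    consumer = config.consumer
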
data/spec/rdkafka/consumer/headers_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Consumer::Headers do
   let(:headers) do
     { # Note String keys!
data/spec/rdkafka/consumer/message_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Consumer::Message do
   let(:native_client) { new_native_client }
   let(:native_topic) { new_native_topic(native_client: native_client) }
data/spec/rdkafka/consumer/partition_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Consumer::Partition do
   let(:offset) { 100 }
   let(:err) { 0 }
data/spec/rdkafka/consumer/topic_partition_list_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Consumer::TopicPartitionList do
   it "should create a new list and add unassigned topics" do
     list = Rdkafka::Consumer::TopicPartitionList.new
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require "ostruct"
 require 'securerandom'
 
@@ -55,6 +54,30 @@ describe Rdkafka::Consumer do
         consumer.subscription
       }.to raise_error(Rdkafka::RdkafkaError)
     end
+
+    context "when using consumer without the poll set" do
+      let(:consumer) do
+        config = rdkafka_consumer_config
+        config.consumer_poll_set = false
+        config.consumer
+      end
+
+      it "should subscribe, unsubscribe and return the subscription" do
+        expect(consumer.subscription).to be_empty
+
+        consumer.subscribe("consume_test_topic")
+
+        expect(consumer.subscription).not_to be_empty
+        expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+          list.add_topic("consume_test_topic")
+        end
+        expect(consumer.subscription).to eq expected_subscription
+
+        consumer.unsubscribe
+
+        expect(consumer.subscription).to be_empty
+      end
+    end
   end
 
   describe "#pause and #resume" do
@@ -1077,6 +1100,29 @@ describe Rdkafka::Consumer do
     end
   end
 
+  # Only relevant in case of a consumer with separate queues
+  describe '#events_poll' do
+    let(:stats) { [] }
+
+    before { Rdkafka::Config.statistics_callback = ->(published) { stats << published } }
+
+    after { Rdkafka::Config.statistics_callback = nil }
+
+    let(:consumer) do
+      config = rdkafka_consumer_config('statistics.interval.ms': 100)
+      config.consumer_poll_set = false
+      config.consumer
+    end
+
+    it "expect to run events_poll, operate and propagate stats on events_poll and not poll" do
+      consumer.subscribe("consume_test_topic")
+      consumer.poll(1_000)
+      expect(stats).to be_empty
+      consumer.events_poll(-1)
+      expect(stats).not_to be_empty
+    end
+  end
+
   describe "a rebalance listener" do
     let(:consumer) do
       config = rdkafka_consumer_config
data/spec/rdkafka/error_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::RdkafkaError do
   it "should raise a type error for a nil response" do
     expect {
data/spec/rdkafka/metadata_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require "securerandom"
 
 describe Rdkafka::Metadata do
data/spec/rdkafka/native_kafka_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::NativeKafka do
   let(:config) { rdkafka_producer_config }
   let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Producer::DeliveryHandle do
   let(:response) { 0 }
 
data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Producer::DeliveryReport do
   subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
 
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require "zlib"
 
 describe Rdkafka::Producer do
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.13.8
+  version: 0.14.0.beta1
 platform: ruby
 authors:
 - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2023-10-31 00:00:00.000000000 Z
+date: 2023-11-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -174,11 +174,13 @@ files:
 - ".github/workflows/ci.yml"
 - ".gitignore"
 - ".rspec"
+- ".ruby-gemset"
+- ".ruby-version"
 - ".yardopts"
 - CHANGELOG.md
 - Gemfile
 - Guardfile
-- LICENSE
+- MIT-LICENSE
 - README.md
 - Rakefile
 - certs/cert_chain.pem
@@ -204,6 +206,7 @@ files:
 - lib/rdkafka/consumer/partition.rb
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
+- lib/rdkafka/helpers/time.rb
 - lib/rdkafka/metadata.rb
 - lib/rdkafka/native_kafka.rb
 - lib/rdkafka/producer.rb
@@ -254,11 +257,11 @@ required_ruby_version: !ruby/object:Gem::Requirement
     version: '2.7'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.3.1
 requirements: []
-rubygems_version: 3.3.4
+rubygems_version: 3.4.19
 signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
metadata.gz.sig CHANGED
Binary file