rdkafka 0.12.1 → 0.13.0.beta.1

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (52)
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +6 -2
  3. data/CHANGELOG.md +9 -3
  4. data/Gemfile +2 -0
  5. data/Rakefile +2 -0
  6. data/ext/Rakefile +26 -53
  7. data/lib/rdkafka/abstract_handle.rb +2 -0
  8. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  9. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  10. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  11. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  12. data/lib/rdkafka/admin.rb +35 -35
  13. data/lib/rdkafka/bindings.rb +5 -2
  14. data/lib/rdkafka/callbacks.rb +7 -1
  15. data/lib/rdkafka/config.rb +7 -5
  16. data/lib/rdkafka/consumer/headers.rb +2 -0
  17. data/lib/rdkafka/consumer/message.rb +2 -0
  18. data/lib/rdkafka/consumer/partition.rb +2 -0
  19. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  20. data/lib/rdkafka/consumer.rb +18 -9
  21. data/lib/rdkafka/error.rb +9 -0
  22. data/lib/rdkafka/metadata.rb +2 -0
  23. data/lib/rdkafka/native_kafka.rb +52 -0
  24. data/lib/rdkafka/producer/delivery_handle.rb +5 -2
  25. data/lib/rdkafka/producer/delivery_report.rb +9 -2
  26. data/lib/rdkafka/producer.rb +11 -10
  27. data/lib/rdkafka/version.rb +5 -3
  28. data/lib/rdkafka.rb +3 -1
  29. data/rdkafka.gemspec +2 -0
  30. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  31. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  32. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  33. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  34. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  35. data/spec/rdkafka/admin_spec.rb +4 -3
  36. data/spec/rdkafka/bindings_spec.rb +2 -0
  37. data/spec/rdkafka/callbacks_spec.rb +2 -0
  38. data/spec/rdkafka/config_spec.rb +14 -0
  39. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  40. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  41. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  42. data/spec/rdkafka/consumer_spec.rb +10 -0
  43. data/spec/rdkafka/error_spec.rb +2 -0
  44. data/spec/rdkafka/metadata_spec.rb +2 -0
  45. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +8 -6
  46. data/spec/rdkafka/producer/delivery_handle_spec.rb +5 -0
  47. data/spec/rdkafka/producer/delivery_report_spec.rb +8 -2
  48. data/spec/rdkafka/producer_spec.rb +31 -3
  49. data/spec/spec_helper.rb +2 -0
  50. metadata +11 -12
  51. data/dist/librdkafka_1.9.0.tar.gz +0 -0
  52. data/lib/rdkafka/producer/client.rb +0 -47
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: a75568ae9eddc2f80921c43835725e535ddc0c72d5ff931018d08dc8737a6c12
-  data.tar.gz: 7f65353380913d15603728f3fb3c4355ac87b46f380b762ad40a534f981df1d2
+  metadata.gz: c3c5b37efae2485950c8a7627e09dfca50bd7c1264e18ae811b85c3f9ae7d09a
+  data.tar.gz: fb106016aae053f18f53885ae176cdb9e07078c80139146b165231cdd7e490ab
 SHA512:
-  metadata.gz: b802ae224948f5e51c83025cf5a8dd937058c34a336d10f2d4cc3f8dfcebfdb8cdcfc73d92dab169f1f4fac412e1ce424412584f7398af65371bcbb6e271f0e5
-  data.tar.gz: c668c6dfa9f7aadd35614c36cb9ad259b1e8519b949f32116f306e56d61719292a3e49bced751d1abcf7065fab713694756cec8777236b8f61c615dc9745e2e0
+  metadata.gz: 2aaaf70e222ad813ec88ce49078f0e643c1860ff8ce93289134d52ac8a7104681c3f66fad48083c3d78223f7f3157dd7019bd41355933141316078bb1c5fd3aa
+  data.tar.gz: a1503635d8e51589db14db327176cbae4f8be9454938e63c96f2bfdcbd258cef1f392479955797e742463cd44ab1951f7596d23adb2b5c06ebf6ff6ab1963442
data/.semaphore/semaphore.yml CHANGED
@@ -9,15 +9,19 @@ agent:
 blocks:
   - name: Run specs
     task:
+      prologue:
+        commands:
+          - sudo apt install -y valgrind
       jobs:
       - name: bundle exec rspec
         matrix:
         - env_var: RUBY_VERSION
-          values: [ "2.6.8", "2.7.4", "3.0.2", "jruby-9.3.1.0"]
+          values: [ "2.6.10", "2.7.6", "3.0.4", "3.1.2"]
         commands:
         - sem-version ruby $RUBY_VERSION
         - checkout
         - bundle install --path vendor/bundle
         - cd ext && bundle exec rake && cd ..
         - docker-compose up -d --no-recreate
-        - bundle exec rspec
+        - ulimit -c unlimited
+        - valgrind -v bundle exec rspec
data/CHANGELOG.md CHANGED
@@ -1,8 +1,14 @@
-# 0.12.1 (2024-07-11)
-- [Fix] Switch to local release of librdkafka to mitigate its unavailability.
+# 0.13.0
+* Add topic name to delivery report (maeve)
+* Allow string partitioner config (mollyegibson)
+* Fix documented type for DeliveryReport#error (jimmydo)
+* Bump librdkafka to 1.9.2 (thijsc)
+* Use finalizers to cleanly exit producer and admin (thijsc)
 
 # 0.12.0
-* Bumps librdkafka to 1.9.0
+* Bump librdkafka to 1.9.0
+* Fix crash on empty partition key (mensfeld)
+* Pass the delivery handle to the callback (gvisokinskas)
 
 # 0.11.0
 * Upgrade librdkafka to 1.8.2
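Taken together, the headline 0.13.0 entries surface in the public API roughly as follows. A minimal sketch, assuming a reachable broker on localhost:9092 and an existing `events` topic (both placeholders):

```ruby
require "rdkafka"

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092", # placeholder broker
  "partitioner"       => "murmur2"         # string key now accepted alongside :partitioner
)

producer = config.producer
handle = producer.produce(topic: "events", payload: "hello", key: "k1")

report = handle.wait(max_wait_timeout: 5)
report.topic_name # => "events"; new in 0.13.0
report.error      # documented as Integer now (nil when delivery succeeded)

producer.close # a GC finalizer performs the same cleanup if close is never called
```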
data/Gemfile CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 source "https://rubygems.org"
 
 gemspec
data/Rakefile CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 # Rakefile
 
 require 'bundler/gem_tasks'
data/ext/Rakefile CHANGED
@@ -1,67 +1,40 @@
 # frozen_string_literal: true
 
 require File.expand_path('../../lib/rdkafka/version', __FILE__)
+require "mini_portile2"
 require "fileutils"
 require "open-uri"
 
 task :default => :clean do
-  # For nix users, nix can't locate the file paths because the packages it's requiring aren't managed by the system but are
-  # managed by nix itself, so using the normal file paths doesn't work for nix users.
-  #
-  # Mini_portile causes an issue because it's dependencies are downloaded on the fly and therefore don't exist/aren't
-  # accessible in the nix environment
-  if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
-    # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
-    require "mini_portile2"
-    recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
+  # Download and compile librdkafka
+  recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
 
-    # Use default homebrew openssl if we're on mac and the directory exists
-    # and each of flags is not empty
-    if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
-      ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
-      ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
-    end
-
-    releases = File.expand_path(File.join(File.dirname(__FILE__), '../dist'))
-
-    recipe.files << {
-      :url => "file://#{releases}/librdkafka_#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz",
-      :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
-    }
-    recipe.configure_options = ["--host=#{recipe.host}"]
-
-    # Disable using libc regex engine in favor of the embedded one
-    # The default regex engine of librdkafka does not always work exactly as most of the users
-    # would expect, hence this flag allows for changing it to the other one
-    if ENV.key?('RDKAFKA_DISABLE_REGEX_EXT')
-      recipe.configure_options << '--disable-regex-ext'
-    end
+  # Use default homebrew openssl if we're on mac and the directory exists
+  # and each of flags is not empty
+  if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+    ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+    ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
+  end
 
-    recipe.cook
-    # Move dynamic library we're interested in
-    if recipe.host.include?('darwin')
-      from_extension = '1.dylib'
-      to_extension = 'dylib'
-    else
-      from_extension = 'so.1'
-      to_extension = 'so'
-    end
-    lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
-    FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
-    # Cleanup files created by miniportile we don't need in the gem
-    FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
-    FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
+  recipe.files << {
+    :url => "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
+    :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
+  }
+  recipe.configure_options = ["--host=#{recipe.host}"]
+  recipe.cook
+  # Move dynamic library we're interested in
+  if recipe.host.include?('darwin')
+    from_extension = '1.dylib'
+    to_extension = 'dylib'
   else
-    # Otherwise, copy existing libraries to ./ext
-    if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
-      raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
-    end
-    files = [
-      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
-      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
-    ]
-    files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
+    from_extension = 'so.1'
+    to_extension = 'so'
   end
+  lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
+  FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+  # Cleanup files created by miniportile we don't need in the gem
+  FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+  FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
 end
 
 task :clean do
data/lib/rdkafka/abstract_handle.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "ffi"
 
 module Rdkafka
data/lib/rdkafka/admin/create_topic_handle.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Admin
     class CreateTopicHandle < AbstractHandle
data/lib/rdkafka/admin/create_topic_report.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Admin
     class CreateTopicReport
data/lib/rdkafka/admin/delete_topic_handle.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Admin
     class DeleteTopicHandle < AbstractHandle
data/lib/rdkafka/admin/delete_topic_report.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Admin
     class DeleteTopicReport
data/lib/rdkafka/admin.rb CHANGED
@@ -1,33 +1,26 @@
+# frozen_string_literal: true
+
+require "objspace"
+
 module Rdkafka
   class Admin
     # @private
     def initialize(native_kafka)
       @native_kafka = native_kafka
-      @closing = false
-
-      # Start thread to poll client for callbacks
-      @polling_thread = Thread.new do
-        loop do
-          Rdkafka::Bindings.rd_kafka_poll(@native_kafka, 250)
-          # Exit thread if closing and the poll queue is empty
-          if @closing && Rdkafka::Bindings.rd_kafka_outq_len(@native_kafka) == 0
-            break
-          end
-        end
-      end
-      @polling_thread.abort_on_exception = true
+
+      # Makes sure, that native kafka gets closed before it gets GCed by Ruby
+      ObjectSpace.define_finalizer(self, native_kafka.finalizer)
+    end
+
+    def finalizer
+      ->(_) { close }
     end
 
     # Close this admin instance
     def close
-      return unless @native_kafka
-
-      # Indicate to polling thread that we're closing
-      @closing = true
-      # Wait for the polling thread to finish up
-      @polling_thread.join
-      Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
-      @native_kafka = nil
+      ObjectSpace.undefine_finalizer(self)
+
+      @native_kafka.close
     end
 
     # Create a topic with the given partition count and replication factor
@@ -38,6 +31,7 @@ module Rdkafka
     #
     # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
     def create_topic(topic_name, partition_count, replication_factor, topic_config={})
+      closed_admin_check(__method__)
 
       # Create a rd_kafka_NewTopic_t representing the new topic
      error_buffer = FFI::MemoryPointer.from_string(" " * 256)
@@ -68,7 +62,7 @@ module Rdkafka
       topics_array_ptr.write_array_of_pointer(pointer_array)
 
       # Get a pointer to the queue that our request will be enqueued on
-      queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+      queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka.inner)
       if queue_ptr.null?
         Rdkafka::Bindings.rd_kafka_NewTopic_destroy(new_topic_ptr)
         raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
@@ -79,16 +73,16 @@ module Rdkafka
       create_topic_handle[:pending] = true
       create_topic_handle[:response] = -1
       CreateTopicHandle.register(create_topic_handle)
-      admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATETOPICS)
+      admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka.inner, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATETOPICS)
       Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, create_topic_handle.to_ptr)
 
       begin
         Rdkafka::Bindings.rd_kafka_CreateTopics(
-          @native_kafka,
-          topics_array_ptr,
-          1,
-          admin_options_ptr,
-          queue_ptr
+          @native_kafka.inner,
+          topics_array_ptr,
+          1,
+          admin_options_ptr,
+          queue_ptr
         )
       rescue Exception
         CreateTopicHandle.remove(create_topic_handle.to_ptr.address)
@@ -108,6 +102,7 @@ module Rdkafka
     #
     # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
     def delete_topic(topic_name)
+      closed_admin_check(__method__)
 
       # Create a rd_kafka_DeleteTopic_t representing the topic to be deleted
       delete_topic_ptr = Rdkafka::Bindings.rd_kafka_DeleteTopic_new(FFI::MemoryPointer.from_string(topic_name))
@@ -118,7 +113,7 @@ module Rdkafka
       topics_array_ptr.write_array_of_pointer(pointer_array)
 
       # Get a pointer to the queue that our request will be enqueued on
-      queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+      queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka.inner)
       if queue_ptr.null?
         Rdkafka::Bindings.rd_kafka_DeleteTopic_destroy(delete_topic_ptr)
         raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
@@ -129,16 +124,16 @@ module Rdkafka
       delete_topic_handle[:pending] = true
       delete_topic_handle[:response] = -1
       DeleteTopicHandle.register(delete_topic_handle)
-      admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DELETETOPICS)
+      admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka.inner, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DELETETOPICS)
       Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, delete_topic_handle.to_ptr)
 
       begin
         Rdkafka::Bindings.rd_kafka_DeleteTopics(
-          @native_kafka,
-          topics_array_ptr,
-          1,
-          admin_options_ptr,
-          queue_ptr
+          @native_kafka.inner,
+          topics_array_ptr,
+          1,
+          admin_options_ptr,
+          queue_ptr
         )
       rescue Exception
         DeleteTopicHandle.remove(delete_topic_handle.to_ptr.address)
@@ -151,5 +146,10 @@ module Rdkafka
 
       delete_topic_handle
     end
+
+    private
+    def closed_admin_check(method)
+      raise Rdkafka::ClosedAdminError.new(method) if @native_kafka.closed?
+    end
   end
 end
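The net effect of the admin changes, as a small hypothetical session (broker address and topic names are placeholders):

```ruby
admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

handle = admin.create_topic("example_topic", 3, 1)
handle.wait(max_wait_timeout: 15.0)

admin.close
# Polling and teardown now live in NativeKafka; Admin merely delegates,
# and every public method guards itself first:
admin.delete_topic("example_topic") # => raises Rdkafka::ClosedAdminError
```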
data/lib/rdkafka/bindings.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "ffi"
 require "json"
 require "logger"
@@ -33,6 +35,7 @@ module Rdkafka
 
     # Polling
 
+    attach_function :rd_kafka_flush, [:pointer, :int], :void, blocking: true
     attach_function :rd_kafka_poll, [:pointer, :int], :void, blocking: true
     attach_function :rd_kafka_outq_len, [:pointer], :int, blocking: true
 
@@ -256,11 +259,11 @@ module Rdkafka
       # Return RD_KAFKA_PARTITION_UA(unassigned partition) when partition count is nil/zero.
      return -1 unless partition_count&.nonzero?
 
-      str_ptr = FFI::MemoryPointer.from_string(str)
+      str_ptr = str.empty? ? FFI::MemoryPointer::NULL : FFI::MemoryPointer.from_string(str)
       method_name = PARTITIONERS.fetch(partitioner_name) do
         raise Rdkafka::Config::ConfigError.new("Unknown partitioner: #{partitioner_name}")
       end
-      public_send(method_name, nil, str_ptr, str.size, partition_count, nil, nil)
+      public_send(method_name, nil, str_ptr, str.size > 0 ? str.size : 1, partition_count, nil, nil)
     end
 
     # Create Topics
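This partitioner change is what fixes the "crash on empty partition key" changelog entry: an empty string is now handed to librdkafka as a NULL pointer with a key length of 1 rather than a zero-length buffer. A hedged sketch of the call, assuming the surrounding method's signature is `partitioner(str, partition_count, partitioner_name)`:

```ruby
# Previously this could crash inside librdkafka's partitioner;
# now it returns a valid partition index.
Rdkafka::Bindings.partitioner("", 5, "consistent_random") # => an Integer in 0..4
```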
data/lib/rdkafka/callbacks.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   module Callbacks
 
@@ -90,14 +92,18 @@ module Rdkafka
       message = Rdkafka::Bindings::Message.new(message_ptr)
       delivery_handle_ptr_address = message[:_private].address
       if delivery_handle = Rdkafka::Producer::DeliveryHandle.remove(delivery_handle_ptr_address)
+        topic_name = Rdkafka::Bindings.rd_kafka_topic_name(message[:rkt])
+
         # Update delivery handle
         delivery_handle[:response] = message[:err]
         delivery_handle[:partition] = message[:partition]
         delivery_handle[:offset] = message[:offset]
+        delivery_handle[:topic_name] = FFI::MemoryPointer.from_string(topic_name)
         delivery_handle[:pending] = false
+
         # Call delivery callback on opaque
         if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
-          opaque.call_delivery_callback(Rdkafka::Producer::DeliveryReport.new(message[:partition], message[:offset], message[:err]), delivery_handle)
+          opaque.call_delivery_callback(Rdkafka::Producer::DeliveryReport.new(message[:partition], message[:offset], topic_name, message[:err]), delivery_handle)
         end
       end
     end
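With that wiring in place, a delivery callback configured on a producer now sees the topic name on its report. A brief sketch (the `producer` setup is assumed):

```ruby
producer.delivery_callback = lambda do |report|
  # topic_name is populated from rd_kafka_topic_name(message[:rkt]) above
  puts "delivered to #{report.topic_name}[#{report.partition}] @ #{report.offset}"
end
```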
data/lib/rdkafka/config.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "logger"
 
 module Rdkafka
@@ -30,7 +32,6 @@ module Rdkafka
       @@logger
     end
 
-
     # Returns a queue whose contents will be passed to the configured logger. Each entry
     # should follow the format [Logger::Severity, String]. The benefit over calling the
     # logger directly is that this is safe to use from trap contexts.
@@ -47,7 +48,7 @@
     # @return [nil]
     def self.logger=(logger)
       raise NoLoggerError if logger.nil?
-      @@logger=logger
+      @@logger = logger
     end
 
     # Set a callback that will be called every time the underlying client emits statistics.
@@ -179,7 +180,8 @@ module Rdkafka
       # Set callback to receive delivery reports on config
       Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
       # Return producer with Kafka client
-      Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer)), self[:partitioner]).tap do |producer|
+      partitioner_name = self[:partitioner] || self["partitioner"]
+      Rdkafka::Producer.new(Rdkafka::NativeKafka.new(native_kafka(config, :rd_kafka_producer)), partitioner_name).tap do |producer|
        opaque.producer = producer
       end
     end
@@ -194,7 +196,7 @@ module Rdkafka
       opaque = Opaque.new
       config = native_config(opaque)
       Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
-      Rdkafka::Admin.new(native_kafka(config, :rd_kafka_producer))
+      Rdkafka::Admin.new(Rdkafka::NativeKafka.new(native_kafka(config, :rd_kafka_producer)))
     end
 
     # Error that is returned by the underlying rdkafka error if an invalid configuration option is present.
@@ -210,7 +212,7 @@ module Rdkafka
 
     # This method is only intended to be used to create a client,
     # using it in another way will leak memory.
-    def native_config(opaque=nil)
+    def native_config(opaque = nil)
       Rdkafka::Bindings.rd_kafka_conf_new.tap do |config|
         # Create config
         @config_hash.merge(REQUIRED_CONFIG).each do |key, value|
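The `self[:partitioner] || self["partitioner"]` lookup is what makes both spellings below equivalent:

```ruby
Rdkafka::Config.new(:partitioner => "murmur2").producer   # symbol key, as before
Rdkafka::Config.new("partitioner" => "murmur2").producer  # string key, new in 0.13.0
```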
data/lib/rdkafka/consumer/headers.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Consumer
     # A message headers
data/lib/rdkafka/consumer/message.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Consumer
     # A message that was consumed from a topic.
data/lib/rdkafka/consumer/partition.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Consumer
     # Information about a partition, used in {TopicPartitionList}.
data/lib/rdkafka/consumer/topic_partition_list.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Consumer
     # A list of topics with their partition information
data/lib/rdkafka/consumer.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   # A consumer of Kafka messages. It uses the high-level consumer approach where the Kafka
   # brokers automatically assign partitions and load balance partitions over consumers that
@@ -14,20 +16,27 @@ module Rdkafka
     # @private
     def initialize(native_kafka)
       @native_kafka = native_kafka
-      @closing = false
+    end
+
+    def finalizer
+      ->(_) { close }
     end
 
     # Close this consumer
     # @return [nil]
     def close
-      return unless @native_kafka
+      return if closed?
 
-      @closing = true
       Rdkafka::Bindings.rd_kafka_consumer_close(@native_kafka)
       Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
       @native_kafka = nil
     end
 
+    # Whether this consumer has closed
+    def closed?
+      @native_kafka.nil?
+    end
+
     # Subscribe to one or more topics letting Kafka handle partition assignments.
     #
     # @param topics [Array<String>] One or more topic names
@@ -459,7 +468,7 @@ module Rdkafka
         if message
           yield(message)
         else
-          if @closing
+          if closed?
             break
           else
             next
@@ -468,10 +477,6 @@ module Rdkafka
       end
     end
 
-    def closed_consumer_check(method)
-      raise Rdkafka::ClosedConsumerError.new(method) if @native_kafka.nil?
-    end
-
     # Poll for new messages and yield them in batches that may contain
     # messages from more than one partition.
    #
@@ -527,7 +532,7 @@ module Rdkafka
       bytes = 0
       end_time = monotonic_now + timeout_ms / 1000.0
       loop do
-        break if @closing
+        break if closed?
         max_wait = end_time - monotonic_now
         max_wait_ms = if max_wait <= 0
           0 # should not block, but may retrieve a message
@@ -561,5 +566,9 @@ module Rdkafka
       # needed because Time.now can go backwards
       Process.clock_gettime(Process::CLOCK_MONOTONIC)
     end
+
+    def closed_consumer_check(method)
+      raise Rdkafka::ClosedConsumerError.new(method) if closed?
+    end
   end
 end
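The upshot: the consumer's `@closing` flag is gone, a closed consumer is simply one whose native handle is nil, and `finalizer` hands back a lambda suitable for a GC finalizer. A sketch, assuming a `consumer` built from a config:

```ruby
consumer.closed?                                  # => false
consumer.finalizer.call("some-ignored-object-id") # same effect as consumer.close
consumer.closed?                                  # => true
# Public methods guarded by closed_consumer_check still raise
# Rdkafka::ClosedConsumerError, now based on closed?
```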
data/lib/rdkafka/error.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   # Base error class.
   class BaseError < RuntimeError; end
@@ -83,4 +85,11 @@ module Rdkafka
       super("Illegal call to #{method.to_s} on a closed producer")
     end
   end
+
+  # Error class for public consumer method calls on a closed admin.
+  class ClosedAdminError < BaseError
+    def initialize(method)
+      super("Illegal call to #{method.to_s} on a closed admin")
+    end
+  end
 end
data/lib/rdkafka/metadata.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Metadata
     attr_reader :brokers, :topics
data/lib/rdkafka/native_kafka.rb ADDED
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  # @private
+  # A wrapper around a native kafka that polls and cleanly exits
+  class NativeKafka
+    def initialize(inner)
+      @inner = inner
+
+      # Start thread to poll client for delivery callbacks
+      @polling_thread = Thread.new do
+        loop do
+          Rdkafka::Bindings.rd_kafka_poll(inner, 250)
+          # Exit thread if closing and the poll queue is empty
+          if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(inner) == 0
+            break
+          end
+        end
+      end
+      @polling_thread.abort_on_exception = true
+      @polling_thread[:closing] = false
+    end
+
+    def inner
+      @inner
+    end
+
+    def finalizer
+      ->(_) { close }
+    end
+
+    def closed?
+      @inner.nil?
+    end
+
+    def close(object_id=nil)
+      return if closed?
+
+      # Flush outstanding activity
+      Rdkafka::Bindings.rd_kafka_flush(@inner, 30 * 1000)
+
+      # Indicate to polling thread that we're closing
+      @polling_thread[:closing] = true
+      # Wait for the polling thread to finish up
+      @polling_thread.join
+
+      Rdkafka::Bindings.rd_kafka_destroy(@inner)
+
+      @inner = nil
+    end
+  end
+end
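A minimal sketch of the wrapper's lifecycle, with `inner_handle` standing in for the FFI pointer that `Config#native_kafka` produces:

```ruby
native = Rdkafka::NativeKafka.new(inner_handle) # starts the 250ms polling thread
native.inner   # the raw handle, passed to Rdkafka::Bindings calls
native.closed? # => false
native.close   # flush (30s cap), stop the polling thread, rd_kafka_destroy
native.closed? # => true; close is idempotent and doubles as the GC finalizer body
```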
data/lib/rdkafka/producer/delivery_handle.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Producer
     # Handle to wait for a delivery report which is returned when
@@ -6,7 +8,8 @@ module Rdkafka
       layout :pending, :bool,
              :response, :int,
              :partition, :int,
-             :offset, :int64
+             :offset, :int64,
+             :topic_name, :pointer
 
       # @return [String] the name of the operation (e.g. "delivery")
       def operation_name
@@ -15,7 +18,7 @@ module Rdkafka
 
       # @return [DeliveryReport] a report on the delivery of the message
       def create_result
-        DeliveryReport.new(self[:partition], self[:offset])
+        DeliveryReport.new(self[:partition], self[:offset], self[:topic_name].read_string)
       end
     end
   end
 end
data/lib/rdkafka/producer/delivery_report.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Producer
     # Delivery report for a successfully produced message.
@@ -10,15 +12,20 @@ module Rdkafka
      # @return [Integer]
       attr_reader :offset
 
-      # Error in case happen during produce.
+      # The name of the topic this message was produced to.
       # @return [String]
+      attr_reader :topic_name
+
+      # Error in case happen during produce.
+      # @return [Integer]
       attr_reader :error
 
       private
 
-      def initialize(partition, offset, error = nil)
+      def initialize(partition, offset, topic_name = nil, error = nil)
        @partition = partition
         @offset = offset
+        @topic_name = topic_name
         @error = error
       end
     end
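Note the positional order: `topic_name` was inserted before `error`, so code constructing reports by hand (as the updated specs below do) must pass the error code fourth:

```ruby
report = Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1)
report.topic_name # => "topic"
report.error      # => -1 (an Integer, matching the corrected docs)
```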
data/lib/rdkafka/producer.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "objspace"
 
 module Rdkafka
@@ -16,12 +18,12 @@ module Rdkafka
     attr_reader :delivery_callback_arity
 
     # @private
-    def initialize(client, partitioner_name)
-      @client = client
+    def initialize(native_kafka, partitioner_name)
+      @native_kafka = native_kafka
       @partitioner_name = partitioner_name || "consistent_random"
 
-      # Makes sure, that the producer gets closed before it gets GCed by Ruby
-      ObjectSpace.define_finalizer(self, client.finalizer)
+      # Makes sure, that native kafka gets closed before it gets GCed by Ruby
+      ObjectSpace.define_finalizer(self, native_kafka.finalizer)
     end
 
     # Set a callback that will be called every time a message is successfully produced.
@@ -40,7 +42,7 @@ module Rdkafka
     def close
       ObjectSpace.undefine_finalizer(self)
 
-      @client.close
+      @native_kafka.close
     end
 
     # Partition count for a given topic.
@@ -49,10 +51,9 @@ module Rdkafka
     # @param topic [String] The topic name.
     #
     # @return partition count [Integer,nil]
-    #
     def partition_count(topic)
       closed_producer_check(__method__)
-      Rdkafka::Metadata.new(@client.native, topic).topics&.first[:partition_count]
+      Rdkafka::Metadata.new(@native_kafka.inner, topic).topics&.first[:partition_count]
     end
 
     # Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
@@ -144,7 +145,7 @@ module Rdkafka
 
       # Produce the message
       response = Rdkafka::Bindings.rd_kafka_producev(
-        @client.native,
+        @native_kafka.inner,
         *args
       )
 
@@ -157,7 +158,6 @@ module Rdkafka
       delivery_handle
     end
 
-    # @private
     def call_delivery_callback(delivery_report, delivery_handle)
       return unless @delivery_callback
 
@@ -171,8 +171,9 @@ module Rdkafka
       callback.method(:call).arity
     end
 
+    private
     def closed_producer_check(method)
-      raise Rdkafka::ClosedProducerError.new(method) if @client.closed?
+      raise Rdkafka::ClosedProducerError.new(method) if @native_kafka.closed?
    end
   end
 end
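From the caller's perspective the producer behaves as before; only the guard's backing changed. A quick sketch, assuming a `producer` built from a config:

```ruby
producer.close
producer.partition_count("produce_test_topic")
# => raises Rdkafka::ClosedProducerError, now via @native_kafka.closed?
```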
data/lib/rdkafka/version.rb CHANGED
@@ -1,5 +1,7 @@
+# frozen_string_literal: true
+
 module Rdkafka
-  VERSION = "0.12.1"
-  LIBRDKAFKA_VERSION = "1.9.0"
-  LIBRDKAFKA_SOURCE_SHA256 = "59b6088b69ca6cf278c3f9de5cd6b7f3fd604212cd1c59870bc531c54147e889"
+  VERSION = "0.13.0.beta.1"
+  LIBRDKAFKA_VERSION = "1.9.2"
+  LIBRDKAFKA_SOURCE_SHA256 = "3fba157a9f80a0889c982acdd44608be8a46142270a389008b22d921be1198ad"
 end
data/lib/rdkafka.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "rdkafka/version"
 
 require "rdkafka/abstract_handle"
@@ -16,7 +18,7 @@ require "rdkafka/consumer/partition"
 require "rdkafka/consumer/topic_partition_list"
 require "rdkafka/error"
 require "rdkafka/metadata"
+require "rdkafka/native_kafka"
 require "rdkafka/producer"
-require "rdkafka/producer/client"
 require "rdkafka/producer/delivery_handle"
 require "rdkafka/producer/delivery_report"
data/rdkafka.gemspec CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require File.expand_path('lib/rdkafka/version', __dir__)
 
 Gem::Specification.new do |gem|
data/spec/rdkafka/abstract_handle_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::AbstractHandle do
data/spec/rdkafka/admin/create_topic_handle_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Admin::CreateTopicHandle do
data/spec/rdkafka/admin/create_topic_report_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Admin::CreateTopicReport do
data/spec/rdkafka/admin/delete_topic_handle_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Admin::DeleteTopicHandle do
data/spec/rdkafka/admin/delete_topic_report_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Admin::DeleteTopicReport do
data/spec/rdkafka/admin_spec.rb CHANGED
@@ -1,9 +1,11 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "ostruct"
 
 describe Rdkafka::Admin do
-  let(:config) { rdkafka_config }
-  let(:admin) { config.admin }
+  let(:config)  { rdkafka_config }
+  let(:admin)   { config.admin }
 
   after do
     # Registry should always end up being empty
@@ -174,7 +176,6 @@ describe Rdkafka::Admin do
     end
   end
 
-
   it "deletes a topic that was newly created" do
     create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
     create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
data/spec/rdkafka/bindings_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require 'zlib'
 
data/spec/rdkafka/callbacks_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Callbacks do
data/spec/rdkafka/config_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Config do
@@ -148,6 +150,18 @@ describe Rdkafka::Config do
     }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
   end
 
+  it "allows string partitioner key" do
+    expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2")
+    config = Rdkafka::Config.new("partitioner" => "murmur2")
+    config.producer
+  end
+
+  it "allows symbol partitioner key" do
+    expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2")
+    config = Rdkafka::Config.new(:partitioner => "murmur2")
+    config.producer
+  end
+
   it "should allow configuring zstd compression" do
     config = Rdkafka::Config.new('compression.codec' => 'zstd')
     begin
data/spec/rdkafka/consumer/message_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Consumer::Message do
data/spec/rdkafka/consumer/partition_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Consumer::Partition do
data/spec/rdkafka/consumer/topic_partition_list_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Consumer::TopicPartitionList do
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "ostruct"
 require 'securerandom'
@@ -1005,4 +1007,12 @@ describe Rdkafka::Consumer do
       end
     end
   end
+
+  it "provides a finalizer that closes the native kafka client" do
+    expect(consumer.closed?).to eq(false)
+
+    consumer.finalizer.call("some-ignored-object-id")
+
+    expect(consumer.closed?).to eq(true)
+  end
 end
data/spec/rdkafka/error_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::RdkafkaError do
data/spec/rdkafka/metadata_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "securerandom"
 
data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} RENAMED
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
-describe Rdkafka::Producer::Client do
+describe Rdkafka::NativeKafka do
   let(:config) { rdkafka_producer_config }
   let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
   let(:closing) { false }
@@ -63,8 +65,8 @@ describe Rdkafka::Producer::Client do
     client
   end
 
-  it "exposes `native` client" do
-    expect(client.native).to eq(native)
+  it "exposes inner client" do
+    expect(client.inner).to eq(native)
   end
 
   context "when client was not yet closed (`nil`)" do
@@ -94,7 +96,7 @@
     it "closes and unassign the native client" do
       client.close
 
-      expect(client.native).to eq(nil)
+      expect(client.inner).to eq(nil)
       expect(client.closed?).to eq(true)
     end
   end
@@ -129,13 +131,13 @@
     it "does not close and unassign the native client again" do
      client.close
 
-      expect(client.native).to eq(nil)
+      expect(client.inner).to eq(nil)
       expect(client.closed?).to eq(true)
     end
   end
 
-  it "provide a finalizer Proc that closes the `native` client" do
+  it "provides a finalizer that closes the native kafka client" do
     expect(client.closed?).to eq(false)
 
     client.finalizer.call("some-ignored-object-id")
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryHandle do
@@ -9,6 +11,7 @@ describe Rdkafka::Producer::DeliveryHandle do
       handle[:response] = response
       handle[:partition] = 2
       handle[:offset] = 100
+      handle[:topic_name] = FFI::MemoryPointer.from_string("produce_test_topic")
     end
   end
 
@@ -29,6 +32,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
       expect(report.partition).to eq(2)
      expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
     end
 
     it "should wait without a timeout" do
@@ -36,6 +40,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
       expect(report.partition).to eq(2)
       expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
     end
   end
 end
data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
 
   it "should get the partition" do
     expect(subject.partition).to eq 2
@@ -11,7 +13,11 @@ describe Rdkafka::Producer::DeliveryReport do
     expect(subject.offset).to eq 100
   end
 
+  it "should get the topic_name" do
+    expect(subject.topic_name).to eq "topic"
+  end
+
   it "should get the error" do
-    expect(subject.error).to eq "error"
+    expect(subject.error).to eq -1
   end
 end
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "zlib"
 
@@ -7,7 +9,7 @@ describe Rdkafka::Producer do
 
   after do
     # Registry should always end up being empty
-    expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+    expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to eq({})
     producer.close
     consumer.close
   end
@@ -30,6 +32,7 @@ describe Rdkafka::Producer do
       expect(report).not_to be_nil
       expect(report.partition).to eq 1
      expect(report.offset).to be >= 0
+      expect(report.topic_name).to eq "produce_test_topic"
       @callback_called = true
     end
 
@@ -113,6 +116,7 @@ describe Rdkafka::Producer do
     expect(called_report.first).not_to be_nil
     expect(called_report.first.partition).to eq 1
     expect(called_report.first.offset).to be >= 0
+    expect(called_report.first.topic_name).to eq "produce_test_topic"
   end
 
   it "should provide handle" do
@@ -251,6 +255,28 @@ describe Rdkafka::Producer do
     expect(messages[2].key).to eq key
   end
 
+  it "should produce a message with empty string without crashing" do
+    messages = [{key: 'a', partition_key: ''}]
+
+    messages = messages.map do |m|
+      handle = producer.produce(
+        topic: "partitioner_test_topic",
+        payload: "payload partition",
+        key: m[:key],
+        partition_key: m[:partition_key]
+      )
+      report = handle.wait(max_wait_timeout: 5)
+
+      wait_for_message(
+        topic: "partitioner_test_topic",
+        delivery_report: report,
+      )
+    end
+
+    expect(messages[0].partition).to eq 0
+    expect(messages[0].key).to eq 'a'
+  end
+
   it "should produce a message with utf-8 encoding" do
     handle = producer.produce(
       topic: "produce_test_topic",
@@ -448,7 +474,8 @@ describe Rdkafka::Producer do
 
      report_json = JSON.generate(
         "partition" => report.partition,
-        "offset" => report.offset
+        "offset" => report.offset,
+        "topic_name" => report.topic_name
      )
 
       writer.write(report_json)
@@ -460,7 +487,8 @@ describe Rdkafka::Producer do
       report_hash = JSON.parse(reader.read)
       report = Rdkafka::Producer::DeliveryReport.new(
         report_hash["partition"],
-        report_hash["offset"]
+        report_hash["offset"],
+        report_hash["topic_name"]
       )
 
       reader.close
data/spec/spec_helper.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 unless ENV["CI"] == "true"
   require "simplecov"
   SimpleCov.start do
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.12.1
+  version: 0.13.0.beta.1
 platform: ruby
 authors:
 - Thijs Cadier
-autorequire:
+autorequire: 
 bindir: bin
 cert_chain: []
-date: 2024-07-11 00:00:00.000000000 Z
+date: 2022-10-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -156,7 +156,6 @@ files:
 - README.md
 - Rakefile
 - bin/console
-- dist/librdkafka_1.9.0.tar.gz
 - docker-compose.yml
 - ext/README.md
 - ext/Rakefile
@@ -177,8 +176,8 @@ files:
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
 - lib/rdkafka/metadata.rb
+- lib/rdkafka/native_kafka.rb
 - lib/rdkafka/producer.rb
-- lib/rdkafka/producer/client.rb
 - lib/rdkafka/producer/delivery_handle.rb
 - lib/rdkafka/producer/delivery_report.rb
 - lib/rdkafka/version.rb
@@ -198,7 +197,7 @@ files:
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/producer/client_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
@@ -207,7 +206,7 @@ homepage: https://github.com/thijsc/rdkafka-ruby
 licenses:
 - MIT
 metadata: {}
-post_install_message:
+post_install_message: 
 rdoc_options: []
 require_paths:
 - lib
@@ -218,12 +217,12 @@ required_ruby_version: !ruby/object:Gem::Requirement
     version: '2.6'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.3.1
 requirements: []
-rubygems_version: 3.5.14
-signing_key:
+rubygems_version: 3.3.7
+signing_key: 
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
   It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
@@ -244,7 +243,7 @@ test_files:
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/producer/client_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
data/dist/librdkafka_1.9.0.tar.gz DELETED
Binary file (no diff shown)
data/lib/rdkafka/producer/client.rb DELETED
@@ -1,47 +0,0 @@
-module Rdkafka
-  class Producer
-    class Client
-      def initialize(native)
-        @native = native
-
-        # Start thread to poll client for delivery callbacks
-        @polling_thread = Thread.new do
-          loop do
-            Rdkafka::Bindings.rd_kafka_poll(native, 250)
-            # Exit thread if closing and the poll queue is empty
-            if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(native) == 0
-              break
-            end
-          end
-        end
-        @polling_thread.abort_on_exception = true
-        @polling_thread[:closing] = false
-      end
-
-      def native
-        @native
-      end
-
-      def finalizer
-        ->(_) { close }
-      end
-
-      def closed?
-        @native.nil?
-      end
-
-      def close(object_id=nil)
-        return unless @native
-
-        # Indicate to polling thread that we're closing
-        @polling_thread[:closing] = true
-        # Wait for the polling thread to finish up
-        @polling_thread.join
-
-        Rdkafka::Bindings.rd_kafka_destroy(@native)
-
-        @native = nil
-      end
-    end
-  end
-end