rdkafka 0.8.0 → 0.11.0

Files changed (45)
  1. checksums.yaml +4 -4
  2. data/.rspec +1 -0
  3. data/.semaphore/semaphore.yml +23 -0
  4. data/CHANGELOG.md +24 -1
  5. data/Guardfile +19 -0
  6. data/README.md +8 -3
  7. data/bin/console +11 -0
  8. data/docker-compose.yml +5 -3
  9. data/ext/README.md +8 -1
  10. data/ext/Rakefile +5 -20
  11. data/lib/rdkafka/abstract_handle.rb +82 -0
  12. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  13. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  14. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  15. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  16. data/lib/rdkafka/admin.rb +155 -0
  17. data/lib/rdkafka/bindings.rb +57 -18
  18. data/lib/rdkafka/callbacks.rb +106 -0
  19. data/lib/rdkafka/config.rb +59 -3
  20. data/lib/rdkafka/consumer.rb +125 -5
  21. data/lib/rdkafka/error.rb +29 -3
  22. data/lib/rdkafka/metadata.rb +6 -5
  23. data/lib/rdkafka/producer/delivery_handle.rb +7 -53
  24. data/lib/rdkafka/producer/delivery_report.rb +1 -1
  25. data/lib/rdkafka/producer.rb +27 -12
  26. data/lib/rdkafka/version.rb +3 -3
  27. data/lib/rdkafka.rb +7 -0
  28. data/rdkafka.gemspec +9 -7
  29. data/spec/rdkafka/abstract_handle_spec.rb +113 -0
  30. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  31. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  32. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  33. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  34. data/spec/rdkafka/admin_spec.rb +203 -0
  35. data/spec/rdkafka/bindings_spec.rb +32 -8
  36. data/spec/rdkafka/callbacks_spec.rb +20 -0
  37. data/spec/rdkafka/config_spec.rb +78 -9
  38. data/spec/rdkafka/consumer_spec.rb +326 -42
  39. data/spec/rdkafka/error_spec.rb +4 -0
  40. data/spec/rdkafka/metadata_spec.rb +78 -0
  41. data/spec/rdkafka/producer/delivery_handle_spec.rb +1 -41
  42. data/spec/rdkafka/producer_spec.rb +102 -34
  43. data/spec/spec_helper.rb +78 -20
  44. metadata +84 -29
  45. data/.travis.yml +0 -48
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 8bf97d10412b4f3c0801f657796fd05c94ceede3caafe6a45719c840b534752a
- data.tar.gz: 8cc93bffb8119cf9c97aa60cf1f6d634e66a99686e0785150ae51283a6514e8c
+ metadata.gz: 55566df04f391a8ecbd2a51e7c2e1d91dc31397f90fcb2777b26d90b4774a290
+ data.tar.gz: 93407bbc327703e231bc9da7e2ebb74ca52089f14a50c6491b20d98e98ec7b07
  SHA512:
- metadata.gz: e8e085b3dd9d80d77003d2c7f0fbda69371c233b121eac2594556d7d62da99a4bed4aace93e9489783c789e64bafd732faaad3de2ffb5d3a5af8568c5b43676b
- data.tar.gz: 5a267a9814e87e876544e04e6657829c298ff01b03cb5ff41a0205795ea2fb4f6a2147ee1a0949e52f7a069f5dc73d5ed2072867194032e8ea913ac5d6871e23
+ metadata.gz: 1ad2f39610e53f27b9e192e36d5420e15b7d2807ca628cd80a48ba5a9e60ab6c2601a2c10303767fc048f5be812656e40b8fa6decbdf2fd249cf56854dd28dae
+ data.tar.gz: d27ef3a56b024f2c120eb0b348111e4d4f0fac19e5b818bfaa12503b7af033d1fa1f72543b3f306b07d91ced1de9aea487fff48df482cb4e75cd207f075f8c7a
data/.rspec ADDED
@@ -0,0 +1 @@
+ --format documentation
data/.semaphore/semaphore.yml ADDED
@@ -0,0 +1,23 @@
+ version: v1.0
+ name: Rdkafka Ruby
+
+ agent:
+   machine:
+     type: e1-standard-4
+     os_image: ubuntu1804
+
+ blocks:
+   - name: Run specs
+     task:
+       jobs:
+         - name: bundle exec rspec
+           matrix:
+             - env_var: RUBY_VERSION
+               values: [ "2.6.8", "2.7.4", "3.0.2", "jruby-9.3.1.0"]
+           commands:
+             - sem-version ruby $RUBY_VERSION
+             - checkout
+             - bundle install --path vendor/bundle
+             - cd ext && bundle exec rake && cd ..
+             - docker-compose up -d --no-recreate
+             - bundle exec rspec
data/CHANGELOG.md CHANGED
@@ -1,3 +1,26 @@
+ # 0.11.0
+ * Upgrade librdkafka to 1.8.2
+ * Bump supported minimum Ruby version to 2.6
+ * Better homebrew path detection
+
+ # 0.10.0
+ * Upgrade librdkafka to 1.5.0
+ * Add error callback config
+
+ # 0.9.0
+ * Fixes for Ruby 3.0
+ * Allow any callable object for callbacks (gremerritt)
+ * Reduce memory allocations in Rdkafka::Producer#produce (jturkel)
+ * Use queue as log callback to avoid unsafe calls from trap context (breunigs)
+ * Allow passing in topic configuration on create_topic (dezka)
+ * Add each_batch method to consumer (mgrosso)
+
+ # 0.8.1
+ * Fix topic_flag behaviour and add tests for Metadata (geoff2k)
+ * Add topic admin interface (geoff2k)
+ * Raise an exception if @native_kafka is nil (geoff2k)
+ * Option to use zstd compression (jasonmartens)
+
  # 0.8.0
  * Upgrade librdkafka to 1.4.0
  * Integrate librdkafka metadata API and add partition_key (by Adithya-copart)
@@ -27,7 +50,7 @@
  * Use default Homebrew openssl location if present
  * Consumer lag handles empty topics
  * End iteration in consumer when it is closed
- * Add suport for storing message offsets
+ * Add support for storing message offsets
  * Add missing runtime dependency to rake

  # 0.4.1
data/Guardfile ADDED
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ logger level: :error
+
+ guard :rspec, cmd: "bundle exec rspec --format #{ENV.fetch("FORMAT", "documentation")}" do
+   require "guard/rspec/dsl"
+   dsl = Guard::RSpec::Dsl.new(self)
+
+   # Ruby files
+   ruby = dsl.ruby
+   dsl.watch_spec_files_for(ruby.lib_files)
+   watch(%r{^lib/(.+)\.rb}) { |m| "spec/#{m[1]}_spec.rb" }
+
+   # RSpec files
+   rspec = dsl.rspec
+   watch(rspec.spec_helper) { rspec.spec_dir }
+   watch(rspec.spec_support) { rspec.spec_dir }
+   watch(rspec.spec_files)
+ end
data/README.md CHANGED
@@ -1,14 +1,15 @@
  # Rdkafka

- [![Build Status](https://travis-ci.org/appsignal/rdkafka-ruby.svg?branch=master)](https://travis-ci.org/appsignal/rdkafka-ruby)
+ [![Build Status](https://appsignal.semaphoreci.com/badges/rdkafka-ruby/branches/master.svg?style=shields)](https://appsignal.semaphoreci.com/projects/rdkafka-ruby)
  [![Gem Version](https://badge.fury.io/rb/rdkafka.svg)](https://badge.fury.io/rb/rdkafka)
  [![Maintainability](https://api.codeclimate.com/v1/badges/ecb1765f81571cccdb0e/maintainability)](https://codeclimate.com/github/appsignal/rdkafka-ruby/maintainability)
- [![Test Coverage](https://api.codeclimate.com/v1/badges/ecb1765f81571cccdb0e/test_coverage)](https://codeclimate.com/github/appsignal/rdkafka-ruby/test_coverage)

  The `rdkafka` gem is a modern Kafka client library for Ruby based on
  [librdkafka](https://github.com/edenhill/librdkafka/).
  It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
- gem and targets Kafka 1.0+ and Ruby 2.4+.
+ gem and targets Kafka 1.0+ and Ruby versions that are under security or
+ active maintenance. We remove Ruby versions from our CI builds when they
+ become EOL.

  `rdkafka` was written because we needed a reliable Ruby client for
  Kafka that supports modern Kafka at [AppSignal](https://appsignal.com).
@@ -72,6 +73,10 @@ end
  delivery_handles.each(&:wait)
  ```

+ Note that creating a producer consumes some resources that will not be
+ released until `#close` is explicitly called, so be sure to call
+ `Config#producer` only as necessary.
+
  ## Development

  A Docker Compose file is included to run Kafka and Zookeeper. To run
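As a hedged illustration of the note above (broker address and topic name are assumptions): create one long-lived producer, reuse it for all messages, and close it exactly once.

```ruby
require "rdkafka"

# One producer for the whole process; creating one per message would
# hold native resources until each is explicitly closed.
config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
producer = config.producer

begin
  handle = producer.produce(topic: "events", payload: "hello")
  handle.wait # block until the broker acknowledges delivery
ensure
  producer.close # release the native client's resources once, when done
end
```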
data/bin/console ADDED
@@ -0,0 +1,11 @@
+ #!/usr/bin/env ruby
+
+ # frozen_string_literal: true
+
+ ENV["IRBRC"] = File.join(File.dirname(__FILE__), ".irbrc")
+
+ require "bundler/setup"
+ require "rdkafka"
+
+ require "irb"
+ IRB.start(__FILE__)
data/docker-compose.yml CHANGED
@@ -1,14 +1,16 @@
+ ---

  version: '2'
+
  services:
    zookeeper:
-     image: confluentinc/cp-zookeeper:latest
+     image: confluentinc/cp-zookeeper:5.2.6
      environment:
        ZOOKEEPER_CLIENT_PORT: 2181
        ZOOKEEPER_TICK_TIME: 2000

    kafka:
-     image: confluentinc/cp-kafka:latest
+     image: confluentinc/cp-kafka:5.2.5-10
      depends_on:
        - zookeeper
      ports:
@@ -16,7 +18,7 @@ services:
      environment:
        KAFKA_BROKER_ID: 1
        KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
+       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:29092,PLAINTEXT_HOST://localhost:9092
        KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
        KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
        KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
data/ext/README.md CHANGED
@@ -1,6 +1,6 @@
  # Ext

- This gem dependes on the `librdkafka` C library. It is downloaded when
+ This gem depends on the `librdkafka` C library. It is downloaded when
  this gem is installed.

  To update the `librdkafka` version follow the following steps:
@@ -9,3 +9,10 @@ To update the `librdkafka` version follow the following steps:
    version number and asset checksum for `tar.gz`.
  * Change the version in `lib/rdkafka/version.rb`
  * Change the `sha256` in `lib/rdkafka/version.rb`
+ * Run `bundle exec rake` in the `ext` directory to download and build
+   the new version
+ * Run `docker-compose pull` in the main gem directory to ensure the docker
+   images used by the tests are up to date, and run `docker-compose up`
+ * Finally, run `bundle exec rspec` in the main gem directory to execute
+   the test suite and detect any regressions that may have been introduced
+   by the update
data/ext/Rakefile CHANGED
@@ -1,32 +1,17 @@
  require File.expand_path('../../lib/rdkafka/version', __FILE__)
  require "mini_portile2"
  require "fileutils"
+ require "open-uri"

  task :default => :clean do
-   # MiniPortile#download_file_http is a monkey patch that removes the download
-   # progress indicator. This indicator relies on the 'Content Length' response
-   # headers, which is not set by GitHub
-   class MiniPortile
-     def download_file_http(url, full_path, _count)
-       filename = File.basename(full_path)
-       with_tempfile(filename, full_path) do |temp_file|
-         params = { 'Accept-Encoding' => 'identity' }
-         OpenURI.open_uri(url, 'rb', params) do |io|
-           temp_file.write(io.read)
-         end
-         output
-       end
-     end
-   end
-
    # Download and compile librdkafka
    recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)

    # Use default homebrew openssl if we're on mac and the directory exists
    # and each of flags is not empty
-   if recipe.host&.include?("darwin") && Dir.exist?("/usr/local/opt/openssl")
-     ENV["CPPFLAGS"] = "-I/usr/local/opt/openssl/include" unless ENV["CPPFLAGS"]
-     ENV["LDFLAGS"] = "-L/usr/local/opt/openssl/lib" unless ENV["LDFLAGS"]
+   if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+     ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+     ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
    end

    recipe.files << {
@@ -65,7 +50,7 @@ namespace :build do
    recipe = MiniPortile.new("librdkafka", version)
    recipe.files << "https://github.com/edenhill/librdkafka/archive/#{ref}.tar.gz"
-   recipe.configure_options = ["--host=#{recipe.host}"]
+   recipe.configure_options = ["--host=#{recipe.host}", "--enable-static", "--enable-zstd"]
    recipe.cook

    ext = recipe.host.include?("darwin") ? "dylib" : "so"
data/lib/rdkafka/abstract_handle.rb ADDED
@@ -0,0 +1,82 @@
+ require "ffi"
+
+ module Rdkafka
+   class AbstractHandle < FFI::Struct
+     # Subclasses must define their own layout, and the layout must start with:
+     #
+     # layout :pending, :bool,
+     #        :response, :int
+
+     REGISTRY = {}
+
+     CURRENT_TIME = -> { Process.clock_gettime(Process::CLOCK_MONOTONIC) }.freeze
+
+     private_constant :CURRENT_TIME
+
+     def self.register(handle)
+       address = handle.to_ptr.address
+       REGISTRY[address] = handle
+     end
+
+     def self.remove(address)
+       REGISTRY.delete(address)
+     end
+
+     # Whether the handle is still pending.
+     #
+     # @return [Boolean]
+     def pending?
+       self[:pending]
+     end
+
+     # Wait for the operation to complete or raise an error if this takes longer than the timeout.
+     # If there is a timeout this does not mean the operation failed, rdkafka might still be working on the operation.
+     # In this case it is possible to call wait again.
+     #
+     # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out. If this is nil it does not time out.
+     # @param wait_timeout [Numeric] Amount of time we should wait before we recheck if the operation has completed
+     #
+     # @raise [RdkafkaError] When the operation failed
+     # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
+     #
+     # @return [Object] Operation-specific result
+     def wait(max_wait_timeout: 60, wait_timeout: 0.1)
+       timeout = if max_wait_timeout
+                   CURRENT_TIME.call + max_wait_timeout
+                 else
+                   nil
+                 end
+       loop do
+         if pending?
+           if timeout && timeout <= CURRENT_TIME.call
+             raise WaitTimeoutError.new("Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds")
+           end
+           sleep wait_timeout
+         elsif self[:response] != 0
+           raise_error
+         else
+           return create_result
+         end
+       end
+     end
+
+     # @return [String] the name of the operation (e.g. "delivery")
+     def operation_name
+       raise "Must be implemented by subclass!"
+     end
+
+     # @return [Object] operation-specific result
+     def create_result
+       raise "Must be implemented by subclass!"
+     end
+
+     # Allow subclasses to override
+     def raise_error
+       raise RdkafkaError.new(self[:response])
+     end
+
+     # Error that is raised when waiting for the handle to complete
+     # takes longer than the specified timeout.
+     class WaitTimeoutError < RuntimeError; end
+   end
+ end
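Since a `WaitTimeoutError` does not mean the operation failed, a caller can simply wait on the same handle again. A minimal sketch (the handle here is assumed to come from `Producer#produce`; the topic name is illustrative):

```ruby
handle = producer.produce(topic: "events", payload: "payload")

begin
  report = handle.wait(max_wait_timeout: 5)
rescue Rdkafka::AbstractHandle::WaitTimeoutError
  # rdkafka may still be working on the request; waiting again is safe.
  retry
end
```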
data/lib/rdkafka/admin/create_topic_handle.rb ADDED
@@ -0,0 +1,27 @@
+ module Rdkafka
+   class Admin
+     class CreateTopicHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :error_string, :pointer,
+              :result_name, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "create topic"
+       end
+
+       # @return [CreateTopicReport] the result of the create topic operation
+       def create_result
+         CreateTopicReport.new(self[:error_string], self[:result_name])
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
+         )
+       end
+     end
+   end
+ end
data/lib/rdkafka/admin/create_topic_report.rb ADDED
@@ -0,0 +1,22 @@
+ module Rdkafka
+   class Admin
+     class CreateTopicReport
+       # Any error message generated from the CreateTopic
+       # @return [String]
+       attr_reader :error_string
+
+       # The name of the topic created
+       # @return [String]
+       attr_reader :result_name
+
+       def initialize(error_string, result_name)
+         if error_string != FFI::Pointer::NULL
+           @error_string = error_string.read_string
+         end
+         if result_name != FFI::Pointer::NULL
+           @result_name = result_name.read_string
+         end
+       end
+     end
+   end
+ end
data/lib/rdkafka/admin/delete_topic_handle.rb ADDED
@@ -0,0 +1,27 @@
+ module Rdkafka
+   class Admin
+     class DeleteTopicHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :error_string, :pointer,
+              :result_name, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "delete topic"
+       end
+
+       # @return [DeleteTopicReport] the result of the delete topic operation
+       def create_result
+         DeleteTopicReport.new(self[:error_string], self[:result_name])
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: DeleteTopicReport.new(self[:error_string], self[:result_name]).error_string
+         )
+       end
+     end
+   end
+ end
data/lib/rdkafka/admin/delete_topic_report.rb ADDED
@@ -0,0 +1,22 @@
+ module Rdkafka
+   class Admin
+     class DeleteTopicReport
+       # Any error message generated from the DeleteTopic
+       # @return [String]
+       attr_reader :error_string
+
+       # The name of the topic deleted
+       # @return [String]
+       attr_reader :result_name
+
+       def initialize(error_string, result_name)
+         if error_string != FFI::Pointer::NULL
+           @error_string = error_string.read_string
+         end
+         if result_name != FFI::Pointer::NULL
+           @result_name = result_name.read_string
+         end
+       end
+     end
+   end
+ end
data/lib/rdkafka/admin.rb ADDED
@@ -0,0 +1,155 @@
+ module Rdkafka
+   class Admin
+     # @private
+     def initialize(native_kafka)
+       @native_kafka = native_kafka
+       @closing = false
+
+       # Start thread to poll client for callbacks
+       @polling_thread = Thread.new do
+         loop do
+           Rdkafka::Bindings.rd_kafka_poll(@native_kafka, 250)
+           # Exit thread if closing and the poll queue is empty
+           if @closing && Rdkafka::Bindings.rd_kafka_outq_len(@native_kafka) == 0
+             break
+           end
+         end
+       end
+       @polling_thread.abort_on_exception = true
+     end
+
+     # Close this admin instance
+     def close
+       return unless @native_kafka
+
+       # Indicate to polling thread that we're closing
+       @closing = true
+       # Wait for the polling thread to finish up
+       @polling_thread.join
+       Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
+       @native_kafka = nil
+     end
+
+     # Create a topic with the given partition count and replication factor
+     #
+     # @raise [ConfigError] When the partition count or replication factor are out of valid range
+     # @raise [RdkafkaError] When the topic name is invalid or the topic already exists
+     # @raise [RdkafkaError] When the topic configuration is invalid
+     #
+     # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
+     def create_topic(topic_name, partition_count, replication_factor, topic_config={})
+       # Create a rd_kafka_NewTopic_t representing the new topic
+       error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+       new_topic_ptr = Rdkafka::Bindings.rd_kafka_NewTopic_new(
+         FFI::MemoryPointer.from_string(topic_name),
+         partition_count,
+         replication_factor,
+         error_buffer,
+         256
+       )
+       if new_topic_ptr.null?
+         raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+       end
+
+       unless topic_config.nil?
+         topic_config.each do |key, value|
+           Rdkafka::Bindings.rd_kafka_NewTopic_set_config(
+             new_topic_ptr,
+             key.to_s,
+             value.to_s
+           )
+         end
+       end
+
+       # Note that rd_kafka_CreateTopics can create more than one topic at a time
+       pointer_array = [new_topic_ptr]
+       topics_array_ptr = FFI::MemoryPointer.new(:pointer)
+       topics_array_ptr.write_array_of_pointer(pointer_array)
+
+       # Get a pointer to the queue that our request will be enqueued on
+       queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+       if queue_ptr.null?
+         Rdkafka::Bindings.rd_kafka_NewTopic_destroy(new_topic_ptr)
+         raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+       end
+
+       # Create and register the handle we will return to the caller
+       create_topic_handle = CreateTopicHandle.new
+       create_topic_handle[:pending] = true
+       create_topic_handle[:response] = -1
+       CreateTopicHandle.register(create_topic_handle)
+       admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATETOPICS)
+       Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, create_topic_handle.to_ptr)
+
+       begin
+         Rdkafka::Bindings.rd_kafka_CreateTopics(
+           @native_kafka,
+           topics_array_ptr,
+           1,
+           admin_options_ptr,
+           queue_ptr
+         )
+       rescue Exception
+         CreateTopicHandle.remove(create_topic_handle.to_ptr.address)
+         raise
+       ensure
+         Rdkafka::Bindings.rd_kafka_AdminOptions_destroy(admin_options_ptr)
+         Rdkafka::Bindings.rd_kafka_queue_destroy(queue_ptr)
+         Rdkafka::Bindings.rd_kafka_NewTopic_destroy(new_topic_ptr)
+       end
+
+       create_topic_handle
+     end
+
+     # Delete the named topic
+     #
+     # @raise [RdkafkaError] When the topic name is invalid or the topic does not exist
+     #
+     # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
+     def delete_topic(topic_name)
+       # Create a rd_kafka_DeleteTopic_t representing the topic to be deleted
+       delete_topic_ptr = Rdkafka::Bindings.rd_kafka_DeleteTopic_new(FFI::MemoryPointer.from_string(topic_name))
+
+       # Note that rd_kafka_DeleteTopics can delete more than one topic at a time
+       pointer_array = [delete_topic_ptr]
+       topics_array_ptr = FFI::MemoryPointer.new(:pointer)
+       topics_array_ptr.write_array_of_pointer(pointer_array)
+
+       # Get a pointer to the queue that our request will be enqueued on
+       queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+       if queue_ptr.null?
+         Rdkafka::Bindings.rd_kafka_DeleteTopic_destroy(delete_topic_ptr)
+         raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+       end
+
+       # Create and register the handle we will return to the caller
+       delete_topic_handle = DeleteTopicHandle.new
+       delete_topic_handle[:pending] = true
+       delete_topic_handle[:response] = -1
+       DeleteTopicHandle.register(delete_topic_handle)
+       admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DELETETOPICS)
+       Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, delete_topic_handle.to_ptr)
+
+       begin
+         Rdkafka::Bindings.rd_kafka_DeleteTopics(
+           @native_kafka,
+           topics_array_ptr,
+           1,
+           admin_options_ptr,
+           queue_ptr
+         )
+       rescue Exception
+         DeleteTopicHandle.remove(delete_topic_handle.to_ptr.address)
+         raise
+       ensure
+         Rdkafka::Bindings.rd_kafka_AdminOptions_destroy(admin_options_ptr)
+         Rdkafka::Bindings.rd_kafka_queue_destroy(queue_ptr)
+         Rdkafka::Bindings.rd_kafka_DeleteTopic_destroy(delete_topic_ptr)
+       end
+
+       delete_topic_handle
+     end
+   end
+ end
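A hedged end-to-end sketch of this admin interface; it assumes an `admin` factory on `Config` (in line with the 0.8.1 changelog's "topic admin interface") and a local broker:

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

# Create a topic with 3 partitions, replication factor 1, and extra topic
# config, then block until the background queue reports the result.
admin.create_topic("example-topic", 3, 1, "cleanup.policy" => "compact")
     .wait(max_wait_timeout: 15)

# Deleting returns a handle in the same way.
admin.delete_topic("example-topic").wait(max_wait_timeout: 15)

admin.close # stops the polling thread and destroys the native client
```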
data/lib/rdkafka/bindings.rb CHANGED
@@ -108,6 +108,8 @@ module Rdkafka
    attach_function :rd_kafka_conf_set_opaque, [:pointer, :pointer], :void
    callback :stats_cb, [:pointer, :string, :int, :pointer], :int
    attach_function :rd_kafka_conf_set_stats_cb, [:pointer, :stats_cb], :void
+   callback :error_cb, [:pointer, :int, :string, :pointer], :void
+   attach_function :rd_kafka_conf_set_error_cb, [:pointer, :error_cb], :void

    # Log queue
    attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
@@ -130,7 +132,7 @@ module Rdkafka
      else
        Logger::UNKNOWN
      end
-     Rdkafka::Config.logger.add(severity) { "rdkafka: #{line}" }
+     Rdkafka::Config.log_queue << [severity, "rdkafka: #{line}"]
    end

    StatsCallback = FFI::Function.new(
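The log callback now only enqueues; a separate thread (set up in `Config`, whose changes are listed above but not shown here) drains `log_queue` into `Config.logger`. A standalone sketch of that pattern, with names of my own choosing:

```ruby
require "logger"

log_queue = Queue.new          # thread-safe; Queue#push is allowed in trap context
logger = Logger.new($stdout)   # Logger#add is not, hence the indirection

drainer = Thread.new do
  loop do
    severity, message = log_queue.pop # blocks until a log line arrives
    logger.add(severity, message)
  end
end

log_queue << [Logger::INFO, "rdkafka: broker connected"]
sleep 0.1 # give the drainer a moment in this sketch
```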
@@ -146,6 +148,15 @@ module Rdkafka
      0
    end

+   ErrorCallback = FFI::Function.new(
+     :void, [:pointer, :int, :string, :pointer]
+   ) do |_client_ptr, err_code, reason, _opaque|
+     if Rdkafka::Config.error_callback
+       error = Rdkafka::RdkafkaError.new(err_code, broker_message: reason)
+       Rdkafka::Config.error_callback.call(error)
+     end
+   end
+
    # Handle

    enum :kafka_type, [
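On the Ruby side this surfaces as `Rdkafka::Config.error_callback` (the "error callback config" from the 0.10.0 changelog; the writer lives in the `config.rb` changes not shown here). A minimal sketch; any object responding to `#call` should work:

```ruby
Rdkafka::Config.error_callback = lambda do |error|
  # error is an Rdkafka::RdkafkaError built from the librdkafka code and reason
  warn "rdkafka error: #{error.code} (#{error.broker_message})"
end
```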
@@ -245,22 +256,50 @@ module Rdkafka
      rd_kafka_msg_partitioner_consistent_random(nil, str_ptr, str.size, partition_count, nil, nil)
    end

-   DeliveryCallback = FFI::Function.new(
-     :void, [:pointer, :pointer, :pointer]
-   ) do |client_ptr, message_ptr, opaque_ptr|
-     message = Message.new(message_ptr)
-     delivery_handle_ptr_address = message[:_private].address
-     if delivery_handle = Rdkafka::Producer::DeliveryHandle.remove(delivery_handle_ptr_address)
-       # Update delivery handle
-       delivery_handle[:pending] = false
-       delivery_handle[:response] = message[:err]
-       delivery_handle[:partition] = message[:partition]
-       delivery_handle[:offset] = message[:offset]
-       # Call delivery callback on opaque
-       if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
-         opaque.call_delivery_callback(Rdkafka::Producer::DeliveryReport.new(message[:partition], message[:offset], message[:err]))
-       end
-     end
-   end
+   # Create Topics
+
+   RD_KAFKA_ADMIN_OP_CREATETOPICS = 1 # rd_kafka_admin_op_t
+   RD_KAFKA_EVENT_CREATETOPICS_RESULT = 100 # rd_kafka_event_type_t
+
+   attach_function :rd_kafka_CreateTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :void
+   attach_function :rd_kafka_NewTopic_new, [:pointer, :size_t, :size_t, :pointer, :size_t], :pointer
+   attach_function :rd_kafka_NewTopic_set_config, [:pointer, :string, :string], :int32
+   attach_function :rd_kafka_NewTopic_destroy, [:pointer], :void
+   attach_function :rd_kafka_event_CreateTopics_result, [:pointer], :pointer
+   attach_function :rd_kafka_CreateTopics_result_topics, [:pointer, :pointer], :pointer
+
+   # Delete Topics
+
+   RD_KAFKA_ADMIN_OP_DELETETOPICS = 2 # rd_kafka_admin_op_t
+   RD_KAFKA_EVENT_DELETETOPICS_RESULT = 101 # rd_kafka_event_type_t
+
+   attach_function :rd_kafka_DeleteTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :int32
+   attach_function :rd_kafka_DeleteTopic_new, [:pointer], :pointer
+   attach_function :rd_kafka_DeleteTopic_destroy, [:pointer], :void
+   attach_function :rd_kafka_event_DeleteTopics_result, [:pointer], :pointer
+   attach_function :rd_kafka_DeleteTopics_result_topics, [:pointer, :pointer], :pointer
+
+   # Background Queue and Callback
+
+   attach_function :rd_kafka_queue_get_background, [:pointer], :pointer
+   attach_function :rd_kafka_conf_set_background_event_cb, [:pointer, :pointer], :void
+   attach_function :rd_kafka_queue_destroy, [:pointer], :void
+
+   # Admin Options
+
+   attach_function :rd_kafka_AdminOptions_new, [:pointer, :int32], :pointer
+   attach_function :rd_kafka_AdminOptions_set_opaque, [:pointer, :pointer], :void
+   attach_function :rd_kafka_AdminOptions_destroy, [:pointer], :void
+
+   # Extracting data from event types
+
+   attach_function :rd_kafka_event_type, [:pointer], :int32
+   attach_function :rd_kafka_event_opaque, [:pointer], :pointer
+
+   # Extracting data from topic results
+
+   attach_function :rd_kafka_topic_result_error, [:pointer], :int32
+   attach_function :rd_kafka_topic_result_error_string, [:pointer], :pointer
+   attach_function :rd_kafka_topic_result_name, [:pointer], :pointer
    end
  end