karafka-rdkafka 0.12.0

Files changed (62)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data/.gitignore +8 -0
  4. data/.rspec +1 -0
  5. data/.semaphore/semaphore.yml +23 -0
  6. data/.yardopts +2 -0
  7. data/CHANGELOG.md +104 -0
  8. data/Gemfile +3 -0
  9. data/Guardfile +19 -0
  10. data/LICENSE +21 -0
  11. data/README.md +114 -0
  12. data/Rakefile +96 -0
  13. data/bin/console +11 -0
  14. data/docker-compose.yml +24 -0
  15. data/ext/README.md +18 -0
  16. data/ext/Rakefile +62 -0
  17. data/lib/rdkafka/abstract_handle.rb +82 -0
  18. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  19. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  20. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  21. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  22. data/lib/rdkafka/admin.rb +155 -0
  23. data/lib/rdkafka/bindings.rb +312 -0
  24. data/lib/rdkafka/callbacks.rb +106 -0
  25. data/lib/rdkafka/config.rb +299 -0
  26. data/lib/rdkafka/consumer/headers.rb +63 -0
  27. data/lib/rdkafka/consumer/message.rb +84 -0
  28. data/lib/rdkafka/consumer/partition.rb +49 -0
  29. data/lib/rdkafka/consumer/topic_partition_list.rb +164 -0
  30. data/lib/rdkafka/consumer.rb +565 -0
  31. data/lib/rdkafka/error.rb +86 -0
  32. data/lib/rdkafka/metadata.rb +92 -0
  33. data/lib/rdkafka/producer/client.rb +47 -0
  34. data/lib/rdkafka/producer/delivery_handle.rb +22 -0
  35. data/lib/rdkafka/producer/delivery_report.rb +26 -0
  36. data/lib/rdkafka/producer.rb +178 -0
  37. data/lib/rdkafka/version.rb +5 -0
  38. data/lib/rdkafka.rb +22 -0
  39. data/rdkafka.gemspec +36 -0
  40. data/spec/rdkafka/abstract_handle_spec.rb +113 -0
  41. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  42. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  43. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  44. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  45. data/spec/rdkafka/admin_spec.rb +203 -0
  46. data/spec/rdkafka/bindings_spec.rb +134 -0
  47. data/spec/rdkafka/callbacks_spec.rb +20 -0
  48. data/spec/rdkafka/config_spec.rb +182 -0
  49. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  50. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  51. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +223 -0
  52. data/spec/rdkafka/consumer_spec.rb +1008 -0
  53. data/spec/rdkafka/error_spec.rb +89 -0
  54. data/spec/rdkafka/metadata_spec.rb +78 -0
  55. data/spec/rdkafka/producer/client_spec.rb +145 -0
  56. data/spec/rdkafka/producer/delivery_handle_spec.rb +42 -0
  57. data/spec/rdkafka/producer/delivery_report_spec.rb +17 -0
  58. data/spec/rdkafka/producer_spec.rb +525 -0
  59. data/spec/spec_helper.rb +139 -0
  60. data.tar.gz.sig +0 -0
  61. metadata +277 -0
  62. metadata.gz.sig +0 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: fb0e8bec19a86f44453d32057d22c3958b4220e1d710564360b369905fd57d81
+   data.tar.gz: 645b3ab638f51780de30a21e133bd07b0e80fd33a1a59e604ec1dc064fc2b9c4
+ SHA512:
+   metadata.gz: 52120cbb353e7bf7af881f442fb7a36324a70b097969af2f0e855f99e820c0be88f26ab584a285a85408eda526a0e564e06420e8c4d8224e40ae7d00f70546ab
+   data.tar.gz: ba886f70d88433d6dce7247df1c6d3598d20642fd1a4c17cb045f0249f22b7264c329462e0e4ae986fa053dd2a7c3598b0379fe4924822484d60c9b6baaecc35
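These digests let a consumer of the gem verify the packaged files. A minimal, hypothetical verification sketch (assumes the `metadata.gz` and `data.tar.gz` extracted from the `.gem` archive sit next to `checksums.yaml` in the current directory):

```ruby
require "digest"
require "yaml"

# Compare the recorded SHA256 digests against the files unpacked
# from the .gem archive; file names follow the checksums.yaml keys.
checksums = YAML.load_file("checksums.yaml")
%w[metadata.gz data.tar.gz].each do |file|
  actual   = Digest::SHA256.file(file).hexdigest
  expected = checksums["SHA256"][file]
  puts "#{file}: #{actual == expected ? "OK" : "MISMATCH"}"
end
```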
checksums.yaml.gz.sig ADDED
@@ -0,0 +1,2 @@
+ [binary data: 2 lines of signature bytes omitted]
data/.gitignore ADDED
@@ -0,0 +1,8 @@
+ Gemfile.lock
+ ext/ports
+ ext/tmp
+ ext/librdkafka.*
+ *.gem
+ .yardoc
+ doc
+ coverage
data/.rspec ADDED
@@ -0,0 +1 @@
+ --format documentation
data/.semaphore/semaphore.yml ADDED
@@ -0,0 +1,23 @@
+ version: v1.0
+ name: Rdkafka Ruby
+
+ agent:
+   machine:
+     type: e1-standard-4
+     os_image: ubuntu1804
+
+ blocks:
+   - name: Run specs
+     task:
+       jobs:
+         - name: bundle exec rspec
+           matrix:
+             - env_var: RUBY_VERSION
+               values: [ "2.6.8", "2.7.4", "3.0.2", "jruby-9.3.1.0"]
+           commands:
+             - sem-version ruby $RUBY_VERSION
+             - checkout
+             - bundle install --path vendor/bundle
+             - cd ext && bundle exec rake && cd ..
+             - docker-compose up -d --no-recreate
+             - bundle exec rspec
data/.yardopts ADDED
@@ -0,0 +1,2 @@
+ --no-private
+ --markup=markdown
data/CHANGELOG.md ADDED
@@ -0,0 +1,104 @@
+ # 0.12.0
+ * Bump librdkafka to 1.9.0
+
+ # 0.11.0
+ * Upgrade librdkafka to 1.8.2
+ * Bump supported minimum Ruby version to 2.6
+ * Better homebrew path detection
+
+ # 0.10.0
+ * Upgrade librdkafka to 1.5.0
+ * Add error callback config
+
+ # 0.9.0
+ * Fixes for Ruby 3.0
+ * Allow any callable object for callbacks (gremerritt)
+ * Reduce memory allocations in Rdkafka::Producer#produce (jturkel)
+ * Use queue as log callback to avoid unsafe calls from trap context (breunigs)
+ * Allow passing in topic configuration on create_topic (dezka)
+ * Add each_batch method to consumer (mgrosso)
+
+ # 0.8.1
+ * Fix topic_flag behaviour and add tests for Metadata (geoff2k)
+ * Add topic admin interface (geoff2k)
+ * Raise an exception if @native_kafka is nil (geoff2k)
+ * Option to use zstd compression (jasonmartens)
+
+ # 0.8.0
+ * Upgrade librdkafka to 1.4.0
+ * Integrate librdkafka metadata API and add partition_key (by Adithya-copart)
+ * Ruby 2.7 compatibility fix (by Geoff Thé)
+ * Add error to delivery report (by Alex Stanovsky)
+ * Don't override CPPFLAGS and LDFLAGS if already set on Mac (by Hiroshi Hatake)
+ * Allow use of Rake 13.x and up (by Tomasz Pajor)
+
+ # 0.7.0
+ * Bump librdkafka to 1.2.0 (by rob-as)
+ * Allow customizing the wait time for delivery report availability (by mensfeld)
+
+ # 0.6.0
+ * Bump librdkafka to 1.1.0 (by Chris Gaffney)
+ * Implement seek (by breunigs)
+
+ # 0.5.0
+ * Bump librdkafka to 1.0.0 (by breunigs)
+ * Add cluster and member information (by dmexe)
+ * Support message headers for consumer & producer (by dmexe)
+ * Add consumer rebalance listener (by dmexe)
+ * Implement pause/resume partitions (by dmexe)
+
+ # 0.4.2
+ * Delivery callback for producer
+ * Document list param of commit method
+ * Use default Homebrew openssl location if present
+ * Consumer lag handles empty topics
+ * End iteration in consumer when it is closed
+ * Add support for storing message offsets
+ * Add missing runtime dependency to rake
+
+ # 0.4.1
+ * Bump librdkafka to 0.11.6
+
+ # 0.4.0
+ * Improvements in librdkafka archive download
+ * Add global statistics callback
+ * Use Time for timestamps, a potentially breaking change if you
+   rely on the previous behavior where an integer with the number
+   of milliseconds was returned.
+ * Bump librdkafka to 0.11.5
+ * Implement TopicPartitionList in Ruby so we don't have to keep
+   track of native objects.
+ * Support committing a topic partition list
+ * Add consumer assignment method
+
+ # 0.3.5
+ * Fix crash when not waiting for delivery handles
+ * Run specs on Ruby 2.5
+
+ # 0.3.4
+ * Bump librdkafka to 0.11.3
+
+ # 0.3.3
+ * Fix bug that prevented display of `RdkafkaError` message
+
+ # 0.3.2
+ * `add_topic` now supports using a partition count
+ * Add way to make errors clearer with an extra message
+ * Show topics in subscribe error message
+ * Show partition and topic in query watermark offsets error message
+
+ # 0.3.1
+ * Bump librdkafka to 0.11.1
+ * Officially support ranges in `add_topic` for topic partition list.
+ * Add consumer lag calculator
+
+ # 0.3.0
+ * Move both add topic methods to one `add_topic` in `TopicPartitionList`
+ * Add committed offsets to consumer
+ * Add query watermark offset to consumer
+
+ # 0.2.0
+ * Some refactoring and addition of inline documentation
+
+ # 0.1.x
+ * Initial working version including producing and consuming
data/Gemfile ADDED
@@ -0,0 +1,3 @@
+ source "https://rubygems.org"
+
+ gemspec
data/Guardfile ADDED
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ logger level: :error
+
+ guard :rspec, cmd: "bundle exec rspec --format #{ENV.fetch("FORMAT", "documentation")}" do
+   require "guard/rspec/dsl"
+   dsl = Guard::RSpec::Dsl.new(self)
+
+   # Ruby files
+   ruby = dsl.ruby
+   dsl.watch_spec_files_for(ruby.lib_files)
+   watch(%r{^lib/(.+)\.rb}) { |m| "spec/#{m[1]}_spec.rb" }
+
+   # RSpec files
+   rspec = dsl.rspec
+   watch(rspec.spec_helper) { rspec.spec_dir }
+   watch(rspec.spec_support) { rspec.spec_dir }
+   watch(rspec.spec_files)
+ end
data/LICENSE ADDED
@@ -0,0 +1,21 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2017 Thijs Cadier
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,114 @@
+ # Rdkafka
+
+ [![Build Status](https://appsignal.semaphoreci.com/badges/rdkafka-ruby/branches/master.svg?style=shields)](https://appsignal.semaphoreci.com/projects/rdkafka-ruby)
+ [![Gem Version](https://badge.fury.io/rb/rdkafka.svg)](https://badge.fury.io/rb/rdkafka)
+ [![Maintainability](https://api.codeclimate.com/v1/badges/ecb1765f81571cccdb0e/maintainability)](https://codeclimate.com/github/appsignal/rdkafka-ruby/maintainability)
+
+ The `rdkafka` gem is a modern Kafka client library for Ruby based on
+ [librdkafka](https://github.com/edenhill/librdkafka/).
+ It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
+ gem and targets Kafka 1.0+ and Ruby versions that are under security or
+ active maintenance. We remove Ruby versions from our CI builds when they
+ become EOL.
+
+ `rdkafka` was written because we needed a reliable Ruby client that
+ supports modern Kafka at [AppSignal](https://appsignal.com).
+ We run it in production on very high traffic systems.
+
+ This gem only provides a high-level Kafka consumer. If you are running
+ an older version of Kafka and/or need the legacy simple consumer we
+ suggest using the [Hermann](https://github.com/reiseburo/hermann) gem.
+
+ The most important pieces of a Kafka client are implemented. We're
+ working towards feature completeness; you can track progress here:
+ https://github.com/appsignal/rdkafka-ruby/milestone/1
+
+ ## Installation
+
+ This gem downloads and compiles librdkafka when it is installed. If you
+ have any problems installing the gem please open an issue.
+
+ ## Usage
+
+ See the [documentation](https://www.rubydoc.info/github/appsignal/rdkafka-ruby) for full details on how to use this gem. Two quick examples:
+
+ ### Consuming messages
+
+ Subscribe to a topic and get messages. Kafka will automatically spread
+ the available partitions over consumers with the same group id.
+
+ ```ruby
+ config = {
+   :"bootstrap.servers" => "localhost:9092",
+   :"group.id" => "ruby-test"
+ }
+ consumer = Rdkafka::Config.new(config).consumer
+ consumer.subscribe("ruby-test-topic")
+
+ consumer.each do |message|
+   puts "Message received: #{message}"
+ end
+ ```
+
+ ### Producing messages
+
+ Produce a number of messages, put the delivery handles in an array, and
+ wait for them before exiting. This way the messages are batched and
+ sent to Kafka efficiently.
+
+ ```ruby
+ config = {:"bootstrap.servers" => "localhost:9092"}
+ producer = Rdkafka::Config.new(config).producer
+ delivery_handles = []
+
+ 100.times do |i|
+   puts "Producing message #{i}"
+   delivery_handles << producer.produce(
+     topic: "ruby-test-topic",
+     payload: "Payload #{i}",
+     key: "Key #{i}"
+   )
+ end
+
+ delivery_handles.each(&:wait)
+ ```
+
+ Note that creating a producer consumes some resources that will not be
+ released until `#close` is explicitly called on it, so be sure to call
+ `Config#producer` only as necessary.
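A minimal sketch of that cleanup pattern (not part of the shipped README; the config and produce call mirror the example above):

```ruby
# Reuse a single producer across produce calls, then release its
# native librdkafka resources explicitly with #close when done.
config = {:"bootstrap.servers" => "localhost:9092"}
producer = Rdkafka::Config.new(config).producer
begin
  handle = producer.produce(topic: "ruby-test-topic", payload: "bye")
  handle.wait
ensure
  producer.close
end
```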
+
+ ## Development
+
+ A Docker Compose file is included to run Kafka and Zookeeper. To run
+ that:
+
+ ```
+ docker-compose up
+ ```
+
+ Run `bundle` and `cd ext && bundle exec rake && cd ..` to download and
+ compile `librdkafka`.
+
+ You can then run `bundle exec rspec` to run the tests. To see rdkafka
+ debug output:
+
+ ```
+ DEBUG_PRODUCER=true bundle exec rspec
+ DEBUG_CONSUMER=true bundle exec rspec
+ ```
+
+ After running the tests you can bring the cluster down to start with a
+ clean slate:
+
+ ```
+ docker-compose down
+ ```
+
+ ## Example
+
+ To see everything working, run these in separate tabs:
+
+ ```
+ bundle exec rake consume_messages
+ bundle exec rake produce_messages
+ ```
data/Rakefile ADDED
@@ -0,0 +1,96 @@
+ # Rakefile
+
+ require 'bundler/gem_tasks'
+ require "./lib/rdkafka"
+
+ desc 'Generate some message traffic'
+ task :produce_messages do
+   config = {:"bootstrap.servers" => "localhost:9092"}
+   if ENV["DEBUG"]
+     config[:debug] = "broker,topic,msg"
+   end
+   producer = Rdkafka::Config.new(config).producer
+
+   delivery_handles = []
+   100.times do |i|
+     puts "Producing message #{i}"
+     delivery_handles << producer.produce(
+       topic: "rake_test_topic",
+       payload: "Payload #{i} from Rake",
+       key: "Key #{i} from Rake"
+     )
+   end
+   puts 'Waiting for delivery'
+   delivery_handles.each(&:wait)
+   puts 'Done'
+ end
+
+ desc 'Consume some messages'
+ task :consume_messages do
+   config = {
+     :"bootstrap.servers" => "localhost:9092",
+     :"group.id" => "rake_test",
+     :"enable.partition.eof" => false,
+     :"auto.offset.reset" => "earliest",
+     :"statistics.interval.ms" => 10_000
+   }
+   if ENV["DEBUG"]
+     config[:debug] = "cgrp,topic,fetch"
+   end
+   Rdkafka::Config.statistics_callback = lambda do |stats|
+     puts stats
+   end
+   consumer = Rdkafka::Config.new(config).consumer
+   consumer.subscribe("rake_test_topic")
+   consumer.each do |message|
+     puts "Message received: #{message}"
+   end
+ end
+
+ desc 'Hammer down'
+ task :load_test do
+   puts "Starting load test"
+
+   config = Rdkafka::Config.new(
+     :"bootstrap.servers" => "localhost:9092",
+     :"group.id" => "load-test",
+     :"enable.partition.eof" => false
+   )
+
+   # Create a producer in a thread
+   Thread.new do
+     producer = config.producer
+     loop do
+       handles = []
+       1000.times do |i|
+         handles.push(producer.produce(
+           topic: "load_test_topic",
+           payload: "Payload #{i}",
+           key: "Key #{i}"
+         ))
+       end
+       handles.each(&:wait)
+       puts "Produced 1000 messages"
+     end
+   end.abort_on_exception = true
+
+   # Create three consumers in threads
+   3.times do |i|
+     Thread.new do
+       count = 0
+       consumer = config.consumer
+       consumer.subscribe("load_test_topic")
+       consumer.each do |message|
+         count += 1
+         if count % 1000 == 0
+           puts "Received 1000 messages in thread #{i}"
+         end
+       end
+     end.abort_on_exception = true
+   end
+
+   loop do
+     sleep 1
+   end
+ end
data/bin/console ADDED
@@ -0,0 +1,11 @@
+ #!/usr/bin/env ruby
+
+ # frozen_string_literal: true
+
+ ENV["IRBRC"] = File.join(File.dirname(__FILE__), ".irbrc")
+
+ require "bundler/setup"
+ require "rdkafka"
+
+ require "irb"
+ IRB.start(__FILE__)
data/docker-compose.yml ADDED
@@ -0,0 +1,24 @@
+ ---
+
+ version: '2'
+
+ services:
+   zookeeper:
+     image: confluentinc/cp-zookeeper:5.2.6
+     environment:
+       ZOOKEEPER_CLIENT_PORT: 2181
+       ZOOKEEPER_TICK_TIME: 2000
+
+   kafka:
+     image: confluentinc/cp-kafka:5.2.5-10
+     depends_on:
+       - zookeeper
+     ports:
+       - 9092:9092
+     environment:
+       KAFKA_BROKER_ID: 1
+       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:29092,PLAINTEXT_HOST://localhost:9092
+       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+       KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
data/ext/README.md ADDED
@@ -0,0 +1,18 @@
+ # Ext
+
+ This gem depends on the `librdkafka` C library. It is downloaded when
+ this gem is installed.
+
+ To update the `librdkafka` version, follow these steps:
+
+ * Go to https://github.com/edenhill/librdkafka/releases to get the new
+   version number and asset checksum for `tar.gz`.
+ * Change the version in `lib/rdkafka/version.rb`
+ * Change the `sha256` in `lib/rdkafka/version.rb` (both constants are
+   sketched after this list)
+ * Run `bundle exec rake` in the `ext` directory to download and build
+   the new version
+ * Run `docker-compose pull` in the main gem directory to ensure the docker
+   images used by the tests are up to date, and run `docker-compose up`
+ * Finally, run `bundle exec rspec` in the main gem directory to execute
+   the test suite and detect any regressions that may have been introduced
+   by the update
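For reference, a sketch of the constants those steps modify, as they would plausibly appear in `lib/rdkafka/version.rb` for this release. The two librdkafka constant names are taken from `ext/Rakefile` below; the checksum value is elided rather than invented:

```ruby
module Rdkafka
  VERSION = "0.12.0"
  # Version of the librdkafka source that ext/Rakefile downloads and builds
  LIBRDKAFKA_VERSION = "1.9.0"
  # SHA256 of the librdkafka tar.gz release asset for that version
  LIBRDKAFKA_SOURCE_SHA256 = "<sha256 of the tar.gz asset>"
end
```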
data/ext/Rakefile ADDED
@@ -0,0 +1,62 @@
+ require File.expand_path('../../lib/rdkafka/version', __FILE__)
+ require "mini_portile2"
+ require "fileutils"
+ require "open-uri"
+
+ task :default => :clean do
+   # Download and compile librdkafka
+   recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
+
+   # Use default homebrew openssl if we're on mac, the directory exists,
+   # and each of the flags is not already set
+   if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+     ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+     ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
+   end
+
+   recipe.files << {
+     :url => "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
+     :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
+   }
+   recipe.configure_options = ["--host=#{recipe.host}"]
+   recipe.cook
+   # Move the dynamic library we're interested in
+   if recipe.host.include?('darwin')
+     from_extension = '1.dylib'
+     to_extension = 'dylib'
+   else
+     from_extension = 'so.1'
+     to_extension = 'so'
+   end
+   lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
+   FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+   # Cleanup files created by miniportile that we don't need in the gem
+   FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+   FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
+ end
+
+ task :clean do
+   FileUtils.rm_f File.join(File.dirname(__FILE__), "librdkafka.dylib")
+   FileUtils.rm_f File.join(File.dirname(__FILE__), "librdkafka.so")
+   FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
+   FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+ end
+
+ namespace :build do
+   desc "Build librdkafka at the given git sha or tag"
+   task :git, [:ref] do |task, args|
+     ref = args[:ref]
+     version = "git-#{ref}"
+
+     recipe = MiniPortile.new("librdkafka", version)
+     recipe.files << "https://github.com/edenhill/librdkafka/archive/#{ref}.tar.gz"
+     recipe.configure_options = ["--host=#{recipe.host}", "--enable-static", "--enable-zstd"]
+     recipe.cook
+
+     ext = recipe.host.include?("darwin") ? "dylib" : "so"
+     lib = File.expand_path("ports/#{recipe.host}/librdkafka/#{version}/lib/librdkafka.#{ext}", __dir__)
+
+     # Copy will copy the content, following any symlinks
+     FileUtils.cp(lib, __dir__)
+   end
+ end
data/lib/rdkafka/abstract_handle.rb ADDED
@@ -0,0 +1,82 @@
+ require "ffi"
+
+ module Rdkafka
+   class AbstractHandle < FFI::Struct
+     # Subclasses must define their own layout, and the layout must start with:
+     #
+     # layout :pending, :bool,
+     #        :response, :int
+
+     REGISTRY = {}
+
+     CURRENT_TIME = -> { Process.clock_gettime(Process::CLOCK_MONOTONIC) }.freeze
+
+     private_constant :CURRENT_TIME
+
+     def self.register(handle)
+       address = handle.to_ptr.address
+       REGISTRY[address] = handle
+     end
+
+     def self.remove(address)
+       REGISTRY.delete(address)
+     end
+
+     # Whether the handle is still pending.
+     #
+     # @return [Boolean]
+     def pending?
+       self[:pending]
+     end
+
+     # Wait for the operation to complete, or raise an error if this takes longer than the timeout.
+     # A timeout does not mean the operation failed; rdkafka might still be working on it.
+     # In that case it is possible to call wait again.
+     #
+     # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out. If this is nil it does not time out.
+     # @param wait_timeout [Numeric] Amount of time to wait before rechecking whether the operation has completed
+     #
+     # @raise [RdkafkaError] When the operation failed
+     # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
+     #
+     # @return [Object] Operation-specific result
+     def wait(max_wait_timeout: 60, wait_timeout: 0.1)
+       timeout = if max_wait_timeout
+                   CURRENT_TIME.call + max_wait_timeout
+                 else
+                   nil
+                 end
+       loop do
+         if pending?
+           if timeout && timeout <= CURRENT_TIME.call
+             raise WaitTimeoutError.new("Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds")
+           end
+           sleep wait_timeout
+         elsif self[:response] != 0
+           raise_error
+         else
+           return create_result
+         end
+       end
+     end
+
+     # @return [String] the name of the operation (e.g. "delivery")
+     def operation_name
+       raise "Must be implemented by subclass!"
+     end
+
+     # @return [Object] operation-specific result
+     def create_result
+       raise "Must be implemented by subclass!"
+     end
+
+     # Allow subclasses to override
+     def raise_error
+       raise RdkafkaError.new(self[:response])
+     end
+
+     # Error that is raised when waiting for the handle to complete
+     # takes longer than the specified timeout.
+     class WaitTimeoutError < RuntimeError; end
+   end
+ end
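A short usage sketch of the `wait` API above, assuming `producer` is an `Rdkafka::Producer` (whose delivery handle is one concrete `AbstractHandle` subclass); the timeout values are illustrative:

```ruby
# wait polls pending? every wait_timeout seconds and raises
# WaitTimeoutError once max_wait_timeout elapses; a timeout does not
# mean the operation failed, so waiting again is safe.
handle = producer.produce(topic: "ruby-test-topic", payload: "hello")
begin
  report = handle.wait(max_wait_timeout: 5, wait_timeout: 0.1)
  puts "Delivered to partition #{report.partition} at offset #{report.offset}"
rescue Rdkafka::AbstractHandle::WaitTimeoutError
  retry # rdkafka may still complete the delivery
end
```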
data/lib/rdkafka/admin/create_topic_handle.rb ADDED
@@ -0,0 +1,27 @@
+ module Rdkafka
+   class Admin
+     class CreateTopicHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :error_string, :pointer,
+              :result_name, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "create topic"
+       end
+
+       # @return [CreateTopicReport] the result of the create topic operation
+       def create_result
+         CreateTopicReport.new(self[:error_string], self[:result_name])
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
+         )
+       end
+     end
+   end
+ end
data/lib/rdkafka/admin/create_topic_report.rb ADDED
@@ -0,0 +1,22 @@
+ module Rdkafka
+   class Admin
+     class CreateTopicReport
+       # Any error message generated from the CreateTopic
+       # @return [String]
+       attr_reader :error_string
+
+       # The name of the topic created
+       # @return [String]
+       attr_reader :result_name
+
+       def initialize(error_string, result_name)
+         if error_string != FFI::Pointer::NULL
+           @error_string = error_string.read_string
+         end
+         if result_name != FFI::Pointer::NULL
+           @result_name = result_name.read_string
+         end
+       end
+     end
+   end
+ end
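To place these admin classes in context, a hedged sketch of the calling side: `Admin#create_topic` returns a `CreateTopicHandle`, and `#wait` returns the `CreateTopicReport` above. The topic name and counts here are illustrative:

```ruby
# Create a topic through the admin API and wait on the handle;
# wait raises RdkafkaError (with the broker's error_string) on failure.
admin = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").admin
handle = admin.create_topic("example-topic", 3, 1) # name, partitions, replication factor
report = handle.wait(max_wait_timeout: 15)
puts "Created #{report.result_name}"
admin.close
```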