karafka-rdkafka 0.12.4 → 0.13.0.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +1 -1
  4. data/CHANGELOG.md +21 -2
  5. data/Gemfile +2 -0
  6. data/README.md +26 -0
  7. data/Rakefile +2 -0
  8. data/certs/cert_chain.pem +21 -21
  9. data/certs/karafka-pro.pem +11 -0
  10. data/ext/Rakefile +26 -53
  11. data/karafka-rdkafka.gemspec +2 -0
  12. data/lib/rdkafka/abstract_handle.rb +2 -0
  13. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  14. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  15. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  16. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  17. data/lib/rdkafka/admin.rb +95 -73
  18. data/lib/rdkafka/bindings.rb +52 -37
  19. data/lib/rdkafka/callbacks.rb +2 -0
  20. data/lib/rdkafka/config.rb +13 -10
  21. data/lib/rdkafka/consumer/headers.rb +24 -7
  22. data/lib/rdkafka/consumer/message.rb +3 -1
  23. data/lib/rdkafka/consumer/partition.rb +2 -0
  24. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  25. data/lib/rdkafka/consumer.rb +100 -44
  26. data/lib/rdkafka/error.rb +9 -0
  27. data/lib/rdkafka/metadata.rb +25 -2
  28. data/lib/rdkafka/native_kafka.rb +83 -0
  29. data/lib/rdkafka/producer/delivery_handle.rb +2 -0
  30. data/lib/rdkafka/producer/delivery_report.rb +3 -1
  31. data/lib/rdkafka/producer.rb +75 -12
  32. data/lib/rdkafka/version.rb +3 -1
  33. data/lib/rdkafka.rb +3 -1
  34. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  35. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  36. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  37. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  38. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  39. data/spec/rdkafka/admin_spec.rb +4 -3
  40. data/spec/rdkafka/bindings_spec.rb +2 -0
  41. data/spec/rdkafka/callbacks_spec.rb +2 -0
  42. data/spec/rdkafka/config_spec.rb +17 -2
  43. data/spec/rdkafka/consumer/headers_spec.rb +62 -0
  44. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  45. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  46. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  47. data/spec/rdkafka/consumer_spec.rb +124 -22
  48. data/spec/rdkafka/error_spec.rb +2 -0
  49. data/spec/rdkafka/metadata_spec.rb +2 -0
  50. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -34
  51. data/spec/rdkafka/producer/delivery_handle_spec.rb +2 -0
  52. data/spec/rdkafka/producer/delivery_report_spec.rb +4 -2
  53. data/spec/rdkafka/producer_spec.rb +118 -17
  54. data/spec/spec_helper.rb +17 -1
  55. data.tar.gz.sig +0 -0
  56. metadata +33 -33
  57. metadata.gz.sig +0 -0
  58. data/bin/console +0 -11
  59. data/dist/librdkafka_2.0.2.tar.gz +0 -0
  60. data/lib/rdkafka/producer/client.rb +0 -47
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: c4e203bd92d9f622715bce381d382a2e7f503331922d3237cd770e9f386755b6
- data.tar.gz: 721e511f93bfcc8ded44eeb6e368fb50e0cd42038702e4f3d51c4b022408a665
+ metadata.gz: 419dcef5e71a6578202fde7eacd628054ac77bae0c91c57845fd6dbd6ade6715
+ data.tar.gz: 05e7e1ef8e07c1b2dddf02f1e3f8e8e0da7dfcb75af41ccc2f9dcba2e30bba1f
  SHA512:
- metadata.gz: 37182da237f5f469f94554e3b07247f102fa3e7b64a8a492975bc1c663687615d4da1ad955a6f894ed229be7ea405bb2575e15571ace94ece563952e36a40874
- data.tar.gz: adf411222f06d391a95b661e5c3fe9006a5000d01f2be63d1d8cd238814c01d73ab31384617e9f5b484ff422a97b2da6d8eb9babc5eb501fd60f46079a803e21
+ metadata.gz: 4643afd749af09b9bd2c8d16ae25b989eb03feb39b12c966059956cad47966a8a74e6d0c356bfdd4115a3a419aebbab1d8421410e0bef64bd2f1611d66ba2ab6
+ data.tar.gz: 7f526e3bb375126862a84b7824e60872454366cfa7a2b5abb8d6e09f978423dc1e56e1c2a488f36f28ff622f942834b7d576d6fe91e4263cb5864165597c881f
checksums.yaml.gz.sig CHANGED
Binary file
data/.github/workflows/ci.yml CHANGED
@@ -47,7 +47,7 @@ jobs:
  GITHUB_COVERAGE: ${{matrix.coverage}}

  run: |
+ docker-compose up -d --no-recreate
  bundle install --path vendor/bundle
  cd ext && bundle exec rake && cd ..
- docker-compose up -d --no-recreate
  bundle exec rspec
data/CHANGELOG.md CHANGED
@@ -1,5 +1,22 @@
- ## 0.12.4 (2024-07-10)
- - [Fix] Switch to local release of librdkafka to mitigate its unavailability.
+ # 0.13.0
+ * Support cooperative sticky partition assignment in the rebalance callback (methodmissing)
+ * Support both string and symbol header keys (ColinDKelley)
+ * Handle tombstone messages properly (kgalieva)
+ * Add topic name to delivery report (maeve)
+ * Allow string partitioner config (mollyegibson)
+ * Fix documented type for DeliveryReport#error (jimmydo)
+ * Bump librdkafka to 2.0.2 (lmaia)
+ * Use finalizers to cleanly exit producer and admin (thijsc)
+ * Lock access to the native kafka client (thijsc)
+ * Fix potential race condition in multi-threaded producer (mensfeld)
+ * Fix leaking FFI resources in specs (mensfeld)
+ * Improve specs stability (mensfeld)
+ * Make metadata request timeout configurable (mensfeld)
+ * call_on_partitions_assigned and call_on_partitions_revoked only get a tpl passed in (thijsc)
+ * Support `#assignment_lost?` on a consumer to check for involuntary assignment revocation (mensfeld)
+ * Expose `#name` on the consumer and producer (mensfeld)
+ * Introduce producer partitions count metadata cache (mensfeld)
+ * Retry metadata fetches on certain errors with a backoff (mensfeld)

  # 0.12.3
  - Include backtrace in non-raised binded errors.
@@ -14,6 +31,8 @@

  # 0.12.0
  * Bumps librdkafka to 1.9.0
+ * Fix crash on empty partition key (mensfeld)
+ * Pass the delivery handle to the callback (gvisokinskas)

  # 0.11.0
  * Upgrade librdkafka to 1.8.2
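Several of the 0.13.0 entries above are user-facing API additions. A minimal sketch of how they surface to callers, assuming only the interfaces named in the changelog (`#name`, `#assignment_lost?`, and the topic name carried on the delivery report); broker address, topic, and group id are placeholders:

    require "rdkafka"

    config = Rdkafka::Config.new(
      "bootstrap.servers" => "localhost:9092",
      "group.id" => "example-group"
    )

    producer = config.producer
    puts producer.name # librdkafka client name, now exposed on the producer

    # 0.13.0 adds the topic name to the delivery report
    report = producer.produce(topic: "example", payload: "hello").wait
    puts report.topic_name

    consumer = config.consumer
    consumer.subscribe("example")
    # After a rebalance, check whether the assignment was revoked involuntarily
    puts consumer.assignment_lost?

    producer.close
    consumer.close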
data/Gemfile CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  source "https://rubygems.org"

  gemspec
data/README.md CHANGED
@@ -23,6 +23,19 @@ The most important pieces of a Kafka client are implemented. We're
  working towards feature completeness, you can track that here:
  https://github.com/appsignal/rdkafka-ruby/milestone/1

+ ## Table of content
+
+ - [Installation](#installation)
+ - [Usage](#usage)
+   * [Consuming messages](#consuming-messages)
+   * [Producing messages](#producing-messages)
+ - [Higher level libraries](#higher-level-libraries)
+   * [Message processing frameworks](#message-processing-frameworks)
+   * [Message publishing libraries](#message-publishing-libraries)
+ - [Development](#development)
+ - [Example](#example)
+
+
  ## Installation

  This gem downloads and compiles librdkafka when it is installed. If you
@@ -77,6 +90,19 @@ Note that creating a producer consumes some resources that will not be
  released until it `#close` is explicitly called, so be sure to call
  `Config#producer` only as necessary.

+ ## Higher level libraries
+
+ Currently, there are two actively developed frameworks based on rdkafka-ruby, that provide higher level API that can be used to work with Kafka messages and one library for publishing messages.
+
+ ### Message processing frameworks
+
+ * [Karafka](https://github.com/karafka/karafka) - Ruby and Rails efficient Kafka processing framework.
+ * [Racecar](https://github.com/zendesk/racecar) - A simple framework for Kafka consumers in Ruby
+
+ ### Message publishing libraries
+
+ * [WaterDrop](https://github.com/karafka/waterdrop) – Standalone Karafka library for producing Kafka messages.
+
  ## Development

  A Docker Compose file is included to run Kafka and Zookeeper. To run
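The README's note about producer resources is worth taking literally. A minimal lifecycle sketch, assuming only the documented `Config#producer`, `#produce`, and `#close` API (broker and topic are placeholders):

    require "rdkafka"

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
    begin
      # wait blocks until the broker acknowledges the delivery
      producer.produce(topic: "example", payload: "payload").wait
    ensure
      producer.close # releases the native librdkafka handle
    end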
data/Rakefile CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Rakefile

  require 'bundler/gem_tasks'
data/certs/cert_chain.pem CHANGED
@@ -1,26 +1,26 @@
  -----BEGIN CERTIFICATE-----
  MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
  YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
- MB4XDTIzMDgyMTA3MjU1NFoXDTI0MDgyMDA3MjU1NFowPzEQMA4GA1UEAwwHY29u
+ MB4XDTIyMDgxOTE3MjEzN1oXDTIzMDgxOTE3MjEzN1owPzEQMA4GA1UEAwwHY29u
  dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
- bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAOuZpyQKEwsTG9plLat7
- 8bUaNuNBEnouTsNMr6X+XTgvyrAxTuocdsyP1sNCjdS1B8RiiDH1/Nt9qpvlBWon
- sdJ1SYhaWNVfqiYStTDnCx3PRMmHRdD4KqUWKpN6VpZ1O/Zu+9Mw0COmvXgZuuO9
- wMSJkXRo6dTCfMedLAIxjMeBIxtoLR2e6Jm6MR8+8WYYVWrO9kSOOt5eKQLBY7aK
- b/Dc40EcJKPg3Z30Pia1M9ZyRlb6SOj6SKpHRqc7vbVQxjEw6Jjal1lZ49m3YZMd
- ArMAs9lQZNdSw5/UX6HWWURLowg6k10RnhTUtYyzO9BFev0JFJftHnmuk8vtb+SD
- 5VPmjFXg2VOcw0B7FtG75Vackk8QKfgVe3nSPhVpew2CSPlbJzH80wChbr19+e3+
- YGr1tOiaJrL6c+PNmb0F31NXMKpj/r+n15HwlTMRxQrzFcgjBlxf2XFGnPQXHhBm
- kp1OFnEq4GG9sON4glRldkwzi/f/fGcZmo5fm3d+0ZdNgwIDAQABo3cwdTAJBgNV
- HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUPVH5+dLA80A1kJ2Uz5iGwfOa
- 1+swHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
- bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAnpa0jcN7JzREHMTQ
- bfZ+xcvlrzuROMY6A3zIZmQgbnoZZNuX4cMRrT1p1HuwXpxdpHPw7dDjYqWw3+1h
- 3mXLeMuk7amjQpYoSWU/OIZMhIsARra22UN8qkkUlUj3AwTaChVKN/bPJOM2DzfU
- kz9vUgLeYYFfQbZqeI6SsM7ltilRV4W8D9yNUQQvOxCFxtLOetJ00fC/E7zMUzbK
- IBwYFQYsbI6XQzgAIPW6nGSYKgRhkfpmquXSNKZRIQ4V6bFrufa+DzD0bt2ZA3ah
- fMmJguyb5L2Gf1zpDXzFSPMG7YQFLzwYz1zZZvOU7/UCpQsHpID/YxqDp4+Dgb+Y
- qma0whX8UG/gXFV2pYWpYOfpatvahwi+A1TwPQsuZwkkhi1OyF1At3RY+hjSXyav
- AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
- msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
+ bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAODzeO3L6lxdATzMHKNW
+ jFA/GGunoPuylO/BMzy8RiQHh7VIvysAKs0tHhTx3g2D0STDpF+hcQcPELFikiT2
+ F+1wOHj/SsrK7VKqfA8+gq04hKc5sQoX2Egf9k3V0YJ3eZ6R/koHkQ8A0TVt0w6F
+ ZQckoV4MqnEAx0g/FZN3mnHTlJ3VFLSBqJEIe+S6FZMl92mSv+hTrlUG8VaYxSfN
+ lTCvnKk284F6QZq5XIENLRmcDd/3aPBLnLwNnyMyhB+6gK8cUO+CFlDO5tjo/aBA
+ rUnl++wGG0JooF1ed0v+evOn9KoMBG6rHewcf79qJbVOscbD8qSAmo+sCXtcFryr
+ KRMTB8gNbowJkFRJDEe8tfRy11u1fYzFg/qNO82FJd62rKAw2wN0C29yCeQOPRb1
+ Cw9Y4ZwK9VFNEcV9L+3pHTHn2XfuZHtDaG198VweiF6raFO4yiEYccodH/USP0L5
+ cbcCFtmu/4HDSxL1ByQXO84A0ybJuk3/+aPUSXe9C9U8fwIDAQABo3cwdTAJBgNV
+ HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUSlcEakb7gfn/5E2WY6z73BF/
+ iZkwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
+ bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEA1aS+E7RXJ1w9g9mJ
+ G0NzFxe64OEuENosNlvYQCbRKGCXAU1qqelYkBQHseRgRKxLICrnypRo9IEobyHa
+ vDnJ4r7Tsb34dleqQW2zY/obG+cia3Ym2JsegXWF7dDOzCXJ4FN8MFoT2jHlqLLw
+ yrap0YO5zx0GSQ0Dwy8h2n2v2vanMEeCx7iNm3ERgR5WuN5sjzWoz2A/JLEEcK0C
+ EnAGKCWAd1fuG8IemDjT1edsd5FyYR4bIX0m+99oDuFZyPiiIbalmyYiSBBp59Yb
+ Q0P8zeBi4OfwCZNcxqz0KONmw9JLNv6DgyEAH5xe/4JzhMEgvIRiPj0pHfA7oqQF
+ KUNqvD1KlxbEC+bZfE5IZhnqYLdld/Ksqd22FI1RBhiS1Ejfsj99LVIm9cBuZEY2
+ Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
+ MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
  -----END CERTIFICATE-----
data/certs/karafka-pro.pem CHANGED
@@ -0,0 +1,11 @@
+ -----BEGIN RSA PUBLIC KEY-----
+ MIIBigKCAYEApcd6ybskiNs9WUvBGVUE8GdWDehjZ9TyjSj/fDl/UcMYqY0R5YX9
+ tnYxEwZZRMdVltKWxr88Qmshh1IQz6CpJVbcfYjt/158pSGPm+AUua6tkLqIvZDM
+ ocFOMafmroI+BMuL+Zu5QH7HC2tkT16jclGYfMQkJjXVUQTk2UZr+94+8RlUz/CH
+ Y6hPA7xPgIyPfyPCxz1VWzAwXwT++NCJQPBr5MqT84LNSEzUSlR9pFNShf3UCUT+
+ 8LWOvjFSNGmMMSsbo2T7/+dz9/FM02YG00EO0x04qteggwcaEYLFrigDN6/fM0ih
+ BXZILnMUqC/qrfW2YFg4ZqKZJuxaALqqkPxrkBDYqoqcAloqn36jBSke6tc/2I/J
+ 2Afq3r53UoAbUH7h5I/L8YeaiA4MYjAuq724lHlrOmIr4D6yjYC0a1LGlPjLk869
+ 2nsVXNgomhVb071E6amR+rJJnfvkdZgCmEBFnqnBV5A1u4qgNsa2rVcD+gJRvb2T
+ aQtjlQWKPx5xAgMBAAE=
+ -----END RSA PUBLIC KEY-----
data/ext/Rakefile CHANGED
@@ -1,67 +1,40 @@
  # frozen_string_literal: true

  require File.expand_path('../../lib/rdkafka/version', __FILE__)
+ require "mini_portile2"
  require "fileutils"
  require "open-uri"

  task :default => :clean do
- # For nix users, nix can't locate the file paths because the packages it's requiring aren't managed by the system but are
- # managed by nix itself, so using the normal file paths doesn't work for nix users.
- #
- # Mini_portile causes an issue because it's dependencies are downloaded on the fly and therefore don't exist/aren't
- # accessible in the nix environment
- if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
- # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
- require "mini_portile2"
- recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
+ # Download and compile librdkafka
+ recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)

- # Use default homebrew openssl if we're on mac and the directory exists
- # and each of flags is not empty
- if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
- ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
- ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
- end
-
- releases = File.expand_path(File.join(File.dirname(__FILE__), '../dist'))
-
- recipe.files << {
- :url => "file://#{releases}/librdkafka_#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz",
- :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
- }
- recipe.configure_options = ["--host=#{recipe.host}"]
-
- # Disable using libc regex engine in favor of the embedded one
- # The default regex engine of librdkafka does not always work exactly as most of the users
- # would expect, hence this flag allows for changing it to the other one
- if ENV.key?('RDKAFKA_DISABLE_REGEX_EXT')
- recipe.configure_options << '--disable-regex-ext'
- end
+ # Use default homebrew openssl if we're on mac and the directory exists
+ # and each of flags is not empty
+ if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+ ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+ ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
+ end

- recipe.cook
- # Move dynamic library we're interested in
- if recipe.host.include?('darwin')
- from_extension = '1.dylib'
- to_extension = 'dylib'
- else
- from_extension = 'so.1'
- to_extension = 'so'
- end
- lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
- FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
- # Cleanup files created by miniportile we don't need in the gem
- FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
- FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
+ recipe.files << {
+ :url => "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
+ :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
+ }
+ recipe.configure_options = ["--host=#{recipe.host}"]
+ recipe.cook
+ # Move dynamic library we're interested in
+ if recipe.host.include?('darwin')
+ from_extension = '1.dylib'
+ to_extension = 'dylib'
  else
- # Otherwise, copy existing libraries to ./ext
- if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
- raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
- end
- files = [
- File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
- File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
- ]
- files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
+ from_extension = 'so.1'
+ to_extension = 'so'
  end
+ lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
+ FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+ # Cleanup files created by miniportile we don't need in the gem
+ FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+ FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
  end

  task :clean do
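The net effect of this hunk is that `rake` in ext/ once again downloads librdkafka from GitHub and compiles it with mini_portile2, dropping both the vendored `dist/` tarball and the `RDKAFKA_EXT_PATH` escape hatch that 0.12.4 added for nix. A hedged, standalone reduction of that flow (the version is pinned inline only for illustration; the gem derives it from `Rdkafka::LIBRDKAFKA_VERSION` and also verifies a sha256, omitted here):

    require "mini_portile2"

    version = "2.0.2"
    recipe = MiniPortile.new("librdkafka", version)
    recipe.files << "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{version}"
    recipe.configure_options = ["--host=#{recipe.host}"]
    recipe.cook # fetch, unpack, configure, make and install under ports/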
data/karafka-rdkafka.gemspec CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require File.expand_path('lib/rdkafka/version', __dir__)

  Gem::Specification.new do |gem|
data/lib/rdkafka/abstract_handle.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require "ffi"

  module Rdkafka
data/lib/rdkafka/admin/create_topic_handle.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Rdkafka
  class Admin
  class CreateTopicHandle < AbstractHandle
data/lib/rdkafka/admin/create_topic_report.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Rdkafka
  class Admin
  class CreateTopicReport
data/lib/rdkafka/admin/delete_topic_handle.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Rdkafka
  class Admin
  class DeleteTopicHandle < AbstractHandle
data/lib/rdkafka/admin/delete_topic_report.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Rdkafka
  class Admin
  class DeleteTopicReport
data/lib/rdkafka/admin.rb CHANGED
@@ -1,33 +1,31 @@
+ # frozen_string_literal: true
+
+ require "objspace"
+
  module Rdkafka
  class Admin
  # @private
  def initialize(native_kafka)
  @native_kafka = native_kafka
- @closing = false
-
- # Start thread to poll client for callbacks
- @polling_thread = Thread.new do
- loop do
- Rdkafka::Bindings.rd_kafka_poll(@native_kafka, 250)
- # Exit thread if closing and the poll queue is empty
- if @closing && Rdkafka::Bindings.rd_kafka_outq_len(@native_kafka) == 0
- break
- end
- end
- end
- @polling_thread.abort_on_exception = true
+
+ # Makes sure, that native kafka gets closed before it gets GCed by Ruby
+ ObjectSpace.define_finalizer(self, native_kafka.finalizer)
+ end
+
+ def finalizer
+ ->(_) { close }
  end

  # Close this admin instance
  def close
- return unless @native_kafka
-
- # Indicate to polling thread that we're closing
- @closing = true
- # Wait for the polling thread to finish up
- @polling_thread.join
- Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
- @native_kafka = nil
+ return if closed?
+ ObjectSpace.undefine_finalizer(self)
+ @native_kafka.close
+ end
+
+ # Whether this admin has closed
+ def closed?
+ @native_kafka.closed?
  end

  # Create a topic with the given partition count and replication factor
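This hunk swaps the admin's background polling thread for a GC finalizer. The detail that matters is that the registered proc comes from the NativeKafka object (`native_kafka.finalizer`) rather than from a block on the admin itself: in Ruby, a finalizer that closes over the object it is registered on keeps that object reachable, so it would never be collected and the finalizer would never fire. A standalone sketch of the pattern, using only core `ObjectSpace` (class names here are illustrative, not the gem's):

    require "objspace"

    class NativeHandle
      def initialize
        @closed = false
      end

      def closed?
        @closed
      end

      def close
        @closed = true
      end

      # Safe to register as a finalizer: captures the handle, not the wrapper
      def finalizer
        ->(_object_id) { close }
      end
    end

    class Client
      def initialize(handle)
        @handle = handle
        # Close the native handle if the client is GCed without #close
        ObjectSpace.define_finalizer(self, handle.finalizer)
      end

      def close
        return if @handle.closed?
        ObjectSpace.undefine_finalizer(self) # explicit close wins over GC
        @handle.close
      end
    end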
@@ -38,6 +36,7 @@ module Rdkafka
  #
  # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
  def create_topic(topic_name, partition_count, replication_factor, topic_config={})
+ closed_admin_check(__method__)

  # Create a rd_kafka_NewTopic_t representing the new topic
  error_buffer = FFI::MemoryPointer.from_string(" " * 256)
@@ -68,7 +67,9 @@ module Rdkafka
  topics_array_ptr.write_array_of_pointer(pointer_array)

  # Get a pointer to the queue that our request will be enqueued on
- queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+ queue_ptr = @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_queue_get_background(inner)
+ end
  if queue_ptr.null?
  Rdkafka::Bindings.rd_kafka_NewTopic_destroy(new_topic_ptr)
  raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
@@ -79,17 +80,21 @@ module Rdkafka
  create_topic_handle[:pending] = true
  create_topic_handle[:response] = -1
  CreateTopicHandle.register(create_topic_handle)
- admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATETOPICS)
+ admin_options_ptr = @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_AdminOptions_new(inner, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATETOPICS)
+ end
  Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, create_topic_handle.to_ptr)

  begin
- Rdkafka::Bindings.rd_kafka_CreateTopics(
- @native_kafka,
+ @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_CreateTopics(
+ inner,
  topics_array_ptr,
  1,
  admin_options_ptr,
  queue_ptr
- )
+ )
+ end
  rescue Exception
  CreateTopicHandle.remove(create_topic_handle.to_ptr.address)
  raise
@@ -108,6 +113,7 @@ module Rdkafka
  #
  # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
  def delete_topic(topic_name)
+ closed_admin_check(__method__)

  # Create a rd_kafka_DeleteTopic_t representing the topic to be deleted
  delete_topic_ptr = Rdkafka::Bindings.rd_kafka_DeleteTopic_new(FFI::MemoryPointer.from_string(topic_name))
@@ -118,7 +124,9 @@ module Rdkafka
  topics_array_ptr.write_array_of_pointer(pointer_array)

  # Get a pointer to the queue that our request will be enqueued on
- queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
+ queue_ptr = @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_queue_get_background(inner)
+ end
  if queue_ptr.null?
  Rdkafka::Bindings.rd_kafka_DeleteTopic_destroy(delete_topic_ptr)
  raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
@@ -129,17 +137,21 @@ module Rdkafka
  delete_topic_handle[:pending] = true
  delete_topic_handle[:response] = -1
  DeleteTopicHandle.register(delete_topic_handle)
- admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DELETETOPICS)
+ admin_options_ptr = @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_AdminOptions_new(inner, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DELETETOPICS)
+ end
  Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, delete_topic_handle.to_ptr)

  begin
- Rdkafka::Bindings.rd_kafka_DeleteTopics(
- @native_kafka,
+ @native_kafka.with_inner do |inner|
+ Rdkafka::Bindings.rd_kafka_DeleteTopics(
+ inner,
  topics_array_ptr,
  1,
  admin_options_ptr,
  queue_ptr
- )
+ )
+ end
  rescue Exception
  DeleteTopicHandle.remove(delete_topic_handle.to_ptr.address)
  raise
@@ -163,54 +175,64 @@ module Rdkafka
  #
  # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
  def create_partitions(topic_name, partition_count)
- error_buffer = FFI::MemoryPointer.from_string(" " * 256)
- new_partitions_ptr = Rdkafka::Bindings.rd_kafka_NewPartitions_new(
- FFI::MemoryPointer.from_string(topic_name),
- partition_count,
- error_buffer,
- 256
- )
- if new_partitions_ptr.null?
- raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
- end
+ closed_admin_check(__method__)
+
+ @native_kafka.with_inner do |inner|
+ error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+ new_partitions_ptr = Rdkafka::Bindings.rd_kafka_NewPartitions_new(
+ FFI::MemoryPointer.from_string(topic_name),
+ partition_count,
+ error_buffer,
+ 256
+ )
+ if new_partitions_ptr.null?
+ raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+ end

- pointer_array = [new_partitions_ptr]
- topics_array_ptr = FFI::MemoryPointer.new(:pointer)
- topics_array_ptr.write_array_of_pointer(pointer_array)
+ pointer_array = [new_partitions_ptr]
+ topics_array_ptr = FFI::MemoryPointer.new(:pointer)
+ topics_array_ptr.write_array_of_pointer(pointer_array)

- # Get a pointer to the queue that our request will be enqueued on
- queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(@native_kafka)
- if queue_ptr.null?
- Rdkafka::Bindings.rd_kafka_NewPartitions_destroy(new_partitions_ptr)
- raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
- end
+ # Get a pointer to the queue that our request will be enqueued on
+ queue_ptr = Rdkafka::Bindings.rd_kafka_queue_get_background(inner)
+ if queue_ptr.null?
+ Rdkafka::Bindings.rd_kafka_NewPartitions_destroy(new_partitions_ptr)
+ raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+ end

- # Create and register the handle we will return to the caller
- create_partitions_handle = CreatePartitionsHandle.new
- create_partitions_handle[:pending] = true
- create_partitions_handle[:response] = -1
- CreatePartitionsHandle.register(create_partitions_handle)
- admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(@native_kafka, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS)
- Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, create_partitions_handle.to_ptr)
+ # Create and register the handle we will return to the caller
+ create_partitions_handle = CreatePartitionsHandle.new
+ create_partitions_handle[:pending] = true
+ create_partitions_handle[:response] = -1
+ CreatePartitionsHandle.register(create_partitions_handle)
+ admin_options_ptr = Rdkafka::Bindings.rd_kafka_AdminOptions_new(inner, Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS)
+ Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, create_partitions_handle.to_ptr)
+
+ begin
+ Rdkafka::Bindings.rd_kafka_CreatePartitions(
+ inner,
+ topics_array_ptr,
+ 1,
+ admin_options_ptr,
+ queue_ptr
+ )
+ rescue Exception
+ CreatePartitionsHandle.remove(create_partitions_handle.to_ptr.address)
+ raise
+ ensure
+ Rdkafka::Bindings.rd_kafka_AdminOptions_destroy(admin_options_ptr)
+ Rdkafka::Bindings.rd_kafka_queue_destroy(queue_ptr)
+ Rdkafka::Bindings.rd_kafka_NewPartitions_destroy(new_partitions_ptr)
+ end

- begin
- Rdkafka::Bindings.rd_kafka_CreatePartitions(
- @native_kafka,
- topics_array_ptr,
- 1,
- admin_options_ptr,
- queue_ptr
- )
- rescue Exception
- CreatePartitionsHandle.remove(create_partitions_handle.to_ptr.address)
- raise
- ensure
- Rdkafka::Bindings.rd_kafka_AdminOptions_destroy(admin_options_ptr)
- Rdkafka::Bindings.rd_kafka_queue_destroy(queue_ptr)
- Rdkafka::Bindings.rd_kafka_NewPartitions_destroy(new_partitions_ptr)
+ create_partitions_handle
  end
+ end
+
+ private

- create_partitions_handle
+ def closed_admin_check(method)
+ raise Rdkafka::ClosedAdminError.new(method) if closed?
  end
  end
  end
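Every native call above now goes through `@native_kafka.with_inner`. The wrapper itself (data/lib/rdkafka/native_kafka.rb, +83 lines) is not shown in this diff; per the changelog entry "Lock access to the native kafka client", a plausible minimal shape is a mutex-guarded pointer, sketched here purely as an assumption, not the gem's actual source:

    # Hypothetical reduction of NativeKafka's locking idea; the real class
    # also owns polling and calls into librdkafka when closing.
    class GuardedHandle
      def initialize(inner)
        @inner = inner
        @mutex = Mutex.new
      end

      # Borrow the raw handle only while holding the lock, so threads cannot
      # race between a native call and #close
      def with_inner
        @mutex.synchronize { yield @inner unless @inner.nil? }
      end

      def closed?
        @inner.nil?
      end

      def close
        @mutex.synchronize { @inner = nil } # real code would destroy the handle here
      end
    end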