karafka-rdkafka 0.13.8 → 0.13.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.gitignore +4 -0
  4. data/.rspec +1 -0
  5. data/.ruby-gemset +1 -0
  6. data/.ruby-version +1 -0
  7. data/CHANGELOG.md +41 -32
  8. data/{LICENSE → MIT-LICENSE} +2 -1
  9. data/README.md +11 -11
  10. data/dist/librdkafka_2.2.0.tar.gz +0 -0
  11. data/ext/README.md +1 -1
  12. data/ext/Rakefile +53 -26
  13. data/lib/rdkafka/abstract_handle.rb +37 -24
  14. data/lib/rdkafka/admin.rb +6 -7
  15. data/lib/rdkafka/bindings.rb +0 -4
  16. data/lib/rdkafka/config.rb +30 -15
  17. data/lib/rdkafka/consumer/headers.rb +2 -4
  18. data/lib/rdkafka/consumer.rb +50 -53
  19. data/lib/rdkafka/helpers/time.rb +14 -0
  20. data/lib/rdkafka/producer.rb +8 -15
  21. data/lib/rdkafka/version.rb +1 -1
  22. data/lib/rdkafka.rb +10 -1
  23. data/spec/rdkafka/abstract_handle_spec.rb +0 -2
  24. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
  25. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
  26. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
  27. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
  28. data/spec/rdkafka/admin_spec.rb +0 -1
  29. data/spec/rdkafka/bindings_spec.rb +0 -1
  30. data/spec/rdkafka/callbacks_spec.rb +0 -2
  31. data/spec/rdkafka/config_spec.rb +8 -2
  32. data/spec/rdkafka/consumer/headers_spec.rb +0 -2
  33. data/spec/rdkafka/consumer/message_spec.rb +0 -2
  34. data/spec/rdkafka/consumer/partition_spec.rb +0 -2
  35. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -2
  36. data/spec/rdkafka/consumer_spec.rb +47 -1
  37. data/spec/rdkafka/error_spec.rb +0 -2
  38. data/spec/rdkafka/metadata_spec.rb +0 -1
  39. data/spec/rdkafka/native_kafka_spec.rb +0 -2
  40. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
  41. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
  42. data/spec/rdkafka/producer_spec.rb +0 -1
  43. data.tar.gz.sig +3 -2
  44. metadata +8 -4
  45. metadata.gz.sig +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 4082c381a9d131273cc005b61b06f0e4c73b27044213d730fa5c7faeec606e07
- data.tar.gz: 06754bdba16fc3feaf648670e766cdbbc60342b1a43902ef08e42a5acfd4b2ac
+ metadata.gz: a78b060310c1f99afc9955f17d3a2fe92d5252cc7aec870c3fa34b20e543385b
+ data.tar.gz: 5714200f1cc64e0b1b7f4f5118f5a10010055ceb1232986fee4caae952199c44
  SHA512:
- metadata.gz: 89c0b5078f97c31d0e209ebbf13279a2d079aef35f7b4c2a4bf92cac17a69ccfed1769f84d1d06ca734c9a95174b0b5fa4fa9b6933f63e30fe71c8493bda4ed1
- data.tar.gz: 490a160689be7c4e261b2b8ff1ffdbcb5aec53cfe1dd0acabd0da33eff708cb7a1d09afe3c3a6752fc9a6345c005f1f0919604035a97240c5f9ed120870e802c
+ metadata.gz: 33338b74897b69cc42f370c57803b048bb6055617509518fba2accd705bb674b09bc311f43116669b135d8e6d5263746c40371d7935bba50e934d1254f50f4a8
+ data.tar.gz: 9b77936f7fa8a53474e04f79ed47e9bedba8524311da0d4bc501261508b6d5e6bf4c02a2202a9b3d41126fe98ce0d03dad5e663a9f73777cf48c4c2644ef4295
checksums.yaml.gz.sig CHANGED
Binary file
data/.gitignore CHANGED
@@ -1,3 +1,6 @@
+ # Ignore bundler config.
+ /.bundle
+
  Gemfile.lock
  ext/ports
  ext/tmp
@@ -6,3 +9,4 @@ ext/librdkafka.*
  .yardoc
  doc
  coverage
+ vendor
data/.rspec CHANGED
@@ -1 +1,2 @@
+ --require spec_helper
  --format documentation
data/.ruby-gemset ADDED
@@ -0,0 +1 @@
+ rdkafka-ruby
data/.ruby-version ADDED
@@ -0,0 +1 @@
+ 3.2.2
data/CHANGELOG.md CHANGED
@@ -1,33 +1,42 @@
- # 0.13.8 (2023-10-31)
+ # Rdkafka Changelog
+
+ ## 0.13.10 (2024-07-10)
+ - [Fix] Switch to local release of librdkafka to mitigate its unavailability.
+
+ ## 0.13.9 (2023-11-07)
+ - [Enhancement] Expose alternative way of managing consumer events via a separate queue.
+ - [Enhancement] Allow for setting `statistics_callback` as nil to reset predefined settings configured by a different gem.
+
+ ## 0.13.8 (2023-10-31)
  - [Enhancement] Get consumer position (thijsc & mensfeld)

- # 0.13.7 (2023-10-31)
+ ## 0.13.7 (2023-10-31)
  - [Change] Drop support for Ruby 2.6 due to incompatibilities in usage of `ObjectSpace::WeakMap`
  - [Fix] Fix dangling Opaque references.

- # 0.13.6 (2023-10-17)
+ ## 0.13.6 (2023-10-17)
  * **[Feature]** Support transactions API in the producer
  * [Enhancement] Add `raise_response_error` flag to the `Rdkafka::AbstractHandle`.
  * [Enhancement] Provide `#purge` to remove any outstanding requests from the producer.
  * [Enhancement] Fix `#flush` does not handle the timeouts errors by making it return true if all flushed or false if failed. We do **not** raise an exception here to keep it backwards compatible.

- # 0.13.5
+ ## 0.13.5
  * Fix DeliveryReport `create_result#error` being nil despite an error being associated with it

- # 0.13.4
+ ## 0.13.4
  * Always call initial poll on librdkafka to make sure oauth bearer cb is handled pre-operations.

- # 0.13.3
+ ## 0.13.3
  * Bump librdkafka to 2.2.0

- # 0.13.2
+ ## 0.13.2
  * Ensure operations counter decrement is fully thread-safe
  * Bump librdkafka to 2.1.1

- # 0.13.1
+ ## 0.13.1
  * Add offsets_for_times method on consumer (timflapper)

- # 0.13.0
+ ## 0.13.0 (2023-07-24)
  * Support cooperative sticky partition assignment in the rebalance callback (methodmissing)
  * Support both string and symbol header keys (ColinDKelley)
  * Handle tombstone messages properly (kgalieva)
@@ -48,32 +57,32 @@
  * Retry metadta fetches on certain errors with a backoff (mensfeld)
  * Do not lock access to underlying native kafka client and rely on Karafka granular locking (mensfeld)

- # 0.12.3
+ ## 0.12.3
  - Include backtrace in non-raised binded errors.
  - Include topic name in the delivery reports

- # 0.12.2
+ ## 0.12.2
  * Increase the metadata default timeout from 250ms to 2 seconds. This should allow for working with remote clusters.

- # 0.12.1
+ ## 0.12.1
  * Bumps librdkafka to 2.0.2 (lmaia)
  * Add support for adding more partitions via Admin API

- # 0.12.0
+ ## 0.12.0 (2022-06-17)
  * Bumps librdkafka to 1.9.0
  * Fix crash on empty partition key (mensfeld)
  * Pass the delivery handle to the callback (gvisokinskas)

- # 0.11.0
+ ## 0.11.0 (2021-11-17)
  * Upgrade librdkafka to 1.8.2
  * Bump supported minimum Ruby version to 2.6
  * Better homebrew path detection

- # 0.10.0
+ ## 0.10.0 (2021-09-07)
  * Upgrade librdkafka to 1.5.0
  * Add error callback config

- # 0.9.0
+ ## 0.9.0 (2021-06-23)
  * Fixes for Ruby 3.0
  * Allow any callable object for callbacks (gremerritt)
  * Reduce memory allocations in Rdkafka::Producer#produce (jturkel)
@@ -81,13 +90,13 @@
  * Allow passing in topic configuration on create_topic (dezka)
  * Add each_batch method to consumer (mgrosso)

- # 0.8.1
+ ## 0.8.1 (2020-12-07)
  * Fix topic_flag behaviour and add tests for Metadata (geoff2k)
  * Add topic admin interface (geoff2k)
  * Raise an exception if @native_kafka is nil (geoff2k)
  * Option to use zstd compression (jasonmartens)

- # 0.8.0
+ ## 0.8.0 (2020-06-02)
  * Upgrade librdkafka to 1.4.0
  * Integrate librdkafka metadata API and add partition_key (by Adithya-copart)
  * Ruby 2.7 compatibility fix (by Geoff Thé)A
@@ -95,22 +104,22 @@
  * Don't override CPPFLAGS and LDFLAGS if already set on Mac (by Hiroshi Hatake)
  * Allow use of Rake 13.x and up (by Tomasz Pajor)

- # 0.7.0
+ ## 0.7.0 (2019-09-21)
  * Bump librdkafka to 1.2.0 (by rob-as)
  * Allow customizing the wait time for delivery report availability (by mensfeld)

- # 0.6.0
+ ## 0.6.0 (2019-07-23)
  * Bump librdkafka to 1.1.0 (by Chris Gaffney)
  * Implement seek (by breunigs)

- # 0.5.0
+ ## 0.5.0 (2019-04-11)
  * Bump librdkafka to 1.0.0 (by breunigs)
  * Add cluster and member information (by dmexe)
  * Support message headers for consumer & producer (by dmexe)
  * Add consumer rebalance listener (by dmexe)
  * Implement pause/resume partitions (by dmexe)

- # 0.4.2
+ ## 0.4.2 (2019-01-12)
  * Delivery callback for producer
  * Document list param of commit method
  * Use default Homebrew openssl location if present
@@ -119,10 +128,10 @@
  * Add support for storing message offsets
  * Add missing runtime dependency to rake

- # 0.4.1
+ ## 0.4.1 (2018-10-19)
  * Bump librdkafka to 0.11.6

- # 0.4.0
+ ## 0.4.0 (2018-09-24)
  * Improvements in librdkafka archive download
  * Add global statistics callback
  * Use Time for timestamps, potentially breaking change if you
@@ -134,34 +143,34 @@
  * Support committing a topic partition list
  * Add consumer assignment method

- # 0.3.5
+ ## 0.3.5 (2018-01-17)
  * Fix crash when not waiting for delivery handles
  * Run specs on Ruby 2.5

- # 0.3.4
+ ## 0.3.4 (2017-12-05)
  * Bump librdkafka to 0.11.3

- # 0.3.3
+ ## 0.3.3 (2017-10-27)
  * Fix bug that prevent display of `RdkafkaError` message

- # 0.3.2
+ ## 0.3.2 (2017-10-25)
  * `add_topic` now supports using a partition count
  * Add way to make errors clearer with an extra message
  * Show topics in subscribe error message
  * Show partition and topic in query watermark offsets error message

- # 0.3.1
+ ## 0.3.1 (2017-10-23)
  * Bump librdkafka to 0.11.1
  * Officially support ranges in `add_topic` for topic partition list.
  * Add consumer lag calculator

- # 0.3.0
+ ## 0.3.0 (2017-10-17)
  * Move both add topic methods to one `add_topic` in `TopicPartitionList`
  * Add committed offsets to consumer
  * Add query watermark offset to consumer

- # 0.2.0
+ ## 0.2.0 (2017-10-13)
  * Some refactoring and add inline documentation

- # 0.1.x
+ ## 0.1.x (2017-09-10)
  * Initial working version including producing and consuming
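As a usage note on the 0.13.9 entries above (not part of the diff): a minimal sketch of the `statistics_callback` reset, assuming a 0.13.9+ gem; the callback body is illustrative.

```ruby
require "rdkafka"

# Another gem may have registered a global statistics callback beforehand.
Rdkafka::Config.statistics_callback = ->(stats) { puts stats["name"] }

# As of 0.13.9, assigning nil clears the predefined callback.
Rdkafka::Config.statistics_callback = nil
```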
data/{LICENSE → MIT-LICENSE} RENAMED
@@ -1,6 +1,7 @@
  The MIT License (MIT)

- Copyright (c) 2017 Thijs Cadier
+ Copyright (c) 2017-2023 Thijs Cadier
+ 2023, Maciej Mensfeld

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
data/README.md CHANGED
@@ -10,16 +10,16 @@
  ---

  The `rdkafka` gem is a modern Kafka client library for Ruby based on
- [librdkafka](https://github.com/edenhill/librdkafka/).
+ [librdkafka](https://github.com/confluentinc/librdkafka/).
  It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
- gem and targets Kafka 1.0+ and Ruby versions that are under security or
- active maintenance. We remove Ruby version from our CI builds if they
+ gem and targets Kafka 1.0+ and Ruby versions under security or
+ active maintenance. We remove a Ruby version from our CI builds when they
  become EOL.

  `rdkafka` was written because of the need for a reliable Ruby client for Kafka that supports modern Kafka at [AppSignal](https://appsignal.com). AppSignal runs it in production on very high-traffic systems.

  The most important pieces of a Kafka client are implemented. We're
- working towards feature completeness, you can track that here:
+ working towards feature completeness. You can track that here:
  https://github.com/appsignal/rdkafka-ruby/milestone/1

  ## Table of content
@@ -38,7 +38,7 @@ https://github.com/appsignal/rdkafka-ruby/milestone/1
  ## Installation

  This gem downloads and compiles librdkafka when it is installed. If you
- have any problems installing the gem please open an issue.
+ If you have any problems installing the gem, please open an issue.

  ## Usage

@@ -64,9 +64,9 @@ end

  ### Producing messages

- Produce a number of messages, put the delivery handles in an array and
+ Produce a number of messages, put the delivery handles in an array, and
  wait for them before exiting. This way the messages will be batched and
- sent to Kafka in an efficient way.
+ efficiently sent to Kafka.

  ```ruby
  config = {:"bootstrap.servers" => "localhost:9092"}
@@ -91,7 +91,7 @@ released until it `#close` is explicitly called, so be sure to call

  ## Higher level libraries

- Currently, there are two actively developed frameworks based on rdkafka-ruby, that provide higher level API that can be used to work with Kafka messages and one library for publishing messages.
+ Currently, there are two actively developed frameworks based on rdkafka-ruby, that provide higher-level API that can be used to work with Kafka messages and one library for publishing messages.

  ### Message processing frameworks

@@ -104,7 +104,7 @@ Currently, there are two actively developed frameworks based on rdkafka-ruby, th

  ## Development

- A Docker Compose file is included to run Kafka and Zookeeper. To run
+ A Docker Compose file is included to run Kafka. To run
  that:

  ```
@@ -122,7 +122,7 @@ DEBUG_PRODUCER=true bundle exec rspec
  DEBUG_CONSUMER=true bundle exec rspec
  ```

- After running the tests you can bring the cluster down to start with a
+ After running the tests, you can bring the cluster down to start with a
  clean slate:

  ```
@@ -131,7 +131,7 @@ docker-compose down

  ## Example

- To see everything working run these in separate tabs:
+ To see everything working, run these in separate tabs:

  ```
  bundle exec rake consume_messages
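As a usage note (not part of the diff): the batching pattern described in the README hunk above, sketched in full; broker address and topic name are illustrative.

```ruby
require "rdkafka"

config = { :"bootstrap.servers" => "localhost:9092" }
producer = Rdkafka::Config.new(config).producer

# Collect the delivery handles so messages are batched, then wait on them
# before exiting.
handles = 10.times.map do |i|
  producer.produce(topic: "ruby-test-topic", payload: "Payload #{i}")
end

handles.each(&:wait)
producer.close
```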
data/dist/librdkafka_2.2.0.tar.gz CHANGED
Binary file
data/ext/README.md CHANGED
@@ -5,7 +5,7 @@ this gem is installed.

  To update the `librdkafka` version follow the following steps:

- * Go to https://github.com/edenhill/librdkafka/releases to get the new
+ * Go to https://github.com/confluentinc/librdkafka/releases to get the new
  version number and asset checksum for `tar.gz`.
  * Change the version in `lib/rdkafka/version.rb`
  * Change the `sha256` in `lib/rdkafka/version.rb`
data/ext/Rakefile CHANGED
@@ -1,40 +1,67 @@
  # frozen_string_literal: true

  require File.expand_path('../../lib/rdkafka/version', __FILE__)
- require "mini_portile2"
  require "fileutils"
  require "open-uri"

  task :default => :clean do
- # Download and compile librdkafka
- recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
+ # For nix users, nix can't locate the file paths because the packages it's requiring aren't managed by the system but are
+ # managed by nix itself, so using the normal file paths doesn't work for nix users.
+ #
+ # Mini_portile causes an issue because it's dependencies are downloaded on the fly and therefore don't exist/aren't
+ # accessible in the nix environment
+ if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
+ # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
+ require "mini_portile2"
+ recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)

- # Use default homebrew openssl if we're on mac and the directory exists
- # and each of flags is not empty
- if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
- ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
- ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
- end
+ # Use default homebrew openssl if we're on mac and the directory exists
+ # and each of flags is not empty
+ if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+ ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+ ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
+ end
+
+ releases = File.expand_path(File.join(File.dirname(__FILE__), '../dist'))
+
+ recipe.files << {
+ :url => "file://#{releases}/librdkafka_#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz",
+ :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
+ }
+ recipe.configure_options = ["--host=#{recipe.host}"]
+
+ # Disable using libc regex engine in favor of the embedded one
+ # The default regex engine of librdkafka does not always work exactly as most of the users
+ # would expect, hence this flag allows for changing it to the other one
+ if ENV.key?('RDKAFKA_DISABLE_REGEX_EXT')
+ recipe.configure_options << '--disable-regex-ext'
+ end

- recipe.files << {
- :url => "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
- :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
- }
- recipe.configure_options = ["--host=#{recipe.host}"]
- recipe.cook
- # Move dynamic library we're interested in
- if recipe.host.include?('darwin')
- from_extension = '1.dylib'
- to_extension = 'dylib'
+ recipe.cook
+ # Move dynamic library we're interested in
+ if recipe.host.include?('darwin')
+ from_extension = '1.dylib'
+ to_extension = 'dylib'
+ else
+ from_extension = 'so.1'
+ to_extension = 'so'
+ end
+ lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
+ FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+ # Cleanup files created by miniportile we don't need in the gem
+ FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+ FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
  else
- from_extension = 'so.1'
- to_extension = 'so'
+ # Otherwise, copy existing libraries to ./ext
+ if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
+ raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
+ end
+ files = [
+ File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
+ File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
+ ]
+ files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
  end
- lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
- FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
- # Cleanup files created by miniportile we don't need in the gem
- FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
- FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
  end

  task :clean do
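As a usage note (not part of the diff): with the branch added above, the build only invokes mini_portile2 when `RDKAFKA_EXT_PATH` is unset; under nix you point it at a prefix that already contains the compiled library. A sketch with a hypothetical store path:

```ruby
# Hypothetical prefix; it must contain lib/librdkafka.so or lib/librdkafka.dylib,
# which the Rakefile copies into ext/ instead of downloading and compiling.
ENV["RDKAFKA_EXT_PATH"] = "/nix/store/example-librdkafka-2.2.0"
```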
data/lib/rdkafka/abstract_handle.rb CHANGED
@@ -1,28 +1,37 @@
  # frozen_string_literal: true

- require "ffi"
-
  module Rdkafka
+ # This class serves as an abstract base class to represent handles within the Rdkafka module.
+ # As a subclass of `FFI::Struct`, this class provides a blueprint for other specific handle
+ # classes to inherit from, ensuring they adhere to a particular structure and behavior.
+ #
+ # Subclasses must define their own layout, and the layout must start with:
+ #
+ # layout :pending, :bool,
+ # :response, :int
  class AbstractHandle < FFI::Struct
- # Subclasses must define their own layout, and the layout must start with:
- #
- # layout :pending, :bool,
- # :response, :int
+ include Helpers::Time

+ # Registry for registering all the handles.
  REGISTRY = {}

- CURRENT_TIME = -> { Process.clock_gettime(Process::CLOCK_MONOTONIC) }.freeze
-
- private_constant :CURRENT_TIME
+ class << self
+ # Adds handle to the register
+ #
+ # @param handle [AbstractHandle] any handle we want to register
+ def register(handle)
+ address = handle.to_ptr.address
+ REGISTRY[address] = handle
+ end

- def self.register(handle)
- address = handle.to_ptr.address
- REGISTRY[address] = handle
+ # Removes handle from the register based on the handle address
+ #
+ # @param address [Integer] address of the registered handle we want to remove
+ def remove(address)
+ REGISTRY.delete(address)
+ end
  end

- def self.remove(address)
- REGISTRY.delete(address)
- end

  # Whether the handle is still pending.
  #
@@ -32,27 +41,31 @@ module Rdkafka
  end

  # Wait for the operation to complete or raise an error if this takes longer than the timeout.
- # If there is a timeout this does not mean the operation failed, rdkafka might still be working on the operation.
- # In this case it is possible to call wait again.
+ # If there is a timeout this does not mean the operation failed, rdkafka might still be working
+ # on the operation. In this case it is possible to call wait again.
  #
- # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out. If this is nil it does not time out.
- # @param wait_timeout [Numeric] Amount of time we should wait before we recheck if the operation has completed
+ # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
+ # If this is nil it does not time out.
+ # @param wait_timeout [Numeric] Amount of time we should wait before we recheck if the
+ # operation has completed
  # @param raise_response_error [Boolean] should we raise error when waiting finishes
  #
+ # @return [Object] Operation-specific result
+ #
  # @raise [RdkafkaError] When the operation failed
  # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
- #
- # @return [Object] Operation-specific result
  def wait(max_wait_timeout: 60, wait_timeout: 0.1, raise_response_error: true)
  timeout = if max_wait_timeout
- CURRENT_TIME.call + max_wait_timeout
+ monotonic_now + max_wait_timeout
  else
  nil
  end
  loop do
  if pending?
- if timeout && timeout <= CURRENT_TIME.call
- raise WaitTimeoutError.new("Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds")
+ if timeout && timeout <= monotonic_now
+ raise WaitTimeoutError.new(
+ "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
+ )
  end
  sleep wait_timeout
  elsif self[:response] != 0 && raise_response_error
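As a usage note (not part of the diff): a minimal sketch of the `#wait` contract documented above, using a producer delivery handle; broker, topic, and timeouts are illustrative.

```ruby
require "rdkafka"

producer = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").producer
handle = producer.produce(topic: "ruby-test-topic", payload: "hello")

begin
  report = handle.wait(max_wait_timeout: 5, wait_timeout: 0.1)
  puts "Delivered to partition #{report.partition} at offset #{report.offset}"
rescue Rdkafka::AbstractHandle::WaitTimeoutError
  # A timeout does not mean the operation failed; rdkafka may still be
  # working on it, so calling #wait again is allowed.
  retry
end
```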
data/lib/rdkafka/admin.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "objspace"
-
  module Rdkafka
  class Admin
  # @private
@@ -30,11 +28,12 @@ module Rdkafka

  # Create a topic with the given partition count and replication factor
  #
+ # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of
+ # creating the topic
+ #
  # @raise [ConfigError] When the partition count or replication factor are out of valid range
  # @raise [RdkafkaError] When the topic name is invalid or the topic already exists
  # @raise [RdkafkaError] When the topic configuration is invalid
- #
- # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
  def create_topic(topic_name, partition_count, replication_factor, topic_config={})
  closed_admin_check(__method__)

@@ -107,11 +106,11 @@ module Rdkafka
  create_topic_handle
  end

- # Delete the named topic
+ # Deletes the named topic
  #
+ # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of
+ # deleting the topic
  # @raise [RdkafkaError] When the topic name is invalid or the topic does not exist
- #
- # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
  def delete_topic(topic_name)
  closed_admin_check(__method__)

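As a usage note (not part of the diff): a minimal sketch of the admin flow these docs describe; topic name, partition count, replication factor, and timeouts are illustrative.

```ruby
require "rdkafka"

admin = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").admin

create_handle = admin.create_topic("example-topic", 3, 1)
create_handle.wait(max_wait_timeout: 15) # raises RdkafkaError on failure

delete_handle = admin.delete_topic("example-topic")
delete_handle.wait(max_wait_timeout: 15)

admin.close
```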
data/lib/rdkafka/bindings.rb CHANGED
@@ -1,9 +1,5 @@
  # frozen_string_literal: true

- require "ffi"
- require "json"
- require "logger"
-
  module Rdkafka
  # @private
  #
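The `require` removals here and in the files above line up with the `+10 -1` change to `data/lib/rdkafka.rb` in the file list; presumably the shared requires were consolidated there, roughly as follows (inferred, not shown in this diff):

```ruby
# Top of lib/rdkafka.rb (sketch): gem-wide requires loaded once
require "logger"
require "objspace"
require "ffi"
require "json"
```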