waterdrop 2.8.15 → 2.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. checksums.yaml +4 -4
  2. data/.gitignore +1 -2
  3. data/.rubocop.yml +48 -0
  4. data/.ruby-version +1 -1
  5. data/.yard-lint.yml +172 -72
  6. data/CHANGELOG.md +19 -0
  7. data/Gemfile +9 -9
  8. data/Gemfile.lint +14 -0
  9. data/Gemfile.lint.lock +108 -0
  10. data/Gemfile.lock +52 -76
  11. data/README.md +1 -1
  12. data/Rakefile +14 -2
  13. data/bin/integrations +31 -30
  14. data/bin/verify_topics_naming +8 -8
  15. data/config/locales/errors.yml +13 -0
  16. data/docker-compose.oauth.yml +56 -0
  17. data/docker-compose.yml +1 -1
  18. data/lib/waterdrop/clients/dummy.rb +9 -0
  19. data/lib/waterdrop/clients/rdkafka.rb +19 -3
  20. data/lib/waterdrop/config.rb +50 -6
  21. data/lib/waterdrop/connection_pool.rb +13 -11
  22. data/lib/waterdrop/contracts/config.rb +33 -6
  23. data/lib/waterdrop/contracts/message.rb +2 -2
  24. data/lib/waterdrop/contracts/poller_config.rb +26 -0
  25. data/lib/waterdrop/contracts/transactional_offset.rb +2 -2
  26. data/lib/waterdrop/contracts/variant.rb +18 -18
  27. data/lib/waterdrop/errors.rb +3 -0
  28. data/lib/waterdrop/instrumentation/callbacks/delivery.rb +8 -8
  29. data/lib/waterdrop/instrumentation/callbacks/error.rb +5 -5
  30. data/lib/waterdrop/instrumentation/callbacks/oauthbearer_token_refresh.rb +4 -4
  31. data/lib/waterdrop/instrumentation/callbacks/statistics.rb +10 -8
  32. data/lib/waterdrop/instrumentation/idle_disconnector_listener.rb +4 -4
  33. data/lib/waterdrop/instrumentation/logger_listener.rb +10 -10
  34. data/lib/waterdrop/instrumentation/notifications.rb +3 -0
  35. data/lib/waterdrop/instrumentation/vendors/datadog/metrics_listener.rb +19 -19
  36. data/lib/waterdrop/polling/config.rb +52 -0
  37. data/lib/waterdrop/polling/latch.rb +49 -0
  38. data/lib/waterdrop/polling/poller.rb +415 -0
  39. data/lib/waterdrop/polling/queue_pipe.rb +63 -0
  40. data/lib/waterdrop/polling/state.rb +151 -0
  41. data/lib/waterdrop/polling.rb +22 -0
  42. data/lib/waterdrop/producer/async.rb +6 -6
  43. data/lib/waterdrop/producer/buffer.rb +8 -8
  44. data/lib/waterdrop/producer/idempotence.rb +3 -3
  45. data/lib/waterdrop/producer/sync.rb +15 -8
  46. data/lib/waterdrop/producer/testing.rb +1 -1
  47. data/lib/waterdrop/producer/transactions.rb +6 -6
  48. data/lib/waterdrop/producer/variant.rb +2 -2
  49. data/lib/waterdrop/producer.rb +113 -30
  50. data/lib/waterdrop/version.rb +1 -1
  51. data/lib/waterdrop.rb +15 -10
  52. data/package-lock.json +331 -0
  53. data/package.json +9 -0
  54. data/renovate.json +26 -7
  55. data/waterdrop.gemspec +23 -23
  56. metadata +19 -17
  57. data/.coditsu/ci.yml +0 -3
  58. data/.github/CODEOWNERS +0 -3
  59. data/.github/FUNDING.yml +0 -1
  60. data/.github/ISSUE_TEMPLATE/bug_report.md +0 -43
  61. data/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
  62. data/.github/workflows/ci.yml +0 -143
  63. data/.github/workflows/push.yml +0 -35
  64. data/.github/workflows/trigger-wiki-refresh.yml +0 -30
  65. data/.github/workflows/verify-action-pins.yml +0 -16
  66. data/.rspec +0 -2
  67. data/log/.gitkeep +0 -0
data/Gemfile.lock CHANGED
@@ -1,58 +1,26 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- waterdrop (2.8.15)
5
- karafka-core (>= 2.4.9, < 3.0.0)
6
- karafka-rdkafka (>= 0.23.1)
4
+ waterdrop (2.9.0)
5
+ karafka-core (>= 2.5.12, < 3.0.0)
6
+ karafka-rdkafka (>= 0.24.0)
7
7
  zeitwerk (~> 2.3)
8
8
 
9
9
  GEM
10
10
  remote: https://rubygems.org/
11
11
  specs:
12
- byebug (12.0.0)
13
- connection_pool (2.5.4)
14
- diff-lcs (1.6.2)
12
+ byebug (13.0.0)
13
+ reline (>= 0.6.0)
14
+ connection_pool (3.0.2)
15
15
  docile (1.4.1)
16
- ffi (1.17.2)
17
- ffi (1.17.2-aarch64-linux-gnu)
18
- ffi (1.17.2-aarch64-linux-musl)
19
- ffi (1.17.2-arm-linux-gnu)
20
- ffi (1.17.2-arm-linux-musl)
21
- ffi (1.17.2-arm64-darwin)
22
- ffi (1.17.2-x86-linux-gnu)
23
- ffi (1.17.2-x86-linux-musl)
24
- ffi (1.17.2-x86_64-darwin)
25
- ffi (1.17.2-x86_64-linux-gnu)
26
- ffi (1.17.2-x86_64-linux-musl)
27
- json (2.15.1)
28
- karafka-core (2.5.7)
16
+ drb (2.2.3)
17
+ ffi (1.17.4)
18
+ io-console (0.8.2)
19
+ json (2.19.3)
20
+ karafka-core (2.5.12)
29
21
  karafka-rdkafka (>= 0.20.0)
30
22
  logger (>= 1.6.0)
31
- karafka-rdkafka (0.23.1)
32
- ffi (~> 1.17.1)
33
- json (> 2.0)
34
- logger
35
- mini_portile2 (~> 2.6)
36
- rake (> 12)
37
- karafka-rdkafka (0.23.1-aarch64-linux-gnu)
38
- ffi (~> 1.17.1)
39
- json (> 2.0)
40
- logger
41
- mini_portile2 (~> 2.6)
42
- rake (> 12)
43
- karafka-rdkafka (0.23.1-arm64-darwin)
44
- ffi (~> 1.17.1)
45
- json (> 2.0)
46
- logger
47
- mini_portile2 (~> 2.6)
48
- rake (> 12)
49
- karafka-rdkafka (0.23.1-x86_64-linux-gnu)
50
- ffi (~> 1.17.1)
51
- json (> 2.0)
52
- logger
53
- mini_portile2 (~> 2.6)
54
- rake (> 12)
55
- karafka-rdkafka (0.23.1-x86_64-linux-musl)
23
+ karafka-rdkafka (0.25.0)
56
24
  ffi (~> 1.17.1)
57
25
  json (> 2.0)
58
26
  logger
@@ -60,21 +28,17 @@ GEM
60
28
  rake (> 12)
61
29
  logger (1.7.0)
62
30
  mini_portile2 (2.8.9)
31
+ minitest (6.0.2)
32
+ drb (~> 2.0)
33
+ prism (~> 1.5)
34
+ mocha (3.1.0)
35
+ ruby2_keywords (>= 0.0.5)
63
36
  ostruct (0.6.3)
64
- rake (13.3.0)
65
- rspec (3.13.2)
66
- rspec-core (~> 3.13.0)
67
- rspec-expectations (~> 3.13.0)
68
- rspec-mocks (~> 3.13.0)
69
- rspec-core (3.13.6)
70
- rspec-support (~> 3.13.0)
71
- rspec-expectations (3.13.5)
72
- diff-lcs (>= 1.2.0, < 2.0)
73
- rspec-support (~> 3.13.0)
74
- rspec-mocks (3.13.6)
75
- diff-lcs (>= 1.2.0, < 2.0)
76
- rspec-support (~> 3.13.0)
77
- rspec-support (3.13.6)
37
+ prism (1.9.0)
38
+ rake (13.3.1)
39
+ reline (0.6.3)
40
+ io-console (~> 0.5)
41
+ ruby2_keywords (0.0.5)
78
42
  simplecov (0.22.0)
79
43
  docile (~> 1.1)
80
44
  simplecov-html (~> 0.11)
@@ -82,35 +46,47 @@ GEM
82
46
  simplecov-html (0.13.2)
83
47
  simplecov_json_formatter (0.1.4)
84
48
  warning (1.5.0)
85
- yard (0.9.37)
86
- yard-lint (1.2.3)
87
- yard (~> 0.9)
88
- zeitwerk (~> 2.6)
89
- zeitwerk (2.7.3)
49
+ zeitwerk (2.7.5)
90
50
 
91
51
  PLATFORMS
92
- aarch64-linux-gnu
93
- aarch64-linux-musl
94
- arm-linux-gnu
95
- arm-linux-musl
96
- arm64-darwin
97
52
  ruby
98
- x86-linux-gnu
99
- x86-linux-musl
100
- x86_64-darwin
101
- x86_64-linux-gnu
102
- x86_64-linux-musl
103
53
 
104
54
  DEPENDENCIES
105
55
  byebug
106
56
  connection_pool
57
+ minitest
58
+ mocha
107
59
  ostruct
108
- rspec
109
60
  simplecov
110
61
  warning
111
62
  waterdrop!
112
- yard-lint
113
63
  zeitwerk (~> 2.7.0)
114
64
 
65
+ CHECKSUMS
66
+ byebug (13.0.0) sha256=d2263efe751941ca520fa29744b71972d39cbc41839496706f5d9b22e92ae05d
67
+ connection_pool (3.0.2) sha256=33fff5ba71a12d2aa26cb72b1db8bba2a1a01823559fb01d29eb74c286e62e0a
68
+ docile (1.4.1) sha256=96159be799bfa73cdb721b840e9802126e4e03dfc26863db73647204c727f21e
69
+ drb (2.2.3) sha256=0b00d6fdb50995fe4a45dea13663493c841112e4068656854646f418fda13373
70
+ ffi (1.17.4) sha256=bcd1642e06f0d16fc9e09ac6d49c3a7298b9789bcb58127302f934e437d60acf
71
+ io-console (0.8.2) sha256=d6e3ae7a7cc7574f4b8893b4fca2162e57a825b223a177b7afa236c5ef9814cc
72
+ json (2.19.3) sha256=289b0bb53052a1fa8c34ab33cc750b659ba14a5c45f3fcf4b18762dc67c78646
73
+ karafka-core (2.5.12) sha256=57cbb45a187fbe3df9b9a57af59dda7211f9969524b2afbb83792a64705860e1
74
+ karafka-rdkafka (0.25.0) sha256=67b316b942cf9ff7e9d7bbf9029e6f2d91eba97b4c9dc93b9f49fd207dfb80f8
75
+ logger (1.7.0) sha256=196edec7cc44b66cfb40f9755ce11b392f21f7967696af15d274dde7edff0203
76
+ mini_portile2 (2.8.9) sha256=0cd7c7f824e010c072e33f68bc02d85a00aeb6fce05bb4819c03dfd3c140c289
77
+ minitest (6.0.2) sha256=db6e57956f6ecc6134683b4c87467d6dd792323c7f0eea7b93f66bd284adbc3d
78
+ mocha (3.1.0) sha256=75f42d69ebfb1f10b32489dff8f8431d37a418120ecdfc07afe3bc183d4e1d56
79
+ ostruct (0.6.3) sha256=95a2ed4a4bd1d190784e666b47b2d3f078e4a9efda2fccf18f84ddc6538ed912
80
+ prism (1.9.0) sha256=7b530c6a9f92c24300014919c9dcbc055bf4cdf51ec30aed099b06cd6674ef85
81
+ rake (13.3.1) sha256=8c9e89d09f66a26a01264e7e3480ec0607f0c497a861ef16063604b1b08eb19c
82
+ reline (0.6.3) sha256=1198b04973565b36ec0f11542ab3f5cfeeec34823f4e54cebde90968092b1835
83
+ ruby2_keywords (0.0.5) sha256=ffd13740c573b7301cf7a2e61fc857b2a8e3d3aff32545d6f8300d8bae10e3ef
84
+ simplecov (0.22.0) sha256=fe2622c7834ff23b98066bb0a854284b2729a569ac659f82621fc22ef36213a5
85
+ simplecov-html (0.13.2) sha256=bd0b8e54e7c2d7685927e8d6286466359b6f16b18cb0df47b508e8d73c777246
86
+ simplecov_json_formatter (0.1.4) sha256=529418fbe8de1713ac2b2d612aa3daa56d316975d307244399fa4838c601b428
87
+ warning (1.5.0) sha256=0f12c49fea0c06757778eefdcc7771e4fd99308901e3d55c504d87afdd718c53
88
+ waterdrop (2.9.0)
89
+ zeitwerk (2.7.5) sha256=d8da92128c09ea6ec62c949011b00ed4a20242b255293dd66bf41545398f73dd
90
+
115
91
  BUNDLED WITH
116
- 2.7.0
92
+ 4.0.6
data/README.md CHANGED
@@ -16,7 +16,7 @@ It:
16
16
  - Supports multiple delivery policies
17
17
  - Supports per-topic configuration alterations (variants)
18
18
  - Works with [async](https://github.com/socketry/async) gems ecosystem
19
- - Works with Kafka `1.0+` and Ruby `3.1+`
19
+ - Works with Kafka `1.0+` and Ruby `3.2+`
20
20
  - Works with and without Karafka
21
21
 
22
22
  ## Documentation
data/Rakefile CHANGED
@@ -1,4 +1,16 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require 'bundler/setup'
4
- require 'bundler/gem_tasks'
3
+ require "bundler/setup"
4
+ require "bundler/gem_tasks"
5
+ require "minitest/test_task"
6
+
7
+ Minitest::TestTask.create(:test) do |t|
8
+ t.libs << "test"
9
+ t.libs << "lib"
10
+ t.test_globs = ["test/**/*_test.rb"]
11
+ # Load test_helper before minitest/autorun so SimpleCov's at_exit hook is
12
+ # registered first and thus runs last (LIFO), collecting coverage AFTER tests
13
+ t.test_prelude = 'require "test_helper"; require "minitest/autorun"'
14
+ end
15
+
16
+ task default: :test
data/bin/integrations CHANGED
@@ -4,14 +4,14 @@
4
4
 
5
5
  # All integration specs run with their own bundler context to avoid dependency conflicts.
6
6
  # All WaterDrop integration specs are pristine by default since they use isolated Gemfiles.
7
- raise 'This code needs to be executed WITHOUT bundle exec' if Kernel.const_defined?(:Bundler)
7
+ raise "This code needs to be executed WITHOUT bundle exec" if Kernel.const_defined?(:Bundler)
8
8
 
9
- require 'open3'
10
- require 'fileutils'
11
- require 'pathname'
12
- require 'tmpdir'
9
+ require "open3"
10
+ require "fileutils"
11
+ require "pathname"
12
+ require "tmpdir"
13
13
 
14
- ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))
14
+ ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), "../")))
15
15
 
16
16
  # How may bytes do we want to keep from the stdout in the buffer for when we need to print it
17
17
  MAX_BUFFER_OUTPUT = 307_200
@@ -36,9 +36,9 @@ class Scenario
36
36
  def initialize(path)
37
37
  @path = path
38
38
  # First 1024 characters from stdout
39
- @stdout_head = ''
39
+ @stdout_head = ""
40
40
  # Last 1024 characters from stdout
41
- @stdout_tail = ''
41
+ @stdout_tail = ""
42
42
  end
43
43
 
44
44
  # Starts running given scenario in a separate process
@@ -49,16 +49,15 @@ class Scenario
49
49
 
50
50
  # @return [String] integration spec name
51
51
  def name
52
- @path.gsub("#{ROOT_PATH}/spec/integrations/", '')
52
+ @path.gsub("#{ROOT_PATH}/test/integrations/", "")
53
53
  end
54
54
 
55
-
56
55
 # @return [Boolean] did this scenario finish or is it still running
57
56
  def finished?
58
57
  # If the thread is running too long, kill it
59
58
  if current_time - @started_at > MAX_RUN_TIME
60
59
  begin
61
- Process.kill('TERM', pid)
60
+ Process.kill("TERM", pid)
62
61
  # It may finish right after we want to kill it, that's why we ignore this
63
62
  rescue Errno::ESRCH
64
63
  end
@@ -66,7 +65,7 @@ class Scenario
66
65
 
67
66
  # We read it so it won't grow as we use our default logger that prints to both test.log and
68
67
  # to stdout. Otherwise after reaching the buffer size, it would hang
69
- buffer = ''
68
+ buffer = ""
70
69
  @stdout.read_nonblock(MAX_BUFFER_OUTPUT, buffer, exception: false)
71
70
  @stdout_head = buffer if @stdout_head.empty?
72
71
  @stdout_tail << buffer
@@ -101,19 +100,19 @@ class Scenario
101
100
  # Prints a status report when scenario is finished and stdout if it failed
102
101
  def report
103
102
  if success?
104
- print "\e[#{32}m#{'.'}\e[0m"
103
+ print "\e[32m.\e[0m"
105
104
  else
106
- buffer = ''
105
+ buffer = ""
107
106
 
108
107
  @stderr.read_nonblock(MAX_BUFFER_OUTPUT, buffer, exception: false)
109
108
 
110
109
  puts
111
- puts "\e[#{31}m#{'[FAILED]'}\e[0m #{name}"
110
+ puts "\e[31m[FAILED]\e[0m #{name}"
112
111
  puts "Time taken: #{current_time - @started_at} seconds"
113
112
  puts "Exit code: #{exit_code}"
114
113
  puts "Exit status: #{exit_status}"
115
114
  puts @stdout_head
116
- puts '...'
115
+ puts "..."
117
116
  puts @stdout_tail
118
117
  puts buffer
119
118
  puts
@@ -143,6 +142,7 @@ class Scenario
143
142
  # We copy the spec into a temp dir, not to pollute the spec location with logs, etc
144
143
  temp_dir = Dir.mktmpdir
145
144
  file_name = File.basename(@path)
145
+ spec_file_path = @path.sub("#{ROOT_PATH}/", "")
146
146
 
147
147
  FileUtils.cp_r("#{scenario_dir}/.", temp_dir)
148
148
 
@@ -152,7 +152,8 @@ class Scenario
152
152
  bundle install &&
153
153
  BUNDLE_AUTO_INSTALL=true \
154
154
  WATERDROP_GEM_DIR=#{ROOT_PATH} \
155
- bundle exec ruby #{file_name}
155
+ SPEC_FILE_PATH=#{spec_file_path} \
156
+ bundle exec ruby -r "#{ROOT_PATH}/test/support/spec_hash" #{file_name}
156
157
  CMD
157
158
  end
158
159
 
@@ -163,38 +164,38 @@ class Scenario
163
164
  end
164
165
 
165
166
  # Load all the specs
166
- specs = Dir[ROOT_PATH.join('spec/integrations/**/*_spec.rb')]
167
+ specs = Dir[ROOT_PATH.join("test/integrations/**/*_spec.rb")]
167
168
 
168
- FILTER_TYPE = ARGV[0] == '--exclude' ? 'exclude' : 'include'
169
+ FILTER_TYPE = (ARGV[0] == "--exclude") ? "exclude" : "include"
169
170
 
170
171
  # Remove the exclude flag
171
- ARGV.shift if FILTER_TYPE == '--exclude'
172
+ ARGV.shift if FILTER_TYPE == "--exclude"
172
173
 
173
174
  # If filters is provided, apply
174
175
  # Allows to provide several filters one after another and applies all of them
175
176
  ARGV.each do |filter|
176
177
  specs.delete_if do |name|
177
178
  case FILTER_TYPE
178
- when 'include'
179
+ when "include"
179
180
  !name.include?(filter)
180
- when 'exclude'
181
+ when "exclude"
181
182
  name.include?(filter)
182
183
  else
183
- raise 'Invalid filter type'
184
+ raise "Invalid filter type"
184
185
  end
185
186
  end
186
187
  end
187
188
 
188
189
  # Randomize order
189
- seed = (ENV['SPECS_SEED'] || rand(0..10_000)).to_i
190
+ seed = (ENV["SPECS_SEED"] || rand(0..10_000)).to_i
190
191
 
191
192
  puts "Random seed: #{seed}"
192
193
 
193
194
  scenarios = specs
194
- .shuffle(random: Random.new(seed))
195
- .map { |integration| Scenario.new(integration) }
195
+ .shuffle(random: Random.new(seed))
196
+ .map { |integration| Scenario.new(integration) }
196
197
 
197
- raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if scenarios.empty?
198
+ raise ArgumentError, "No integration specs with filters: #{ARGV.join(", ")}" if scenarios.empty?
198
199
 
199
200
  puts "Running #{scenarios.size} scenarios"
200
201
 
@@ -217,8 +218,8 @@ end
217
218
  puts
218
219
  puts "\nLongest scenarios:\n\n"
219
220
 
220
- finished_scenarios.sort_by(&:time_taken).reverse.first(10).each do |long_scenario|
221
- puts "[#{'%6.2f' % long_scenario.time_taken}] #{long_scenario.name}"
221
+ finished_scenarios.sort_by(&:time_taken).last(10).reverse_each do |long_scenario|
222
+ puts "[#{"%6.2f" % long_scenario.time_taken}] #{long_scenario.name}"
222
223
  end
223
224
 
224
225
  failed_scenarios = finished_scenarios.reject(&:success?)
@@ -232,7 +233,7 @@ else
232
233
  puts "\nFailed scenarios:\n\n"
233
234
 
234
235
  failed_scenarios.each do |scenario|
235
- puts "\e[#{31}m#{'[FAILED]'}\e[0m #{scenario.name}"
236
+ puts "\e[31m[FAILED]\e[0m #{scenario.name}"
236
237
  end
237
238
 
238
239
  puts
@@ -8,18 +8,18 @@
8
8
  #
9
9
  # It also ensures we have one convention that we can follow.
10
10
 
11
- require 'bundler'
11
+ require "bundler"
12
12
  Bundler.setup(:default, :test, :integrations)
13
- require 'rdkafka'
13
+ require "rdkafka"
14
14
 
15
- admin = Rdkafka::Config.new('bootstrap.servers': 'localhost:9092').admin
15
+ admin = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").admin
16
16
 
17
17
  invalid = admin
18
- .metadata
19
- .topics
20
- .map { |topic| topic[:topic_name] }
21
- .select { |topic| !topic.start_with?('it-') }
22
- .select { |topic| topic.length <= 6 }
18
+ .metadata
19
+ .topics
20
+ .map { |topic| topic[:topic_name] }
21
+ .select { |topic| !topic.start_with?("it-") }
22
+ .select { |topic| topic.length <= 6 }
23
23
 
24
24
  admin.close
25
25
 
@@ -26,6 +26,19 @@ en:
26
26
  non_reloadable_errors_format: must be an array of symbols
27
27
  oauth.token_provider_listener_format: 'must be false or respond to #on_oauthbearer_token_refresh'
28
28
  idle_disconnect_timeout_format: 'must be an integer that is equal to 0 or bigger than 30 000 (30 seconds)'
29
+ statistics_decorator_format: 'must respond to #call'
30
+ polling.mode_format: must be :thread or :fd
31
+ polling.poller_format: must be nil or a WaterDrop::Polling::Poller instance
32
+ polling.poller_poller_only_with_fd_mode: can only be set when polling.mode is :fd
33
+ polling.fd.max_time_format: must be an integer that is equal or bigger than 1
34
+ polling.fd.periodic_poll_interval_format: must be an integer that is equal or bigger than 1
35
+
36
+ poller:
37
+ thread_priority_format: must be an integer between -3 and 3
38
+ poll_timeout_format: must be an integer that is equal or bigger than 1
39
+ backoff_min_format: must be an integer that is equal or bigger than 1
40
+ backoff_max_format: must be an integer that is equal or bigger than 1
41
+ backoff_max_must_be_gte_backoff_min: backoff_max must be greater than or equal to backoff_min
29
42
 
30
43
  variant:
31
44
  missing: must be present
@@ -0,0 +1,56 @@
1
+ services:
2
+ keycloak:
3
+ image: quay.io/keycloak/keycloak:26.5
4
+ container_name: keycloak
5
+ command: start-dev --import-realm
6
+ ports:
7
+ - "8080:8080"
8
+ environment:
9
+ KEYCLOAK_ADMIN: admin
10
+ KEYCLOAK_ADMIN_PASSWORD: admin
11
+ volumes:
12
+ - ./test/integrations/oauth_token_refresh/keycloak-realm.json:/opt/keycloak/data/import/realm.json
13
+ healthcheck:
14
+ test: ["CMD-SHELL", "exec 3<>/dev/tcp/localhost/8080 && echo -e 'GET /realms/kafka HTTP/1.1\\r\\nHost: localhost\\r\\n\\r\\n' >&3 && timeout 1 cat <&3 | grep -q '200 OK'"]
15
+ interval: 10s
16
+ timeout: 10s
17
+ retries: 30
18
+ start_period: 90s
19
+
20
+ kafka-oauth:
21
+ image: confluentinc/cp-kafka:8.2.0
22
+ container_name: kafka-oauth
23
+ depends_on:
24
+ keycloak:
25
+ condition: service_healthy
26
+ ports:
27
+ - "9094:9094"
28
+ environment:
29
+ CLUSTER_ID: kafka-oauth-cluster-1
30
+ KAFKA_BROKER_ID: 1
31
+ KAFKA_PROCESS_ROLES: broker,controller
32
+ KAFKA_CONTROLLER_QUORUM_VOTERS: 1@kafka-oauth:9093
33
+ KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
34
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
35
+ KAFKA_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:9093,SASL_PLAINTEXT://:9094
36
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-oauth:9092,SASL_PLAINTEXT://127.0.0.1:9094
37
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT
38
+ KAFKA_SASL_ENABLED_MECHANISMS: OAUTHBEARER
39
+ KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: PLAINTEXT
40
+ # Per-listener callback handler
41
+ KAFKA_LISTENER_NAME_SASL__PLAINTEXT_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS: org.apache.kafka.common.security.oauthbearer.OAuthBearerValidatorCallbackHandler
42
+ KAFKA_LISTENER_NAME_SASL__PLAINTEXT_OAUTHBEARER_SASL_JAAS_CONFIG: |
43
+ org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;
44
+ # Global OAUTHBEARER settings for token validation
45
+ KAFKA_SASL_OAUTHBEARER_JWKS_ENDPOINT_URL: http://keycloak:8080/realms/kafka/protocol/openid-connect/certs
46
+ KAFKA_SASL_OAUTHBEARER_EXPECTED_AUDIENCE: kafka
47
+ # Note: Not setting expected issuer - Keycloak uses request URL which varies
48
+ KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
49
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
50
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
51
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
52
+ # Required by Confluent image when SASL is enabled
53
+ # Also allow HTTP URLs for JWKS endpoint (Kafka 3.0+ blocks non-HTTPS by default)
54
+ KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/kafka_server_jaas.conf -Dorg.apache.kafka.sasl.oauthbearer.allowed.urls=http://keycloak:8080/realms/kafka/protocol/openid-connect/certs"
55
+ volumes:
56
+ - ./test/integrations/oauth_token_refresh/kafka_server_jaas.conf:/etc/kafka/kafka_server_jaas.conf:ro
data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
1
1
  services:
2
2
  kafka:
3
3
  container_name: kafka
4
- image: confluentinc/cp-kafka:8.0.0
4
+ image: confluentinc/cp-kafka:8.2.0
5
5
 
6
6
  ports:
7
7
  - 9092:9092
@@ -57,6 +57,15 @@ module WaterDrop
57
57
  Handle.new(topic.to_s, partition, @counters["#{topic}#{partition}"] += 1)
58
58
  end
59
59
 
60
+ # Returns 0 as dummy client doesn't queue any real messages
61
+ #
62
+ # @return [Integer] always 0
63
+ def queue_size
64
+ 0
65
+ end
66
+
67
+ alias_method :queue_length, :queue_size
68
+
60
69
 # @param _args [Object] anything really, this dummy is supposed to support anything
61
70
  def respond_to_missing?(*_args)
62
71
  true
@@ -14,12 +14,23 @@ module WaterDrop
14
14
  kafka_config = producer.config.kafka.to_h
15
15
  monitor = producer.config.monitor
16
16
 
17
- client = ::Rdkafka::Config.new(kafka_config).producer(native_kafka_auto_start: false)
17
+ # When FD polling is enabled, we disable the native librdkafka polling thread
18
+ # and use our own Ruby-based poller instead
19
+ producer_options = { native_kafka_auto_start: false }
20
+
21
+ producer_options[:run_polling_thread] = false if producer.fd_polling?
22
+
23
+ client = ::Rdkafka::Config.new(kafka_config).producer(**producer_options)
18
24
 
19
25
  # Register statistics runner for this particular type of callbacks
20
26
  ::Karafka::Core::Instrumentation.statistics_callbacks.add(
21
27
  producer.id,
22
- Instrumentation::Callbacks::Statistics.new(producer.id, client.name, monitor)
28
+ Instrumentation::Callbacks::Statistics.new(
29
+ producer.id,
30
+ client.name,
31
+ monitor,
32
+ producer.config.statistics_decorator
33
+ )
23
34
  )
24
35
 
25
36
  # Register error tracking callback
@@ -53,8 +64,13 @@ module WaterDrop
53
64
 
54
65
  client.start
55
66
 
67
+ # Register with poller if FD polling is enabled
68
+ # Uses the producer's configured poller (custom or global singleton)
69
+ # This must happen after client.start to ensure the client is ready
70
+ producer.poller.register(producer, client) if producer.fd_polling?
71
+
56
72
  # Switch to the transactional mode if user provided the transactional id
57
- client.init_transactions if kafka_config.key?(:'transactional.id')
73
+ client.init_transactions if kafka_config.key?(:"transactional.id")
58
74
 
59
75
  client
60
76
  end
@@ -9,18 +9,18 @@ module WaterDrop
9
9
 
10
10
  # Defaults for kafka settings, that will be overwritten only if not present already
11
11
  KAFKA_DEFAULTS = {
12
- 'client.id': 'waterdrop',
12
+ "client.id": "waterdrop",
13
13
  # emit librdkafka statistics every five seconds. This is used in instrumentation.
14
14
  # When disabled, part of metrics will not be published and available.
15
- 'statistics.interval.ms': 5_000,
15
+ "statistics.interval.ms": 5_000,
16
16
  # We set it to a value that is lower than `max_wait_timeout` to have a final verdict upon
17
17
  # sync delivery
18
- 'message.timeout.ms': 50_000,
18
+ "message.timeout.ms": 150_000,
19
19
  # Must be more or equal to `message.timeout.ms` defaults
20
- 'transaction.timeout.ms': 55_000,
20
+ "transaction.timeout.ms": 165_000,
21
21
  # Lowers latency. Default in newer librdkafka but we want to make sure it is shipped to
22
22
  # users despite what librdkafka they run on
23
- 'socket.nagle.disable': true
23
+ "socket.nagle.disable": true
24
24
  }.freeze
25
25
 
26
26
  private_constant :KAFKA_DEFAULTS
@@ -53,7 +53,7 @@ module WaterDrop
53
53
  setting :max_payload_size, default: 1_000_012
54
54
  # option [Integer] Wait that long for the delivery report or raise an error if this takes
55
55
  # longer than the timeout ms.
56
- setting :max_wait_timeout, default: 60_000
56
+ setting :max_wait_timeout, default: 180_000
57
57
  # option [Boolean] should we upon detecting full librdkafka queue backoff and retry or should
58
58
  # we raise an exception.
59
59
  # When this is set to `true`, upon full queue, we won't raise an error. There will be error
@@ -116,6 +116,23 @@ module WaterDrop
116
116
  # prevent overly aggressive disconnections.
117
117
  setting :idle_disconnect_timeout, default: 0
118
118
 
119
+ # option [Karafka::Core::Monitoring::StatisticsDecorator] decorator instance used to compute
120
+ # deltas (_d) and freeze durations (_fd) on raw librdkafka statistics. The default is
121
+ # pre-configured with `only_keys` covering keys used by the built-in Datadog metrics
122
+ # listener and `excluded_keys` skipping subtrees not needed by producers (topics, broker
123
+ # window stats). Users who need additional decorated keys or full decoration can provide
124
+ # a custom decorator instance.
125
+ setting(
126
+ :statistics_decorator,
127
+ default: false,
128
+ constructor: lambda { |decorator|
129
+ decorator || ::Karafka::Core::Monitoring::StatisticsDecorator.new(
130
+ only_keys: %w[tx txretries txerrs rxerrs],
131
+ excluded_keys: %w[int_latency outbuf_latency rtt throttle req toppars topics]
132
+ )
133
+ }
134
+ )
135
+
119
136
  # option [Boolean] should we send messages. Setting this to false can be really useful when
120
137
  # testing and or developing because when set to false, won't actually ping Kafka but will
121
138
  # run all the validations, etc
@@ -140,6 +157,33 @@ module WaterDrop
140
157
  setting :token_provider_listener, default: false
141
158
  end
142
159
 
160
+ # Namespace for polling configuration
161
+ setting :polling do
162
+ # option [Symbol] Polling mode for handling producer callbacks
163
+ # :fd - uses a single global Ruby thread with IO.select-based multiplexing (default)
164
+ # :thread - uses librdkafka's native background polling threads
165
+ setting :mode, default: :fd
166
+
167
+ # option [WaterDrop::Polling::Poller, nil] Custom poller instance for isolation
168
+ # When nil (default), uses the global singleton poller shared by all FD-mode producers
169
+ # When set to a Poller instance, uses that dedicated poller for this producer only
170
+ # This allows isolating producers from each other's callback processing
171
+ # Only valid when mode is :fd
172
+ setting :poller, default: nil
173
+
174
+ # Namespace for FD-based polling configuration (only used when mode is :fd)
175
+ setting :fd do
176
+ # option [Integer] Max milliseconds to poll a single producer before rotating to the next
177
+ # This prevents any single producer from monopolizing the polling thread
178
+ setting :max_time, default: 100
179
+
180
+ # option [Integer] Interval in milliseconds for periodic polling of idle producers
181
+ # When one producer is busy, other producers still need to be polled periodically
182
+ # to ensure OAuth token refresh and statistics callbacks fire on time
183
+ setting :periodic_poll_interval, default: 1_000
184
+ end
185
+ end
186
+
143
187
  # Configuration method
144
188
  # @yield Runs a block of code providing a config singleton instance to it
145
189
  # @yieldparam [WaterDrop::Config] WaterDrop config instance