karafka 2.5.0 → 2.5.1.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/{ci.yml → ci_linux_ubuntu_x86_64_gnu.yml} +54 -30
  3. data/.github/workflows/ci_macos_arm64.yml +148 -0
  4. data/.github/workflows/push.yml +2 -2
  5. data/.github/workflows/trigger-wiki-refresh.yml +30 -0
  6. data/.github/workflows/verify-action-pins.yml +1 -1
  7. data/.ruby-version +1 -1
  8. data/CHANGELOG.md +28 -1
  9. data/Gemfile +2 -1
  10. data/Gemfile.lock +55 -26
  11. data/README.md +2 -2
  12. data/bin/integrations +3 -1
  13. data/bin/verify_kafka_warnings +2 -1
  14. data/config/locales/errors.yml +153 -152
  15. data/config/locales/pro_errors.yml +135 -134
  16. data/karafka.gemspec +3 -3
  17. data/lib/active_job/queue_adapters/karafka_adapter.rb +30 -1
  18. data/lib/karafka/active_job/dispatcher.rb +19 -9
  19. data/lib/karafka/admin/acl.rb +7 -8
  20. data/lib/karafka/admin/configs/config.rb +2 -2
  21. data/lib/karafka/admin/configs/resource.rb +2 -2
  22. data/lib/karafka/admin/configs.rb +3 -7
  23. data/lib/karafka/admin/consumer_groups.rb +351 -0
  24. data/lib/karafka/admin/topics.rb +206 -0
  25. data/lib/karafka/admin.rb +42 -451
  26. data/lib/karafka/base_consumer.rb +22 -0
  27. data/lib/karafka/{pro/contracts/server_cli_options.rb → cli/contracts/server.rb} +4 -12
  28. data/lib/karafka/cli/info.rb +1 -1
  29. data/lib/karafka/cli/install.rb +0 -2
  30. data/lib/karafka/connection/client.rb +8 -0
  31. data/lib/karafka/connection/listener.rb +5 -1
  32. data/lib/karafka/connection/status.rb +12 -9
  33. data/lib/karafka/errors.rb +0 -8
  34. data/lib/karafka/instrumentation/assignments_tracker.rb +16 -0
  35. data/lib/karafka/instrumentation/logger_listener.rb +109 -50
  36. data/lib/karafka/pro/active_job/dispatcher.rb +5 -0
  37. data/lib/karafka/pro/cleaner/messages/messages.rb +18 -8
  38. data/lib/karafka/pro/cli/contracts/server.rb +106 -0
  39. data/lib/karafka/pro/encryption/contracts/config.rb +1 -1
  40. data/lib/karafka/pro/loader.rb +1 -1
  41. data/lib/karafka/pro/recurring_tasks/contracts/config.rb +1 -1
  42. data/lib/karafka/pro/routing/features/adaptive_iterator/contracts/topic.rb +1 -1
  43. data/lib/karafka/pro/routing/features/adaptive_iterator/topic.rb +9 -0
  44. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +1 -1
  45. data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +9 -0
  46. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +1 -1
  47. data/lib/karafka/pro/routing/features/delaying/topic.rb +9 -0
  48. data/lib/karafka/pro/routing/features/direct_assignments/contracts/consumer_group.rb +1 -1
  49. data/lib/karafka/pro/routing/features/direct_assignments/contracts/topic.rb +1 -1
  50. data/lib/karafka/pro/routing/features/direct_assignments/topic.rb +9 -0
  51. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +1 -1
  52. data/lib/karafka/pro/routing/features/expiring/topic.rb +9 -0
  53. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +1 -1
  54. data/lib/karafka/pro/routing/features/filtering/topic.rb +9 -0
  55. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +1 -1
  56. data/lib/karafka/pro/routing/features/inline_insights/topic.rb +9 -0
  57. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +1 -1
  58. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +9 -0
  59. data/lib/karafka/pro/routing/features/multiplexing/contracts/topic.rb +1 -1
  60. data/lib/karafka/pro/routing/features/multiplexing.rb +1 -1
  61. data/lib/karafka/pro/routing/features/offset_metadata/contracts/topic.rb +1 -1
  62. data/lib/karafka/pro/routing/features/offset_metadata/topic.rb +9 -0
  63. data/lib/karafka/pro/routing/features/parallel_segments/contracts/consumer_group.rb +1 -1
  64. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +1 -1
  65. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +1 -1
  66. data/lib/karafka/pro/routing/features/patterns/topic.rb +9 -0
  67. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +1 -1
  68. data/lib/karafka/pro/routing/features/periodic_job/contracts/topic.rb +1 -1
  69. data/lib/karafka/pro/routing/features/periodic_job/topic.rb +9 -0
  70. data/lib/karafka/pro/routing/features/recurring_tasks/contracts/topic.rb +1 -1
  71. data/lib/karafka/pro/routing/features/recurring_tasks/topic.rb +9 -0
  72. data/lib/karafka/pro/routing/features/scheduled_messages/contracts/topic.rb +1 -1
  73. data/lib/karafka/pro/routing/features/scheduled_messages/topic.rb +9 -0
  74. data/lib/karafka/pro/routing/features/swarm/contracts/topic.rb +1 -1
  75. data/lib/karafka/pro/routing/features/swarm/topic.rb +9 -0
  76. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +1 -1
  77. data/lib/karafka/pro/routing/features/throttling/topic.rb +9 -0
  78. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +1 -1
  79. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +9 -0
  80. data/lib/karafka/pro/scheduled_messages/contracts/config.rb +1 -1
  81. data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +9 -3
  82. data/lib/karafka/pro/swarm/liveness_listener.rb +17 -2
  83. data/lib/karafka/processing/executor.rb +1 -1
  84. data/lib/karafka/routing/builder.rb +0 -3
  85. data/lib/karafka/routing/consumer_group.rb +1 -4
  86. data/lib/karafka/routing/contracts/consumer_group.rb +84 -0
  87. data/lib/karafka/routing/contracts/routing.rb +61 -0
  88. data/lib/karafka/routing/contracts/topic.rb +83 -0
  89. data/lib/karafka/routing/features/active_job/contracts/topic.rb +1 -1
  90. data/lib/karafka/routing/features/active_job/topic.rb +9 -0
  91. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +1 -1
  92. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +9 -0
  93. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +1 -1
  94. data/lib/karafka/routing/features/declaratives/topic.rb +9 -0
  95. data/lib/karafka/routing/features/deserializers/contracts/topic.rb +1 -1
  96. data/lib/karafka/routing/features/deserializers/topic.rb +9 -0
  97. data/lib/karafka/routing/features/eofed/contracts/topic.rb +1 -1
  98. data/lib/karafka/routing/features/eofed/topic.rb +9 -0
  99. data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +1 -1
  100. data/lib/karafka/routing/features/inline_insights/topic.rb +9 -0
  101. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +1 -1
  102. data/lib/karafka/routing/features/manual_offset_management/topic.rb +9 -0
  103. data/lib/karafka/routing/subscription_group.rb +1 -10
  104. data/lib/karafka/routing/topic.rb +9 -1
  105. data/lib/karafka/server.rb +2 -7
  106. data/lib/karafka/setup/attributes_map.rb +36 -0
  107. data/lib/karafka/setup/config.rb +6 -7
  108. data/lib/karafka/setup/contracts/config.rb +217 -0
  109. data/lib/karafka/setup/defaults_injector.rb +3 -1
  110. data/lib/karafka/swarm/node.rb +66 -6
  111. data/lib/karafka/swarm.rb +2 -2
  112. data/lib/karafka/templates/karafka.rb.erb +2 -7
  113. data/lib/karafka/version.rb +1 -1
  114. data/lib/karafka.rb +17 -18
  115. metadata +18 -15
  116. data/lib/karafka/contracts/config.rb +0 -210
  117. data/lib/karafka/contracts/consumer_group.rb +0 -81
  118. data/lib/karafka/contracts/routing.rb +0 -59
  119. data/lib/karafka/contracts/server_cli_options.rb +0 -92
  120. data/lib/karafka/contracts/topic.rb +0 -81
  121. data/lib/karafka/swarm/pidfd.rb +0 -147
@@ -0,0 +1,83 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Routing
5
+ module Contracts
6
+ # Consumer group topic validation rules.
7
+ class Topic < Karafka::Contracts::Base
8
+ configure do |config|
9
+ config.error_messages = YAML.safe_load(
10
+ File.read(
11
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
12
+ )
13
+ ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
14
+ end
15
+
16
+ required(:deserializers) { |val| !val.nil? }
17
+ required(:id) { |val| val.is_a?(String) && Karafka::Contracts::TOPIC_REGEXP.match?(val) }
18
+ required(:kafka) { |val| val.is_a?(Hash) && !val.empty? }
19
+ required(:max_messages) { |val| val.is_a?(Integer) && val >= 1 }
20
+ required(:initial_offset) { |val| %w[earliest latest].include?(val) }
21
+ required(:max_wait_time) { |val| val.is_a?(Integer) && val >= 10 }
22
+ required(:name) { |val| val.is_a?(String) && Karafka::Contracts::TOPIC_REGEXP.match?(val) }
23
+ required(:active) { |val| [true, false].include?(val) }
24
+ nested(:subscription_group_details) do
25
+ required(:name) { |val| val.is_a?(String) && !val.empty? }
26
+ end
27
+
28
+ # Consumer needs to be present only if topic is active
29
+ # We allow not to define consumer for non-active because they may be only used via admin
30
+ # api or other ways and not consumed with consumer
31
+ virtual do |data, errors|
32
+ next unless errors.empty?
33
+ next if data.fetch(:consumer)
34
+ next unless data.fetch(:active)
35
+
36
+ [[%w[consumer], :missing]]
37
+ end
38
+
39
+ virtual do |data, errors|
40
+ next unless errors.empty?
41
+
42
+ value = data.fetch(:kafka)
43
+
44
+ begin
45
+ # This will trigger rdkafka validations that we catch and re-map the info and use dry
46
+ # compatible format
47
+ Rdkafka::Config.new(value).send(:native_config)
48
+
49
+ nil
50
+ rescue Rdkafka::Config::ConfigError => e
51
+ [[%w[kafka], e.message]]
52
+ end
53
+ end
54
+
55
+ # When users redefine kafka scope settings per topic, they often forget to define the
56
+ # basic stuff as they assume it is auto-inherited. It is not (unless inherit flag used),
57
+ # leaving them with things like bootstrap.servers undefined. This checks that bootstrap
58
+ # servers are defined so we can catch those issues before they cause more problems.
59
+ virtual do |data, errors|
60
+ next unless errors.empty?
61
+
62
+ kafka = data.fetch(:kafka)
63
+
64
+ next if kafka.key?(:'bootstrap.servers')
65
+
66
+ [[%w[kafka bootstrap.servers], :missing]]
67
+ end
68
+
69
+ virtual do |data, errors|
70
+ next unless errors.empty?
71
+ next unless ::Karafka::App.config.strict_topics_namespacing
72
+
73
+ value = data.fetch(:name)
74
+ namespacing_chars_count = value.chars.find_all { |c| ['.', '_'].include?(c) }.uniq.size
75
+
76
+ next if namespacing_chars_count <= 1
77
+
78
+ [[%w[name], :inconsistent_namespacing]]
79
+ end
80
+ end
81
+ end
82
+ end
83
+ end
@@ -14,7 +14,7 @@ module Karafka
14
14
  File.read(
15
15
  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
16
16
  )
17
- ).fetch('en').fetch('validations').fetch('topic')
17
+ ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
18
18
  end
19
19
 
20
20
  virtual do |data, errors|
@@ -6,6 +6,15 @@ module Karafka
6
6
  class ActiveJob < Base
7
7
  # Topic extensions to be able to check if given topic is ActiveJob topic
8
8
  module Topic
9
+ # This method calls the parent class initializer and then sets up the
10
+ # extra instance variable to nil. The explicit initialization
11
+ # to nil is included as an optimization for Ruby's object shapes system,
12
+ # which improves memory layout and access performance.
13
+ def initialize(...)
14
+ super
15
+ @active_job = nil
16
+ end
17
+
9
18
  # @param active [Boolean] should this topic be considered one working with ActiveJob
10
19
  #
11
20
  # @note Since this feature supports only one setting (active), we can use the old API
@@ -13,7 +13,7 @@ module Karafka
13
13
  File.read(
14
14
  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
15
15
  )
16
- ).fetch('en').fetch('validations').fetch('topic')
16
+ ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
17
17
  end
18
18
 
19
19
  nested :dead_letter_queue do
@@ -11,6 +11,15 @@ module Karafka
11
11
 
12
12
  private_constant :DEFAULT_MAX_RETRIES
13
13
 
14
+ # This method calls the parent class initializer and then sets up the
15
+ # extra instance variable to nil. The explicit initialization
16
+ # to nil is included as an optimization for Ruby's object shapes system,
17
+ # which improves memory layout and access performance.
18
+ def initialize(...)
19
+ super
20
+ @dead_letter_queue = nil
21
+ end
22
+
14
23
  # @param max_retries [Integer] after how many retries should we move data to dlq
15
24
  # @param topic [String, false] where the messages should be moved if failing or false
16
25
  # if we do not want to move it anywhere and just skip
@@ -13,7 +13,7 @@ module Karafka
13
13
  File.read(
14
14
  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
15
15
  )
16
- ).fetch('en').fetch('validations').fetch('topic')
16
+ ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
17
17
  end
18
18
 
19
19
  nested :declaratives do
@@ -6,6 +6,15 @@ module Karafka
6
6
  class Declaratives < Base
7
7
  # Extension for managing Kafka topic configuration
8
8
  module Topic
9
+ # This method calls the parent class initializer and then sets up the
10
+ # extra instance variable to nil. The explicit initialization
11
+ # to nil is included as an optimization for Ruby's object shapes system,
12
+ # which improves memory layout and access performance.
13
+ def initialize(...)
14
+ super
15
+ @declaratives = nil
16
+ end
17
+
9
18
  # @param active [Boolean] is the topic structure management feature active
10
19
  # @param partitions [Integer]
11
20
  # @param replication_factor [Integer]
@@ -13,7 +13,7 @@ module Karafka
13
13
  File.read(
14
14
  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
15
15
  )
16
- ).fetch('en').fetch('validations').fetch('topic')
16
+ ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
17
17
  end
18
18
 
19
19
  nested :deserializers do
@@ -8,6 +8,15 @@ module Karafka
8
8
  # Routing topic deserializers API. It allows to configure deserializers for various
9
9
  # components of each message.
10
10
  module Topic
11
+ # This method calls the parent class initializer and then sets up the
12
+ # extra instance variable to nil. The explicit initialization
13
+ # to nil is included as an optimization for Ruby's object shapes system,
14
+ # which improves memory layout and access performance.
15
+ def initialize(...)
16
+ super
17
+ @deserializers = nil
18
+ end
19
+
11
20
  # Allows for setting all the deserializers with standard defaults
12
21
  # @param payload [Object] Deserializer for the message payload
13
22
  # @param key [Object] deserializer for the message key
@@ -13,7 +13,7 @@ module Karafka
13
13
  File.read(
14
14
  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
15
15
  )
16
- ).fetch('en').fetch('validations').fetch('topic')
16
+ ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
17
17
  end
18
18
 
19
19
  nested :eofed do
@@ -6,6 +6,15 @@ module Karafka
6
6
  class Eofed < Base
7
7
  # Routing topic eofed API
8
8
  module Topic
9
+ # This method calls the parent class initializer and then sets up the
10
+ # extra instance variable to nil. The explicit initialization
11
+ # to nil is included as an optimization for Ruby's object shapes system,
12
+ # which improves memory layout and access performance.
13
+ def initialize(...)
14
+ super
15
+ @eofed = nil
16
+ end
17
+
9
18
  # @param active [Boolean] should the `#eofed` job run on eof
10
19
  def eofed(active = false)
11
20
  @eofed ||= Config.new(
@@ -13,7 +13,7 @@ module Karafka
13
13
  File.read(
14
14
  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
15
15
  )
16
- ).fetch('en').fetch('validations').fetch('topic')
16
+ ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
17
17
  end
18
18
 
19
19
  nested :inline_insights do
@@ -6,6 +6,15 @@ module Karafka
6
6
  class InlineInsights < Base
7
7
  # Routing topic inline insights API
8
8
  module Topic
9
+ # This method calls the parent class initializer and then sets up the
10
+ # extra instance variable to nil. The explicit initialization
11
+ # to nil is included as an optimization for Ruby's object shapes system,
12
+ # which improves memory layout and access performance.
13
+ def initialize(...)
14
+ super
15
+ @inline_insights = nil
16
+ end
17
+
9
18
  # @param active [Boolean] should inline insights be activated
10
19
  def inline_insights(active = false)
11
20
  @inline_insights ||= Config.new(
@@ -13,7 +13,7 @@ module Karafka
13
13
  File.read(
14
14
  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
15
15
  )
16
- ).fetch('en').fetch('validations').fetch('topic')
16
+ ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
17
17
  end
18
18
 
19
19
  nested :manual_offset_management do
@@ -6,6 +6,15 @@ module Karafka
6
6
  class ManualOffsetManagement < Base
7
7
  # Topic extensions to be able to manage manual offset management settings
8
8
  module Topic
9
+ # This method calls the parent class initializer and then sets up the
10
+ # extra instance variable to nil. The explicit initialization
11
+ # to nil is included as an optimization for Ruby's object shapes system,
12
+ # which improves memory layout and access performance.
13
+ def initialize(...)
14
+ super
15
+ @manual_offset_management = nil
16
+ end
17
+
9
18
  # @param active [Boolean] should we stop managing the offset in Karafka and make the user
10
19
  # responsible for marking messages as consumed.
11
20
  # @return [Config] defined config
@@ -148,16 +148,7 @@ module Karafka
148
148
  # end user has configured
149
149
  return if kafka.key?(:'client.id')
150
150
 
151
- # This mitigates an issue for multiplexing and potentially other cases when running
152
- # multiple karafka processes on one machine, where librdkafka goes into an infinite
153
- # loop when using cooperative-sticky and upscaling.
154
- #
155
- # @see https://github.com/confluentinc/librdkafka/issues/4783
156
- kafka[:'client.id'] = if kafka[:'partition.assignment.strategy'] == 'cooperative-sticky'
157
- "#{client_id}/#{Time.now.to_f}/#{SecureRandom.hex[0..9]}"
158
- else
159
- client_id
160
- end
151
+ kafka[:'client.id'] = client_id
161
152
  end
162
153
 
163
154
  # If we use static group memberships, there can be a case, where same instance id would
@@ -40,10 +40,18 @@ module Karafka
40
40
  # Karafka 0.6 we can handle multiple Kafka instances with the same process and we can
41
41
  # have same topic name across multiple consumer groups
42
42
  @id = "#{consumer_group.id}_#{@name}"
43
+ @consumer = nil
44
+ @active_assigned = false
45
+ @subscription_group_details = nil
46
+
47
+ INHERITABLE_ATTRIBUTES.each do |attribute|
48
+ instance_variable_set("@#{attribute}", nil)
49
+ end
43
50
  end
44
51
 
45
52
  INHERITABLE_ATTRIBUTES.each do |attribute|
46
- attr_writer attribute
53
+ # Defined below
54
+ attr_writer attribute unless attribute == :kafka
47
55
 
48
56
  class_eval <<~RUBY, __FILE__, __LINE__ + 1
49
57
  def #{attribute}
@@ -3,18 +3,13 @@
3
3
  module Karafka
4
4
  # Karafka consuming server class
5
5
  class Server
6
- # How long should we wait on the listeners forceful shutdown when they are stuck beyond the
7
- # shutdown timeout before forcing a bypass
8
- FORCEFUL_SHUTDOWN_WAIT = 5
9
-
10
- private_constant :FORCEFUL_SHUTDOWN_WAIT
11
-
12
6
  extend Helpers::ConfigImporter.new(
13
7
  cli_contract: %i[internal cli contract],
14
8
  activity_manager: %i[internal routing activity_manager],
15
9
  supervision_sleep: %i[internal supervision_sleep],
16
10
  shutdown_timeout: %i[shutdown_timeout],
17
11
  forceful_exit_code: %i[internal forceful_exit_code],
12
+ forceful_shutdown_wait: %i[internal forceful_shutdown_wait],
18
13
  process: %i[internal process]
19
14
  )
20
15
 
@@ -151,7 +146,7 @@ module Karafka
151
146
  error: e,
152
147
  type: 'app.forceful_stopping.error'
153
148
  )
154
- end.join(FORCEFUL_SHUTDOWN_WAIT)
149
+ end.join(forceful_shutdown_wait / 1_000.0)
155
150
 
156
151
  # We also do not forcefully terminate everything when running in the embedded mode,
157
152
  # otherwise we would overwrite the shutdown process of the process that started Karafka
@@ -57,6 +57,8 @@ module Karafka
57
57
  group.protocol.type
58
58
  group.remote.assignor
59
59
  heartbeat.interval.ms
60
+ https.ca.location
61
+ https.ca.pem
60
62
  interceptors
61
63
  internal.termination.signal
62
64
  isolation.level
@@ -73,6 +75,7 @@ module Karafka
73
75
  message.max.bytes
74
76
  metadata.broker.list
75
77
  metadata.max.age.ms
78
+ metadata.recovery.rebootstrap.trigger.ms
76
79
  metadata.recovery.strategy
77
80
  oauthbearer_token_refresh_cb
78
81
  offset.store.method
@@ -100,10 +103,25 @@ module Karafka
100
103
  sasl.kerberos.service.name
101
104
  sasl.mechanism
102
105
  sasl.mechanisms
106
+ sasl.oauthbearer.assertion.algorithm
107
+ sasl.oauthbearer.assertion.claim.aud
108
+ sasl.oauthbearer.assertion.claim.exp.seconds
109
+ sasl.oauthbearer.assertion.claim.iss
110
+ sasl.oauthbearer.assertion.claim.jti.include
111
+ sasl.oauthbearer.assertion.claim.nbf.seconds
112
+ sasl.oauthbearer.assertion.claim.sub
113
+ sasl.oauthbearer.assertion.file
114
+ sasl.oauthbearer.assertion.jwt.template.file
115
+ sasl.oauthbearer.assertion.private.key.file
116
+ sasl.oauthbearer.assertion.private.key.passphrase
117
+ sasl.oauthbearer.assertion.private.key.pem
118
+ sasl.oauthbearer.client.credentials.client.id
119
+ sasl.oauthbearer.client.credentials.client.secret
103
120
  sasl.oauthbearer.client.id
104
121
  sasl.oauthbearer.client.secret
105
122
  sasl.oauthbearer.config
106
123
  sasl.oauthbearer.extensions
124
+ sasl.oauthbearer.grant.type
107
125
  sasl.oauthbearer.method
108
126
  sasl.oauthbearer.scope
109
127
  sasl.oauthbearer.token.endpoint.url
@@ -192,6 +210,8 @@ module Karafka
192
210
  enable.ssl.certificate.verification
193
211
  enabled_events
194
212
  error_cb
213
+ https.ca.location
214
+ https.ca.pem
195
215
  interceptors
196
216
  internal.termination.signal
197
217
  linger.ms
@@ -208,6 +228,7 @@ module Karafka
208
228
  message.timeout.ms
209
229
  metadata.broker.list
210
230
  metadata.max.age.ms
231
+ metadata.recovery.rebootstrap.trigger.ms
211
232
  metadata.recovery.strategy
212
233
  msg_order_cmp
213
234
  oauthbearer_token_refresh_cb
@@ -239,10 +260,25 @@ module Karafka
239
260
  sasl.kerberos.service.name
240
261
  sasl.mechanism
241
262
  sasl.mechanisms
263
+ sasl.oauthbearer.assertion.algorithm
264
+ sasl.oauthbearer.assertion.claim.aud
265
+ sasl.oauthbearer.assertion.claim.exp.seconds
266
+ sasl.oauthbearer.assertion.claim.iss
267
+ sasl.oauthbearer.assertion.claim.jti.include
268
+ sasl.oauthbearer.assertion.claim.nbf.seconds
269
+ sasl.oauthbearer.assertion.claim.sub
270
+ sasl.oauthbearer.assertion.file
271
+ sasl.oauthbearer.assertion.jwt.template.file
272
+ sasl.oauthbearer.assertion.private.key.file
273
+ sasl.oauthbearer.assertion.private.key.passphrase
274
+ sasl.oauthbearer.assertion.private.key.pem
275
+ sasl.oauthbearer.client.credentials.client.id
276
+ sasl.oauthbearer.client.credentials.client.secret
242
277
  sasl.oauthbearer.client.id
243
278
  sasl.oauthbearer.client.secret
244
279
  sasl.oauthbearer.config
245
280
  sasl.oauthbearer.extensions
281
+ sasl.oauthbearer.grant.type
246
282
  sasl.oauthbearer.method
247
283
  sasl.oauthbearer.scope
248
284
  sasl.oauthbearer.token.endpoint.url
@@ -29,8 +29,8 @@ module Karafka
29
29
  setting :entity, default: ''
30
30
  end
31
31
 
32
- # option client_id [String] kafka client_id - used to provide
33
- # default Kafka groups namespaces and identify that app in kafka
32
+ # option client_id [String] kafka client_id - used to uniquely identify given client instance
33
+ # Used only for logging.
34
34
  setting :client_id, default: 'karafka'
35
35
  # option logger [Instance] logger that we want to use
36
36
  setting :logger, default: ::Karafka::Instrumentation::Logger.new
@@ -177,16 +177,15 @@ module Karafka
177
177
  setting :supervision_sleep, default: 0.1
178
178
  # What system exit code should we use when we terminated forcefully
179
179
  setting :forceful_exit_code, default: 2
180
+ # How long should we wait on the listeners forceful shutdown when they are stuck beyond the
181
+ # shutdown timeout before forcing a bypass
182
+ setting :forceful_shutdown_wait, default: 5_000
180
183
 
181
184
  setting :swarm do
182
185
  # Manager for swarm nodes control
183
186
  setting :manager, default: Swarm::Manager.new
184
187
  # Exit code we exit an orphaned child with to indicate something went wrong
185
188
  setting :orphaned_exit_code, default: 3
186
- # syscall number for https://man7.org/linux/man-pages/man2/pidfd_open.2.html
187
- setting :pidfd_open_syscall, default: 434
188
- # syscall number for https://man7.org/linux/man-pages/man2/pidfd_send_signal.2.html
189
- setting :pidfd_signal_syscall, default: 424
190
189
  # How often (in ms) should we control our nodes
191
190
  # This is maximum time after which we will check. This can happen more often in case of
192
191
  # system events.
@@ -208,7 +207,7 @@ module Karafka
208
207
  setting :cli do
209
208
  # option contract [Object] cli setup validation contract (in the context of options and
210
209
  # topics)
211
- setting :contract, default: Contracts::ServerCliOptions.new
210
+ setting :contract, default: ::Karafka::Cli::Contracts::Server.new
212
211
  end
213
212
 
214
213
  setting :routing do