karafka 2.5.2 → 2.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +18 -0
  3. data/.yard-lint.yml +174 -0
  4. data/CHANGELOG.md +6 -0
  5. data/Gemfile +1 -0
  6. data/Gemfile.lock +24 -19
  7. data/examples/payloads/json/sample_set_03/event_type_1.json +1 -1
  8. data/examples/payloads/json/sample_set_03/event_type_2.json +1 -1
  9. data/examples/payloads/json/sample_set_03/event_type_3.json +1 -1
  10. data/karafka.gemspec +2 -2
  11. data/lib/active_job/queue_adapters/karafka_adapter.rb +2 -2
  12. data/lib/karafka/active_job/consumer.rb +2 -2
  13. data/lib/karafka/active_job/current_attributes.rb +2 -2
  14. data/lib/karafka/active_job/deserializer.rb +1 -1
  15. data/lib/karafka/active_job/dispatcher.rb +2 -2
  16. data/lib/karafka/admin/configs/resource.rb +7 -1
  17. data/lib/karafka/admin/consumer_groups.rb +6 -8
  18. data/lib/karafka/admin/topics.rb +5 -4
  19. data/lib/karafka/admin.rb +10 -10
  20. data/lib/karafka/app.rb +3 -3
  21. data/lib/karafka/base_consumer.rb +1 -1
  22. data/lib/karafka/cli/base.rb +1 -1
  23. data/lib/karafka/cli/console.rb +1 -1
  24. data/lib/karafka/cli/contracts/server.rb +1 -1
  25. data/lib/karafka/cli/help.rb +1 -1
  26. data/lib/karafka/cli/install.rb +2 -1
  27. data/lib/karafka/cli/server.rb +1 -1
  28. data/lib/karafka/cli/swarm.rb +1 -1
  29. data/lib/karafka/connection/client.rb +19 -18
  30. data/lib/karafka/connection/manager.rb +1 -0
  31. data/lib/karafka/connection/proxy.rb +1 -1
  32. data/lib/karafka/connection/rebalance_manager.rb +1 -1
  33. data/lib/karafka/connection/status.rb +1 -0
  34. data/lib/karafka/constraints.rb +1 -1
  35. data/lib/karafka/contracts/base.rb +1 -1
  36. data/lib/karafka/deserializers/payload.rb +1 -1
  37. data/lib/karafka/helpers/async.rb +1 -1
  38. data/lib/karafka/helpers/config_importer.rb +3 -3
  39. data/lib/karafka/helpers/multi_delegator.rb +3 -0
  40. data/lib/karafka/instrumentation/assignments_tracker.rb +2 -1
  41. data/lib/karafka/instrumentation/callbacks/error.rb +2 -2
  42. data/lib/karafka/instrumentation/callbacks/statistics.rb +3 -3
  43. data/lib/karafka/instrumentation/logger.rb +6 -6
  44. data/lib/karafka/instrumentation/monitor.rb +2 -2
  45. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +1 -1
  46. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +1 -1
  47. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +2 -2
  48. data/lib/karafka/instrumentation/vendors/kubernetes/base_listener.rb +1 -1
  49. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +3 -15
  50. data/lib/karafka/messages/builders/batch_metadata.rb +1 -1
  51. data/lib/karafka/pro/active_job/consumer.rb +2 -2
  52. data/lib/karafka/pro/active_job/dispatcher.rb +3 -3
  53. data/lib/karafka/pro/cleaner.rb +3 -3
  54. data/lib/karafka/pro/cli/contracts/server.rb +1 -1
  55. data/lib/karafka/pro/cli/parallel_segments/base.rb +4 -3
  56. data/lib/karafka/pro/cli/parallel_segments/collapse.rb +1 -1
  57. data/lib/karafka/pro/cli/parallel_segments/distribute.rb +1 -1
  58. data/lib/karafka/pro/cli/parallel_segments.rb +1 -1
  59. data/lib/karafka/pro/connection/manager.rb +1 -2
  60. data/lib/karafka/pro/connection/multiplexing/listener.rb +1 -0
  61. data/lib/karafka/pro/contracts/base.rb +1 -1
  62. data/lib/karafka/pro/encryption/cipher.rb +3 -2
  63. data/lib/karafka/pro/encryption/contracts/config.rb +1 -1
  64. data/lib/karafka/pro/encryption/messages/parser.rb +1 -1
  65. data/lib/karafka/pro/encryption/setup/config.rb +1 -1
  66. data/lib/karafka/pro/iterator/tpl_builder.rb +1 -1
  67. data/lib/karafka/pro/iterator.rb +1 -1
  68. data/lib/karafka/pro/loader.rb +1 -1
  69. data/lib/karafka/pro/processing/coordinator.rb +1 -1
  70. data/lib/karafka/pro/processing/filters/base.rb +1 -0
  71. data/lib/karafka/pro/processing/filters/delayer.rb +1 -1
  72. data/lib/karafka/pro/processing/filters/expirer.rb +1 -1
  73. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +1 -1
  74. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +1 -1
  75. data/lib/karafka/pro/processing/jobs/eofed_non_blocking.rb +1 -1
  76. data/lib/karafka/pro/processing/jobs/periodic.rb +1 -1
  77. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +1 -1
  78. data/lib/karafka/pro/processing/jobs_builder.rb +1 -1
  79. data/lib/karafka/pro/processing/offset_metadata/fetcher.rb +1 -0
  80. data/lib/karafka/pro/processing/partitioner.rb +1 -1
  81. data/lib/karafka/pro/processing/strategies/base.rb +1 -1
  82. data/lib/karafka/pro/processing/strategies/default.rb +2 -2
  83. data/lib/karafka/pro/processing/strategy_selector.rb +1 -0
  84. data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb +4 -2
  85. data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb +4 -2
  86. data/lib/karafka/pro/recurring_tasks/consumer.rb +3 -2
  87. data/lib/karafka/pro/recurring_tasks/contracts/config.rb +2 -2
  88. data/lib/karafka/pro/recurring_tasks/contracts/task.rb +1 -1
  89. data/lib/karafka/pro/recurring_tasks/deserializer.rb +1 -1
  90. data/lib/karafka/pro/recurring_tasks/dispatcher.rb +1 -1
  91. data/lib/karafka/pro/recurring_tasks/executor.rb +2 -1
  92. data/lib/karafka/pro/recurring_tasks/schedule.rb +5 -2
  93. data/lib/karafka/pro/recurring_tasks/serializer.rb +6 -5
  94. data/lib/karafka/pro/recurring_tasks/setup/config.rb +2 -2
  95. data/lib/karafka/pro/recurring_tasks/task.rb +1 -1
  96. data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +3 -0
  97. data/lib/karafka/pro/routing/features/multiplexing/subscription_groups_builder.rb +1 -1
  98. data/lib/karafka/pro/routing/features/multiplexing.rb +5 -5
  99. data/lib/karafka/pro/routing/features/offset_metadata.rb +4 -4
  100. data/lib/karafka/pro/routing/features/parallel_segments/builder.rb +1 -1
  101. data/lib/karafka/pro/routing/features/patterns/patterns.rb +1 -1
  102. data/lib/karafka/pro/routing/features/periodic_job/topic.rb +1 -1
  103. data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +1 -1
  104. data/lib/karafka/pro/routing/features/swarm.rb +1 -1
  105. data/lib/karafka/pro/routing/features/throttling/topic.rb +3 -1
  106. data/lib/karafka/pro/scheduled_messages/consumer.rb +1 -1
  107. data/lib/karafka/pro/scheduled_messages/contracts/config.rb +2 -2
  108. data/lib/karafka/pro/scheduled_messages/contracts/message.rb +1 -1
  109. data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +3 -2
  110. data/lib/karafka/pro/scheduled_messages/day.rb +1 -0
  111. data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +1 -1
  112. data/lib/karafka/pro/scheduled_messages/deserializers/payload.rb +1 -1
  113. data/lib/karafka/pro/scheduled_messages/max_epoch.rb +1 -0
  114. data/lib/karafka/pro/scheduled_messages/proxy.rb +1 -1
  115. data/lib/karafka/pro/scheduled_messages/serializer.rb +3 -3
  116. data/lib/karafka/pro/scheduled_messages/setup/config.rb +2 -2
  117. data/lib/karafka/pro/scheduled_messages/state.rb +1 -0
  118. data/lib/karafka/pro/scheduled_messages/tracker.rb +1 -0
  119. data/lib/karafka/process.rb +4 -4
  120. data/lib/karafka/processing/executor.rb +1 -1
  121. data/lib/karafka/processing/inline_insights/tracker.rb +1 -0
  122. data/lib/karafka/processing/jobs_queue.rb +1 -1
  123. data/lib/karafka/processing/result.rb +1 -0
  124. data/lib/karafka/processing/strategy_selector.rb +1 -0
  125. data/lib/karafka/routing/activity_manager.rb +1 -0
  126. data/lib/karafka/routing/builder.rb +3 -1
  127. data/lib/karafka/routing/contracts/consumer_group.rb +3 -2
  128. data/lib/karafka/routing/contracts/topic.rb +5 -2
  129. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +1 -1
  130. data/lib/karafka/routing/features/declaratives/topic.rb +5 -2
  131. data/lib/karafka/routing/features/deserializers/topic.rb +3 -3
  132. data/lib/karafka/routing/features/inline_insights.rb +5 -5
  133. data/lib/karafka/routing/router.rb +1 -1
  134. data/lib/karafka/routing/subscription_group.rb +1 -1
  135. data/lib/karafka/routing/subscription_groups_builder.rb +1 -0
  136. data/lib/karafka/routing/topic.rb +3 -3
  137. data/lib/karafka/server.rb +1 -1
  138. data/lib/karafka/setup/attributes_map.rb +4 -2
  139. data/lib/karafka/setup/config.rb +21 -10
  140. data/lib/karafka/setup/config_proxy.rb +209 -0
  141. data/lib/karafka/setup/contracts/config.rb +1 -1
  142. data/lib/karafka/swarm/liveness_listener.rb +1 -0
  143. data/lib/karafka/swarm/manager.rb +7 -6
  144. data/lib/karafka/swarm/node.rb +1 -1
  145. data/lib/karafka/swarm/supervisor.rb +1 -0
  146. data/lib/karafka/time_trackers/base.rb +1 -1
  147. data/lib/karafka/version.rb +1 -1
  148. data/lib/karafka.rb +2 -2
  149. metadata +7 -5
@@ -9,7 +9,7 @@ module Karafka
       # Recurring Tasks related contracts
       module Contracts
         # Makes sure, all the expected config is defined as it should be
-        class Config < ::Karafka::Contracts::Base
+        class Config < Karafka::Contracts::Base
           configure do |config|
             config.error_messages = YAML.safe_load_file(
               File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
@@ -17,7 +17,7 @@ module Karafka
           end

           nested(:scheduled_messages) do
-            required(:consumer_class) { |val| val < ::Karafka::BaseConsumer }
+            required(:consumer_class) { |val| val < Karafka::BaseConsumer }

             # Do not allow to run more often than every second
             required(:interval) { |val| val.is_a?(Integer) && val >= 1_000 }
@@ -11,7 +11,7 @@ module Karafka
         #
         # Our envelope always needs to comply with this format, otherwise we won't have enough
         # details to be able to dispatch the message
-        class Message < ::Karafka::Contracts::Base
+        class Message < Karafka::Contracts::Base
           configure do |config|
             config.error_messages = YAML.safe_load_file(
               File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
@@ -14,6 +14,7 @@ module Karafka
       # up to 1.5 second, thus it is acceptable. Please ping me if you encounter performance
       # issues with this naive implementation so it can be improved.
       class DailyBuffer
+        # Initializes the daily buffer with empty accumulator
         def initialize
           @accu = {}
         end
@@ -69,8 +70,8 @@ module Karafka
           selected.each { |_, message| yield(message) }
         end

-        # Removes given key from the accumulator
-        # @param key [String] key to remove
+        # Removes the schedule entry identified by the given key from the daily buffer
+        # @param key [String]
         def delete(key)
           @accu.delete(key)
         end
@@ -18,6 +18,7 @@ module Karafka
        # @return [Integer] utc timestamp when this day starts. Equal to 00:00:00
        attr_reader :starts_at

+       # Initializes a day representation for the current UTC day
        def initialize
          @created_at = Time.now.to_i

@@ -16,7 +16,7 @@ module Karafka

          private_constant :WORKABLE_TYPES

-         # @param metadata [Karafka::aMessages::Metadata]
+         # @param metadata [Karafka::Messages::Metadata]
          # @return [Hash] headers
          def call(metadata)
            raw_headers = metadata.raw_headers
@@ -15,7 +15,7 @@ module Karafka
          # @param message [::Karafka::Messages::Message]
          # @return [Hash] deserialized data
          def call(message)
-           ::JSON.parse(
+           JSON.parse(
              Zlib::Inflate.inflate(message.raw_payload),
              symbolize_names: true
            )
@@ -23,6 +23,7 @@ module Karafka
        # @return [Integer] max epoch recorded
        attr_reader :to_i

+       # Initializes max epoch tracker with -1 as starting value
        def initialize
          @max = -1
          @to_i = @max
@@ -21,7 +21,7 @@ module Karafka
      module Proxy
        # General WaterDrop message contract. Before we envelop a message, we need to be certain
        # it is correct, hence we use this contract.
-       MSG_CONTRACT = ::WaterDrop::Contracts::Message.new(
+       MSG_CONTRACT = WaterDrop::Contracts::Message.new(
          # Payload size is a subject to the target producer dispatch validation, so we set it
          # to 100MB basically to ignore it here.
          max_payload_size: 104_857_600
@@ -9,7 +9,7 @@ module Karafka
      # Serializers used to build payloads (if applicable) for dispatch
      # @note We only deal with states payload. Other payloads are not ours but end users.
      class Serializer
-       include ::Karafka::Core::Helpers::Time
+       include Karafka::Core::Helpers::Time

        # @param tracker [Tracker] tracker based on which we build the state
        # @return [String] compressed payload with the state details
@@ -32,9 +32,9 @@ module Karafka
          hash.to_json
        end

-       # Compresses the provided data
+       # Compresses the provided data using Zlib deflate algorithm
        #
-       # @param data [String] data to compress
+       # @param data [String]
        # @return [String] compressed data
        def compress(data)
          Zlib::Deflate.deflate(data)
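The `compress` helper above pairs with the scheduled messages payload deserializer shown earlier (`Zlib::Inflate.inflate`). A minimal standalone round-trip of that scheme, using only the Ruby stdlib and an illustrative payload:

```ruby
require 'json'
require 'zlib'

# State payload round-trip as used by the serializer/deserializer pair:
# JSON -> deflate on dispatch, inflate -> JSON on consumption
payload = { schedules: 2, state: 'fresh' }.to_json
compressed = Zlib::Deflate.deflate(payload)
restored = JSON.parse(Zlib::Inflate.inflate(compressed), symbolize_names: true)
puts restored.inspect # => {:schedules=>2, :state=>"fresh"}
```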
@@ -10,7 +10,7 @@ module Karafka
      module Setup
        # Config for recurring tasks
        class Config
-         extend ::Karafka::Core::Configurable
+         extend Karafka::Core::Configurable

          setting(:consumer_class, default: Consumer)
          setting(:group_id, default: 'karafka_scheduled_messages')
@@ -27,7 +27,7 @@ module Karafka
          # Producer to use. By default uses default Karafka producer.
          setting(
            :producer,
-           constructor: -> { ::Karafka.producer },
+           constructor: -> { Karafka.producer },
            lazy: true
          )

@@ -27,6 +27,7 @@ module Karafka

        private_constant :STATES

+       # Initializes the state as fresh
        def initialize
          @state = 'fresh'
        end
@@ -18,6 +18,7 @@ module Karafka
        # @return [Integer] time epoch when this tracker was started
        attr_reader :started_at

+       # Initializes the tracker with empty statistics
        def initialize
          @daily = Hash.new { |h, k| h[k] = 0 }
          @started_at = Time.now.to_i
@@ -5,7 +5,7 @@ module Karafka
  # @note There might be only one process - this class is a singleton
  class Process
    # Allow for process tagging for instrumentation
-   extend ::Karafka::Core::Taggable
+   extend Karafka::Core::Taggable

    # Signal types that we handle
    HANDLED_SIGNALS = %i[
@@ -79,12 +79,12 @@ module Karafka
    private

    # Traps a single signal and performs callbacks (if any) or just ignores this signal
-   # @param [Symbol] signal type that we want to catch
+   # @param signal [Symbol] signal type that we want to catch
    # @note Since we do a lot of threading and queuing, we don't want to handle signals from the
    #   trap context s some things may not work there as expected, that is why we spawn a separate
    #   thread to handle the signals process
    def trap_signal(signal)
-     previous_handler = ::Signal.trap(signal) do
+     previous_handler = Signal.trap(signal) do
        Thread.new do
          notice_signal(signal)

@@ -96,7 +96,7 @@ module Karafka
    end

    # Informs monitoring about trapped signal
-   # @param [Symbol] signal type that we received
+   # @param signal [Symbol] signal type that we received
    def notice_signal(signal)
      Karafka.monitor.instrument('process.notice_signal', caller: self, signal: signal)
    end
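These two hunks touch Karafka's signal handling, which hands the real work to a fresh thread because Ruby's trap context restricts blocking primitives such as mutexes. A standalone sketch of the same pattern (not the Karafka API; chaining the previous handler is an assumption based on how such traps are commonly composed):

```ruby
# Signal.trap returns the previously installed handler, so it can be chained
previous_handler = Signal.trap('TERM') do
  # Do the actual work in a separate thread; the trap context forbids
  # operations like Mutex#synchronize
  Thread.new { warn 'TERM received, shutting down' }

  # Invoke whatever handler was installed before us, if it was a callable
  previous_handler.call if previous_handler.respond_to?(:call)
end
```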
@@ -184,7 +184,7 @@ module Karafka
        # We assign producer only when not available already. It may already be available if
        # user redefined the `#producer` method for example. This can be useful for example when
        # having a multi-cluster setup and using a totally custom producer
-       consumer.producer ||= ::Karafka::App.producer
+       consumer.producer ||= Karafka::App.producer
        # Since we have some message-less flows (idle, etc), we initialize consumer with empty
        # messages set. In production we have persistent consumers, so this is not a performance
        # overhead as this will happen only once per consumer lifetime
@@ -51,6 +51,7 @@ module Karafka
          def_delegators :instance, :find, :add, :exists?, :clear
        end

+       # Initializes the tracker with empty accumulator
        def initialize
          @accu = {}
          @mutex = Mutex.new
@@ -99,7 +99,7 @@ module Karafka
      # Marks a given job from a given group as completed. When there are no more jobs from a given
      # group to be executed, we won't wait.
      #
-     # @param [Jobs::Base] job that was completed
+     # @param job [Jobs::Base] job that was completed
      def complete(job)
        @mutex.synchronize do
          # We finish one job and if there is another, we pick it up
@@ -8,6 +8,7 @@ module Karafka
    class Result
      attr_reader :cause

+     # Initializes the result as successful with no cause
      def initialize
        @success = true
        @cause = false
@@ -13,6 +13,7 @@ module Karafka
        dead_letter_queue
      ].freeze

+     # Initializes the strategy selector and preloads all strategies
      def initialize
        # We load them once for performance reasons not to do too many lookups
        @strategies = find_all
@@ -12,6 +12,7 @@ module Karafka
        topics
      ].freeze

+     # Initializes the activity manager with empty inclusion and exclusion lists
      def initialize
        @included = Hash.new { |h, k| h[k] = [] }
        @excluded = Hash.new { |h, k| h[k] = [] }
@@ -23,6 +23,7 @@ module Karafka

      private_constant :EMPTY_DEFAULTS

+     # Initializes the routing builder with empty routes
      def initialize
        @mutex = Mutex.new
        @draws = []
@@ -138,8 +139,9 @@ module Karafka
      #   subscription group customization
      # @param subscription_group_name [String, Symbol] subscription group id. When not provided,
      #   a random uuid will be used
-     # @param args [Array] any extra arguments accepted by the subscription group builder
+     # @param args [Hash] any extra arguments accepted by the subscription group builder
      # @param block [Proc] further topics definitions
+     # @option args [String] :kafka optional kafka scope settings
      def subscription_group(
        subscription_group_name = SubscriptionGroup.id,
        **args,
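A hedged reading of the retagged docs above: extra keyword args flow through to the subscription group builder when drawing routes. The `:kafka` option name comes from the new `@option` tag only, not from verified release docs, and the consumer class and settings below are illustrative:

```ruby
KarafkaApp.routes.draw do
  # 'analytics' names the subscription group; the kafka: scope override is an
  # assumption based on the @option tag above
  subscription_group 'analytics', kafka: { 'client.id': 'analytics-sg' } do
    topic :page_views do
      consumer PageViewsConsumer # hypothetical consumer class
    end
  end
end
```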
@@ -44,14 +44,15 @@ module Karafka

        virtual do |data, errors|
          next unless errors.empty?
-         next unless ::Karafka::App.config.strict_topics_namespacing
+         next unless Karafka::App.config.strict_topics_namespacing

          names = data.fetch(:topics).map { |topic| topic[:name] }
          names_hash = names.each_with_object({}) { |n, h| h[n] = true }
          error_occured = false
+         namespace_chars = ['.', '_'].freeze
          names.each do |n|
            # Skip topic names that are not namespaced
-           next unless n.chars.find { |c| ['.', '_'].include?(c) }
+           next unless n.chars.find { |c| namespace_chars.include?(c) }

            if n.chars.include?('.')
              # Check underscore styled topic
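This hunk and the next one hoist the namespace delimiter list into a frozen local. A standalone illustration of the rule they enforce, that a topic name should not mix `.` and `_` as namespace delimiters (topic names are illustrative):

```ruby
namespace_chars = ['.', '_'].freeze

['users.events', 'users_events', 'users.events_v2'].each do |name|
  # Count how many distinct delimiter styles the name uses
  styles = name.chars.find_all { |c| namespace_chars.include?(c) }.uniq.size
  puts format('%-16s mixed namespacing: %s', name, styles > 1)
end
# users.events     mixed namespacing: false
# users_events     mixed namespacing: false
# users.events_v2  mixed namespacing: true
```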
@@ -66,10 +66,13 @@ module Karafka

        virtual do |data, errors|
          next unless errors.empty?
-         next unless ::Karafka::App.config.strict_topics_namespacing
+         next unless Karafka::App.config.strict_topics_namespacing

          value = data.fetch(:name)
-         namespacing_chars_count = value.chars.find_all { |c| ['.', '_'].include?(c) }.uniq.size
+         namespace_chars = ['.', '_'].freeze
+         namespacing_chars_count = value.chars.find_all do |c|
+           namespace_chars.include?(c)
+         end.uniq.size

          next if namespacing_chars_count <= 1

@@ -39,7 +39,7 @@ module Karafka
          next unless dead_letter_queue[:active]

          topic = dead_letter_queue[:topic]
-         topic_regexp = ::Karafka::Contracts::TOPIC_REGEXP
+         topic_regexp = Karafka::Contracts::TOPIC_REGEXP

          # When topic is set to false, it means we just want to skip dispatch on DLQ
          next if topic == false
@@ -16,9 +16,12 @@ module Karafka
        end

        # @param active [Boolean] is the topic structure management feature active
-       # @param partitions [Integer]
-       # @param replication_factor [Integer]
+       # @param partitions [Integer] number of partitions for the topic
+       # @param replication_factor [Integer] replication factor for the topic
        # @param details [Hash] extra configuration for the topic
+       # @option details [String] :retention.ms retention time in milliseconds
+       # @option details [String] :compression.type compression type
+       #   (none, gzip, snappy, lz4, zstd)
        # @return [Config] defined structure
        def config(active: true, partitions: 1, replication_factor: 1, **details)
          @declaratives ||= Config.new(
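The `@option` tags documented above map straight onto declarative topic definitions in routing. A short example consistent with that signature (topic and consumer names are illustrative):

```ruby
KarafkaApp.routes.draw do
  topic :events do
    consumer EventsConsumer # hypothetical consumer class

    # partitions/replication_factor are the documented keywords; the string
    # keys land in the details hash, per the @option tags above
    config(
      partitions: 6,
      replication_factor: 3,
      'retention.ms': '86400000',   # 1 day
      'compression.type': 'zstd'
    )
  end
end
```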
@@ -22,9 +22,9 @@ module Karafka
      # @param key [Object] deserializer for the message key
      # @param headers [Object] deserializer for the message headers
      def deserializers(
-       payload: ::Karafka::Deserializers::Payload.new,
-       key: ::Karafka::Deserializers::Key.new,
-       headers: ::Karafka::Deserializers::Headers.new
+       payload: Karafka::Deserializers::Payload.new,
+       key: Karafka::Deserializers::Key.new,
+       headers: Karafka::Deserializers::Headers.new
      )
        @deserializers ||= Config.new(
          active: true,
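Any of the three defaults above can be swapped per topic. A minimal sketch with a custom payload deserializer (the class and topic names are illustrative; only the `#call(message)` shape comes from the hunks in this diff):

```ruby
# A deserializer only needs to respond to #call with the message
class CsvDeserializer
  # @param message [Karafka::Messages::Message]
  # @return [Array<String>] fields of the raw Kafka value
  def call(message)
    message.raw_payload.split(',')
  end
end

topic :audit_rows do
  consumer AuditRowsConsumer # hypothetical consumer class
  deserializers(payload: CsvDeserializer.new)
end
```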
@@ -12,11 +12,11 @@ module Karafka
      #
      # @param _config [Karafka::Core::Configurable::Node] app config
      def post_setup(_config)
-       ::Karafka::App.monitor.subscribe('app.running') do
+       Karafka::App.monitor.subscribe('app.running') do
          # Do not activate tracking of statistics if none of our active topics uses it
          # This prevents us from tracking metrics when user just runs a subset of topics
          # in a given process and none of those actually utilizes this feature
-         next unless ::Karafka::App
+         next unless Karafka::App
            .subscription_groups
            .values
            .flat_map(&:itself)
@@ -25,11 +25,11 @@ module Karafka
            .any?(&:inline_insights?)

          # Initialize the tracker prior to becoming multi-threaded
-         ::Karafka::Processing::InlineInsights::Tracker.instance
+         Karafka::Processing::InlineInsights::Tracker.instance

          # Subscribe to the statistics reports and collect them
-         ::Karafka.monitor.subscribe(
-           ::Karafka::Processing::InlineInsights::Listener.new
+         Karafka.monitor.subscribe(
+           Karafka::Processing::InlineInsights::Listener.new
          )
        end
      end
@@ -8,7 +8,7 @@ module Karafka
    # structure so all the routes are being stored in a single level array
    module Router
      # Finds first reference of a given topic based on provided lookup attribute
-     # @param lookup [Hash<Symbol, String>] hash with attribute - value key pairs
+     # @param lookup [Hash{Symbol => String}] hash with attribute - value key pairs
      # @return [Karafka::Routing::Topic, nil] proper route details or nil if not found
      def find_by(lookup)
        App.consumer_groups.each do |consumer_group|
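The retagged `Hash{Symbol => String}` is standard YARD notation for a hash with symbol keys and string values. Hedged usage of that lookup shape (assuming the module method is reachable as shown; the topic name is illustrative):

```ruby
# Look up the first route whose attribute matches the given value
topic = Karafka::Routing::Router.find_by(name: 'users_events')
topic&.consumer # nil-safe, since find_by may return nil
```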
@@ -30,7 +30,7 @@ module Karafka
        @group_counter ||= 0
        @group_counter += 1

-       ::Digest::SHA256.hexdigest(
+       Digest::SHA256.hexdigest(
          @group_counter.to_s
        )[0..11]
      end
@@ -24,6 +24,7 @@ module Karafka

      private_constant :DISTRIBUTION_KEYS

+     # Initializes the subscription groups builder
      def initialize
        @position = -1
      end
@@ -29,7 +29,7 @@ module Karafka

      private_constant :INHERITABLE_ATTRIBUTES

-     # @param [String, Symbol] name of a topic on which we want to listen
+     # @param name [String, Symbol] name of a topic on which we want to listen
      # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
      def initialize(name, consumer_group)
        @name = name.to_s
@@ -92,7 +92,7 @@ module Karafka
      # consumer class is defined with a name. It won't support code reload for anonymous
      # consumer classes, but this is an edge case
      begin
-       ::Object.const_get(@consumer.to_s)
+       Object.const_get(@consumer.to_s)
      rescue NameError
        # It will only fail if the in case of anonymous classes
        @consumer
@@ -139,7 +139,7 @@ module Karafka
        [attribute, public_send(attribute)]
      end

-     Hash[map].merge!(
+     map.to_h.merge!(
        id: id,
        name: name,
        active: active?,
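`Hash[pairs]` and `pairs.to_h` are equivalent for an array of two-element arrays; `#to_h` is simply the modern spelling (and the one RuboCop's Style/HashConversion cop prefers):

```ruby
pairs = [[:id, 1], [:name, 'events']]

Hash[pairs] # => {:id=>1, :name=>"events"}
pairs.to_h  # => {:id=>1, :name=>"events"}
```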
@@ -174,7 +174,7 @@ module Karafka
      # This ensures that if users have configured the default pool, it is closed correctly
      #
      # Custom pools need to be closed by users themselves
-     ::WaterDrop::ConnectionPool.close
+     WaterDrop::ConnectionPool.close

      Karafka::App.terminate!
    end
@@ -122,6 +122,7 @@ module Karafka
        sasl.oauthbearer.config
        sasl.oauthbearer.extensions
        sasl.oauthbearer.grant.type
+       sasl.oauthbearer.metadata.authentication.type
        sasl.oauthbearer.method
        sasl.oauthbearer.scope
        sasl.oauthbearer.token.endpoint.url
@@ -279,6 +280,7 @@ module Karafka
        sasl.oauthbearer.config
        sasl.oauthbearer.extensions
        sasl.oauthbearer.grant.type
+       sasl.oauthbearer.metadata.authentication.type
        sasl.oauthbearer.method
        sasl.oauthbearer.scope
        sasl.oauthbearer.token.endpoint.url
@@ -357,7 +359,7 @@ module Karafka
    end

    # @private
-   # @return [Hash<Symbol, Array<Symbol>>] hash with consumer and producer attributes list
+   # @return [Hash{Symbol => Array<Symbol>}] hash with consumer and producer attributes list
    #   that is sorted.
    # @note This method should not be used directly. It is only used to generate appropriate
    #   options list in case it would change
@@ -367,7 +369,7 @@ module Karafka

      attributes = { consumer: Set.new, producer: Set.new }

-     ::URI.parse(SOURCE).open.readlines.each do |line|
+     URI.parse(SOURCE).open.readlines.each do |line|
        next unless line.include?('|')

        attribute, attribute_type = line.split('|').map(&:strip)
@@ -12,7 +12,7 @@ module Karafka
    # enough and will still keep the code simple
    # @see Karafka::Setup::Configurators::Base for more details about configurators api
    class Config
-     extend ::Karafka::Core::Configurable
+     extend Karafka::Core::Configurable

      # Available settings

@@ -33,9 +33,9 @@ module Karafka
      # Used only for logging.
      setting :client_id, default: 'karafka'
      # option logger [Instance] logger that we want to use
-     setting :logger, default: ::Karafka::Instrumentation::Logger.new
+     setting :logger, default: Karafka::Instrumentation::Logger.new
      # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
-     setting :monitor, default: ::Karafka::Instrumentation::Monitor.new
+     setting :monitor, default: Karafka::Instrumentation::Monitor.new
      # option [Boolean] should we reload consumers with each incoming batch thus effectively
      # supporting code reload (if someone reloads code) or should we keep the persistence
      setting :consumer_persistence, default: true
@@ -211,7 +211,7 @@ module Karafka
    setting :cli do
      # option contract [Object] cli setup validation contract (in the context of options and
      #   topics)
-     setting :contract, default: ::Karafka::Cli::Contracts::Server.new
+     setting :contract, default: Karafka::Cli::Contracts::Server.new
    end

    setting :routing do
@@ -345,7 +345,7 @@ module Karafka
        # (incoming). Can be replaced with a custom implementation for formats like Avro,
        # Protobuf, etc. This is a global setting because Rails serializes jobs before
        # Karafka receives them, so we need a consistent approach across all ActiveJob topics.
-       setting :deserializer, default: ::Karafka::ActiveJob::Deserializer.new
+       setting :deserializer, default: Karafka::ActiveJob::Deserializer.new
      end
    end

@@ -415,17 +415,21 @@ module Karafka
        # of the pro defaults with custom components
        Pro::Loader.pre_setup_all(config) if Karafka.pro?

-       configure(&)
+       # Wrap config in a proxy that intercepts producer block configuration
+       proxy = ConfigProxy.new(config)
+       # We need to check for the block presence here because user can just run setup without
+       # any block given
+       configure { yield(proxy) if block_given? }

        Contracts::Config.new.validate!(
          config.to_h,
          scope: %w[config]
        )

-       configure_components
+       configure_components(proxy)

        # Refreshes the references that are cached that might have been changed by the config
-       ::Karafka.refresh!
+       Karafka.refresh!

        # Post-setup configure all routing features that would need this
        Routing::Features::Base.post_setup_all(config)
@@ -443,14 +447,16 @@ module Karafka
      private

      # Sets up all the components that are based on the user configuration
+     # @param config_proxy [ConfigProxy] the configuration proxy containing deferred setup
+     #   blocks
      # @note At the moment it is only WaterDrop
-     def configure_components
+     def configure_components(config_proxy)
        oauth_listener = config.oauth.token_provider_listener
        # We need to subscribe the oauth listener here because we want it to be ready before
        # any consumer/admin runs
        Karafka::App.monitor.subscribe(oauth_listener) if oauth_listener

-       config.producer ||= ::WaterDrop::Producer.new do |producer_config|
+       config.producer ||= WaterDrop::Producer.new do |producer_config|
          # In some cases WaterDrop updates the config and we don't want our consumer config to
          # be polluted by those updates, that's why we copy
          producer_kafka = AttributesMap.producer(config.kafka.dup)
@@ -463,6 +469,11 @@ module Karafka
          producer_config.oauth.token_provider_listener = oauth_listener
          producer_config.logger = config.logger
        end
+
+       # Execute user's producer configuration block
+       # This happens after the default producer setup, allowing users to customize settings
+       # If no block was provided during setup, this will be an empty lambda that does nothing
+       config_proxy.producer_initialization_block.call(config.producer.config)
      end
    end
  end
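The new `ConfigProxy` (see `data/lib/karafka/setup/config_proxy.rb` in the file list) plus the last two hunks are the behavioral core of 2.5.3: the setup block now receives a proxy that captures a producer configuration block and replays it after the default WaterDrop producer is built. A hedged sketch of the resulting setup flow, inferred from this diff rather than from release docs (`max_wait_timeout` is a WaterDrop setting used purely for illustration):

```ruby
class KarafkaApp < Karafka::App
  setup do |config|
    config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }
    config.client_id = 'example_app'

    # Captured by ConfigProxy as producer_initialization_block and invoked in
    # configure_components once the default producer exists
    config.producer do |producer_config|
      producer_config.max_wait_timeout = 30_000
    end
  end
end
```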