karafka 2.5.2 → 2.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149) hide show
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +18 -0
  3. data/.yard-lint.yml +174 -0
  4. data/CHANGELOG.md +6 -0
  5. data/Gemfile +1 -0
  6. data/Gemfile.lock +24 -19
  7. data/examples/payloads/json/sample_set_03/event_type_1.json +1 -1
  8. data/examples/payloads/json/sample_set_03/event_type_2.json +1 -1
  9. data/examples/payloads/json/sample_set_03/event_type_3.json +1 -1
  10. data/karafka.gemspec +2 -2
  11. data/lib/active_job/queue_adapters/karafka_adapter.rb +2 -2
  12. data/lib/karafka/active_job/consumer.rb +2 -2
  13. data/lib/karafka/active_job/current_attributes.rb +2 -2
  14. data/lib/karafka/active_job/deserializer.rb +1 -1
  15. data/lib/karafka/active_job/dispatcher.rb +2 -2
  16. data/lib/karafka/admin/configs/resource.rb +7 -1
  17. data/lib/karafka/admin/consumer_groups.rb +6 -8
  18. data/lib/karafka/admin/topics.rb +5 -4
  19. data/lib/karafka/admin.rb +10 -10
  20. data/lib/karafka/app.rb +3 -3
  21. data/lib/karafka/base_consumer.rb +1 -1
  22. data/lib/karafka/cli/base.rb +1 -1
  23. data/lib/karafka/cli/console.rb +1 -1
  24. data/lib/karafka/cli/contracts/server.rb +1 -1
  25. data/lib/karafka/cli/help.rb +1 -1
  26. data/lib/karafka/cli/install.rb +2 -1
  27. data/lib/karafka/cli/server.rb +1 -1
  28. data/lib/karafka/cli/swarm.rb +1 -1
  29. data/lib/karafka/connection/client.rb +19 -18
  30. data/lib/karafka/connection/manager.rb +1 -0
  31. data/lib/karafka/connection/proxy.rb +1 -1
  32. data/lib/karafka/connection/rebalance_manager.rb +1 -1
  33. data/lib/karafka/connection/status.rb +1 -0
  34. data/lib/karafka/constraints.rb +1 -1
  35. data/lib/karafka/contracts/base.rb +1 -1
  36. data/lib/karafka/deserializers/payload.rb +1 -1
  37. data/lib/karafka/helpers/async.rb +1 -1
  38. data/lib/karafka/helpers/config_importer.rb +3 -3
  39. data/lib/karafka/helpers/multi_delegator.rb +3 -0
  40. data/lib/karafka/instrumentation/assignments_tracker.rb +2 -1
  41. data/lib/karafka/instrumentation/callbacks/error.rb +2 -2
  42. data/lib/karafka/instrumentation/callbacks/statistics.rb +3 -3
  43. data/lib/karafka/instrumentation/logger.rb +6 -6
  44. data/lib/karafka/instrumentation/monitor.rb +2 -2
  45. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +1 -1
  46. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +1 -1
  47. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +2 -2
  48. data/lib/karafka/instrumentation/vendors/kubernetes/base_listener.rb +1 -1
  49. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +3 -15
  50. data/lib/karafka/messages/builders/batch_metadata.rb +1 -1
  51. data/lib/karafka/pro/active_job/consumer.rb +2 -2
  52. data/lib/karafka/pro/active_job/dispatcher.rb +3 -3
  53. data/lib/karafka/pro/cleaner.rb +3 -3
  54. data/lib/karafka/pro/cli/contracts/server.rb +1 -1
  55. data/lib/karafka/pro/cli/parallel_segments/base.rb +4 -3
  56. data/lib/karafka/pro/cli/parallel_segments/collapse.rb +1 -1
  57. data/lib/karafka/pro/cli/parallel_segments/distribute.rb +1 -1
  58. data/lib/karafka/pro/cli/parallel_segments.rb +1 -1
  59. data/lib/karafka/pro/connection/manager.rb +1 -2
  60. data/lib/karafka/pro/connection/multiplexing/listener.rb +1 -0
  61. data/lib/karafka/pro/contracts/base.rb +1 -1
  62. data/lib/karafka/pro/encryption/cipher.rb +3 -2
  63. data/lib/karafka/pro/encryption/contracts/config.rb +1 -1
  64. data/lib/karafka/pro/encryption/messages/parser.rb +1 -1
  65. data/lib/karafka/pro/encryption/setup/config.rb +1 -1
  66. data/lib/karafka/pro/iterator/tpl_builder.rb +1 -1
  67. data/lib/karafka/pro/iterator.rb +1 -1
  68. data/lib/karafka/pro/loader.rb +1 -1
  69. data/lib/karafka/pro/processing/coordinator.rb +1 -1
  70. data/lib/karafka/pro/processing/filters/base.rb +1 -0
  71. data/lib/karafka/pro/processing/filters/delayer.rb +1 -1
  72. data/lib/karafka/pro/processing/filters/expirer.rb +1 -1
  73. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +1 -1
  74. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +1 -1
  75. data/lib/karafka/pro/processing/jobs/eofed_non_blocking.rb +1 -1
  76. data/lib/karafka/pro/processing/jobs/periodic.rb +1 -1
  77. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +1 -1
  78. data/lib/karafka/pro/processing/jobs_builder.rb +1 -1
  79. data/lib/karafka/pro/processing/offset_metadata/fetcher.rb +1 -0
  80. data/lib/karafka/pro/processing/partitioner.rb +1 -1
  81. data/lib/karafka/pro/processing/strategies/base.rb +1 -1
  82. data/lib/karafka/pro/processing/strategies/default.rb +2 -2
  83. data/lib/karafka/pro/processing/strategy_selector.rb +1 -0
  84. data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb +4 -2
  85. data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb +4 -2
  86. data/lib/karafka/pro/recurring_tasks/consumer.rb +3 -2
  87. data/lib/karafka/pro/recurring_tasks/contracts/config.rb +2 -2
  88. data/lib/karafka/pro/recurring_tasks/contracts/task.rb +1 -1
  89. data/lib/karafka/pro/recurring_tasks/deserializer.rb +1 -1
  90. data/lib/karafka/pro/recurring_tasks/dispatcher.rb +1 -1
  91. data/lib/karafka/pro/recurring_tasks/executor.rb +2 -1
  92. data/lib/karafka/pro/recurring_tasks/schedule.rb +5 -2
  93. data/lib/karafka/pro/recurring_tasks/serializer.rb +6 -5
  94. data/lib/karafka/pro/recurring_tasks/setup/config.rb +2 -2
  95. data/lib/karafka/pro/recurring_tasks/task.rb +1 -1
  96. data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +3 -0
  97. data/lib/karafka/pro/routing/features/multiplexing/subscription_groups_builder.rb +1 -1
  98. data/lib/karafka/pro/routing/features/multiplexing.rb +5 -5
  99. data/lib/karafka/pro/routing/features/offset_metadata.rb +4 -4
  100. data/lib/karafka/pro/routing/features/parallel_segments/builder.rb +1 -1
  101. data/lib/karafka/pro/routing/features/patterns/patterns.rb +1 -1
  102. data/lib/karafka/pro/routing/features/periodic_job/topic.rb +1 -1
  103. data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +1 -1
  104. data/lib/karafka/pro/routing/features/swarm.rb +1 -1
  105. data/lib/karafka/pro/routing/features/throttling/topic.rb +3 -1
  106. data/lib/karafka/pro/scheduled_messages/consumer.rb +1 -1
  107. data/lib/karafka/pro/scheduled_messages/contracts/config.rb +2 -2
  108. data/lib/karafka/pro/scheduled_messages/contracts/message.rb +1 -1
  109. data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +3 -2
  110. data/lib/karafka/pro/scheduled_messages/day.rb +1 -0
  111. data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +1 -1
  112. data/lib/karafka/pro/scheduled_messages/deserializers/payload.rb +1 -1
  113. data/lib/karafka/pro/scheduled_messages/max_epoch.rb +1 -0
  114. data/lib/karafka/pro/scheduled_messages/proxy.rb +1 -1
  115. data/lib/karafka/pro/scheduled_messages/serializer.rb +3 -3
  116. data/lib/karafka/pro/scheduled_messages/setup/config.rb +2 -2
  117. data/lib/karafka/pro/scheduled_messages/state.rb +1 -0
  118. data/lib/karafka/pro/scheduled_messages/tracker.rb +1 -0
  119. data/lib/karafka/process.rb +4 -4
  120. data/lib/karafka/processing/executor.rb +1 -1
  121. data/lib/karafka/processing/inline_insights/tracker.rb +1 -0
  122. data/lib/karafka/processing/jobs_queue.rb +1 -1
  123. data/lib/karafka/processing/result.rb +1 -0
  124. data/lib/karafka/processing/strategy_selector.rb +1 -0
  125. data/lib/karafka/routing/activity_manager.rb +1 -0
  126. data/lib/karafka/routing/builder.rb +3 -1
  127. data/lib/karafka/routing/contracts/consumer_group.rb +3 -2
  128. data/lib/karafka/routing/contracts/topic.rb +5 -2
  129. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +1 -1
  130. data/lib/karafka/routing/features/declaratives/topic.rb +5 -2
  131. data/lib/karafka/routing/features/deserializers/topic.rb +3 -3
  132. data/lib/karafka/routing/features/inline_insights.rb +5 -5
  133. data/lib/karafka/routing/router.rb +1 -1
  134. data/lib/karafka/routing/subscription_group.rb +1 -1
  135. data/lib/karafka/routing/subscription_groups_builder.rb +1 -0
  136. data/lib/karafka/routing/topic.rb +3 -3
  137. data/lib/karafka/server.rb +1 -1
  138. data/lib/karafka/setup/attributes_map.rb +4 -2
  139. data/lib/karafka/setup/config.rb +21 -10
  140. data/lib/karafka/setup/config_proxy.rb +209 -0
  141. data/lib/karafka/setup/contracts/config.rb +1 -1
  142. data/lib/karafka/swarm/liveness_listener.rb +1 -0
  143. data/lib/karafka/swarm/manager.rb +7 -6
  144. data/lib/karafka/swarm/node.rb +1 -1
  145. data/lib/karafka/swarm/supervisor.rb +1 -0
  146. data/lib/karafka/time_trackers/base.rb +1 -1
  147. data/lib/karafka/version.rb +1 -1
  148. data/lib/karafka.rb +2 -2
  149. metadata +7 -5
@@ -0,0 +1,209 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Setup
5
+ # Configuration proxy that wraps the actual config object during the setup phase.
6
+ #
7
+ # ## Purpose
8
+ #
9
+ # This proxy exists to intercept specific configuration methods during the setup block
10
+ # execution, allowing for deferred initialization and special handling of certain
11
+ # configuration aspects without permanently modifying the config object's API.
12
+ #
13
+ # The key design principle is: **the proxy only exists during setup and doesn't pollute
14
+ # the permanent config API**. Once setup is complete, all access goes directly to the
15
+ # real config object.
16
+ #
17
+ # ## Why Use a Proxy?
18
+ #
19
+ # During Karafka setup, there's a specific order of operations:
20
+ #
21
+ # 1. User configures basic settings (kafka, client_id, etc.)
22
+ # 2. Config validation runs
23
+ # 3. Components are initialized based on finalized config
24
+ # 4. Post-setup hooks execute
25
+ #
26
+ # Some configuration needs to happen **after** user settings but **during** component
27
+ # initialization. The proxy intercepts these special cases during step 1, stores the
28
+ # instructions, and applies them during step 3.
29
+ #
30
+ # ## Current Use Case: Producer Configuration
31
+ #
32
+ # The proxy currently intercepts `producer` calls with blocks:
33
+ #
34
+ # ```ruby
35
+ # Karafka::App.setup do |config|
36
+ # config.kafka = { 'bootstrap.servers': 'localhost:9092' }
37
+ #
38
+ # # This is intercepted by the proxy
39
+ # config.producer do |producer_config|
40
+ # producer_config.kafka['compression.type'] = 'snappy'
41
+ # end
42
+ # end
43
+ # ```
44
+ #
45
+ # Without the proxy, we'd have two problems:
46
+ #
47
+ # 1. **Permanent API pollution**: Adding a `producer` method to config that accepts blocks
48
+ # would change its permanent API, even though this functionality is only needed during
49
+ # setup.
50
+ #
51
+ # 2. **Timing issues**: The producer doesn't exist yet when the user's setup block runs.
52
+ # The producer is created in `configure_components` after all user configuration is
53
+ # complete. We need to store the user's producer configuration block and apply it
54
+ # later at the right time.
55
+ #
56
+ # The proxy solves both:
57
+ # - It only exists during the setup call
58
+ # - It stores the producer configuration block in an instance variable
59
+ # - After setup, the block is passed to `configure_components` for execution
60
+ # - The proxy is then discarded
61
+ #
62
+ # ## Future Use Cases
63
+ #
64
+ # This pattern can be extended for other deferred configuration needs:
65
+ #
66
+ # - **Monitor configuration**: Intercept monitor setup to configure it after all components
67
+ # are initialized
68
+ # - **Custom component initialization**: Allow users to configure internal components that
69
+ # need access to the fully-configured environment
70
+ # - **Feature toggles**: Enable/disable features based on the complete configuration state
71
+ #
72
+ # ## Implementation Details
73
+ #
74
+ # The proxy uses Ruby's SimpleDelegator pattern:
75
+ # 1. Inherits from SimpleDelegator for automatic delegation
76
+ # 2. Implements specific interceptor methods (currently just `producer`)
77
+ # 3. Delegates everything else to the wrapped config automatically
78
+ # 4. Stores deferred configuration in instance variables
79
+ #
80
+ # ## Lifecycle
81
+ #
82
+ # ```
83
+ # Setup.setup(&block)
84
+ # ↓
85
+ # proxy = ConfigProxy.new(config) # Create proxy
86
+ # ↓
87
+ # configure { yield(proxy) } # User block receives proxy
88
+ # ↓
89
+ # [User calls config methods] # Most delegate to real config
90
+ # ↓
91
+ # [User calls config.producer {}] # Intercepted by proxy
92
+ # ↓
93
+ # configure_components(proxy.producer_initialization_block) # Block retrieved
94
+ # ↓
95
+ # [Block executed with real producer config]
96
+ # ↓
97
+ # proxy = nil # Proxy discarded
98
+ # ```
99
+ #
100
+ # ## Example Usage
101
+ #
102
+ # ```ruby
103
+ # class KarafkaApp < Karafka::App
104
+ # setup do |config|
105
+ # # Standard config access - delegated to real config
106
+ # config.kafka = { 'bootstrap.servers': 'localhost:9092' }
107
+ # config.client_id = 'my_app'
108
+ #
109
+ # # Special intercepted method - handled by proxy
110
+ # config.producer do |producer_config|
111
+ # producer_config.kafka['compression.type'] = 'snappy'
112
+ # producer_config.kafka['linger.ms'] = 10
113
+ # producer_config.max_wait_timeout = 60_000
114
+ # end
115
+ # end
116
+ # end
117
+ # ```
118
+ #
119
+ # @see Karafka::Setup::Config.setup
120
+ # @see Karafka::Setup::Config.configure_components
121
+ class ConfigProxy < SimpleDelegator
122
+ # @return [Proc] the stored producer initialization block (defaults to empty lambda)
123
+ attr_reader :producer_initialization_block
124
+
125
+ # Creates a new configuration proxy wrapping the actual config object.
126
+ #
127
+ # Uses SimpleDelegator to automatically delegate all method calls to the wrapped config
128
+ # except for specifically intercepted methods like {#producer}.
129
+ #
130
+ # The producer initialization block defaults to an empty lambda, eliminating the need for
131
+ # nil checks when executing the block in {Karafka::Setup::Config.configure_components}.
132
+ #
133
+ # @param config [Karafka::Setup::Config::Node] the actual config object to wrap
134
+ #
135
+ # @example
136
+ # proxy = ConfigProxy.new(Karafka::App.config)
137
+ def initialize(config)
138
+ super
139
+ @producer_initialization_block = ->(_) {}
140
+ end
141
+
142
+ # Captures a block for producer configuration or delegates producer assignment to config.
143
+ #
144
+ # This method has dual behavior:
145
+ #
146
+ # 1. **With a block**: Stores the block for later execution after the producer is created.
147
+ # This allows users to customize producer settings without manually creating a producer
148
+ # instance.
149
+ #
150
+ # 2. **With an instance**: Delegates to `config.producer=` for direct producer assignment.
151
+ # This preserves the existing API for users who want to provide their own producer.
152
+ #
153
+ # The block is stored in `@producer_initialization_block` and later passed to
154
+ # `configure_components` where it's executed with the producer's config object.
155
+ #
156
+ # ## Why This Exists
157
+ #
158
+ # The producer is created in `configure_components` after all user configuration is
159
+ # complete. This ensures the producer inherits the correct kafka settings. However,
160
+ # users may want to customize the producer further (add middleware, change timeouts,
161
+ # etc.) without creating their own producer instance.
162
+ #
163
+ # This method bridges the gap: it lets users configure the producer **as if it exists**
164
+ # during setup, but actually defers the configuration until after it's created.
165
+ #
166
+ # ## Block Execution Timing
167
+ #
168
+ # ```
169
+ # setup do |config|
170
+ # config.kafka = { ... } # Runs immediately
171
+ #
172
+ # config.producer do |pc| # Block STORED (not executed yet)
173
+ # pc.kafka['...'] = '...'
174
+ # end
175
+ # end
176
+ # # User block complete
177
+ # # Config validation runs
178
+ # # configure_components creates producer
179
+ # # NOW the stored block executes: block.call(producer.config)
180
+ # ```
181
+ #
182
+ # @param instance [WaterDrop::Producer, nil] optional producer instance for direct
183
+ # assignment
184
+ # @param block [Proc] optional block for producer configuration. Will be called with
185
+ # the producer's config object after the producer is created.
186
+ # @return [void]
187
+ #
188
+ # @example Configuring producer with a block
189
+ # config.producer do |producer_config|
190
+ # producer_config.kafka['compression.type'] = 'snappy'
191
+ # producer_config.max_wait_timeout = 30_000
192
+ # producer_config.middleware.append(MyMiddleware.new)
193
+ # end
194
+ #
195
+ # @example Direct producer assignment
196
+ # custom_producer = WaterDrop::Producer.new { |c| c.kafka = { 'bootstrap.servers' => 'localhost:9092' } }
197
+ # config.producer = custom_producer
198
+ def producer(instance = nil, &block)
199
+ if block
200
+ # Store the configuration block for later execution
201
+ @producer_initialization_block = block
202
+ else
203
+ # Direct assignment - delegate to real config via __getobj__
204
+ __getobj__.producer = instance
205
+ end
206
+ end
207
+ end
208
+ end
209
+ end
@@ -18,7 +18,7 @@ module Karafka
18
18
  end
19
19
 
20
20
  # Topics regexp constant reference for easier usage
21
- TOPIC_REGEXP = ::Karafka::Contracts::TOPIC_REGEXP
21
+ TOPIC_REGEXP = Karafka::Contracts::TOPIC_REGEXP
22
22
 
23
23
  private_constant :TOPIC_REGEXP
24
24
 
@@ -13,6 +13,7 @@ module Karafka
13
13
  orphaned_exit_code: %i[internal swarm orphaned_exit_code]
14
14
  )
15
15
 
16
+ # Initializes the liveness listener
16
17
  def initialize
17
18
  @last_checked_at = 0
18
19
  @mutex = Mutex.new
@@ -29,6 +29,7 @@ module Karafka
29
29
  # @return [Array<Node>] All nodes that manager manages
30
30
  attr_reader :nodes
31
31
 
32
+ # Initializes the swarm manager with empty nodes
32
33
  def initialize
33
34
  @nodes = []
34
35
  @statuses = Hash.new { |h, k| h[k] = {} }
@@ -97,7 +98,7 @@ module Karafka
97
98
  # If we've issued a stop to this process and it does not want to stop in the period, kills it
98
99
  #
99
100
  # @param statuses [Hash] hash with statuses transitions with times
100
- # @param [Swarm::Node] node we're checking
101
+ # @param node [Swarm::Node] node we're checking
101
102
  # @return [Boolean] should it be the last action taken on this node in this run
102
103
  def terminate_if_hanging(statuses, node)
103
104
  return false unless statuses.key?(:stop)
@@ -118,7 +119,7 @@ module Karafka
118
119
  # reported it is not healthy.
119
120
  #
120
121
  # @param statuses [Hash] hash with statuses transitions with times
121
- # @param [Swarm::Node] node we're checking
122
+ # @param node [Swarm::Node] node we're checking
122
123
  # @return [Boolean] should it be the last action taken on this node in this run
123
124
  def stop_if_not_healthy(statuses, node)
124
125
  status = node.status
@@ -146,7 +147,7 @@ module Karafka
146
147
  # If node stopped responding, starts the stopping procedure.
147
148
  #
148
149
  # @param statuses [Hash] hash with statuses transitions with times
149
- # @param [Swarm::Node] node we're checking
150
+ # @param node [Swarm::Node] node we're checking
150
151
  # @return [Boolean] should it be the last action taken on this node in this run
151
152
  def stop_if_not_responding(statuses, node)
152
153
  # Do nothing if already stopping
@@ -171,7 +172,7 @@ module Karafka
171
172
  # Cleans up a dead process and remembers time of death for restart after a period.
172
173
  #
173
174
  # @param statuses [Hash] hash with statuses transitions with times
174
- # @param [Swarm::Node] node we're checking
175
+ # @param node [Swarm::Node] node we're checking
175
176
  # @return [Boolean] should it be the last action taken on this node in this run
176
177
  def cleanup_one(statuses, node)
177
178
  return false if statuses.key?(:dead_since)
@@ -188,7 +189,7 @@ module Karafka
188
189
  # killed for some external reason.
189
190
  #
190
191
  # @param statuses [Hash] hash with statuses transitions with times
191
- # @param [Swarm::Node] node we're checking
192
+ # @param node [Swarm::Node] node we're checking
192
193
  # @return [Boolean] should it be the last action taken on this node in this run
193
194
  def restart_after_timeout(statuses, node)
194
195
  return false unless over?(statuses[:dead_since], node_restart_timeout)
@@ -200,7 +201,7 @@ module Karafka
200
201
 
201
202
  # Starts a new node (or restarts dead)
202
203
  #
203
- # @param [Swarm::Node] node we're starting
204
+ # @param node [Swarm::Node] node we're starting
204
205
  def start_one(node)
205
206
  instr_args = { caller: self, node: node }
206
207
 
@@ -73,7 +73,7 @@ module Karafka
73
73
  old_producer_config = old_producer.config
74
74
 
75
75
  # Supervisor producer is closed, hence we need a new one here
76
- config.producer = ::WaterDrop::Producer.new do |p_config|
76
+ config.producer = WaterDrop::Producer.new do |p_config|
77
77
  p_config.kafka = Setup::AttributesMap.producer(kafka.dup)
78
78
  p_config.logger = config.logger
79
79
 
@@ -34,6 +34,7 @@ module Karafka
34
34
 
35
35
  private_constant :SHUTDOWN_GRACE_PERIOD
36
36
 
37
+ # Initializes the swarm supervisor
37
38
  def initialize
38
39
  @mutex = Mutex.new
39
40
  @queue = Queue.new
@@ -8,7 +8,7 @@ module Karafka
8
8
  module TimeTrackers
9
9
  # Base class for all the time-trackers.
10
10
  class Base
11
- include ::Karafka::Core::Helpers::Time
11
+ include Karafka::Core::Helpers::Time
12
12
  end
13
13
  end
14
14
  end
@@ -3,5 +3,5 @@
3
3
  # Main module namespace
4
4
  module Karafka
5
5
  # Current Karafka version
6
- VERSION = '2.5.2'
6
+ VERSION = '2.5.3'
7
7
  end
data/lib/karafka.rb CHANGED
@@ -34,7 +34,7 @@ module Karafka
34
34
  env.replace(environment.to_s)
35
35
  end
36
36
 
37
- # @return [Logger] logger that we want to use. Will use ::Karafka::Logger by default
37
+ # @return [Logger] logger that we want to use. Will use Karafka::Logger by default
38
38
  def logger
39
39
  @logger ||= App.config.logger
40
40
  end
@@ -148,7 +148,7 @@ module Karafka
148
148
  #
149
149
  # This method refreshes the things that might have been altered by the configuration
150
150
  def refresh!
151
- config = ::Karafka::App.config
151
+ config = Karafka::App.config
152
152
 
153
153
  @logger = config.logger
154
154
  @producer = config.producer
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: karafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.5.2
4
+ version: 2.5.3
5
5
  platform: ruby
6
6
  authors:
7
7
  - Maciej Mensfeld
@@ -49,21 +49,21 @@ dependencies:
49
49
  requirements:
50
50
  - - ">="
51
51
  - !ruby/object:Gem::Version
52
- version: 0.22.0
52
+ version: 0.23.1
53
53
  type: :runtime
54
54
  prerelease: false
55
55
  version_requirements: !ruby/object:Gem::Requirement
56
56
  requirements:
57
57
  - - ">="
58
58
  - !ruby/object:Gem::Version
59
- version: 0.22.0
59
+ version: 0.23.1
60
60
  - !ruby/object:Gem::Dependency
61
61
  name: waterdrop
62
62
  requirement: !ruby/object:Gem::Requirement
63
63
  requirements:
64
64
  - - ">="
65
65
  - !ruby/object:Gem::Version
66
- version: 2.8.9
66
+ version: 2.8.14
67
67
  - - "<"
68
68
  - !ruby/object:Gem::Version
69
69
  version: 3.0.0
@@ -73,7 +73,7 @@ dependencies:
73
73
  requirements:
74
74
  - - ">="
75
75
  - !ruby/object:Gem::Version
76
- version: 2.8.9
76
+ version: 2.8.14
77
77
  - - "<"
78
78
  - !ruby/object:Gem::Version
79
79
  version: 3.0.0
@@ -118,6 +118,7 @@ files:
118
118
  - ".rspec"
119
119
  - ".ruby-gemset"
120
120
  - ".ruby-version"
121
+ - ".yard-lint.yml"
121
122
  - CHANGELOG.md
122
123
  - CODE_OF_CONDUCT.md
123
124
  - CONTRIBUTING.md
@@ -586,6 +587,7 @@ files:
586
587
  - lib/karafka/server.rb
587
588
  - lib/karafka/setup/attributes_map.rb
588
589
  - lib/karafka/setup/config.rb
590
+ - lib/karafka/setup/config_proxy.rb
589
591
  - lib/karafka/setup/contracts/config.rb
590
592
  - lib/karafka/setup/defaults_injector.rb
591
593
  - lib/karafka/setup/dsl.rb