karafka 2.5.1 → 2.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (238)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +21 -29
  3. data/.github/workflows/ci_macos_arm64.yml +1 -1
  4. data/.github/workflows/push.yml +2 -2
  5. data/.github/workflows/trigger-wiki-refresh.yml +1 -1
  6. data/.ruby-version +1 -1
  7. data/.yard-lint.yml +174 -0
  8. data/CHANGELOG.md +20 -4
  9. data/Gemfile +1 -2
  10. data/Gemfile.lock +45 -41
  11. data/bin/integrations +2 -1
  12. data/bin/rspecs +4 -0
  13. data/config/locales/errors.yml +6 -4
  14. data/config/locales/pro_errors.yml +5 -4
  15. data/docker-compose.yml +1 -1
  16. data/examples/payloads/json/sample_set_02/download.json +191 -0
  17. data/examples/payloads/json/sample_set_03/event_type_1.json +18 -0
  18. data/examples/payloads/json/sample_set_03/event_type_2.json +263 -0
  19. data/examples/payloads/json/sample_set_03/event_type_3.json +41 -0
  20. data/karafka.gemspec +3 -3
  21. data/lib/active_job/queue_adapters/karafka_adapter.rb +3 -3
  22. data/lib/karafka/active_job/consumer.rb +7 -3
  23. data/lib/karafka/active_job/current_attributes/job_wrapper.rb +45 -0
  24. data/lib/karafka/active_job/current_attributes/loading.rb +1 -1
  25. data/lib/karafka/active_job/current_attributes/persistence.rb +19 -7
  26. data/lib/karafka/active_job/current_attributes.rb +3 -2
  27. data/lib/karafka/active_job/deserializer.rb +61 -0
  28. data/lib/karafka/active_job/dispatcher.rb +34 -14
  29. data/lib/karafka/active_job/job_options_contract.rb +2 -4
  30. data/lib/karafka/admin/acl.rb +8 -4
  31. data/lib/karafka/admin/configs/config.rb +6 -4
  32. data/lib/karafka/admin/configs/resource.rb +7 -1
  33. data/lib/karafka/admin/consumer_groups.rb +80 -12
  34. data/lib/karafka/admin/topics.rb +43 -9
  35. data/lib/karafka/admin.rb +23 -14
  36. data/lib/karafka/app.rb +3 -3
  37. data/lib/karafka/base_consumer.rb +6 -6
  38. data/lib/karafka/cli/base.rb +2 -2
  39. data/lib/karafka/cli/console.rb +1 -1
  40. data/lib/karafka/cli/contracts/server.rb +3 -5
  41. data/lib/karafka/cli/help.rb +1 -1
  42. data/lib/karafka/cli/install.rb +3 -2
  43. data/lib/karafka/cli/server.rb +1 -1
  44. data/lib/karafka/cli/swarm.rb +1 -1
  45. data/lib/karafka/cli/topics/align.rb +1 -1
  46. data/lib/karafka/cli/topics/repartition.rb +2 -2
  47. data/lib/karafka/connection/client.rb +30 -19
  48. data/lib/karafka/connection/listeners_batch.rb +2 -3
  49. data/lib/karafka/connection/manager.rb +1 -0
  50. data/lib/karafka/connection/proxy.rb +12 -8
  51. data/lib/karafka/connection/rebalance_manager.rb +1 -1
  52. data/lib/karafka/connection/status.rb +1 -0
  53. data/lib/karafka/constraints.rb +1 -1
  54. data/lib/karafka/contracts/base.rb +1 -1
  55. data/lib/karafka/deserializers/payload.rb +1 -1
  56. data/lib/karafka/env.rb +1 -2
  57. data/lib/karafka/helpers/async.rb +1 -1
  58. data/lib/karafka/helpers/config_importer.rb +3 -3
  59. data/lib/karafka/helpers/interval_runner.rb +4 -1
  60. data/lib/karafka/helpers/multi_delegator.rb +3 -0
  61. data/lib/karafka/instrumentation/assignments_tracker.rb +19 -1
  62. data/lib/karafka/instrumentation/callbacks/error.rb +2 -2
  63. data/lib/karafka/instrumentation/callbacks/statistics.rb +3 -3
  64. data/lib/karafka/instrumentation/logger.rb +6 -6
  65. data/lib/karafka/instrumentation/monitor.rb +3 -3
  66. data/lib/karafka/instrumentation/notifications.rb +1 -0
  67. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +3 -4
  68. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +3 -4
  69. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +10 -11
  70. data/lib/karafka/instrumentation/vendors/kubernetes/base_listener.rb +1 -1
  71. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +5 -18
  72. data/lib/karafka/messages/builders/batch_metadata.rb +2 -2
  73. data/lib/karafka/messages/builders/message.rb +1 -1
  74. data/lib/karafka/messages/messages.rb +2 -3
  75. data/lib/karafka/patches/rdkafka/bindings.rb +6 -6
  76. data/lib/karafka/patches/rdkafka/opaque.rb +1 -1
  77. data/lib/karafka/pro/active_job/consumer.rb +2 -2
  78. data/lib/karafka/pro/active_job/dispatcher.rb +10 -6
  79. data/lib/karafka/pro/active_job/job_options_contract.rb +2 -4
  80. data/lib/karafka/pro/cleaner/messages/messages.rb +2 -3
  81. data/lib/karafka/pro/cleaner.rb +3 -3
  82. data/lib/karafka/pro/cli/contracts/server.rb +3 -5
  83. data/lib/karafka/pro/cli/parallel_segments/base.rb +5 -5
  84. data/lib/karafka/pro/cli/parallel_segments/collapse.rb +3 -3
  85. data/lib/karafka/pro/cli/parallel_segments/distribute.rb +3 -3
  86. data/lib/karafka/pro/cli/parallel_segments.rb +1 -1
  87. data/lib/karafka/pro/connection/manager.rb +3 -4
  88. data/lib/karafka/pro/connection/multiplexing/listener.rb +1 -0
  89. data/lib/karafka/pro/contracts/base.rb +1 -1
  90. data/lib/karafka/pro/encryption/cipher.rb +3 -2
  91. data/lib/karafka/pro/encryption/contracts/config.rb +5 -7
  92. data/lib/karafka/pro/encryption/messages/parser.rb +4 -4
  93. data/lib/karafka/pro/encryption/setup/config.rb +1 -1
  94. data/lib/karafka/pro/instrumentation/performance_tracker.rb +3 -3
  95. data/lib/karafka/pro/iterator/expander.rb +1 -1
  96. data/lib/karafka/pro/iterator/tpl_builder.rb +2 -2
  97. data/lib/karafka/pro/iterator.rb +3 -3
  98. data/lib/karafka/pro/loader.rb +1 -1
  99. data/lib/karafka/pro/processing/coordinator.rb +1 -1
  100. data/lib/karafka/pro/processing/coordinators/errors_tracker.rb +2 -3
  101. data/lib/karafka/pro/processing/coordinators/filters_applier.rb +3 -3
  102. data/lib/karafka/pro/processing/filters/base.rb +1 -0
  103. data/lib/karafka/pro/processing/filters/delayer.rb +1 -1
  104. data/lib/karafka/pro/processing/filters/expirer.rb +1 -1
  105. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +1 -1
  106. data/lib/karafka/pro/processing/filters/throttler.rb +1 -1
  107. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +1 -1
  108. data/lib/karafka/pro/processing/jobs/eofed_non_blocking.rb +1 -1
  109. data/lib/karafka/pro/processing/jobs/periodic.rb +1 -1
  110. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +1 -1
  111. data/lib/karafka/pro/processing/jobs_builder.rb +1 -1
  112. data/lib/karafka/pro/processing/offset_metadata/fetcher.rb +1 -0
  113. data/lib/karafka/pro/processing/partitioner.rb +1 -1
  114. data/lib/karafka/pro/processing/schedulers/default.rb +2 -4
  115. data/lib/karafka/pro/processing/strategies/base.rb +1 -1
  116. data/lib/karafka/pro/processing/strategies/default.rb +2 -2
  117. data/lib/karafka/pro/processing/strategies/lrj/default.rb +2 -4
  118. data/lib/karafka/pro/processing/strategies/vp/default.rb +2 -4
  119. data/lib/karafka/pro/processing/strategy_selector.rb +1 -0
  120. data/lib/karafka/pro/processing/subscription_groups_coordinator.rb +2 -3
  121. data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb +4 -2
  122. data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb +4 -2
  123. data/lib/karafka/pro/recurring_tasks/consumer.rb +3 -2
  124. data/lib/karafka/pro/recurring_tasks/contracts/config.rb +4 -6
  125. data/lib/karafka/pro/recurring_tasks/contracts/task.rb +3 -5
  126. data/lib/karafka/pro/recurring_tasks/deserializer.rb +1 -1
  127. data/lib/karafka/pro/recurring_tasks/dispatcher.rb +7 -6
  128. data/lib/karafka/pro/recurring_tasks/executor.rb +2 -1
  129. data/lib/karafka/pro/recurring_tasks/schedule.rb +9 -8
  130. data/lib/karafka/pro/recurring_tasks/serializer.rb +6 -5
  131. data/lib/karafka/pro/recurring_tasks/setup/config.rb +2 -2
  132. data/lib/karafka/pro/recurring_tasks/task.rb +1 -1
  133. data/lib/karafka/pro/recurring_tasks.rb +8 -5
  134. data/lib/karafka/pro/routing/features/adaptive_iterator/contracts/topic.rb +2 -4
  135. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +2 -4
  136. data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +3 -0
  137. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +2 -4
  138. data/lib/karafka/pro/routing/features/delaying/topic.rb +2 -4
  139. data/lib/karafka/pro/routing/features/direct_assignments/contracts/consumer_group.rb +4 -8
  140. data/lib/karafka/pro/routing/features/direct_assignments/contracts/topic.rb +5 -7
  141. data/lib/karafka/pro/routing/features/direct_assignments/subscription_group.rb +7 -6
  142. data/lib/karafka/pro/routing/features/direct_assignments/topic.rb +2 -2
  143. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +2 -4
  144. data/lib/karafka/pro/routing/features/expiring/topic.rb +2 -4
  145. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +2 -4
  146. data/lib/karafka/pro/routing/features/filtering/topic.rb +2 -3
  147. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +2 -4
  148. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +2 -4
  149. data/lib/karafka/pro/routing/features/multiplexing/contracts/topic.rb +3 -5
  150. data/lib/karafka/pro/routing/features/multiplexing/subscription_groups_builder.rb +1 -1
  151. data/lib/karafka/pro/routing/features/multiplexing.rb +5 -5
  152. data/lib/karafka/pro/routing/features/non_blocking_job/topic.rb +3 -3
  153. data/lib/karafka/pro/routing/features/offset_metadata/contracts/topic.rb +2 -4
  154. data/lib/karafka/pro/routing/features/offset_metadata.rb +4 -4
  155. data/lib/karafka/pro/routing/features/parallel_segments/builder.rb +1 -1
  156. data/lib/karafka/pro/routing/features/parallel_segments/contracts/consumer_group.rb +2 -4
  157. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +3 -5
  158. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +2 -4
  159. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +2 -4
  160. data/lib/karafka/pro/routing/features/patterns/patterns.rb +1 -1
  161. data/lib/karafka/pro/routing/features/pausing/config.rb +26 -0
  162. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +17 -11
  163. data/lib/karafka/pro/routing/features/pausing/topic.rb +69 -8
  164. data/lib/karafka/pro/routing/features/periodic_job/contracts/topic.rb +2 -4
  165. data/lib/karafka/pro/routing/features/periodic_job/topic.rb +1 -1
  166. data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +1 -1
  167. data/lib/karafka/pro/routing/features/recurring_tasks/contracts/topic.rb +2 -4
  168. data/lib/karafka/pro/routing/features/scheduled_messages/contracts/topic.rb +2 -4
  169. data/lib/karafka/pro/routing/features/swarm/contracts/routing.rb +2 -4
  170. data/lib/karafka/pro/routing/features/swarm/contracts/topic.rb +6 -8
  171. data/lib/karafka/pro/routing/features/swarm.rb +1 -1
  172. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +2 -4
  173. data/lib/karafka/pro/routing/features/throttling/topic.rb +3 -1
  174. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +2 -4
  175. data/lib/karafka/pro/scheduled_messages/consumer.rb +1 -1
  176. data/lib/karafka/pro/scheduled_messages/contracts/config.rb +4 -6
  177. data/lib/karafka/pro/scheduled_messages/contracts/message.rb +3 -5
  178. data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +3 -2
  179. data/lib/karafka/pro/scheduled_messages/day.rb +1 -0
  180. data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +1 -1
  181. data/lib/karafka/pro/scheduled_messages/deserializers/payload.rb +1 -1
  182. data/lib/karafka/pro/scheduled_messages/max_epoch.rb +1 -0
  183. data/lib/karafka/pro/scheduled_messages/proxy.rb +1 -1
  184. data/lib/karafka/pro/scheduled_messages/serializer.rb +3 -3
  185. data/lib/karafka/pro/scheduled_messages/setup/config.rb +2 -2
  186. data/lib/karafka/pro/scheduled_messages/state.rb +1 -0
  187. data/lib/karafka/pro/scheduled_messages/tracker.rb +1 -0
  188. data/lib/karafka/pro/scheduled_messages.rb +4 -6
  189. data/lib/karafka/pro/swarm/liveness_listener.rb +2 -2
  190. data/lib/karafka/process.rb +4 -4
  191. data/lib/karafka/processing/coordinator.rb +2 -4
  192. data/lib/karafka/processing/coordinators_buffer.rb +2 -3
  193. data/lib/karafka/processing/executor.rb +3 -4
  194. data/lib/karafka/processing/inline_insights/tracker.rb +1 -0
  195. data/lib/karafka/processing/jobs/base.rb +2 -3
  196. data/lib/karafka/processing/jobs_queue.rb +1 -1
  197. data/lib/karafka/processing/result.rb +1 -0
  198. data/lib/karafka/processing/strategy_selector.rb +1 -0
  199. data/lib/karafka/processing/workers_batch.rb +2 -3
  200. data/lib/karafka/railtie.rb +1 -0
  201. data/lib/karafka/routing/activity_manager.rb +3 -2
  202. data/lib/karafka/routing/builder.rb +8 -8
  203. data/lib/karafka/routing/consumer_group.rb +4 -6
  204. data/lib/karafka/routing/contracts/consumer_group.rb +6 -7
  205. data/lib/karafka/routing/contracts/routing.rb +2 -4
  206. data/lib/karafka/routing/contracts/topic.rb +7 -6
  207. data/lib/karafka/routing/features/active_job/contracts/topic.rb +2 -4
  208. data/lib/karafka/routing/features/active_job/topic.rb +6 -0
  209. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +3 -5
  210. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +3 -5
  211. data/lib/karafka/routing/features/declaratives/topic.rb +5 -2
  212. data/lib/karafka/routing/features/deserializers/contracts/topic.rb +2 -4
  213. data/lib/karafka/routing/features/deserializers/topic.rb +3 -3
  214. data/lib/karafka/routing/features/eofed/contracts/topic.rb +2 -4
  215. data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +2 -4
  216. data/lib/karafka/routing/features/inline_insights.rb +5 -5
  217. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +2 -4
  218. data/lib/karafka/routing/router.rb +1 -1
  219. data/lib/karafka/routing/subscription_group.rb +1 -1
  220. data/lib/karafka/routing/subscription_groups_builder.rb +1 -0
  221. data/lib/karafka/routing/topic.rb +3 -3
  222. data/lib/karafka/routing/topics.rb +4 -9
  223. data/lib/karafka/server.rb +2 -2
  224. data/lib/karafka/setup/attributes_map.rb +4 -2
  225. data/lib/karafka/setup/config.rb +85 -17
  226. data/lib/karafka/setup/config_proxy.rb +209 -0
  227. data/lib/karafka/setup/contracts/config.rb +13 -11
  228. data/lib/karafka/setup/defaults_injector.rb +3 -2
  229. data/lib/karafka/setup/dsl.rb +2 -3
  230. data/lib/karafka/swarm/liveness_listener.rb +3 -3
  231. data/lib/karafka/swarm/manager.rb +7 -6
  232. data/lib/karafka/swarm/node.rb +1 -1
  233. data/lib/karafka/swarm/supervisor.rb +2 -1
  234. data/lib/karafka/time_trackers/base.rb +1 -1
  235. data/lib/karafka/version.rb +1 -1
  236. data/lib/karafka.rb +4 -4
  237. metadata +14 -6
  238. data/.diffend.yml +0 -3
@@ -6,10 +6,8 @@ module Karafka
  # Consumer group topic validation rules.
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -68,10 +66,13 @@ module Karafka

  virtual do |data, errors|
  next unless errors.empty?
- next unless ::Karafka::App.config.strict_topics_namespacing
+ next unless Karafka::App.config.strict_topics_namespacing

  value = data.fetch(:name)
- namespacing_chars_count = value.chars.find_all { |c| ['.', '_'].include?(c) }.uniq.size
+ namespace_chars = ['.', '_'].freeze
+ namespacing_chars_count = value.chars.find_all do |c|
+ namespace_chars.include?(c)
+ end.uniq.size

  next if namespacing_chars_count <= 1

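The refactored namespacing check above keeps the original rule: with `strict_topics_namespacing` enabled, a topic name may use '.' or '_' as its namespace delimiter, but not both. A minimal standalone sketch of that rule (the `valid_namespacing?` helper is illustrative only, not part of Karafka):

  def valid_namespacing?(topic_name, namespace_chars: ['.', '_'])
    # Count how many distinct delimiter characters the name uses
    topic_name.chars.find_all { |c| namespace_chars.include?(c) }.uniq.size <= 1
  end

  valid_namespacing?('orders.created')    # => true  (only '.')
  valid_namespacing?('orders_created_v2') # => true  (only '_')
  valid_namespacing?('orders.created_v2') # => false (mixes '.' and '_')
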
@@ -10,10 +10,8 @@ module Karafka
  # in order to be able to use active job routing
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -5,6 +5,12 @@ module Karafka
  module Features
  class ActiveJob < Base
  # Topic extensions to be able to check if given topic is ActiveJob topic
+ #
+ # @note ActiveJob topics do not have per-topic deserializer configuration. The deserializer
+ # is configured globally via `config.internal.active_job.deserializer` because Rails
+ # serializes jobs before dispatching them, requiring a consistent serialization format
+ # across all ActiveJob topics. If you need custom serialization (e.g., Avro, Protobuf),
+ # configure it once at the application level rather than per-topic.
  module Topic
  # This method calls the parent class initializer and then sets up the
  # extra instance variable to nil. The explicit initialization
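Since the ActiveJob deserializer is a single application-level setting rather than a per-topic routing option, a custom format would be plugged in once during setup. A hedged sketch, assuming an object compatible with `Karafka::ActiveJob::Deserializer` (the `MyAvroJobSerializer` class name is hypothetical):

  # Hypothetical global swap of the ActiveJob (de)serializer;
  # MyAvroJobSerializer is an illustrative name, not part of Karafka.
  Karafka::App.setup do |config|
    config.internal.active_job.deserializer = MyAvroJobSerializer.new
  end
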
@@ -9,10 +9,8 @@ module Karafka
  # Rules around dead letter queue settings
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -41,7 +39,7 @@ module Karafka
  next unless dead_letter_queue[:active]

  topic = dead_letter_queue[:topic]
- topic_regexp = ::Karafka::Contracts::TOPIC_REGEXP
+ topic_regexp = Karafka::Contracts::TOPIC_REGEXP

  # When topic is set to false, it means we just want to skip dispatch on DLQ
  next if topic == false
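For context, the contract above validates the per-topic dead letter queue routing settings. A hedged routing sketch (consumer class and topic names are illustrative):

  class KarafkaApp < Karafka::App
    routes.draw do
      topic :orders_states do
        consumer OrdersStatesConsumer
        # Dispatch failed messages to a DLQ topic after two retries
        dead_letter_queue(topic: 'orders_states_dlq', max_retries: 2)
        # Passing `topic: false` instead keeps the retry flow but skips the
        # dispatch entirely, as the `next if topic == false` branch above allows
      end
    end
  end
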
@@ -9,10 +9,8 @@ module Karafka
  # Basic validation of the Kafka expected config details
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -22,7 +20,7 @@ module Karafka
  required(:replication_factor) { |val| val.is_a?(Integer) && val.positive? }
  required(:details) do |val|
  val.is_a?(Hash) &&
- val.keys.all? { |key| key.is_a?(Symbol) }
+ val.keys.all?(Symbol)
  end
  end
  end
@@ -16,9 +16,12 @@ module Karafka
  end

  # @param active [Boolean] is the topic structure management feature active
- # @param partitions [Integer]
- # @param replication_factor [Integer]
+ # @param partitions [Integer] number of partitions for the topic
+ # @param replication_factor [Integer] replication factor for the topic
  # @param details [Hash] extra configuration for the topic
+ # @option details [String] :retention.ms retention time in milliseconds
+ # @option details [String] :compression.type compression type
+ # (none, gzip, snappy, lz4, zstd)
  # @return [Config] defined structure
  def config(active: true, partitions: 1, replication_factor: 1, **details)
  @declaratives ||= Config.new(
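A hedged example of how the documented `details` options above could be passed through declarative topic configuration in routing (values and class names are illustrative; per the docs above, detail values are strings keyed by symbols):

  class KarafkaApp < Karafka::App
    routes.draw do
      topic :events do
        consumer EventsConsumer
        config(
          partitions: 6,
          replication_factor: 2,
          'retention.ms': '86400000',  # 1 day, in milliseconds
          'compression.type': 'gzip'   # none, gzip, snappy, lz4 or zstd
        )
      end
    end
  end
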
@@ -9,10 +9,8 @@ module Karafka
  # Basic validation of the Kafka expected config details
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -22,9 +22,9 @@ module Karafka
  # @param key [Object] deserializer for the message key
  # @param headers [Object] deserializer for the message headers
  def deserializers(
- payload: ::Karafka::Deserializers::Payload.new,
- key: ::Karafka::Deserializers::Key.new,
- headers: ::Karafka::Deserializers::Headers.new
+ payload: Karafka::Deserializers::Payload.new,
+ key: Karafka::Deserializers::Key.new,
+ headers: Karafka::Deserializers::Headers.new
  )
  @deserializers ||= Config.new(
  active: true,
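A hedged example of overriding the payload default shown above with a custom per-topic deserializer (the `AvroPayloadDeserializer` class is hypothetical; key and headers fall back to the Karafka defaults):

  class KarafkaApp < Karafka::App
    routes.draw do
      topic :metrics do
        consumer MetricsConsumer
        # Only the payload deserializer is replaced here
        deserializers(payload: AvroPayloadDeserializer.new)
      end
    end
  end
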
@@ -9,10 +9,8 @@ module Karafka
  # Contract for eofed topic setup
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -9,10 +9,8 @@ module Karafka
  # Contract for inline insights topic setup
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -12,11 +12,11 @@ module Karafka
  #
  # @param _config [Karafka::Core::Configurable::Node] app config
  def post_setup(_config)
- ::Karafka::App.monitor.subscribe('app.running') do
+ Karafka::App.monitor.subscribe('app.running') do
  # Do not activate tracking of statistics if none of our active topics uses it
  # This prevents us from tracking metrics when user just runs a subset of topics
  # in a given process and none of those actually utilizes this feature
- next unless ::Karafka::App
+ next unless Karafka::App
  .subscription_groups
  .values
  .flat_map(&:itself)
@@ -25,11 +25,11 @@ module Karafka
  .any?(&:inline_insights?)

  # Initialize the tracker prior to becoming multi-threaded
- ::Karafka::Processing::InlineInsights::Tracker.instance
+ Karafka::Processing::InlineInsights::Tracker.instance

  # Subscribe to the statistics reports and collect them
- ::Karafka.monitor.subscribe(
- ::Karafka::Processing::InlineInsights::Listener.new
+ Karafka.monitor.subscribe(
+ Karafka::Processing::InlineInsights::Listener.new
  )
  end
  end
@@ -9,10 +9,8 @@ module Karafka
  # Rules around manual offset management settings
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -8,7 +8,7 @@ module Karafka
  # structure so all the routes are being stored in a single level array
  module Router
  # Finds first reference of a given topic based on provided lookup attribute
- # @param lookup [Hash<Symbol, String>] hash with attribute - value key pairs
+ # @param lookup [Hash{Symbol => String}] hash with attribute - value key pairs
  # @return [Karafka::Routing::Topic, nil] proper route details or nil if not found
  def find_by(lookup)
  App.consumer_groups.each do |consumer_group|
@@ -30,7 +30,7 @@ module Karafka
  @group_counter ||= 0
  @group_counter += 1

- ::Digest::SHA256.hexdigest(
+ Digest::SHA256.hexdigest(
  @group_counter.to_s
  )[0..11]
  end
@@ -24,6 +24,7 @@ module Karafka

  private_constant :DISTRIBUTION_KEYS

+ # Initializes the subscription groups builder
  def initialize
  @position = -1
  end
@@ -29,7 +29,7 @@ module Karafka

  private_constant :INHERITABLE_ATTRIBUTES

- # @param [String, Symbol] name of a topic on which we want to listen
+ # @param name [String, Symbol] name of a topic on which we want to listen
  # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
  def initialize(name, consumer_group)
  @name = name.to_s
@@ -92,7 +92,7 @@ module Karafka
  # consumer class is defined with a name. It won't support code reload for anonymous
  # consumer classes, but this is an edge case
  begin
- ::Object.const_get(@consumer.to_s)
+ Object.const_get(@consumer.to_s)
  rescue NameError
  # It will only fail if the in case of anonymous classes
  @consumer
@@ -139,7 +139,7 @@ module Karafka
  [attribute, public_send(attribute)]
  end

- Hash[map].merge!(
+ map.to_h.merge!(
  id: id,
  name: name,
  active: active?,
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- # frozen_string_literal: true
-
  module Karafka
  module Routing
  # Abstraction layer on top of groups of topics
@@ -17,18 +15,15 @@ module Karafka
  end

  # Yields each topic
- #
- # @param [Proc] block we want to yield with on each topic
- def each(&block)
- @accumulator.each(&block)
+ def each(&)
+ @accumulator.each(&)
  end

  # Allows us to remove elements from the topics
  #
  # Block to decide what to delete
- # @param block [Proc]
- def delete_if(&block)
- @accumulator.delete_if(&block)
+ def delete_if(&)
+ @accumulator.delete_if(&)
  end

  # Finds topic by its name
@@ -64,7 +64,7 @@ module Karafka
  process.on_sigterm { stop }
  process.on_sigtstp { quiet }
  # Needed for instrumentation
- process.on_sigttin {}
+ process.on_sigttin { nil }
  process.supervise

  # This will only run when not in a swarm mode. In swarm mode the server runs post-fork, so
@@ -174,7 +174,7 @@ module Karafka
  # This ensures that if users have configured the default pool, it is closed correctly
  #
  # Custom pools need to be closed by users themselves
- ::WaterDrop::ConnectionPool.close
+ WaterDrop::ConnectionPool.close

  Karafka::App.terminate!
  end
@@ -122,6 +122,7 @@ module Karafka
  sasl.oauthbearer.config
  sasl.oauthbearer.extensions
  sasl.oauthbearer.grant.type
+ sasl.oauthbearer.metadata.authentication.type
  sasl.oauthbearer.method
  sasl.oauthbearer.scope
  sasl.oauthbearer.token.endpoint.url
@@ -279,6 +280,7 @@ module Karafka
  sasl.oauthbearer.config
  sasl.oauthbearer.extensions
  sasl.oauthbearer.grant.type
+ sasl.oauthbearer.metadata.authentication.type
  sasl.oauthbearer.method
  sasl.oauthbearer.scope
  sasl.oauthbearer.token.endpoint.url
@@ -357,7 +359,7 @@ module Karafka
  end

  # @private
- # @return [Hash<Symbol, Array<Symbol>>] hash with consumer and producer attributes list
+ # @return [Hash{Symbol => Array<Symbol>}] hash with consumer and producer attributes list
  # that is sorted.
  # @note This method should not be used directly. It is only used to generate appropriate
  # options list in case it would change
@@ -367,7 +369,7 @@ module Karafka

  attributes = { consumer: Set.new, producer: Set.new }

- ::URI.parse(SOURCE).open.readlines.each do |line|
+ URI.parse(SOURCE).open.readlines.each do |line|
  next unless line.include?('|')

  attribute, attribute_type = line.split('|').map(&:strip)
@@ -12,7 +12,7 @@ module Karafka
  # enough and will still keep the code simple
  # @see Karafka::Setup::Configurators::Base for more details about configurators api
  class Config
- extend ::Karafka::Core::Configurable
+ extend Karafka::Core::Configurable

  # Available settings

@@ -33,9 +33,9 @@ module Karafka
  # Used only for logging.
  setting :client_id, default: 'karafka'
  # option logger [Instance] logger that we want to use
- setting :logger, default: ::Karafka::Instrumentation::Logger.new
+ setting :logger, default: Karafka::Instrumentation::Logger.new
  # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
- setting :monitor, default: ::Karafka::Instrumentation::Monitor.new
+ setting :monitor, default: Karafka::Instrumentation::Monitor.new
  # option [Boolean] should we reload consumers with each incoming batch thus effectively
  # supporting code reload (if someone reloads code) or should we keep the persistence
  setting :consumer_persistence, default: true
@@ -52,12 +52,16 @@ module Karafka
  setting :shutdown_timeout, default: 60_000
  # option [Integer] number of threads in which we want to do parallel processing
  setting :concurrency, default: 5
- # option [Integer] how long should we wait upon processing error (milliseconds)
- setting :pause_timeout, default: 1_000
- # option [Integer] what is the max timeout in case of an exponential backoff (milliseconds)
- setting :pause_max_timeout, default: 30_000
- # option [Boolean] should we use exponential backoff
- setting :pause_with_exponential_backoff, default: true
+ # Namespace for pause-related settings
+ setting :pause do
+ # option [Integer] how long should we wait upon processing error (milliseconds)
+ setting :timeout, default: 1_000
+ # option [Integer] what is the max timeout in case of an exponential backoff (milliseconds)
+ setting :max_timeout, default: 30_000
+ # option [Boolean] should we use exponential backoff
+ setting :with_exponential_backoff, default: true
+ end
+
  # option [::WaterDrop::Producer, nil]
  # Unless configured, will be created once Karafka is configured based on user Karafka setup
  setting :producer, default: nil
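With the pause settings nested under `config.pause`, setup code addresses them as sketched below (values mirror the defaults above); the old flat `pause_timeout`, `pause_max_timeout` and `pause_with_exponential_backoff` accessors remain available as deprecated delegators, as the backwards-compatibility block further down in this diff shows:

  Karafka::App.setup do |config|
    # Nested pause configuration introduced in this release
    config.pause.timeout = 1_000
    config.pause.max_timeout = 30_000
    config.pause.with_exponential_backoff = true
  end
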
@@ -207,7 +211,7 @@ module Karafka
  setting :cli do
  # option contract [Object] cli setup validation contract (in the context of options and
  # topics)
- setting :contract, default: ::Karafka::Cli::Contracts::Server.new
+ setting :contract, default: Karafka::Cli::Contracts::Server.new
  end

  setting :routing do
@@ -336,6 +340,12 @@ module Karafka
  setting :job_options_contract, default: ActiveJob::JobOptionsContract.new
  # option consumer [Class] consumer class that should be used to consume ActiveJob data
  setting :consumer_class, default: ActiveJob::Consumer
+ # option deserializer [Karafka::ActiveJob::Deserializer] deserializer for ActiveJob jobs
+ # Despite the name, handles both serialization (outgoing) and deserialization
+ # (incoming). Can be replaced with a custom implementation for formats like Avro,
+ # Protobuf, etc. This is a global setting because Rails serializes jobs before
+ # Karafka receives them, so we need a consistent approach across all ActiveJob topics.
+ setting :deserializer, default: Karafka::ActiveJob::Deserializer.new
  end
  end

@@ -343,10 +353,57 @@ module Karafka
  # Thanks to that we have an initial state out of the box.
  configure

+ # Backwards compatibility: Add old flat API methods to the config instance
+ # These delegate to the new nested pause config
+ # @deprecated Will be removed in Karafka 2.6
+ #
+ # Prior to the introduction of nested pause configuration, pause-related settings were
+ # accessed directly on the config object (e.g., `config.pause_timeout`). With the nested
+ # structure introduced, these settings moved to `config.pause.timeout`, etc.
+ #
+ # This instance_eval block adds delegation methods to maintain backwards compatibility,
+ # allowing existing code using the old flat API to continue working without modification.
+ config.instance_eval do
+ # @return [Integer] delegated timeout value from pause.timeout
+ # @deprecated Use config.pause.timeout instead
+ def pause_timeout
+ pause.timeout
+ end
+
+ # @param value [Integer] timeout value to set
+ # @deprecated Use config.pause.timeout= instead
+ def pause_timeout=(value)
+ pause.timeout = value
+ end
+
+ # @return [Integer] delegated max_timeout value from pause.max_timeout
+ # @deprecated Use config.pause.max_timeout instead
+ def pause_max_timeout
+ pause.max_timeout
+ end
+
+ # @param value [Integer] max timeout value to set
+ # @deprecated Use config.pause.max_timeout= instead
+ def pause_max_timeout=(value)
+ pause.max_timeout = value
+ end
+
+ # @return [Boolean] delegated exponential backoff flag from pause.with_exponential_backoff
+ # @deprecated Use config.pause.with_exponential_backoff instead
+ def pause_with_exponential_backoff
+ pause.with_exponential_backoff
+ end
+
+ # @param value [Boolean] exponential backoff flag to set
+ # @deprecated Use config.pause.with_exponential_backoff= instead
+ def pause_with_exponential_backoff=(value)
+ pause.with_exponential_backoff = value
+ end
+ end
+
  class << self
  # Configuring method
- # @param block [Proc] block we want to execute with the config instance
- def setup(&block)
+ def setup(&)
  # Will prepare and verify license if present
  Licenser.prepare_and_verify(config.license)

@@ -358,17 +415,21 @@ module Karafka
  # of the pro defaults with custom components
  Pro::Loader.pre_setup_all(config) if Karafka.pro?

- configure(&block)
+ # Wrap config in a proxy that intercepts producer block configuration
+ proxy = ConfigProxy.new(config)
+ # We need to check for the block presence here because user can just run setup without
+ # any block given
+ configure { yield(proxy) if block_given? }

  Contracts::Config.new.validate!(
  config.to_h,
  scope: %w[config]
  )

- configure_components
+ configure_components(proxy)

  # Refreshes the references that are cached that might have been changed by the config
- ::Karafka.refresh!
+ Karafka.refresh!

  # Post-setup configure all routing features that would need this
  Routing::Features::Base.post_setup_all(config)
@@ -386,14 +447,16 @@ module Karafka
  private

  # Sets up all the components that are based on the user configuration
+ # @param config_proxy [ConfigProxy] the configuration proxy containing deferred setup
+ # blocks
  # @note At the moment it is only WaterDrop
- def configure_components
+ def configure_components(config_proxy)
  oauth_listener = config.oauth.token_provider_listener
  # We need to subscribe the oauth listener here because we want it to be ready before
  # any consumer/admin runs
  Karafka::App.monitor.subscribe(oauth_listener) if oauth_listener

- config.producer ||= ::WaterDrop::Producer.new do |producer_config|
+ config.producer ||= WaterDrop::Producer.new do |producer_config|
  # In some cases WaterDrop updates the config and we don't want our consumer config to
  # be polluted by those updates, that's why we copy
  producer_kafka = AttributesMap.producer(config.kafka.dup)
@@ -406,6 +469,11 @@ module Karafka
  producer_config.oauth.token_provider_listener = oauth_listener
  producer_config.logger = config.logger
  end
+
+ # Execute user's producer configuration block
+ # This happens after the default producer setup, allowing users to customize settings
+ # If no block was provided during setup, this will be an empty lambda that does nothing
+ config_proxy.producer_initialization_block.call(config.producer.config)
  end
  end
  end
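The `configure_components(proxy)` flow above defers a user-supplied producer customization block until after the default WaterDrop producer has been built. The user-facing DSL lives in data/lib/karafka/setup/config_proxy.rb, which this diff does not show, so the sketch below only illustrates the deferred-block pattern under that assumption and is not Karafka's actual ConfigProxy API:

  # Illustrative only: a tiny proxy that records a block during setup and lets
  # the framework run it later, after defaults exist. Not Karafka's ConfigProxy.
  class TinyConfigProxy
    attr_reader :producer_initialization_block

    def initialize(config)
      @config = config
      # No-op default so the framework can always call it safely
      @producer_initialization_block = ->(_producer_config) {}
    end

    # Records the user's producer customization block for later execution
    def producer(&block)
      @producer_initialization_block = block
    end

    # Everything else behaves like the wrapped config object
    def method_missing(name, *args, &block)
      @config.public_send(name, *args, &block)
    end

    def respond_to_missing?(name, include_private = false)
      @config.respond_to?(name, include_private) || super
    end
  end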