karafka 2.5.1 → 2.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (151)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +3 -29
  3. data/.github/workflows/ci_macos_arm64.yml +1 -1
  4. data/.github/workflows/push.yml +2 -2
  5. data/.github/workflows/trigger-wiki-refresh.yml +1 -1
  6. data/.ruby-version +1 -1
  7. data/CHANGELOG.md +14 -4
  8. data/Gemfile +0 -2
  9. data/Gemfile.lock +30 -31
  10. data/bin/integrations +2 -1
  11. data/bin/rspecs +4 -0
  12. data/config/locales/errors.yml +6 -4
  13. data/config/locales/pro_errors.yml +5 -4
  14. data/docker-compose.yml +1 -1
  15. data/examples/payloads/json/sample_set_02/download.json +191 -0
  16. data/examples/payloads/json/sample_set_03/event_type_1.json +18 -0
  17. data/examples/payloads/json/sample_set_03/event_type_2.json +263 -0
  18. data/examples/payloads/json/sample_set_03/event_type_3.json +41 -0
  19. data/karafka.gemspec +1 -1
  20. data/lib/active_job/queue_adapters/karafka_adapter.rb +1 -1
  21. data/lib/karafka/active_job/consumer.rb +5 -1
  22. data/lib/karafka/active_job/current_attributes/job_wrapper.rb +45 -0
  23. data/lib/karafka/active_job/current_attributes/loading.rb +1 -1
  24. data/lib/karafka/active_job/current_attributes/persistence.rb +19 -7
  25. data/lib/karafka/active_job/current_attributes.rb +1 -0
  26. data/lib/karafka/active_job/deserializer.rb +61 -0
  27. data/lib/karafka/active_job/dispatcher.rb +32 -12
  28. data/lib/karafka/active_job/job_options_contract.rb +2 -4
  29. data/lib/karafka/admin/acl.rb +8 -4
  30. data/lib/karafka/admin/configs/config.rb +6 -4
  31. data/lib/karafka/admin/consumer_groups.rb +74 -4
  32. data/lib/karafka/admin/topics.rb +40 -7
  33. data/lib/karafka/admin.rb +13 -4
  34. data/lib/karafka/base_consumer.rb +5 -5
  35. data/lib/karafka/cli/base.rb +1 -1
  36. data/lib/karafka/cli/contracts/server.rb +2 -4
  37. data/lib/karafka/cli/install.rb +1 -1
  38. data/lib/karafka/cli/topics/align.rb +1 -1
  39. data/lib/karafka/cli/topics/repartition.rb +2 -2
  40. data/lib/karafka/connection/client.rb +12 -2
  41. data/lib/karafka/connection/listeners_batch.rb +2 -3
  42. data/lib/karafka/connection/proxy.rb +11 -7
  43. data/lib/karafka/env.rb +1 -2
  44. data/lib/karafka/helpers/interval_runner.rb +4 -1
  45. data/lib/karafka/instrumentation/assignments_tracker.rb +17 -0
  46. data/lib/karafka/instrumentation/monitor.rb +1 -1
  47. data/lib/karafka/instrumentation/notifications.rb +1 -0
  48. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +2 -3
  49. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +2 -3
  50. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +8 -9
  51. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +2 -3
  52. data/lib/karafka/messages/builders/batch_metadata.rb +1 -1
  53. data/lib/karafka/messages/builders/message.rb +1 -1
  54. data/lib/karafka/messages/messages.rb +2 -3
  55. data/lib/karafka/patches/rdkafka/bindings.rb +6 -6
  56. data/lib/karafka/patches/rdkafka/opaque.rb +1 -1
  57. data/lib/karafka/pro/active_job/dispatcher.rb +7 -3
  58. data/lib/karafka/pro/active_job/job_options_contract.rb +2 -4
  59. data/lib/karafka/pro/cleaner/messages/messages.rb +2 -3
  60. data/lib/karafka/pro/cli/contracts/server.rb +2 -4
  61. data/lib/karafka/pro/cli/parallel_segments/base.rb +1 -2
  62. data/lib/karafka/pro/cli/parallel_segments/collapse.rb +2 -2
  63. data/lib/karafka/pro/cli/parallel_segments/distribute.rb +2 -2
  64. data/lib/karafka/pro/connection/manager.rb +2 -2
  65. data/lib/karafka/pro/encryption/contracts/config.rb +4 -6
  66. data/lib/karafka/pro/encryption/messages/parser.rb +3 -3
  67. data/lib/karafka/pro/instrumentation/performance_tracker.rb +3 -3
  68. data/lib/karafka/pro/iterator/expander.rb +1 -1
  69. data/lib/karafka/pro/iterator/tpl_builder.rb +1 -1
  70. data/lib/karafka/pro/iterator.rb +2 -2
  71. data/lib/karafka/pro/processing/coordinators/errors_tracker.rb +2 -3
  72. data/lib/karafka/pro/processing/coordinators/filters_applier.rb +3 -3
  73. data/lib/karafka/pro/processing/filters/delayer.rb +1 -1
  74. data/lib/karafka/pro/processing/filters/expirer.rb +1 -1
  75. data/lib/karafka/pro/processing/filters/throttler.rb +1 -1
  76. data/lib/karafka/pro/processing/schedulers/default.rb +2 -4
  77. data/lib/karafka/pro/processing/strategies/lrj/default.rb +2 -4
  78. data/lib/karafka/pro/processing/strategies/vp/default.rb +2 -4
  79. data/lib/karafka/pro/processing/subscription_groups_coordinator.rb +2 -3
  80. data/lib/karafka/pro/recurring_tasks/contracts/config.rb +2 -4
  81. data/lib/karafka/pro/recurring_tasks/contracts/task.rb +2 -4
  82. data/lib/karafka/pro/recurring_tasks/dispatcher.rb +6 -5
  83. data/lib/karafka/pro/recurring_tasks/schedule.rb +4 -6
  84. data/lib/karafka/pro/recurring_tasks.rb +8 -5
  85. data/lib/karafka/pro/routing/features/adaptive_iterator/contracts/topic.rb +2 -4
  86. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +2 -4
  87. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +2 -4
  88. data/lib/karafka/pro/routing/features/delaying/topic.rb +2 -4
  89. data/lib/karafka/pro/routing/features/direct_assignments/contracts/consumer_group.rb +4 -8
  90. data/lib/karafka/pro/routing/features/direct_assignments/contracts/topic.rb +5 -7
  91. data/lib/karafka/pro/routing/features/direct_assignments/subscription_group.rb +7 -6
  92. data/lib/karafka/pro/routing/features/direct_assignments/topic.rb +2 -2
  93. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +2 -4
  94. data/lib/karafka/pro/routing/features/expiring/topic.rb +2 -4
  95. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +2 -4
  96. data/lib/karafka/pro/routing/features/filtering/topic.rb +2 -3
  97. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +2 -4
  98. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +2 -4
  99. data/lib/karafka/pro/routing/features/multiplexing/contracts/topic.rb +3 -5
  100. data/lib/karafka/pro/routing/features/non_blocking_job/topic.rb +3 -3
  101. data/lib/karafka/pro/routing/features/offset_metadata/contracts/topic.rb +2 -4
  102. data/lib/karafka/pro/routing/features/parallel_segments/contracts/consumer_group.rb +2 -4
  103. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +3 -5
  104. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +2 -4
  105. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +2 -4
  106. data/lib/karafka/pro/routing/features/pausing/config.rb +26 -0
  107. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +17 -11
  108. data/lib/karafka/pro/routing/features/pausing/topic.rb +69 -8
  109. data/lib/karafka/pro/routing/features/periodic_job/contracts/topic.rb +2 -4
  110. data/lib/karafka/pro/routing/features/recurring_tasks/contracts/topic.rb +2 -4
  111. data/lib/karafka/pro/routing/features/scheduled_messages/contracts/topic.rb +2 -4
  112. data/lib/karafka/pro/routing/features/swarm/contracts/routing.rb +2 -4
  113. data/lib/karafka/pro/routing/features/swarm/contracts/topic.rb +6 -8
  114. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +2 -4
  115. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +2 -4
  116. data/lib/karafka/pro/scheduled_messages/contracts/config.rb +2 -4
  117. data/lib/karafka/pro/scheduled_messages/contracts/message.rb +2 -4
  118. data/lib/karafka/pro/scheduled_messages.rb +4 -6
  119. data/lib/karafka/pro/swarm/liveness_listener.rb +2 -2
  120. data/lib/karafka/processing/coordinator.rb +2 -4
  121. data/lib/karafka/processing/coordinators_buffer.rb +2 -3
  122. data/lib/karafka/processing/executor.rb +2 -3
  123. data/lib/karafka/processing/jobs/base.rb +2 -3
  124. data/lib/karafka/processing/workers_batch.rb +2 -3
  125. data/lib/karafka/railtie.rb +1 -0
  126. data/lib/karafka/routing/activity_manager.rb +2 -2
  127. data/lib/karafka/routing/builder.rb +5 -7
  128. data/lib/karafka/routing/consumer_group.rb +4 -6
  129. data/lib/karafka/routing/contracts/consumer_group.rb +3 -5
  130. data/lib/karafka/routing/contracts/routing.rb +2 -4
  131. data/lib/karafka/routing/contracts/topic.rb +2 -4
  132. data/lib/karafka/routing/features/active_job/contracts/topic.rb +2 -4
  133. data/lib/karafka/routing/features/active_job/topic.rb +6 -0
  134. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +2 -4
  135. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +3 -5
  136. data/lib/karafka/routing/features/deserializers/contracts/topic.rb +2 -4
  137. data/lib/karafka/routing/features/eofed/contracts/topic.rb +2 -4
  138. data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +2 -4
  139. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +2 -4
  140. data/lib/karafka/routing/topics.rb +4 -9
  141. data/lib/karafka/server.rb +1 -1
  142. data/lib/karafka/setup/config.rb +66 -9
  143. data/lib/karafka/setup/contracts/config.rb +12 -10
  144. data/lib/karafka/setup/defaults_injector.rb +3 -2
  145. data/lib/karafka/setup/dsl.rb +2 -3
  146. data/lib/karafka/swarm/liveness_listener.rb +2 -3
  147. data/lib/karafka/swarm/supervisor.rb +1 -1
  148. data/lib/karafka/version.rb +1 -1
  149. data/lib/karafka.rb +2 -2
  150. metadata +8 -2
  151. data/.diffend.yml +0 -3
@@ -36,11 +36,10 @@ module Karafka
  end

  # Resumes processing of partitions for which pause time has ended.
- # @param block we want to run for resumed topic partitions
  # @yieldparam [String] topic name
  # @yieldparam [Integer] partition number
- def resume(&block)
- @pauses_manager.resume(&block)
+ def resume(&)
+ @pauses_manager.resume(&)
  end

  # @param topic_name [String] topic name
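Many of the hunks in this release replace named `&block` parameters with Ruby's anonymous block forwarding (`&`), available since Ruby 3.1. A minimal standalone sketch of the pattern, not taken from Karafka itself:

# Anonymous block forwarding (Ruby 3.1+): `&` captures the block given to the
# outer method and forwards it on without binding it to a named variable.
def doubled(&)
  [1, 2].map(&)
end

# Equivalent pre-3.1 form with a named block parameter:
def doubled_legacy(&block)
  [1, 2].map(&block)
end

doubled { |n| n * 2 }        #=> [2, 4]
doubled_legacy { |n| n * 2 } #=> [2, 4]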
@@ -83,9 +83,8 @@ module Karafka

  # Runs the wrap/around execution context appropriate for a given action
  # @param action [Symbol] action execution wrapped with our block
- # @param block [Proc] execution context
- def wrap(action, &block)
- consumer.on_wrap(action, &block)
+ def wrap(action, &)
+ consumer.on_wrap(action, &)
  end

  # Runs consumer data processing against given batch and handles failures and errors.
@@ -29,11 +29,10 @@ module Karafka
  end

  # Runs the wrap/around job hook within which the rest of the flow happens
- # @param block [Proc] whole user related processing flow
  # @note We inject the action name so user can decide whether to run custom logic on a
  # given action or not.
- def wrap(&block)
- executor.wrap(self.class.action, &block)
+ def wrap(&)
+ executor.wrap(self.class.action, &)
  end

  # When redefined can run any code prior to the job being scheduled
@@ -16,9 +16,8 @@ module Karafka
  end

  # Iterates over available workers and yields each worker
- # @param block [Proc] block we want to run
- def each(&block)
- @batch.each(&block)
+ def each(&)
+ @batch.each(&)
  end

  # @return [Integer] number of workers in the batch
@@ -49,6 +49,7 @@ if Karafka.rails?
  rails71plus = Rails.gem_version >= Gem::Version.new('7.1.0')

  # Rails 7.1 replaced the broadcast module with a broadcast logger
+ # While 7.1 is EOL, we keep this for users who may still use it without official support
  if rails71plus
  Rails.logger.broadcast_to(stdout_logger)
  else
@@ -58,8 +58,8 @@ module Karafka
  # @return [Hash] accumulated data in a hash for validations
  def to_h
  (
- SUPPORTED_TYPES.map { |type| ["include_#{type}".to_sym, @included[type]] } +
- SUPPORTED_TYPES.map { |type| ["exclude_#{type}".to_sym, @excluded[type]] }
+ SUPPORTED_TYPES.map { |type| [:"include_#{type}", @included[type]] } +
+ SUPPORTED_TYPES.map { |type| [:"exclude_#{type}", @excluded[type]] }
  ).to_h
  end

@@ -81,13 +81,12 @@ module Karafka
  private :array_clear

  # Clear routes and draw them again with the given block. Helpful for testing purposes.
- # @param block [Proc] block we will evaluate within the builder context
- def redraw(&block)
+ def redraw(&)
  @mutex.synchronize do
  @draws.clear
  array_clear
  end
- draw(&block)
+ draw(&)
  end

  # @return [Array<Karafka::Routing::ConsumerGroup>] only active consumer groups that
@@ -124,15 +123,14 @@ module Karafka

  # Builds and saves given consumer group
  # @param group_id [String, Symbol] name for consumer group
- # @param block [Proc] proc that should be executed in the proxy context
- def consumer_group(group_id, &block)
+ def consumer_group(group_id, &)
  consumer_group = find { |cg| cg.name == group_id.to_s }

  if consumer_group
- Proxy.new(consumer_group, &block).target
+ Proxy.new(consumer_group, &).target
  else
  consumer_group = ConsumerGroup.new(group_id.to_s)
- self << Proxy.new(consumer_group, &block).target
+ self << Proxy.new(consumer_group, &).target
  end
  end

@@ -40,14 +40,13 @@ module Karafka

  # Builds a topic representation inside of a current consumer group route
  # @param name [String, Symbol] name of topic to which we want to subscribe
- # @param block [Proc] block that we want to evaluate in the topic context
  # @return [Karafka::Routing::Topic] newly built topic instance
- def topic=(name, &block)
+ def topic=(name, &)
  topic = Topic.new(name, self)
  @topics << Proxy.new(
  topic,
  builder.defaults,
- &block
+ &
  ).target
  built_topic = @topics.last
  # We overwrite it conditionally in case it was not set by the user inline in the topic
@@ -59,13 +58,12 @@ module Karafka
  # Assigns the current subscription group id based on the defined one and allows for further
  # topic definition
  # @param name [String, Symbol] name of the current subscription group
- # @param block [Proc] block that may include topics definitions
- def subscription_group=(name = SubscriptionGroup.id, &block)
+ def subscription_group=(name = SubscriptionGroup.id, &)
  # We cast it here, so the routing supports symbol based but that's anyhow later on
  # validated as a string
  @current_subscription_group_details = { name: name.to_s }

- Proxy.new(self, &block)
+ Proxy.new(self, &)

  # We need to reset the current subscription group after it is used, so it won't leak
  # outside to other topics that would be defined without a defined subscription group
@@ -7,10 +7,8 @@ module Karafka
  # Contract for single full route (consumer group + topics) validation.
  class ConsumerGroup < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('consumer_group')
  end

@@ -39,7 +37,7 @@ module Karafka
  topics_consumers[topic[:name]] << topic[:consumer]
  end

- next if topics_consumers.values.map(&:size).all? { |count| count == 1 }
+ next if topics_consumers.values.map(&:size).all?(1)

  [[%i[topics], :many_consumers_same_topic]]
  end
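Two Ruby idioms recur in the contract hunks above and below. First, `YAML.safe_load(File.read(path))` is collapsed into `YAML.safe_load_file(path)`, which Psych provides on modern Rubies and which parses these locale files to the same structure. Second, `Enumerable#all?` is called with a pattern argument (supported since Ruby 2.5), which matches every element against the pattern via `===`. A short standalone illustration of both, with an assumed `errors.yml` path:

require 'yaml'

path = File.join('config', 'locales', 'errors.yml')

# Both forms parse the same document; safe_load_file opens the file itself
YAML.safe_load(File.read(path)) == YAML.safe_load_file(path) #=> true

# all? with a pattern argument checks `pattern === element` for every element
[1, 1, 1].all?(1)      #=> true, same as .all? { |count| count == 1 }
[1, 2, 1].all?(1)      #=> false
[:a, :b].all?(Symbol)  #=> true, Symbol === :a is a class check
[:a, 'b'].all?(Symbol) #=> false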
@@ -6,10 +6,8 @@ module Karafka
  # Ensures that routing wide rules are obeyed
  class Routing < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing')
  end

@@ -6,10 +6,8 @@ module Karafka
  # Consumer group topic validation rules.
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -10,10 +10,8 @@ module Karafka
  # in order to be able to use active job routing
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -5,6 +5,12 @@ module Karafka
  module Features
  class ActiveJob < Base
  # Topic extensions to be able to check if given topic is ActiveJob topic
+ #
+ # @note ActiveJob topics do not have per-topic deserializer configuration. The deserializer
+ # is configured globally via `config.internal.active_job.deserializer` because Rails
+ # serializes jobs before dispatching them, requiring a consistent serialization format
+ # across all ActiveJob topics. If you need custom serialization (e.g., Avro, Protobuf),
+ # configure it once at the application level rather than per-topic.
  module Topic
  # This method calls the parent class initializer and then sets up the
  # extra instance variable to nil. The explicit initialization
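As the note above explains, a custom ActiveJob (de)serializer is wired in once, globally. A minimal configuration sketch; `AvroJobDeserializer` is a hypothetical class and would need to mirror the interface of the default `Karafka::ActiveJob::Deserializer`, whose methods are not shown in this diff:

# config/initializers/karafka.rb (illustrative placement)
Karafka::App.setup do |config|
  # One global (de)serializer for every ActiveJob topic, e.g. Avro or Protobuf
  config.internal.active_job.deserializer = AvroJobDeserializer.new
end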
@@ -9,10 +9,8 @@ module Karafka
  # Rules around dead letter queue settings
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -9,10 +9,8 @@ module Karafka
  # Basic validation of the Kafka expected config details
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -22,7 +20,7 @@ module Karafka
  required(:replication_factor) { |val| val.is_a?(Integer) && val.positive? }
  required(:details) do |val|
  val.is_a?(Hash) &&
- val.keys.all? { |key| key.is_a?(Symbol) }
+ val.keys.all?(Symbol)
  end
  end
  end
@@ -9,10 +9,8 @@ module Karafka
  # Basic validation of the Kafka expected config details
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -9,10 +9,8 @@ module Karafka
  # Contract for eofed topic setup
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -9,10 +9,8 @@ module Karafka
  # Contract for inline insights topic setup
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -9,10 +9,8 @@ module Karafka
  # Rules around manual offset management settings
  class Topic < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('routing').fetch('topic')
  end

@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- # frozen_string_literal: true
-
  module Karafka
  module Routing
  # Abstraction layer on top of groups of topics
@@ -17,18 +15,15 @@ module Karafka
  end

  # Yields each topic
- #
- # @param [Proc] block we want to yield with on each topic
- def each(&block)
- @accumulator.each(&block)
+ def each(&)
+ @accumulator.each(&)
  end

  # Allows us to remove elements from the topics
  #
  # Block to decide what to delete
- # @param block [Proc]
- def delete_if(&block)
- @accumulator.delete_if(&block)
+ def delete_if(&)
+ @accumulator.delete_if(&)
  end

  # Finds topic by its name
@@ -64,7 +64,7 @@ module Karafka
  process.on_sigterm { stop }
  process.on_sigtstp { quiet }
  # Needed for instrumentation
- process.on_sigttin {}
+ process.on_sigttin { nil }
  process.supervise

  # This will only run when not in a swarm mode. In swarm mode the server runs post-fork, so
@@ -52,12 +52,16 @@ module Karafka
  setting :shutdown_timeout, default: 60_000
  # option [Integer] number of threads in which we want to do parallel processing
  setting :concurrency, default: 5
- # option [Integer] how long should we wait upon processing error (milliseconds)
- setting :pause_timeout, default: 1_000
- # option [Integer] what is the max timeout in case of an exponential backoff (milliseconds)
- setting :pause_max_timeout, default: 30_000
- # option [Boolean] should we use exponential backoff
- setting :pause_with_exponential_backoff, default: true
+ # Namespace for pause-related settings
+ setting :pause do
+ # option [Integer] how long should we wait upon processing error (milliseconds)
+ setting :timeout, default: 1_000
+ # option [Integer] what is the max timeout in case of an exponential backoff (milliseconds)
+ setting :max_timeout, default: 30_000
+ # option [Boolean] should we use exponential backoff
+ setting :with_exponential_backoff, default: true
+ end
+
  # option [::WaterDrop::Producer, nil]
  # Unless configured, will be created once Karafka is configured based on user Karafka setup
  setting :producer, default: nil
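With the nested namespace above, error-pause behaviour is configured under `config.pause`. A minimal setup sketch; the values are illustrative and the defaults already match what the diff defines:

Karafka::App.setup do |config|
  # Error pausing now lives under the nested `pause` namespace
  config.pause.timeout = 2_000                 # wait 2s after a processing error
  config.pause.max_timeout = 60_000            # cap exponential backoff at 60s
  config.pause.with_exponential_backoff = true # grow the pause on repeated errors
end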
@@ -336,6 +340,12 @@ module Karafka
  setting :job_options_contract, default: ActiveJob::JobOptionsContract.new
  # option consumer [Class] consumer class that should be used to consume ActiveJob data
  setting :consumer_class, default: ActiveJob::Consumer
+ # option deserializer [Karafka::ActiveJob::Deserializer] deserializer for ActiveJob jobs
+ # Despite the name, handles both serialization (outgoing) and deserialization
+ # (incoming). Can be replaced with a custom implementation for formats like Avro,
+ # Protobuf, etc. This is a global setting because Rails serializes jobs before
+ # Karafka receives them, so we need a consistent approach across all ActiveJob topics.
+ setting :deserializer, default: ::Karafka::ActiveJob::Deserializer.new
  end
  end

@@ -343,10 +353,57 @@ module Karafka
  # Thanks to that we have an initial state out of the box.
  configure

+ # Backwards compatibility: Add old flat API methods to the config instance
+ # These delegate to the new nested pause config
+ # @deprecated Will be removed in Karafka 2.6
+ #
+ # Prior to the introduction of nested pause configuration, pause-related settings were
+ # accessed directly on the config object (e.g., `config.pause_timeout`). With the nested
+ # structure introduced, these settings moved to `config.pause.timeout`, etc.
+ #
+ # This instance_eval block adds delegation methods to maintain backwards compatibility,
+ # allowing existing code using the old flat API to continue working without modification.
+ config.instance_eval do
+ # @return [Integer] delegated timeout value from pause.timeout
+ # @deprecated Use config.pause.timeout instead
+ def pause_timeout
+ pause.timeout
+ end
+
+ # @param value [Integer] timeout value to set
+ # @deprecated Use config.pause.timeout= instead
+ def pause_timeout=(value)
+ pause.timeout = value
+ end
+
+ # @return [Integer] delegated max_timeout value from pause.max_timeout
+ # @deprecated Use config.pause.max_timeout instead
+ def pause_max_timeout
+ pause.max_timeout
+ end
+
+ # @param value [Integer] max timeout value to set
+ # @deprecated Use config.pause.max_timeout= instead
+ def pause_max_timeout=(value)
+ pause.max_timeout = value
+ end
+
+ # @return [Boolean] delegated exponential backoff flag from pause.with_exponential_backoff
+ # @deprecated Use config.pause.with_exponential_backoff instead
+ def pause_with_exponential_backoff
+ pause.with_exponential_backoff
+ end
+
+ # @param value [Boolean] exponential backoff flag to set
+ # @deprecated Use config.pause.with_exponential_backoff= instead
+ def pause_with_exponential_backoff=(value)
+ pause.with_exponential_backoff = value
+ end
+ end
+
  class << self
  # Configuring method
- # @param block [Proc] block we want to execute with the config instance
- def setup(&block)
+ def setup(&)
  # Will prepare and verify license if present
  Licenser.prepare_and_verify(config.license)

@@ -358,7 +415,7 @@ module Karafka
  # of the pro defaults with custom components
  Pro::Loader.pre_setup_all(config) if Karafka.pro?

- configure(&block)
+ configure(&)

  Contracts::Config.new.validate!(
  config.to_h,
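Thanks to the delegation block above, code written against the old flat accessors keeps working while the nested form becomes the canonical one. A small sketch of both forms, assuming the defaults defined in this release:

config = Karafka::App.config

# Canonical nested access introduced here
config.pause.timeout       #=> 1_000

# Deprecated flat access, delegated until removal in Karafka 2.6
config.pause_timeout       #=> 1_000
config.pause_max_timeout = 45_000
config.pause.max_timeout   #=> 45_000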
@@ -12,10 +12,8 @@ module Karafka
  # so we validate all of that once all the routes are defined and ready.
  class Config < Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('setup').fetch('config')
  end

@@ -33,9 +31,13 @@ module Karafka
  required(:client_id) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
  required(:concurrency) { |val| val.is_a?(Integer) && val.positive? }
  required(:consumer_persistence) { |val| [true, false].include?(val) }
- required(:pause_timeout) { |val| val.is_a?(Integer) && val.positive? }
- required(:pause_max_timeout) { |val| val.is_a?(Integer) && val.positive? }
- required(:pause_with_exponential_backoff) { |val| [true, false].include?(val) }
+
+ nested(:pause) do
+ required(:timeout) { |val| val.is_a?(Integer) && val.positive? }
+ required(:max_timeout) { |val| val.is_a?(Integer) && val.positive? }
+ required(:with_exponential_backoff) { |val| [true, false].include?(val) }
+ end
+
  required(:strict_topics_namespacing) { |val| [true, false].include?(val) }
  required(:shutdown_timeout) { |val| val.is_a?(Integer) && val.positive? }
  required(:max_wait_time) { |val| val.is_a?(Integer) && val.positive? }
@@ -171,12 +173,12 @@ module Karafka
  virtual do |data, errors|
  next unless errors.empty?

- pause_timeout = data.fetch(:pause_timeout)
- pause_max_timeout = data.fetch(:pause_max_timeout)
+ pause_timeout = data.fetch(:pause).fetch(:timeout)
+ pause_max_timeout = data.fetch(:pause).fetch(:max_timeout)

  next if pause_timeout <= pause_max_timeout

- [[%i[pause_timeout], :max_timeout_vs_pause_max_timeout]]
+ [[%i[pause timeout], :max_timeout_vs_pause_max_timeout]]
  end

  virtual do |data, errors|
@@ -47,8 +47,9 @@ module Karafka
  'socket.nagle.disable': true
  }.freeze

- private_constant :CONSUMER_KAFKA_DEFAULTS, :CONSUMER_KAFKA_DEV_DEFAULTS,
- :PRODUCER_KAFKA_DEV_DEFAULTS
+ private_constant(
+ :CONSUMER_KAFKA_DEFAULTS, :CONSUMER_KAFKA_DEV_DEFAULTS, :PRODUCER_KAFKA_DEV_DEFAULTS
+ )

  class << self
  # Propagates the kafka setting defaults unless they are already present for consumer config
@@ -7,9 +7,8 @@ module Karafka
  # from the Karafka::Setup::Config
  module Dsl
  # Sets up the whole configuration
- # @param [Block] block configuration block
- def setup(&block)
- Setup::Config.setup(&block)
+ def setup(&)
+ Setup::Config.setup(&)
  end

  # @return [Karafka::Config] config instance
@@ -35,9 +35,8 @@ module Karafka
  private

  # Wraps the logic with a mutex
- # @param block [Proc] code we want to run in mutex
- def synchronize(&block)
- @mutex.synchronize(&block)
+ def synchronize(&)
+ @mutex.synchronize(&)
  end

  # Runs requested code once in a while
@@ -67,7 +67,7 @@ module Karafka
  process.on_sigtstp { quiet }
  process.on_sigttin { signal('TTIN') }
  # Needed to be registered as we want to unlock on child changes
- process.on_sigchld {}
+ process.on_sigchld { nil }
  process.on_any_active { unlock }
  process.supervise

@@ -3,5 +3,5 @@
  # Main module namespace
  module Karafka
  # Current Karafka version
- VERSION = '2.5.1'
+ VERSION = '2.5.2'
  end
data/lib/karafka.rb CHANGED
@@ -101,7 +101,7 @@ module Karafka
  # always mean user wants to have it required. User may want to run Karafka without Rails
  # even when having both in the same Gemfile.
  def rails?
- return @rails if instance_variable_defined?('@rails')
+ return @rails if instance_variable_defined?(:@rails)

  @rails = Object.const_defined?('Rails::Railtie')

@@ -177,7 +177,7 @@ loader.eager_load

  # This will load features but since Pro are not loaded automatically, they will not be visible
  # nor included here
- ::Karafka::Routing::Features::Base.load_all
+ Karafka::Routing::Features::Base.load_all

  # We need to detect and require (not setup) Pro components during the gem load, because we need
  # to make pro components available in case anyone wants to use them as a base to their own
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka
  version: !ruby/object:Gem::Version
- version: 2.5.1
+ version: 2.5.2
  platform: ruby
  authors:
  - Maciej Mensfeld
@@ -105,7 +105,6 @@ extra_rdoc_files: []
  files:
  - ".coditsu/ci.yml"
  - ".console_irbrc"
- - ".diffend.yml"
  - ".github/CODEOWNERS"
  - ".github/FUNDING.yml"
  - ".github/ISSUE_TEMPLATE/bug_report.md"
@@ -153,14 +152,20 @@ files:
  - examples/payloads/json/sample_set_01/ingestion_event.json
  - examples/payloads/json/sample_set_01/transaction_event.json
  - examples/payloads/json/sample_set_01/user_event.json
+ - examples/payloads/json/sample_set_02/download.json
+ - examples/payloads/json/sample_set_03/event_type_1.json
+ - examples/payloads/json/sample_set_03/event_type_2.json
+ - examples/payloads/json/sample_set_03/event_type_3.json
  - karafka.gemspec
  - lib/active_job/karafka.rb
  - lib/active_job/queue_adapters/karafka_adapter.rb
  - lib/karafka.rb
  - lib/karafka/active_job/consumer.rb
  - lib/karafka/active_job/current_attributes.rb
+ - lib/karafka/active_job/current_attributes/job_wrapper.rb
  - lib/karafka/active_job/current_attributes/loading.rb
  - lib/karafka/active_job/current_attributes/persistence.rb
+ - lib/karafka/active_job/deserializer.rb
  - lib/karafka/active_job/dispatcher.rb
  - lib/karafka/active_job/job_extensions.rb
  - lib/karafka/active_job/job_options_contract.rb
@@ -452,6 +457,7 @@ files:
  - lib/karafka/pro/routing/features/patterns/topic.rb
  - lib/karafka/pro/routing/features/patterns/topics.rb
  - lib/karafka/pro/routing/features/pausing.rb
+ - lib/karafka/pro/routing/features/pausing/config.rb
  - lib/karafka/pro/routing/features/pausing/contracts/topic.rb
  - lib/karafka/pro/routing/features/pausing/topic.rb
  - lib/karafka/pro/routing/features/periodic_job.rb
data/.diffend.yml DELETED
@@ -1,3 +0,0 @@
- project_id: 'de9b9933-7610-4cc4-b69b-f7e3e3c5e797'
- shareable_id: '68a8c626-b605-40ad-ac45-e3961ad7c57d'
- shareable_key: 'a3ec2dac-fba2-4b6c-b181-49e927b15057'