karafka 2.0.23 → 2.0.24

Sign up to get free protection for your applications and to get access to all the features.
Files changed (68) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +2 -0
  4. data/CHANGELOG.md +32 -1
  5. data/Gemfile.lock +8 -7
  6. data/README.md +3 -3
  7. data/config/{errors.yml → locales/errors.yml} +1 -1
  8. data/config/locales/pro_errors.yml +18 -0
  9. data/docker-compose.yml +3 -0
  10. data/karafka.gemspec +2 -2
  11. data/lib/karafka/active_job/job_options_contract.rb +1 -1
  12. data/lib/karafka/admin.rb +2 -4
  13. data/lib/karafka/app.rb +15 -4
  14. data/lib/karafka/base_consumer.rb +36 -0
  15. data/lib/karafka/connection/listener.rb +1 -1
  16. data/lib/karafka/contracts/config.rb +1 -1
  17. data/lib/karafka/contracts/consumer_group.rb +1 -1
  18. data/lib/karafka/contracts/server_cli_options.rb +1 -1
  19. data/lib/karafka/contracts/topic.rb +1 -1
  20. data/lib/karafka/instrumentation/logger_listener.rb +32 -0
  21. data/lib/karafka/instrumentation/notifications.rb +3 -0
  22. data/lib/karafka/messages/message.rb +14 -2
  23. data/lib/karafka/messages/parser.rb +14 -0
  24. data/lib/karafka/pro/active_job/job_options_contract.rb +1 -1
  25. data/lib/karafka/pro/encryption/cipher.rb +58 -0
  26. data/lib/karafka/pro/encryption/contracts/config.rb +79 -0
  27. data/lib/karafka/pro/encryption/errors.rb +24 -0
  28. data/lib/karafka/pro/encryption/messages/middleware.rb +46 -0
  29. data/lib/karafka/pro/encryption/messages/parser.rb +56 -0
  30. data/lib/karafka/pro/encryption/setup/config.rb +48 -0
  31. data/lib/karafka/pro/encryption.rb +47 -0
  32. data/lib/karafka/pro/loader.rb +22 -1
  33. data/lib/karafka/pro/processing/strategies/aj_dlq_mom.rb +1 -1
  34. data/lib/karafka/pro/processing/strategies/aj_lrj_mom_vp.rb +1 -1
  35. data/lib/karafka/pro/processing/strategies/aj_mom_vp.rb +1 -1
  36. data/lib/karafka/pro/processing/strategies/default.rb +1 -1
  37. data/lib/karafka/pro/processing/strategies/dlq.rb +1 -1
  38. data/lib/karafka/pro/processing/strategies/dlq_lrj.rb +1 -1
  39. data/lib/karafka/pro/processing/strategies/dlq_lrj_mom.rb +1 -1
  40. data/lib/karafka/pro/processing/strategies/dlq_mom.rb +1 -1
  41. data/lib/karafka/pro/processing/strategies/lrj.rb +1 -1
  42. data/lib/karafka/pro/processing/strategies/lrj_mom.rb +1 -1
  43. data/lib/karafka/pro/processing/strategies/mom.rb +1 -1
  44. data/lib/karafka/pro/routing/features/dead_letter_queue/contract.rb +2 -2
  45. data/lib/karafka/pro/routing/features/long_running_job/contract.rb +2 -2
  46. data/lib/karafka/pro/routing/features/virtual_partitions/contract.rb +2 -2
  47. data/lib/karafka/processing/executor.rb +1 -1
  48. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +1 -1
  49. data/lib/karafka/processing/strategies/default.rb +1 -1
  50. data/lib/karafka/processing/strategies/dlq.rb +1 -1
  51. data/lib/karafka/processing/strategies/dlq_mom.rb +1 -1
  52. data/lib/karafka/processing/strategies/mom.rb +1 -1
  53. data/lib/karafka/processing/worker.rb +1 -1
  54. data/lib/karafka/railtie.rb +3 -0
  55. data/lib/karafka/routing/builder.rb +1 -1
  56. data/lib/karafka/routing/consumer_group.rb +3 -3
  57. data/lib/karafka/routing/features/active_job/contract.rb +1 -1
  58. data/lib/karafka/routing/features/dead_letter_queue/contract.rb +1 -1
  59. data/lib/karafka/routing/features/manual_offset_management/contract.rb +1 -1
  60. data/lib/karafka/server.rb +14 -14
  61. data/lib/karafka/setup/config.rb +15 -2
  62. data/lib/karafka/status.rb +27 -9
  63. data/lib/karafka/templates/karafka.rb.erb +1 -2
  64. data/lib/karafka/time_trackers/pause.rb +3 -1
  65. data/lib/karafka/version.rb +1 -1
  66. data.tar.gz.sig +0 -0
  67. metadata +16 -7
  68. metadata.gz.sig +0 -0
@@ -0,0 +1,46 @@
1
# frozen_string_literal: true

# This Karafka component is a Pro component under a commercial license.
# This Karafka component is NOT licensed under LGPL.
#
# All of the commercial components are present in the lib/karafka/pro directory of this
# repository and their usage requires commercial license agreement.
#
# Karafka has also commercial-friendly license, commercial support and commercial components.
#
# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
# your code to Maciej Mensfeld.

module Karafka
  module Pro
    module Encryption
      # Encryption related messages components
      module Messages
        # WaterDrop middleware that transparently encrypts the outgoing message payload.
        # It is registered only when the encryption feature is enabled.
        class Middleware
          # Encrypts the payload of the given WaterDrop message and tags it with the
          # configured encryption version header so consumers know how to decrypt it.
          #
          # @param message [Hash] WaterDrop message hash (mutated in place)
          # @return [Hash] the same hash with encrypted payload and version header
          def call(message)
            headers = (message[:headers] ||= {})
            headers['encryption'] = version
            message[:payload] = cipher.encrypt(message[:payload])
            message
          end

          private

          # @return [::Karafka::Pro::Encryption::Cipher] cipher taken from the app config
          def cipher
            @cipher ||= ::Karafka::App.config.encryption.cipher
          end

          # @return [String] configured encryption version indicator
          def version
            @version ||= ::Karafka::App.config.encryption.version
          end
        end
      end
    end
  end
end
@@ -0,0 +1,56 @@
1
# frozen_string_literal: true

# This Karafka component is a Pro component under a commercial license.
# This Karafka component is NOT licensed under LGPL.
#
# All of the commercial components are present in the lib/karafka/pro directory of this
# repository and their usage requires commercial license agreement.
#
# Karafka has also commercial-friendly license, commercial support and commercial components.
#
# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
# your code to Maciej Mensfeld.

module Karafka
  module Pro
    module Encryption
      module Messages
        # Pro parser that takes into consideration encryption usage
        # @note There may be a case where someone decides not to encrypt data and we start getting
        #   unencrypted payloads. That is why we always rely on message headers for encryption
        #   indication.
        class Parser < ::Karafka::Messages::Parser
          # Decrypts the raw payload (when applicable) and delegates to the default parser.
          #
          # @param message [::Karafka::Messages::Message]
          # @return [Object] deserialized payload
          def call(message)
            decrypt!(message) if active? && message.headers.key?('encryption')

            super
          end

          private

          # Replaces the message raw payload with its decrypted form, using the encryption
          # version from the headers to pick the proper private key.
          #
          # @param message [::Karafka::Messages::Message]
          def decrypt!(message)
            message.raw_payload = cipher.decrypt(
              message.headers['encryption'],
              message.raw_payload
            )
          end

          # @return [::Karafka::Pro::Encryption::Cipher]
          def cipher
            @cipher ||= ::Karafka::App.config.encryption.cipher
          end

          # @return [Boolean] is encryption active (memoized, including a false value)
          def active?
            @active = ::Karafka::App.config.encryption.active if @active.nil?

            @active
          end
        end
      end
    end
  end
end
@@ -0,0 +1,48 @@
1
# frozen_string_literal: true

# This Karafka component is a Pro component under a commercial license.
# This Karafka component is NOT licensed under LGPL.
#
# All of the commercial components are present in the lib/karafka/pro directory of this
# repository and their usage requires commercial license agreement.
#
# Karafka has also commercial-friendly license, commercial support and commercial components.
#
# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
# your code to Maciej Mensfeld.

module Karafka
  module Pro
    module Encryption
      # Setup and config related encryption components
      module Setup
        # Config for encryption
        class Config
          extend ::Karafka::Core::Configurable

          # Whether the encryption feature should be in use at all
          setting :active, default: false

          # Version indicator attached to produced messages. Supporting versions makes key
          # rotation possible: bump the version, rotate the keys, and on decryption Karafka
          # will pick the matching private key based on this value.
          setting :version, default: '1'

          # Single public key used for producing messages.
          # It needs to be present even when this process does not produce, because of the
          # web-ui and potentially other similar cases.
          setting :public_key, default: ''

          # Private keys in pem format, keyed by version (value is the key itself).
          # This allows us to support key rotation.
          setting :private_keys, default: {}

          # Cipher used to encrypt and decrypt data
          setting :cipher, default: Encryption::Cipher.new

          configure
        end
      end
    end
  end
end
@@ -0,0 +1,47 @@
1
# frozen_string_literal: true

# This Karafka component is a Pro component under a commercial license.
# This Karafka component is NOT licensed under LGPL.
#
# All of the commercial components are present in the lib/karafka/pro directory of this
# repository and their usage requires commercial license agreement.
#
# Karafka has also commercial-friendly license, commercial support and commercial components.
#
# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
# your code to Maciej Mensfeld.

module Karafka
  module Pro
    # Out of the box encryption engine for both Karafka and WaterDrop
    # It uses asymmetric encryption via RSA. We use asymmetric so we can have producers that won't
    # have ability (when private key not added) to decrypt messages.
    module Encryption
      # Sets up the additional `encryption` config scope on the root node
      #
      # @param config [Karafka::Core::Configurable::Node] root node config
      def self.pre_setup(config)
        # Expand the config with this feature specific stuff
        config.instance_eval do
          setting(:encryption, default: Setup::Config.config)
        end
      end

      # Validates the encryption settings and, when the feature is enabled, swaps in the
      # encryption-aware components for both consuming and producing
      #
      # @param config [Karafka::Core::Configurable::Node] root node config
      def self.post_setup(config)
        Encryption::Contracts::Config.new.validate!(config.to_h)

        # Nothing else to wire up when encryption is not active
        return unless config.encryption.active

        # Replace the default parser with one that understands encrypted payloads
        config.internal.messages.parser = Messages::Parser.new

        # Encrypt outgoing WaterDrop messages
        config.producer.middleware.append(Messages::Middleware.new)
      end
    end
  end
end
@@ -23,6 +23,11 @@ module Karafka
23
23
  processing/jobs/consume_non_blocking
24
24
  processing/strategies/base
25
25
  routing/features/base
26
+ encryption
27
+ encryption/cipher
28
+ encryption/setup/config
29
+ encryption/contracts/config
30
+ encryption/messages/parser
26
31
  ].freeze
27
32
 
28
33
  # Zeitwerk pro loader
@@ -44,14 +49,30 @@ module Karafka
44
49
  # Loads all the pro components and configures them wherever it is expected
45
50
  # @param config [Karafka::Core::Configurable::Node] app config that we can alter with pro
46
51
  # components
47
- def setup(config)
52
+ def pre_setup(config)
53
+ features.each { |feature| feature.pre_setup(config) }
54
+
48
55
  reconfigure(config)
49
56
 
50
57
  load_topic_features
51
58
  end
52
59
 
60
+ # Runs post setup features configuration operations
61
+ #
62
+ # @param config [Karafka::Core::Configurable::Node]
63
+ def post_setup(config)
64
+ features.each { |feature| feature.post_setup(config) }
65
+ end
66
+
53
67
  private
54
68
 
69
+ # @return [Array<Module>] extra non-routing related pro features
70
+ def features
71
+ [
72
+ Encryption
73
+ ]
74
+ end
75
+
55
76
  # Sets proper config options to use pro components
56
77
  # @param config [::Karafka::Core::Configurable::Node] root config node
57
78
  def reconfigure(config)
@@ -42,7 +42,7 @@ module Karafka
42
42
  # Do NOT commit offsets, they are committed after each job in the AJ consumer.
43
43
  coordinator.pause_tracker.reset
44
44
  elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
45
- pause(coordinator.seek_offset, nil, false)
45
+ retry_after_pause
46
46
  else
47
47
  coordinator.pause_tracker.reset
48
48
  skippable_message = find_skippable_message
@@ -51,7 +51,7 @@ module Karafka
51
51
  # If processing failed, we need to pause
52
52
  # For long running job this will overwrite the default never-ending pause and will
53
53
  # cause the processing to keep going after the error backoff
54
- pause(coordinator.seek_offset, nil, false)
54
+ retry_after_pause
55
55
  end
56
56
  end
57
57
  end
@@ -47,7 +47,7 @@ module Karafka
47
47
 
48
48
  mark_as_consumed(last_group_message)
49
49
  else
50
- pause(coordinator.seek_offset, nil, false)
50
+ retry_after_pause
51
51
  end
52
52
  end
53
53
  end
@@ -79,7 +79,7 @@ module Karafka
79
79
 
80
80
  mark_as_consumed(last_group_message)
81
81
  else
82
- pause(coordinator.seek_offset, nil, false)
82
+ retry_after_pause
83
83
  end
84
84
  end
85
85
  end
@@ -36,7 +36,7 @@ module Karafka
36
36
 
37
37
  mark_as_consumed(messages.last)
38
38
  elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
39
- pause(coordinator.seek_offset, nil, false)
39
+ retry_after_pause
40
40
  # If we've reached number of retries that we could, we need to skip the first message
41
41
  # that was not marked as consumed, pause and continue, while also moving this message
42
42
  # to the dead topic
@@ -41,7 +41,7 @@ module Karafka
41
41
 
42
42
  resume
43
43
  elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
44
- pause(coordinator.seek_offset, nil, false)
44
+ retry_after_pause
45
45
  else
46
46
  coordinator.pause_tracker.reset
47
47
 
@@ -38,7 +38,7 @@ module Karafka
38
38
 
39
39
  resume
40
40
  elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
41
- pause(coordinator.seek_offset, nil, false)
41
+ retry_after_pause
42
42
  else
43
43
  coordinator.pause_tracker.reset
44
44
 
@@ -35,7 +35,7 @@ module Karafka
35
35
  if coordinator.success?
36
36
  coordinator.pause_tracker.reset
37
37
  elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
38
- pause(coordinator.seek_offset, nil, false)
38
+ retry_after_pause
39
39
  # If we've reached number of retries that we could, we need to skip the first message
40
40
  # that was not marked as consumed, pause and continue, while also moving this message
41
41
  # to the dead topic.
@@ -57,7 +57,7 @@ module Karafka
57
57
  # If processing failed, we need to pause
58
58
  # For long running job this will overwrite the default never-ending pause and will
59
59
  # cause the processing to keep going after the error backoff
60
- pause(coordinator.seek_offset, nil, false)
60
+ retry_after_pause
61
61
  end
62
62
  end
63
63
  end
@@ -50,7 +50,7 @@ module Karafka
50
50
 
51
51
  resume
52
52
  else
53
- pause(coordinator.seek_offset, false)
53
+ retry_after_pause
54
54
  end
55
55
  end
56
56
  end
@@ -32,7 +32,7 @@ module Karafka
32
32
  if coordinator.success?
33
33
  coordinator.pause_tracker.reset
34
34
  else
35
- pause(coordinator.seek_offset, nil, false)
35
+ retry_after_pause
36
36
  end
37
37
  end
38
38
  end
@@ -21,9 +21,9 @@ module Karafka
21
21
  configure do |config|
22
22
  config.error_messages = YAML.safe_load(
23
23
  File.read(
24
- File.join(Karafka.gem_root, 'config', 'errors.yml')
24
+ File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
25
25
  )
26
- ).fetch('en').fetch('validations').fetch('pro_topic')
26
+ ).fetch('en').fetch('validations').fetch('topic')
27
27
  end
28
28
 
29
29
  # Make sure that we don't use DLQ with VP
@@ -21,9 +21,9 @@ module Karafka
21
21
  configure do |config|
22
22
  config.error_messages = YAML.safe_load(
23
23
  File.read(
24
- File.join(Karafka.gem_root, 'config', 'errors.yml')
24
+ File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
25
25
  )
26
- ).fetch('en').fetch('validations').fetch('pro_topic')
26
+ ).fetch('en').fetch('validations').fetch('topic')
27
27
  end
28
28
 
29
29
  nested(:long_running_job) do
@@ -21,9 +21,9 @@ module Karafka
21
21
  configure do |config|
22
22
  config.error_messages = YAML.safe_load(
23
23
  File.read(
24
- File.join(Karafka.gem_root, 'config', 'errors.yml')
24
+ File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
25
25
  )
26
- ).fetch('en').fetch('validations').fetch('pro_topic')
26
+ ).fetch('en').fetch('validations').fetch('topic')
27
27
  end
28
28
 
29
29
  nested(:virtual_partitions) do
@@ -31,7 +31,7 @@ module Karafka
31
31
  # @param client [Karafka::Connection::Client] kafka client
32
32
  # @param topic [Karafka::Routing::Topic] topic for which this executor will run
33
33
  def initialize(group_id, client, topic)
34
- @id = SecureRandom.uuid
34
+ @id = SecureRandom.hex(6)
35
35
  @group_id = group_id
36
36
  @client = client
37
37
  @topic = topic
@@ -26,7 +26,7 @@ module Karafka
26
26
  # Do NOT commit offsets, they are comitted after each job in the AJ consumer.
27
27
  coordinator.pause_tracker.reset
28
28
  elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
29
- pause(coordinator.seek_offset, nil, false)
29
+ retry_after_pause
30
30
  else
31
31
  coordinator.pause_tracker.reset
32
32
  skippable_message = find_skippable_message
@@ -59,7 +59,7 @@ module Karafka
59
59
 
60
60
  mark_as_consumed(messages.last)
61
61
  else
62
- pause(coordinator.seek_offset, nil, false)
62
+ retry_after_pause
63
63
  end
64
64
  end
65
65
 
@@ -26,7 +26,7 @@ module Karafka
26
26
 
27
27
  mark_as_consumed(messages.last)
28
28
  elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
29
- pause(coordinator.seek_offset, nil, false)
29
+ retry_after_pause
30
30
  # If we've reached number of retries that we could, we need to skip the first message
31
31
  # that was not marked as consumed, pause and continue, while also moving this message
32
32
  # to the dead topic
@@ -21,7 +21,7 @@ module Karafka
21
21
  if coordinator.success?
22
22
  coordinator.pause_tracker.reset
23
23
  elsif coordinator.pause_tracker.attempt <= topic.dead_letter_queue.max_retries
24
- pause(coordinator.seek_offset, nil, false)
24
+ retry_after_pause
25
25
  # If we've reached number of retries that we could, we need to skip the first message
26
26
  # that was not marked as consumed, pause and continue, while also moving this message
27
27
  # to the dead topic
@@ -20,7 +20,7 @@ module Karafka
20
20
  if coordinator.success?
21
21
  coordinator.pause_tracker.reset
22
22
  else
23
- pause(coordinator.seek_offset, nil, false)
23
+ retry_after_pause
24
24
  end
25
25
  end
26
26
  end
@@ -25,7 +25,7 @@ module Karafka
25
25
  # @param jobs_queue [JobsQueue]
26
26
  # @return [Worker]
27
27
  def initialize(jobs_queue)
28
- @id = SecureRandom.uuid
28
+ @id = SecureRandom.hex(6)
29
29
  @jobs_queue = jobs_queue
30
30
  end
31
31
 
@@ -79,6 +79,9 @@ if rails
79
79
  ::Karafka::App.monitor.subscribe('connection.listener.fetch_loop') do
80
80
  # Reload code each time there is a change in the code
81
81
  next unless Rails.application.reloaders.any?(&:updated?)
82
+ # If consumer persistence is enabled, no reason to reload because we will still keep
83
+ # old consumer instances in memory.
84
+ next if Karafka::App.config.consumer_persistence
82
85
 
83
86
  Rails.application.reloader.reload!
84
87
  end
@@ -80,7 +80,7 @@ module Karafka
80
80
  # @param subscription_group_name [String, Symbol] subscription group id. When not provided,
81
81
  # a random uuid will be used
82
82
  # @param block [Proc] further topics definitions
83
- def subscription_group(subscription_group_name = SecureRandom.uuid, &block)
83
+ def subscription_group(subscription_group_name = SecureRandom.hex(6), &block)
84
84
  consumer_group('app') do
85
85
  target.public_send(:subscription_group=, subscription_group_name.to_s, &block)
86
86
  end
@@ -26,7 +26,7 @@ module Karafka
26
26
  @topics = Topics.new([])
27
27
  # Initialize the subscription group so there's always a value for it, since even if not
28
28
  # defined directly, a subscription group will be created
29
- @current_subscription_group_id = SecureRandom.uuid
29
+ @current_subscription_group_id = SecureRandom.hex(6)
30
30
  end
31
31
 
32
32
  # @return [Boolean] true if this consumer group should be active in our current process
@@ -55,7 +55,7 @@ module Karafka
55
55
  # topic definition
56
56
  # @param name [String, Symbol] name of the current subscription group
57
57
  # @param block [Proc] block that may include topics definitions
58
- def subscription_group=(name = SecureRandom.uuid, &block)
58
+ def subscription_group=(name = SecureRandom.hex(6), &block)
59
59
  # We cast it here, so the routing supports symbol based but that's anyhow later on
60
60
  # validated as a string
61
61
  @current_subscription_group_id = name
@@ -64,7 +64,7 @@ module Karafka
64
64
 
65
65
  # We need to reset the current subscription group after it is used, so it won't leak
66
66
  # outside to other topics that would be defined without a defined subscription group
67
- @current_subscription_group_id = SecureRandom.uuid
67
+ @current_subscription_group_id = SecureRandom.hex(6)
68
68
  end
69
69
 
70
70
  # @return [Array<Routing::SubscriptionGroup>] all the subscription groups build based on
@@ -10,7 +10,7 @@ module Karafka
10
10
  configure do |config|
11
11
  config.error_messages = YAML.safe_load(
12
12
  File.read(
13
- File.join(Karafka.gem_root, 'config', 'errors.yml')
13
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
14
14
  )
15
15
  ).fetch('en').fetch('validations').fetch('topic')
16
16
  end
@@ -9,7 +9,7 @@ module Karafka
9
9
  configure do |config|
10
10
  config.error_messages = YAML.safe_load(
11
11
  File.read(
12
- File.join(Karafka.gem_root, 'config', 'errors.yml')
12
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
13
13
  )
14
14
  ).fetch('en').fetch('validations').fetch('topic')
15
15
  end
@@ -9,7 +9,7 @@ module Karafka
9
9
  configure do |config|
10
10
  config.error_messages = YAML.safe_load(
11
11
  File.read(
12
- File.join(Karafka.gem_root, 'config', 'errors.yml')
12
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
13
13
  )
14
14
  ).fetch('en').fetch('validations').fetch('topic')
15
15
  end
@@ -47,7 +47,7 @@ module Karafka
47
47
  # in a separate thread (or trap context) to indicate everything is closed
48
48
  # Since `#start` is blocking, we were get here only after the runner is done. This will
49
49
  # not add any performance degradation because of that.
50
- Thread.pass until Karafka::App.stopped?
50
+ Thread.pass until Karafka::App.terminated?
51
51
  # Try its best to shutdown underlying components before re-raising
52
52
  # rubocop:disable Lint/RescueException
53
53
  rescue Exception => e
@@ -75,6 +75,7 @@ module Karafka
75
75
  # Initialize the stopping process only if Karafka was running
76
76
  return if Karafka::App.stopping?
77
77
  return if Karafka::App.stopped?
78
+ return if Karafka::App.terminated?
78
79
 
79
80
  Karafka::App.stop!
80
81
 
@@ -84,13 +85,7 @@ module Karafka
84
85
  # their work and if so, we can just return and normal shutdown process will take place
85
86
  # We divide it by 1000 because we use time in ms.
86
87
  ((timeout / 1_000) * SUPERVISION_CHECK_FACTOR).to_i.times do
87
- if listeners.count(&:alive?).zero? &&
88
- workers.count(&:alive?).zero?
89
-
90
- Karafka::App.producer.close
91
-
92
- return
93
- end
88
+ return if listeners.count(&:alive?).zero? && workers.count(&:alive?).zero?
94
89
 
95
90
  sleep SUPERVISION_SLEEP
96
91
  end
@@ -122,18 +117,23 @@ module Karafka
122
117
  ensure
123
118
  # We need to check if it wasn't an early exit to make sure that only on stop invocation
124
119
  # can change the status after everything is closed
125
- Karafka::App.stopped! if timeout
120
+ if timeout
121
+ Karafka::App.stopped!
122
+
123
+ # We close producer as the last thing as it can be used in the notification pipeline
124
+ # to dispatch state changes, etc
125
+ Karafka::App.producer.close
126
+
127
+ Karafka::App.terminate!
128
+ end
126
129
  end
127
130
 
128
131
  # Quiets the Karafka server.
129
132
  # Karafka will stop processing but won't quiet to consumer group, so no rebalance will be
130
133
  # triggered until final shutdown.
131
134
  def quiet
132
- # If we are already quieting or in the stop procedures, we should not do it again.
133
- return if Karafka::App.quieting?
134
- return if Karafka::App.stopping?
135
- return if Karafka::App.stopped?
136
-
135
+ # We don't have to safe-guard it with check states as the state transitions work only
136
+ # in one direction
137
137
  Karafka::App.quiet!
138
138
  end
139
139
 
@@ -16,7 +16,10 @@ module Karafka
16
16
 
17
17
  # Defaults for kafka settings, that will be overwritten only if not present already
18
18
  KAFKA_DEFAULTS = {
19
- 'client.id': 'karafka'
19
+ 'client.id': 'karafka',
20
+ # We emit the statistics by default, so all the instrumentation and web-ui work out of
21
+ # the box, without requiring users to take any extra actions aside from enabling.
22
+ 'statistics.interval.ms': 5_000
20
23
  }.freeze
21
24
 
22
25
  # Contains settings that should not be used in production but make life easier in dev
@@ -130,6 +133,12 @@ module Karafka
130
133
  setting :strategy_selector, default: Processing::StrategySelector.new
131
134
  end
132
135
 
136
+ # Things related to operating on messages
137
+ setting :messages do
138
+ # Parser is used to convert raw payload prior to deserialization
139
+ setting :parser, default: Messages::Parser.new
140
+ end
141
+
133
142
  # Karafka components for ActiveJob
134
143
  setting :active_job do
135
144
  # option dispatcher [Karafka::ActiveJob::Dispatcher] default dispatcher for ActiveJob
@@ -155,7 +164,7 @@ module Karafka
155
164
  # Will configure all the pro components
156
165
  # This needs to happen before end user configuration as the end user may overwrite some
157
166
  # of the pro defaults with custom components
158
- Pro::Loader.setup(config) if Karafka.pro?
167
+ Pro::Loader.pre_setup(config) if Karafka.pro?
159
168
 
160
169
  configure(&block)
161
170
  merge_kafka_defaults!(config)
@@ -164,6 +173,10 @@ module Karafka
164
173
 
165
174
  configure_components
166
175
 
176
+ # Runs things that need to be executed after config is defined and all the components
177
+ # are also configured
178
+ Pro::Loader.post_setup(config) if Karafka.pro?
179
+
167
180
  Karafka::App.initialized!
168
181
  end
169
182