karafka 2.1.13 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/CHANGELOG.md +35 -0
  4. data/Gemfile.lock +1 -1
  5. data/config/locales/errors.yml +4 -0
  6. data/config/locales/pro_errors.yml +17 -0
  7. data/lib/karafka/admin.rb +21 -33
  8. data/lib/karafka/connection/client.rb +1 -1
  9. data/lib/karafka/contracts/config.rb +24 -0
  10. data/lib/karafka/pro/contracts/base.rb +23 -0
  11. data/lib/karafka/pro/contracts/server_cli_options.rb +111 -0
  12. data/lib/karafka/pro/loader.rb +4 -1
  13. data/lib/karafka/pro/routing/features/active_job/builder.rb +45 -0
  14. data/lib/karafka/pro/routing/features/active_job.rb +26 -0
  15. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +53 -0
  16. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +41 -0
  17. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +41 -0
  18. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +44 -0
  19. data/lib/karafka/pro/routing/features/long_running_job/{contract.rb → contracts/topic.rb} +14 -11
  20. data/lib/karafka/pro/routing/features/{filtering/contract.rb → patterns/builder.rb} +13 -16
  21. data/lib/karafka/pro/routing/features/patterns/config.rb +54 -0
  22. data/lib/karafka/pro/routing/features/patterns/consumer_group.rb +68 -0
  23. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +62 -0
  24. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +46 -0
  25. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +41 -0
  26. data/lib/karafka/pro/routing/features/patterns/detector.rb +68 -0
  27. data/lib/karafka/pro/routing/features/patterns/pattern.rb +81 -0
  28. data/lib/karafka/pro/routing/features/{delaying/contract.rb → patterns/patterns.rb} +11 -14
  29. data/lib/karafka/pro/routing/features/patterns/topic.rb +50 -0
  30. data/lib/karafka/pro/routing/features/patterns/topics.rb +53 -0
  31. data/lib/karafka/pro/routing/features/patterns.rb +33 -0
  32. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +51 -0
  33. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +44 -0
  34. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +55 -0
  35. data/lib/karafka/routing/consumer_group.rb +1 -1
  36. data/lib/karafka/routing/features/active_job/contracts/topic.rb +44 -0
  37. data/lib/karafka/routing/features/active_job/proxy.rb +14 -0
  38. data/lib/karafka/routing/features/base/expander.rb +8 -2
  39. data/lib/karafka/routing/features/base.rb +4 -2
  40. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +46 -0
  41. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +33 -0
  42. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +27 -0
  43. data/lib/karafka/routing/router.rb +0 -11
  44. data/lib/karafka/routing/subscription_group.rb +9 -0
  45. data/lib/karafka/routing/topic.rb +5 -0
  46. data/lib/karafka/server.rb +9 -4
  47. data/lib/karafka/setup/config.rb +45 -0
  48. data/lib/karafka/version.rb +1 -1
  49. data.tar.gz.sig +0 -0
  50. metadata +32 -15
  51. metadata.gz.sig +0 -0
  52. data/lib/karafka/pro/routing/features/dead_letter_queue/contract.rb +0 -50
  53. data/lib/karafka/pro/routing/features/expiring/contract.rb +0 -38
  54. data/lib/karafka/pro/routing/features/pausing/contract.rb +0 -48
  55. data/lib/karafka/pro/routing/features/throttling/contract.rb +0 -41
  56. data/lib/karafka/pro/routing/features/virtual_partitions/contract.rb +0 -52
  57. data/lib/karafka/routing/features/active_job/contract.rb +0 -41
  58. data/lib/karafka/routing/features/dead_letter_queue/contract.rb +0 -42
  59. data/lib/karafka/routing/features/declaratives/contract.rb +0 -30
  60. data/lib/karafka/routing/features/manual_offset_management/contract.rb +0 -24

data/lib/karafka/pro/routing/features/patterns/topics.rb ADDED
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module Routing
+      module Features
+        class Patterns < Base
+          # Patterns feature topic extensions
+          module Topics
+            # Finds topic by its name in a more extensive way than the regular. Regular uses the
+            # pre-existing topics definitions. This extension also runs the expansion based on
+            # defined routing patterns (if any)
+            #
+            # If topic does not exist, it will try to run discovery in case there are patterns
+            # defined that would match it.
+            # This allows us to support lookups for newly appearing topics based on their regexp
+            # patterns.
+            #
+            # @param topic_name [String] topic name
+            # @return [Karafka::Routing::Topic]
+            # @raise [Karafka::Errors::TopicNotFoundError] this should never happen. If you see it,
+            # please create an issue.
+            #
+            # @note This method should not be used in context of finding multiple missing topics in
+            # loops because it catches exceptions and attempts to expand routes. If this is used
+            # in a loop for lookups on thousands of topics with detector expansion, this may
+            # be slow. It should be used in the context where newly discovered topics are found
+            # and should by design match a pattern. For quick lookups on batches of topics, it
+            # is recommended to use a custom built lookup with conditional expander.
+            def find(topic_name)
+              super
+            rescue Karafka::Errors::TopicNotFoundError
+              Detector.new.expand(self, topic_name)
+
+              super
+            end
+          end
+        end
+      end
+    end
+  end
+end

data/lib/karafka/pro/routing/features/patterns.rb ADDED
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module Routing
+      module Features
+        # Dynamic topics builder feature.
+        #
+        # Allows you to define patterns in routes that would then automatically subscribe and
+        # start consuming new topics.
+        #
+        # This feature works by injecting a topic that represents a regexp subscription (matcher)
+        # that at the same time holds the builder block for full config of a newly detected topic.
+        #
+        # We inject a virtual topic to hold settings but also to be able to run validations
+        # during boot to ensure consistency of the pattern base setup.
+        class Patterns < Base
+        end
+      end
+    end
+  end
+end
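
Illustration: with this feature, a routing pattern might be declared roughly as sketched below. The `pattern` routing method and its exact signature are assumptions inferred from the builder and topic extensions added in this release; they are not shown in this diff.

  # Hypothetical usage sketch - `pattern` and its arguments are assumed, not
  # confirmed by this diff; any topic matching the regexp would be expanded
  # into a regular topic using the block as its config.
  class KarafkaApp < Karafka::App
    routes.draw do
      pattern(/\Aevents\..*/) do
        consumer EventsConsumer
      end
    end
  end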

data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb ADDED
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module Routing
+      module Features
+        class Pausing < Base
+          # Namespace for pausing feature
+          module Contracts
+            # Contract to make sure, that the pause settings on a per topic basis are as expected
+            class Topic < Karafka::Contracts::Base
+              configure do |config|
+                config.error_messages = YAML.safe_load(
+                  File.read(
+                    File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
+                  )
+                ).fetch('en').fetch('validations').fetch('topic')
+
+                required(:pause_timeout) { |val| val.is_a?(Integer) && val.positive? }
+                required(:pause_max_timeout) { |val| val.is_a?(Integer) && val.positive? }
+                required(:pause_with_exponential_backoff) { |val| [true, false].include?(val) }
+
+                virtual do |data, errors|
+                  next unless errors.empty?
+
+                  pause_timeout = data.fetch(:pause_timeout)
+                  pause_max_timeout = data.fetch(:pause_max_timeout)
+
+                  next if pause_timeout <= pause_max_timeout
+
+                  [[%i[pause_timeout], :max_timeout_vs_pause_max_timeout]]
+                end
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
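
The three keys validated here map to per-topic pause settings, with the extra rule that the base timeout must not exceed the maximum. A rough sketch of setting them in routing follows; the `pause` topic method and its keyword names are assumptions, since the contract only shows which attributes end up validated.

  # Hypothetical sketch - the `pause` DSL is assumed, not confirmed by this
  # diff; the contract requires timeout <= max_timeout.
  topic :orders do
    consumer OrdersConsumer
    pause(
      timeout: 1_000,                 # pause_timeout in ms, positive Integer
      max_timeout: 10_000,            # pause_max_timeout, must not be lower than timeout
      with_exponential_backoff: true  # pause_with_exponential_backoff flag
    )
  end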

data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb ADDED
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module Routing
+      module Features
+        class Throttling < Base
+          # Namespace for throttling contracts
+          module Contracts
+            # Rules around throttling settings
+            class Topic < Karafka::Contracts::Base
+              configure do |config|
+                config.error_messages = YAML.safe_load(
+                  File.read(
+                    File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
+                  )
+                ).fetch('en').fetch('validations').fetch('topic')
+              end
+
+              nested(:throttling) do
+                required(:active) { |val| [true, false].include?(val) }
+                required(:interval) { |val| val.is_a?(Integer) && val.positive? }
+                required(:limit) do |val|
+                  (val.is_a?(Integer) || val == Float::INFINITY) && val.positive?
+                end
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
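
As a sketch, the validated keys would typically be set per topic along these lines; the `throttling` routing method name is an assumption based on the nested(:throttling) config it guards.

  # Hypothetical sketch - assumes a `throttling` topic method; limits this
  # topic to at most 100 messages per 60 seconds.
  topic :events do
    consumer EventsConsumer
    throttling(limit: 100, interval: 60_000)
  end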

data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb ADDED
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module Routing
+      module Features
+        class VirtualPartitions < Base
+          # Namespace for VP contracts
+          module Contracts
+            # Rules around virtual partitions
+            class Topic < Karafka::Contracts::Base
+              configure do |config|
+                config.error_messages = YAML.safe_load(
+                  File.read(
+                    File.join(Karafka.gem_root, 'config', 'locales', 'pro_errors.yml')
+                  )
+                ).fetch('en').fetch('validations').fetch('topic')
+              end
+
+              nested(:virtual_partitions) do
+                required(:active) { |val| [true, false].include?(val) }
+                required(:partitioner) { |val| val.nil? || val.respond_to?(:call) }
+                required(:max_partitions) { |val| val.is_a?(Integer) && val >= 1 }
+              end
+
+              # When virtual partitions are defined, partitioner needs to respond to `#call` and it
+              # cannot be nil
+              virtual do |data, errors|
+                next unless errors.empty?
+
+                virtual_partitions = data[:virtual_partitions]
+
+                next unless virtual_partitions[:active]
+                next if virtual_partitions[:partitioner].respond_to?(:call)
+
+                [[%i[virtual_partitions partitioner], :respond_to_call]]
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
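
For illustration, a topic using virtual partitions could look roughly like this; the `virtual_partitions` topic method is the routing entry point these settings belong to, though the concrete call shown here is a sketch rather than something taken from this diff.

  # Sketch - when the feature is active the partitioner must respond to #call,
  # which is exactly what the virtual validation above enforces.
  topic :payments do
    consumer PaymentsConsumer
    virtual_partitions(
      partitioner: ->(message) { message.headers['order_id'] },
      max_partitions: 5
    )
  end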

data/lib/karafka/routing/consumer_group.rb CHANGED
@@ -55,7 +55,7 @@ module Karafka
     def subscription_group=(name = SubscriptionGroup.id, &block)
       # We cast it here, so the routing supports symbol based but that's anyhow later on
       # validated as a string
-      @current_subscription_group_id = name
+      @current_subscription_group_id = name.to_s

       Proxy.new(self, &block)


data/lib/karafka/routing/features/active_job/contracts/topic.rb ADDED
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    module Features
+      class ActiveJob < Base
+        # This feature validation contracts
+        module Contracts
+          # Rules around using ActiveJob routing - basically you need to have ActiveJob available
+          # in order to be able to use active job routing
+          class Topic < Karafka::Contracts::Base
+            configure do |config|
+              config.error_messages = YAML.safe_load(
+                File.read(
+                  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
+                )
+              ).fetch('en').fetch('validations').fetch('topic')
+            end
+
+            virtual do |data, errors|
+              next unless errors.empty?
+              next unless data[:active_job][:active]
+              # One should not define active job jobs without ActiveJob being available for usage
+              next if Object.const_defined?('ActiveJob::Base')
+
+              [[%i[consumer], :active_job_missing]]
+            end
+
+            # ActiveJob needs to always run with manual offset management
+            # Automatic offset management cannot work with ActiveJob. Otherwise we could mark as
+            # consumed jobs that did not run because of shutdown.
+            virtual do |data, errors|
+              next unless errors.empty?
+              next unless data[:active_job][:active]
+              next if data[:manual_offset_management][:active]
+
+              [[%i[manual_offset_management], :must_be_enabled]]
+            end
+          end
+        end
+      end
+    end
+  end
+end
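
These two validations mean an ActiveJob topic may only exist when ActiveJob is loaded and when manual offset management is enabled for it. As a sketch, such a route is normally declared via the `active_job_topic` helper, which is expected to enable both flags; that helper behaviour is inferred, not shown in this diff.

  # Sketch - `active_job_topic` marks the topic as active_job and enables
  # manual offset management, satisfying the contract above.
  class KarafkaApp < Karafka::App
    routes.draw do
      active_job_topic :default
    end
  end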

data/lib/karafka/routing/features/active_job/proxy.rb ADDED
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    module Features
+      class ActiveJob < Base
+        # Routing proxy extensions for ActiveJob
+        module Proxy
+          include Builder
+        end
+      end
+    end
+  end
+end

data/lib/karafka/routing/features/base/expander.rb CHANGED
@@ -30,15 +30,21 @@ module Karafka
            scope = @scope

            Module.new do
-             # Runs validations related to this feature on a topic
+             # Runs validations related to this feature on a routing resources
              #
              # @param block [Proc] routing defining block
              define_method :draw do |&block|
                result = super(&block)

                each do |consumer_group|
+                 if scope::Contracts.const_defined?('ConsumerGroup', false)
+                   scope::Contracts::ConsumerGroup.new.validate!(consumer_group.to_h)
+                 end
+
+                 next unless scope::Contracts.const_defined?('Topic', false)
+
                  consumer_group.topics.each do |topic|
-                   scope::Contract.new.validate!(topic.to_h)
+                   scope::Contracts::Topic.new.validate!(topic.to_h)
                  end
                end


data/lib/karafka/routing/features/base.rb CHANGED
@@ -14,9 +14,11 @@ module Karafka
          # Extends topic and builder with given feature API
          def activate
            Topic.prepend(self::Topic) if const_defined?('Topic', false)
-           Proxy.prepend(self::Builder) if const_defined?('Builder', false)
+           Topics.prepend(self::Topics) if const_defined?('Topics', false)
+           ConsumerGroup.prepend(self::ConsumerGroup) if const_defined?('ConsumerGroup', false)
+           Proxy.prepend(self::Proxy) if const_defined?('Proxy', false)
            Builder.prepend(self::Builder) if const_defined?('Builder', false)
-           Builder.prepend(Base::Expander.new(self)) if const_defined?('Contract', false)
+           Builder.prepend(Base::Expander.new(self)) if const_defined?('Contracts', false)
          end

          # Loads all the features and activates them
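
With this change the expander looks for a `Contracts` namespace (with optional `Topic` and `ConsumerGroup` contracts) instead of a single `Contract` class. A minimal sketch of the new convention, using a hypothetical `MyFeature`:

  # Hypothetical feature - the point is only the naming convention: a
  # Contracts::Topic nested under the feature gets picked up automatically
  # and run against every topic during routing draw.
  module Karafka
    module Routing
      module Features
        class MyFeature < Base
          module Contracts
            class Topic < Karafka::Contracts::Base
              # topic-level validation rules would go here
            end
          end
        end
      end
    end
  end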

data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb ADDED
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    module Features
+      class DeadLetterQueue < Base
+        # This feature validation contracts
+        module Contracts
+          # Rules around dead letter queue settings
+          class Topic < Karafka::Contracts::Base
+            configure do |config|
+              config.error_messages = YAML.safe_load(
+                File.read(
+                  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
+                )
+              ).fetch('en').fetch('validations').fetch('topic')
+            end
+
+            nested :dead_letter_queue do
+              required(:active) { |val| [true, false].include?(val) }
+              required(:max_retries) { |val| val.is_a?(Integer) && val >= 0 }
+            end
+
+            # Validate topic name only if dlq is active
+            virtual do |data, errors|
+              next unless errors.empty?
+
+              dead_letter_queue = data[:dead_letter_queue]
+
+              next unless dead_letter_queue[:active]
+
+              topic = dead_letter_queue[:topic]
+              topic_regexp = ::Karafka::Contracts::TOPIC_REGEXP
+
+              # When topic is set to false, it means we just want to skip dispatch on DLQ
+              next if topic == false
+              next if topic.is_a?(String) && topic_regexp.match?(topic)
+
+              [[%i[dead_letter_queue topic], :format]]
+            end
+          end
+        end
+      end
+    end
+  end
+end
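
For reference, the `dead_letter_queue` routing API that this contract guards is typically used as sketched below (the values are arbitrary examples):

  # Sketch - a DLQ setup satisfying the contract above; passing topic: false
  # would skip dispatch while still limiting retries.
  topic :orders do
    consumer OrdersConsumer
    dead_letter_queue(topic: 'orders_dlq', max_retries: 3)
  end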

data/lib/karafka/routing/features/declaratives/contracts/topic.rb ADDED
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    module Features
+      class Declaratives < Base
+        # This feature validation contracts
+        module Contracts
+          # Basic validation of the Kafka expected config details
+          class Topic < Karafka::Contracts::Base
+            configure do |config|
+              config.error_messages = YAML.safe_load(
+                File.read(
+                  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
+                )
+              ).fetch('en').fetch('validations').fetch('topic')
+            end
+
+            nested :declaratives do
+              required(:active) { |val| [true, false].include?(val) }
+              required(:partitions) { |val| val.is_a?(Integer) && val.positive? }
+              required(:replication_factor) { |val| val.is_a?(Integer) && val.positive? }
+              required(:details) do |val|
+                val.is_a?(Hash) &&
+                  val.keys.all? { |key| key.is_a?(Symbol) }
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
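
A sketch of a declarative topic definition that satisfies these rules; the `config` topic method is how these settings are expressed in routing, while the concrete values here are illustrative only.

  # Sketch - partitions and replication_factor must be positive Integers and
  # any extra details must form a Hash with Symbol keys.
  topic :orders do
    consumer OrdersConsumer
    config(
      partitions: 6,
      replication_factor: 3,
      'cleanup.policy': 'delete'
    )
  end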

data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb ADDED
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    module Features
+      class ManualOffsetManagement < Base
+        # This feature validation contracts
+        module Contracts
+          # Rules around manual offset management settings
+          class Topic < Karafka::Contracts::Base
+            configure do |config|
+              config.error_messages = YAML.safe_load(
+                File.read(
+                  File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
+                )
+              ).fetch('en').fetch('validations').fetch('topic')
+            end
+
+            nested :manual_offset_management do
+              required(:active) { |val| [true, false].include?(val) }
+            end
+          end
+        end
+      end
+    end
+  end
+end
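
In routing this flag is toggled per topic, for example as sketched here; once enabled, offsets have to be marked explicitly from within the consumer.

  # Sketch - enables the boolean flag validated above.
  topic :events do
    consumer EventsConsumer
    manual_offset_management(true)
  end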

data/lib/karafka/routing/router.rb CHANGED
@@ -7,16 +7,6 @@ module Karafka
   # @note Since Kafka does not provide namespaces or modules for topics, they all have "flat"
   # structure so all the routes are being stored in a single level array
   module Router
-    # Find a proper topic based on full topic id
-    # @param topic_id [String] proper topic id (already mapped, etc) for which we want to find
-    # routing topic
-    # @return [Karafka::Routing::Topic] proper route details
-    # @raise [Karafka::Topic::NonMatchingTopicError] raised if topic name does not match
-    # any route defined by user using routes.draw
-    def find(topic_id)
-      find_by(id: topic_id) || raise(Errors::NonMatchingRouteError, topic_id)
-    end
-
     # Finds first reference of a given topic based on provided lookup attribute
     # @param lookup [Hash<Symbol, String>] hash with attribute - value key pairs
     # @return [Karafka::Routing::Topic, nil] proper route details or nil if not found
@@ -46,7 +36,6 @@ module Karafka
      find_by(name: name) || Topic.new(name, ConsumerGroup.new(name))
    end

-    module_function :find
    module_function :find_by
    module_function :find_or_initialize_by_name
  end

data/lib/karafka/routing/subscription_group.rb CHANGED
@@ -61,6 +61,15 @@ module Karafka
        Karafka::App.config.internal.routing.activity_manager.active?(:subscription_groups, name)
      end

+      # @return [Array<String>] names of topics to which we should subscribe.
+      #
+      # @note Most of the time it should not include inactive topics but in case of pattern
+      # matching the matcher topics become inactive down the road, hence we filter out so
+      # they are later removed.
+      def subscriptions
+        topics.select(&:active?).map(&:subscription_name)
+      end
+
      private

      # @return [Hash] kafka settings are a bit special. They are exactly the same for all of the

data/lib/karafka/routing/topic.rb CHANGED
@@ -52,6 +52,11 @@ module Karafka
        end
      end

+      # @return [String] name of subscription that will go to librdkafka
+      def subscription_name
+        name
+      end
+
      # @return [Class] consumer class that we should use
      def consumer
        if consumer_persistence

data/lib/karafka/server.rb CHANGED
@@ -32,8 +32,8 @@ module Karafka
        # embedded
        # We cannot validate this during the start because config needs to be populated and routes
        # need to be defined.
-        Contracts::ServerCliOptions.new.validate!(
-          Karafka::App.config.internal.routing.activity_manager.to_h
+        config.internal.cli.contract.validate!(
+          config.internal.routing.activity_manager.to_h
        )

        process.on_sigint { stop }
@@ -82,7 +82,7 @@ module Karafka

        Karafka::App.stop!

-        timeout = Karafka::App.config.shutdown_timeout
+        timeout = config.shutdown_timeout

        # We check from time to time (for the timeout period) if all the threads finished
        # their work and if so, we can just return and normal shutdown process will take place
@@ -148,9 +148,14 @@ module Karafka

      private

+      # @return [Karafka::Core::Configurable::Node] root config node
+      def config
+        Karafka::App.config
+      end
+
      # @return [Karafka::Process] process wrapper instance used to catch system signal calls
      def process
-        Karafka::App.config.internal.process
+        config.internal.process
      end
    end
  end

data/lib/karafka/setup/config.rb CHANGED
@@ -105,6 +105,44 @@ module Karafka
    # @see https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
    setting :kafka, default: {}

+    # Admin specific settings.
+    #
+    # Since admin operations are often specific, they may require specific librdkafka settings
+    # or other settings that are unique to admin.
+    setting :admin do
+      # Specific kafka settings that are tuned to operate within the Admin.
+      #
+      # Please do not change them unless you know what you are doing as their misconfiguration
+      # may cause Admin API to misbehave
+      # option [Hash] extra changes to the default root kafka settings
+      setting :kafka, default: {
+        # We want to know when there is no more data not to end up with an endless loop
+        'enable.partition.eof': true,
+        # Do not publish statistics from admin as they are not relevant
+        'statistics.interval.ms': 0,
+        # Fetch at most 5 MBs when using admin
+        'fetch.message.max.bytes': 5 * 1_048_576,
+        # Do not commit offset automatically, this prevents offset tracking for operations
+        # involving a consumer instance
+        'enable.auto.commit': false,
+        # Make sure that topic metadata lookups do not create topics accidentally
+        'allow.auto.create.topics': false
+      }
+
+      # option [String] default name for the admin consumer group. Please note, that this is a
+      # subject to be remapped by the consumer mapper as any other consumer group in the routes
+      setting :group_id, default: 'karafka_admin'
+
+      # option max_wait_time [Integer] We wait only for this amount of time before raising error
+      # as we intercept this error and retry after checking that the operation was finished or
+      # failed using external factor.
+      setting :max_wait_time, default: 1_000
+
+      # How many times should be try. 1 000 ms x 60 => 60 seconds wait in total and then we give
+      # up on pending operations
+      setting :max_attempts, default: 60
+    end
+
    # Namespace for internal settings that should not be modified directly
    setting :internal do
      # option status [Karafka::Status] app status
@@ -114,6 +152,13 @@ module Karafka
      #   instances
      setting :process, default: Process.new

+      # Namespace for CLI related settings
+      setting :cli do
+        # option contract [Object] cli setup validation contract (in the context of options and
+        # topics)
+        setting :contract, default: Contracts::ServerCliOptions.new
+      end
+
      setting :routing do
        # option builder [Karafka::Routing::Builder] builder instance
        setting :builder, default: Routing::Builder.new
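
The new admin defaults can be adjusted during setup like any other setting; a sketch follows (the setting names come from the block above, the values are arbitrary examples).

  # Sketch - tweaking the admin namespace added in this release.
  class KarafkaApp < Karafka::App
    setup do |config|
      config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }
      config.admin.group_id = 'my_app_admin'
      config.admin.max_wait_time = 2_000
      config.admin.max_attempts = 120
    end
  end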

data/lib/karafka/version.rb CHANGED
@@ -3,5 +3,5 @@
 # Main module namespace
 module Karafka
   # Current Karafka version
-  VERSION = '2.1.13'
+  VERSION = '2.2.0'
 end
data.tar.gz.sig CHANGED
Binary file