karafka 1.1.0 → 1.2.1

Files changed (69)
  1. checksums.yaml +5 -5
  2. data/.ruby-version +1 -1
  3. data/.travis.yml +4 -8
  4. data/CHANGELOG.md +50 -2
  5. data/CONTRIBUTING.md +5 -6
  6. data/Gemfile +1 -2
  7. data/Gemfile.lock +43 -31
  8. data/README.md +13 -21
  9. data/karafka.gemspec +6 -4
  10. data/lib/karafka.rb +17 -7
  11. data/lib/karafka/app.rb +8 -15
  12. data/lib/karafka/attributes_map.rb +1 -1
  13. data/lib/karafka/backends/inline.rb +1 -2
  14. data/lib/karafka/{base_controller.rb → base_consumer.rb} +19 -11
  15. data/lib/karafka/base_responder.rb +34 -15
  16. data/lib/karafka/callbacks.rb +30 -0
  17. data/lib/karafka/callbacks/config.rb +22 -0
  18. data/lib/karafka/callbacks/dsl.rb +16 -0
  19. data/lib/karafka/cli/install.rb +2 -3
  20. data/lib/karafka/cli/server.rb +0 -1
  21. data/lib/karafka/connection/{consumer.rb → client.rb} +32 -36
  22. data/lib/karafka/connection/config_adapter.rb +14 -6
  23. data/lib/karafka/connection/delegator.rb +46 -0
  24. data/lib/karafka/connection/listener.rb +22 -13
  25. data/lib/karafka/{controllers → consumers}/callbacks.rb +9 -9
  26. data/lib/karafka/consumers/includer.rb +51 -0
  27. data/lib/karafka/consumers/responders.rb +24 -0
  28. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  29. data/lib/karafka/errors.rb +13 -3
  30. data/lib/karafka/fetcher.rb +30 -34
  31. data/lib/karafka/helpers/class_matcher.rb +8 -8
  32. data/lib/karafka/helpers/config_retriever.rb +2 -2
  33. data/lib/karafka/instrumentation/listener.rb +112 -0
  34. data/lib/karafka/instrumentation/logger.rb +55 -0
  35. data/lib/karafka/instrumentation/monitor.rb +64 -0
  36. data/lib/karafka/loader.rb +0 -1
  37. data/lib/karafka/params/{params.rb → dsl.rb} +71 -43
  38. data/lib/karafka/params/params_batch.rb +7 -2
  39. data/lib/karafka/patches/dry_configurable.rb +6 -2
  40. data/lib/karafka/patches/ruby_kafka.rb +10 -10
  41. data/lib/karafka/persistence/client.rb +25 -0
  42. data/lib/karafka/persistence/consumer.rb +27 -14
  43. data/lib/karafka/persistence/topic.rb +29 -0
  44. data/lib/karafka/process.rb +5 -4
  45. data/lib/karafka/responders/builder.rb +15 -14
  46. data/lib/karafka/routing/builder.rb +1 -1
  47. data/lib/karafka/routing/consumer_mapper.rb +3 -2
  48. data/lib/karafka/routing/router.rb +1 -1
  49. data/lib/karafka/routing/topic.rb +5 -11
  50. data/lib/karafka/schemas/config.rb +3 -0
  51. data/lib/karafka/schemas/consumer_group.rb +15 -3
  52. data/lib/karafka/schemas/consumer_group_topic.rb +1 -1
  53. data/lib/karafka/server.rb +44 -5
  54. data/lib/karafka/setup/config.rb +47 -21
  55. data/lib/karafka/setup/configurators/base.rb +6 -12
  56. data/lib/karafka/setup/configurators/params.rb +25 -0
  57. data/lib/karafka/setup/configurators/water_drop.rb +6 -3
  58. data/lib/karafka/setup/dsl.rb +22 -0
  59. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.example} +2 -3
  60. data/lib/karafka/templates/karafka.rb.example +17 -4
  61. data/lib/karafka/version.rb +1 -1
  62. metadata +58 -24
  63. data/.github/ISSUE_TEMPLATE.md +0 -2
  64. data/lib/karafka/connection/processor.rb +0 -61
  65. data/lib/karafka/controllers/includer.rb +0 -51
  66. data/lib/karafka/controllers/responders.rb +0 -19
  67. data/lib/karafka/logger.rb +0 -53
  68. data/lib/karafka/monitor.rb +0 -98
  69. data/lib/karafka/persistence/controller.rb +0 -38
data/lib/karafka/persistence/topic.rb
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Persistence
+     # Local cache for routing topics
+     # We use it in order not to build string instances and remap incoming topic upon each
+     # message / message batches received
+     class Topic
+       # Thread.current scope under which we store topics data
+       PERSISTENCE_SCOPE = :topics
+
+       # @param group_id [String] group id for which we fetch a topic representation
+       # @param raw_topic_name [String] raw topic name (before remapping) for which we fetch a
+       #   topic representation
+       # @return [Karafka::Routing::Topic] remapped topic representation that can be used further
+       #   on when working with given parameters
+       def self.fetch(group_id, raw_topic_name)
+         Thread.current[PERSISTENCE_SCOPE] ||= Hash.new { |hash, key| hash[key] = {} }
+
+         Thread.current[PERSISTENCE_SCOPE][group_id][raw_topic_name] ||= begin
+           # We map from incoming topic name, as it might be namespaced, etc.
+           # @see topic_mapper internal docs
+           mapped_topic_name = Karafka::App.config.topic_mapper.incoming(raw_topic_name)
+           Routing::Router.find("#{group_id}_#{mapped_topic_name}")
+         end
+       end
+     end
+   end
+ end
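
The net effect is one router lookup per (group id, raw topic name) pair per thread; every later message for that pair hits the memoized entry. A hedged usage sketch (the group id and topic name below are illustrative, not from the gem):

    # First call per pair remaps the raw name and asks the router; later calls
    # return the same memoized Karafka::Routing::Topic from Thread.current.
    first  = Karafka::Persistence::Topic.fetch('example_app_videos', 'prod.videos')
    second = Karafka::Persistence::Topic.fetch('example_app_videos', 'prod.videos')
    first.equal?(second) #=> true (same object, no extra string building)
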
data/lib/karafka/process.rb
@@ -8,7 +8,9 @@ module Karafka
 
    # Signal types that we handle
    HANDLED_SIGNALS = %i[
-     SIGINT SIGQUIT SIGTERM
+     SIGINT
+     SIGQUIT
+     SIGTERM
    ].freeze
 
    HANDLED_SIGNALS.each do |signal|
@@ -27,8 +29,7 @@ module Karafka
 
    # Creates an instance of process and creates empty hash for callbacks
    def initialize
-     @callbacks = {}
-     HANDLED_SIGNALS.each { |signal| @callbacks[signal] = [] }
+     @callbacks = Hash.new { |hsh, key| hsh[key] = [] }
    end
 
    # Method catches all HANDLED_SIGNALS and performs appropriate callbacks (if defined)
@@ -56,7 +57,7 @@ module Karafka
    # we have to spin up a new thread to do this
    def notice_signal(signal)
      Thread.new do
-       Karafka.monitor.notice(self.class, signal: signal)
+       Karafka.monitor.instrument('process.notice_signal', caller: self, signal: signal)
      end
    end
  end
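
The initializer refactor swaps eagerly pre-seeded arrays for a Hash default block. One behavioral nuance: with the block version, any key (not just the HANDLED_SIGNALS) gets an array created and stored on first access. A minimal sketch of the idiom:

    callbacks = Hash.new { |hsh, key| hsh[key] = [] }

    # Keys materialize lazily: the array is built (and stored) on first access.
    callbacks[:SIGINT] << -> { puts 'shutting down' }
    callbacks[:SIGTERM] #=> [] (and now present in the hash)
    callbacks.keys      #=> [:SIGINT, :SIGTERM]
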
data/lib/karafka/responders/builder.rb
@@ -3,30 +3,31 @@
  module Karafka
    # Responders namespace encapsulates all the internal responder implementation parts
    module Responders
-     # Responders builder is used to find (based on the controller class name) a responder that
-     # match the controller. This is used when user does not provide a responder inside routing
-     # but he still names responder with the same convention (and namespaces) as controller
+     # Responders builder is used to find (based on the consumer class name) a responder
+     # that matches the consumer. We use it when a user does not provide a responder inside
+     # the routing but still names the responder with the same convention (and namespaces) as the consumer
+     #
      # @example Matching responder exists
-     #   Karafka::Responder::Builder(NewEventsController).build #=> NewEventsResponder
+     #   Karafka::Responder::Builder(NewEventsConsumer).build #=> NewEventsResponder
      # @example Matching responder does not exist
-     #   Karafka::Responder::Builder(NewBuildsController).build #=> nil
+     #   Karafka::Responder::Builder(NewBuildsConsumer).build #=> nil
      class Builder
-       # @param controller_class [Karafka::BaseController, nil] descendant of
-       #   Karafka::BaseController
-       # @example Tries to find a responder that matches a given controller. If nothing found,
-       #   will return nil (nil is accepted, because it means that a given controller don't
+       # @param consumer_class [Karafka::BaseConsumer, nil] descendant of
+       #   Karafka::BaseConsumer
+       # @example Tries to find a responder that matches a given consumer. If nothing is found,
+       #   nil is returned (nil is accepted, because it means that a given consumer doesn't
        #   pipe stuff further on)
-       def initialize(controller_class)
-         @controller_class = controller_class
+       def initialize(consumer_class)
+         @consumer_class = consumer_class
        end
 
-       # Tries to figure out a responder based on a controller class name
+       # Tries to figure out a responder based on a consumer class name
        # @return [Class] Responder class (not an instance)
        # @return [nil] if there's no matching responding class
        def build
          Helpers::ClassMatcher.new(
-           @controller_class,
-           from: 'Controller',
+           @consumer_class,
+           from: 'Consumer',
            to: 'Responder'
          ).match
        end
data/lib/karafka/routing/builder.rb
@@ -6,7 +6,7 @@ module Karafka
    # @example Build a simple (most common) route
    #   consumers do
    #     topic :new_videos do
-   #       controller NewVideosController
+   #       consumer NewVideosConsumer
    #     end
    #   end
    class Builder < Array
data/lib/karafka/routing/consumer_mapper.rb
@@ -17,7 +17,7 @@ module Karafka
    #   module MyMapper
    #     def self.call(raw_consumer_group_name)
    #       [
-   #         Karafka::App.config.client_id.to_s.underscope,
+   #         Dry::Inflector.new.underscore(Karafka::App.config.client_id.to_s),
    #         raw_consumer_group_name
    #       ].join('_').gsub('_', '.')
    #     end
@@ -26,7 +26,8 @@ module Karafka
    # @param raw_consumer_group_name [String, Symbol] string or symbolized consumer group name
    # @return [String] remapped final consumer group name
    def self.call(raw_consumer_group_name)
-     "#{Karafka::App.config.client_id.to_s.underscore}_#{raw_consumer_group_name}"
+     client_name = Dry::Inflector.new.underscore(Karafka::App.config.client_id.to_s)
+     "#{client_name}_#{raw_consumer_group_name}"
    end
  end
end
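
The doc example above (now using Dry::Inflector instead of the removed String#underscore monkey patch) doubles as a working custom mapper. Spelled out, with the setup hook for the `consumer_mapper` setting this file backs:

    require 'dry/inflector'

    # Produces dot-separated group names, e.g. 'example.app.videos'
    # instead of the default 'example_app_videos'.
    module MyMapper
      def self.call(raw_consumer_group_name)
        [
          Dry::Inflector.new.underscore(Karafka::App.config.client_id.to_s),
          raw_consumer_group_name
        ].join('_').gsub('_', '.')
      end
    end

    # In karafka.rb setup:
    #   config.consumer_mapper = MyMapper
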
data/lib/karafka/routing/router.rb
@@ -3,7 +3,7 @@
  module Karafka
    # Namespace for all elements related to requests routing
    module Routing
-     # Karafka framework Router for routing incoming messages to proper controllers
+     # Karafka framework Router for routing incoming messages to proper consumers
      # @note Since Kafka does not provide namespaces or modules for topics, they all have "flat"
      #   structure so all the routes are being stored in a single level array
      module Router
data/lib/karafka/routing/topic.rb
@@ -9,7 +9,7 @@ module Karafka
    extend Helpers::ConfigRetriever
 
    attr_reader :id, :consumer_group
-   attr_accessor :controller
+   attr_accessor :consumer
 
    # @param [String, Symbol] name of a topic on which we want to listen
    # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
@@ -29,20 +29,14 @@ module Karafka
    #   example for Sidekiq
    def build
      Karafka::AttributesMap.topic.each { |attr| send(attr) }
-     controller&.topic = self
+     consumer&.topic = self
      self
    end
 
    # @return [Class, nil] Class (not an instance) of a responder that should respond from
-   #   controller back to Kafka (usefull for piping dataflows)
+   #   consumer back to Kafka (useful for piping dataflows)
    def responder
-     @responder ||= Karafka::Responders::Builder.new(controller).build
-   end
-
-   # @return [Class] Parser class (not instance) that we want to use to unparse Kafka messages
-   # @note If not provided - will use Json as default
-   def parser
-     @parser ||= Karafka::Parsers::Json
+     @responder ||= Karafka::Responders::Builder.new(consumer).build
    end
 
    Karafka::AttributesMap.topic.each do |attribute|
@@ -58,7 +52,7 @@ module Karafka
 
    Hash[map].merge!(
      id: id,
-     controller: controller
+     consumer: consumer
    )
  end
end
data/lib/karafka/schemas/config.rb
@@ -13,8 +13,11 @@ module Karafka
    # so we validate all of that once all the routes are defined and ready
    Config = Dry::Validation.Schema do
      required(:client_id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+     required(:shutdown_timeout) { none? | (int? & gteq?(0)) }
      required(:consumer_mapper)
      required(:topic_mapper)
+     required(:params_base_class).filled
+
      optional(:backend).filled
    end
  end
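
The new `shutdown_timeout` rule reads as "nil, or an integer >= 0". A quick sketch of how that disjunction behaves (assuming the 0.x dry-validation API that the `Dry::Validation.Schema` form here implies):

    require 'dry-validation'

    schema = Dry::Validation.Schema do
      required(:shutdown_timeout) { none? | (int? & gteq?(0)) }
    end

    schema.call(shutdown_timeout: nil).success? #=> true (forceful shutdown disabled)
    schema.call(shutdown_timeout: 60).success?  #=> true
    schema.call(shutdown_timeout: -5).success?  #=> false
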
data/lib/karafka/schemas/consumer_group.rb
@@ -9,6 +9,9 @@ module Karafka
    # but sometimes loads things twice
    URI_SCHEMES ||= %w[kafka kafka+ssl].freeze
 
+   # Available SASL SCRAM mechanisms of authentication (plus nil)
+   SASL_SCRAM_MECHANISMS ||= %w[sha256 sha512].freeze
+
    configure do
      config.messages_file = File.join(
        Karafka.gem_root, 'config', 'errors.yml'
@@ -21,7 +24,7 @@ module Karafka
      uri = URI.parse(uri)
      URI_SCHEMES.include?(uri.scheme) && uri.port
    rescue URI::InvalidURIError
-     return false
+     false
    end
  end
 
@@ -36,6 +39,7 @@ module Karafka
    required(:connect_timeout).filled { (int? | float?) & gt?(0) }
    required(:socket_timeout).filled { (int? | float?) & gt?(0) }
    required(:min_bytes).filled(:int?, gt?: 0)
+   required(:max_bytes).filled(:int?, gt?: 0)
    required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
    required(:batch_fetching).filled(:bool?)
    required(:topics).filled { each { schema(ConsumerGroupTopic) } }
@@ -52,14 +56,22 @@ module Karafka
      ssl_ca_cert_file_path
      ssl_client_cert
      ssl_client_cert_key
+     sasl_gssapi_principal
+     sasl_gssapi_keytab
      sasl_plain_authzid
      sasl_plain_username
      sasl_plain_password
-     sasl_gssapi_principal
-     sasl_gssapi_keytab
+     sasl_scram_username
+     sasl_scram_password
    ].each do |encryption_attribute|
      optional(encryption_attribute).maybe(:str?)
    end
+
+   optional(:ssl_ca_certs_from_system).maybe(:bool?)
+
+   # Kept separate from the other encryption attributes as it has additional rules
+   optional(:sasl_scram_mechanism)
+     .maybe(:str?, included_in?: Karafka::Schemas::SASL_SCRAM_MECHANISMS)
    end
  end
end
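
These schema additions line up with the new `sasl_scram_*` and `ssl_ca_certs_from_system` settings further down in this diff. A minimal sketch of enabling SCRAM auth (the broker address and env var names are illustrative):

    class App < Karafka::App
      setup do |config|
        config.kafka.seed_brokers = %w[kafka+ssl://broker.example.com:9093]
        config.kafka.ssl_ca_certs_from_system = true
        config.kafka.sasl_scram_username = ENV['KAFKA_SCRAM_USER']
        config.kafka.sasl_scram_password = ENV['KAFKA_SCRAM_PASSWORD']
        # Validated against SASL_SCRAM_MECHANISMS above: 'sha256' or 'sha512'
        config.kafka.sasl_scram_mechanism = 'sha512'
      end
    end
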
data/lib/karafka/schemas/consumer_group_topic.rb
@@ -7,7 +7,7 @@ module Karafka
    required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
    required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
    required(:backend).filled(included_in?: %i[inline sidekiq])
-   required(:controller).filled
+   required(:consumer).filled
    required(:parser).filled
    required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
    required(:start_from_beginning).filled(:bool?)
data/lib/karafka/server.rb
@@ -3,6 +3,13 @@
  module Karafka
    # Karafka consuming server class
    class Server
+     @consumer_threads = Concurrent::Array.new
+
+     # How long should we sleep between checks on shutting down consumers
+     SUPERVISION_SLEEP = 1
+     # What system exit code should we use when we terminated forcefully
+     FORCEFUL_EXIT_CODE = 2
+
      class << self
        # Set of consuming threads. Each consumer thread contains a single consumer
        attr_accessor :consumer_threads
@@ -12,7 +19,6 @@ module Karafka
 
        # Method which runs app
        def run
-         @consumer_threads = Concurrent::Array.new
          bind_on_sigint
          bind_on_sigquit
          bind_on_sigterm
@@ -35,17 +41,17 @@ module Karafka
 
        # What should happen when we decide to quit with sigint
        def bind_on_sigint
-         process.on_sigint { Karafka::App.stop! }
+         process.on_sigint { stop_supervised }
        end
 
        # What should happen when we decide to quit with sigquit
        def bind_on_sigquit
-         process.on_sigquit { Karafka::App.stop! }
+         process.on_sigquit { stop_supervised }
        end
 
        # What should happen when we decide to quit with sigterm
        def bind_on_sigterm
-         process.on_sigterm { Karafka::App.stop! }
+         process.on_sigterm { stop_supervised }
        end
 
        # Starts Karafka with a supervision
@@ -54,9 +60,42 @@ module Karafka
        def start_supervised
          process.supervise do
            Karafka::App.run!
-           Karafka::Fetcher.new.fetch_loop
+           Karafka::Fetcher.call
          end
        end
+
+       # Stops Karafka with a supervision (as long as there is a shutdown timeout)
+       # If consumers won't stop in a given timeframe, it will force them to exit
+       def stop_supervised
+         # Because this is called in the trap context, there is a chance that instrumentation
+         # listeners contain things that aren't allowed from within a trap context.
+         # To bypass that (instead of telling users not to do things they need to)
+         # we spin up a thread to instrument server.stop and server.stop.error and wait until
+         # they're finished
+         Thread.new { Karafka.monitor.instrument('server.stop', {}) }.join
+
+         Karafka::App.stop!
+         # If there is no shutdown timeout, we don't exit and wait until all the consumers
+         # have done their work
+         return unless Karafka::App.config.shutdown_timeout
+
+         # If there is a timeout, we check every 1 second (for the timeout period) if all
+         # the threads finished their work and if so, we can just return and the normal
+         # shutdown process will take place
+         Karafka::App.config.shutdown_timeout.to_i.times do
+           return if consumer_threads.count(&:alive?).zero?
+           sleep SUPERVISION_SLEEP
+         end
+
+         raise Errors::ForcefulShutdown
+       rescue Errors::ForcefulShutdown => error
+         Thread.new { Karafka.monitor.instrument('server.stop.error', error: error) }.join
+         # We're done waiting, let's kill them!
+         consumer_threads.each(&:terminate)
+
+         # exit is not within the instrumentation, as it would not trigger due to the exit
+         Kernel.exit FORCEFUL_EXIT_CODE
+       end
      end
    end
  end
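
Condensed to its core, the new shutdown sequence is a bounded poll-then-terminate loop. A framework-free sketch of the same pattern (the method name is illustrative; the constants mirror SUPERVISION_SLEEP and FORCEFUL_EXIT_CODE above):

    # Wait up to `timeout` seconds for worker threads to finish on their own,
    # polling once per second; force-terminate and exit non-zero otherwise.
    def stop_workers(threads, timeout)
      timeout.times do
        return if threads.none?(&:alive?)
        sleep 1 # SUPERVISION_SLEEP
      end
      threads.each(&:terminate)
      Kernel.exit 2 # FORCEFUL_EXIT_CODE
    end
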
data/lib/karafka/setup/config.rb
@@ -13,6 +13,7 @@ module Karafka
    # @see Karafka::Setup::Configurators::Base for more details about configurators api
    class Config
      extend Dry::Configurable
+     extend Callbacks::Config
 
      # Available settings
      # option client_id [String] kafka client_id - used to provide
@@ -21,9 +22,9 @@ module Karafka
      # What backend do we want to use to process messages
      setting :backend, :inline
      # option logger [Instance] logger that we want to use
-     setting :logger, -> { ::Karafka::Logger.instance }
+     setting :logger, -> { ::Karafka::Instrumentation::Logger.instance }
      # option monitor [Instance] monitor that we want to use (defaults to Karafka::Monitor)
-     setting :monitor, -> { ::Karafka::Monitor.instance }
+     setting :monitor, -> { ::Karafka::Instrumentation::Monitor.instance }
      # Mapper used to remap consumer groups ids, so in case users migrate from other tools
      # or they need to maintain their own internal consumer group naming conventions, they
      # can easily do it, replacing the default client_id + consumer name pattern concept
@@ -34,6 +35,8 @@ module Karafka
      # - #incoming - for remapping from the incoming message to our internal format
      # - #outgoing - for remapping from internal topic name into outgoing message
      setting :topic_mapper, -> { Routing::TopicMapper }
+     # Default parser for parsing and unparsing incoming and outgoing data
+     setting :parser, -> { Karafka::Parsers::Json }
      # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
      # @note Fetching does not equal consuming, see batch_consuming description for details
      setting :batch_fetching, true
@@ -41,12 +44,24 @@ module Karafka
      #   #params_batch will contain params received from Kafka (may be more than 1) so we can
      #   process them in batches
      setting :batch_consuming, false
-     # Should we operate in a single controller instance across multiple batches of messages,
-     # from the same partition or should we build a new instance for each incoming batch.
-     # Disabling that can be useful when you want to build a new controller instance for each
-     # incoming batch. It's disabled by default, not to create more objects that needed on
-     # each batch
+     # Should we operate in a single consumer instance across multiple batches of messages,
+     # from the same partition or should we build a new one for each incoming batch.
+     # Disabling that can be useful when you want to create a new consumer instance for each
+     # incoming batch. It's disabled by default, not to create more objects than needed
+     # on each batch
      setting :persistent, true
+     # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
+     #   longer waits for the consumers to stop gracefully but instead forcefully
+     #   terminates everything.
+     # @note If set to nil, it won't forcefully shutdown the process at all.
+     setting :shutdown_timeout, 60
+     # option params_base_class [Class] base class for params class initialization
+     #   This can be either a Hash or a HashWithIndifferentAccess depending on your
+     #   requirements. Note, that by using HashWithIndifferentAccess, you trade some of the
+     #   performance in favor of convenience. This can be useful especially if you already use
+     #   it with Rails, etc.
+     setting :params_base_class, Hash
 
      # option kafka [Hash] - optional - kafka configuration options
      setting :kafka do
@@ -79,6 +94,9 @@ module Karafka
        #   returning messages from the server; if `max_wait_time` is reached, this
        #   is ignored.
        setting :min_bytes, 1
+       # option max_bytes [Integer] the maximum number of bytes to read before returning messages
+       #   from each broker.
+       setting :max_bytes, 10_485_760
        # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
        #   wait before returning data from a single message fetch. By setting this high you also
        #   increase the fetching throughput - and by setting it low you set a bound on latency.
@@ -107,24 +125,33 @@ module Karafka
        setting :socket_timeout, 30
 
        # SSL authentication related settings
-       # option ca_cert [String] SSL CA certificate
+       # option ca_cert [String, nil] SSL CA certificate
        setting :ssl_ca_cert, nil
-       # option ssl_ca_cert_file_path [String] SSL CA certificate file path
+       # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
        setting :ssl_ca_cert_file_path, nil
-       # option ssl_client_cert [String] SSL client certificate
+       # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
+       #   certificate store
+       setting :ssl_ca_certs_from_system, false
+       # option ssl_client_cert [String, nil] SSL client certificate
        setting :ssl_client_cert, nil
-       # option ssl_client_cert_key [String] SSL client certificate password
+       # option ssl_client_cert_key [String, nil] SSL client certificate password
        setting :ssl_client_cert_key, nil
-       # option sasl_gssapi_principal [String] sasl principal
+       # option sasl_gssapi_principal [String, nil] sasl principal
        setting :sasl_gssapi_principal, nil
-       # option sasl_gssapi_keytab [String] sasl keytab
+       # option sasl_gssapi_keytab [String, nil] sasl keytab
        setting :sasl_gssapi_keytab, nil
        # option sasl_plain_authzid [String] The authorization identity to use
        setting :sasl_plain_authzid, ''
-       # option sasl_plain_username [String] The username used to authenticate
+       # option sasl_plain_username [String, nil] The username used to authenticate
        setting :sasl_plain_username, nil
-       # option sasl_plain_password [String] The password used to authenticate
+       # option sasl_plain_password [String, nil] The password used to authenticate
        setting :sasl_plain_password, nil
+       # option sasl_scram_username [String, nil] The username used to authenticate
+       setting :sasl_scram_username, nil
+       # option sasl_scram_password [String, nil] The password used to authenticate
+       setting :sasl_scram_password, nil
+       # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
+       setting :sasl_scram_mechanism, nil
      end
 
      class << self
@@ -132,18 +159,17 @@ module Karafka
        # @yield Runs a block of code providing a config singleton instance to it
        # @yieldparam [Karafka::Setup::Config] Karafka config instance
        def setup
-         configure do |config|
-           yield(config)
-         end
+         configure { |config| yield(config) }
        end
 
        # Everything that should be initialized after the setup
        # Components are in karafka/config directory and are all loaded one by one
        # If you want to configure a next component, please add a proper file to config dir
        def setup_components
-         Configurators::Base.descendants.each do |klass|
-           klass.new(config).setup
-         end
+         [
+           Configurators::Params,
+           Configurators::WaterDrop
+         ].each { |klass| klass.setup(config) }
        end
 
        # Validate config based on ConfigurationSchema
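
Taken together, a boot file exercising the new 1.2 options from this hunk might look like this (a sketch assuming the standard karafka.rb layout; the client id and broker address are placeholders):

    require 'karafka'
    require 'active_support/hash_with_indifferent_access'

    class App < Karafka::App
      setup do |config|
        config.client_id = 'example_app'
        config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
        # Wait at most 30s on shutdown before force-terminating consumers
        # (nil would disable forceful shutdown entirely)
        config.shutdown_timeout = 30
        # Trade a bit of performance for string/symbol-agnostic params access
        config.params_base_class = ActiveSupport::HashWithIndifferentAccess
      end
    end
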