karafka 1.1.1 → 1.2.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (67) hide show
  1. checksums.yaml +5 -5
  2. data/.ruby-version +1 -1
  3. data/.travis.yml +1 -0
  4. data/CHANGELOG.md +40 -0
  5. data/Gemfile +1 -2
  6. data/Gemfile.lock +41 -29
  7. data/README.md +7 -4
  8. data/karafka.gemspec +6 -4
  9. data/lib/karafka.rb +17 -7
  10. data/lib/karafka/app.rb +8 -15
  11. data/lib/karafka/attributes_map.rb +1 -1
  12. data/lib/karafka/backends/inline.rb +1 -2
  13. data/lib/karafka/{base_controller.rb → base_consumer.rb} +19 -11
  14. data/lib/karafka/base_responder.rb +33 -14
  15. data/lib/karafka/callbacks.rb +30 -0
  16. data/lib/karafka/callbacks/config.rb +22 -0
  17. data/lib/karafka/callbacks/dsl.rb +16 -0
  18. data/lib/karafka/cli/install.rb +2 -3
  19. data/lib/karafka/cli/server.rb +0 -1
  20. data/lib/karafka/connection/{consumer.rb → client.rb} +32 -36
  21. data/lib/karafka/connection/config_adapter.rb +14 -6
  22. data/lib/karafka/connection/delegator.rb +46 -0
  23. data/lib/karafka/connection/listener.rb +22 -13
  24. data/lib/karafka/{controllers → consumers}/callbacks.rb +9 -9
  25. data/lib/karafka/consumers/includer.rb +51 -0
  26. data/lib/karafka/consumers/responders.rb +24 -0
  27. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  28. data/lib/karafka/errors.rb +10 -3
  29. data/lib/karafka/fetcher.rb +30 -34
  30. data/lib/karafka/helpers/class_matcher.rb +8 -8
  31. data/lib/karafka/helpers/config_retriever.rb +2 -2
  32. data/lib/karafka/instrumentation/listener.rb +112 -0
  33. data/lib/karafka/instrumentation/logger.rb +55 -0
  34. data/lib/karafka/instrumentation/monitor.rb +64 -0
  35. data/lib/karafka/loader.rb +0 -1
  36. data/lib/karafka/params/{params.rb → dsl.rb} +71 -43
  37. data/lib/karafka/params/params_batch.rb +7 -2
  38. data/lib/karafka/patches/dry_configurable.rb +6 -2
  39. data/lib/karafka/patches/ruby_kafka.rb +10 -10
  40. data/lib/karafka/persistence/client.rb +25 -0
  41. data/lib/karafka/persistence/consumer.rb +27 -14
  42. data/lib/karafka/persistence/topic.rb +29 -0
  43. data/lib/karafka/process.rb +5 -4
  44. data/lib/karafka/responders/builder.rb +15 -14
  45. data/lib/karafka/routing/builder.rb +1 -1
  46. data/lib/karafka/routing/consumer_mapper.rb +3 -2
  47. data/lib/karafka/routing/router.rb +1 -1
  48. data/lib/karafka/routing/topic.rb +5 -5
  49. data/lib/karafka/schemas/config.rb +3 -0
  50. data/lib/karafka/schemas/consumer_group.rb +15 -3
  51. data/lib/karafka/schemas/consumer_group_topic.rb +1 -1
  52. data/lib/karafka/server.rb +37 -5
  53. data/lib/karafka/setup/config.rb +45 -21
  54. data/lib/karafka/setup/configurators/base.rb +6 -12
  55. data/lib/karafka/setup/configurators/params.rb +25 -0
  56. data/lib/karafka/setup/configurators/water_drop.rb +6 -3
  57. data/lib/karafka/setup/dsl.rb +22 -0
  58. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.example} +2 -3
  59. data/lib/karafka/templates/karafka.rb.example +17 -4
  60. data/lib/karafka/version.rb +1 -1
  61. metadata +58 -23
  62. data/lib/karafka/connection/processor.rb +0 -61
  63. data/lib/karafka/controllers/includer.rb +0 -51
  64. data/lib/karafka/controllers/responders.rb +0 -19
  65. data/lib/karafka/logger.rb +0 -53
  66. data/lib/karafka/monitor.rb +0 -98
  67. data/lib/karafka/persistence/controller.rb +0 -38
@@ -8,7 +8,9 @@ module Karafka
8
8
 
9
9
  # Signal types that we handle
10
10
  HANDLED_SIGNALS = %i[
11
- SIGINT SIGQUIT SIGTERM
11
+ SIGINT
12
+ SIGQUIT
13
+ SIGTERM
12
14
  ].freeze
13
15
 
14
16
  HANDLED_SIGNALS.each do |signal|
@@ -27,8 +29,7 @@ module Karafka
27
29
 
28
30
  # Creates an instance of process and creates empty hash for callbacks
29
31
  def initialize
30
- @callbacks = {}
31
- HANDLED_SIGNALS.each { |signal| @callbacks[signal] = [] }
32
+ @callbacks = Hash.new { |hsh, key| hsh[key] = [] }
32
33
  end
33
34
 
34
35
  # Method catches all HANDLED_SIGNALS and performs appropriate callbacks (if defined)
@@ -56,7 +57,7 @@ module Karafka
56
57
  # we have to spin up a new thread to do this
57
58
  def notice_signal(signal)
58
59
  Thread.new do
59
- Karafka.monitor.notice(self.class, signal: signal)
60
+ Karafka.monitor.instrument('process.notice_signal', caller: self, signal: signal)
60
61
  end
61
62
  end
62
63
  end
@@ -3,30 +3,31 @@
3
3
  module Karafka
4
4
  # Responders namespace encapsulates all the internal responder implementation parts
5
5
  module Responders
6
- # Responders builder is used to find (based on the controller class name) a responder that
7
- # match the controller. This is used when user does not provide a responder inside routing
8
- # but he still names responder with the same convention (and namespaces) as controller
6
+ # Responders builder is used to find (based on the consumer class name) a responder
7
+ # that matches the consumer. We use it when the user does not provide a responder inside routing,
8
+ # but they still name the responder with the same convention (and namespaces) as the consumer
9
+ #
9
10
  # @example Matching responder exists
10
- # Karafka::Responder::Builder(NewEventsController).build #=> NewEventsResponder
11
+ # Karafka::Responder::Builder(NewEventsConsumer).build #=> NewEventsResponder
11
12
  # @example Matching responder does not exist
12
- # Karafka::Responder::Builder(NewBuildsController).build #=> nil
13
+ # Karafka::Responder::Builder(NewBuildsConsumer).build #=> nil
13
14
  class Builder
14
- # @param controller_class [Karafka::BaseController, nil] descendant of
15
- # Karafka::BaseController
16
- # @example Tries to find a responder that matches a given controller. If nothing found,
17
- # will return nil (nil is accepted, because it means that a given controller don't
15
+ # @param consumer_class [Karafka::BaseConsumer, nil] descendant of
16
+ # Karafka::BaseConsumer
17
+ # @example Tries to find a responder that matches a given consumer. If nothing found,
18
+ #     will return nil (nil is accepted, because it means that a given consumer doesn't
18
19
  # pipe stuff further on)
19
- def initialize(controller_class)
20
- @controller_class = controller_class
20
+ def initialize(consumer_class)
21
+ @consumer_class = consumer_class
21
22
  end
22
23
 
23
- # Tries to figure out a responder based on a controller class name
24
+ # Tries to figure out a responder based on a consumer class name
24
25
  # @return [Class] Responder class (not an instance)
25
26
  # @return [nil] or nil if there's no matching responding class
26
27
  def build
27
28
  Helpers::ClassMatcher.new(
28
- @controller_class,
29
- from: 'Controller',
29
+ @consumer_class,
30
+ from: 'Consumer',
30
31
  to: 'Responder'
31
32
  ).match
32
33
  end
@@ -6,7 +6,7 @@ module Karafka
6
6
  # @example Build a simple (most common) route
7
7
  # consumers do
8
8
  # topic :new_videos do
9
- # controller NewVideosController
9
+ # consumer NewVideosConsumer
10
10
  # end
11
11
  # end
12
12
  class Builder < Array
@@ -17,7 +17,7 @@ module Karafka
17
17
  # module MyMapper
18
18
  # def self.call(raw_consumer_group_name)
19
19
  # [
20
- # Karafka::App.config.client_id.to_s.underscope,
20
+ # Dry::Inflector.new.underscore(Karafka::App.config.client_id.to_s),
21
21
  # raw_consumer_group_name
22
22
  # ].join('_').gsub('_', '.')
23
23
  # end
@@ -26,7 +26,8 @@ module Karafka
26
26
  # @param raw_consumer_group_name [String, Symbol] string or symbolized consumer group name
27
27
  # @return [String] remapped final consumer group name
28
28
  def self.call(raw_consumer_group_name)
29
- "#{Karafka::App.config.client_id.to_s.underscore}_#{raw_consumer_group_name}"
29
+ client_name = Dry::Inflector.new.underscore(Karafka::App.config.client_id.to_s)
30
+ "#{client_name}_#{raw_consumer_group_name}"
30
31
  end
31
32
  end
32
33
  end
@@ -3,7 +3,7 @@
3
3
  module Karafka
4
4
  # Namespace for all elements related to requests routing
5
5
  module Routing
6
- # Karafka framework Router for routing incoming messages to proper controllers
6
+ # Karafka framework Router for routing incoming messages to proper consumers
7
7
  # @note Since Kafka does not provide namespaces or modules for topics, they all have "flat"
8
8
  # structure so all the routes are being stored in a single level array
9
9
  module Router
@@ -9,7 +9,7 @@ module Karafka
9
9
  extend Helpers::ConfigRetriever
10
10
 
11
11
  attr_reader :id, :consumer_group
12
- attr_accessor :controller
12
+ attr_accessor :consumer
13
13
 
14
14
  # @param [String, Symbol] name of a topic on which we want to listen
15
15
  # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
@@ -29,14 +29,14 @@ module Karafka
29
29
  # example for Sidekiq
30
30
  def build
31
31
  Karafka::AttributesMap.topic.each { |attr| send(attr) }
32
- controller&.topic = self
32
+ consumer&.topic = self
33
33
  self
34
34
  end
35
35
 
36
36
  # @return [Class, nil] Class (not an instance) of a responder that should respond from
37
- # controller back to Kafka (usefull for piping dataflows)
37
+ #   consumer back to Kafka (useful for piping dataflows)
38
38
  def responder
39
- @responder ||= Karafka::Responders::Builder.new(controller).build
39
+ @responder ||= Karafka::Responders::Builder.new(consumer).build
40
40
  end
41
41
 
42
42
  Karafka::AttributesMap.topic.each do |attribute|
@@ -52,7 +52,7 @@ module Karafka
52
52
 
53
53
  Hash[map].merge!(
54
54
  id: id,
55
- controller: controller
55
+ consumer: consumer
56
56
  )
57
57
  end
58
58
  end
@@ -13,8 +13,11 @@ module Karafka
13
13
  # so we validate all of that once all the routes are defined and ready
14
14
  Config = Dry::Validation.Schema do
15
15
  required(:client_id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
16
+ required(:shutdown_timeout) { none? | (int? & gteq?(0)) }
16
17
  required(:consumer_mapper)
17
18
  required(:topic_mapper)
19
+ required(:params_base_class).filled
20
+
18
21
  optional(:backend).filled
19
22
  end
20
23
  end
@@ -9,6 +9,9 @@ module Karafka
9
9
  # but someetimes loads things twice
10
10
  URI_SCHEMES ||= %w[kafka kafka+ssl].freeze
11
11
 
12
+ # Available SASL SCRAM mechanisms of authentication (plus nil)
13
+ SASL_SCRAM_MECHANISMS ||= %w[sha256 sha512].freeze
14
+
12
15
  configure do
13
16
  config.messages_file = File.join(
14
17
  Karafka.gem_root, 'config', 'errors.yml'
@@ -21,7 +24,7 @@ module Karafka
21
24
  uri = URI.parse(uri)
22
25
  URI_SCHEMES.include?(uri.scheme) && uri.port
23
26
  rescue URI::InvalidURIError
24
- return false
27
+ false
25
28
  end
26
29
  end
27
30
 
@@ -36,6 +39,7 @@ module Karafka
36
39
  required(:connect_timeout).filled { (int? | float?) & gt?(0) }
37
40
  required(:socket_timeout).filled { (int? | float?) & gt?(0) }
38
41
  required(:min_bytes).filled(:int?, gt?: 0)
42
+ required(:max_bytes).filled(:int?, gt?: 0)
39
43
  required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
40
44
  required(:batch_fetching).filled(:bool?)
41
45
  required(:topics).filled { each { schema(ConsumerGroupTopic) } }
@@ -52,14 +56,22 @@ module Karafka
52
56
  ssl_ca_cert_file_path
53
57
  ssl_client_cert
54
58
  ssl_client_cert_key
59
+ sasl_gssapi_principal
60
+ sasl_gssapi_keytab
55
61
  sasl_plain_authzid
56
62
  sasl_plain_username
57
63
  sasl_plain_password
58
- sasl_gssapi_principal
59
- sasl_gssapi_keytab
64
+ sasl_scram_username
65
+ sasl_scram_password
60
66
  ].each do |encryption_attribute|
61
67
  optional(encryption_attribute).maybe(:str?)
62
68
  end
69
+
70
+ optional(:ssl_ca_certs_from_system).maybe(:bool?)
71
+
72
+ # It's not grouped with the other encryption attributes as it has some additional rules
73
+ optional(:sasl_scram_mechanism)
74
+ .maybe(:str?, included_in?: Karafka::Schemas::SASL_SCRAM_MECHANISMS)
63
75
  end
64
76
  end
65
77
  end
@@ -7,7 +7,7 @@ module Karafka
7
7
  required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
8
8
  required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
9
9
  required(:backend).filled(included_in?: %i[inline sidekiq])
10
- required(:controller).filled
10
+ required(:consumer).filled
11
11
  required(:parser).filled
12
12
  required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
13
13
  required(:start_from_beginning).filled(:bool?)
@@ -3,6 +3,13 @@
3
3
  module Karafka
4
4
  # Karafka consuming server class
5
5
  class Server
6
+ @consumer_threads = Concurrent::Array.new
7
+
8
+ # How long should we sleep between checks on shutting down consumers
9
+ SUPERVISION_SLEEP = 1
10
+ # What system exit code should we use when we terminated forcefully
11
+ FORCEFUL_EXIT_CODE = 2
12
+
6
13
  class << self
7
14
  # Set of consuming threads. Each consumer thread contains a single consumer
8
15
  attr_accessor :consumer_threads
@@ -12,7 +19,6 @@ module Karafka
12
19
 
13
20
  # Method which runs app
14
21
  def run
15
- @consumer_threads = Concurrent::Array.new
16
22
  bind_on_sigint
17
23
  bind_on_sigquit
18
24
  bind_on_sigterm
@@ -35,17 +41,17 @@ module Karafka
35
41
 
36
42
  # What should happen when we decide to quit with sigint
37
43
  def bind_on_sigint
38
- process.on_sigint { Karafka::App.stop! }
44
+ process.on_sigint { stop_supervised }
39
45
  end
40
46
 
41
47
  # What should happen when we decide to quit with sigquit
42
48
  def bind_on_sigquit
43
- process.on_sigquit { Karafka::App.stop! }
49
+ process.on_sigquit { stop_supervised }
44
50
  end
45
51
 
46
52
  # What should happen when we decide to quit with sigterm
47
53
  def bind_on_sigterm
48
- process.on_sigterm { Karafka::App.stop! }
54
+ process.on_sigterm { stop_supervised }
49
55
  end
50
56
 
51
57
  # Starts Karafka with a supervision
@@ -54,9 +60,35 @@ module Karafka
54
60
  def start_supervised
55
61
  process.supervise do
56
62
  Karafka::App.run!
57
- Karafka::Fetcher.new.fetch_loop
63
+ Karafka::Fetcher.call
58
64
  end
59
65
  end
66
+
67
+ # Stops Karafka with a supervision (as long as there is a shutdown timeout)
68
+ # If consumers won't stop in a given timeframe, it will force them to exit
69
+ def stop_supervised
70
+ Karafka.monitor.instrument('server.stop', {})
71
+
72
+ Karafka::App.stop!
73
+ # If there is no shutdown timeout, we don't exit and wait until all the consumers
74
+ # have done their work
75
+ return unless Karafka::App.config.shutdown_timeout
76
+
77
+ # If there is a timeout, we check every 1 second (for the timeout period) if all
78
+ # the threads finished their work and if so, we can just return and normal
79
+ # shutdown process will take place
80
+ Karafka::App.config.shutdown_timeout.to_i.times do
81
+ return if consumer_threads.count(&:alive?).zero?
82
+ sleep SUPERVISION_SLEEP
83
+ end
84
+
85
+ Karafka.monitor.instrument('server.stop.error', {})
86
+ # We're done waiting, lets kill them!
87
+ consumer_threads.each(&:terminate)
88
+
89
+ # exit is not within the instrumentation as it would not trigger due to exit
90
+ Kernel.exit FORCEFUL_EXIT_CODE
91
+ end
60
92
  end
61
93
  end
62
94
  end
@@ -13,6 +13,7 @@ module Karafka
13
13
  # @see Karafka::Setup::Configurators::Base for more details about configurators api
14
14
  class Config
15
15
  extend Dry::Configurable
16
+ extend Callbacks::Config
16
17
 
17
18
  # Available settings
18
19
  # option client_id [String] kafka client_id - used to provide
@@ -21,9 +22,9 @@ module Karafka
21
22
  # What backend do we want to use to process messages
22
23
  setting :backend, :inline
23
24
  # option logger [Instance] logger that we want to use
24
- setting :logger, -> { ::Karafka::Logger.instance }
25
+ setting :logger, -> { ::Karafka::Instrumentation::Logger.instance }
25
26
  # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
26
- setting :monitor, -> { ::Karafka::Monitor.instance }
27
+ setting :monitor, -> { ::Karafka::Instrumentation::Monitor.instance }
27
28
  # Mapper used to remap consumer groups ids, so in case users migrate from other tools
28
29
  # or they need to maintain their own internal consumer group naming conventions, they
29
30
  # can easily do it, replacing the default client_id + consumer name pattern concept
@@ -43,12 +44,24 @@ module Karafka
43
44
  # #params_batch will contain params received from Kafka (may be more than 1) so we can
44
45
  # process them in batches
45
46
  setting :batch_consuming, false
46
- # Should we operate in a single controller instance across multiple batches of messages,
47
- # from the same partition or should we build a new instance for each incoming batch.
48
- # Disabling that can be useful when you want to build a new controller instance for each
49
- # incoming batch. It's disabled by default, not to create more objects that needed on
50
- # each batch
47
+ # Should we operate in a single consumer instance across multiple batches of messages,
48
+ # from the same partition or should we build a new one for each incoming batch.
49
+ # Disabling that can be useful when you want to create a new consumer instance for each
50
+ # incoming batch. It's disabled by default, not to create more objects than needed
51
+ # on each batch
51
52
  setting :persistent, true
53
+ # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
54
+ #   longer waits for the consumers to stop gracefully but instead we force
55
+ # terminate everything.
56
+ #   @note Keep in mind, that if your business logic takes longer than this timeout, it will be forcefully terminated
57
+ # @note If set to nil, it won't forcefully shutdown the process at all.
58
+ setting :shutdown_timeout, 60
59
+ # option params_base_class [Class] base class for params class initialization
60
+ # This can be either a Hash or a HashWithIndifferentAccess depending on your
61
+ # requirements. Note, that by using HashWithIndifferentAccess, you remove some of the
62
+ # performance in favor of convenience. This can be useful especially if you already use
63
+ # it with Rails, etc
64
+ setting :params_base_class, Hash
52
65
 
53
66
  # option kafka [Hash] - optional - kafka configuration options
54
67
  setting :kafka do
@@ -81,6 +94,9 @@ module Karafka
81
94
  # returning messages from the server; if `max_wait_time` is reached, this
82
95
  # is ignored.
83
96
  setting :min_bytes, 1
97
+ # option max_bytes [Integer] the maximum number of bytes to read before returning messages
98
+ # from each broker.
99
+ setting :max_bytes, 10_485_760
84
100
  # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
85
101
  # wait before returning data from a single message fetch. By setting this high you also
86
102
  # increase the fetching throughput - and by setting it low you set a bound on latency.
@@ -109,24 +125,33 @@ module Karafka
109
125
  setting :socket_timeout, 30
110
126
 
111
127
  # SSL authentication related settings
112
- # option ca_cert [String] SSL CA certificate
128
+ # option ca_cert [String, nil] SSL CA certificate
113
129
  setting :ssl_ca_cert, nil
114
- # option ssl_ca_cert_file_path [String] SSL CA certificate file path
130
+ # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
115
131
  setting :ssl_ca_cert_file_path, nil
116
- # option ssl_client_cert [String] SSL client certificate
132
+ # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
133
+ # certificate store
134
+ setting :ssl_ca_certs_from_system, false
135
+ # option ssl_client_cert [String, nil] SSL client certificate
117
136
  setting :ssl_client_cert, nil
118
- # option ssl_client_cert_key [String] SSL client certificate password
137
+ # option ssl_client_cert_key [String, nil] SSL client certificate password
119
138
  setting :ssl_client_cert_key, nil
120
- # option sasl_gssapi_principal [String] sasl principal
139
+ # option sasl_gssapi_principal [String, nil] sasl principal
121
140
  setting :sasl_gssapi_principal, nil
122
- # option sasl_gssapi_keytab [String] sasl keytab
141
+ # option sasl_gssapi_keytab [String, nil] sasl keytab
123
142
  setting :sasl_gssapi_keytab, nil
124
143
  # option sasl_plain_authzid [String] The authorization identity to use
125
144
  setting :sasl_plain_authzid, ''
126
- # option sasl_plain_username [String] The username used to authenticate
145
+ # option sasl_plain_username [String, nil] The username used to authenticate
127
146
  setting :sasl_plain_username, nil
128
- # option sasl_plain_password [String] The password used to authenticate
147
+ # option sasl_plain_password [String, nil] The password used to authenticate
129
148
  setting :sasl_plain_password, nil
149
+ # option sasl_scram_username [String, nil] The username used to authenticate
150
+ setting :sasl_scram_username, nil
151
+ # option sasl_scram_password [String, nil] The password used to authenticate
152
+ setting :sasl_scram_password, nil
153
+ # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
154
+ setting :sasl_scram_mechanism, nil
130
155
  end
131
156
 
132
157
  class << self
@@ -134,18 +159,17 @@ module Karafka
134
159
  # @yield Runs a block of code providing a config singleton instance to it
135
160
  # @yieldparam [Karafka::Setup::Config] Karafka config instance
136
161
  def setup
137
- configure do |config|
138
- yield(config)
139
- end
162
+ configure { |config| yield(config) }
140
163
  end
141
164
 
142
165
  # Everything that should be initialized after the setup
143
166
  # Components are in karafka/config directory and are all loaded one by one
144
167
  # If you want to configure a next component, please add a proper file to config dir
145
168
  def setup_components
146
- Configurators::Base.descendants.each do |klass|
147
- klass.new(config).setup
148
- end
169
+ [
170
+ Configurators::Params,
171
+ Configurators::WaterDrop
172
+ ].each { |klass| klass.setup(config) }
149
173
  end
150
174
 
151
175
  # Validate config based on ConfigurationSchema
@@ -3,10 +3,13 @@
3
3
  module Karafka
4
4
  module Setup
5
5
  # Configurators module is used to enclose all the external dependencies configurations
6
+ # upon which Karafka depends
6
7
  class Configurators
7
- # Karafka has come components that it relies on (like Sidekiq)
8
+ # Karafka has some components that it relies on (like Sidekiq)
8
9
  # We need to configure all of them only when the framework was set up.
9
10
  # Any class that descends from this one will be automatically invoked upon setup (after it)
11
+ # @note This should be used only for internal Karafka dependencies configuration
12
+ # End users configuration should go to the after_init block
10
13
  # @example Configure an Example class
11
14
  # class ExampleConfigurator < Base
12
15
  # def setup
@@ -15,18 +18,9 @@ module Karafka
15
18
  # end
16
19
  # end
17
20
  class Base
18
- extend ActiveSupport::DescendantsTracker
19
-
20
- attr_reader :config
21
-
22
- # @param config [Karafka::Config] config instance
23
- # @return [Karafka::Config::Base] configurator for a given component
24
- def initialize(config)
25
- @config = config
26
- end
27
-
21
+ # @param _config [Karafka::Config] config instance
28
22
  # This method needs to be implemented in a subclass
29
- def setup
23
+ def self.setup(_config)
30
24
  raise NotImplementedError
31
25
  end
32
26
  end