karafka 1.1.0 → 1.3.0

Files changed (114)
  1. checksums.yaml +5 -5
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/.travis.yml +35 -16
  12. data/CHANGELOG.md +151 -2
  13. data/CONTRIBUTING.md +6 -7
  14. data/Gemfile +3 -3
  15. data/Gemfile.lock +96 -70
  16. data/README.md +29 -23
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +25 -0
  19. data/config/errors.yml +38 -5
  20. data/karafka.gemspec +19 -10
  21. data/lib/karafka.rb +15 -12
  22. data/lib/karafka/app.rb +19 -18
  23. data/lib/karafka/attributes_map.rb +15 -14
  24. data/lib/karafka/backends/inline.rb +1 -2
  25. data/lib/karafka/base_consumer.rb +57 -0
  26. data/lib/karafka/base_responder.rb +72 -31
  27. data/lib/karafka/cli.rb +1 -1
  28. data/lib/karafka/cli/console.rb +11 -9
  29. data/lib/karafka/cli/flow.rb +0 -1
  30. data/lib/karafka/cli/info.rb +3 -1
  31. data/lib/karafka/cli/install.rb +29 -8
  32. data/lib/karafka/cli/server.rb +11 -7
  33. data/lib/karafka/code_reloader.rb +67 -0
  34. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +67 -24
  35. data/lib/karafka/connection/batch_delegator.rb +51 -0
  36. data/lib/karafka/connection/builder.rb +16 -0
  37. data/lib/karafka/connection/client.rb +117 -0
  38. data/lib/karafka/connection/listener.rb +37 -17
  39. data/lib/karafka/connection/message_delegator.rb +36 -0
  40. data/lib/karafka/consumers/callbacks.rb +71 -0
  41. data/lib/karafka/consumers/includer.rb +63 -0
  42. data/lib/karafka/consumers/metadata.rb +10 -0
  43. data/lib/karafka/consumers/responders.rb +24 -0
  44. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  45. data/lib/karafka/contracts.rb +10 -0
  46. data/lib/karafka/contracts/config.rb +21 -0
  47. data/lib/karafka/contracts/consumer_group.rb +206 -0
  48. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  49. data/lib/karafka/contracts/responder_usage.rb +54 -0
  50. data/lib/karafka/contracts/server_cli_options.rb +29 -0
  51. data/lib/karafka/errors.rb +23 -15
  52. data/lib/karafka/fetcher.rb +6 -12
  53. data/lib/karafka/helpers/class_matcher.rb +19 -9
  54. data/lib/karafka/helpers/config_retriever.rb +3 -3
  55. data/lib/karafka/helpers/inflector.rb +26 -0
  56. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  57. data/lib/karafka/instrumentation/logger.rb +57 -0
  58. data/lib/karafka/instrumentation/monitor.rb +70 -0
  59. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  60. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  61. data/lib/karafka/params/builders/metadata.rb +33 -0
  62. data/lib/karafka/params/builders/params.rb +36 -0
  63. data/lib/karafka/params/builders/params_batch.rb +25 -0
  64. data/lib/karafka/params/metadata.rb +35 -0
  65. data/lib/karafka/params/params.rb +35 -95
  66. data/lib/karafka/params/params_batch.rb +38 -18
  67. data/lib/karafka/patches/ruby_kafka.rb +25 -12
  68. data/lib/karafka/persistence/client.rb +29 -0
  69. data/lib/karafka/persistence/consumers.rb +45 -0
  70. data/lib/karafka/persistence/topics.rb +48 -0
  71. data/lib/karafka/process.rb +5 -8
  72. data/lib/karafka/responders/builder.rb +15 -14
  73. data/lib/karafka/responders/topic.rb +6 -8
  74. data/lib/karafka/routing/builder.rb +37 -9
  75. data/lib/karafka/routing/consumer_group.rb +1 -1
  76. data/lib/karafka/routing/consumer_mapper.rb +10 -9
  77. data/lib/karafka/routing/proxy.rb +10 -1
  78. data/lib/karafka/routing/router.rb +1 -1
  79. data/lib/karafka/routing/topic.rb +8 -12
  80. data/lib/karafka/routing/topic_mapper.rb +16 -18
  81. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  82. data/lib/karafka/serialization/json/serializer.rb +31 -0
  83. data/lib/karafka/server.rb +45 -24
  84. data/lib/karafka/setup/config.rb +95 -37
  85. data/lib/karafka/setup/configurators/water_drop.rb +12 -5
  86. data/lib/karafka/setup/dsl.rb +21 -0
  87. data/lib/karafka/status.rb +7 -3
  88. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.erb} +2 -2
  89. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  90. data/lib/karafka/templates/karafka.rb.erb +92 -0
  91. data/lib/karafka/version.rb +1 -1
  92. metadata +126 -57
  93. metadata.gz.sig +0 -0
  94. data/.github/ISSUE_TEMPLATE.md +0 -2
  95. data/lib/karafka/base_controller.rb +0 -60
  96. data/lib/karafka/connection/consumer.rb +0 -121
  97. data/lib/karafka/connection/processor.rb +0 -61
  98. data/lib/karafka/controllers/callbacks.rb +0 -54
  99. data/lib/karafka/controllers/includer.rb +0 -51
  100. data/lib/karafka/controllers/responders.rb +0 -19
  101. data/lib/karafka/loader.rb +0 -29
  102. data/lib/karafka/logger.rb +0 -53
  103. data/lib/karafka/monitor.rb +0 -98
  104. data/lib/karafka/parsers/json.rb +0 -38
  105. data/lib/karafka/patches/dry_configurable.rb +0 -31
  106. data/lib/karafka/persistence/consumer.rb +0 -25
  107. data/lib/karafka/persistence/controller.rb +0 -38
  108. data/lib/karafka/schemas/config.rb +0 -21
  109. data/lib/karafka/schemas/consumer_group.rb +0 -65
  110. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  111. data/lib/karafka/schemas/responder_usage.rb +0 -39
  112. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  113. data/lib/karafka/setup/configurators/base.rb +0 -35
  114. data/lib/karafka/templates/karafka.rb.example +0 -41
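The headline change in this list is the rename of controllers to consumers: base_controller.rb and the controllers/ namespace are removed in favour of base_consumer.rb, consumers/ and the application_consumer.rb.erb install template. As a rough orientation, a 1.3-style consumer class might look like the sketch below (EventsConsumer and Event are illustrative names, not part of this diff; the API details are assumed from the renamed files rather than spelled out here):

# frozen_string_literal: true

# Hedged sketch of a karafka 1.3 consumer. Assumes the ApplicationConsumer base
# class generated by `karafka install`, the #consume entry point and the
# #params_batch accessor; Event is a placeholder model.
class EventsConsumer < ApplicationConsumer
  def consume
    # Each element of params_batch represents a single Kafka message
    params_batch.each do |params|
      Event.create!(params.payload)
    end
  end
end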
data/lib/karafka/monitor.rb
@@ -1,98 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Monitor is used to hookup external monitoring services to monitor how Karafka works
-   # It provides a standarized API for checking incoming messages/enqueueing etc
-   # By default it implements logging functionalities but can be replaced with any more
-   # sophisticated logging/monitoring system like Errbit, Airbrake, NewRelic
-   # @note This class acts as a singleton because we are only permitted to have single monitor
-   #   per running process (just as logger)
-   # Keep in mind, that if you create your own monitor object, you will have to implement also
-   # logging functionality (or just inherit, super and do whatever you want)
-   class Monitor
-     include Singleton
-
-     # This method is executed in many important places in the code (during data flow), like
-     # the moment before #consume_async, etc. For full list just grep for 'monitor.notice'
-     # @param caller_class [Class] class of object that executed this call
-     # @param options [Hash] hash with options that we passed to notice. It differs based
-     #   on of who and when is calling
-     # @note We don't provide a name of method in which this was called, because we can take
-     #   it directly from Ruby (see #caller_label method of this class for more details)
-     # @example Notice about consuming with controller_class
-     #   Karafka.monitor.notice(self.class, controller_class: controller_class)
-     # @example Notice about terminating with a signal
-     #   Karafka.monitor.notice(self.class, signal: signal)
-     def notice(caller_class, options = {})
-       logger.info("#{caller_class}##{caller_label} with #{options}")
-     end
-
-     # This method is executed when we want to notify about an error that happened somewhere
-     # in the system
-     # @param caller_class [Class] class of object that executed this call
-     # @param e [Exception] exception that was raised
-     # @note We don't provide a name of method in which this was called, because we can take
-     #   it directly from Ruby (see #caller_label method of this class for more details)
-     # @example Notify about error
-     #   Karafka.monitor.notice(self.class, e)
-     def notice_error(caller_class, e)
-       caller_exceptions_map.each do |level, types|
-         next unless types.include?(caller_class)
-
-         return logger.public_send(level, e)
-       end
-
-       logger.info(e)
-     end
-
-     private
-
-     # @return [Hash] Hash containing informations on which level of notification should
-     #   we use for exceptions that happen in certain parts of Karafka
-     # @note Keep in mind that any not handled here class should be logged with info
-     # @note Those are not maps of exceptions classes but of classes that were callers of this
-     #   particular exception
-     def caller_exceptions_map
-       @caller_exceptions_map ||= {
-         error: [
-           Karafka::Connection::Consumer,
-           Karafka::Connection::Listener,
-           Karafka::Params::Params
-         ],
-         fatal: [
-           Karafka::Fetcher
-         ]
-       }
-     end
-
-     # @return [String] label of method that invoked #notice or #notice_error
-     # @example Check label of method that invoked #notice
-     #   caller_label #=> 'fetch'
-     # @example Check label of method that invoked #notice in a block
-     #   caller_label #=> 'block in fetch'
-     # @example Check label of method that invoked #notice_error
-     #   caller_label #=> 'rescue in target'
-     def caller_label
-       # We need to calculate ancestors because if someone inherits
-       # from this class, caller chains is longer
-       index = self.class.ancestors.index(Karafka::Monitor)
-       # caller_locations has a differs in result whether it is a subclass of
-       # Karafka::Monitor, the basic Karafka::Monitor itself or a super for a subclass.
-       # So to cover all the cases we need to differentiate.
-       # @see https://github.com/karafka/karafka/issues/128
-       # @note It won't work if the monitor caller_label caller class is defined using
-       #   define method
-       super_execution = caller_locations(1, 2)[0].label == caller_locations(1, 2)[1].label
-
-       scope = super_execution ? 1 : nil
-       scope ||= index.positive? ? 0 : 1
-
-       caller_locations(index + 1, 2)[scope].label
-     end
-
-     # @return [Logger] logger instance
-     def logger
-       Karafka.logger
-     end
-   end
- end
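The Monitor removed above is superseded by the instrumentation layer added in this release (instrumentation/monitor.rb, stdout_listener.rb, proctitle_listener.rb). Instead of a singleton with #notice / #notice_error, 1.3 exposes a pub/sub style monitor. A hedged sketch of how it is typically wired up; the block-subscription event name is assumed rather than taken from this diff:

# frozen_string_literal: true

# Assumes the 1.3 instrumentation API: Karafka.monitor accepts listener objects
# and per-event block subscriptions.
Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)
Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)

# Block subscription to a single event (event name assumed for illustration)
Karafka.monitor.subscribe('connection.listener.fetch_loop.error') do |event|
  ErrorTracker.notify(event[:error]) # ErrorTracker is a placeholder
end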
data/lib/karafka/parsers/json.rb
@@ -1,38 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Module for all supported by default parsers for incoming/outgoing data
-   module Parsers
-     # Default Karafka Json parser for serializing and deserializing data
-     class Json
-       # @param content [String] content based on which we want to get our hash
-       # @return [Hash] hash with parsed JSON data
-       # @example
-       #   Json.parse("{\"a\":1}") #=> { 'a' => 1 }
-       def self.parse(content)
-         ::MultiJson.load(content)
-       rescue ::MultiJson::ParseError => e
-         raise ::Karafka::Errors::ParserError, e
-       end
-
-       # @param content [Object] any object that we want to convert to a json string
-       # @return [String] Valid JSON string containing serialized data
-       # @raise [Karafka::Errors::ParserError] raised when we don't have a way to parse
-       #   given content to a json string format
-       # @note When string is passed to this method, we assume that it is already a json
-       #   string and we don't serialize it again. This allows us to serialize data before
-       #   it is being forwarded to a parser if we want to have a custom (not that simple)
-       #   json serialization
-       #
-       # @example From an ActiveRecord object
-       #   Json.generate(Repository.first) #=> "{\"repository\":{\"id\":\"04b504e0\"}}"
-       # @example From a string (no changes)
-       #   Json.generate("{\"a\":1}") #=> "{\"a\":1}"
-       def self.generate(content)
-         return content if content.is_a?(String)
-         return content.to_json if content.respond_to?(:to_json)
-         raise Karafka::Errors::ParserError, content
-       end
-     end
-   end
- end
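In 1.3 the parser concept splits into the serialization namespace listed above (serialization/json/serializer.rb and serialization/json/deserializer.rb). A hedged sketch of a custom deserializer under the new convention, namely an object responding to #call(params) that reads the raw message payload; the XML example and Hash.from_xml (ActiveSupport) are illustrative:

# frozen_string_literal: true

# Hedged sketch of a 1.3-style deserializer; the #call(params) contract and the
# params.raw_payload reader are assumptions based on the added files.
class XmlDeserializer
  def call(params)
    Hash.from_xml(params.raw_payload)
  end
end

In the 1.3 routing DSL such an object would presumably be attached per topic, e.g. with deserializer XmlDeserializer.new inside a topic block.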
data/lib/karafka/patches/dry_configurable.rb
@@ -1,31 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Namespace for patches of external gems/libraries
-   module Patches
-     # Patch that will allow to use proc based lazy evaluated settings with Dry Configurable
-     # @see https://github.com/dry-rb/dry-configurable/blob/master/lib/dry/configurable.rb
-     module DryConfigurable
-       # We overwrite ::Dry::Configurable::Config to change on proc behaviour
-       # Unfortunately it does not provide an on call proc evaluation, so
-       # this feature had to be added here on demand/
-       # @param args Any arguments that DryConfigurable::Config accepts
-       def initialize(*args)
-         super
-
-         @config.each_key(&method(:rebuild))
-       end
-
-       private
-
-       # Method that rebuilds a given accessor, so when it consists a proc value, it will
-       # evaluate it upon return
-       # @param method_name [Symbol] name of an accessor that we want to rebuild
-       def rebuild(method_name)
-         define_singleton_method method_name do
-           super().is_a?(Proc) ? super().call : super()
-         end
-       end
-     end
-   end
- end
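This dry-configurable patch for lazily evaluated proc settings is gone; configuration now lives in the expanded setup/config.rb plus the new setup/dsl.rb and karafka.rb.erb template. A hedged sketch of the 1.3 setup shape implied by those files (class name and values are placeholders):

# frozen_string_literal: true

# Assumes the 1.3 app DSL from setup/dsl.rb and the karafka.rb.erb template.
class KarafkaApp < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    # Seed brokers are given as URIs with an explicit kafka:// scheme, as the
    # broker_schema? validation above already required in 1.1
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
  end
end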
data/lib/karafka/persistence/consumer.rb
@@ -1,25 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Persistence
-     # Persistence layer to store current thread messages consumer for further use
-     class Consumer
-       # Thread.current key under which we store current thread messages consumer
-       PERSISTENCE_SCOPE = :consumer
-
-       # @param consumer [Karafka::Connection::Consumer] messages consumer of
-       #   a current thread
-       # @return [Karafka::Connection::Consumer] persisted messages consumer
-       def self.write(consumer)
-         Thread.current[PERSISTENCE_SCOPE] = consumer
-       end
-
-       # @return [Karafka::Connection::Consumer] persisted messages consumer
-       # @raise [Karafka::Errors::MissingConsumer] raised when no thread messages consumer
-       #   but we try to use it anyway
-       def self.read
-         Thread.current[PERSISTENCE_SCOPE] || raise(Errors::MissingConsumer)
-       end
-     end
-   end
- end
data/lib/karafka/persistence/controller.rb
@@ -1,38 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Module used to provide a persistent cache layer for Karafka components that need to be
-   # shared inside of a same thread
-   module Persistence
-     # Module used to provide a persistent cache across batch requests for a given
-     # topic and partition to store some additional details when the persistent mode
-     # for a given topic is turned on
-     class Controller
-       # Thread.current scope under which we store controllers data
-       PERSISTENCE_SCOPE = :controllers
-
-       class << self
-         # @return [Hash] current thread persistence scope hash with all the controllers
-         def all
-           Thread.current[PERSISTENCE_SCOPE] ||= {}
-         end
-
-         # Used to build (if block given) and/or fetch a current controller instance that will be
-         # used to process messages from a given topic and partition
-         # @return [Karafka::BaseController] base controller descendant
-         # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
-         # @param partition [Integer] number of partition for which we want to cache
-         def fetch(topic, partition)
-           all[topic.id] ||= {}
-
-           # We always store a current instance
-           if topic.persistent
-             all[topic.id][partition] ||= yield
-           else
-             all[topic.id][partition] = yield
-           end
-         end
-       end
-     end
-   end
- end
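Its successors in 1.3 are persistence/consumers.rb and persistence/topics.rb from the list above, which keep the same idea: a per-thread cache keyed by topic and partition. A hedged illustration of that pattern follows (this is the general shape, not the literal 1.3 class):

# frozen_string_literal: true

# Generic per-thread, per-topic/partition cache in the spirit of the removed
# Persistence::Controller; names and API here are illustrative only.
module ThreadLocalCache
  SCOPE = :illustrative_cache

  # Fetches a cached instance for the topic/partition pair, building it with
  # the given block on first access within the current thread.
  def self.fetch(topic_id, partition)
    store = Thread.current[SCOPE] ||= Hash.new { |hash, key| hash[key] = {} }
    store[topic_id][partition] ||= yield
  end
end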
data/lib/karafka/schemas/config.rb
@@ -1,21 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Namespace for all the validation schemas that we use to check input
-   module Schemas
-     # Regexp for validating format of groups and topics
-     TOPIC_REGEXP = /\A(\w|\-|\.)+\z/
-
-     # Schema with validation rules for Karafka configuration details
-     # @note There are many more configuration options inside of the
-     #   Karafka::Setup::Config model, but we don't validate them here as they are
-     #   validated per each route (topic + consumer_group) because they can be overwritten,
-     #   so we validate all of that once all the routes are defined and ready
-     Config = Dry::Validation.Schema do
-       required(:client_id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:consumer_mapper)
-       required(:topic_mapper)
-       optional(:backend).filled
-     end
-   end
- end
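All of the Schemas:: definitions removed below this point are replaced by the contracts/ namespace added in 1.3 (contracts/config.rb, consumer_group.rb, consumer_group_topic.rb, responder_usage.rb, server_cli_options.rb), i.e. a move from dry-validation 0.x schemas to 1.x contracts. A hedged sketch of what the config schema above roughly becomes in the contract style; the exact rules in contracts/config.rb may differ:

# frozen_string_literal: true

# Assumes dry-validation 1.x and a TOPIC_REGEXP constant in Karafka::Contracts,
# mirroring the removed schema rather than reproducing the real 1.3 file.
module Karafka
  module Contracts
    class Config < Dry::Validation::Contract
      params do
        required(:client_id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
        required(:consumer_mapper)
        required(:topic_mapper)
        optional(:backend).filled
      end
    end
  end
end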
data/lib/karafka/schemas/consumer_group.rb
@@ -1,65 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Schemas
-     # Schema for single full route (consumer group + topics) validation.
-     ConsumerGroup = Dry::Validation.Schema do
-       # Valid uri schemas of Kafka broker url
-       # The ||= is due to the behavior of require_all that resolves dependencies
-       # but someetimes loads things twice
-       URI_SCHEMES ||= %w[kafka kafka+ssl].freeze
-
-       configure do
-         config.messages_file = File.join(
-           Karafka.gem_root, 'config', 'errors.yml'
-         )
-
-         # Uri validator to check if uri is in a Karafka acceptable format
-         # @param uri [String] uri we want to validate
-         # @return [Boolean] true if it is a valid uri, otherwise false
-         def broker_schema?(uri)
-           uri = URI.parse(uri)
-           URI_SCHEMES.include?(uri.scheme) && uri.port
-         rescue URI::InvalidURIError
-           return false
-         end
-       end
-
-       required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:seed_brokers).filled { each(:broker_schema?) }
-       required(:session_timeout).filled { int? | float? }
-       required(:pause_timeout).filled { (int? | float?) & gteq?(0) }
-       required(:offset_commit_interval) { int? | float? }
-       required(:offset_commit_threshold).filled(:int?)
-       required(:offset_retention_time) { none?.not > int? }
-       required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
-       required(:connect_timeout).filled { (int? | float?) & gt?(0) }
-       required(:socket_timeout).filled { (int? | float?) & gt?(0) }
-       required(:min_bytes).filled(:int?, gt?: 0)
-       required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
-       required(:batch_fetching).filled(:bool?)
-       required(:topics).filled { each { schema(ConsumerGroupTopic) } }
-
-       # Max wait time cannot exceed socket_timeout - wouldn't make sense
-       rule(
-         max_wait_time_limit: %i[max_wait_time socket_timeout]
-       ) do |max_wait_time, socket_timeout|
-         socket_timeout.int? > max_wait_time.lteq?(value(:socket_timeout))
-       end
-
-       %i[
-         ssl_ca_cert
-         ssl_ca_cert_file_path
-         ssl_client_cert
-         ssl_client_cert_key
-         sasl_plain_authzid
-         sasl_plain_username
-         sasl_plain_password
-         sasl_gssapi_principal
-         sasl_gssapi_keytab
-       ].each do |encryption_attribute|
-         optional(encryption_attribute).maybe(:str?)
-       end
-     end
-   end
- end
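On the routing side, the per-consumer-group settings validated above are what the 1.3 routing DSL (routing/builder.rb, routing/proxy.rb) collects before handing them to the new contracts. A hedged sketch of a consumer group definition under 1.3; group, topic and consumer names are invented:

# frozen_string_literal: true

# Assumes the 1.3 routing DSL (consumer_groups.draw / consumer_group / topic /
# consumer); KarafkaApp and EventsConsumer are placeholders.
KarafkaApp.consumer_groups.draw do
  consumer_group :example_group do
    batch_fetching true

    topic :events do
      consumer EventsConsumer
      batch_consuming true
    end
  end
end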
data/lib/karafka/schemas/consumer_group_topic.rb
@@ -1,18 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Schemas
-     # Consumer group topic validation rules
-     ConsumerGroupTopic = Dry::Validation.Schema do
-       required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:backend).filled(included_in?: %i[inline sidekiq])
-       required(:controller).filled
-       required(:parser).filled
-       required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
-       required(:start_from_beginning).filled(:bool?)
-       required(:batch_consuming).filled(:bool?)
-       required(:persistent).filled(:bool?)
-     end
-   end
- end
data/lib/karafka/schemas/responder_usage.rb
@@ -1,39 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Schemas
-     # Validator to check responder topic usage
-     ResponderUsageTopic = Dry::Validation.Schema do
-       required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:required).filled(:bool?)
-       required(:multiple_usage).filled(:bool?)
-       required(:usage_count).filled(:int?, gteq?: 0)
-       required(:registered).filled(eql?: true)
-       required(:async).filled(:bool?)
-
-       rule(
-         required_usage: %i[required usage_count]
-       ) do |required, usage_count|
-         required.true? > usage_count.gteq?(1)
-       end
-
-       rule(
-         multiple_usage_permission: %i[multiple_usage usage_count]
-       ) do |multiple_usage, usage_count|
-         usage_count.gt?(1) > multiple_usage.true?
-       end
-
-       rule(
-         multiple_usage_block: %i[multiple_usage usage_count]
-       ) do |multiple_usage, usage_count|
-         multiple_usage.false? > usage_count.lteq?(1)
-       end
-     end
-
-     # Validator to check that everything in a responder flow matches responder rules
-     ResponderUsage = Dry::Validation.Schema do
-       required(:used_topics) { filled? > each { schema(ResponderUsageTopic) } }
-       required(:registered_topics) { filled? > each { schema(ResponderUsageTopic) } }
-     end
-   end
- end
data/lib/karafka/schemas/server_cli_options.rb
@@ -1,43 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Schemas
-     # Schema for validating correctness of the server cli command options
-     # We validate some basics + the list of consumer_groups on which we want to use, to make
-     # sure that all of them are defined, plus that a pidfile does not exist
-     ServerCliOptions = Dry::Validation.Schema do
-       configure do
-         option :consumer_groups
-
-         def self.messages
-           super.merge(
-             en: {
-               errors: {
-                 consumer_groups_inclusion: 'Unknown consumer group.',
-                 pid_existence: 'Pidfile already exists.'
-               }
-             }
-           )
-         end
-       end
-
-       optional(:pid).filled(:str?)
-       optional(:daemon).filled(:bool?)
-       optional(:consumer_groups).filled(:array?)
-
-       validate(consumer_groups_inclusion: :consumer_groups) do |consumer_groups|
-         # If there were no consumer_groups declared in the server cli, it means that we will
-         # run all of them and no need to validate them here at all
-         if consumer_groups.nil?
-           true
-         else
-           (consumer_groups - Karafka::Routing::Builder.instance.map(&:name)).empty?
-         end
-       end
-
-       validate(pid_existence: :pid) do |pid|
-         pid ? !File.exist?(pid) : true
-       end
-     end
-   end
- end
- end
@@ -1,35 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- module Setup
5
- # Configurators module is used to enclose all the external dependencies configurations
6
- class Configurators
7
- # Karafka has come components that it relies on (like Sidekiq)
8
- # We need to configure all of them only when the framework was set up.
9
- # Any class that descends from this one will be automatically invoked upon setup (after it)
10
- # @example Configure an Example class
11
- # class ExampleConfigurator < Base
12
- # def setup
13
- # ExampleClass.logger = Karafka.logger
14
- # ExampleClass.redis = config.redis
15
- # end
16
- # end
17
- class Base
18
- extend ActiveSupport::DescendantsTracker
19
-
20
- attr_reader :config
21
-
22
- # @param config [Karafka::Config] config instance
23
- # @return [Karafka::Config::Base] configurator for a given component
24
- def initialize(config)
25
- @config = config
26
- end
27
-
28
- # This method needs to be implemented in a subclass
29
- def setup
30
- raise NotImplementedError
31
- end
32
- end
33
- end
34
- end
35
- end