karafka 1.1.0 → 1.3.0

Files changed (114)
  1. checksums.yaml +5 -5
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/.travis.yml +35 -16
  12. data/CHANGELOG.md +151 -2
  13. data/CONTRIBUTING.md +6 -7
  14. data/Gemfile +3 -3
  15. data/Gemfile.lock +96 -70
  16. data/README.md +29 -23
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +25 -0
  19. data/config/errors.yml +38 -5
  20. data/karafka.gemspec +19 -10
  21. data/lib/karafka.rb +15 -12
  22. data/lib/karafka/app.rb +19 -18
  23. data/lib/karafka/attributes_map.rb +15 -14
  24. data/lib/karafka/backends/inline.rb +1 -2
  25. data/lib/karafka/base_consumer.rb +57 -0
  26. data/lib/karafka/base_responder.rb +72 -31
  27. data/lib/karafka/cli.rb +1 -1
  28. data/lib/karafka/cli/console.rb +11 -9
  29. data/lib/karafka/cli/flow.rb +0 -1
  30. data/lib/karafka/cli/info.rb +3 -1
  31. data/lib/karafka/cli/install.rb +29 -8
  32. data/lib/karafka/cli/server.rb +11 -7
  33. data/lib/karafka/code_reloader.rb +67 -0
  34. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +67 -24
  35. data/lib/karafka/connection/batch_delegator.rb +51 -0
  36. data/lib/karafka/connection/builder.rb +16 -0
  37. data/lib/karafka/connection/client.rb +117 -0
  38. data/lib/karafka/connection/listener.rb +37 -17
  39. data/lib/karafka/connection/message_delegator.rb +36 -0
  40. data/lib/karafka/consumers/callbacks.rb +71 -0
  41. data/lib/karafka/consumers/includer.rb +63 -0
  42. data/lib/karafka/consumers/metadata.rb +10 -0
  43. data/lib/karafka/consumers/responders.rb +24 -0
  44. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  45. data/lib/karafka/contracts.rb +10 -0
  46. data/lib/karafka/contracts/config.rb +21 -0
  47. data/lib/karafka/contracts/consumer_group.rb +206 -0
  48. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  49. data/lib/karafka/contracts/responder_usage.rb +54 -0
  50. data/lib/karafka/contracts/server_cli_options.rb +29 -0
  51. data/lib/karafka/errors.rb +23 -15
  52. data/lib/karafka/fetcher.rb +6 -12
  53. data/lib/karafka/helpers/class_matcher.rb +19 -9
  54. data/lib/karafka/helpers/config_retriever.rb +3 -3
  55. data/lib/karafka/helpers/inflector.rb +26 -0
  56. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  57. data/lib/karafka/instrumentation/logger.rb +57 -0
  58. data/lib/karafka/instrumentation/monitor.rb +70 -0
  59. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  60. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  61. data/lib/karafka/params/builders/metadata.rb +33 -0
  62. data/lib/karafka/params/builders/params.rb +36 -0
  63. data/lib/karafka/params/builders/params_batch.rb +25 -0
  64. data/lib/karafka/params/metadata.rb +35 -0
  65. data/lib/karafka/params/params.rb +35 -95
  66. data/lib/karafka/params/params_batch.rb +38 -18
  67. data/lib/karafka/patches/ruby_kafka.rb +25 -12
  68. data/lib/karafka/persistence/client.rb +29 -0
  69. data/lib/karafka/persistence/consumers.rb +45 -0
  70. data/lib/karafka/persistence/topics.rb +48 -0
  71. data/lib/karafka/process.rb +5 -8
  72. data/lib/karafka/responders/builder.rb +15 -14
  73. data/lib/karafka/responders/topic.rb +6 -8
  74. data/lib/karafka/routing/builder.rb +37 -9
  75. data/lib/karafka/routing/consumer_group.rb +1 -1
  76. data/lib/karafka/routing/consumer_mapper.rb +10 -9
  77. data/lib/karafka/routing/proxy.rb +10 -1
  78. data/lib/karafka/routing/router.rb +1 -1
  79. data/lib/karafka/routing/topic.rb +8 -12
  80. data/lib/karafka/routing/topic_mapper.rb +16 -18
  81. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  82. data/lib/karafka/serialization/json/serializer.rb +31 -0
  83. data/lib/karafka/server.rb +45 -24
  84. data/lib/karafka/setup/config.rb +95 -37
  85. data/lib/karafka/setup/configurators/water_drop.rb +12 -5
  86. data/lib/karafka/setup/dsl.rb +21 -0
  87. data/lib/karafka/status.rb +7 -3
  88. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.erb} +2 -2
  89. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  90. data/lib/karafka/templates/karafka.rb.erb +92 -0
  91. data/lib/karafka/version.rb +1 -1
  92. metadata +126 -57
  93. metadata.gz.sig +0 -0
  94. data/.github/ISSUE_TEMPLATE.md +0 -2
  95. data/lib/karafka/base_controller.rb +0 -60
  96. data/lib/karafka/connection/consumer.rb +0 -121
  97. data/lib/karafka/connection/processor.rb +0 -61
  98. data/lib/karafka/controllers/callbacks.rb +0 -54
  99. data/lib/karafka/controllers/includer.rb +0 -51
  100. data/lib/karafka/controllers/responders.rb +0 -19
  101. data/lib/karafka/loader.rb +0 -29
  102. data/lib/karafka/logger.rb +0 -53
  103. data/lib/karafka/monitor.rb +0 -98
  104. data/lib/karafka/parsers/json.rb +0 -38
  105. data/lib/karafka/patches/dry_configurable.rb +0 -31
  106. data/lib/karafka/persistence/consumer.rb +0 -25
  107. data/lib/karafka/persistence/controller.rb +0 -38
  108. data/lib/karafka/schemas/config.rb +0 -21
  109. data/lib/karafka/schemas/consumer_group.rb +0 -65
  110. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  111. data/lib/karafka/schemas/responder_usage.rb +0 -39
  112. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  113. data/lib/karafka/setup/configurators/base.rb +0 -35
  114. data/lib/karafka/templates/karafka.rb.example +0 -41
data/lib/karafka/routing/proxy.rb
@@ -14,22 +14,31 @@ module Karafka
       !
     ].freeze
 
+    private_constant :IGNORED_POSTFIXES
+
     # @param target [Object] target object to which we proxy any DSL call
-    # @yield Evaluates block in the proxy context
+    # @param block [Proc] block that we want to evaluate in the proxy context
     def initialize(target, &block)
      @target = target
       instance_eval(&block)
     end
 
     # Translates the no "=" DSL of routing into elements assignments on target
+    # @param method_name [Symbol] name of the missing method
+    # @param arguments [Array] array with it's arguments
+    # @param block [Proc] block provided to the method
     def method_missing(method_name, *arguments, &block)
       return super unless respond_to_missing?(method_name)
+
       @target.public_send(:"#{method_name}=", *arguments, &block)
     end
 
     # Tells whether or not a given element exists on the target
+    # @param method_name [Symbol] name of the missing method
+    # @param include_private [Boolean] should we include private in the check as well
     def respond_to_missing?(method_name, include_private = false)
       return false if IGNORED_POSTFIXES.any? { |postfix| method_name.to_s.end_with?(postfix) }
+
       @target.respond_to?(:"#{method_name}=", include_private) || super
     end
   end
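
This proxy is what powers the writer-less routing DSL: a bare `consumer SomeClass` call inside a routing block is translated by `method_missing` into a `consumer=` assignment on the proxied target. A minimal standalone sketch of the mechanism (MiniProxy and MiniTopic are illustrative names, not Karafka classes):

    class MiniProxy
      def initialize(target, &block)
        @target = target
        instance_eval(&block)
      end

      # `consumer :users_consumer` becomes `@target.consumer = :users_consumer`
      def method_missing(method_name, *arguments, &block)
        return super unless respond_to_missing?(method_name)

        @target.public_send(:"#{method_name}=", *arguments, &block)
      end

      def respond_to_missing?(method_name, include_private = false)
        @target.respond_to?(:"#{method_name}=", include_private) || super
      end
    end

    MiniTopic = Struct.new(:consumer)
    topic = MiniTopic.new
    MiniProxy.new(topic) { consumer :users_consumer }
    topic.consumer #=> :users_consumer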
data/lib/karafka/routing/router.rb
@@ -3,7 +3,7 @@
 module Karafka
   # Namespace for all elements related to requests routing
   module Routing
-    # Karafka framework Router for routing incoming messages to proper controllers
+    # Karafka framework Router for routing incoming messages to proper consumers
     # @note Since Kafka does not provide namespaces or modules for topics, they all have "flat"
     #   structure so all the routes are being stored in a single level array
     module Router
data/lib/karafka/routing/topic.rb
@@ -7,9 +7,12 @@ module Karafka
     # It is a part of Karafka's DSL
     class Topic
       extend Helpers::ConfigRetriever
+      extend Forwardable
 
       attr_reader :id, :consumer_group
-      attr_accessor :controller
+      attr_accessor :consumer
+
+      def_delegator :@consumer_group, :batch_fetching
 
       # @param [String, Symbol] name of a topic on which we want to listen
       # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
@@ -19,7 +22,7 @@ module Karafka
         @attributes = {}
         # @note We use identifier related to the consumer group that owns a topic, because from
         #   Karafka 0.6 we can handle multiple Kafka instances with the same process and we can
-        #   have same topic name across mutliple Kafkas
+        #   have same topic name across multiple Kafkas
         @id = "#{consumer_group.id}_#{@name}"
       end
 
@@ -29,20 +32,13 @@ module Karafka
       #   example for Sidekiq
       def build
         Karafka::AttributesMap.topic.each { |attr| send(attr) }
-        controller&.topic = self
         self
       end
 
       # @return [Class, nil] Class (not an instance) of a responder that should respond from
-      #   controller back to Kafka (usefull for piping dataflows)
+      #   consumer back to Kafka (useful for piping data flows)
       def responder
-        @responder ||= Karafka::Responders::Builder.new(controller).build
-      end
-
-      # @return [Class] Parser class (not instance) that we want to use to unparse Kafka messages
-      # @note If not provided - will use Json as default
-      def parser
-        @parser ||= Karafka::Parsers::Json
+        @responder ||= Karafka::Responders::Builder.new(consumer).build
       end
 
       Karafka::AttributesMap.topic.each do |attribute|
@@ -58,7 +54,7 @@ module Karafka
 
         Hash[map].merge!(
           id: id,
-          controller: controller
+          consumer: consumer
         )
       end
     end
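
Together with the `attr_accessor :consumer` change above, this is the routing-facing side of the 1.1 → 1.3 controller-to-consumer rename. In an app's routing block the migration looks roughly like this (class names are placeholders):

    Karafka::App.consumer_groups.draw do
      consumer_group :example_group do
        topic :users do
          # Karafka 1.1:
          #   controller UsersController
          # Karafka 1.3:
          consumer UsersConsumer
        end
      end
    end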
data/lib/karafka/routing/topic_mapper.rb
@@ -8,7 +8,7 @@ module Karafka
    #   routes and responders
    #
    # @example Mapper for mapping prefixed topics
-   #   module MyMapper
+   #   class MyMapper
    #     PREFIX = "my_user_name."
    #
    #     def incoming(topic)
@@ -21,7 +21,7 @@ module Karafka
    #     end
    #   end
    #
    # @example Mapper for replacing "." with "_" in topic names
-   #   module MyMapper
+   #   class MyMapper
    #     PREFIX = "my_user_name."
    #
    #     def incoming(topic)
@@ -32,23 +32,21 @@ module Karafka
    #     topic.to_s.gsub('_', '.')
    #   end
    # end
-   module TopicMapper
-     class << self
-       # @param topic [String, Symbol] topic
-       # @return [String, Symbol] same topic as on input
-       # @example
-       #   incoming('topic_name') #=> 'topic_name'
-       def incoming(topic)
-         topic
-       end
+   class TopicMapper
+     # @param topic [String, Symbol] topic
+     # @return [String, Symbol] same topic as on input
+     # @example
+     #   incoming('topic_name') #=> 'topic_name'
+     def incoming(topic)
+       topic
+     end
 
-       # @param topic [String, Symbol] topic
-       # @return [String, Symbol] same topic as on input
-       # @example
-       #   outgoing('topic_name') #=> 'topic_name'
-       def outgoing(topic)
-         topic
-       end
+     # @param topic [String, Symbol] topic
+     # @return [String, Symbol] same topic as on input
+     # @example
+     #   outgoing('topic_name') #=> 'topic_name'
+     def outgoing(topic)
+       topic
     end
   end
 end
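
Because `TopicMapper` is now a plain class rather than a module of singleton methods, a custom mapper is configured as an instance. A sketch assembled from the @example comments above (PrefixedTopicMapper is a hypothetical name):

    class PrefixedTopicMapper
      PREFIX = 'my_user_name.'

      def incoming(topic)
        topic.to_s.gsub(PREFIX, '')
      end

      def outgoing(topic)
        "#{PREFIX}#{topic}"
      end
    end

    Karafka::App.setup do |config|
      config.topic_mapper = PrefixedTopicMapper.new
    end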
data/lib/karafka/serialization/json/deserializer.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Module for all supported by default serialization and deserialization ways
+  module Serialization
+    # Namespace for json ser/der
+    module Json
+      # Default Karafka Json deserializer for loading JSON data
+      class Deserializer
+        # @param params [Karafka::Params::Params] Full params object that we want to deserialize
+        # @return [Hash] hash with deserialized JSON data
+        # @example
+        #   params = {
+        #     'payload' => "{\"a\":1}",
+        #     'topic' => 'my-topic',
+        #     'headers' => { 'message_type' => :test }
+        #   }
+        #   Deserializer.call(params) #=> { 'a' => 1 }
+        def call(params)
+          ::MultiJson.load(params['payload'])
+        rescue ::MultiJson::ParseError => e
+          raise ::Karafka::Errors::DeserializationError, e
+        end
+      end
+    end
+  end
+end
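
Custom deserializers follow the same single-method contract: receive the full params object, return the deserialized payload. A hedged sketch for an XML topic (XmlDeserializer, XmlEventsConsumer and the MultiXml dependency are all illustrative; the per-topic `deserializer` call assumes the 1.3 routing attribute of that name):

    class XmlDeserializer
      # params is the full Karafka params object, as in the JSON deserializer above
      def call(params)
        ::MultiXml.parse(params['payload'])
      end
    end

    Karafka::App.consumer_groups.draw do
      topic :xml_events do
        consumer XmlEventsConsumer
        deserializer XmlDeserializer.new
      end
    end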
data/lib/karafka/serialization/json/serializer.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Module for all supported by default serialization and deserialization ways
+  module Serialization
+    module Json
+      # Default Karafka Json serializer for serializing data
+      class Serializer
+        # @param content [Object] any object that we want to convert to a json string
+        # @return [String] Valid JSON string containing serialized data
+        # @raise [Karafka::Errors::SerializationError] raised when we don't have a way to
+        #   serialize provided data to json
+        # @note When string is passed to this method, we assume that it is already a json
+        #   string and we don't serialize it again. This allows us to serialize data before
+        #   it is being forwarded to this serializer if we want to have a custom (not that simple)
+        #   json serialization
+        #
+        # @example From an ActiveRecord object
+        #   Serializer.call(Repository.first) #=> "{\"repository\":{\"id\":\"04b504e0\"}}"
+        # @example From a string (no changes)
+        #   Serializer.call("{\"a\":1}") #=> "{\"a\":1}"
+        def call(content)
+          return content if content.is_a?(String)
+          return content.to_json if content.respond_to?(:to_json)
+
+          raise Karafka::Errors::SerializationError, content
+        end
+      end
+    end
+  end
+end
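
The String shortcut documented in the @note means payloads can be pre-serialized upstream and passed through untouched:

    serializer = Karafka::Serialization::Json::Serializer.new

    serializer.call(a: 1)      #=> "{\"a\":1}" (via #to_json)
    serializer.call('{"a":1}') #=> "{\"a\":1}" (already JSON, returned as-is)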
data/lib/karafka/server.rb
@@ -3,6 +3,18 @@
 module Karafka
   # Karafka consuming server class
   class Server
+    @consumer_threads = Concurrent::Array.new
+
+    # How long should we sleep between checks on shutting down consumers
+    SUPERVISION_SLEEP = 0.1
+    # What system exit code should we use when we terminated forcefully
+    FORCEFUL_EXIT_CODE = 2
+    # This factor allows us to calculate how many times we have to sleep before
+    # a forceful shutdown
+    SUPERVISION_CHECK_FACTOR = (1 / SUPERVISION_SLEEP)
+
+    private_constant :SUPERVISION_SLEEP, :FORCEFUL_EXIT_CODE, :SUPERVISION_CHECK_FACTOR
+
     class << self
       # Set of consuming threads. Each consumer thread contains a single consumer
       attr_accessor :consumer_threads
@@ -12,11 +24,10 @@ module Karafka
 
       # Method which runs app
      def run
-        @consumer_threads = Concurrent::Array.new
-        bind_on_sigint
-        bind_on_sigquit
-        bind_on_sigterm
-        start_supervised
+        process.on_sigint { stop_supervised }
+        process.on_sigquit { stop_supervised }
+        process.on_sigterm { stop_supervised }
+        run_supervised
       end
 
       # @return [Array<String>] array with names of consumer groups that should be consumed in a
@@ -30,32 +41,42 @@ module Karafka
 
       # @return [Karafka::Process] process wrapper instance used to catch system signal calls
       def process
-        Karafka::Process.instance
+        Karafka::App.config.internal.process
       end
 
-      # What should happen when we decide to quit with sigint
-      def bind_on_sigint
-        process.on_sigint { Karafka::App.stop! }
+      # Starts Karafka with a supervision
+      # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to
+      #   finish loop (and it won't happen until we explicitly want to stop)
+      def run_supervised
+        process.supervise
+        Karafka::App.run!
+        Karafka::App.config.internal.fetcher.call
       end
 
-      # What should happen when we decide to quit with sigquit
-      def bind_on_sigquit
-        process.on_sigquit { Karafka::App.stop! }
-      end
+      # Stops Karafka with a supervision (as long as there is a shutdown timeout)
+      # If consumers won't stop in a given time frame, it will force them to exit
+      def stop_supervised
+        Karafka::App.stop!
 
-      # What should happen when we decide to quit with sigterm
-      def bind_on_sigterm
-        process.on_sigterm { Karafka::App.stop! }
-      end
+        # We check from time to time (for the timeout period) if all the threads finished
+        # their work and if so, we can just return and normal shutdown process will take place
+        (Karafka::App.config.shutdown_timeout * SUPERVISION_CHECK_FACTOR).to_i.times do
+          if consumer_threads.count(&:alive?).zero?
+            Thread.new { Karafka.monitor.instrument('app.stopped') }.join
+            return
+          end
 
-      # Starts Karafka with a supervision
-      # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to
-      #   finish loop (and it won't happen until we explicitily want to stop)
-      def start_supervised
-        process.supervise do
-          Karafka::App.run!
-          Karafka::Fetcher.new.fetch_loop
+          sleep SUPERVISION_SLEEP
         end
+
+        raise Errors::ForcefulShutdownError
+      rescue Errors::ForcefulShutdownError => e
+        Thread.new { Karafka.monitor.instrument('app.stopping.error', error: e) }.join
+        # We're done waiting, lets kill them!
+        consumer_threads.each(&:terminate)
+
+        # exit! is not within the instrumentation as it would not trigger due to exit
+        Kernel.exit! FORCEFUL_EXIT_CODE
       end
     end
   end
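
The arithmetic behind `stop_supervised`: with `SUPERVISION_SLEEP` at 0.1 s, `SUPERVISION_CHECK_FACTOR` comes out to 10, so the new default `shutdown_timeout` of 60 (see setup/config.rb below) yields 600 liveness checks before the forceful path triggers:

    # Illustrative numbers only, mirroring the constants above
    SUPERVISION_SLEEP = 0.1
    SUPERVISION_CHECK_FACTOR = (1 / SUPERVISION_SLEEP) #=> 10.0
    shutdown_timeout = 60

    checks = (shutdown_timeout * SUPERVISION_CHECK_FACTOR).to_i #=> 600
    # 600 checks with a ~0.1 s sleep between them gives up to ~60 s of graceful
    # waiting, after which the threads are terminated and the process exits with code 2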
data/lib/karafka/setup/config.rb
@@ -8,12 +8,17 @@ module Karafka
   # @note If you want to do some configurations after all of this is done, please add to
   #   karafka/config a proper file (needs to inherit from Karafka::Setup::Configurators::Base
   #   and implement setup method) after that everything will happen automatically
-  # @note This config object allows to create a 1 level nestings (nodes) only. This should be
+  # @note This config object allows to create a 1 level nesting (nodes) only. This should be
   #   enough and will still keep the code simple
   # @see Karafka::Setup::Configurators::Base for more details about configurators api
   class Config
     extend Dry::Configurable
 
+    # Contract for checking the config provided by the user
+    CONTRACT = Karafka::Contracts::Config.new.freeze
+
+    private_constant :CONTRACT
+
     # Available settings
     # option client_id [String] kafka client_id - used to provide
     #   default Kafka groups namespaces and identify that app in kafka
@@ -21,19 +26,23 @@ module Karafka
     # What backend do we want to use to process messages
     setting :backend, :inline
     # option logger [Instance] logger that we want to use
-    setting :logger, -> { ::Karafka::Logger.instance }
+    setting :logger, ::Karafka::Instrumentation::Logger.new
     # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
-    setting :monitor, -> { ::Karafka::Monitor.instance }
+    setting :monitor, ::Karafka::Instrumentation::Monitor.new
     # Mapper used to remap consumer groups ids, so in case users migrate from other tools
     # or they need to maintain their own internal consumer group naming conventions, they
     # can easily do it, replacing the default client_id + consumer name pattern concept
-    setting :consumer_mapper, -> { Routing::ConsumerMapper }
-    # Mapper used to remap names of topics, so we can have a clean internal topic namings
+    setting :consumer_mapper, Routing::ConsumerMapper.new
+    # Mapper used to remap names of topics, so we can have a clean internal topic naming
     # despite using any Kafka provider that uses namespacing, etc
     # It needs to implement two methods:
     #   - #incoming - for remapping from the incoming message to our internal format
     #   - #outgoing - for remapping from internal topic name into outgoing message
-    setting :topic_mapper, -> { Routing::TopicMapper }
+    setting :topic_mapper, Routing::TopicMapper.new
+    # Default serializer for converting whatever we want to send to kafka to json
+    setting :serializer, Karafka::Serialization::Json::Serializer.new
+    # Default deserializer for converting incoming data into ruby objects
+    setting :deserializer, Karafka::Serialization::Json::Deserializer.new
     # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
     # @note Fetching does not equal consuming, see batch_consuming description for details
     setting :batch_fetching, true
@@ -41,25 +50,28 @@ module Karafka
     # #params_batch will contain params received from Kafka (may be more than 1) so we can
     # process them in batches
     setting :batch_consuming, false
-    # Should we operate in a single controller instance across multiple batches of messages,
-    # from the same partition or should we build a new instance for each incoming batch.
-    # Disabling that can be useful when you want to build a new controller instance for each
-    # incoming batch. It's disabled by default, not to create more objects that needed on
-    # each batch
-    setting :persistent, true
+    # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
+    #   longer wait for the consumers to stop gracefully but instead we force terminate
+    #   everything.
+    setting :shutdown_timeout, 60
 
     # option kafka [Hash] - optional - kafka configuration options
     setting :kafka do
       # Array with at least one host
-      setting :seed_brokers
+      setting :seed_brokers, %w[kafka://127.0.0.1:9092]
       # option session_timeout [Integer] the number of seconds after which, if a client
       #   hasn't contacted the Kafka cluster, it will be kicked out of the group.
       setting :session_timeout, 30
       # Time that a given partition will be paused from fetching messages, when message
      # consumption fails. It allows us to process other partitions, while the error is being
       # resolved and also "slows" things down, so it prevents from "eating" up all messages and
-      # consuming them with failed code
+      # consuming them with failed code. Use `nil` if you want to pause forever and never retry.
      setting :pause_timeout, 10
+      # option pause_max_timeout [Integer, nil] the maximum number of seconds to pause for,
+      #   or `nil` if no maximum should be enforced.
+      setting :pause_max_timeout, nil
+      # option pause_exponential_backoff [Boolean] whether to enable exponential backoff
+      setting :pause_exponential_backoff, false
       # option offset_commit_interval [Integer] the interval between offset commits,
       #   in seconds.
       setting :offset_commit_interval, 10
@@ -70,6 +82,13 @@ module Karafka
       # option heartbeat_interval [Integer] the interval between heartbeats; must be less
       #   than the session window.
       setting :heartbeat_interval, 10
+      # option offset_retention_time [Integer] The length of the retention window, known as
+      #   offset retention time
+      setting :offset_retention_time, nil
+      # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
+      #   are stored for further processing. Note, that each item in the queue represents a
+      #   response from a single broker
+      setting :fetcher_max_queue_size, 10
       # option max_bytes_per_partition [Integer] the maximum amount of data fetched
       #   from a single partition at a time.
       setting :max_bytes_per_partition, 1_048_576
@@ -79,6 +98,9 @@ module Karafka
       #   returning messages from the server; if `max_wait_time` is reached, this
       #   is ignored.
       setting :min_bytes, 1
+      # option max_bytes [Integer] the maximum number of bytes to read before returning messages
+      #   from each broker.
+      setting :max_bytes, 10_485_760
       # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
       #   wait before returning data from a single message fetch. By setting this high you also
       #   increase the fetching throughput - and by setting it low you set a bound on latency.
@@ -93,9 +115,6 @@ module Karafka
       # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
       #   Kafka cluster that went down (in seconds)
       setting :reconnect_timeout, 5
-      # option offset_retention_time [Integer] The length of the retention window, known as
-      #   offset retention time
-      setting :offset_retention_time, nil
       # option connect_timeout [Integer] Sets the number of seconds to wait while connecting to
       #   a broker for the first time. When ruby-kafka initializes, it needs to connect to at
       #   least one host.
@@ -107,55 +126,94 @@ module Karafka
       setting :socket_timeout, 30
 
       # SSL authentication related settings
-      # option ca_cert [String] SSL CA certificate
+      # option ca_cert [String, nil] SSL CA certificate
       setting :ssl_ca_cert, nil
-      # option ssl_ca_cert_file_path [String] SSL CA certificate file path
+      # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
       setting :ssl_ca_cert_file_path, nil
-      # option ssl_client_cert [String] SSL client certificate
+      # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
+      #   certificate store
+      setting :ssl_ca_certs_from_system, false
+      # option ssl_verify_hostname [Boolean] Verify the hostname for client certs
+      setting :ssl_verify_hostname, true
+      # option ssl_client_cert [String, nil] SSL client certificate
       setting :ssl_client_cert, nil
-      # option ssl_client_cert_key [String] SSL client certificate password
+      # option ssl_client_cert_key [String, nil] SSL client certificate password
       setting :ssl_client_cert_key, nil
-      # option sasl_gssapi_principal [String] sasl principal
+      # option sasl_gssapi_principal [String, nil] sasl principal
       setting :sasl_gssapi_principal, nil
-      # option sasl_gssapi_keytab [String] sasl keytab
+      # option sasl_gssapi_keytab [String, nil] sasl keytab
       setting :sasl_gssapi_keytab, nil
       # option sasl_plain_authzid [String] The authorization identity to use
       setting :sasl_plain_authzid, ''
-      # option sasl_plain_username [String] The username used to authenticate
+      # option sasl_plain_username [String, nil] The username used to authenticate
       setting :sasl_plain_username, nil
-      # option sasl_plain_password [String] The password used to authenticate
+      # option sasl_plain_password [String, nil] The password used to authenticate
       setting :sasl_plain_password, nil
+      # option sasl_scram_username [String, nil] The username used to authenticate
+      setting :sasl_scram_username, nil
+      # option sasl_scram_password [String, nil] The password used to authenticate
+      setting :sasl_scram_password, nil
+      # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
+      setting :sasl_scram_mechanism, nil
+      # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
+      setting :sasl_over_ssl, true
+      # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
+      setting :ssl_client_cert_chain, nil
+      # option ssl_client_cert_key_password [String, nil] the password required to read
+      #   the ssl_client_cert_key
+      setting :ssl_client_cert_key_password, nil
+      # @param sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
+      #   implements method token.
+      setting :sasl_oauth_token_provider, nil
+    end
+
+    # Namespace for internal settings that should not be modified
+    # It's a temporary step to "declassify" several things internally before we move to a
+    # non global state
+    setting :internal do
+      # option routing_builder [Karafka::Routing::Builder] builder instance
+      setting :routing_builder, Routing::Builder.new
+      # option status [Karafka::Status] app status
+      setting :status, Status.new
+      # option process [Karafka::Process] process status
+      # @note In the future, we need to have a single process representation for all the karafka
+      #   instances
+      setting :process, Process.new
+      # option fetcher [Karafka::Fetcher] fetcher instance
+      setting :fetcher, Fetcher.new
+      # option configurators [Array<Object>] all configurators that we want to run after
+      #   the setup
+      setting :configurators, [Configurators::WaterDrop.new]
     end
 
     class << self
-      # Configurating method
+      # Configuring method
       # @yield Runs a block of code providing a config singleton instance to it
       # @yieldparam [Karafka::Setup::Config] Karafka config instance
      def setup
-        configure do |config|
-          yield(config)
-        end
+        configure { |config| yield(config) }
       end
 
       # Everything that should be initialized after the setup
       # Components are in karafka/config directory and are all loaded one by one
       # If you want to configure a next component, please add a proper file to config dir
       def setup_components
-        Configurators::Base.descendants.each do |klass|
-          klass.new(config).setup
-        end
+        config
+          .internal
+          .configurators
+          .each { |configurator| configurator.call(config) }
       end
 
-      # Validate config based on ConfigurationSchema
+      # Validate config based on the config contract
       # @return [Boolean] true if configuration is valid
-      # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
-      #   doesn't match with ConfigurationSchema
+      # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
+      #   doesn't match with the config contract
       def validate!
-        validation_result = Karafka::Schemas::Config.call(config.to_h)
+        validation_result = CONTRACT.call(config.to_h)
 
         return true if validation_result.success?
 
-        raise Errors::InvalidConfiguration, validation_result.errors
+        raise Errors::InvalidConfigurationError, validation_result.errors.to_h
       end
     end
   end