karafka 1.3.0

Files changed (99)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +11 -0
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +69 -0
  10. data/.rspec +1 -0
  11. data/.ruby-gemset +1 -0
  12. data/.ruby-version +1 -0
  13. data/.travis.yml +36 -0
  14. data/CHANGELOG.md +520 -0
  15. data/CODE_OF_CONDUCT.md +46 -0
  16. data/CONTRIBUTING.md +41 -0
  17. data/Gemfile +12 -0
  18. data/Gemfile.lock +137 -0
  19. data/MIT-LICENCE +18 -0
  20. data/README.md +101 -0
  21. data/bin/karafka +19 -0
  22. data/certs/mensfeld.pem +25 -0
  23. data/config/errors.yml +39 -0
  24. data/karafka.gemspec +44 -0
  25. data/lib/karafka.rb +71 -0
  26. data/lib/karafka/app.rb +53 -0
  27. data/lib/karafka/attributes_map.rb +68 -0
  28. data/lib/karafka/backends/inline.rb +16 -0
  29. data/lib/karafka/base_consumer.rb +57 -0
  30. data/lib/karafka/base_responder.rb +226 -0
  31. data/lib/karafka/cli.rb +54 -0
  32. data/lib/karafka/cli/base.rb +78 -0
  33. data/lib/karafka/cli/console.rb +31 -0
  34. data/lib/karafka/cli/flow.rb +45 -0
  35. data/lib/karafka/cli/info.rb +31 -0
  36. data/lib/karafka/cli/install.rb +64 -0
  37. data/lib/karafka/cli/server.rb +71 -0
  38. data/lib/karafka/code_reloader.rb +67 -0
  39. data/lib/karafka/connection/api_adapter.rb +155 -0
  40. data/lib/karafka/connection/batch_delegator.rb +51 -0
  41. data/lib/karafka/connection/builder.rb +16 -0
  42. data/lib/karafka/connection/client.rb +117 -0
  43. data/lib/karafka/connection/listener.rb +71 -0
  44. data/lib/karafka/connection/message_delegator.rb +36 -0
  45. data/lib/karafka/consumers/callbacks.rb +71 -0
  46. data/lib/karafka/consumers/includer.rb +63 -0
  47. data/lib/karafka/consumers/metadata.rb +10 -0
  48. data/lib/karafka/consumers/responders.rb +24 -0
  49. data/lib/karafka/consumers/single_params.rb +15 -0
  50. data/lib/karafka/contracts.rb +10 -0
  51. data/lib/karafka/contracts/config.rb +21 -0
  52. data/lib/karafka/contracts/consumer_group.rb +206 -0
  53. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  54. data/lib/karafka/contracts/responder_usage.rb +54 -0
  55. data/lib/karafka/contracts/server_cli_options.rb +29 -0
  56. data/lib/karafka/errors.rb +51 -0
  57. data/lib/karafka/fetcher.rb +42 -0
  58. data/lib/karafka/helpers/class_matcher.rb +88 -0
  59. data/lib/karafka/helpers/config_retriever.rb +46 -0
  60. data/lib/karafka/helpers/inflector.rb +26 -0
  61. data/lib/karafka/helpers/multi_delegator.rb +32 -0
  62. data/lib/karafka/instrumentation/logger.rb +57 -0
  63. data/lib/karafka/instrumentation/monitor.rb +70 -0
  64. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  65. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  66. data/lib/karafka/params/builders/metadata.rb +33 -0
  67. data/lib/karafka/params/builders/params.rb +36 -0
  68. data/lib/karafka/params/builders/params_batch.rb +25 -0
  69. data/lib/karafka/params/metadata.rb +35 -0
  70. data/lib/karafka/params/params.rb +68 -0
  71. data/lib/karafka/params/params_batch.rb +61 -0
  72. data/lib/karafka/patches/ruby_kafka.rb +47 -0
  73. data/lib/karafka/persistence/client.rb +29 -0
  74. data/lib/karafka/persistence/consumers.rb +45 -0
  75. data/lib/karafka/persistence/topics.rb +48 -0
  76. data/lib/karafka/process.rb +60 -0
  77. data/lib/karafka/responders/builder.rb +36 -0
  78. data/lib/karafka/responders/topic.rb +55 -0
  79. data/lib/karafka/routing/builder.rb +89 -0
  80. data/lib/karafka/routing/consumer_group.rb +61 -0
  81. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  82. data/lib/karafka/routing/proxy.rb +46 -0
  83. data/lib/karafka/routing/router.rb +29 -0
  84. data/lib/karafka/routing/topic.rb +62 -0
  85. data/lib/karafka/routing/topic_mapper.rb +53 -0
  86. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  87. data/lib/karafka/serialization/json/serializer.rb +31 -0
  88. data/lib/karafka/server.rb +83 -0
  89. data/lib/karafka/setup/config.rb +221 -0
  90. data/lib/karafka/setup/configurators/water_drop.rb +36 -0
  91. data/lib/karafka/setup/dsl.rb +21 -0
  92. data/lib/karafka/status.rb +29 -0
  93. data/lib/karafka/templates/application_consumer.rb.erb +7 -0
  94. data/lib/karafka/templates/application_responder.rb.erb +11 -0
  95. data/lib/karafka/templates/karafka.rb.erb +92 -0
  96. data/lib/karafka/version.rb +7 -0
  97. data/log/.gitkeep +0 -0
  98. metadata +336 -0
  99. metadata.gz.sig +0 -0
data/lib/karafka/routing/proxy.rb
@@ -0,0 +1,46 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Routing
+     # Proxy is used as a translation layer in between the DSL and raw topic and consumer group
+     # objects.
+     class Proxy
+       attr_reader :target
+
+       # We proxy only methods that do not end with "?", "=" or "!", as we want a regular DSL
+       IGNORED_POSTFIXES = %w[
+         ?
+         =
+         !
+       ].freeze
+
+       private_constant :IGNORED_POSTFIXES
+
+       # @param target [Object] target object to which we proxy any DSL call
+       # @param block [Proc] block that we want to evaluate in the proxy context
+       def initialize(target, &block)
+         @target = target
+         instance_eval(&block)
+       end
+
+       # Translates the no "=" DSL of routing into element assignments on the target
+       # @param method_name [Symbol] name of the missing method
+       # @param arguments [Array] array with its arguments
+       # @param block [Proc] block provided to the method
+       def method_missing(method_name, *arguments, &block)
+         return super unless respond_to_missing?(method_name)
+
+         @target.public_send(:"#{method_name}=", *arguments, &block)
+       end
+
+       # Tells whether or not a given element exists on the target
+       # @param method_name [Symbol] name of the missing method
+       # @param include_private [Boolean] should we include private methods in the check as well
+       def respond_to_missing?(method_name, include_private = false)
+         return false if IGNORED_POSTFIXES.any? { |postfix| method_name.to_s.end_with?(postfix) }
+
+         @target.respond_to?(:"#{method_name}=", include_private) || super
+       end
+     end
+   end
+ end
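For illustration, a minimal sketch of the translation this proxy performs (the FakeTopic class is a hypothetical stand-in, not part of the gem): bare DSL calls inside the block become writer calls on the target.

    # Hypothetical target - anything exposing writers works
    class FakeTopic
      attr_accessor :consumer, :backend
    end

    topic = FakeTopic.new
    Karafka::Routing::Proxy.new(topic) do
      consumer :events_consumer # forwarded as topic.consumer = :events_consumer
      backend :inline           # forwarded as topic.backend = :inline
    end

    topic.consumer #=> :events_consumer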
data/lib/karafka/routing/router.rb
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Namespace for all elements related to requests routing
+   module Routing
+     # Karafka framework Router for routing incoming messages to proper consumers
+     # @note Since Kafka does not provide namespaces or modules for topics, they all have a "flat"
+     #   structure, so all the routes are stored in a single-level array
+     module Router
+       # Finds a proper topic based on the full topic id
+       # @param topic_id [String] proper topic id (already mapped, etc.) for which we want to find
+       #   the routing topic
+       # @return [Karafka::Routing::Route] proper route details
+       # @raise [Karafka::Errors::NonMatchingRouteError] raised if the topic name does not match
+       #   any route defined by the user using routes.draw
+       def find(topic_id)
+         App.consumer_groups.each do |consumer_group|
+           consumer_group.topics.each do |topic|
+             return topic if topic.id == topic_id
+           end
+         end
+
+         raise(Errors::NonMatchingRouteError, topic_id)
+       end
+
+       module_function :find
+     end
+   end
+ end
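Topic ids are composed as "#{consumer_group.id}_#{topic_name}" (see Routing::Topic#initialize below), so a lookup sketch could read as follows; the group and topic names here are placeholders:

    # Assuming routes were drawn for a group whose mapped id is 'example_app_group'
    # with a topic named 'events':
    topic = Karafka::Routing::Router.find('example_app_group_events')
    topic.consumer #=> the consumer class assigned in the routing

    # An id that matches no route raises Karafka::Errors::NonMatchingRouteError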
data/lib/karafka/routing/topic.rb
@@ -0,0 +1,62 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Routing
+     # Topic stores all the details on how we should interact with Kafka for a given topic
+     # It belongs to a consumer group, as from 0.6 all the topics can work in the same consumer group
+     # It is a part of Karafka's DSL
+     class Topic
+       extend Helpers::ConfigRetriever
+       extend Forwardable
+
+       attr_reader :id, :consumer_group
+       attr_accessor :consumer
+
+       def_delegator :@consumer_group, :batch_fetching
+
+       # @param name [String, Symbol] name of a topic on which we want to listen
+       # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
+       def initialize(name, consumer_group)
+         @name = name.to_s
+         @consumer_group = consumer_group
+         @attributes = {}
+         # @note We use an identifier related to the consumer group that owns the topic, because
+         #   from Karafka 0.6 we can handle multiple Kafka instances within the same process and
+         #   we can have the same topic name across multiple Kafkas
+         @id = "#{consumer_group.id}_#{@name}"
+       end
+
+       # Initializes default values for all the options that support defaults if their values
+       # were not yet specified. This needs to be done eagerly (it cannot be lazily loaded on
+       # first use) because everywhere except the Karafka server command, those would not be
+       # initialized in time - for example for Sidekiq
+       def build
+         Karafka::AttributesMap.topic.each { |attr| send(attr) }
+         self
+       end
+
+       # @return [Class, nil] Class (not an instance) of a responder that should respond from
+       #   the consumer back to Kafka (useful for piping data flows)
+       def responder
+         @responder ||= Karafka::Responders::Builder.new(consumer).build
+       end
+
+       Karafka::AttributesMap.topic.each do |attribute|
+         config_retriever_for(attribute)
+       end
+
+       # @return [Hash] hash with all the topic attributes
+       # @note This is being used when we validate the consumer_group and its topics
+       def to_h
+         map = Karafka::AttributesMap.topic.map do |attribute|
+           [attribute, public_send(attribute)]
+         end
+
+         Hash[map].merge!(
+           id: id,
+           consumer: consumer
+         )
+       end
+     end
+   end
+ end
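Because the id embeds the owning group's id, the same topic name routed in two consumer groups produces two distinct routing topics. A sketch (run inside a configured app; the resulting id strings depend on the configured consumer mapper and client_id, so they are illustrative only):

    group_a = Karafka::Routing::ConsumerGroup.new(:group_a)
    group_b = Karafka::Routing::ConsumerGroup.new(:group_b)

    Karafka::Routing::Topic.new(:events, group_a).id #=> e.g. "example_app_group_a_events"
    Karafka::Routing::Topic.new(:events, group_b).id #=> e.g. "example_app_group_b_events"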
data/lib/karafka/routing/topic_mapper.rb
@@ -0,0 +1,53 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Routing
+     # Default topic mapper that does not remap things
+     # A mapper can be used for Kafka providers that require namespaced topic names. Instead of
+     # being provider dependent, we can then define a mapper and internally use "pure" topic
+     # names in routes and responders
+     #
+     # @example Mapper for mapping prefixed topics
+     #   class MyMapper
+     #     PREFIX = "my_user_name."
+     #
+     #     def incoming(topic)
+     #       topic.to_s.gsub(PREFIX, '')
+     #     end
+     #
+     #     def outgoing(topic)
+     #       "#{PREFIX}#{topic}"
+     #     end
+     #   end
+     #
+     # @example Mapper for replacing "." with "_" in topic names
+     #   class MyMapper
+     #     PREFIX = "my_user_name."
+     #
+     #     def incoming(topic)
+     #       topic.to_s.gsub('.', '_')
+     #     end
+     #
+     #     def outgoing(topic)
+     #       topic.to_s.gsub('_', '.')
+     #     end
+     #   end
+     class TopicMapper
+       # @param topic [String, Symbol] topic
+       # @return [String, Symbol] same topic as on input
+       # @example
+       #   incoming('topic_name') #=> 'topic_name'
+       def incoming(topic)
+         topic
+       end
+
+       # @param topic [String, Symbol] topic
+       # @return [String, Symbol] same topic as on input
+       # @example
+       #   outgoing('topic_name') #=> 'topic_name'
+       def outgoing(topic)
+         topic
+       end
+     end
+   end
+ end
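To activate a custom mapper such as the prefixed-topic example from the comment above, assign it during setup; routes and responders then operate on the unprefixed names (a sketch, assuming the MyMapper class from the first @example):

    Karafka::App.setup do |config|
      config.topic_mapper = MyMapper.new
    end

    MyMapper.new.incoming('my_user_name.orders') #=> 'orders'
    MyMapper.new.outgoing('orders')              #=> 'my_user_name.orders'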
data/lib/karafka/serialization/json/deserializer.rb
@@ -0,0 +1,27 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Module for all the serialization and deserialization ways supported by default
+   module Serialization
+     # Namespace for JSON ser/der
+     module Json
+       # Default Karafka JSON deserializer for loading JSON data
+       class Deserializer
+         # @param params [Karafka::Params::Params] full params object that we want to deserialize
+         # @return [Hash] hash with deserialized JSON data
+         # @example
+         #   params = {
+         #     'payload' => "{\"a\":1}",
+         #     'topic' => 'my-topic',
+         #     'headers' => { 'message_type' => :test }
+         #   }
+         #   Deserializer.call(params) #=> { 'a' => 1 }
+         def call(params)
+           ::MultiJson.load(params['payload'])
+         rescue ::MultiJson::ParseError => e
+           raise ::Karafka::Errors::DeserializationError, e
+         end
+       end
+     end
+   end
+ end
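Only the raw 'payload' key of the params hash is consulted, as this short sketch shows:

    deserializer = Karafka::Serialization::Json::Deserializer.new

    deserializer.call('payload' => '{"a":1}') #=> { 'a' => 1 }

    # Malformed JSON is re-raised as a framework error:
    # deserializer.call('payload' => 'oops') # => Karafka::Errors::DeserializationError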
data/lib/karafka/serialization/json/serializer.rb
@@ -0,0 +1,31 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Module for all the serialization and deserialization ways supported by default
+   module Serialization
+     module Json
+       # Default Karafka JSON serializer for serializing data
+       class Serializer
+         # @param content [Object] any object that we want to convert to a JSON string
+         # @return [String] valid JSON string containing the serialized data
+         # @raise [Karafka::Errors::SerializationError] raised when we don't have a way to
+         #   serialize the provided data to JSON
+         # @note When a string is passed to this method, we assume that it is already a JSON
+         #   string and we don't serialize it again. This allows us to serialize data before
+         #   it is forwarded to this serializer if we want to have a custom (not that simple)
+         #   JSON serialization
+         #
+         # @example From an ActiveRecord object
+         #   Serializer.call(Repository.first) #=> "{\"repository\":{\"id\":\"04b504e0\"}}"
+         # @example From a string (no changes)
+         #   Serializer.call("{\"a\":1}") #=> "{\"a\":1}"
+         def call(content)
+           return content if content.is_a?(String)
+           return content.to_json if content.respond_to?(:to_json)
+
+           raise Karafka::Errors::SerializationError, content
+         end
+       end
+     end
+   end
+ end
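A quick behavioral sketch (assuming #to_json is available, which it is once the JSON ecosystem Karafka depends on is loaded): strings pass through untouched, other objects are serialized:

    serializer = Karafka::Serialization::Json::Serializer.new

    serializer.call(a: 1)      #=> "{\"a\":1}" (serialized via #to_json)
    serializer.call('{"a":1}') #=> "{\"a\":1}" (already a string - passed through)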
data/lib/karafka/server.rb
@@ -0,0 +1,83 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Karafka consuming server class
+   class Server
+     @consumer_threads = Concurrent::Array.new
+
+     # How long should we sleep between checks on shutting down consumers
+     SUPERVISION_SLEEP = 0.1
+     # What system exit code should we use when we terminate forcefully
+     FORCEFUL_EXIT_CODE = 2
+     # This factor allows us to calculate how many times we have to sleep before
+     # a forceful shutdown
+     SUPERVISION_CHECK_FACTOR = (1 / SUPERVISION_SLEEP)
+
+     private_constant :SUPERVISION_SLEEP, :FORCEFUL_EXIT_CODE, :SUPERVISION_CHECK_FACTOR
+
+     class << self
+       # Set of consuming threads. Each consumer thread contains a single consumer
+       attr_accessor :consumer_threads
+
+       # Writer for the list of consumer groups that we want to consume in our current process context
+       attr_writer :consumer_groups
+
+       # Method which runs the app
+       def run
+         process.on_sigint { stop_supervised }
+         process.on_sigquit { stop_supervised }
+         process.on_sigterm { stop_supervised }
+         run_supervised
+       end
+
+       # @return [Array<String>] array with names of the consumer groups that should be consumed
+       #   in the current server context
+       def consumer_groups
+         # If not specified, the server will listen on all the consumer groups
+         @consumer_groups ||= Karafka::App.consumer_groups.map(&:name).freeze
+       end
+
+       private
+
+       # @return [Karafka::Process] process wrapper instance used to catch system signal calls
+       def process
+         Karafka::App.config.internal.process
+       end
+
+       # Starts Karafka with supervision
+       # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to
+       #   finish its loop (and it won't happen until we explicitly want to stop)
+       def run_supervised
+         process.supervise
+         Karafka::App.run!
+         Karafka::App.config.internal.fetcher.call
+       end
+
+       # Stops Karafka with supervision (as long as there is a shutdown timeout)
+       # If the consumers won't stop within the given time frame, it forces them to exit
+       def stop_supervised
+         Karafka::App.stop!
+
+         # We check from time to time (for the timeout period) if all the threads finished
+         # their work and if so, we can just return and the normal shutdown process will take place
+         (Karafka::App.config.shutdown_timeout * SUPERVISION_CHECK_FACTOR).to_i.times do
+           if consumer_threads.count(&:alive?).zero?
+             Thread.new { Karafka.monitor.instrument('app.stopped') }.join
+             return
+           end
+
+           sleep SUPERVISION_SLEEP
+         end
+
+         raise Errors::ForcefulShutdownError
+       rescue Errors::ForcefulShutdownError => e
+         Thread.new { Karafka.monitor.instrument('app.stopping.error', error: e) }.join
+         # We're done waiting, let's kill them!
+         consumer_threads.each(&:terminate)
+
+         # exit! is not within the instrumentation as it would not trigger due to exit
+         Kernel.exit! FORCEFUL_EXIT_CODE
+       end
+     end
+   end
+ end
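Worked numbers for the supervision loop: with SUPERVISION_SLEEP = 0.1, SUPERVISION_CHECK_FACTOR is 1 / 0.1 = 10, so the default shutdown_timeout of 60 seconds yields 600 liveness checks, 0.1 s apart, before the forceful exit path runs:

    supervision_sleep = 0.1
    shutdown_timeout  = 60 # the default from Karafka::Setup::Config

    checks = (shutdown_timeout * (1 / supervision_sleep)).to_i
    checks #=> 600 checks before consumer threads are terminated and exit!(2) is called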
data/lib/karafka/setup/config.rb
@@ -0,0 +1,221 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Module containing all Karafka setup related elements like configuration settings,
+   # config validations and configurators for external gems integration
+   module Setup
+     # Configurator for setting up all the framework details that are required to make it work
+     # @note If you want to do some configurations after all of this is done, please add to
+     #   karafka/config a proper file (it needs to inherit from Karafka::Setup::Configurators::Base
+     #   and implement a setup method) - after that everything will happen automatically
+     # @note This config object allows only one level of nesting (nodes). This should be
+     #   enough and will still keep the code simple
+     # @see Karafka::Setup::Configurators::Base for more details about the configurators api
+     class Config
+       extend Dry::Configurable
+
+       # Contract for checking the config provided by the user
+       CONTRACT = Karafka::Contracts::Config.new.freeze
+
+       private_constant :CONTRACT
+
+       # Available settings
+       # option client_id [String] kafka client_id - used to provide
+       #   default Kafka consumer group namespaces and to identify the app in Kafka
+       setting :client_id
+       # What backend do we want to use to process messages
+       setting :backend, :inline
+       # option logger [Instance] logger that we want to use
+       setting :logger, ::Karafka::Instrumentation::Logger.new
+       # option monitor [Instance] monitor that we want to use (defaults to Karafka::Monitor)
+       setting :monitor, ::Karafka::Instrumentation::Monitor.new
+       # Mapper used to remap consumer group ids, so in case users migrate from other tools
+       # or they need to maintain their own internal consumer group naming conventions, they
+       # can easily do it, replacing the default client_id + consumer name pattern concept
+       setting :consumer_mapper, Routing::ConsumerMapper.new
+       # Mapper used to remap names of topics, so we can have a clean internal topic naming
+       # despite using any Kafka provider that uses namespacing, etc
+       # It needs to implement two methods:
+       #   - #incoming - for remapping from the incoming message to our internal format
+       #   - #outgoing - for remapping from the internal topic name into the outgoing message
+       setting :topic_mapper, Routing::TopicMapper.new
+       # Default serializer for converting whatever we want to send to kafka to json
+       setting :serializer, Karafka::Serialization::Json::Serializer.new
+       # Default deserializer for converting incoming data into ruby objects
+       setting :deserializer, Karafka::Serialization::Json::Deserializer.new
+       # If batch_fetching is true, we will fetch kafka messages in batches instead of one by one
+       # @note Fetching does not equal consuming, see the batch_consuming description for details
+       setting :batch_fetching, true
+       # If batch_consuming is true, we will have access to #params_batch instead of #params.
+       # #params_batch will contain the params received from Kafka (may be more than 1) so we
+       # can process them in batches
+       setting :batch_consuming, false
+       # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
+       #   longer waits for the consumers to stop gracefully but instead forcefully terminates
+       #   everything.
+       setting :shutdown_timeout, 60
+
+       # option kafka [Hash] - optional - kafka configuration options
+       setting :kafka do
+         # Array with at least one host
+         setting :seed_brokers, %w[kafka://127.0.0.1:9092]
+         # option session_timeout [Integer] the number of seconds after which, if a client
+         #   hasn't contacted the Kafka cluster, it will be kicked out of the group.
+         setting :session_timeout, 30
+         # Time that a given partition will be paused from fetching messages, when message
+         # consumption fails. It allows us to process other partitions, while the error is being
+         # resolved and also "slows" things down, so it prevents us from "eating" up all messages
+         # and consuming them with failing code. Use `nil` if you want to pause forever and never retry.
+         setting :pause_timeout, 10
+         # option pause_max_timeout [Integer, nil] the maximum number of seconds to pause for,
+         #   or `nil` if no maximum should be enforced.
+         setting :pause_max_timeout, nil
+         # option pause_exponential_backoff [Boolean] whether to enable exponential backoff
+         setting :pause_exponential_backoff, false
+         # option offset_commit_interval [Integer] the interval between offset commits,
+         #   in seconds.
+         setting :offset_commit_interval, 10
+         # option offset_commit_threshold [Integer] the number of messages that can be
+         #   processed before their offsets are committed. If zero, offset commits are
+         #   not triggered by message consumption.
+         setting :offset_commit_threshold, 0
+         # option heartbeat_interval [Integer] the interval between heartbeats; must be less
+         #   than the session window.
+         setting :heartbeat_interval, 10
+         # option offset_retention_time [Integer] the length of the retention window, known as
+         #   offset retention time
+         setting :offset_retention_time, nil
+         # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
+         #   are stored for further processing. Note that each item in the queue represents a
+         #   response from a single broker
+         setting :fetcher_max_queue_size, 10
+         # option max_bytes_per_partition [Integer] the maximum amount of data fetched
+         #   from a single partition at a time.
+         setting :max_bytes_per_partition, 1_048_576
+         # whether to consume messages starting at the beginning or to just consume new messages
+         setting :start_from_beginning, true
+         # option min_bytes [Integer] the minimum number of bytes to read before
+         #   returning messages from the server; if `max_wait_time` is reached, this
+         #   is ignored.
+         setting :min_bytes, 1
+         # option max_bytes [Integer] the maximum number of bytes to read before returning
+         #   messages from each broker.
+         setting :max_bytes, 10_485_760
+         # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
+         #   wait before returning data from a single message fetch. By setting this high you also
+         #   increase the fetching throughput - and by setting it low you set a bound on latency.
+         #   This configuration overrides `min_bytes`, so you'll _always_ get data back within the
+         #   time specified. The default value is one second. If you want to have at most five
+         #   seconds of latency, set `max_wait_time` to 5. You should make sure that
+         #   max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
+         setting :max_wait_time, 1
+         # option automatically_mark_as_consumed [Boolean] should we automatically mark received
+         #   messages as consumed (processed) after non-error consumption
+         setting :automatically_mark_as_consumed, true
+         # option reconnect_timeout [Integer] how long should we wait before trying to reconnect
+         #   to a Kafka cluster that went down (in seconds)
+         setting :reconnect_timeout, 5
+         # option connect_timeout [Integer] sets the number of seconds to wait while connecting to
+         #   a broker for the first time. When ruby-kafka initializes, it needs to connect to at
+         #   least one host.
+         setting :connect_timeout, 10
+         # option socket_timeout [Integer] sets the number of seconds to wait when reading from or
+         #   writing to a socket connection to a broker. After this timeout expires the connection
+         #   will be killed. Note that some Kafka operations are by definition long-running, such
+         #   as waiting for new messages to arrive in a partition, so don't set this value too low
+         setting :socket_timeout, 30
+
+         # SSL authentication related settings
+         # option ssl_ca_cert [String, nil] SSL CA certificate
+         setting :ssl_ca_cert, nil
+         # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
+         setting :ssl_ca_cert_file_path, nil
+         # option ssl_ca_certs_from_system [Boolean] use the CA certs from your system's default
+         #   certificate store
+         setting :ssl_ca_certs_from_system, false
+         # option ssl_verify_hostname [Boolean] verify the hostname for client certs
+         setting :ssl_verify_hostname, true
+         # option ssl_client_cert [String, nil] SSL client certificate
+         setting :ssl_client_cert, nil
+         # option ssl_client_cert_key [String, nil] SSL client certificate key
+         setting :ssl_client_cert_key, nil
+         # option sasl_gssapi_principal [String, nil] sasl principal
+         setting :sasl_gssapi_principal, nil
+         # option sasl_gssapi_keytab [String, nil] sasl keytab
+         setting :sasl_gssapi_keytab, nil
+         # option sasl_plain_authzid [String] the authorization identity to use
+         setting :sasl_plain_authzid, ''
+         # option sasl_plain_username [String, nil] the username used to authenticate
+         setting :sasl_plain_username, nil
+         # option sasl_plain_password [String, nil] the password used to authenticate
+         setting :sasl_plain_password, nil
+         # option sasl_scram_username [String, nil] the username used to authenticate
+         setting :sasl_scram_username, nil
+         # option sasl_scram_password [String, nil] the password used to authenticate
+         setting :sasl_scram_password, nil
+         # option sasl_scram_mechanism [String, nil] SCRAM mechanism, either 'sha256' or 'sha512'
+         setting :sasl_scram_mechanism, nil
+         # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
+         setting :sasl_over_ssl, true
+         # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
+         setting :ssl_client_cert_chain, nil
+         # option ssl_client_cert_key_password [String, nil] the password required to read
+         #   the ssl_client_cert_key
+         setting :ssl_client_cert_key_password, nil
+         # option sasl_oauth_token_provider [Object, nil] OAuthBearer token provider instance
+         #   that implements a #token method
+         setting :sasl_oauth_token_provider, nil
+       end
+
+       # Namespace for internal settings that should not be modified
+       # It's a temporary step to "declassify" several things internally before we move to a
+       # non global state
+       setting :internal do
+         # option routing_builder [Karafka::Routing::Builder] builder instance
+         setting :routing_builder, Routing::Builder.new
+         # option status [Karafka::Status] app status
+         setting :status, Status.new
+         # option process [Karafka::Process] process status
+         # @note In the future, we need to have a single process representation for all the karafka
+         #   instances
+         setting :process, Process.new
+         # option fetcher [Karafka::Fetcher] fetcher instance
+         setting :fetcher, Fetcher.new
+         # option configurators [Array<Object>] all configurators that we want to run after
+         #   the setup
+         setting :configurators, [Configurators::WaterDrop.new]
+       end
+
+       class << self
+         # Configuring method
+         # @yield Runs a block of code providing a config singleton instance to it
+         # @yieldparam [Karafka::Setup::Config] Karafka config instance
+         def setup
+           configure { |config| yield(config) }
+         end
+
+         # Everything that should be initialized after the setup
+         # Components are in the karafka/config directory and are all loaded one by one
+         # If you want to configure another component, please add a proper file to the config dir
+         def setup_components
+           config
+             .internal
+             .configurators
+             .each { |configurator| configurator.call(config) }
+         end
+
+         # Validates the config based on the config contract
+         # @return [Boolean] true if the configuration is valid
+         # @raise [Karafka::Errors::InvalidConfigurationError] raised when the configuration
+         #   doesn't match the config contract
+         def validate!
+           validation_result = CONTRACT.call(config.to_h)
+
+           return true if validation_result.success?
+
+           raise Errors::InvalidConfigurationError, validation_result.errors.to_h
+         end
+       end
+     end
+   end
+ end
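A minimal end-to-end setup sketch using the settings above ('example_app' and the broker address are placeholders); in a typical karafka.rb, Karafka::App.setup forwards the block to Config.setup, and the config contract is then enforced via validate!:

    Karafka::App.setup do |config|
      config.client_id = 'example_app'
      config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
      config.batch_fetching = true
      config.shutdown_timeout = 30
    end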