karafka 1.1.0

Files changed (80)
  1. checksums.yaml +7 -0
  2. data/.console_irbrc +13 -0
  3. data/.github/ISSUE_TEMPLATE.md +2 -0
  4. data/.gitignore +68 -0
  5. data/.rspec +1 -0
  6. data/.ruby-gemset +1 -0
  7. data/.ruby-version +1 -0
  8. data/.travis.yml +17 -0
  9. data/CHANGELOG.md +371 -0
  10. data/CODE_OF_CONDUCT.md +46 -0
  11. data/CONTRIBUTING.md +42 -0
  12. data/Gemfile +12 -0
  13. data/Gemfile.lock +111 -0
  14. data/MIT-LICENCE +18 -0
  15. data/README.md +95 -0
  16. data/bin/karafka +19 -0
  17. data/config/errors.yml +6 -0
  18. data/karafka.gemspec +35 -0
  19. data/lib/karafka.rb +68 -0
  20. data/lib/karafka/app.rb +52 -0
  21. data/lib/karafka/attributes_map.rb +67 -0
  22. data/lib/karafka/backends/inline.rb +17 -0
  23. data/lib/karafka/base_controller.rb +60 -0
  24. data/lib/karafka/base_responder.rb +185 -0
  25. data/lib/karafka/cli.rb +54 -0
  26. data/lib/karafka/cli/base.rb +78 -0
  27. data/lib/karafka/cli/console.rb +29 -0
  28. data/lib/karafka/cli/flow.rb +46 -0
  29. data/lib/karafka/cli/info.rb +29 -0
  30. data/lib/karafka/cli/install.rb +43 -0
  31. data/lib/karafka/cli/server.rb +67 -0
  32. data/lib/karafka/connection/config_adapter.rb +112 -0
  33. data/lib/karafka/connection/consumer.rb +121 -0
  34. data/lib/karafka/connection/listener.rb +51 -0
  35. data/lib/karafka/connection/processor.rb +61 -0
  36. data/lib/karafka/controllers/callbacks.rb +54 -0
  37. data/lib/karafka/controllers/includer.rb +51 -0
  38. data/lib/karafka/controllers/responders.rb +19 -0
  39. data/lib/karafka/controllers/single_params.rb +15 -0
  40. data/lib/karafka/errors.rb +43 -0
  41. data/lib/karafka/fetcher.rb +48 -0
  42. data/lib/karafka/helpers/class_matcher.rb +78 -0
  43. data/lib/karafka/helpers/config_retriever.rb +46 -0
  44. data/lib/karafka/helpers/multi_delegator.rb +33 -0
  45. data/lib/karafka/loader.rb +29 -0
  46. data/lib/karafka/logger.rb +53 -0
  47. data/lib/karafka/monitor.rb +98 -0
  48. data/lib/karafka/params/params.rb +128 -0
  49. data/lib/karafka/params/params_batch.rb +41 -0
  50. data/lib/karafka/parsers/json.rb +38 -0
  51. data/lib/karafka/patches/dry_configurable.rb +31 -0
  52. data/lib/karafka/patches/ruby_kafka.rb +34 -0
  53. data/lib/karafka/persistence/consumer.rb +25 -0
  54. data/lib/karafka/persistence/controller.rb +38 -0
  55. data/lib/karafka/process.rb +63 -0
  56. data/lib/karafka/responders/builder.rb +35 -0
  57. data/lib/karafka/responders/topic.rb +57 -0
  58. data/lib/karafka/routing/builder.rb +61 -0
  59. data/lib/karafka/routing/consumer_group.rb +61 -0
  60. data/lib/karafka/routing/consumer_mapper.rb +33 -0
  61. data/lib/karafka/routing/proxy.rb +37 -0
  62. data/lib/karafka/routing/router.rb +29 -0
  63. data/lib/karafka/routing/topic.rb +66 -0
  64. data/lib/karafka/routing/topic_mapper.rb +55 -0
  65. data/lib/karafka/schemas/config.rb +21 -0
  66. data/lib/karafka/schemas/consumer_group.rb +65 -0
  67. data/lib/karafka/schemas/consumer_group_topic.rb +18 -0
  68. data/lib/karafka/schemas/responder_usage.rb +39 -0
  69. data/lib/karafka/schemas/server_cli_options.rb +43 -0
  70. data/lib/karafka/server.rb +62 -0
  71. data/lib/karafka/setup/config.rb +163 -0
  72. data/lib/karafka/setup/configurators/base.rb +35 -0
  73. data/lib/karafka/setup/configurators/water_drop.rb +29 -0
  74. data/lib/karafka/status.rb +25 -0
  75. data/lib/karafka/templates/application_controller.rb.example +7 -0
  76. data/lib/karafka/templates/application_responder.rb.example +11 -0
  77. data/lib/karafka/templates/karafka.rb.example +41 -0
  78. data/lib/karafka/version.rb +7 -0
  79. data/log/.gitkeep +0 -0
  80. metadata +267 -0
data/lib/karafka/routing/router.rb
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Namespace for all elements related to requests routing
+   module Routing
+     # Karafka framework Router for routing incoming messages to proper controllers
+     # @note Since Kafka does not provide namespaces or modules for topics, they all have a "flat"
+     #   structure, so all the routes are stored in a single-level array
+     module Router
+       # Finds a proper topic based on a full topic id
+       # @param topic_id [String] proper topic id (already mapped, etc.) for which we want to find
+       #   the routing topic
+       # @return [Karafka::Routing::Topic] proper route details
+       # @raise [Karafka::Errors::NonMatchingRouteError] raised if the topic name does not match
+       #   any route defined by the user using routes.draw
+       def find(topic_id)
+         App.consumer_groups.each do |consumer_group|
+           consumer_group.topics.each do |topic|
+             return topic if topic.id == topic_id
+           end
+         end
+
+         raise(Errors::NonMatchingRouteError, topic_id)
+       end
+
+       module_function :find
+     end
+   end
+ end
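
For orientation, a minimal lookup sketch (the consumer group and topic names are placeholders, not part of the gem); it assumes routes have already been drawn so that App.consumer_groups is populated:

  # Hypothetical topic id following the "<consumer_group_id>_<topic_name>" convention
  # built in Karafka::Routing::Topic#initialize
  topic = Karafka::Routing::Router.find('example_app_example_group_users_created')
  topic.controller # => controller class drawn for that topic
  # Karafka::Errors::NonMatchingRouteError is raised when no drawn topic matches the id
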
data/lib/karafka/routing/topic.rb
@@ -0,0 +1,66 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Routing
+     # Topic stores all the details on how we should interact with a given Kafka topic
+     # It belongs to a consumer group, as from 0.6 all the topics can work in the same consumer group
+     # It is a part of Karafka's DSL
+     class Topic
+       extend Helpers::ConfigRetriever
+
+       attr_reader :id, :consumer_group
+       attr_accessor :controller
+
+       # @param name [String, Symbol] name of a topic on which we want to listen
+       # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
+       def initialize(name, consumer_group)
+         @name = name.to_s
+         @consumer_group = consumer_group
+         @attributes = {}
+         # @note We use an identifier related to the consumer group that owns a topic, because from
+         #   Karafka 0.6 we can handle multiple Kafka instances within the same process and we can
+         #   have the same topic name across multiple Kafkas
+         @id = "#{consumer_group.id}_#{@name}"
+       end
+
+       # Initializes default values for all the options that support defaults if their values are
+       # not yet specified. This needs to be done (cannot be lazy loaded on first use) because
+       # everywhere except the Karafka server command, those would not be initialized on time - for
+       # example for Sidekiq
+       def build
+         Karafka::AttributesMap.topic.each { |attr| send(attr) }
+         controller&.topic = self
+         self
+       end
+
+       # @return [Class, nil] Class (not an instance) of a responder that should respond from
+       #   the controller back to Kafka (useful for piping dataflows)
+       def responder
+         @responder ||= Karafka::Responders::Builder.new(controller).build
+       end
+
+       # @return [Class] Parser class (not an instance) that we want to use to parse Kafka messages
+       # @note If not provided, Json will be used as the default
+       def parser
+         @parser ||= Karafka::Parsers::Json
+       end
+
+       Karafka::AttributesMap.topic.each do |attribute|
+         config_retriever_for(attribute)
+       end
+
+       # @return [Hash] hash with all the topic attributes
+       # @note This is being used when we validate the consumer_group and its topics
+       def to_h
+         map = Karafka::AttributesMap.topic.map do |attribute|
+           [attribute, public_send(attribute)]
+         end
+
+         Hash[map].merge!(
+           id: id,
+           controller: controller
+         )
+       end
+     end
+   end
+ end
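
As a rough sketch of the API above (topics are normally created by the routing DSL rather than by hand; the :events name and the choice of the first consumer group are illustrative assumptions):

  group = Karafka::App.consumer_groups.first
  topic = Karafka::Routing::Topic.new(:events, group).build
  topic.id     # => "#{group.id}_events"
  topic.parser # => Karafka::Parsers::Json when no parser was configured
  topic.to_h   # => all mapped attributes plus :id and :controller
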
data/lib/karafka/routing/topic_mapper.rb
@@ -0,0 +1,55 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Routing
+     # Default topic mapper that does not remap things
+     # A mapper can be used for Kafka providers that require namespaced topic names. Instead of being
+     # provider dependent, we can then define a mapper and internally use "pure" topic names in
+     # routes and responders
+     #
+     # @example Mapper for mapping prefixed topics
+     #   module MyMapper
+     #     PREFIX = "my_user_name."
+     #
+     #     def incoming(topic)
+     #       topic.to_s.gsub(PREFIX, '')
+     #     end
+     #
+     #     def outgoing(topic)
+     #       "#{PREFIX}#{topic}"
+     #     end
+     #   end
+     #
+     # @example Mapper for replacing "." with "_" in topic names
+     #   module MyMapper
+     #     PREFIX = "my_user_name."
+     #
+     #     def incoming(topic)
+     #       topic.to_s.gsub('.', '_')
+     #     end
+     #
+     #     def outgoing(topic)
+     #       topic.to_s.gsub('_', '.')
+     #     end
+     #   end
+     module TopicMapper
+       class << self
+         # @param topic [String, Symbol] topic
+         # @return [String, Symbol] same topic as on input
+         # @example
+         #   incoming('topic_name') #=> 'topic_name'
+         def incoming(topic)
+           topic
+         end
+
+         # @param topic [String, Symbol] topic
+         # @return [String, Symbol] same topic as on input
+         # @example
+         #   outgoing('topic_name') #=> 'topic_name'
+         def outgoing(topic)
+           topic
+         end
+       end
+     end
+   end
+ end
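
Building on the @example docs above, a hedged sketch of wiring a custom mapper through the topic_mapper setting defined later in setup/config.rb ("my_user_name." is a placeholder prefix):

  module MyPrefixMapper
    PREFIX = 'my_user_name.'

    def self.incoming(topic)
      topic.to_s.gsub(PREFIX, '')
    end

    def self.outgoing(topic)
      "#{PREFIX}#{topic}"
    end
  end

  Karafka::App.setup do |config|
    config.topic_mapper = MyPrefixMapper
  end
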
data/lib/karafka/schemas/config.rb
@@ -0,0 +1,21 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Namespace for all the validation schemas that we use to check input
+   module Schemas
+     # Regexp for validating format of groups and topics
+     TOPIC_REGEXP = /\A(\w|\-|\.)+\z/
+
+     # Schema with validation rules for Karafka configuration details
+     # @note There are many more configuration options inside of the
+     #   Karafka::Setup::Config model, but we don't validate them here as they are
+     #   validated per each route (topic + consumer_group) because they can be overwritten,
+     #   so we validate all of that once all the routes are defined and ready
+     Config = Dry::Validation.Schema do
+       required(:client_id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+       required(:consumer_mapper)
+       required(:topic_mapper)
+       optional(:backend).filled
+     end
+   end
+ end
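
A quick illustration of running this root schema by hand (values are placeholders; in practice Setup::Config.validate! calls it, as shown further down in setup/config.rb):

  result = Karafka::Schemas::Config.call(
    client_id: 'example_app',
    consumer_mapper: Karafka::Routing::ConsumerMapper,
    topic_mapper: Karafka::Routing::TopicMapper,
    backend: :inline
  )
  result.success? # => true
  result.errors   # => {} when all the rules above pass
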
data/lib/karafka/schemas/consumer_group.rb
@@ -0,0 +1,65 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Schemas
+     # Schema for single full route (consumer group + topics) validation.
+     ConsumerGroup = Dry::Validation.Schema do
+       # Valid URI schemes of a Kafka broker URL
+       # The ||= is due to the behavior of require_all that resolves dependencies
+       # but sometimes loads things twice
+       URI_SCHEMES ||= %w[kafka kafka+ssl].freeze
+
+       configure do
+         config.messages_file = File.join(
+           Karafka.gem_root, 'config', 'errors.yml'
+         )
+
+         # URI validator to check if a URI is in a format acceptable by Karafka
+         # @param uri [String] uri we want to validate
+         # @return [Boolean] true if it is a valid uri, otherwise false
+         def broker_schema?(uri)
+           uri = URI.parse(uri)
+           URI_SCHEMES.include?(uri.scheme) && uri.port
+         rescue URI::InvalidURIError
+           return false
+         end
+       end
+
+       required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+       required(:seed_brokers).filled { each(:broker_schema?) }
+       required(:session_timeout).filled { int? | float? }
+       required(:pause_timeout).filled { (int? | float?) & gteq?(0) }
+       required(:offset_commit_interval) { int? | float? }
+       required(:offset_commit_threshold).filled(:int?)
+       required(:offset_retention_time) { none?.not > int? }
+       required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
+       required(:connect_timeout).filled { (int? | float?) & gt?(0) }
+       required(:socket_timeout).filled { (int? | float?) & gt?(0) }
+       required(:min_bytes).filled(:int?, gt?: 0)
+       required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
+       required(:batch_fetching).filled(:bool?)
+       required(:topics).filled { each { schema(ConsumerGroupTopic) } }
+
+       # Max wait time cannot exceed socket_timeout - wouldn't make sense
+       rule(
+         max_wait_time_limit: %i[max_wait_time socket_timeout]
+       ) do |max_wait_time, socket_timeout|
+         socket_timeout.int? > max_wait_time.lteq?(value(:socket_timeout))
+       end
+
+       %i[
+         ssl_ca_cert
+         ssl_ca_cert_file_path
+         ssl_client_cert
+         ssl_client_cert_key
+         sasl_plain_authzid
+         sasl_plain_username
+         sasl_plain_password
+         sasl_gssapi_principal
+         sasl_gssapi_keytab
+       ].each do |encryption_attribute|
+         optional(encryption_attribute).maybe(:str?)
+       end
+     end
+   end
+ end
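
To make the broker_schema? rule concrete: an entry has to be a kafka:// or kafka+ssl:// URI with an explicit port. The host names below are illustrative only:

  passing = %w[kafka://127.0.0.1:9092 kafka+ssl://broker.example.com:9093]
  failing = %w[127.0.0.1:9092 http://localhost:9092 kafka://localhost]
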
data/lib/karafka/schemas/consumer_group_topic.rb
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Schemas
+     # Consumer group topic validation rules
+     ConsumerGroupTopic = Dry::Validation.Schema do
+       required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+       required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+       required(:backend).filled(included_in?: %i[inline sidekiq])
+       required(:controller).filled
+       required(:parser).filled
+       required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
+       required(:start_from_beginning).filled(:bool?)
+       required(:batch_consuming).filled(:bool?)
+       required(:persistent).filled(:bool?)
+     end
+   end
+ end
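
An illustrative hash that satisfies these rules (UsersCreatedController is a hypothetical controller class; the numeric and boolean values mirror the defaults from setup/config.rb):

  Karafka::Schemas::ConsumerGroupTopic.call(
    id: 'example_app_example_group_users_created',
    name: 'users_created',
    backend: :inline,
    controller: UsersCreatedController,
    parser: Karafka::Parsers::Json,
    max_bytes_per_partition: 1_048_576,
    start_from_beginning: true,
    batch_consuming: false,
    persistent: true
  ).success? # => true
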
data/lib/karafka/schemas/responder_usage.rb
@@ -0,0 +1,39 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Schemas
+     # Validator to check responder topic usage
+     ResponderUsageTopic = Dry::Validation.Schema do
+       required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+       required(:required).filled(:bool?)
+       required(:multiple_usage).filled(:bool?)
+       required(:usage_count).filled(:int?, gteq?: 0)
+       required(:registered).filled(eql?: true)
+       required(:async).filled(:bool?)
+
+       rule(
+         required_usage: %i[required usage_count]
+       ) do |required, usage_count|
+         required.true? > usage_count.gteq?(1)
+       end
+
+       rule(
+         multiple_usage_permission: %i[multiple_usage usage_count]
+       ) do |multiple_usage, usage_count|
+         usage_count.gt?(1) > multiple_usage.true?
+       end
+
+       rule(
+         multiple_usage_block: %i[multiple_usage usage_count]
+       ) do |multiple_usage, usage_count|
+         multiple_usage.false? > usage_count.lteq?(1)
+       end
+     end
+
+     # Validator to check that everything in a responder flow matches responder rules
+     ResponderUsage = Dry::Validation.Schema do
+       required(:used_topics) { filled? > each { schema(ResponderUsageTopic) } }
+       required(:registered_topics) { filled? > each { schema(ResponderUsageTopic) } }
+     end
+   end
+ end
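
The three rules above encode implications between the flags; an illustrative entry that they accept (the topic name is a placeholder):

  Karafka::Schemas::ResponderUsageTopic.call(
    name: 'users_created',
    required: true,
    multiple_usage: false,
    usage_count: 1,   # required => usage_count >= 1; multiple_usage false => usage_count <= 1
    registered: true,
    async: false
  ).success? # => true
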
data/lib/karafka/schemas/server_cli_options.rb
@@ -0,0 +1,43 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Schemas
+     # Schema for validating correctness of the server cli command options
+     # We validate some basics plus the list of consumer_groups that we want to use, to make
+     # sure that all of them are defined, and that a pidfile does not exist
+     ServerCliOptions = Dry::Validation.Schema do
+       configure do
+         option :consumer_groups
+
+         def self.messages
+           super.merge(
+             en: {
+               errors: {
+                 consumer_groups_inclusion: 'Unknown consumer group.',
+                 pid_existence: 'Pidfile already exists.'
+               }
+             }
+           )
+         end
+       end
+
+       optional(:pid).filled(:str?)
+       optional(:daemon).filled(:bool?)
+       optional(:consumer_groups).filled(:array?)
+
+       validate(consumer_groups_inclusion: :consumer_groups) do |consumer_groups|
+         # If there were no consumer_groups declared in the server cli, it means that we will
+         # run all of them and there is no need to validate them here at all
+         if consumer_groups.nil?
+           true
+         else
+           (consumer_groups - Karafka::Routing::Builder.instance.map(&:name)).empty?
+         end
+       end
+
+       validate(pid_existence: :pid) do |pid|
+         pid ? !File.exist?(pid) : true
+       end
+     end
+   end
+ end
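
A hedged sketch of what this schema guards against (the pidfile path and group name are placeholders); the custom messages defined above surface on failure:

  Karafka::Schemas::ServerCliOptions.call(
    pid: 'tmp/pids/karafka.pid',
    daemon: false,
    consumer_groups: %w[example_group]
  )
  # => failure with 'Unknown consumer group.' if example_group is not drawn in the routes,
  #    and with 'Pidfile already exists.' if that pid file is already present
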
data/lib/karafka/server.rb
@@ -0,0 +1,62 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Karafka consuming server class
+   class Server
+     class << self
+       # Set of consuming threads. Each consumer thread contains a single consumer
+       attr_accessor :consumer_threads
+
+       # Writer for the list of consumer groups that we want to consume in our current process context
+       attr_writer :consumer_groups
+
+       # Method which runs the app
+       def run
+         @consumer_threads = Concurrent::Array.new
+         bind_on_sigint
+         bind_on_sigquit
+         bind_on_sigterm
+         start_supervised
+       end
+
+       # @return [Array<String>] array with names of consumer groups that should be consumed in a
+       #   current server context
+       def consumer_groups
+         # If not specified, a server will listen on all the topics
+         @consumer_groups ||= Karafka::App.consumer_groups.map(&:name).freeze
+       end
+
+       private
+
+       # @return [Karafka::Process] process wrapper instance used to catch system signal calls
+       def process
+         Karafka::Process.instance
+       end
+
+       # What should happen when we decide to quit with sigint
+       def bind_on_sigint
+         process.on_sigint { Karafka::App.stop! }
+       end
+
+       # What should happen when we decide to quit with sigquit
+       def bind_on_sigquit
+         process.on_sigquit { Karafka::App.stop! }
+       end
+
+       # What should happen when we decide to quit with sigterm
+       def bind_on_sigterm
+         process.on_sigterm { Karafka::App.stop! }
+       end
+
+       # Starts Karafka with supervision
+       # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to
+       #   finish the loop (and it won't happen until we explicitly want to stop)
+       def start_supervised
+         process.supervise do
+           Karafka::App.run!
+           Karafka::Fetcher.new.fetch_loop
+         end
+       end
+     end
+   end
+ end
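
Roughly what the `karafka server` CLI command (cli/server.rb in the file list) ends up driving, sketched here with a placeholder group name:

  Karafka::Server.consumer_groups = %w[example_group] # optional narrowing to selected groups
  Karafka::Server.run # supervises, traps SIGINT/SIGQUIT/SIGTERM, blocks in Fetcher#fetch_loop
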
data/lib/karafka/setup/config.rb
@@ -0,0 +1,163 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Module containing all Karafka setup related elements like configuration settings,
+   # config validations and configurators for external gems integration
+   module Setup
+     # Configurator for setting up all the framework details that are required to make it work
+     # @note If you want to do some configuration after all of this is done, please add a proper
+     #   file to karafka/config (it needs to inherit from Karafka::Setup::Configurators::Base
+     #   and implement a setup method); after that everything will happen automatically
+     # @note This config object allows creating only one level of nesting (nodes). This should be
+     #   enough and will still keep the code simple
+     # @see Karafka::Setup::Configurators::Base for more details about the configurators api
+     class Config
+       extend Dry::Configurable
+
+       # Available settings
+       # option client_id [String] kafka client_id - used to provide
+       #   default Kafka group namespaces and to identify the app in Kafka
+       setting :client_id
+       # What backend do we want to use to process messages
+       setting :backend, :inline
+       # option logger [Instance] logger that we want to use
+       setting :logger, -> { ::Karafka::Logger.instance }
+       # option monitor [Instance] monitor that we want to use (defaults to Karafka::Monitor)
+       setting :monitor, -> { ::Karafka::Monitor.instance }
+       # Mapper used to remap consumer group ids, so in case users migrate from other tools
+       # or they need to maintain their own internal consumer group naming conventions, they
+       # can easily do it, replacing the default client_id + consumer name pattern concept
+       setting :consumer_mapper, -> { Routing::ConsumerMapper }
+       # Mapper used to remap names of topics, so we can have clean internal topic naming
+       # despite using any Kafka provider that uses namespacing, etc
+       # It needs to implement two methods:
+       #   - #incoming - for remapping from the incoming message to our internal format
+       #   - #outgoing - for remapping from the internal topic name into the outgoing message
+       setting :topic_mapper, -> { Routing::TopicMapper }
+       # If batch_fetching is true, we will fetch Kafka messages in batches instead of 1 by 1
+       # @note Fetching does not equal consuming, see batch_consuming description for details
+       setting :batch_fetching, true
+       # If batch_consuming is true, we will have access to #params_batch instead of #params.
+       # #params_batch will contain params received from Kafka (may be more than 1) so we can
+       # process them in batches
+       setting :batch_consuming, false
+       # Should we operate with a single controller instance across multiple batches of messages
+       # from the same partition, or should we build a new instance for each incoming batch.
+       # Disabling persistence can be useful when you want to build a new controller instance for
+       # each incoming batch. It's enabled by default, not to create more objects than needed on
+       # each batch
+       setting :persistent, true
+
+       # option kafka [Hash] - optional - kafka configuration options
+       setting :kafka do
+         # Array with at least one host
+         setting :seed_brokers
+         # option session_timeout [Integer] the number of seconds after which, if a client
+         #   hasn't contacted the Kafka cluster, it will be kicked out of the group.
+         setting :session_timeout, 30
+         # Time that a given partition will be paused from fetching messages, when message
+         # consumption fails. It allows us to process other partitions while the error is being
+         # resolved and also "slows" things down, so it prevents us from "eating up" all the
+         # messages and consuming them with failing code
+         setting :pause_timeout, 10
+         # option offset_commit_interval [Integer] the interval between offset commits,
+         #   in seconds.
+         setting :offset_commit_interval, 10
+         # option offset_commit_threshold [Integer] the number of messages that can be
+         #   processed before their offsets are committed. If zero, offset commits are
+         #   not triggered by message consumption.
+         setting :offset_commit_threshold, 0
+         # option heartbeat_interval [Integer] the interval between heartbeats; must be less
+         #   than the session window.
+         setting :heartbeat_interval, 10
+         # option max_bytes_per_partition [Integer] the maximum amount of data fetched
+         #   from a single partition at a time.
+         setting :max_bytes_per_partition, 1_048_576
+         # whether to consume messages starting at the beginning or to just consume new messages
+         setting :start_from_beginning, true
+         # option min_bytes [Integer] the minimum number of bytes to read before
+         #   returning messages from the server; if `max_wait_time` is reached, this
+         #   is ignored.
+         setting :min_bytes, 1
+         # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
+         #   wait before returning data from a single message fetch. By setting this high you also
+         #   increase the fetching throughput - and by setting it low you set a bound on latency.
+         #   This configuration overrides `min_bytes`, so you'll _always_ get data back within the
+         #   time specified. The default value is one second. If you want to have at most five
+         #   seconds of latency, set `max_wait_time` to 5. You should make sure
+         #   max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
+         setting :max_wait_time, 1
+         # option automatically_mark_as_consumed [Boolean] should we automatically mark received
+         #   messages as consumed (processed) after non-error consumption
+         setting :automatically_mark_as_consumed, true
+         # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
+         #   a Kafka cluster that went down (in seconds)
+         setting :reconnect_timeout, 5
+         # option offset_retention_time [Integer] The length of the retention window, known as
+         #   offset retention time
+         setting :offset_retention_time, nil
+         # option connect_timeout [Integer] Sets the number of seconds to wait while connecting to
+         #   a broker for the first time. When ruby-kafka initializes, it needs to connect to at
+         #   least one host.
+         setting :connect_timeout, 10
+         # option socket_timeout [Integer] Sets the number of seconds to wait when reading from or
+         #   writing to a socket connection to a broker. After this timeout expires the connection
+         #   will be killed. Note that some Kafka operations are by definition long-running, such as
+         #   waiting for new messages to arrive in a partition, so don't set this value too low
+         setting :socket_timeout, 30
+
+         # SSL authentication related settings
+         # option ca_cert [String] SSL CA certificate
+         setting :ssl_ca_cert, nil
+         # option ssl_ca_cert_file_path [String] SSL CA certificate file path
+         setting :ssl_ca_cert_file_path, nil
+         # option ssl_client_cert [String] SSL client certificate
+         setting :ssl_client_cert, nil
+         # option ssl_client_cert_key [String] SSL client certificate key
+         setting :ssl_client_cert_key, nil
+         # option sasl_gssapi_principal [String] sasl principal
+         setting :sasl_gssapi_principal, nil
+         # option sasl_gssapi_keytab [String] sasl keytab
+         setting :sasl_gssapi_keytab, nil
+         # option sasl_plain_authzid [String] The authorization identity to use
+         setting :sasl_plain_authzid, ''
+         # option sasl_plain_username [String] The username used to authenticate
+         setting :sasl_plain_username, nil
+         # option sasl_plain_password [String] The password used to authenticate
+         setting :sasl_plain_password, nil
+       end
+
+       class << self
+         # Configuration method
+         # @yield Runs a block of code providing a config singleton instance to it
+         # @yieldparam [Karafka::Setup::Config] Karafka config instance
+         def setup
+           configure do |config|
+             yield(config)
+           end
+         end
+
+         # Everything that should be initialized after the setup
+         # Components are in the karafka/config directory and are all loaded one by one
+         # If you want to configure another component, please add a proper file to the config dir
+         def setup_components
+           Configurators::Base.descendants.each do |klass|
+             klass.new(config).setup
+           end
+         end
+
+         # Validate config based on ConfigurationSchema
+         # @return [Boolean] true if configuration is valid
+         # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
+         #   doesn't match with ConfigurationSchema
+         def validate!
+           validation_result = Karafka::Schemas::Config.call(config.to_h)
+
+           return true if validation_result.success?
+
+           raise Errors::InvalidConfiguration, validation_result.errors
+         end
+       end
+     end
+   end
+ end
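
For context, a typical karafka.rb boot-file block (values are placeholders) that exercises this config class through Karafka::App.setup, which hands the block to the Config.setup method shown above; the result is then checked against Schemas::Config via validate!:

  Karafka::App.setup do |config|
    config.client_id = 'example_app'
    config.backend = :inline
    config.batch_fetching = true
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    config.kafka.session_timeout = 30
  end
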