karafka 1.2.13 → 1.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +2 -0
  4. data/{.coditsu.yml → .coditsu/ci.yml} +1 -1
  5. data/.console_irbrc +1 -3
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/.travis.yml +8 -21
  12. data/CHANGELOG.md +91 -18
  13. data/CONTRIBUTING.md +1 -1
  14. data/Gemfile +2 -5
  15. data/Gemfile.lock +79 -65
  16. data/README.md +10 -11
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +25 -0
  19. data/config/errors.yml +38 -5
  20. data/karafka.gemspec +14 -12
  21. data/lib/karafka.rb +8 -15
  22. data/lib/karafka/app.rb +14 -6
  23. data/lib/karafka/attributes_map.rb +5 -12
  24. data/lib/karafka/base_consumer.rb +19 -30
  25. data/lib/karafka/base_responder.rb +45 -27
  26. data/lib/karafka/cli.rb +1 -1
  27. data/lib/karafka/cli/console.rb +11 -9
  28. data/lib/karafka/cli/flow.rb +5 -2
  29. data/lib/karafka/cli/info.rb +3 -1
  30. data/lib/karafka/cli/install.rb +30 -6
  31. data/lib/karafka/cli/server.rb +11 -6
  32. data/lib/karafka/code_reloader.rb +67 -0
  33. data/lib/karafka/connection/api_adapter.rb +11 -4
  34. data/lib/karafka/connection/batch_delegator.rb +51 -0
  35. data/lib/karafka/connection/builder.rb +1 -1
  36. data/lib/karafka/connection/client.rb +30 -20
  37. data/lib/karafka/connection/listener.rb +24 -13
  38. data/lib/karafka/connection/message_delegator.rb +36 -0
  39. data/lib/karafka/consumers/callbacks.rb +32 -15
  40. data/lib/karafka/consumers/includer.rb +30 -18
  41. data/lib/karafka/consumers/metadata.rb +10 -0
  42. data/lib/karafka/consumers/responders.rb +2 -2
  43. data/lib/karafka/contracts.rb +10 -0
  44. data/lib/karafka/contracts/config.rb +21 -0
  45. data/lib/karafka/contracts/consumer_group.rb +206 -0
  46. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  47. data/lib/karafka/contracts/responder_usage.rb +54 -0
  48. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  49. data/lib/karafka/errors.rb +17 -16
  50. data/lib/karafka/fetcher.rb +28 -30
  51. data/lib/karafka/helpers/class_matcher.rb +6 -2
  52. data/lib/karafka/helpers/config_retriever.rb +1 -1
  53. data/lib/karafka/helpers/inflector.rb +26 -0
  54. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  55. data/lib/karafka/instrumentation/logger.rb +9 -6
  56. data/lib/karafka/instrumentation/monitor.rb +15 -9
  57. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  58. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  59. data/lib/karafka/params/builders/metadata.rb +33 -0
  60. data/lib/karafka/params/builders/params.rb +36 -0
  61. data/lib/karafka/params/builders/params_batch.rb +25 -0
  62. data/lib/karafka/params/metadata.rb +35 -0
  63. data/lib/karafka/params/params.rb +68 -0
  64. data/lib/karafka/params/params_batch.rb +35 -20
  65. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  66. data/lib/karafka/persistence/client.rb +15 -11
  67. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  68. data/lib/karafka/persistence/topics.rb +48 -0
  69. data/lib/karafka/process.rb +0 -2
  70. data/lib/karafka/responders/topic.rb +6 -8
  71. data/lib/karafka/routing/builder.rb +36 -8
  72. data/lib/karafka/routing/consumer_group.rb +1 -1
  73. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  74. data/lib/karafka/routing/proxy.rb +10 -1
  75. data/lib/karafka/routing/topic.rb +5 -3
  76. data/lib/karafka/routing/topic_mapper.rb +16 -18
  77. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  78. data/lib/karafka/serialization/json/serializer.rb +31 -0
  79. data/lib/karafka/server.rb +25 -27
  80. data/lib/karafka/setup/config.rb +63 -37
  81. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  82. data/lib/karafka/setup/dsl.rb +0 -1
  83. data/lib/karafka/status.rb +7 -3
  84. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  85. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  86. data/lib/karafka/templates/karafka.rb.erb +92 -0
  87. data/lib/karafka/version.rb +1 -1
  88. metadata +90 -57
  89. metadata.gz.sig +0 -0
  90. data/lib/karafka/callbacks.rb +0 -30
  91. data/lib/karafka/callbacks/config.rb +0 -22
  92. data/lib/karafka/callbacks/dsl.rb +0 -16
  93. data/lib/karafka/connection/delegator.rb +0 -46
  94. data/lib/karafka/instrumentation/listener.rb +0 -112
  95. data/lib/karafka/loader.rb +0 -28
  96. data/lib/karafka/params/dsl.rb +0 -158
  97. data/lib/karafka/parsers/json.rb +0 -38
  98. data/lib/karafka/patches/dry_configurable.rb +0 -33
  99. data/lib/karafka/persistence/topic.rb +0 -29
  100. data/lib/karafka/schemas/config.rb +0 -24
  101. data/lib/karafka/schemas/consumer_group.rb +0 -79
  102. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  103. data/lib/karafka/schemas/responder_usage.rb +0 -39
  104. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  105. data/lib/karafka/setup/configurators/base.rb +0 -29
  106. data/lib/karafka/setup/configurators/params.rb +0 -25
  107. data/lib/karafka/templates/karafka.rb.example +0 -54
@@ -16,9 +16,10 @@ module Karafka
16
16
 
17
17
  # Runs prefetch callbacks and executes the main listener fetch loop
18
18
  def call
19
- Karafka::Callbacks.before_fetch_loop(
20
- @consumer_group,
21
- client
19
+ Karafka.monitor.instrument(
20
+ 'connection.listener.before_fetch_loop',
21
+ consumer_group: @consumer_group,
22
+ client: client
22
23
  )
23
24
  fetch_loop
24
25
  end
@@ -26,27 +27,37 @@ module Karafka
26
27
  private
27
28
 
28
29
  # Opens connection, gets messages and calls a block for each of the incoming messages
29
- # @yieldparam [String] consumer group id
30
- # @yieldparam [Array<Kafka::FetchedMessage>] kafka fetched messages
31
- # @note This will yield with a raw message - no preprocessing or reformatting
32
30
  # @note We catch all the errors here, so they don't affect other listeners (or this one)
33
31
  # so we will be able to listen and consume other incoming messages.
34
32
  # Since it is run inside Karafka::Connection::ActorCluster - catching all the exceptions
35
- # won't crash the whole cluster. Here we mostly focus on catchin the exceptions related to
33
+ # won't crash the whole cluster. Here we mostly focus on catching the exceptions related to
36
34
  # Kafka connections / Internet connection issues / Etc. Business logic problems should not
37
35
  # propagate this far
38
36
  def fetch_loop
39
- client.fetch_loop do |raw_messages|
40
- # @note What happens here is a delegation of processing to a proper processor based
41
- # on the incoming messages characteristics
42
- Karafka::Connection::Delegator.call(@consumer_group.id, raw_messages)
37
+ # @note What happens here is a delegation of processing to a proper processor based
38
+ # on the incoming messages characteristics
39
+ client.fetch_loop do |raw_data, type|
40
+ Karafka.monitor.instrument('connection.listener.fetch_loop')
41
+
42
+ case type
43
+ when :message
44
+ MessageDelegator.call(@consumer_group.id, raw_data)
45
+ when :batch
46
+ BatchDelegator.call(@consumer_group.id, raw_data)
47
+ end
43
48
  end
44
49
  # This is on purpose - see the notes for this method
45
- # rubocop:disable RescueException
50
+ # rubocop:disable Lint/RescueException
46
51
  rescue Exception => e
47
52
  Karafka.monitor.instrument('connection.listener.fetch_loop.error', caller: self, error: e)
48
- # rubocop:enable RescueException
53
+ # rubocop:enable Lint/RescueException
54
+ # We can stop client without a problem, as it will reinitialize itself when running the
55
+ # `fetch_loop` again
49
56
  @client.stop
57
+ # We need to clear the consumers cache for current connection when fatal error happens and
58
+ # we reset the connection. Otherwise for consumers with manual offset management, the
59
+ # persistence might have stored some data that would be reprocessed
60
+ Karafka::Persistence::Consumers.clear
50
61
  sleep(@consumer_group.reconnect_timeout) && retry
51
62
  end
52
63
 
@@ -0,0 +1,36 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Connection
5
+ # Class that delegates processing of a single received message for which we listen to
6
+ # a proper processor
7
+ module MessageDelegator
8
+ class << self
9
+ # Delegates message (does something with it)
10
+ # It will either schedule or run a proper processor action for the incoming message
11
+ # @param group_id [String] group_id of a group from which a given message came
12
+ # @param kafka_message [<Kafka::FetchedMessage>] raw message from kafka
13
+ # @note This should be looped to obtain a constant delegating of new messages
14
+ def call(group_id, kafka_message)
15
+ topic = Persistence::Topics.fetch(group_id, kafka_message.topic)
16
+ consumer = Persistence::Consumers.fetch(topic, kafka_message.partition)
17
+
18
+ Karafka.monitor.instrument(
19
+ 'connection.message_delegator.call',
20
+ caller: self,
21
+ consumer: consumer,
22
+ kafka_message: kafka_message
23
+ ) do
24
+ # @note We always get a single message within single delegator, which means that
25
+ # we don't care if user marked it as a batch consumed or not.
26
+ consumer.params_batch = Params::Builders::ParamsBatch.from_kafka_messages(
27
+ [kafka_message],
28
+ topic
29
+ )
30
+ consumer.call
31
+ end
32
+ end
33
+ end
34
+ end
35
+ end
36
+ end
@@ -16,28 +16,40 @@ module Karafka
16
16
  before_stop
17
17
  ].freeze
18
18
 
19
+ private_constant :TYPES
20
+
19
21
  # Class methods needed to make callbacks run
20
22
  module ClassMethods
21
23
  TYPES.each do |type|
22
- # A Creates a callback wrapper
24
+ # Creates a callback wrapper
25
+ #
23
26
  # @param method_name [Symbol, String] method name or nil if we plan to provide a block
24
27
  # @yield A block with a code that should be executed before scheduling
25
- define_method type do |method_name = nil, &block|
26
- set_callback type, :before, method_name || block
28
+ # @note We don't have to optimize the key fetching here as those are class methods that
29
+ # are evaluated once upon start
30
+ define_method(type) do |method_name = nil, &block|
31
+ key = "consumers.#{Helpers::Inflector.map(to_s)}.#{type}"
32
+ Karafka::App.monitor.register_event(key)
33
+
34
+ Karafka::App.monitor.subscribe(key) do |event|
35
+ context = event[:context]
36
+
37
+ if method_name
38
+ context.send(method_name)
39
+ else
40
+ context.instance_eval(&block)
41
+ end
42
+ end
27
43
  end
28
44
  end
29
45
  end
30
46
 
31
- # @param consumer_class [Class] consumer class that we extend with callbacks
32
- def self.included(consumer_class)
33
- consumer_class.class_eval do
34
- extend ClassMethods
35
- include ActiveSupport::Callbacks
36
-
37
- # The call method is wrapped with a set of callbacks
38
- # We won't run process if any of the callbacks throw abort
39
- # @see http://api.rubyonrails.org/classes/ActiveSupport/Callbacks/ClassMethods.html#method-i-get_callbacks
40
- TYPES.each { |type| define_callbacks type }
47
+ class << self
48
+ # @param consumer_class [Class] consumer class that we extend with callbacks
49
+ def included(consumer_class)
50
+ consumer_class.class_eval do
51
+ extend ClassMethods
52
+ end
41
53
  end
42
54
  end
43
55
 
@@ -45,9 +57,14 @@ module Karafka
45
57
  # method of a proper backend. It is here because it interacts with the default Karafka
46
58
  # call flow and needs to be overwritten to support callbacks
47
59
  def call
48
- run_callbacks :after_fetch do
49
- process
60
+ if self.class.respond_to?(:after_fetch)
61
+ Karafka::App.monitor.instrument(
62
+ "consumers.#{Helpers::Inflector.map(self.class.to_s)}.after_fetch",
63
+ context: self
64
+ )
50
65
  end
66
+
67
+ process
51
68
  end
52
69
  end
53
70
  end
@@ -3,47 +3,59 @@
3
3
  module Karafka
4
4
  # Additional functionalities for consumers
5
5
  module Consumers
6
- # Module used to inject functionalities into a given consumer class, based on the consumer
6
+ # Module used to inject functionalities into a given consumer instance, based on the consumer
7
7
  # topic and its settings
8
8
  # We don't need all the behaviors in all the cases, so it is not worth having everything
9
9
  # in all the cases all the time
10
10
  module Includer
11
11
  class << self
12
- # @param consumer_class [Class] consumer class, that will get some functionalities
13
- # based on the topic under which it operates
14
- def call(consumer_class)
15
- topic = consumer_class.topic
16
-
17
- bind_backend(consumer_class, topic)
18
- bind_params(consumer_class, topic)
19
- bind_responders(consumer_class, topic)
12
+ # @param consumer [Karafka::BaseConsumer] consumer instance, that will get some
13
+ # functionalities based on the topic under which it operates
14
+ def call(consumer)
15
+ topic = consumer.topic
16
+
17
+ bind_backend(consumer, topic)
18
+ bind_params(consumer, topic)
19
+ bind_metadata(consumer, topic)
20
+ bind_responders(consumer, topic)
20
21
  end
21
22
 
22
23
  private
23
24
 
24
25
  # Figures out backend for a given consumer class, based on the topic backend and
25
26
  # includes it into the consumer class
26
- # @param consumer_class [Class] consumer class
27
+ # @param consumer [Karafka::BaseConsumer] consumer instance
27
28
  # @param topic [Karafka::Routing::Topic] topic of a consumer class
28
- def bind_backend(consumer_class, topic)
29
+ def bind_backend(consumer, topic)
29
30
  backend = Kernel.const_get("::Karafka::Backends::#{topic.backend.to_s.capitalize}")
30
- consumer_class.include backend
31
+ consumer.extend(backend)
31
32
  end
32
33
 
33
34
  # Adds a single #params support for non batch processed topics
34
- # @param consumer_class [Class] consumer class
35
+ # @param consumer [Karafka::BaseConsumer] consumer instance
35
36
  # @param topic [Karafka::Routing::Topic] topic of a consumer class
36
- def bind_params(consumer_class, topic)
37
+ def bind_params(consumer, topic)
37
38
  return if topic.batch_consuming
38
- consumer_class.include SingleParams
39
+
40
+ consumer.extend(SingleParams)
41
+ end
42
+
43
+ # Adds an option to work with metadata for consumer instances that have batch fetching
44
+ # @param consumer [Karafka::BaseConsumer] consumer instance
45
+ # @param topic [Karafka::Routing::Topic] topic of a consumer class
46
+ def bind_metadata(consumer, topic)
47
+ return unless topic.batch_fetching
48
+
49
+ consumer.extend(Metadata)
39
50
  end
40
51
 
41
52
  # Adds responders support for topics and consumers with responders defined for them
42
- # @param consumer_class [Class] consumer class
53
+ # @param consumer [Karafka::BaseConsumer] consumer instance
43
54
  # @param topic [Karafka::Routing::Topic] topic of a consumer class
44
- def bind_responders(consumer_class, topic)
55
+ def bind_responders(consumer, topic)
45
56
  return unless topic.responder
46
- consumer_class.include Responders
57
+
58
+ consumer.extend(Responders)
47
59
  end
48
60
  end
49
61
  end
@@ -0,0 +1,10 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Consumers
5
+ # Brings the metadata into consumers that support batch_fetching
6
+ module Metadata
7
+ attr_accessor :metadata
8
+ end
9
+ end
10
+ end
@@ -15,8 +15,8 @@ module Karafka
15
15
  data: data
16
16
  ) do
17
17
  # @note we build a new instance of responder each time, as a long-running (persisted)
18
- # consumers can respond multiple times during the lifecycle
19
- topic.responder.new(topic.parser).call(*data)
18
+ # consumers can respond multiple times during the life-cycle
19
+ topic.responder.new.call(*data)
20
20
  end
21
21
  end
22
22
  end
@@ -0,0 +1,10 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ # Namespace for all the validation contracts that we use to check input
5
+ module Contracts
6
+ # Regexp for validating format of groups and topics
7
+ # @note It is not nested inside of the contracts, as it is used by couple of them
8
+ TOPIC_REGEXP = /\A(\w|\-|\.)+\z/.freeze
9
+ end
10
+ end
@@ -0,0 +1,21 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Contracts
5
+ # Contract with validation rules for Karafka configuration details
6
+ # @note There are many more configuration options inside of the
7
+ # Karafka::Setup::Config model, but we don't validate them here as they are
8
+ # validated per each route (topic + consumer_group) because they can be overwritten,
9
+ # so we validate all of that once all the routes are defined and ready
10
+ class Config < Dry::Validation::Contract
11
+ params do
12
+ required(:client_id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
13
+ required(:shutdown_timeout) { (int? & gt?(0)) }
14
+ required(:consumer_mapper)
15
+ required(:topic_mapper)
16
+
17
+ optional(:backend).filled
18
+ end
19
+ end
20
+ end
21
+ end
@@ -0,0 +1,206 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Contracts
5
+ # Contract for single full route (consumer group + topics) validation.
6
+ class ConsumerGroup < Dry::Validation::Contract
7
+ config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
8
+
9
+ # Valid uri schemas of Kafka broker url
10
+ # The ||= is due to the behavior of require_all that resolves dependencies
11
+ # but sometimes loads things twice
12
+ URI_SCHEMES ||= %w[kafka kafka+ssl plaintext ssl].freeze
13
+
14
+ # Available sasl scram mechanism of authentication (plus nil)
15
+ SASL_SCRAM_MECHANISMS ||= %w[sha256 sha512].freeze
16
+
17
+ # Internal contract for sub-validating topics schema
18
+ TOPIC_CONTRACT = ConsumerGroupTopic.new.freeze
19
+
20
+ private_constant :TOPIC_CONTRACT
21
+
22
+ params do
23
+ required(:id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
24
+ required(:topics).value(:array, :filled?)
25
+ required(:seed_brokers).value(:array, :filled?)
26
+ required(:session_timeout).filled { int? | float? }
27
+ required(:pause_timeout).maybe(%i[integer float]) { filled? > gteq?(0) }
28
+ required(:pause_max_timeout).maybe(%i[integer float]) { filled? > gteq?(0) }
29
+ required(:pause_exponential_backoff).filled(:bool?)
30
+ required(:offset_commit_interval) { int? | float? }
31
+ required(:offset_commit_threshold).filled(:int?)
32
+ required(:offset_retention_time).maybe(:integer)
33
+ required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
34
+ required(:fetcher_max_queue_size).filled(:int?, gt?: 0)
35
+ required(:connect_timeout).filled { (int? | float?) & gt?(0) }
36
+ required(:reconnect_timeout).filled { (int? | float?) & gteq?(0) }
37
+ required(:socket_timeout).filled { (int? | float?) & gt?(0) }
38
+ required(:min_bytes).filled(:int?, gt?: 0)
39
+ required(:max_bytes).filled(:int?, gt?: 0)
40
+ required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
41
+ required(:batch_fetching).filled(:bool?)
42
+
43
+ %i[
44
+ ssl_ca_cert
45
+ ssl_ca_cert_file_path
46
+ ssl_client_cert
47
+ ssl_client_cert_key
48
+ ssl_client_cert_chain
49
+ ssl_client_cert_key_password
50
+ sasl_gssapi_principal
51
+ sasl_gssapi_keytab
52
+ sasl_plain_authzid
53
+ sasl_plain_username
54
+ sasl_plain_password
55
+ sasl_scram_username
56
+ sasl_scram_password
57
+ ].each do |encryption_attribute|
58
+ optional(encryption_attribute).maybe(:str?)
59
+ end
60
+
61
+ optional(:ssl_verify_hostname).maybe(:bool?)
62
+ optional(:ssl_ca_certs_from_system).maybe(:bool?)
63
+ optional(:sasl_over_ssl).maybe(:bool?)
64
+ optional(:sasl_oauth_token_provider).value(:any)
65
+
66
+ # It's not with other encryptions as it has some more rules
67
+ optional(:sasl_scram_mechanism)
68
+ .maybe(:str?, included_in?: SASL_SCRAM_MECHANISMS)
69
+ end
70
+
71
+ # Uri rule to check if uri is in a Karafka acceptable format
72
+ rule(:seed_brokers) do
73
+ if value&.is_a?(Array) && !value.all?(&method(:kafka_uri?))
74
+ key.failure(:invalid_broker_schema)
75
+ end
76
+ end
77
+
78
+ rule(:topics) do
79
+ if value&.is_a?(Array)
80
+ names = value.map { |topic| topic[:name] }
81
+
82
+ key.failure(:topics_names_not_unique) if names.size != names.uniq.size
83
+ end
84
+ end
85
+
86
+ rule(:topics) do
87
+ if value&.is_a?(Array)
88
+ value.each_with_index do |topic, index|
89
+ TOPIC_CONTRACT.call(topic).errors.each do |error|
90
+ key([:topics, index, error.path[0]]).failure(error.text)
91
+ end
92
+ end
93
+ end
94
+ end
95
+
96
+ rule(:ssl_client_cert, :ssl_client_cert_key) do
97
+ if values[:ssl_client_cert] && !values[:ssl_client_cert_key]
98
+ key(:ssl_client_cert_key).failure(:ssl_client_cert_with_ssl_client_cert_key)
99
+ end
100
+ end
101
+
102
+ rule(:ssl_client_cert, :ssl_client_cert_key) do
103
+ if values[:ssl_client_cert_key] && !values[:ssl_client_cert]
104
+ key(:ssl_client_cert).failure(:ssl_client_cert_key_with_ssl_client_cert)
105
+ end
106
+ end
107
+
108
+ rule(:ssl_client_cert, :ssl_client_cert_chain) do
109
+ if values[:ssl_client_cert_chain] && !values[:ssl_client_cert]
110
+ key(:ssl_client_cert).failure(:ssl_client_cert_chain_with_ssl_client_cert)
111
+ end
112
+ end
113
+
114
+ rule(:ssl_client_cert_chain, :ssl_client_cert_key) do
115
+ if values[:ssl_client_cert_chain] && !values[:ssl_client_cert]
116
+ key(:ssl_client_cert).failure(:ssl_client_cert_chain_with_ssl_client_cert_key)
117
+ end
118
+ end
119
+
120
+ rule(:ssl_client_cert_key_password, :ssl_client_cert_key) do
121
+ if values[:ssl_client_cert_key_password] && !values[:ssl_client_cert_key]
122
+ key(:ssl_client_cert_key).failure(:ssl_client_cert_key_password_with_ssl_client_cert_key)
123
+ end
124
+ end
125
+
126
+ rule(:ssl_ca_cert) do
127
+ key.failure(:invalid_certificate) if value && !valid_certificate?(value)
128
+ end
129
+
130
+ rule(:ssl_client_cert) do
131
+ key.failure(:invalid_certificate) if value && !valid_certificate?(value)
132
+ end
133
+
134
+ rule(:ssl_ca_cert_file_path) do
135
+ if value
136
+ if File.exist?(value)
137
+ key.failure(:invalid_certificate_from_path) unless valid_certificate?(File.read(value))
138
+ else
139
+ key.failure(:does_not_exist)
140
+ end
141
+ end
142
+ end
143
+
144
+ rule(:ssl_client_cert_key) do
145
+ key.failure(:invalid_private_key) if value && !valid_private_key?(value)
146
+ end
147
+
148
+ rule(:ssl_client_cert_chain) do
149
+ key.failure(:invalid_certificate) if value && !valid_certificate?(value)
150
+ end
151
+
152
+ rule(:sasl_oauth_token_provider) do
153
+ key.failure(:does_not_respond_to_token) if value && !value.respond_to?(:token)
154
+ end
155
+
156
+ rule(:max_wait_time, :socket_timeout) do
157
+ max_wait_time = values[:max_wait_time]
158
+ socket_timeout = values[:socket_timeout]
159
+
160
+ if socket_timeout.is_a?(Numeric) &&
161
+ max_wait_time.is_a?(Numeric) &&
162
+ max_wait_time > socket_timeout
163
+
164
+ key(:max_wait_time).failure(:max_wait_time_limit)
165
+ end
166
+ end
167
+
168
+ rule(:pause_timeout, :pause_max_timeout, :pause_exponential_backoff) do
169
+ if values[:pause_exponential_backoff]
170
+ if values[:pause_timeout].to_i > values[:pause_max_timeout].to_i
171
+ key(:pause_max_timeout).failure(:max_timeout_size_for_exponential)
172
+ end
173
+ end
174
+ end
175
+
176
+ private
177
+
178
+ # @param value [String] potential RSA key value
179
+ # @return [Boolean] is the given string a valid RSA key
180
+ def valid_private_key?(value)
181
+ OpenSSL::PKey.read(value)
182
+ true
183
+ rescue OpenSSL::PKey::PKeyError
184
+ false
185
+ end
186
+
187
+ # @param value [String] potential X509 cert value
188
+ # @return [Boolean] is the given string a valid X509 cert
189
+ def valid_certificate?(value)
190
+ OpenSSL::X509::Certificate.new(value)
191
+ true
192
+ rescue OpenSSL::X509::CertificateError
193
+ false
194
+ end
195
+
196
+ # @param value [String] potential kafka uri
197
+ # @return [Boolean] true if it is a kafka uri, otherwise false
198
+ def kafka_uri?(value)
199
+ uri = URI.parse(value)
200
+ URI_SCHEMES.include?(uri.scheme) && uri.port
201
+ rescue URI::InvalidURIError
202
+ false
203
+ end
204
+ end
205
+ end
206
+ end