waterdrop 1.4.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,25 @@
1
+ -----BEGIN CERTIFICATE-----
2
+ MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
3
+ ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjAwODExMDkxNTM3WhcNMjEwODExMDkx
4
+ NTM3WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
5
+ CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDCpXsCgmINb6lHBXXBdyrgsBPSxC4/
6
+ 2H+weJ6L9CruTiv2+2/ZkQGtnLcDgrD14rdLIHK7t0o3EKYlDT5GhD/XUVhI15JE
7
+ N7IqnPUgexe1fbZArwQ51afxz2AmPQN2BkB2oeQHXxnSWUGMhvcEZpfbxCCJH26w
8
+ hS0Ccsma8yxA6hSlGVhFVDuCr7c2L1di6cK2CtIDpfDaWqnVNJEwBYHIxrCoWK5g
9
+ sIGekVt/admS9gRhIMaIBg+Mshth5/DEyWO2QjteTodItlxfTctrfmiAl8X8T5JP
10
+ VXeLp5SSOJ5JXE80nShMJp3RFnGw5fqjX/ffjtISYh78/By4xF3a25HdWH9+qO2Z
11
+ tx0wSGc9/4gqNM0APQnjN/4YXrGZ4IeSjtE+OrrX07l0TiyikzSLFOkZCAp8oBJi
12
+ Fhlosz8xQDJf7mhNxOaZziqASzp/hJTU/tuDKl5+ql2icnMv5iV/i6SlmvU29QNg
13
+ LCV71pUv0pWzN+OZbHZKWepGhEQ3cG9MwvkCAwEAAaN3MHUwCQYDVR0TBAIwADAL
14
+ BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFImGed2AXS070ohfRidiCEhXEUN+MB0GA1Ud
15
+ EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
16
+ c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKiHpwoENVrMi94V1zD4o8/6G3AU
17
+ gWz4udkPYHTZLUy3dLznc/sNjdkJFWT3E6NKYq7c60EpJ0m0vAEg5+F5pmNOsvD3
18
+ 2pXLj9kisEeYhR516HwXAvtngboUcb75skqvBCU++4Pu7BRAPjO1/ihLSBexbwSS
19
+ fF+J5OWNuyHHCQp+kGPLtXJe2yUYyvSWDj3I2//Vk0VhNOIlaCS1+5/P3ZJThOtm
20
+ zJUBI7h3HgovwRpcnmk2mXTmU4Zx/bCzX8EA6VY0khEvnmiq7S6eBF0H9qH8KyQ6
21
+ EkVLpvmUDFcf/uNaBQdazEMB5jYtwoA8gQlANETNGPi51KlkukhKgaIEDMkBDJOx
22
+ 65N7DzmkcyY0/GwjIVIxmRhcrCt1YeCUElmfFx0iida1/YRm6sB2AXqScc1+ECRi
23
+ 2DND//YJUikn1zwbz1kT70XmHd97B4Eytpln7K+M1u2g1pHVEPW4owD/ammXNpUy
24
+ nt70FcDD4yxJQ+0YNiHd0N8IcVBM1TMIVctMNQ==
25
+ -----END CERTIFICATE-----
# Translations for the dry-validation error messages used by the WaterDrop contracts
en:
  dry_validation:
    errors:
      broker_schema: >
        has an invalid format.
        Expected schema, host and port number.
        Example: kafka://127.0.0.1:9092 or kafka+ssl://127.0.0.1:9092
      ssl_client_cert_with_ssl_client_cert_key: >
        Both ssl_client_cert and ssl_client_cert_key need to be provided.
      ssl_client_cert_key_with_ssl_client_cert: >
        Both ssl_client_cert_key and ssl_client_cert need to be provided.
      ssl_client_cert_chain_with_ssl_client_cert: >
        Both ssl_client_cert_chain and ssl_client_cert need to be provided.
      ssl_client_cert_chain_with_ssl_client_cert_key: >
        Both ssl_client_cert_chain and ssl_client_cert_key need to be provided.
      ssl_client_cert_key_password_with_ssl_client_cert_key: >
        Both ssl_client_cert_key_password and ssl_client_cert_key need to be provided.
      sasl_oauth_token_provider_respond_to_token: >
        sasl_oauth_token_provider needs to respond to a #token method.
# frozen_string_literal: true

# External components
# delegate should be removed because we don't need it, we just add it because of ruby-kafka
[
  'delegate',
  'json',
  'delivery_boy',
  'singleton',
  'dry-configurable',
  'dry/monitor/notifications',
  'dry-validation',
  'zeitwerk'
].each(&method(:require))
# WaterDrop library
module WaterDrop
  class << self
    attr_accessor :logger

    # Sets up the whole configuration
    # @param [Block] block configuration block
    def setup(&block)
      Config.setup(&block)
      # Share the configured logger both with this module and with DeliveryBoy
      self.logger = config.logger
      DeliveryBoy.logger = logger
      ConfigApplier.call(DeliveryBoy.config, Config.config.to_h)
    end

    # @return [WaterDrop::Config] config instance
    def config
      Config.config
    end

    # @return [::WaterDrop::Monitor] monitor that we want to use
    def monitor
      config.monitor
    end

    # @return [String] root path of this gem
    # NOTE(review): relies on Pathname being loaded transitively (e.g. by zeitwerk) — confirm
    def gem_root
      Pathname.new(File.expand_path('..', __dir__))
    end
  end
end
loader = Zeitwerk::Loader.for_gem
# This very file is loaded by hand, so Zeitwerk must not manage it
loader.ignore("#{__dir__}/waterdrop.rb")
loader.setup
loader.eager_load
# frozen_string_literal: true

# WaterDrop library
module WaterDrop
  # Async producer for messages
  class AsyncProducer < BaseProducer
    # Performs message delivery using deliver_async method
    # @param message [String] message that we want to send to Kafka
    # @param options [Hash] options (including topic) for producer
    # @raise [WaterDrop::Errors::InvalidMessageOptions] raised when message options are
    #   somehow invalid and we cannot perform delivery because of that
    def self.call(message, options)
      # `||=` keeps the counter value across `retry`, which re-runs the method body
      attempts_count ||= 0
      attempts_count += 1

      validate!(options)
      return unless WaterDrop.config.deliver

      delivery_method =
        if WaterDrop.config.raise_on_buffer_overflow
          :deliver_async!
        else
          :deliver_async
        end

      DeliveryBoy.send(delivery_method, message, **options)
    rescue Kafka::Error => e
      raise(e) unless graceful_attempt?(attempts_count, message, options, e)

      retry
    end
  end
end
# frozen_string_literal: true

module WaterDrop
  # Base messages producer that contains all the logic that is exactly the same for both
  # sync and async producers
  class BaseProducer
    # Contract for checking the correctness of the provided data that someone wants to
    # dispatch to Kafka
    SCHEMA = Contracts::MessageOptions.new.freeze

    private_constant :SCHEMA

    class << self
      private

      # Runs the message options validations and raises an error if anything is invalid
      # @param options [Hash] hash that we want to validate
      # @return [true] when the options are valid
      # @raise [WaterDrop::Errors::InvalidMessageOptions] raised when message options are
      #   somehow invalid and we cannot perform delivery because of that
      def validate!(options)
        result = SCHEMA.call(options)
        raise Errors::InvalidMessageOptions, result.errors unless result.success?

        true
      end

      # Upon failed delivery, we may try to resend a message depending on the attempt number
      # or re-raise an error if we're unable to do that after given number of retries
      # This method checks that and also instruments errors and retries for the delivery
      # @param attempts_count [Integer] number of attempt (starting from 1) for the delivery
      # @param message [String] message that we want to send to Kafka
      # @param options [Hash] options (including topic) for producer
      # @param error [Kafka::Error] error that occurred
      # @return [Boolean] true if this is a graceful attempt and we can retry or false if this
      #   was the final one and we should deal with the fact, that we cannot deliver a given
      #   message
      def graceful_attempt?(attempts_count, message, options, error)
        # e.g. "async_producer.call" for WaterDrop::AsyncProducer
        scope = "#{to_s.split('::').last.sub('Producer', '_producer').downcase}.call"
        payload = {
          caller: self,
          message: message,
          options: options,
          error: error,
          attempts_count: attempts_count
        }

        graceful = attempts_count <= WaterDrop.config.kafka.max_retries
        WaterDrop.monitor.instrument(graceful ? "#{scope}.retry" : "#{scope}.error", payload)
        graceful
      end
    end
  end
end
# frozen_string_literal: true

# Configuration and descriptions are based on the delivery boy zendesk gem
# @see https://github.com/zendesk/delivery_boy
module WaterDrop
  # Configuration object for setting up all options required by WaterDrop
  class Config
    extend Dry::Configurable

    # Config schema definition
    # @note We use a single instance not to create new one upon each usage
    SCHEMA = Contracts::Config.new.freeze

    private_constant :SCHEMA

    # WaterDrop options
    # option client_id [String] identifier of this producer
    setting :client_id, 'waterdrop'
    # option [Instance, nil] logger that we want to use or nil to fallback to ruby-kafka logger
    setting :logger, Logger.new($stdout, level: Logger::WARN)
    # option [Instance] monitor that we want to use. See instrumentation part of the README for
    #   more details
    setting :monitor, WaterDrop::Instrumentation::Monitor.new
    # option [Boolean] should we send messages. Setting this to false can be really useful when
    #   testing and or developing because when set to false, won't actually ping Kafka
    setting :deliver, true
    # option [Boolean] if you're producing messages faster than the framework or the network can
    #   send them off, ruby-kafka might reject them. If that happens, WaterDrop will either raise
    #   or ignore - this setting manages that behavior. This only applies to async producer as
    #   sync producer will always raise upon problems
    setting :raise_on_buffer_overflow, true

    # Settings directly related to the Kafka driver
    setting :kafka do
      # option [Array<String>] Array that contains Kafka seed broker hosts with ports
      setting :seed_brokers

      # Network timeouts
      # option connect_timeout [Integer] Sets the number of seconds to wait while connecting to
      #   a broker for the first time. When ruby-kafka initializes, it needs to connect to at
      #   least one host.
      setting :connect_timeout, 10
      # option socket_timeout [Integer] Sets the number of seconds to wait when reading from or
      #   writing to a socket connection to a broker. After this timeout expires the connection
      #   will be killed. Note that some Kafka operations are by definition long-running, such as
      #   waiting for new messages to arrive in a partition, so don't set this value too low
      setting :socket_timeout, 30

      # Buffering for async producer
      # option [Integer] The maximum number of bytes allowed in the buffer before new messages
      #   are rejected.
      setting :max_buffer_bytesize, 10_000_000
      # option [Integer] The maximum number of messages allowed in the buffer before new messages
      #   are rejected.
      setting :max_buffer_size, 1000
      # option [Integer] The maximum number of messages allowed in the queue before new messages
      #   are rejected. The queue is used to ferry messages from the foreground threads of your
      #   application to the background thread that buffers and delivers messages.
      setting :max_queue_size, 1000

      # option [Integer] A timeout executed by a broker when the client is sending messages to it.
      #   It defines the number of seconds the broker should wait for replicas to acknowledge the
      #   write before responding to the client with an error. As such, it relates to the
      #   required_acks setting. It should be set lower than socket_timeout.
      setting :ack_timeout, 5
      # option [Integer] The number of seconds between background message deliveries.
      #   Disable timer-based background deliveries by setting this to 0.
      setting :delivery_interval, 10
      # option [Integer] The number of buffered messages that will trigger a background message
      #   delivery. Disable buffer size based background deliveries by setting this to 0.
      setting :delivery_threshold, 100
      # option [Boolean] should the producer be idempotent
      setting :idempotent, false
      # option [Boolean] should the producer be transactional
      setting :transactional, false
      # option [Integer] transaction timeout
      setting :transactional_timeout, 60

      # option [Integer] The number of retries when attempting to deliver messages.
      setting :max_retries, 2
      # option [Integer] number of acknowledgements required (-1 means all replicas)
      setting :required_acks, -1
      # option [Integer] backoff (in seconds) between delivery retries
      setting :retry_backoff, 1

      # option [Integer] The minimum number of messages that must be buffered before compression
      #   is attempted. By default only one message is required. Only relevant if
      #   compression_codec is set.
      setting :compression_threshold, 1
      # option [Symbol, nil] The codec used to compress messages (e.g. :snappy, :gzip)
      setting :compression_codec, nil

      # SSL authentication related settings
      # option ca_cert [String, nil] SSL CA certificate
      setting :ssl_ca_cert, nil
      # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
      setting :ssl_ca_cert_file_path, nil
      # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
      #   certificate store
      setting :ssl_ca_certs_from_system, false
      # option ssl_verify_hostname [Boolean] Verify the hostname for client certs
      setting :ssl_verify_hostname, true
      # option ssl_client_cert [String, nil] SSL client certificate
      setting :ssl_client_cert, nil
      # option ssl_client_cert_key [String, nil] SSL client certificate key
      setting :ssl_client_cert_key, nil
      # option sasl_gssapi_principal [String, nil] sasl principal
      setting :sasl_gssapi_principal, nil
      # option sasl_gssapi_keytab [String, nil] sasl keytab
      setting :sasl_gssapi_keytab, nil
      # option sasl_plain_authzid [String] The authorization identity to use
      setting :sasl_plain_authzid, ''
      # option sasl_plain_username [String, nil] The username used to authenticate
      setting :sasl_plain_username, nil
      # option sasl_plain_password [String, nil] The password used to authenticate
      setting :sasl_plain_password, nil
      # option sasl_scram_username [String, nil] The username used to authenticate
      setting :sasl_scram_username, nil
      # option sasl_scram_password [String, nil] The password used to authenticate
      setting :sasl_scram_password, nil
      # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
      setting :sasl_scram_mechanism, nil
      # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
      setting :sasl_over_ssl, true
      # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
      setting :ssl_client_cert_chain, nil
      # option ssl_client_cert_key_password [String, nil] the password required to read
      #   the ssl_client_cert_key
      setting :ssl_client_cert_key_password, nil
      # option sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
      #   implements method token.
      setting :sasl_oauth_token_provider, nil
    end

    class << self
      # Configuration method
      # @param block [Proc] block of code providing a config singleton instance
      # @yieldparam [WaterDrop::Config] WaterDrop config instance
      def setup(&block)
        configure do |config|
          block.call(config)
          validate!(config.to_h)
        end
      end

      private

      # Validates the configuration and if anything is wrong, will raise an exception
      # @param config_hash [Hash] config hash with setup details
      # @raise [WaterDrop::Errors::InvalidConfiguration] raised when something is wrong with
      #   the configuration
      def validate!(config_hash)
        result = SCHEMA.call(config_hash)
        raise Errors::InvalidConfiguration, result.errors.to_h unless result.success?

        true
      end
    end
  end
end
# frozen_string_literal: true

module WaterDrop
  # Engine used to propagate config application to DeliveryBoy with corner case handling
  module ConfigApplier
    class << self
      # Applies the WaterDrop settings onto a DeliveryBoy configuration
      # @param delivery_boy_config [DeliveryBoy::Config] delivery boy config instance
      # @param settings [Hash] hash with WaterDrop settings
      def call(delivery_boy_config, settings)
        settings.each do |key, value|
          # Nested scopes (like :kafka) are flattened by recursing into them
          if value.is_a?(Hash)
            call(delivery_boy_config, value)
            next
          end

          if respond_to?(key, true)
            # A special case that needs manual setup instead of a direct reassignment
            send(key, delivery_boy_config, value)
          else
            setter = :"#{key}="
            # Internal WaterDrop settings are not synced with delivery boy
            next unless delivery_boy_config.respond_to?(setter)

            delivery_boy_config.public_send(setter, value)
          end
        end
      end

      private

      # Extra setup for the compression codec as it behaves differently than other settings
      # that are ported 1:1 from ruby-kafka.
      # Delivery boy requires the compression codec as a string, while ruby-kafka (whose
      # internal design we follow) uses a symbol - so the symbol is stringified here and
      # will be symbolized again down the road by delivery boy
      # @param delivery_boy_config [DeliveryBoy::Config] delivery boy config instance
      # @param codec_name [Symbol] codec name as a symbol
      def compression_codec(delivery_boy_config, codec_name)
        # When there is no compression codec, we don't apply anything
        return unless codec_name

        delivery_boy_config.compression_codec = codec_name.to_s
      end

      # We use the "seed_brokers" name while DeliveryBoy uses "brokers", so the value is
      # passed over manually
      # @param delivery_boy_config [DeliveryBoy::Config] delivery boy config instance
      # @param seed_brokers [Array<String>] kafka seed brokers
      def seed_brokers(delivery_boy_config, seed_brokers)
        delivery_boy_config.brokers = seed_brokers
      end
    end
  end
end
# frozen_string_literal: true

module WaterDrop
  # Namespace for all the contracts for config validations
  module Contracts
    # Regex to check that topic has a valid format (word characters, dashes and dots only)
    TOPIC_REGEXP = /\A[\w\-.]+\z/.freeze
  end
end
# frozen_string_literal: true

module WaterDrop
  module Contracts
    # Contract with validation rules for WaterDrop configuration details
    class Config < Dry::Validation::Contract
      # Valid uri schemas of Kafka broker url
      URI_SCHEMES = %w[kafka kafka+ssl plaintext ssl].freeze

      # Available sasl scram mechanism of authentication (plus nil)
      SASL_SCRAM_MECHANISMS = %w[sha256 sha512].freeze

      # Supported compression codecs
      COMPRESSION_CODECS = %i[snappy gzip lz4 zstd].freeze

      # Makes the custom error messages from config/errors.yml available to this contract
      config.messages.load_paths << File.join(WaterDrop.gem_root, 'config', 'errors.yml')

      class << self
        private

        # Builder for kafka scoped data custom rules
        # @param keys [Symbol, Hash] the keys names
        # @param block [Proc] block we want to run with validations within the kafka scope
        def kafka_scope_rule(*keys, &block)
          # [:kafka].product(keys) builds the nested [[:kafka, key], ...] paths
          rule(*[:kafka].product(keys)) do
            instance_exec(values[:kafka], &block)
          end
        end
      end

      private

      # Uri validator to check if uri is in a Kafka acceptable format
      # @param uri [String] uri we want to validate
      # @return [Boolean] true if it is a valid uri, otherwise false
      def broker_schema?(uri)
        parsed = URI.parse(uri)
        URI_SCHEMES.include?(parsed.scheme) && parsed.port
      rescue URI::InvalidURIError
        false
      end

      params do
        required(:client_id).filled(:str?, format?: Contracts::TOPIC_REGEXP)
        required(:logger).filled
        required(:deliver).filled(:bool?)
        required(:raise_on_buffer_overflow).filled(:bool?)

        required(:kafka).schema do
          required(:seed_brokers).value(:array, :filled?).each(:str?)
          required(:connect_timeout).filled(:int?, gt?: 0)
          required(:socket_timeout).filled(:int?, gt?: 0)
          required(:compression_threshold).filled(:int?, gteq?: 1)
          optional(:compression_codec).maybe(included_in?: COMPRESSION_CODECS)

          required(:max_buffer_bytesize).filled(:int?, gt?: 0)
          required(:max_buffer_size).filled(:int?, gt?: 0)
          required(:max_queue_size).filled(:int?, gt?: 0)

          required(:ack_timeout).filled(:int?, gt?: 0)
          required(:delivery_interval).filled(:int?, gteq?: 0)
          required(:delivery_threshold).filled(:int?, gteq?: 0)

          required(:max_retries).filled(:int?, gteq?: 0)
          required(:retry_backoff).filled(:int?, gteq?: 0)
          required(:required_acks).filled(included_in?: [1, 0, -1, :all])

          # All of these are optional, nilable string credentials
          %i[
            ssl_ca_cert
            ssl_ca_cert_file_path
            ssl_client_cert
            ssl_client_cert_key
            ssl_client_cert_chain
            ssl_client_cert_key_password
            sasl_gssapi_principal
            sasl_gssapi_keytab
            sasl_plain_authzid
            sasl_plain_username
            sasl_plain_password
            sasl_scram_username
            sasl_scram_password
          ].each do |encryption_attribute|
            optional(encryption_attribute).maybe(:str?)
          end

          optional(:ssl_verify_hostname).maybe(:bool?)
          optional(:ssl_ca_certs_from_system).maybe(:bool?)
          optional(:sasl_over_ssl).maybe(:bool?)
          optional(:sasl_oauth_token_provider).value(:any)

          # It's not with other encryptions as it has some more rules
          optional(:sasl_scram_mechanism)
            .maybe(:str?, included_in?: SASL_SCRAM_MECHANISMS)
        end
      end

      # Every seed broker has to match the accepted Kafka broker uri format
      kafka_scope_rule(:seed_brokers) do |kafka|
        unless kafka[:seed_brokers].all?(&method(:broker_schema?))
          key(%i[kafka seed_brokers]).failure(:broker_schema)
        end
      end

      # ssl_client_cert cannot be used without its key
      kafka_scope_rule(:ssl_client_cert, :ssl_client_cert_key) do |kafka|
        if kafka[:ssl_client_cert] && kafka[:ssl_client_cert_key].nil?
          key(%i[kafka ssl_client_cert_key]).failure(:ssl_client_cert_with_ssl_client_cert_key)
        end
      end

      # ssl_client_cert_key cannot be used without its cert
      kafka_scope_rule(:ssl_client_cert_key, :ssl_client_cert) do |kafka|
        if kafka[:ssl_client_cert_key] && kafka[:ssl_client_cert].nil?
          key.failure(:ssl_client_cert_key_with_ssl_client_cert)
        end
      end

      # ssl_client_cert_chain cannot be used without the client cert
      kafka_scope_rule(:ssl_client_cert_chain, :ssl_client_cert) do |kafka|
        if kafka[:ssl_client_cert_chain] && kafka[:ssl_client_cert].nil?
          key.failure(:ssl_client_cert_chain_with_ssl_client_cert)
        end
      end

      # the key password makes no sense without the key itself
      kafka_scope_rule(:ssl_client_cert_key_password, :ssl_client_cert_key) do |kafka|
        if kafka[:ssl_client_cert_key_password] && kafka[:ssl_client_cert_key].nil?
          key.failure(:ssl_client_cert_key_password_with_ssl_client_cert_key)
        end
      end

      # a token provider, when given, must implement #token
      kafka_scope_rule(:sasl_oauth_token_provider) do |kafka|
        provider = kafka[:sasl_oauth_token_provider]

        if provider && !provider.respond_to?(:token)
          key.failure(:sasl_oauth_token_provider_respond_to_token)
        end
      end
    end
  end
end