karafka 1.2.8 → 1.4.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (113) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +52 -0
  11. data/.gitignore +1 -0
  12. data/.ruby-version +1 -1
  13. data/CHANGELOG.md +134 -14
  14. data/CODE_OF_CONDUCT.md +1 -1
  15. data/CONTRIBUTING.md +1 -1
  16. data/Gemfile +4 -5
  17. data/Gemfile.lock +92 -81
  18. data/README.md +9 -12
  19. data/bin/karafka +1 -1
  20. data/certs/mensfeld.pem +25 -0
  21. data/config/errors.yml +38 -5
  22. data/docker-compose.yml +17 -0
  23. data/karafka.gemspec +18 -17
  24. data/lib/karafka.rb +10 -16
  25. data/lib/karafka/app.rb +14 -6
  26. data/lib/karafka/attributes_map.rb +5 -10
  27. data/lib/karafka/base_consumer.rb +19 -30
  28. data/lib/karafka/base_responder.rb +45 -27
  29. data/lib/karafka/cli.rb +2 -2
  30. data/lib/karafka/cli/console.rb +11 -9
  31. data/lib/karafka/cli/flow.rb +9 -7
  32. data/lib/karafka/cli/info.rb +4 -2
  33. data/lib/karafka/cli/install.rb +30 -6
  34. data/lib/karafka/cli/server.rb +11 -6
  35. data/lib/karafka/code_reloader.rb +67 -0
  36. data/lib/karafka/connection/api_adapter.rb +22 -9
  37. data/lib/karafka/connection/batch_delegator.rb +55 -0
  38. data/lib/karafka/connection/builder.rb +5 -3
  39. data/lib/karafka/connection/client.rb +31 -31
  40. data/lib/karafka/connection/listener.rb +26 -15
  41. data/lib/karafka/connection/message_delegator.rb +36 -0
  42. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  43. data/lib/karafka/consumers/callbacks.rb +32 -15
  44. data/lib/karafka/consumers/includer.rb +31 -18
  45. data/lib/karafka/consumers/responders.rb +2 -2
  46. data/lib/karafka/contracts.rb +10 -0
  47. data/lib/karafka/contracts/config.rb +21 -0
  48. data/lib/karafka/contracts/consumer_group.rb +206 -0
  49. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  50. data/lib/karafka/contracts/responder_usage.rb +54 -0
  51. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  52. data/lib/karafka/errors.rb +17 -16
  53. data/lib/karafka/fetcher.rb +28 -30
  54. data/lib/karafka/helpers/class_matcher.rb +12 -2
  55. data/lib/karafka/helpers/config_retriever.rb +1 -1
  56. data/lib/karafka/helpers/inflector.rb +26 -0
  57. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  58. data/lib/karafka/instrumentation/logger.rb +9 -6
  59. data/lib/karafka/instrumentation/monitor.rb +15 -9
  60. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  61. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  62. data/lib/karafka/params/batch_metadata.rb +26 -0
  63. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  64. data/lib/karafka/params/builders/params.rb +38 -0
  65. data/lib/karafka/params/builders/params_batch.rb +25 -0
  66. data/lib/karafka/params/metadata.rb +20 -0
  67. data/lib/karafka/params/params.rb +54 -0
  68. data/lib/karafka/params/params_batch.rb +35 -21
  69. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  70. data/lib/karafka/persistence/client.rb +15 -11
  71. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  72. data/lib/karafka/persistence/topics.rb +48 -0
  73. data/lib/karafka/process.rb +0 -2
  74. data/lib/karafka/responders/builder.rb +1 -1
  75. data/lib/karafka/responders/topic.rb +6 -8
  76. data/lib/karafka/routing/builder.rb +36 -8
  77. data/lib/karafka/routing/consumer_group.rb +1 -1
  78. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  79. data/lib/karafka/routing/proxy.rb +10 -1
  80. data/lib/karafka/routing/topic.rb +5 -3
  81. data/lib/karafka/routing/topic_mapper.rb +16 -18
  82. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  83. data/lib/karafka/serialization/json/serializer.rb +31 -0
  84. data/lib/karafka/server.rb +29 -28
  85. data/lib/karafka/setup/config.rb +67 -37
  86. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  87. data/lib/karafka/setup/dsl.rb +0 -1
  88. data/lib/karafka/status.rb +7 -3
  89. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  90. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  91. data/lib/karafka/templates/karafka.rb.erb +92 -0
  92. data/lib/karafka/version.rb +1 -1
  93. metadata +94 -72
  94. metadata.gz.sig +0 -0
  95. data/.travis.yml +0 -21
  96. data/lib/karafka/callbacks.rb +0 -30
  97. data/lib/karafka/callbacks/config.rb +0 -22
  98. data/lib/karafka/callbacks/dsl.rb +0 -16
  99. data/lib/karafka/connection/delegator.rb +0 -46
  100. data/lib/karafka/instrumentation/listener.rb +0 -112
  101. data/lib/karafka/loader.rb +0 -28
  102. data/lib/karafka/params/dsl.rb +0 -156
  103. data/lib/karafka/parsers/json.rb +0 -38
  104. data/lib/karafka/patches/dry_configurable.rb +0 -35
  105. data/lib/karafka/persistence/topic.rb +0 -29
  106. data/lib/karafka/schemas/config.rb +0 -24
  107. data/lib/karafka/schemas/consumer_group.rb +0 -78
  108. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  109. data/lib/karafka/schemas/responder_usage.rb +0 -39
  110. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  111. data/lib/karafka/setup/configurators/base.rb +0 -29
  112. data/lib/karafka/setup/configurators/params.rb +0 -25
  113. data/lib/karafka/templates/karafka.rb.example +0 -54
@@ -0,0 +1,36 @@
1
# frozen_string_literal: true

module Karafka
  module Connection
    # Routes a single message received from Kafka to the consumer instance that
    # is registered for its topic and partition.
    module MessageDelegator
      class << self
        # Hands one Kafka message over to the matching consumer and runs it.
        # @param group_id [String] id of the consumer group from which the message came
        # @param kafka_message [Kafka::FetchedMessage] raw message fetched from Kafka
        # @note Invoke this once per incoming message to keep a constant flow of delegation
        def call(group_id, kafka_message)
          topic = Persistence::Topics.fetch(group_id, kafka_message.topic)
          consumer = Persistence::Consumers.fetch(topic, kafka_message.partition)

          instrumentation_payload = {
            caller: self,
            consumer: consumer,
            kafka_message: kafka_message
          }

          Karafka.monitor.instrument(
            'connection.message_delegator.call',
            instrumentation_payload
          ) do
            # A single delegated message is always wrapped in a one-element batch,
            # so it does not matter whether the user marked the topic as batch
            # consumed or not.
            consumer.params_batch = Params::Builders::ParamsBatch.from_kafka_messages(
              [kafka_message],
              topic
            )
            consumer.call
          end
        end
      end
    end
  end
end
@@ -0,0 +1,10 @@
1
# frozen_string_literal: true

module Karafka
  module Consumers
    # Adds batch metadata support to consumers whose topics use batch_fetching
    module BatchMetadata
      # Metadata of the currently processed batch; assigned by the fetching
      # layer and readable from within the consumer
      attr_accessor :batch_metadata
    end
  end
end
@@ -16,28 +16,40 @@ module Karafka
16
16
  before_stop
17
17
  ].freeze
18
18
 
19
+ private_constant :TYPES
20
+
19
21
  # Class methods needed to make callbacks run
20
22
  module ClassMethods
21
23
  TYPES.each do |type|
22
- # A Creates a callback wrapper
24
+ # Creates a callback wrapper
25
+ #
23
26
  # @param method_name [Symbol, String] method name or nil if we plan to provide a block
24
27
  # @yield A block with a code that should be executed before scheduling
25
- define_method type do |method_name = nil, &block|
26
- set_callback type, :before, method_name || block
28
+ # @note We don't have to optimize the key fetching here as those are class methods that
29
+ # are evaluated once upon start
30
+ define_method(type) do |method_name = nil, &block|
31
+ key = "consumers.#{Helpers::Inflector.map(to_s)}.#{type}"
32
+ Karafka::App.monitor.register_event(key)
33
+
34
+ Karafka::App.monitor.subscribe(key) do |event|
35
+ context = event[:context]
36
+
37
+ if method_name
38
+ context.send(method_name)
39
+ else
40
+ context.instance_eval(&block)
41
+ end
42
+ end
27
43
  end
28
44
  end
29
45
  end
30
46
 
31
- # @param consumer_class [Class] consumer class that we extend with callbacks
32
- def self.included(consumer_class)
33
- consumer_class.class_eval do
34
- extend ClassMethods
35
- include ActiveSupport::Callbacks
36
-
37
- # The call method is wrapped with a set of callbacks
38
- # We won't run process if any of the callbacks throw abort
39
- # @see http://api.rubyonrails.org/classes/ActiveSupport/Callbacks/ClassMethods.html#method-i-get_callbacks
40
- TYPES.each { |type| define_callbacks type }
47
+ class << self
48
+ # @param consumer_class [Class] consumer class that we extend with callbacks
49
+ def included(consumer_class)
50
+ consumer_class.class_eval do
51
+ extend ClassMethods
52
+ end
41
53
  end
42
54
  end
43
55
 
@@ -45,9 +57,14 @@ module Karafka
45
57
  # method of a proper backend. It is here because it interacts with the default Karafka
46
58
  # call flow and needs to be overwritten to support callbacks
47
59
  def call
48
- run_callbacks :after_fetch do
49
- process
60
+ if self.class.respond_to?(:after_fetch)
61
+ Karafka::App.monitor.instrument(
62
+ "consumers.#{Helpers::Inflector.map(self.class.to_s)}.after_fetch",
63
+ context: self
64
+ )
50
65
  end
66
+
67
+ process
51
68
  end
52
69
  end
53
70
  end
@@ -3,47 +3,60 @@
3
3
  module Karafka
4
4
  # Additional functionalities for consumers
5
5
  module Consumers
6
- # Module used to inject functionalities into a given consumer class, based on the consumer
6
+ # Module used to inject functionalities into a given consumer instance, based on the consumer
7
7
  # topic and its settings
8
8
  # We don't need all the behaviors in all the cases, so it is not worth having everything
9
9
  # in all the cases all the time
10
10
  module Includer
11
11
  class << self
12
- # @param consumer_class [Class] consumer class, that will get some functionalities
13
- # based on the topic under which it operates
14
- def call(consumer_class)
15
- topic = consumer_class.topic
16
-
17
- bind_backend(consumer_class, topic)
18
- bind_params(consumer_class, topic)
19
- bind_responders(consumer_class, topic)
12
+ # @param consumer [Karafka::BaseConsumer] consumer instance, that will get some
13
+ # functionalities based on the topic under which it operates
14
+ def call(consumer)
15
+ topic = consumer.topic
16
+
17
+ bind_backend(consumer, topic)
18
+ bind_params(consumer, topic)
19
+ bind_batch_metadata(consumer, topic)
20
+ bind_responders(consumer, topic)
20
21
  end
21
22
 
22
23
  private
23
24
 
24
25
  # Figures out backend for a given consumer class, based on the topic backend and
25
26
  # includes it into the consumer class
26
- # @param consumer_class [Class] consumer class
27
+ # @param consumer [Karafka::BaseConsumer] consumer instance
27
28
  # @param topic [Karafka::Routing::Topic] topic of a consumer class
28
- def bind_backend(consumer_class, topic)
29
+ def bind_backend(consumer, topic)
29
30
  backend = Kernel.const_get("::Karafka::Backends::#{topic.backend.to_s.capitalize}")
30
- consumer_class.include backend
31
+ consumer.extend(backend)
31
32
  end
32
33
 
33
34
  # Adds a single #params support for non batch processed topics
34
- # @param consumer_class [Class] consumer class
35
+ # @param consumer [Karafka::BaseConsumer] consumer instance
35
36
  # @param topic [Karafka::Routing::Topic] topic of a consumer class
36
- def bind_params(consumer_class, topic)
37
+ def bind_params(consumer, topic)
37
38
  return if topic.batch_consuming
38
- consumer_class.include SingleParams
39
+
40
+ consumer.extend(SingleParams)
41
+ end
42
+
43
+ # Adds an option to work with batch metadata for consumer instances that have
44
+ # batch fetching enabled
45
+ # @param consumer [Karafka::BaseConsumer] consumer instance
46
+ # @param topic [Karafka::Routing::Topic] topic of a consumer class
47
+ def bind_batch_metadata(consumer, topic)
48
+ return unless topic.batch_fetching
49
+
50
+ consumer.extend(BatchMetadata)
39
51
  end
40
52
 
41
53
  # Adds responders support for topics and consumers with responders defined for them
42
- # @param consumer_class [Class] consumer class
54
+ # @param consumer [Karafka::BaseConsumer] consumer instance
43
55
  # @param topic [Karafka::Routing::Topic] topic of a consumer class
44
- def bind_responders(consumer_class, topic)
56
+ def bind_responders(consumer, topic)
45
57
  return unless topic.responder
46
- consumer_class.include Responders
58
+
59
+ consumer.extend(Responders)
47
60
  end
48
61
  end
49
62
  end
@@ -15,8 +15,8 @@ module Karafka
15
15
  data: data
16
16
  ) do
17
17
  # @note we build a new instance of responder each time, as a long-running (persisted)
18
- # consumers can respond multiple times during the lifecycle
19
- topic.responder.new(topic.parser).call(*data)
18
+ # consumers can respond multiple times during the life-cycle
19
+ topic.responder.new.call(*data)
20
20
  end
21
21
  end
22
22
  end
@@ -0,0 +1,10 @@
1
# frozen_string_literal: true

module Karafka
  # Namespace for all the validation contracts used to verify user input
  module Contracts
    # Pattern that consumer group ids and topic names must match: one or more
    # word characters, dashes or dots, anchored to the whole string.
    # @note Lives at the namespace level (not inside a single contract) because
    #   it is shared by a couple of the contracts
    TOPIC_REGEXP = /\A(\w|\-|\.)+\z/.freeze
  end
end
@@ -0,0 +1,21 @@
1
# frozen_string_literal: true

module Karafka
  module Contracts
    # Contract with validation rules for Karafka configuration details.
    # @note Only the root-level settings are verified here. The many other
    #   options inside Karafka::Setup::Config are validated per route
    #   (topic + consumer_group), because they can be overwritten there, so
    #   their validation happens once all the routes are defined and ready
    class Config < Dry::Validation::Contract
      params do
        required(:client_id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
        # Must be a positive integer
        required(:shutdown_timeout) { (int? & gt?(0)) }
        # Mappers only need to be present; their shape is not verified here
        required(:consumer_mapper)
        required(:topic_mapper)

        optional(:backend).filled
      end
    end
  end
end
@@ -0,0 +1,206 @@
1
# frozen_string_literal: true

module Karafka
  module Contracts
    # Contract for single full route (consumer group + topics) validation.
    class ConsumerGroup < Dry::Validation::Contract
      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')

      # Uri schemes accepted in Kafka broker addresses
      # The ||= guards against require_all, which resolves dependencies but
      # sometimes loads files twice
      URI_SCHEMES ||= %w[kafka kafka+ssl plaintext ssl].freeze

      # Sasl scram mechanisms we can authenticate with (the setting may also be nil)
      SASL_SCRAM_MECHANISMS ||= %w[sha256 sha512].freeze

      # Contract used to sub-validate each of the topics definitions
      TOPIC_CONTRACT = ConsumerGroupTopic.new.freeze

      private_constant :TOPIC_CONTRACT

      params do
        required(:id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
        required(:topics).value(:array, :filled?)
        required(:seed_brokers).value(:array, :filled?)
        required(:session_timeout).filled { int? | float? }
        required(:pause_timeout).maybe(%i[integer float]) { filled? > gteq?(0) }
        required(:pause_max_timeout).maybe(%i[integer float]) { filled? > gteq?(0) }
        required(:pause_exponential_backoff).filled(:bool?)
        required(:offset_commit_interval) { int? | float? }
        required(:offset_commit_threshold).filled(:int?)
        required(:offset_retention_time).maybe(:integer)
        required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
        required(:fetcher_max_queue_size).filled(:int?, gt?: 0)
        required(:connect_timeout).filled { (int? | float?) & gt?(0) }
        required(:reconnect_timeout).filled { (int? | float?) & gteq?(0) }
        required(:socket_timeout).filled { (int? | float?) & gt?(0) }
        required(:min_bytes).filled(:int?, gt?: 0)
        required(:max_bytes).filled(:int?, gt?: 0)
        required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
        required(:batch_fetching).filled(:bool?)

        # All of these share the same shape: optional, nil or a string
        %i[
          ssl_ca_cert
          ssl_ca_cert_file_path
          ssl_client_cert
          ssl_client_cert_key
          ssl_client_cert_chain
          ssl_client_cert_key_password
          sasl_gssapi_principal
          sasl_gssapi_keytab
          sasl_plain_authzid
          sasl_plain_username
          sasl_plain_password
          sasl_scram_username
          sasl_scram_password
        ].each do |encryption_attribute|
          optional(encryption_attribute).maybe(:str?)
        end

        optional(:ssl_verify_hostname).maybe(:bool?)
        optional(:ssl_ca_certs_from_system).maybe(:bool?)
        optional(:sasl_over_ssl).maybe(:bool?)
        optional(:sasl_oauth_token_provider).value(:any)

        # Not grouped with the other encryption attributes because it carries
        # an extra inclusion rule
        optional(:sasl_scram_mechanism)
          .maybe(:str?, included_in?: SASL_SCRAM_MECHANISMS)
      end

      # Every seed broker has to be a uri in a Karafka-acceptable format
      rule(:seed_brokers) do
        if value&.is_a?(Array) && !value.all?(&method(:kafka_uri?))
          key.failure(:invalid_broker_schema)
        end
      end

      # Topic names within one consumer group have to be unique
      rule(:topics) do
        if value&.is_a?(Array)
          names = value.map { |topic| topic[:name] }

          key.failure(:topics_names_not_unique) if names.size != names.uniq.size
        end
      end

      # Each topic definition is sub-validated with the topic contract and its
      # errors are re-attached under the topic's index
      rule(:topics) do
        if value&.is_a?(Array)
          value.each_with_index do |topic, index|
            TOPIC_CONTRACT.call(topic).errors.each do |error|
              key([:topics, index, error.path[0]]).failure(error.text)
            end
          end
        end
      end

      # A client cert without its key is unusable
      rule(:ssl_client_cert, :ssl_client_cert_key) do
        if values[:ssl_client_cert] && !values[:ssl_client_cert_key]
          key(:ssl_client_cert_key).failure(:ssl_client_cert_with_ssl_client_cert_key)
        end
      end

      # A client cert key without its cert is unusable
      rule(:ssl_client_cert, :ssl_client_cert_key) do
        if values[:ssl_client_cert_key] && !values[:ssl_client_cert]
          key(:ssl_client_cert).failure(:ssl_client_cert_key_with_ssl_client_cert)
        end
      end

      # A cert chain requires the client cert itself
      rule(:ssl_client_cert, :ssl_client_cert_chain) do
        if values[:ssl_client_cert_chain] && !values[:ssl_client_cert]
          key(:ssl_client_cert).failure(:ssl_client_cert_chain_with_ssl_client_cert)
        end
      end

      # NOTE(review): this rule declares ssl_client_cert_key but, exactly like
      # the rule above, it checks ssl_client_cert presence — only the reported
      # error differs. Verify whether it was meant to require
      # ssl_client_cert_key when a chain is given.
      rule(:ssl_client_cert_chain, :ssl_client_cert_key) do
        if values[:ssl_client_cert_chain] && !values[:ssl_client_cert]
          key(:ssl_client_cert).failure(:ssl_client_cert_chain_with_ssl_client_cert_key)
        end
      end

      # A key password makes no sense without the key it unlocks
      rule(:ssl_client_cert_key_password, :ssl_client_cert_key) do
        if values[:ssl_client_cert_key_password] && !values[:ssl_client_cert_key]
          key(:ssl_client_cert_key).failure(:ssl_client_cert_key_password_with_ssl_client_cert_key)
        end
      end

      rule(:ssl_ca_cert) do
        key.failure(:invalid_certificate) if value && !valid_certificate?(value)
      end

      rule(:ssl_client_cert) do
        key.failure(:invalid_certificate) if value && !valid_certificate?(value)
      end

      # The ca cert path has to exist and point at a parseable certificate
      rule(:ssl_ca_cert_file_path) do
        if value
          if File.exist?(value)
            key.failure(:invalid_certificate_from_path) unless valid_certificate?(File.read(value))
          else
            key.failure(:does_not_exist)
          end
        end
      end

      rule(:ssl_client_cert_key) do
        key.failure(:invalid_private_key) if value && !valid_private_key?(value)
      end

      rule(:ssl_client_cert_chain) do
        key.failure(:invalid_certificate) if value && !valid_certificate?(value)
      end

      # The oauth token provider is duck-typed: it only has to respond to #token
      rule(:sasl_oauth_token_provider) do
        key.failure(:does_not_respond_to_token) if value && !value.respond_to?(:token)
      end

      # Waiting longer than the socket timeout would always trip the socket first
      rule(:max_wait_time, :socket_timeout) do
        wait = values[:max_wait_time]
        socket = values[:socket_timeout]

        if socket.is_a?(Numeric) && wait.is_a?(Numeric) && wait > socket
          key(:max_wait_time).failure(:max_wait_time_limit)
        end
      end

      # With exponential backoff the starting pause cannot exceed the cap
      rule(:pause_timeout, :pause_max_timeout, :pause_exponential_backoff) do
        if values[:pause_exponential_backoff] &&
           values[:pause_timeout].to_i > values[:pause_max_timeout].to_i
          key(:pause_max_timeout).failure(:max_timeout_size_for_exponential)
        end
      end

      private

      # @param candidate [String] potential RSA key value
      # @return [Boolean] is the given string a valid RSA key
      def valid_private_key?(candidate)
        OpenSSL::PKey.read(candidate)
        true
      rescue OpenSSL::PKey::PKeyError
        false
      end

      # @param candidate [String] potential X509 cert value
      # @return [Boolean] is the given string a valid X509 cert
      def valid_certificate?(candidate)
        OpenSSL::X509::Certificate.new(candidate)
        true
      rescue OpenSSL::X509::CertificateError
        false
      end

      # @param candidate [String] potential kafka uri
      # @return [Boolean] true if it is a kafka uri, otherwise false
      def kafka_uri?(candidate)
        uri = URI.parse(candidate)
        URI_SCHEMES.include?(uri.scheme) && uri.port
      rescue URI::InvalidURIError
        false
      end
    end
  end
end