karafka 1.2.11
- checksums.yaml +7 -0
- data/.coditsu.yml +3 -0
- data/.console_irbrc +13 -0
- data/.gitignore +68 -0
- data/.rspec +1 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/.travis.yml +49 -0
- data/CHANGELOG.md +458 -0
- data/CODE_OF_CONDUCT.md +46 -0
- data/CONTRIBUTING.md +41 -0
- data/Gemfile +15 -0
- data/Gemfile.lock +126 -0
- data/MIT-LICENCE +18 -0
- data/README.md +102 -0
- data/bin/karafka +19 -0
- data/config/errors.yml +6 -0
- data/karafka.gemspec +42 -0
- data/lib/karafka.rb +79 -0
- data/lib/karafka/app.rb +45 -0
- data/lib/karafka/attributes_map.rb +69 -0
- data/lib/karafka/backends/inline.rb +16 -0
- data/lib/karafka/base_consumer.rb +68 -0
- data/lib/karafka/base_responder.rb +208 -0
- data/lib/karafka/callbacks.rb +30 -0
- data/lib/karafka/callbacks/config.rb +22 -0
- data/lib/karafka/callbacks/dsl.rb +16 -0
- data/lib/karafka/cli.rb +54 -0
- data/lib/karafka/cli/base.rb +78 -0
- data/lib/karafka/cli/console.rb +29 -0
- data/lib/karafka/cli/flow.rb +46 -0
- data/lib/karafka/cli/info.rb +29 -0
- data/lib/karafka/cli/install.rb +42 -0
- data/lib/karafka/cli/server.rb +66 -0
- data/lib/karafka/connection/api_adapter.rb +148 -0
- data/lib/karafka/connection/builder.rb +16 -0
- data/lib/karafka/connection/client.rb +107 -0
- data/lib/karafka/connection/delegator.rb +46 -0
- data/lib/karafka/connection/listener.rb +60 -0
- data/lib/karafka/consumers/callbacks.rb +54 -0
- data/lib/karafka/consumers/includer.rb +51 -0
- data/lib/karafka/consumers/responders.rb +24 -0
- data/lib/karafka/consumers/single_params.rb +15 -0
- data/lib/karafka/errors.rb +50 -0
- data/lib/karafka/fetcher.rb +44 -0
- data/lib/karafka/helpers/class_matcher.rb +78 -0
- data/lib/karafka/helpers/config_retriever.rb +46 -0
- data/lib/karafka/helpers/multi_delegator.rb +33 -0
- data/lib/karafka/instrumentation/listener.rb +112 -0
- data/lib/karafka/instrumentation/logger.rb +55 -0
- data/lib/karafka/instrumentation/monitor.rb +64 -0
- data/lib/karafka/loader.rb +28 -0
- data/lib/karafka/params/dsl.rb +158 -0
- data/lib/karafka/params/params_batch.rb +46 -0
- data/lib/karafka/parsers/json.rb +38 -0
- data/lib/karafka/patches/dry_configurable.rb +33 -0
- data/lib/karafka/patches/ruby_kafka.rb +34 -0
- data/lib/karafka/persistence/client.rb +25 -0
- data/lib/karafka/persistence/consumer.rb +38 -0
- data/lib/karafka/persistence/topic.rb +29 -0
- data/lib/karafka/process.rb +62 -0
- data/lib/karafka/responders/builder.rb +36 -0
- data/lib/karafka/responders/topic.rb +57 -0
- data/lib/karafka/routing/builder.rb +61 -0
- data/lib/karafka/routing/consumer_group.rb +61 -0
- data/lib/karafka/routing/consumer_mapper.rb +34 -0
- data/lib/karafka/routing/proxy.rb +37 -0
- data/lib/karafka/routing/router.rb +29 -0
- data/lib/karafka/routing/topic.rb +60 -0
- data/lib/karafka/routing/topic_mapper.rb +55 -0
- data/lib/karafka/schemas/config.rb +24 -0
- data/lib/karafka/schemas/consumer_group.rb +78 -0
- data/lib/karafka/schemas/consumer_group_topic.rb +18 -0
- data/lib/karafka/schemas/responder_usage.rb +39 -0
- data/lib/karafka/schemas/server_cli_options.rb +43 -0
- data/lib/karafka/server.rb +85 -0
- data/lib/karafka/setup/config.rb +193 -0
- data/lib/karafka/setup/configurators/base.rb +29 -0
- data/lib/karafka/setup/configurators/params.rb +25 -0
- data/lib/karafka/setup/configurators/water_drop.rb +32 -0
- data/lib/karafka/setup/dsl.rb +22 -0
- data/lib/karafka/status.rb +25 -0
- data/lib/karafka/templates/application_consumer.rb.example +6 -0
- data/lib/karafka/templates/application_responder.rb.example +11 -0
- data/lib/karafka/templates/karafka.rb.example +54 -0
- data/lib/karafka/version.rb +7 -0
- data/log/.gitkeep +0 -0
- metadata +303 -0

data/lib/karafka/routing/consumer_group.rb
@@ -0,0 +1,61 @@
# frozen_string_literal: true

module Karafka
  module Routing
    # Object used to describe a single consumer group that is going to subscribe to
    # given topics
    # It is a part of Karafka's DSL
    class ConsumerGroup
      extend Helpers::ConfigRetriever

      attr_reader :topics
      attr_reader :id
      attr_reader :name

      # @param name [String, Symbol] raw name of this consumer group. Raw means that it does not
      #   yet have an application client_id namespace; this will be added here by default.
      #   We add it to make multi-system development easier for people who don't use
      #   Kafka and don't understand the concept of consumer groups.
      def initialize(name)
        @name = name
        @id = Karafka::App.config.consumer_mapper.call(name)
        @topics = []
      end

      # @return [Boolean] true if this consumer group should be active in our current process
      def active?
        Karafka::Server.consumer_groups.include?(name)
      end

      # Builds a topic representation inside of a current consumer group route
      # @param name [String, Symbol] name of topic to which we want to subscribe
      # @yield Evaluates a given block in a topic context
      # @return [Karafka::Routing::Topic] newly built topic instance
      def topic=(name, &block)
        topic = Topic.new(name, self)
        @topics << Proxy.new(topic, &block).target.tap(&:build)
        @topics.last
      end

      Karafka::AttributesMap.consumer_group.each do |attribute|
        config_retriever_for(attribute)
      end

      # Hashed version of consumer group that can be used for validation purposes
      # @return [Hash] hash with consumer group attributes including serialized to hash
      #   topics inside of it.
      def to_h
        result = {
          topics: topics.map(&:to_h),
          id: id
        }

        Karafka::AttributesMap.consumer_group.each do |attribute|
          result[attribute] = public_send(attribute)
        end

        result
      end
    end
  end
end
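
For orientation, this is roughly how such a group is declared through the routing DSL in karafka.rb. A minimal sketch, assuming a configured app; VisitsConsumer and the group/topic names are illustrative, not part of this diff:

    # Sketch only: assumes a configured Karafka app; VisitsConsumer is hypothetical.
    Karafka::App.consumer_groups.draw do
      consumer_group :events do
        # each topic block ends up in ConsumerGroup#topic= through the routing Proxy
        topic :visits do
          consumer VisitsConsumer
        end
      end
    end

    Karafka::App.consumer_groups.first.id
    # => e.g. "my_app_events" (client_id prefix added by the configured consumer mapper)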

data/lib/karafka/routing/consumer_mapper.rb
@@ -0,0 +1,34 @@
# frozen_string_literal: true

module Karafka
  module Routing
    # Default consumer mapper that builds consumer ids based on app id and consumer group name
    # A different mapper can be used in case of preexisting consumer names or for applying
    # other naming conventions not compatible with Karafka's client_id + consumer name concept
    #
    # @example Mapper for using consumer groups without a client_id prefix
    #   module MyMapper
    #     def self.call(raw_consumer_group_name)
    #       raw_consumer_group_name
    #     end
    #   end
    #
    # @example Mapper for replacing "_" with "." in consumer group names
    #   module MyMapper
    #     def self.call(raw_consumer_group_name)
    #       [
    #         Dry::Inflector.new.underscore(Karafka::App.config.client_id.to_s),
    #         raw_consumer_group_name
    #       ].join('_').gsub('_', '.')
    #     end
    #   end
    module ConsumerMapper
      # @param raw_consumer_group_name [String, Symbol] string or symbolized consumer group name
      # @return [String] remapped final consumer group name
      def self.call(raw_consumer_group_name)
        client_name = Dry::Inflector.new.underscore(Karafka::App.config.client_id.to_s)
        "#{client_name}_#{raw_consumer_group_name}"
      end
    end
  end
end
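
When a pre-existing naming scheme has to be preserved, a custom mapper can be assigned during setup, as the examples above suggest. A minimal sketch (StaticGroupMapper is an illustrative name; config.consumer_mapper is the setting read by ConsumerGroup#initialize):

    # Sketch: any object responding to #call(raw_consumer_group_name) can be used.
    module StaticGroupMapper
      # keep existing group names as-is instead of prefixing them with the client_id
      def self.call(raw_consumer_group_name)
        raw_consumer_group_name.to_s
      end
    end

    class KarafkaApp < Karafka::App
      setup do |config|
        config.client_id = 'my_app'
        config.consumer_mapper = StaticGroupMapper
      end
    end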

data/lib/karafka/routing/proxy.rb
@@ -0,0 +1,37 @@
# frozen_string_literal: true

module Karafka
  module Routing
    # Proxy is used as a translation layer in between the DSL and raw topic and consumer group
    # objects.
    class Proxy
      attr_reader :target

      # We should proxy only methods that don't end with ?, ! or = as we want to have a regular DSL
      IGNORED_POSTFIXES = %w[
        ?
        =
        !
      ].freeze

      # @param target [Object] target object to which we proxy any DSL call
      # @yield Evaluates block in the proxy context
      def initialize(target, &block)
        @target = target
        instance_eval(&block)
      end

      # Translates the "="-less routing DSL into attribute assignments on the target
      def method_missing(method_name, *arguments, &block)
        return super unless respond_to_missing?(method_name)
        @target.public_send(:"#{method_name}=", *arguments, &block)
      end

      # Tells whether or not a given element exists on the target
      def respond_to_missing?(method_name, include_private = false)
        return false if IGNORED_POSTFIXES.any? { |postfix| method_name.to_s.end_with?(postfix) }
        @target.respond_to?(:"#{method_name}=", include_private) || super
      end
    end
  end
end
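
To make the translation concrete, a hedged sketch of what happens when a routing block is evaluated (assumes a configured app; VisitsConsumer is hypothetical):

    group = Karafka::Routing::ConsumerGroup.new(:events)
    topic = Karafka::Routing::Topic.new(:visits, group)

    # The proxy instance_evals the block; each "setter-less" call is forwarded
    # to the corresponding writer on the target.
    Karafka::Routing::Proxy.new(topic) do
      consumer VisitsConsumer      # => topic.consumer = VisitsConsumer
      start_from_beginning true    # => topic.start_from_beginning = true
    end.target                     # => the configured Topic instance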

data/lib/karafka/routing/router.rb
@@ -0,0 +1,29 @@
# frozen_string_literal: true

module Karafka
  # Namespace for all elements related to requests routing
  module Routing
    # Karafka framework Router for routing incoming messages to proper consumers
    # @note Since Kafka does not provide namespaces or modules for topics, they all have a "flat"
    #   structure, so all the routes are stored in a single-level array
    module Router
      # Finds a proper topic based on the full topic id
      # @param topic_id [String] proper topic id (already mapped, etc.) for which we want to find
      #   the routing topic
      # @return [Karafka::Routing::Topic] proper route details
      # @raise [Karafka::Errors::NonMatchingRouteError] raised if topic name does not match
      #   any route defined by user using routes.draw
      def find(topic_id)
        App.consumer_groups.each do |consumer_group|
          consumer_group.topics.each do |topic|
            return topic if topic.id == topic_id
          end
        end

        raise(Errors::NonMatchingRouteError, topic_id)
      end

      module_function :find
    end
  end
end
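
Router.find expects the already-mapped topic id, i.e. the consumer group id plus the topic name as built by Routing::Topic. A short usage sketch (the ids are illustrative):

    topic = Karafka::Routing::Router.find('my_app_events_visits')
    topic.consumer # => consumer class registered for that route

    # An id that matches no drawn route raises:
    Karafka::Routing::Router.find('unknown_topic')
    # => raises Karafka::Errors::NonMatchingRouteError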

data/lib/karafka/routing/topic.rb
@@ -0,0 +1,60 @@
# frozen_string_literal: true

module Karafka
  module Routing
    # Topic stores all the details on how we should interact with Kafka for a given topic
    # It belongs to a consumer group as from 0.6 all the topics can work in the same consumer group
    # It is a part of Karafka's DSL
    class Topic
      extend Helpers::ConfigRetriever

      attr_reader :id, :consumer_group
      attr_accessor :consumer

      # @param name [String, Symbol] name of a topic on which we want to listen
      # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
      def initialize(name, consumer_group)
        @name = name.to_s
        @consumer_group = consumer_group
        @attributes = {}
        # @note We use identifier related to the consumer group that owns a topic, because from
        #   Karafka 0.6 we can handle multiple Kafka instances with the same process and we can
        #   have the same topic name across multiple Kafkas
        @id = "#{consumer_group.id}_#{@name}"
      end

      # Initializes default values for all the options that support defaults if their values are
      # not yet specified. This needs to be done (cannot be lazy loaded on first use) because
      # everywhere except the Karafka server command, those would not be initialized on time - for
      # example for Sidekiq
      def build
        Karafka::AttributesMap.topic.each { |attr| send(attr) }
        consumer&.topic = self
        self
      end

      # @return [Class, nil] Class (not an instance) of a responder that should respond from
      #   consumer back to Kafka (useful for piping dataflows)
      def responder
        @responder ||= Karafka::Responders::Builder.new(consumer).build
      end

      Karafka::AttributesMap.topic.each do |attribute|
        config_retriever_for(attribute)
      end

      # @return [Hash] hash with all the topic attributes
      # @note This is being used when we validate the consumer_group and its topics
      def to_h
        map = Karafka::AttributesMap.topic.map do |attribute|
          [attribute, public_send(attribute)]
        end

        Hash[map].merge!(
          id: id,
          consumer: consumer
        )
      end
    end
  end
end
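
A small sketch of the id composition and the validation hash, assuming a configured app with client_id 'my_app' (all names illustrative):

    group = Karafka::Routing::ConsumerGroup.new(:events)
    topic = Karafka::Routing::Topic.new(:visits, group)

    topic.id   # => "my_app_events_visits" - consumer group id plus topic name
    topic.to_h # => topic attributes (backend, parser, ...) merged with id and consumer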

data/lib/karafka/routing/topic_mapper.rb
@@ -0,0 +1,55 @@
# frozen_string_literal: true

module Karafka
  module Routing
    # Default topic mapper that does not remap things
    # Mapper can be used for Kafka providers that require namespaced topic names. Instead of being
    # provider dependent, we can then define mapper and use internally "pure" topic names in
    # routes and responders
    #
    # @example Mapper for mapping prefixed topics
    #   module MyMapper
    #     PREFIX = "my_user_name."
    #
    #     def incoming(topic)
    #       topic.to_s.gsub(PREFIX, '')
    #     end
    #
    #     def outgoing(topic)
    #       "#{PREFIX}#{topic}"
    #     end
    #   end
    #
    # @example Mapper for replacing "." with "_" in topic names
    #   module MyMapper
    #     PREFIX = "my_user_name."
    #
    #     def incoming(topic)
    #       topic.to_s.gsub('.', '_')
    #     end
    #
    #     def outgoing(topic)
    #       topic.to_s.gsub('_', '.')
    #     end
    #   end
    module TopicMapper
      class << self
        # @param topic [String, Symbol] topic
        # @return [String, Symbol] same topic as on input
        # @example
        #   incoming('topic_name') #=> 'topic_name'
        def incoming(topic)
          topic
        end

        # @param topic [String, Symbol] topic
        # @return [String, Symbol] same topic as on input
        # @example
        #   outgoing('topic_name') #=> 'topic_name'
        def outgoing(topic)
          topic
        end
      end
    end
  end
end
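
A custom topic mapper is installed the same way as the consumer mapper; a sketch below for a provider that enforces a namespace prefix (PrefixedTopicMapper and the prefix are illustrative; config.topic_mapper is the validated setting):

    module PrefixedTopicMapper
      PREFIX = 'my_namespace.'

      # strip the provider prefix so routes and consumers see "pure" names
      def self.incoming(topic)
        topic.to_s.sub(PREFIX, '')
      end

      # add it back for everything produced by responders
      def self.outgoing(topic)
        "#{PREFIX}#{topic}"
      end
    end

    class KarafkaApp < Karafka::App
      setup do |config|
        config.topic_mapper = PrefixedTopicMapper
      end
    end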

data/lib/karafka/schemas/config.rb
@@ -0,0 +1,24 @@
# frozen_string_literal: true

module Karafka
  # Namespace for all the validation schemas that we use to check input
  module Schemas
    # Regexp for validating format of groups and topics
    TOPIC_REGEXP = /\A(\w|\-|\.)+\z/

    # Schema with validation rules for Karafka configuration details
    # @note There are many more configuration options inside of the
    #   Karafka::Setup::Config model, but we don't validate them here as they are
    #   validated per each route (topic + consumer_group) because they can be overwritten,
    #   so we validate all of that once all the routes are defined and ready
    Config = Dry::Validation.Schema do
      required(:client_id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
      required(:shutdown_timeout) { none? | (int? & gteq?(0)) }
      required(:consumer_mapper)
      required(:topic_mapper)
      required(:params_base_class).filled

      optional(:backend).filled
    end
  end
end
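
These are plain dry-validation (0.x) schemas, so they can be exercised directly; a hedged sketch of validating a config hash by hand (values are illustrative):

    result = Karafka::Schemas::Config.call(
      client_id: 'my_app',
      shutdown_timeout: 60,
      consumer_mapper: Karafka::Routing::ConsumerMapper,
      topic_mapper: Karafka::Routing::TopicMapper,
      params_base_class: Hash
    )

    result.success? # => true
    result.errors   # => {} when everything validates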

data/lib/karafka/schemas/consumer_group.rb
@@ -0,0 +1,78 @@
# frozen_string_literal: true

module Karafka
  module Schemas
    # Schema for single full route (consumer group + topics) validation.
    ConsumerGroup = Dry::Validation.Schema do
      # Valid URI schemes of a Kafka broker URL
      # The ||= is due to the behavior of require_all that resolves dependencies
      # but sometimes loads things twice
      URI_SCHEMES ||= %w[kafka kafka+ssl plaintext ssl].freeze

      # Available SASL SCRAM mechanisms of authentication (plus nil)
      SASL_SCRAM_MECHANISMS ||= %w[sha256 sha512].freeze

      configure do
        config.messages_file = File.join(
          Karafka.gem_root, 'config', 'errors.yml'
        )

        # URI validator to check if uri is in a Karafka acceptable format
        # @param uri [String] uri we want to validate
        # @return [Boolean] true if it is a valid uri, otherwise false
        def broker_schema?(uri)
          uri = URI.parse(uri)
          URI_SCHEMES.include?(uri.scheme) && uri.port
        rescue URI::InvalidURIError
          false
        end
      end

      required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
      required(:seed_brokers).filled { each(:broker_schema?) }
      required(:session_timeout).filled { int? | float? }
      required(:pause_timeout) { none? | ((int? | float?) & gteq?(0)) }
      required(:offset_commit_interval) { int? | float? }
      required(:offset_commit_threshold).filled(:int?)
      required(:offset_retention_time) { none?.not > int? }
      required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
      required(:fetcher_max_queue_size).filled(:int?, gt?: 0)
      required(:connect_timeout).filled { (int? | float?) & gt?(0) }
      required(:socket_timeout).filled { (int? | float?) & gt?(0) }
      required(:min_bytes).filled(:int?, gt?: 0)
      required(:max_bytes).filled(:int?, gt?: 0)
      required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
      required(:batch_fetching).filled(:bool?)
      required(:topics).filled { each { schema(ConsumerGroupTopic) } }

      # Max wait time cannot exceed socket_timeout - wouldn't make sense
      rule(
        max_wait_time_limit: %i[max_wait_time socket_timeout]
      ) do |max_wait_time, socket_timeout|
        socket_timeout.int? > max_wait_time.lteq?(value(:socket_timeout))
      end

      %i[
        ssl_ca_cert
        ssl_ca_cert_file_path
        ssl_client_cert
        ssl_client_cert_key
        sasl_gssapi_principal
        sasl_gssapi_keytab
        sasl_plain_authzid
        sasl_plain_username
        sasl_plain_password
        sasl_scram_username
        sasl_scram_password
      ].each do |encryption_attribute|
        optional(encryption_attribute).maybe(:str?)
      end

      optional(:ssl_ca_certs_from_system).maybe(:bool?)

      # It's not with other encryptions as it has some more rules
      optional(:sasl_scram_mechanism)
        .maybe(:str?, included_in?: Karafka::Schemas::SASL_SCRAM_MECHANISMS)
    end
  end
end
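
The routing builder feeds each drawn group (serialized via ConsumerGroup#to_h shown earlier) through this schema, so a bad option surfaces at boot rather than at consumption time. A sketch of running the check by hand:

    group_hash = Karafka::App.consumer_groups.first.to_h
    result = Karafka::Schemas::ConsumerGroup.call(group_hash)

    result.success? # => true for a well-formed route
    result.errors   # => e.g. errors keyed under :seed_brokers when a broker URI lacks a port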

data/lib/karafka/schemas/consumer_group_topic.rb
@@ -0,0 +1,18 @@
# frozen_string_literal: true

module Karafka
  module Schemas
    # Consumer group topic validation rules
    ConsumerGroupTopic = Dry::Validation.Schema do
      required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
      required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
      required(:backend).filled(included_in?: %i[inline sidekiq])
      required(:consumer).filled
      required(:parser).filled
      required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
      required(:start_from_beginning).filled(:bool?)
      required(:batch_consuming).filled(:bool?)
      required(:persistent).filled(:bool?)
    end
  end
end

data/lib/karafka/schemas/responder_usage.rb
@@ -0,0 +1,39 @@
# frozen_string_literal: true

module Karafka
  module Schemas
    # Validator to check responder topic usage
    ResponderUsageTopic = Dry::Validation.Schema do
      required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
      required(:required).filled(:bool?)
      required(:multiple_usage).filled(:bool?)
      required(:usage_count).filled(:int?, gteq?: 0)
      required(:registered).filled(eql?: true)
      required(:async).filled(:bool?)

      rule(
        required_usage: %i[required usage_count]
      ) do |required, usage_count|
        required.true? > usage_count.gteq?(1)
      end

      rule(
        multiple_usage_permission: %i[multiple_usage usage_count]
      ) do |multiple_usage, usage_count|
        usage_count.gt?(1) > multiple_usage.true?
      end

      rule(
        multiple_usage_block: %i[multiple_usage usage_count]
      ) do |multiple_usage, usage_count|
        multiple_usage.false? > usage_count.lteq?(1)
      end
    end

    # Validator to check that everything in a responder flow matches responder rules
    ResponderUsage = Dry::Validation.Schema do
      required(:used_topics) { filled? > each { schema(ResponderUsageTopic) } }
      required(:registered_topics) { filled? > each { schema(ResponderUsageTopic) } }
    end
  end
end
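
The three rules read as implications (dry-validation's `>`): a required topic must be used at least once, more than one delivery requires multiple_usage, and a non-multiple topic may be delivered at most once. A sketch of a passing input (the hash roughly mirrors what a responder builds for validation; the topic name is illustrative):

    topic_usage = {
      name: 'users.created',
      required: true,
      multiple_usage: false,
      usage_count: 1,
      registered: true,
      async: false
    }

    Karafka::Schemas::ResponderUsage.call(
      registered_topics: [topic_usage],
      used_topics: [topic_usage]
    ).success? # => true (delivering twice without multiple_usage would make it false)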