karafka 1.2.2 → 1.4.0.rc1

Files changed (113)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +52 -0
  11. data/.gitignore +1 -0
  12. data/.ruby-version +1 -1
  13. data/CHANGELOG.md +157 -13
  14. data/CODE_OF_CONDUCT.md +1 -1
  15. data/CONTRIBUTING.md +1 -1
  16. data/Gemfile +5 -2
  17. data/Gemfile.lock +95 -79
  18. data/README.md +15 -3
  19. data/bin/karafka +1 -1
  20. data/certs/mensfeld.pem +25 -0
  21. data/config/errors.yml +38 -5
  22. data/docker-compose.yml +17 -0
  23. data/karafka.gemspec +19 -13
  24. data/lib/karafka.rb +10 -16
  25. data/lib/karafka/app.rb +14 -6
  26. data/lib/karafka/attributes_map.rb +13 -18
  27. data/lib/karafka/base_consumer.rb +19 -30
  28. data/lib/karafka/base_responder.rb +51 -29
  29. data/lib/karafka/cli.rb +2 -2
  30. data/lib/karafka/cli/console.rb +11 -9
  31. data/lib/karafka/cli/flow.rb +9 -7
  32. data/lib/karafka/cli/info.rb +4 -2
  33. data/lib/karafka/cli/install.rb +30 -6
  34. data/lib/karafka/cli/server.rb +11 -6
  35. data/lib/karafka/code_reloader.rb +67 -0
  36. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +62 -21
  37. data/lib/karafka/connection/batch_delegator.rb +55 -0
  38. data/lib/karafka/connection/builder.rb +18 -0
  39. data/lib/karafka/connection/client.rb +40 -40
  40. data/lib/karafka/connection/listener.rb +26 -15
  41. data/lib/karafka/connection/message_delegator.rb +36 -0
  42. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  43. data/lib/karafka/consumers/callbacks.rb +32 -15
  44. data/lib/karafka/consumers/includer.rb +31 -18
  45. data/lib/karafka/consumers/responders.rb +2 -2
  46. data/lib/karafka/contracts.rb +10 -0
  47. data/lib/karafka/contracts/config.rb +21 -0
  48. data/lib/karafka/contracts/consumer_group.rb +206 -0
  49. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  50. data/lib/karafka/contracts/responder_usage.rb +54 -0
  51. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  52. data/lib/karafka/errors.rb +17 -19
  53. data/lib/karafka/fetcher.rb +28 -30
  54. data/lib/karafka/helpers/class_matcher.rb +12 -2
  55. data/lib/karafka/helpers/config_retriever.rb +1 -1
  56. data/lib/karafka/helpers/inflector.rb +26 -0
  57. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  58. data/lib/karafka/instrumentation/logger.rb +9 -6
  59. data/lib/karafka/instrumentation/monitor.rb +15 -9
  60. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  61. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  62. data/lib/karafka/params/batch_metadata.rb +26 -0
  63. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  64. data/lib/karafka/params/builders/params.rb +38 -0
  65. data/lib/karafka/params/builders/params_batch.rb +25 -0
  66. data/lib/karafka/params/metadata.rb +20 -0
  67. data/lib/karafka/params/params.rb +50 -0
  68. data/lib/karafka/params/params_batch.rb +35 -21
  69. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  70. data/lib/karafka/persistence/client.rb +15 -11
  71. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  72. data/lib/karafka/persistence/topics.rb +48 -0
  73. data/lib/karafka/process.rb +0 -4
  74. data/lib/karafka/responders/builder.rb +1 -1
  75. data/lib/karafka/responders/topic.rb +6 -8
  76. data/lib/karafka/routing/builder.rb +36 -8
  77. data/lib/karafka/routing/consumer_group.rb +1 -1
  78. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  79. data/lib/karafka/routing/proxy.rb +10 -1
  80. data/lib/karafka/routing/topic.rb +5 -3
  81. data/lib/karafka/routing/topic_mapper.rb +16 -18
  82. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  83. data/lib/karafka/serialization/json/serializer.rb +31 -0
  84. data/lib/karafka/server.rb +34 -49
  85. data/lib/karafka/setup/config.rb +74 -40
  86. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  87. data/lib/karafka/setup/dsl.rb +0 -1
  88. data/lib/karafka/status.rb +7 -3
  89. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  90. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  91. data/lib/karafka/templates/karafka.rb.erb +92 -0
  92. data/lib/karafka/version.rb +1 -1
  93. metadata +97 -73
  94. metadata.gz.sig +4 -0
  95. data/.travis.yml +0 -13
  96. data/lib/karafka/callbacks.rb +0 -30
  97. data/lib/karafka/callbacks/config.rb +0 -22
  98. data/lib/karafka/callbacks/dsl.rb +0 -16
  99. data/lib/karafka/connection/delegator.rb +0 -46
  100. data/lib/karafka/instrumentation/listener.rb +0 -112
  101. data/lib/karafka/loader.rb +0 -28
  102. data/lib/karafka/params/dsl.rb +0 -156
  103. data/lib/karafka/parsers/json.rb +0 -38
  104. data/lib/karafka/patches/dry_configurable.rb +0 -35
  105. data/lib/karafka/persistence/topic.rb +0 -29
  106. data/lib/karafka/schemas/config.rb +0 -24
  107. data/lib/karafka/schemas/consumer_group.rb +0 -77
  108. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  109. data/lib/karafka/schemas/responder_usage.rb +0 -39
  110. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  111. data/lib/karafka/setup/configurators/base.rb +0 -29
  112. data/lib/karafka/setup/configurators/params.rb +0 -25
  113. data/lib/karafka/templates/karafka.rb.example +0 -54
data/lib/karafka/connection/client.rb

@@ -7,7 +7,13 @@ module Karafka
     class Client
       extend Forwardable
 
-      def_delegator :kafka_consumer, :seek
+      %i[
+        seek
+        trigger_heartbeat
+        trigger_heartbeat!
+      ].each do |delegated_method|
+        def_delegator :kafka_consumer, delegated_method
+      end
 
       # Creates a queue consumer client that will pull the data from Kafka
       # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group for which
@@ -20,40 +26,32 @@ module Karafka
       end
 
       # Opens connection, gets messages and calls a block for each of the incoming messages
-      # @yieldparam [Array<Kafka::FetchedMessage>] kafka fetched messages
+      # @yieldparam [Array<Kafka::FetchedMessage>, Symbol] kafka response with an info about
+      #   the type of the fetcher that is being used
       # @note This will yield with raw messages - no preprocessing or reformatting.
       def fetch_loop
-        settings = ConfigAdapter.consuming(consumer_group)
+        settings = ApiAdapter.consumption(consumer_group)
 
         if consumer_group.batch_fetching
-          kafka_consumer.each_batch(*settings) { |batch| yield(batch.messages) }
+          kafka_consumer.each_batch(*settings) { |batch| yield(batch, :batch) }
         else
-          # always yield an array of messages, so we have consistent API (always a batch)
-          kafka_consumer.each_message(*settings) { |message| yield([message]) }
+          kafka_consumer.each_message(*settings) { |message| yield(message, :message) }
        end
-      rescue Kafka::ProcessingError => error
+      # @note We catch only the processing errors as any other are considered critical (exceptions)
+      #   and should require a client restart with a backoff
+      rescue Kafka::ProcessingError => e
         # If there was an error during consumption, we have to log it, pause current partition
         # and process other things
         Karafka.monitor.instrument(
           'connection.client.fetch_loop.error',
           caller: self,
-          error: error.cause
-        )
-        pause(error.topic, error.partition)
-        retry
-        # This is on purpose - see the notes for this method
-        # rubocop:disable RescueException
-      rescue Exception => error
-        # rubocop:enable RescueException
-        Karafka.monitor.instrument(
-          'connection.client.fetch_loop.error',
-          caller: self,
-          error: error
+          error: e.cause
         )
+        pause(e.topic, e.partition)
         retry
       end
 
-      # Gracefuly stops topic consumption
+      # Gracefully stops topic consumption
       # @note Stopping running consumers without a really important reason is not recommended
       #   as until all the consumers are stopped, the server will keep running serving only
       #   part of the messages
@@ -66,18 +64,25 @@ module Karafka
       # @param topic [String] topic that we want to pause
       # @param partition [Integer] number partition that we want to pause
       def pause(topic, partition)
-        settings = ConfigAdapter.pausing(consumer_group)
-        timeout = settings[:timeout]
-        raise(Errors::InvalidPauseTimeout, timeout) unless timeout.positive?
-        kafka_consumer.pause(topic, partition, settings)
+        kafka_consumer.pause(*ApiAdapter.pause(topic, partition, consumer_group))
       end
 
-      # Marks a given message as consumed and commit the offsets
-      # @note In opposite to ruby-kafka, we commit the offset for each manual marking to be sure
-      #   that offset commit happen asap in case of a crash
+      # Marks given message as consumed
       # @param [Karafka::Params::Params] params message that we want to mark as processed
+      # @note This method won't trigger automatic offsets commits, rather relying on the ruby-kafka
+      #   offsets time-interval based committing
       def mark_as_consumed(params)
-        kafka_consumer.mark_message_as_processed(params)
+        kafka_consumer.mark_message_as_processed(
+          *ApiAdapter.mark_message_as_processed(params)
+        )
+      end
+
+      # Marks a given message as consumed and commit the offsets in a blocking way
+      # @param [Karafka::Params::Params] params message that we want to mark as processed
+      # @note This method commits the offset for each manual marking to be sure
+      #   that offset commit happen asap in case of a crash
+      def mark_as_consumed!(params)
+        mark_as_consumed(params)
         # Trigger an immediate, blocking offset commit in order to minimize the risk of crashing
         # before the automatic triggers have kicked in.
         kafka_consumer.commit_offsets
@@ -90,28 +95,23 @@ module Karafka
       # @return [Kafka::Consumer] returns a ready to consume Kafka consumer
       #   that is set up to consume from topics of a given consumer group
       def kafka_consumer
-        @kafka_consumer ||= kafka.consumer(
-          *ConfigAdapter.consumer(consumer_group)
+        # @note We don't cache the connection internally because we cache kafka_consumer that uses
+        #   kafka client object instance
+        @kafka_consumer ||= Builder.call(consumer_group).consumer(
+          *ApiAdapter.consumer(consumer_group)
         ).tap do |consumer|
           consumer_group.topics.each do |topic|
-            consumer.subscribe(*ConfigAdapter.subscription(topic))
+            consumer.subscribe(*ApiAdapter.subscribe(topic))
          end
        end
      rescue Kafka::ConnectionError
-        # If we would not wait it would totally spam log file with failed
+        # If we would not wait it will spam log file with failed
        # attempts if Kafka is down
        sleep(consumer_group.reconnect_timeout)
-        # We don't log and just reraise - this will be logged
+        # We don't log and just re-raise - this will be logged
        # down the road
        raise
      end
-
-      # @return [Kafka] returns a Kafka
-      # @note We don't cache it internally because we cache kafka_consumer that uses kafka
-      #   object instance
-      def kafka
-        Kafka.new(*ConfigAdapter.client(consumer_group))
-      end
     end
   end
 end
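
The fetch loop no longer normalizes everything into an array of messages; it yields the raw ruby-kafka object plus a Symbol tag, and callers dispatch on that tag. A minimal sketch of the new yield contract, where `client` stands for a Karafka::Connection::Client instance and the handler bodies are illustrative:

```ruby
client.fetch_loop do |raw_data, type|
  case type
  when :batch
    # raw_data is now the whole Kafka::FetchedBatch, not just its messages
    raw_data.messages.each { |message| puts message.value }
  when :message
    # raw_data is a single Kafka::FetchedMessage
    puts raw_data.value
  end
end
```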
data/lib/karafka/connection/listener.rb

@@ -16,9 +16,10 @@ module Karafka
 
       # Runs prefetch callbacks and executes the main listener fetch loop
       def call
-        Karafka::Callbacks.before_fetch_loop(
-          @consumer_group,
-          client
+        Karafka.monitor.instrument(
+          'connection.listener.before_fetch_loop',
+          consumer_group: @consumer_group,
+          client: client
         )
         fetch_loop
       end
@@ -26,28 +27,38 @@ module Karafka
       private
 
       # Opens connection, gets messages and calls a block for each of the incoming messages
-      # @yieldparam [String] consumer group id
-      # @yieldparam [Array<Kafka::FetchedMessage>] kafka fetched messages
-      # @note This will yield with a raw message - no preprocessing or reformatting
       # @note We catch all the errors here, so they don't affect other listeners (or this one)
       #   so we will be able to listen and consume other incoming messages.
       #   Since it is run inside Karafka::Connection::ActorCluster - catching all the exceptions
-      #   won't crash the whole cluster. Here we mostly focus on catchin the exceptions related to
+      #   won't crash the whole cluster. Here we mostly focus on catching the exceptions related to
       #   Kafka connections / Internet connection issues / Etc. Business logic problems should not
       #   propagate this far
       def fetch_loop
-        client.fetch_loop do |raw_messages|
-          # @note What happens here is a delegation of processing to a proper processor based
-          #   on the incoming messages characteristics
-          Karafka::Connection::Delegator.call(@consumer_group.id, raw_messages)
+        # @note What happens here is a delegation of processing to a proper processor based
+        #   on the incoming messages characteristics
+        client.fetch_loop do |raw_data, type|
+          Karafka.monitor.instrument('connection.listener.fetch_loop')
+
+          case type
+          when :message
+            MessageDelegator.call(@consumer_group.id, raw_data)
+          when :batch
+            BatchDelegator.call(@consumer_group.id, raw_data)
+          end
        end
      # This is on purpose - see the notes for this method
-      # rubocop:disable RescueException
+      # rubocop:disable Lint/RescueException
      rescue Exception => e
        Karafka.monitor.instrument('connection.listener.fetch_loop.error', caller: self, error: e)
-        # rubocop:enable RescueException
-        @client&.stop
-        retry if @client
+        # rubocop:enable Lint/RescueException
+        # We can stop client without a problem, as it will reinitialize itself when running the
+        # `fetch_loop` again
+        @client.stop
+        # We need to clear the consumers cache for current connection when fatal error happens and
+        # we reset the connection. Otherwise for consumers with manual offset management, the
+        # persistence might have stored some data that would be reprocessed
+        Karafka::Persistence::Consumers.clear
+        sleep(@consumer_group.reconnect_timeout) && retry
      end
 
      # @return [Karafka::Connection::Client] wrapped kafka consuming client for a given topic
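
With Karafka::Callbacks gone, the pre-fetch hook is exposed through instrumentation instead. A minimal sketch of subscribing to the new event, with an illustrative logging body:

```ruby
Karafka.monitor.subscribe('connection.listener.before_fetch_loop') do |event|
  group = event[:consumer_group]
  Karafka.logger.info("Starting fetch loop for consumer group: #{group.id}")
end
```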
data/lib/karafka/connection/message_delegator.rb (new file)

@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Connection
+    # Class that delegates processing of a single received message for which we listen to
+    # a proper processor
+    module MessageDelegator
+      class << self
+        # Delegates message (does something with it)
+        # It will either schedule or run a proper processor action for the incoming message
+        # @param group_id [String] group_id of a group from which a given message came
+        # @param kafka_message [<Kafka::FetchedMessage>] raw message from kafka
+        # @note This should be looped to obtain a constant delegating of new messages
+        def call(group_id, kafka_message)
+          topic = Persistence::Topics.fetch(group_id, kafka_message.topic)
+          consumer = Persistence::Consumers.fetch(topic, kafka_message.partition)
+
+          Karafka.monitor.instrument(
+            'connection.message_delegator.call',
+            caller: self,
+            consumer: consumer,
+            kafka_message: kafka_message
+          ) do
+            # @note We always get a single message within single delegator, which means that
+            #   we don't care if user marked it as a batch consumed or not.
+            consumer.params_batch = Params::Builders::ParamsBatch.from_kafka_messages(
+              [kafka_message],
+              topic
+            )
+            consumer.call
+          end
+        end
+      end
+    end
+  end
+end
data/lib/karafka/consumers/batch_metadata.rb (new file)

@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Consumers
+    # Brings the batch metadata into consumers that support batch_fetching
+    module BatchMetadata
+      attr_accessor :batch_metadata
+    end
+  end
+end
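
Consumers for topics with batch_fetching enabled get extended with this module, so batch-level details become available next to params_batch. A minimal usage sketch; `offset_lag` is an assumed attribute mirroring ruby-kafka's batch data, and `handle` is a hypothetical domain method:

```ruby
class EventsConsumer < ApplicationConsumer
  def consume
    # offset_lag is assumed to mirror Kafka::FetchedBatch; not confirmed by this diff
    Karafka.logger.info("Got #{params_batch.count} messages, lag: #{batch_metadata.offset_lag}")

    params_batch.each { |params| handle(params) }
  end

  private

  # Hypothetical per-message handler, for illustration only
  def handle(params)
    Karafka.logger.debug(params.inspect)
  end
end
```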
data/lib/karafka/consumers/callbacks.rb

@@ -16,28 +16,40 @@ module Karafka
         before_stop
       ].freeze
 
+      private_constant :TYPES
+
       # Class methods needed to make callbacks run
       module ClassMethods
         TYPES.each do |type|
-          # A Creates a callback wrapper
+          # Creates a callback wrapper
+          #
           # @param method_name [Symbol, String] method name or nil if we plan to provide a block
           # @yield A block with a code that should be executed before scheduling
-          define_method type do |method_name = nil, &block|
-            set_callback type, :before, method_name ? method_name : block
+          # @note We don't have to optimize the key fetching here as those are class methods that
+          #   are evaluated once upon start
+          define_method(type) do |method_name = nil, &block|
+            key = "consumers.#{Helpers::Inflector.map(to_s)}.#{type}"
+            Karafka::App.monitor.register_event(key)
+
+            Karafka::App.monitor.subscribe(key) do |event|
+              context = event[:context]
+
+              if method_name
+                context.send(method_name)
+              else
+                context.instance_eval(&block)
+              end
+            end
           end
         end
       end
 
-      # @param consumer_class [Class] consumer class that we extend with callbacks
-      def self.included(consumer_class)
-        consumer_class.class_eval do
-          extend ClassMethods
-          include ActiveSupport::Callbacks
-
-          # The call method is wrapped with a set of callbacks
-          # We won't run process if any of the callbacks throw abort
-          # @see http://api.rubyonrails.org/classes/ActiveSupport/Callbacks/ClassMethods.html#method-i-get_callbacks
-          TYPES.each { |type| define_callbacks type }
+      class << self
+        # @param consumer_class [Class] consumer class that we extend with callbacks
+        def included(consumer_class)
+          consumer_class.class_eval do
+            extend ClassMethods
+          end
         end
       end
 
@@ -45,9 +57,14 @@ module Karafka
       #   method of a proper backend. It is here because it interacts with the default Karafka
       #   call flow and needs to be overwritten to support callbacks
       def call
-        run_callbacks :after_fetch do
-          process
+        if self.class.respond_to?(:after_fetch)
+          Karafka::App.monitor.instrument(
+            "consumers.#{Helpers::Inflector.map(self.class.to_s)}.after_fetch",
+            context: self
+          )
         end
+
+        process
       end
     end
   end
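
Declaring callbacks looks the same from the outside, but each declaration now registers and subscribes a `consumers.<consumer_name>.<type>` monitor event instead of using ActiveSupport::Callbacks. A minimal sketch, with an illustrative logging body:

```ruby
class UsersConsumer < ApplicationConsumer
  include Karafka::Consumers::Callbacks

  # The block is instance_eval'ed on the consumer (the event's :context)
  after_fetch do
    Karafka.logger.debug("After fetch in #{self.class}")
  end

  def consume; end
end
```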
data/lib/karafka/consumers/includer.rb

@@ -3,47 +3,60 @@
 module Karafka
   # Additional functionalities for consumers
   module Consumers
-    # Module used to inject functionalities into a given consumer class, based on the consumer
+    # Module used to inject functionalities into a given consumer instance, based on the consumer
     #   topic and its settings
     # We don't need all the behaviors in all the cases, so it is not worth having everything
     #   in all the cases all the time
     module Includer
       class << self
-        # @param consumer_class [Class] consumer class, that will get some functionalities
-        #   based on the topic under which it operates
-        def call(consumer_class)
-          topic = consumer_class.topic
-
-          bind_backend(consumer_class, topic)
-          bind_params(consumer_class, topic)
-          bind_responders(consumer_class, topic)
+        # @param consumer [Karafka::BaseConsumer] consumer instance, that will get some
+        #   functionalities based on the topic under which it operates
+        def call(consumer)
+          topic = consumer.topic
+
+          bind_backend(consumer, topic)
+          bind_params(consumer, topic)
+          bind_batch_metadata(consumer, topic)
+          bind_responders(consumer, topic)
         end
 
         private
 
         # Figures out backend for a given consumer class, based on the topic backend and
         #   includes it into the consumer class
-        # @param consumer_class [Class] consumer class
+        # @param consumer [Karafka::BaseConsumer] consumer instance
         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-        def bind_backend(consumer_class, topic)
+        def bind_backend(consumer, topic)
           backend = Kernel.const_get("::Karafka::Backends::#{topic.backend.to_s.capitalize}")
-          consumer_class.include backend
+          consumer.extend(backend)
         end
 
         # Adds a single #params support for non batch processed topics
-        # @param consumer_class [Class] consumer class
+        # @param consumer [Karafka::BaseConsumer] consumer instance
         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-        def bind_params(consumer_class, topic)
+        def bind_params(consumer, topic)
           return if topic.batch_consuming
-          consumer_class.include SingleParams
+
+          consumer.extend(SingleParams)
+        end
+
+        # Adds an option to work with batch metadata for consumer instances that have
+        #   batch fetching enabled
+        # @param consumer [Karafka::BaseConsumer] consumer instance
+        # @param topic [Karafka::Routing::Topic] topic of a consumer class
+        def bind_batch_metadata(consumer, topic)
+          return unless topic.batch_fetching
+
+          consumer.extend(BatchMetadata)
         end
 
         # Adds responders support for topics and consumers with responders defined for them
-        # @param consumer_class [Class] consumer class
+        # @param consumer [Karafka::BaseConsumer] consumer instance
         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-        def bind_responders(consumer_class, topic)
+        def bind_responders(consumer, topic)
           return unless topic.responder
-          consumer_class.include Responders
+
+          consumer.extend(Responders)
         end
       end
     end
data/lib/karafka/consumers/responders.rb

@@ -15,8 +15,8 @@ module Karafka
           data: data
         ) do
           # @note we build a new instance of responder each time, as a long-running (persisted)
-          #   consumers can respond multiple times during the lifecycle
-          topic.responder.new(topic.parser).call(*data)
+          #   consumers can respond multiple times during the life-cycle
+          topic.responder.new.call(*data)
         end
       end
     end
data/lib/karafka/contracts.rb (new file)

@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Namespace for all the validation contracts that we use to check input
+  module Contracts
+    # Regexp for validating format of groups and topics
+    # @note It is not nested inside of the contracts, as it is used by couple of them
+    TOPIC_REGEXP = /\A(\w|\-|\.)+\z/.freeze
+  end
+end
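
The pattern allows word characters, dashes, and dots only, so typical topic names pass while anything with spaces or slashes fails:

```ruby
Karafka::Contracts::TOPIC_REGEXP.match?('orders.created-v2') # => true
Karafka::Contracts::TOPIC_REGEXP.match?('orders created')    # => false
```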
data/lib/karafka/contracts/config.rb (new file)

@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Contracts
+    # Contract with validation rules for Karafka configuration details
+    # @note There are many more configuration options inside of the
+    #   Karafka::Setup::Config model, but we don't validate them here as they are
+    #   validated per each route (topic + consumer_group) because they can be overwritten,
+    #   so we validate all of that once all the routes are defined and ready
+    class Config < Dry::Validation::Contract
+      params do
+        required(:client_id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
+        required(:shutdown_timeout) { (int? & gt?(0)) }
+        required(:consumer_mapper)
+        required(:topic_mapper)
+
+        optional(:backend).filled
+      end
+    end
+  end
+end
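
These contracts replace the dropped lib/karafka/schemas validations. A minimal sketch of exercising the contract directly through the standard dry-validation 1.x result API; the sample values, including the default mapper classes, are illustrative:

```ruby
result = Karafka::Contracts::Config.new.call(
  client_id: 'example_app',
  shutdown_timeout: 60,
  consumer_mapper: Karafka::Routing::ConsumerMapper.new,
  topic_mapper: Karafka::Routing::TopicMapper.new
)

result.success?    # => true
result.errors.to_h # => {} when the config is valid
```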