karafka 1.0.1 → 1.4.14

Sign up to get free protection for your applications and to get access to all the features.
Files changed (121) hide show
  1. checksums.yaml +5 -5
  2. checksums.yaml.gz.sig +0 -0
  3. data/.coditsu/ci.yml +3 -0
  4. data/.console_irbrc +1 -3
  5. data/.diffend.yml +3 -0
  6. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  7. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  8. data/.github/workflows/ci.yml +76 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/CHANGELOG.md +286 -16
  12. data/CODE_OF_CONDUCT.md +1 -1
  13. data/CONTRIBUTING.md +6 -7
  14. data/Gemfile +5 -2
  15. data/Gemfile.lock +100 -103
  16. data/README.md +54 -74
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +26 -0
  19. data/config/errors.yml +40 -5
  20. data/docker-compose.yml +17 -0
  21. data/karafka.gemspec +31 -15
  22. data/lib/karafka/app.rb +19 -18
  23. data/lib/karafka/assignment_strategies/round_robin.rb +13 -0
  24. data/lib/karafka/attributes_map.rb +17 -21
  25. data/lib/karafka/backends/inline.rb +2 -3
  26. data/lib/karafka/base_consumer.rb +57 -0
  27. data/lib/karafka/base_responder.rb +77 -31
  28. data/lib/karafka/cli/base.rb +4 -4
  29. data/lib/karafka/cli/console.rb +11 -9
  30. data/lib/karafka/cli/flow.rb +9 -7
  31. data/lib/karafka/cli/info.rb +5 -4
  32. data/lib/karafka/cli/install.rb +32 -8
  33. data/lib/karafka/cli/missingno.rb +19 -0
  34. data/lib/karafka/cli/server.rb +18 -16
  35. data/lib/karafka/cli.rb +10 -2
  36. data/lib/karafka/code_reloader.rb +67 -0
  37. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +71 -22
  38. data/lib/karafka/connection/batch_delegator.rb +55 -0
  39. data/lib/karafka/connection/builder.rb +23 -0
  40. data/lib/karafka/connection/client.rb +120 -0
  41. data/lib/karafka/connection/listener.rb +39 -26
  42. data/lib/karafka/connection/message_delegator.rb +36 -0
  43. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  44. data/lib/karafka/consumers/callbacks.rb +71 -0
  45. data/lib/karafka/consumers/includer.rb +64 -0
  46. data/lib/karafka/consumers/responders.rb +24 -0
  47. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  48. data/lib/karafka/contracts/config.rb +21 -0
  49. data/lib/karafka/contracts/consumer_group.rb +211 -0
  50. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  51. data/lib/karafka/contracts/responder_usage.rb +54 -0
  52. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  53. data/lib/karafka/contracts.rb +10 -0
  54. data/lib/karafka/errors.rb +27 -12
  55. data/lib/karafka/fetcher.rb +15 -15
  56. data/lib/karafka/helpers/class_matcher.rb +20 -10
  57. data/lib/karafka/helpers/config_retriever.rb +3 -3
  58. data/lib/karafka/helpers/inflector.rb +26 -0
  59. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  60. data/lib/karafka/instrumentation/logger.rb +54 -0
  61. data/lib/karafka/instrumentation/monitor.rb +70 -0
  62. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  63. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  64. data/lib/karafka/params/batch_metadata.rb +26 -0
  65. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  66. data/lib/karafka/params/builders/params.rb +38 -0
  67. data/lib/karafka/params/builders/params_batch.rb +25 -0
  68. data/lib/karafka/params/metadata.rb +20 -0
  69. data/lib/karafka/params/params.rb +35 -107
  70. data/lib/karafka/params/params_batch.rb +38 -19
  71. data/lib/karafka/patches/ruby_kafka.rb +47 -0
  72. data/lib/karafka/persistence/client.rb +29 -0
  73. data/lib/karafka/persistence/consumers.rb +45 -0
  74. data/lib/karafka/persistence/topics.rb +48 -0
  75. data/lib/karafka/process.rb +6 -9
  76. data/lib/karafka/responders/builder.rb +15 -14
  77. data/lib/karafka/responders/topic.rb +14 -9
  78. data/lib/karafka/routing/builder.rb +38 -9
  79. data/lib/karafka/routing/consumer_group.rb +6 -4
  80. data/lib/karafka/routing/consumer_mapper.rb +10 -9
  81. data/lib/karafka/routing/proxy.rb +10 -1
  82. data/lib/karafka/routing/router.rb +1 -1
  83. data/lib/karafka/routing/topic.rb +8 -12
  84. data/lib/karafka/routing/topic_mapper.rb +16 -18
  85. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  86. data/lib/karafka/serialization/json/serializer.rb +31 -0
  87. data/lib/karafka/server.rb +50 -39
  88. data/lib/karafka/setup/config.rb +138 -91
  89. data/lib/karafka/setup/configurators/water_drop.rb +21 -16
  90. data/lib/karafka/setup/dsl.rb +21 -0
  91. data/lib/karafka/status.rb +7 -3
  92. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.erb} +2 -2
  93. data/lib/karafka/templates/karafka.rb.erb +92 -0
  94. data/lib/karafka/version.rb +1 -1
  95. data/lib/karafka.rb +19 -15
  96. data.tar.gz.sig +0 -0
  97. metadata +119 -81
  98. metadata.gz.sig +5 -0
  99. data/.github/ISSUE_TEMPLATE.md +0 -2
  100. data/.travis.yml +0 -17
  101. data/Rakefile +0 -7
  102. data/lib/karafka/base_controller.rb +0 -117
  103. data/lib/karafka/connection/messages_consumer.rb +0 -106
  104. data/lib/karafka/connection/messages_processor.rb +0 -61
  105. data/lib/karafka/controllers/includer.rb +0 -51
  106. data/lib/karafka/controllers/responders.rb +0 -19
  107. data/lib/karafka/loader.rb +0 -29
  108. data/lib/karafka/logger.rb +0 -53
  109. data/lib/karafka/monitor.rb +0 -98
  110. data/lib/karafka/parsers/json.rb +0 -38
  111. data/lib/karafka/patches/dry_configurable.rb +0 -33
  112. data/lib/karafka/persistence/controller.rb +0 -23
  113. data/lib/karafka/schemas/config.rb +0 -31
  114. data/lib/karafka/schemas/consumer_group.rb +0 -64
  115. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  116. data/lib/karafka/schemas/responder_usage.rb +0 -38
  117. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  118. data/lib/karafka/setup/configurators/base.rb +0 -35
  119. data/lib/karafka/setup/configurators/celluloid.rb +0 -19
  120. data/lib/karafka/templates/karafka.rb.example +0 -41
  121. /data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
@@ -3,38 +3,57 @@
3
3
  module Karafka
4
4
  module Params
5
5
  # Params batch represents a set of messages received from Kafka.
6
- # @note Params internally are lazy loaded before first use. That way we can skip parsing
7
- # process if we have after_received that rejects some incoming messages without using params
8
- # It can be also used when handling really heavy data (in terms of parsing).
6
+ # @note Params internally are lazy loaded before first use. That way we can skip
7
+ # deserialization process if we have after_fetch that rejects some incoming messages
8
+ # without using params. It can also be used when handling really heavy data.
9
9
  class ParamsBatch
10
10
  include Enumerable
11
11
 
12
- # Builds up a params batch based on raw kafka messages
13
- # @param messages_batch [Array<Kafka::FetchedMessage>] messages batch
14
- # @param topic_parser [Class] topic parser for unparsing messages values
15
- def initialize(messages_batch, topic_parser)
16
- @params_batch = messages_batch.map do |message|
17
- Karafka::Params::Params.build(message, topic_parser)
18
- end
12
+ # @param params_array [Array<Karafka::Params::Params>] array with karafka params
13
+ # @return [Karafka::Params::ParamsBatch] lazy evaluated params batch object
14
+ def initialize(params_array)
15
+ @params_array = params_array
19
16
  end
20
17
 
21
- # @yieldparam [Karafka::Params::Params] each parsed and loaded params instance
22
- # @note Invocation of this method will cause loading and parsing each param after another.
23
- # If you want to get access without parsing, please access params_batch directly
18
+ # @yieldparam [Karafka::Params::Params] each params instance
19
+ # @note Invocation of this method will not cause loading and deserializing each param after
20
+ # another.
24
21
  def each
25
- @params_batch.each { |param| yield(param.retrieve!) }
22
+ @params_array.each { |param| yield(param) }
26
23
  end
27
24
 
28
25
  # @return [Array<Karafka::Params::Params>] returns all the params in a loaded state, so they
29
26
  # can be used for batch insert, etc. Without invoking all, up until first use, they won't
30
- # be parsed
31
- def parsed
32
- each(&:itself)
27
+ # be deserialized
28
+ def deserialize!
29
+ each(&:payload)
33
30
  end
34
31
 
35
- # @return [Array<Karafka::Params::Params>] pure array with params (not parsed)
32
+ # @return [Array<Object>] array with deserialized payloads. This method can be useful when
33
+ # we don't care about metadata and just want to extract all the data payloads from the
34
+ # batch
35
+ def payloads
36
+ map(&:payload)
37
+ end
38
+
39
+ # @return [Karafka::Params::Params] first element
40
+ def first
41
+ @params_array.first
42
+ end
43
+
44
+ # @return [Karafka::Params::Params] last element
45
+ def last
46
+ @params_array.last
47
+ end
48
+
49
+ # @return [Integer] number of messages in the batch
50
+ def size
51
+ @params_array.size
52
+ end
53
+
54
+ # @return [Array<Karafka::Params::Params>] pure array with params
36
55
  def to_a
37
- @params_batch
56
+ @params_array
38
57
  end
39
58
  end
40
59
  end
@@ -0,0 +1,47 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ # Namespace for various other libs patches
5
+ module Patches
6
+ # Patches for Ruby Kafka gem
7
+ module RubyKafka
8
+ # This patch allows us to inject business logic in between fetches and before the consumer
9
+ # stop, so we can perform stop commit or anything else that we need since
10
+ # ruby-kafka fetch loop does not allow that directly
11
+ # We don't want to use the poll ruby-kafka api as it brings many more problems that we would
12
+ # have to take care of. That way, nothing like that ever happens but we get the control
13
+ # over the stopping process that we need (since we're the ones that initiate it for each
14
+ # thread)
15
+ def consumer_loop
16
+ super do
17
+ consumers = Karafka::Persistence::Consumers
18
+ .current
19
+ .values
20
+ .flat_map(&:values)
21
+ .select { |consumer| consumer.class.respond_to?(:after_fetch) }
22
+
23
+ if Karafka::App.stopping?
24
+ publish_event(consumers, 'before_stop')
25
+ Karafka::Persistence::Client.read.stop
26
+ else
27
+ publish_event(consumers, 'before_poll')
28
+ yield
29
+ publish_event(consumers, 'after_poll')
30
+ end
31
+ end
32
+ end
33
+
34
+ private
35
+
36
+ # Notifies consumers about particular events happening
37
+ # @param consumers [Array<Object>] all consumers that want to be notified about an event
38
+ # @param event_name [String] name of the event that happened
39
+ def publish_event(consumers, event_name)
40
+ consumers.each do |consumer|
41
+ key = "consumers.#{Helpers::Inflector.map(consumer.class.to_s)}.#{event_name}"
42
+ Karafka::App.monitor.instrument(key, context: consumer)
43
+ end
44
+ end
45
+ end
46
+ end
47
+ end
@@ -0,0 +1,29 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Persistence
5
+ # Persistence layer to store current thread messages consumer client for further use
6
+ class Client
7
+ # Thread.current key under which we store current thread messages consumer client
8
+ PERSISTENCE_SCOPE = :client
9
+
10
+ private_constant :PERSISTENCE_SCOPE
11
+
12
+ class << self
13
+ # @param client [Karafka::Connection::Client] messages consumer client of
14
+ # a current thread
15
+ # @return [Karafka::Connection::Client] persisted messages consumer client
16
+ def write(client)
17
+ Thread.current[PERSISTENCE_SCOPE] = client
18
+ end
19
+
20
+ # @return [Karafka::Connection::Client] persisted messages consumer client
21
+ # @raise [Karafka::Errors::MissingClientError] raised when no thread messages consumer
22
+ # client but we try to use it anyway
23
+ def read
24
+ Thread.current[PERSISTENCE_SCOPE] || raise(Errors::MissingClientError)
25
+ end
26
+ end
27
+ end
28
+ end
29
+ end
@@ -0,0 +1,45 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ # Module used to provide a persistent cache layer for Karafka components that need to be
5
+ # shared inside of a same thread
6
+ module Persistence
7
+ # Module used to provide a persistent cache across batch requests for a given
8
+ # topic and partition to store some additional details when the persistent mode
9
+ # for a given topic is turned on
10
+ class Consumers
11
+ # Thread.current scope under which we store consumers data
12
+ PERSISTENCE_SCOPE = :consumers
13
+
14
+ private_constant :PERSISTENCE_SCOPE
15
+
16
+ class << self
17
+ # @return [Hash] current thread's persistence scope hash with all the consumers
18
+ def current
19
+ Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |hash, key|
20
+ hash[key] = Concurrent::Hash.new
21
+ end
22
+ end
23
+
24
+ # Used to build (if block given) and/or fetch a current consumer instance that will be
25
+ # used to process messages from a given topic and partition
26
+ # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
27
+ # @param partition [Integer] number of partition for which we want to cache
28
+ # @return [Karafka::BaseConsumer] base consumer descendant
29
+ def fetch(topic, partition)
30
+ current[topic][partition] ||= topic.consumer.new(topic)
31
+ end
32
+
33
+ # Removes all persisted instances of consumers from the consumer cache
34
+ # @note This is used to reload consumer instances when code reloading in development mode
35
+ # is present. This should not be used in production.
36
+ def clear
37
+ Thread
38
+ .list
39
+ .select { |thread| thread[PERSISTENCE_SCOPE] }
40
+ .each { |thread| thread[PERSISTENCE_SCOPE].clear }
41
+ end
42
+ end
43
+ end
44
+ end
45
+ end
@@ -0,0 +1,48 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Persistence
5
+ # Local cache for routing topics
6
+ # We use it in order not to build string instances and remap incoming topic upon each
7
+ # message / message batches received
8
+ class Topics
9
+ # Thread.current scope under which we store topics data
10
+ PERSISTENCE_SCOPE = :topics
11
+
12
+ private_constant :PERSISTENCE_SCOPE
13
+
14
+ class << self
15
+ # @return [Concurrent::Hash] hash with all the topics from given groups
16
+ def current
17
+ Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |hash, key|
18
+ hash[key] = Concurrent::Hash.new
19
+ end
20
+ end
21
+
22
+ # @param group_id [String] group id for which we fetch a topic representation
23
+ # @param raw_topic_name [String] raw topic name (before remapping) for which we fetch a
24
+ # topic representation
25
+ # @return [Karafka::Routing::Topics] remapped topic representation that can be used further
26
+ # on when working with given parameters
27
+ def fetch(group_id, raw_topic_name)
28
+ current[group_id][raw_topic_name] ||= begin
29
+ # We map from incoming topic name, as it might be namespaced, etc.
30
+ # @see topic_mapper internal docs
31
+ mapped_topic_name = Karafka::App.config.topic_mapper.incoming(raw_topic_name)
32
+ Routing::Router.find("#{group_id}_#{mapped_topic_name}")
33
+ end
34
+ end
35
+
36
+ # Clears the whole topics cache for all the threads
37
+ # This is used for in-development code reloading as we need to get rid of all the
38
+ # preloaded and cached instances of objects to make it work
39
+ def clear
40
+ Thread
41
+ .list
42
+ .select { |thread| thread[PERSISTENCE_SCOPE] }
43
+ .each { |thread| thread[PERSISTENCE_SCOPE].clear }
44
+ end
45
+ end
46
+ end
47
+ end
48
+ end
@@ -1,14 +1,14 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Karafka
4
- # Class used to catch signals from ruby Signal class in order to manage Karafka shutdown
4
+ # Class used to catch signals from ruby Signal class in order to manage Karafka stop
5
5
  # @note There might be only one process - this class is a singleton
6
6
  class Process
7
- include Singleton
8
-
9
7
  # Signal types that we handle
10
8
  HANDLED_SIGNALS = %i[
11
- SIGINT SIGQUIT SIGTERM
9
+ SIGINT
10
+ SIGQUIT
11
+ SIGTERM
12
12
  ].freeze
13
13
 
14
14
  HANDLED_SIGNALS.each do |signal|
@@ -27,16 +27,13 @@ module Karafka
27
27
 
28
28
  # Creates an instance of process and creates empty hash for callbacks
29
29
  def initialize
30
- @callbacks = {}
31
- HANDLED_SIGNALS.each { |signal| @callbacks[signal] = [] }
30
+ @callbacks = Hash.new { |hsh, key| hsh[key] = [] }
32
31
  end
33
32
 
34
33
  # Method catches all HANDLED_SIGNALS and performs appropriate callbacks (if defined)
35
34
  # @note If there are no callbacks, this method will just ignore a given signal that was sent
36
- # @yield [Block] block of code that we want to execute and supervise
37
35
  def supervise
38
36
  HANDLED_SIGNALS.each { |signal| trap_signal(signal) }
39
- yield
40
37
  end
41
38
 
42
39
  private
@@ -56,7 +53,7 @@ module Karafka
56
53
  # we have to spin up a new thread to do this
57
54
  def notice_signal(signal)
58
55
  Thread.new do
59
- Karafka.monitor.notice(self.class, signal: signal)
56
+ Karafka.monitor.instrument('process.notice_signal', caller: self, signal: signal)
60
57
  end
61
58
  end
62
59
  end
@@ -3,30 +3,31 @@
3
3
  module Karafka
4
4
  # Responders namespace encapsulates all the internal responder implementation parts
5
5
  module Responders
6
- # Responders builder is used to find (based on the controller class name) a responder that
7
- # match the controller. This is used when user does not provide a responder inside routing
8
- # but he still names responder with the same convention (and namespaces) as controller
6
+ # Responders builder is used for finding (based on the consumer class name) a responder
7
+ # that matches the consumer. We use it when the user does not provide a responder inside routing,
8
+ # but he still names responder with the same convention (and namespaces) as consumer
9
+ #
9
10
  # @example Matching responder exists
10
- # Karafka::Responder::Builder(NewEventsController).build #=> NewEventsResponder
11
+ # Karafka::Responder::Builder(NewEventsConsumer).build #=> NewEventsResponder
11
12
  # @example Matching responder does not exist
12
- # Karafka::Responder::Builder(NewBuildsController).build #=> nil
13
+ # Karafka::Responder::Builder(NewBuildsConsumer).build #=> nil
13
14
  class Builder
14
- # @param controller_class [Karafka::BaseController, nil] descendant of
15
- # Karafka::BaseController
16
- # @example Tries to find a responder that matches a given controller. If nothing found,
17
- # will return nil (nil is accepted, because it means that a given controller don't
15
+ # @param consumer_class [Karafka::BaseConsumer, nil] descendant of
16
+ # Karafka::BaseConsumer
17
+ # @example Tries to find a responder that matches a given consumer. If nothing found,
18
+ # will return nil (nil is accepted, because it means that a given consumer doesn't
18
19
  # pipe stuff further on)
19
- def initialize(controller_class)
20
- @controller_class = controller_class
20
+ def initialize(consumer_class)
21
+ @consumer_class = consumer_class
21
22
  end
22
23
 
23
- # Tries to figure out a responder based on a controller class name
24
+ # Tries to figure out a responder based on a consumer class name
24
25
  # @return [Class] Responder class (not an instance)
25
26
  # @return [nil] or nil if there's no matching responding class
26
27
  def build
27
28
  Helpers::ClassMatcher.new(
28
- @controller_class,
29
- from: 'Controller',
29
+ @consumer_class,
30
+ from: 'Consumer',
30
31
  to: 'Responder'
31
32
  ).match
32
33
  end
@@ -7,8 +7,6 @@ module Karafka
7
7
  # Karafka::Responders::Topic.new(:topic_name, {}) #=> #<Karafka::Responders::Topic...
8
8
  # @example Define optional topic
9
9
  # Karafka::Responders::Topic.new(:topic_name, required: false)
10
- # @example Define topic that on which we want to respond multiple times
11
- # Karafka::Responders::Topic.new(:topic_name, multiple_usage: true)
12
10
  class Topic
13
11
  # Name of the topic on which we want to respond
14
12
  attr_reader :name
@@ -26,23 +24,30 @@ module Karafka
26
24
  @options.key?(:required) ? @options[:required] : true
27
25
  end
28
26
 
29
- # @return [Boolean] do we expect to use it multiple times in a single respond flow
30
- def multiple_usage?
31
- @options[:multiple_usage] || false
32
- end
33
-
34
27
  # @return [Boolean] was usage of this topic registered or not
35
28
  def registered?
36
29
  @options[:registered] == true
37
30
  end
38
31
 
32
+ # @return [Class] Class to use to serialize messages for this topic
33
+ def serializer
34
+ @options[:serializer]
35
+ end
36
+
37
+ # @return [Boolean] do we want to use async producer. Defaults to false as the sync producer
38
+ # is safer and introduces less problems
39
+ def async?
40
+ @options.key?(:async) ? @options[:async] : false
41
+ end
42
+
39
43
  # @return [Hash] hash with this topic attributes and options
40
44
  def to_h
41
45
  {
42
46
  name: name,
43
- multiple_usage: multiple_usage?,
44
47
  required: required?,
45
- registered: registered?
48
+ registered: registered?,
49
+ serializer: serializer,
50
+ async: async?
46
51
  }
47
52
  end
48
53
  end
@@ -6,29 +6,43 @@ module Karafka
6
6
  # @example Build a simple (most common) route
7
7
  # consumers do
8
8
  # topic :new_videos do
9
- # controller NewVideosController
9
+ # consumer NewVideosConsumer
10
10
  # end
11
11
  # end
12
- class Builder < Array
13
- include Singleton
12
+ class Builder < Concurrent::Array
13
+ # Consumer group consistency checking contract
14
+ CONTRACT = Karafka::Contracts::ConsumerGroup.new.freeze
15
+
16
+ private_constant :CONTRACT
17
+
18
+ def initialize
19
+ super
20
+ @draws = Concurrent::Array.new
21
+ end
14
22
 
15
23
  # Used to draw routes for Karafka
24
+ # @param block [Proc] block we will evaluate within the builder context
25
+ # @yield Evaluates provided block in a builder context so we can describe routes
26
+ # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
27
+ # doesn't match with the config contract
16
28
  # @note After it is done drawing it will store and validate all the routes to make sure that
17
29
  # they are correct and that there are no topic/group duplications (this is forbidden)
18
- # @yield Evaluates provided block in a builder context so we can describe routes
19
30
  # @example
20
31
  # draw do
21
32
  # topic :xyz do
22
33
  # end
23
34
  # end
24
35
  def draw(&block)
36
+ @draws << block
37
+
25
38
  instance_eval(&block)
26
39
 
27
40
  each do |consumer_group|
28
41
  hashed_group = consumer_group.to_h
29
- validation_result = Karafka::Schemas::ConsumerGroup.call(hashed_group)
30
- return if validation_result.success?
31
- raise Errors::InvalidConfiguration, validation_result.errors
42
+ validation_result = CONTRACT.call(hashed_group)
43
+ next if validation_result.success?
44
+
45
+ raise Errors::InvalidConfigurationError, validation_result.errors.to_h
32
46
  end
33
47
  end
34
48
 
@@ -39,18 +53,33 @@ module Karafka
39
53
  select(&:active?)
40
54
  end
41
55
 
56
+ # Clears the builder and the draws memory
57
+ def clear
58
+ @draws.clear
59
+ super
60
+ end
61
+
62
+ # Redraws all the routes for the in-process code reloading.
63
+ # @note This won't allow registration of new topics without process restart but will trigger
64
+ # cache invalidation so all the classes, etc are re-fetched after code reload
65
+ def reload
66
+ draws = @draws.dup
67
+ clear
68
+ draws.each { |block| draw(&block) }
69
+ end
70
+
42
71
  private
43
72
 
44
73
  # Builds and saves given consumer group
45
74
  # @param group_id [String, Symbol] name for consumer group
46
- # @yield Evaluates a given block in a consumer group context
75
+ # @param block [Proc] proc that should be executed in the proxy context
47
76
  def consumer_group(group_id, &block)
48
77
  consumer_group = ConsumerGroup.new(group_id.to_s)
49
78
  self << Proxy.new(consumer_group, &block).target
50
79
  end
51
80
 
52
81
  # @param topic_name [String, Symbol] name of a topic from which we want to consumer
53
- # @yield Evaluates a given block in a topic context
82
+ # @param block [Proc] proc we want to evaluate in the topic context
54
83
  def topic(topic_name, &block)
55
84
  consumer_group(topic_name) do
56
85
  topic(topic_name, &block).tap(&:build)
@@ -8,9 +8,11 @@ module Karafka
8
8
  class ConsumerGroup
9
9
  extend Helpers::ConfigRetriever
10
10
 
11
- attr_reader :topics
12
- attr_reader :id
13
- attr_reader :name
11
+ attr_reader(
12
+ :topics,
13
+ :id,
14
+ :name
15
+ )
14
16
 
15
17
  # @param name [String, Symbol] raw name of this consumer group. Raw means, that it does not
16
18
  # yet have an application client_id namespace, this will be added here by default.
@@ -29,7 +31,7 @@ module Karafka
29
31
 
30
32
  # Builds a topic representation inside of a current consumer group route
31
33
  # @param name [String, Symbol] name of topic to which we want to subscribe
32
- # @yield Evaluates a given block in a topic context
34
+ # @param block [Proc] block that we want to evaluate in the topic context
33
35
  # @return [Karafka::Routing::Topic] newly built topic instance
34
36
  def topic=(name, &block)
35
37
  topic = Topic.new(name, self)
@@ -4,29 +4,30 @@ module Karafka
4
4
  module Routing
5
5
  # Default consumer mapper that builds consumer ids based on app id and consumer group name
6
6
  # Different mapper can be used in case of preexisting consumer names or for applying
7
- # other naming conventions not compatible wiih Karafkas client_id + consumer name concept
7
+ # other naming conventions not compatible with Karafka client_id + consumer name concept
8
8
  #
9
9
  # @example Mapper for using consumer groups without a client_id prefix
10
- # module MyMapper
11
- # def self.call(raw_consumer_group_name)
10
+ # class MyMapper
11
+ # def call(raw_consumer_group_name)
12
12
  # raw_consumer_group_name
13
13
  # end
14
14
  # end
15
15
  #
16
16
  # @example Mapper for replacing "_" with "." in topic names
17
- # module MyMapper
18
- # def self.call(raw_consumer_group_name)
17
+ # class MyMapper
18
+ # def call(raw_consumer_group_name)
19
19
  # [
20
- # Karafka::App.config.client_id.to_s.underscope,
20
+ # Karafka::Helpers::Inflector.map(Karafka::App.config.client_id.to_s),
21
21
  # raw_consumer_group_name
22
22
  # ].join('_').gsub('_', '.')
23
23
  # end
24
24
  # end
25
- module ConsumerMapper
25
+ class ConsumerMapper
26
26
  # @param raw_consumer_group_name [String, Symbol] string or symbolized consumer group name
27
27
  # @return [String] remapped final consumer group name
28
- def self.call(raw_consumer_group_name)
29
- "#{Karafka::App.config.client_id.to_s.underscore}_#{raw_consumer_group_name}"
28
+ def call(raw_consumer_group_name)
29
+ client_name = Karafka::Helpers::Inflector.map(Karafka::App.config.client_id.to_s)
30
+ "#{client_name}_#{raw_consumer_group_name}"
30
31
  end
31
32
  end
32
33
  end
@@ -14,22 +14,31 @@ module Karafka
14
14
  !
15
15
  ].freeze
16
16
 
17
+ private_constant :IGNORED_POSTFIXES
18
+
17
19
  # @param target [Object] target object to which we proxy any DSL call
18
- # @yield Evaluates block in the proxy context
20
+ # @param block [Proc] block that we want to evaluate in the proxy context
19
21
  def initialize(target, &block)
20
22
  @target = target
21
23
  instance_eval(&block)
22
24
  end
23
25
 
24
26
  # Translates the no "=" DSL of routing into elements assignments on target
27
+ # @param method_name [Symbol] name of the missing method
28
+ # @param arguments [Array] array with it's arguments
29
+ # @param block [Proc] block provided to the method
25
30
  def method_missing(method_name, *arguments, &block)
26
31
  return super unless respond_to_missing?(method_name)
32
+
27
33
  @target.public_send(:"#{method_name}=", *arguments, &block)
28
34
  end
29
35
 
30
36
  # Tells whether or not a given element exists on the target
37
+ # @param method_name [Symbol] name of the missing method
38
+ # @param include_private [Boolean] should we include private in the check as well
31
39
  def respond_to_missing?(method_name, include_private = false)
32
40
  return false if IGNORED_POSTFIXES.any? { |postfix| method_name.to_s.end_with?(postfix) }
41
+
33
42
  @target.respond_to?(:"#{method_name}=", include_private) || super
34
43
  end
35
44
  end
@@ -3,7 +3,7 @@
3
3
  module Karafka
4
4
  # Namespace for all elements related to requests routing
5
5
  module Routing
6
- # Karafka framework Router for routing incoming messages to proper controllers
6
+ # Karafka framework Router for routing incoming messages to proper consumers
7
7
  # @note Since Kafka does not provide namespaces or modules for topics, they all have "flat"
8
8
  # structure so all the routes are being stored in a single level array
9
9
  module Router
@@ -7,9 +7,12 @@ module Karafka
7
7
  # It is a part of Karafka's DSL
8
8
  class Topic
9
9
  extend Helpers::ConfigRetriever
10
+ extend Forwardable
10
11
 
11
12
  attr_reader :id, :consumer_group
12
- attr_accessor :controller
13
+ attr_accessor :consumer
14
+
15
+ def_delegator :@consumer_group, :batch_fetching
13
16
 
14
17
  # @param [String, Symbol] name of a topic on which we want to listen
15
18
  # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
@@ -19,7 +22,7 @@ module Karafka
19
22
  @attributes = {}
20
23
  # @note We use identifier related to the consumer group that owns a topic, because from
21
24
  # Karafka 0.6 we can handle multiple Kafka instances with the same process and we can
22
- # have same topic name across mutliple Kafkas
25
+ # have same topic name across multiple Kafkas
23
26
  @id = "#{consumer_group.id}_#{@name}"
24
27
  end
25
28
 
@@ -29,20 +32,13 @@ module Karafka
29
32
  # example for Sidekiq
30
33
  def build
31
34
  Karafka::AttributesMap.topic.each { |attr| send(attr) }
32
- controller&.topic = self
33
35
  self
34
36
  end
35
37
 
36
38
  # @return [Class, nil] Class (not an instance) of a responder that should respond from
37
- # controller back to Kafka (usefull for piping dataflows)
39
+ # consumer back to Kafka (useful for piping data flows)
38
40
  def responder
39
- @responder ||= Karafka::Responders::Builder.new(controller).build
40
- end
41
-
42
- # @return [Class] Parser class (not instance) that we want to use to unparse Kafka messages
43
- # @note If not provided - will use Json as default
44
- def parser
45
- @parser ||= Karafka::Parsers::Json
41
+ @responder ||= Karafka::Responders::Builder.new(consumer).build
46
42
  end
47
43
 
48
44
  Karafka::AttributesMap.topic.each do |attribute|
@@ -58,7 +54,7 @@ module Karafka
58
54
 
59
55
  Hash[map].merge!(
60
56
  id: id,
61
- controller: controller
57
+ consumer: consumer
62
58
  )
63
59
  end
64
60
  end