karafka 1.1.1 → 1.2.0

Files changed (67)
  1. checksums.yaml +5 -5
  2. data/.ruby-version +1 -1
  3. data/.travis.yml +1 -0
  4. data/CHANGELOG.md +40 -0
  5. data/Gemfile +1 -2
  6. data/Gemfile.lock +41 -29
  7. data/README.md +7 -4
  8. data/karafka.gemspec +6 -4
  9. data/lib/karafka.rb +17 -7
  10. data/lib/karafka/app.rb +8 -15
  11. data/lib/karafka/attributes_map.rb +1 -1
  12. data/lib/karafka/backends/inline.rb +1 -2
  13. data/lib/karafka/{base_controller.rb → base_consumer.rb} +19 -11
  14. data/lib/karafka/base_responder.rb +33 -14
  15. data/lib/karafka/callbacks.rb +30 -0
  16. data/lib/karafka/callbacks/config.rb +22 -0
  17. data/lib/karafka/callbacks/dsl.rb +16 -0
  18. data/lib/karafka/cli/install.rb +2 -3
  19. data/lib/karafka/cli/server.rb +0 -1
  20. data/lib/karafka/connection/{consumer.rb → client.rb} +32 -36
  21. data/lib/karafka/connection/config_adapter.rb +14 -6
  22. data/lib/karafka/connection/delegator.rb +46 -0
  23. data/lib/karafka/connection/listener.rb +22 -13
  24. data/lib/karafka/{controllers → consumers}/callbacks.rb +9 -9
  25. data/lib/karafka/consumers/includer.rb +51 -0
  26. data/lib/karafka/consumers/responders.rb +24 -0
  27. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  28. data/lib/karafka/errors.rb +10 -3
  29. data/lib/karafka/fetcher.rb +30 -34
  30. data/lib/karafka/helpers/class_matcher.rb +8 -8
  31. data/lib/karafka/helpers/config_retriever.rb +2 -2
  32. data/lib/karafka/instrumentation/listener.rb +112 -0
  33. data/lib/karafka/instrumentation/logger.rb +55 -0
  34. data/lib/karafka/instrumentation/monitor.rb +64 -0
  35. data/lib/karafka/loader.rb +0 -1
  36. data/lib/karafka/params/{params.rb → dsl.rb} +71 -43
  37. data/lib/karafka/params/params_batch.rb +7 -2
  38. data/lib/karafka/patches/dry_configurable.rb +6 -2
  39. data/lib/karafka/patches/ruby_kafka.rb +10 -10
  40. data/lib/karafka/persistence/client.rb +25 -0
  41. data/lib/karafka/persistence/consumer.rb +27 -14
  42. data/lib/karafka/persistence/topic.rb +29 -0
  43. data/lib/karafka/process.rb +5 -4
  44. data/lib/karafka/responders/builder.rb +15 -14
  45. data/lib/karafka/routing/builder.rb +1 -1
  46. data/lib/karafka/routing/consumer_mapper.rb +3 -2
  47. data/lib/karafka/routing/router.rb +1 -1
  48. data/lib/karafka/routing/topic.rb +5 -5
  49. data/lib/karafka/schemas/config.rb +3 -0
  50. data/lib/karafka/schemas/consumer_group.rb +15 -3
  51. data/lib/karafka/schemas/consumer_group_topic.rb +1 -1
  52. data/lib/karafka/server.rb +37 -5
  53. data/lib/karafka/setup/config.rb +45 -21
  54. data/lib/karafka/setup/configurators/base.rb +6 -12
  55. data/lib/karafka/setup/configurators/params.rb +25 -0
  56. data/lib/karafka/setup/configurators/water_drop.rb +6 -3
  57. data/lib/karafka/setup/dsl.rb +22 -0
  58. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.example} +2 -3
  59. data/lib/karafka/templates/karafka.rb.example +17 -4
  60. data/lib/karafka/version.rb +1 -1
  61. metadata +58 -23
  62. data/lib/karafka/connection/processor.rb +0 -61
  63. data/lib/karafka/controllers/includer.rb +0 -51
  64. data/lib/karafka/controllers/responders.rb +0 -19
  65. data/lib/karafka/logger.rb +0 -53
  66. data/lib/karafka/monitor.rb +0 -98
  67. data/lib/karafka/persistence/controller.rb +0 -38
data/lib/karafka/{controllers → consumers}/callbacks.rb

@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Karafka
-  module Controllers
+  module Consumers
     # Additional callbacks that can be used to trigger some actions on certain moments like
     # manual offset management, committing or anything else outside of a standard messages flow
     # They are not included by default, as we don't want to provide functionalities that are
@@ -10,7 +10,7 @@ module Karafka
     module Callbacks
       # Types of events on which we run callbacks
       TYPES = %i[
-        after_fetched
+        after_fetch
         after_poll
         before_poll
        before_stop
@@ -28,9 +28,9 @@ module Karafka
        end
      end
 
-      # @param controller_class [Class] controller class that we extend with callbacks
-      def self.included(controller_class)
-        controller_class.class_eval do
+      # @param consumer_class [Class] consumer class that we extend with callbacks
+      def self.included(consumer_class)
+        consumer_class.class_eval do
           extend ClassMethods
           include ActiveSupport::Callbacks
 
@@ -41,11 +41,11 @@ module Karafka
        end
      end
 
-      # Executes the default controller flow, runs callbacks and if not halted will call process
-      # method of a proper backend. This is here because it interacts with the default Karafka
-      # call flow and needs to be overwritten in order to support callbacks
+      # Executes the default consumer flow, runs callbacks and if not halted will call process
+      # method of a proper backend. It is here because it interacts with the default Karafka
+      # call flow and needs to be overwritten to support callbacks
       def call
-        run_callbacks :after_fetched do
+        run_callbacks :after_fetch do
           process
         end
       end
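
For orientation, a minimal sketch of a 1.2 consumer hooking into the renamed callback (EventsConsumer is a hypothetical class; the Callbacks module must be included explicitly, and the #consume entry point is assumed per the 1.2 consumer API):

  class EventsConsumer < Karafka::BaseConsumer
    include Karafka::Consumers::Callbacks

    # Was after_fetched in 1.1; registered blocks run when a batch has been
    # fetched, wrapped around the standard processing flow
    after_fetch do
      # e.g. manual offset management or metrics
    end

    def consume
      # standard message handling
    end
  end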
data/lib/karafka/consumers/includer.rb (new file)

@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Additional functionalities for consumers
+  module Consumers
+    # Module used to inject functionalities into a given consumer class, based on the consumer
+    # topic and its settings
+    # We don't need all the behaviors in all the cases, so it is not worth having everything
+    # included all the time
+    module Includer
+      class << self
+        # @param consumer_class [Class] consumer class that will get some functionalities
+        #   based on the topic under which it operates
+        def call(consumer_class)
+          topic = consumer_class.topic
+
+          bind_backend(consumer_class, topic)
+          bind_params(consumer_class, topic)
+          bind_responders(consumer_class, topic)
+        end
+
+        private
+
+        # Figures out the backend for a given consumer class, based on the topic backend, and
+        # includes it into the consumer class
+        # @param consumer_class [Class] consumer class
+        # @param topic [Karafka::Routing::Topic] topic of a consumer class
+        def bind_backend(consumer_class, topic)
+          backend = Kernel.const_get("::Karafka::Backends::#{topic.backend.to_s.capitalize}")
+          consumer_class.include backend
+        end
+
+        # Adds single #params support for topics that are not batch processed
+        # @param consumer_class [Class] consumer class
+        # @param topic [Karafka::Routing::Topic] topic of a consumer class
+        def bind_params(consumer_class, topic)
+          return if topic.batch_consuming
+          consumer_class.include SingleParams
+        end
+
+        # Adds responders support for topics and consumers with responders defined for them
+        # @param consumer_class [Class] consumer class
+        # @param topic [Karafka::Routing::Topic] topic of a consumer class
+        def bind_responders(consumer_class, topic)
+          return unless topic.responder
+          consumer_class.include Responders
+        end
+      end
+    end
+  end
+end
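
The modules injected here map one-to-one onto per-topic routing settings. An illustrative routing sketch (hypothetical class names; the draw DSL follows the 1.2 karafka.rb.example template, details may differ in your setup):

  App.consumer_groups.draw do
    consumer_group :example do
      topic :events do
        consumer EventsConsumer
        backend :inline            # bind_backend  => Karafka::Backends::Inline
        batch_consuming false      # bind_params   => Consumers::SingleParams
        responder EventsResponder  # bind_responders => Consumers::Responders
      end
    end
  end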
data/lib/karafka/consumers/responders.rb (new file)

@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Consumers
+    # Feature that allows us to use the responders flow in a consumer
+    module Responders
+      # Responds with given data using given responder. This allows us to have a similar way of
+      # defining flows like synchronous protocols
+      # @param data Anything we want to pass to responder based on which we want to trigger
+      #   further Kafka responding
+      def respond_with(*data)
+        Karafka.monitor.instrument(
+          'consumers.responders.respond_with',
+          caller: self,
+          data: data
+        ) do
+          # @note we build a new instance of responder each time, as long-running (persisted)
+          #   consumers can respond multiple times during their lifecycle
+          topic.responder.new(topic.parser).call(*data)
+        end
+      end
+    end
+  end
+end
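
For context, a sketch of the full responder flow (hypothetical classes, using the topic/respond_to API of Karafka::BaseResponder):

  class EventsResponder < Karafka::BaseResponder
    topic :events_processed

    # Delivers the payload to the registered topic
    def respond(payload)
      respond_to :events_processed, payload
    end
  end

  class EventsConsumer < Karafka::BaseConsumer
    def consume
      # Builds a fresh EventsResponder on each call and instruments it under
      # 'consumers.responders.respond_with'
      respond_with(params_batch.to_a)
    end
  end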
data/lib/karafka/{controllers → consumers}/single_params.rb

@@ -1,12 +1,12 @@
 # frozen_string_literal: true
 
 module Karafka
-  module Controllers
-    # Params alias for single message consumption controllers
+  module Consumers
+    # Params alias for single message consumption consumers
     module SingleParams
       private
 
-      # @return [Karafka::Params::Params] params instance for non batch consumption controllers
+      # @return [Karafka::Params::Params] params instance for non batch consumption consumers
       def params
         params_batch.first
       end
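
In practice, for a topic with batch_consuming disabled, the consumer can read the single message directly (sketch, hypothetical class; assumes the 1.2 params hash exposes the message payload under 'value'):

  class PingConsumer < Karafka::BaseConsumer
    def consume
      # #params is an alias for params_batch.first here
      Karafka.logger.info "Consuming #{params['value']}"
    end
  end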
data/lib/karafka/errors.rb

@@ -13,9 +13,9 @@ module Karafka
 
     # Raised when router receives topic name which does not correspond with any routes
     # This can only happen in a case when:
-    #   - you've received a message and we cannot match it with a controller
+    #   - you've received a message and we cannot match it with a consumer
     #   - you've changed the routing, so router can no longer associate your topic to
-    #     any controller
+    #     any consumer
     #   - or in a case when you do a lot of metaprogramming and you change routing/etc on runtime
     #
     # In case this happens, you will have to create a temporary route that will allow
@@ -27,6 +27,10 @@ module Karafka
     # topics usage definitions
     InvalidResponderUsage = Class.new(BaseError)
 
+    # Raised when options that we provide to the responder to respond aren't what the schema
+    # requires
+    InvalidResponderMessageOptions = Class.new(BaseError)
+
     # Raised when configuration doesn't match with validation schema
     InvalidConfiguration = Class.new(BaseError)
 
@@ -35,9 +39,12 @@ module Karafka
 
     # Raised when we want to read a persisted thread messages consumer but it is unavailable
     # This should never happen and if it does, please contact us
-    MissingConsumer = Class.new(BaseError)
+    MissingClient = Class.new(BaseError)
 
     # Raised when we attempt to pause a partition but the pause timeout is equal to 0
     InvalidPauseTimeout = Class.new(BaseError)
+
+    # Raised when we want to hook up to an event that is not registered and supported
+    UnregisteredMonitorEvent = Class.new(BaseError)
   end
 end
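
Code that rescued the old constant needs the new name, e.g. (sketch; assumes the 1.2 Karafka::Persistence::Client.read API, which raises this error when no client is persisted for the current thread):

  begin
    client = Karafka::Persistence::Client.read
  rescue Karafka::Errors::MissingClient # was Errors::MissingConsumer in 1.1
    # no Kafka client has been persisted for this thread yet
  end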
data/lib/karafka/fetcher.rb

@@ -5,43 +5,39 @@ module Karafka
   # @note Creating multiple fetchers will result in having multiple connections to the same
   #   topics, which means that if there are no partitions, it won't use them.
   class Fetcher
-    # Starts listening on all the listeners asynchronously
-    # Fetch loop should never end, which means that we won't create more actor clusters
-    # so we don't have to terminate them
-    def fetch_loop
-      threads = listeners.map do |listener|
-        # We abort on exception because there should be an exception handling developed for
-        # each listener running in separate threads, so the exceptions should never leak
-        # and if that happens, it means that something really bad happened and we should stop
-        # the whole process
-        Thread
-          .new { listener.fetch_loop(processor) }
-          .tap { |thread| thread.abort_on_exception = true }
-      end
-
-      threads.each(&:join)
-    # If anything crashes here, we need to raise the error and crash the runner because it means
-    # that something really bad happened
-    rescue StandardError => e
-      Karafka.monitor.notice_error(self.class, e)
-      Karafka::App.stop!
-      raise e
-    end
-
-    private
-
-    # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
-    def listeners
-      @listeners ||= App.consumer_groups.active.map do |consumer_group|
-        Karafka::Connection::Listener.new(consumer_group)
-      end
-    end
-
-    # @return [Proc] proc that should be processed when messages arrive
-    # @yieldparam messages [Array<Kafka::FetchedMessage>] messages from kafka (raw)
-    def processor
-      lambda do |group_id, messages|
-        Karafka::Connection::Processor.process(group_id, messages)
+    class << self
+      # Starts listening on all the listeners asynchronously
+      # Fetch loop should never end, which means that we won't create more actor clusters
+      # so we don't have to terminate them
+      def call
+        threads = listeners.map do |listener|
+          # We abort on exception because there should be an exception handling developed for
+          # each listener running in separate threads, so the exceptions should never leak
+          # and if that happens, it means that something really bad happened and we should stop
+          # the whole process
+          Thread
+            .new { listener.call }
+            .tap { |thread| thread.abort_on_exception = true }
+        end
+
+        # We aggregate threads here for a supervised shutdown process
+        threads.each { |thread| Karafka::Server.consumer_threads << thread }
+        threads.each(&:join)
+      # If anything crashes here, we need to raise the error and crash the runner because it means
+      # that something terrible happened
+      rescue StandardError => e
+        Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
+        Karafka::App.stop!
+        raise e
+      end
+
+      private
+
+      # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
+      def listeners
+        @listeners ||= App.consumer_groups.active.map do |consumer_group|
+          Karafka::Connection::Listener.new(consumer_group)
+        end
       end
     end
   end
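
Since the fetcher is now a class-level API, invoking it changes accordingly (in 1.2 this is normally done internally by Karafka::Server rather than by application code):

  # 1.1
  Karafka::Fetcher.new.fetch_loop
  # 1.2
  Karafka::Fetcher.call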
data/lib/karafka/helpers/class_matcher.rb

@@ -4,20 +4,20 @@ module Karafka
   module Helpers
     # Class used to autodetect corresponding classes that are internally inside Karafka framework
     # It is used among others to match:
-    #   controller => responder
+    #   consumer => responder
     class ClassMatcher
-      # Regexp used to remove any non classy like characters that might be in the controller
+      # Regexp used to remove any non classy like characters that might be in the consumer
       #   class name (if defined dynamically, etc)
       CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}
 
       # @param klass [Class] class to which we want to find a corresponding class
       # @param from [String] what type of object is it (based on postfix name part)
       # @param to [String] what are we looking for (based on a postfix name part)
-      # @example Controller that has a corresponding responder
-      #   matcher = Karafka::Helpers::ClassMatcher.new(SuperController, 'Controller', 'Responder')
+      # @example Consumer that has a corresponding responder
+      #   matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, 'Consumer', 'Responder')
       #   matcher.match #=> SuperResponder
-      # @example Controller without a corresponding responder
-      #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Controller, 'Controller', 'Responder')
+      # @example Consumer without a corresponding responder
+      #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Consumer, 'Consumer', 'Responder')
       #   matcher.match #=> nil
       def initialize(klass, from:, to:)
         @klass = klass
@@ -36,9 +36,9 @@ module Karafka
 
       # @return [String] name of a new class that we're looking for
       # @note This method returns name of a class without a namespace
-      # @example From SuperController matching responder
+      # @example From SuperConsumer matching responder
       #   matcher.name #=> 'SuperResponder'
-      # @example From Namespaced::Super2Controller matching responder
+      # @example From Namespaced::Super2Consumer matching responder
       #   matcher.name #=> Super2Responder
       def name
         inflected = @klass.to_s.split('::').last.to_s
data/lib/karafka/helpers/config_retriever.rb

@@ -33,9 +33,9 @@ module Karafka
       return current_value unless current_value.nil?
 
       value = if Karafka::App.config.respond_to?(attribute)
-                Karafka::App.config.public_send(attribute)
+                Karafka::App.config.send(attribute)
               else
-                Karafka::App.config.kafka.public_send(attribute)
+                Karafka::App.config.kafka.send(attribute)
               end
 
       instance_variable_set(:"@#{attribute}", value)
data/lib/karafka/instrumentation/listener.rb (new file)

@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Instrumentation
+    # Default listener that hooks up to our instrumentation and uses its events for logging
+    # It can be removed/replaced or anything without any harm to the Karafka app flow
+    module Listener
+      # Log levels that we use in this particular listener
+      USED_LOG_LEVELS = %i[
+        debug
+        info
+        error
+        fatal
+      ].freeze
+
+      # Injects WaterDrop listener logger actions
+      extend WaterDrop::Instrumentation::Listener
+
+      class << self
+        # Logs details about incoming messages and with which consumer we will consume them
+        # @param event [Dry::Events::Event] event details including payload
+        def on_connection_delegator_call(event)
+          consumer = event[:consumer]
+          topic = consumer.topic.name
+          kafka_messages = event[:kafka_messages]
+          info "#{kafka_messages.count} messages on #{topic} topic delegated to #{consumer.class}"
+        end
+
+        # Logs details about each received message value parsing
+        # @param event [Dry::Events::Event] event details including payload
+        def on_params_params_parse(event)
+          # Keep in mind that the caller here is a params object, not a consumer,
+          # so it returns a topic as a string, not a routing topic
+          debug "Params parsing for #{event[:caller].topic} topic successful in #{event[:time]} ms"
+        end
+
+        # Logs unsuccessful parsing attempts of incoming data
+        # @param event [Dry::Events::Event] event details including payload
+        def on_params_params_parse_error(event)
+          error "Params parsing error for #{event[:caller].topic} topic: #{event[:error]}"
+        end
+
+        # Logs errors that occurred in a listener fetch loop
+        # @param event [Dry::Events::Event] event details including payload
+        # @note It's an error, as we can recover from it, not a fatal
+        def on_connection_listener_fetch_loop_error(event)
+          error "Listener fetch loop error: #{event[:error]}"
+        end
+
+        # Logs errors that are related to the connection itself
+        # @note Karafka will attempt to reconnect, so an error, not a fatal
+        # @param event [Dry::Events::Event] event details including payload
+        def on_connection_client_fetch_loop_error(event)
+          error "Client fetch loop error: #{event[:error]}"
+        end
+
+        # Logs info about a crashed fetcher
+        # @note If this happens, Karafka will shut down, as it means a critical error
+        #   in one of the threads
+        # @param event [Dry::Events::Event] event details including payload
+        def on_fetcher_call_error(event)
+          fatal "Fetcher crash due to an error: #{event[:error]}"
+        end
+
+        # Logs info about processing of a certain dataset with an inline backend
+        # @param event [Dry::Events::Event] event details including payload
+        def on_backends_inline_process(event)
+          count = event[:caller].send(:params_batch).to_a.size
+          topic = event[:caller].topic.name
+          time = event[:time]
+          info "Inline processing of topic #{topic} with #{count} messages took #{time} ms"
+        end
+
+        # Logs info about system signals that Karafka received
+        # @param event [Dry::Events::Event] event details including payload
+        def on_process_notice_signal(event)
+          info "Received #{event[:signal]} system signal"
+        end
+
+        # Logs info about responder usage within a consumer flow
+        # @param event [Dry::Events::Event] event details including payload
+        def on_consumers_responders_respond_with(event)
+          calling = event[:caller].class
+          responder = calling.topic.responder
+          data = event[:data]
+          info "Responded from #{calling} using #{responder} with following data #{data}"
+        end
+
+        # Logs info that we're going to stop the Karafka server
+        # @param _event [Dry::Events::Event] event details including payload
+        def on_server_stop(_event)
+          # We use a separate thread as logging can't be called from a trap context
+          Thread.new { info "Stopping Karafka server #{::Process.pid}" }
+        end
+
+        # Logs an error that Karafka was unable to stop the server gracefully and had to do a
+        # forced exit
+        # @param _event [Dry::Events::Event] event details including payload
+        def on_server_stop_error(_event)
+          # We use a separate thread as logging can't be called from a trap context
+          Thread.new { error "Forceful Karafka server #{::Process.pid} stop" }
+        end
+
+        USED_LOG_LEVELS.each do |log_level|
+          define_method log_level do |*args|
+            Karafka.logger.send(log_level, *args)
+          end
+        end
+      end
+    end
+  end
+end
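
The listener is a set of plain event handlers; nothing subscribes it automatically. To get this logging, hook it up to the monitor, typically in karafka.rb (this subscription line also appears in the updated 1.2 karafka.rb.example template):

  Karafka.monitor.subscribe(Karafka::Instrumentation::Listener)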
data/lib/karafka/instrumentation/logger.rb (new file)

@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Instrumentation
+    # Default logger for the event delegator
+    # @note It uses ::Logger features - providing basic logging
+    class Logger < ::Logger
+      include Singleton
+
+      # Map containing information about log level for given environment
+      ENV_MAP = {
+        'production' => ::Logger::ERROR,
+        'test' => ::Logger::ERROR,
+        'development' => ::Logger::INFO,
+        'debug' => ::Logger::DEBUG,
+        'default' => ::Logger::INFO
+      }.freeze
+
+      # Creates a new instance of logger ensuring that it has a place to write to
+      def initialize(*_args)
+        ensure_dir_exists
+        super(target)
+        self.level = ENV_MAP[Karafka.env] || ENV_MAP['default']
+      end
+
+      private
+
+      # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
+      #   to which we will be writing logs
+      # We use this approach to log stuff to a file and to STDOUT at the same time
+      def target
+        Karafka::Helpers::MultiDelegator
+          .delegate(:write, :close)
+          .to(STDOUT, file)
+      end
+
+      # Makes sure the log directory exists
+      def ensure_dir_exists
+        dir = File.dirname(log_path)
+        FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
+      end
+
+      # @return [Pathname] path to the file to which we should log
+      def log_path
+        @log_path ||= Karafka::App.root.join("log/#{Karafka.env}.log")
+      end
+
+      # @return [File] file to which we want to write our logs
+      # @note File is being opened in append mode ('a')
+      def file
+        @file ||= File.open(log_path, 'a')
+      end
+    end
+  end
+end
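
This logger is wired in as the default, and since it is just a ::Logger, it can be swapped during setup (sketch; assumes the logger setting exposed by the 1.2 Setup::Config):

  class App < Karafka::App
    setup do |config|
      # Any ::Logger-compatible object can be used here instead
      config.logger = Karafka::Instrumentation::Logger.instance
    end
  end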