karafka 1.1.2 → 1.2.0.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. checksums.yaml +5 -5
  2. data/.ruby-version +1 -1
  3. data/.travis.yml +1 -0
  4. data/CHANGELOG.md +34 -0
  5. data/Gemfile +1 -2
  6. data/Gemfile.lock +35 -22
  7. data/README.md +4 -3
  8. data/karafka.gemspec +5 -3
  9. data/lib/karafka.rb +4 -5
  10. data/lib/karafka/app.rb +8 -15
  11. data/lib/karafka/attributes_map.rb +1 -1
  12. data/lib/karafka/backends/inline.rb +1 -2
  13. data/lib/karafka/{base_controller.rb → base_consumer.rb} +19 -11
  14. data/lib/karafka/base_responder.rb +33 -14
  15. data/lib/karafka/callbacks.rb +30 -0
  16. data/lib/karafka/callbacks/config.rb +22 -0
  17. data/lib/karafka/callbacks/dsl.rb +16 -0
  18. data/lib/karafka/cli/install.rb +2 -3
  19. data/lib/karafka/cli/server.rb +0 -1
  20. data/lib/karafka/connection/{consumer.rb → client.rb} +25 -33
  21. data/lib/karafka/connection/config_adapter.rb +14 -6
  22. data/lib/karafka/connection/delegator.rb +46 -0
  23. data/lib/karafka/connection/listener.rb +22 -13
  24. data/lib/karafka/{controllers → consumers}/callbacks.rb +9 -9
  25. data/lib/karafka/consumers/includer.rb +51 -0
  26. data/lib/karafka/consumers/responders.rb +24 -0
  27. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  28. data/lib/karafka/errors.rb +10 -3
  29. data/lib/karafka/fetcher.rb +30 -34
  30. data/lib/karafka/helpers/class_matcher.rb +8 -8
  31. data/lib/karafka/helpers/config_retriever.rb +2 -2
  32. data/lib/karafka/instrumentation/listener.rb +97 -0
  33. data/lib/karafka/instrumentation/logger.rb +55 -0
  34. data/lib/karafka/instrumentation/monitor.rb +62 -0
  35. data/lib/karafka/loader.rb +0 -1
  36. data/lib/karafka/params/{params.rb → dsl.rb} +69 -44
  37. data/lib/karafka/params/params_batch.rb +2 -2
  38. data/lib/karafka/patches/dry_configurable.rb +6 -2
  39. data/lib/karafka/patches/ruby_kafka.rb +10 -10
  40. data/lib/karafka/persistence/client.rb +25 -0
  41. data/lib/karafka/persistence/consumer.rb +27 -14
  42. data/lib/karafka/persistence/topic.rb +29 -0
  43. data/lib/karafka/process.rb +5 -4
  44. data/lib/karafka/responders/builder.rb +15 -14
  45. data/lib/karafka/routing/builder.rb +1 -1
  46. data/lib/karafka/routing/consumer_mapper.rb +3 -2
  47. data/lib/karafka/routing/router.rb +1 -1
  48. data/lib/karafka/routing/topic.rb +5 -5
  49. data/lib/karafka/schemas/config.rb +3 -0
  50. data/lib/karafka/schemas/consumer_group.rb +14 -2
  51. data/lib/karafka/schemas/consumer_group_topic.rb +1 -1
  52. data/lib/karafka/server.rb +33 -5
  53. data/lib/karafka/setup/config.rb +45 -21
  54. data/lib/karafka/setup/configurators/base.rb +6 -12
  55. data/lib/karafka/setup/configurators/params.rb +25 -0
  56. data/lib/karafka/setup/configurators/water_drop.rb +6 -3
  57. data/lib/karafka/setup/dsl.rb +22 -0
  58. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.example} +2 -3
  59. data/lib/karafka/templates/karafka.rb.example +14 -3
  60. data/lib/karafka/version.rb +1 -1
  61. metadata +58 -23
  62. data/lib/karafka/connection/processor.rb +0 -61
  63. data/lib/karafka/controllers/includer.rb +0 -51
  64. data/lib/karafka/controllers/responders.rb +0 -19
  65. data/lib/karafka/logger.rb +0 -53
  66. data/lib/karafka/monitor.rb +0 -98
  67. data/lib/karafka/persistence/controller.rb +0 -38

data/lib/karafka/consumers/responders.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Consumers
+    # Feature that allows us to use responders flow in consumer
+    module Responders
+      # Responds with given data using given responder. This allows us to have a similar way of
+      # defining flows like synchronous protocols
+      # @param data Anything we want to pass to responder based on which we want to trigger further
+      #   Kafka responding
+      def respond_with(*data)
+        Karafka.monitor.instrument(
+          'consumers.responders.respond_with',
+          caller: self,
+          data: data
+        ) do
+          # @note we build a new instance of responder each time, as long-running (persisted)
+          #   consumers can respond multiple times during the lifecycle
+          topic.responder.new(topic.parser).call(*data)
+        end
+      end
+    end
+  end
+end
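
Note: `respond_with` is the consumer-facing entry point to the responders flow above. A minimal usage sketch (consumer and responder names are hypothetical; it assumes the 1.2 consumer action is #consume and that this topic's route registers a responder):

class EventsConsumer < ApplicationConsumer
  def consume
    # Builds a fresh responder instance per call (see the @note above),
    # so a persisted consumer can respond once per consumed batch
    respond_with(params_batch.to_a)
  end
end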

data/lib/karafka/{controllers → consumers}/single_params.rb
@@ -1,12 +1,12 @@
 # frozen_string_literal: true
 
 module Karafka
-  module Controllers
-    # Params alias for single message consumption controllers
+  module Consumers
+    # Params alias for single message consumption consumers
     module SingleParams
       private
 
-      # @return [Karafka::Params::Params] params instance for non batch consumption controllers
+      # @return [Karafka::Params::Params] params instance for non batch consumption consumers
       def params
         params_batch.first
       end
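
As the diff shows, SingleParams#params is just an alias for params_batch.first. A sketch of what that means for a route consumed message by message (names hypothetical; the consumers includer normally mixes this module in when batch consuming is disabled):

class PingConsumer < ApplicationConsumer
  def consume
    # params is params_batch.first, so single-message topics keep the
    # familiar singular accessor
    Karafka.logger.info "Received message from #{params.topic}"
  end
end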

data/lib/karafka/errors.rb
@@ -13,9 +13,9 @@ module Karafka
 
     # Raised when router receives topic name which does not correspond with any routes
     # This can only happen in a case when:
-    # - you've received a message and we cannot match it with a controller
+    # - you've received a message and we cannot match it with a consumer
     # - you've changed the routing, so router can no longer associate your topic to
-    #   any controller
+    #   any consumer
     # - or in a case when you do a lot of metaprogramming and you change routing/etc on runtime
     #
     # In case this happens, you will have to create a temporary route that will allow
@@ -27,6 +27,10 @@ module Karafka
     #   topics usage definitions
     InvalidResponderUsage = Class.new(BaseError)
 
+    # Raised when options that we provide to the responder to respond aren't what the schema
+    # requires
+    InvalidResponderMessageOptions = Class.new(BaseError)
+
     # Raised when configuration doesn't match with validation schema
     InvalidConfiguration = Class.new(BaseError)
 
@@ -35,9 +39,12 @@ module Karafka
 
     # Raised when we want to read a persisted thread messages consumer but it is unavailable
     # This should never happen and if it does, please contact us
-    MissingConsumer = Class.new(BaseError)
+    MissingClient = Class.new(BaseError)
 
     # Raised when we attempt to pause a partition but the pause timeout is equal to 0
     InvalidPauseTimeout = Class.new(BaseError)
+
+    # Raised when we want to hook up to an event that is not registered and supported
+    UnregisteredMonitorEvent = Class.new(BaseError)
   end
 end
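
The new UnregisteredMonitorEvent error pairs with the monitor's subscribe guard shown later in this diff. A hedged sketch of when it surfaces:

# Subscribing to a registered event name works
Karafka.monitor.subscribe('params.params.parse.error') { |event| puts event[:error] }

# A typo or unknown event name now fails fast instead of silently doing nothing
begin
  Karafka.monitor.subscribe('params.params.prase') {}
rescue Karafka::Errors::UnregisteredMonitorEvent => e
  puts "No such event: #{e.message}"
end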

data/lib/karafka/fetcher.rb
@@ -5,43 +5,39 @@ module Karafka
   # @note Creating multiple fetchers will result in having multiple connections to the same
   #   topics, which means that if there are no partitions, it won't use them.
   class Fetcher
-    # Starts listening on all the listeners asynchronously
-    # Fetch loop should never end, which means that we won't create more actor clusters
-    # so we don't have to terminate them
-    def fetch_loop
-      threads = listeners.map do |listener|
-        # We abort on exception because there should be an exception handling developed for
-        # each listener running in separate threads, so the exceptions should never leak
-        # and if that happens, it means that something really bad happened and we should stop
-        # the whole process
-        Thread
-          .new { listener.fetch_loop(processor) }
-          .tap { |thread| thread.abort_on_exception = true }
-      end
-
-      threads.each(&:join)
-    # If anything crashes here, we need to raise the error and crash the runner because it means
-    # that something really bad happened
-    rescue StandardError => e
-      Karafka.monitor.notice_error(self.class, e)
-      Karafka::App.stop!
-      raise e
-    end
+    class << self
+      # Starts listening on all the listeners asynchronously
+      # Fetch loop should never end, which means that we won't create more actor clusters
+      # so we don't have to terminate them
+      def call
+        threads = listeners.map do |listener|
+          # We abort on exception because there should be an exception handling developed for
+          # each listener running in separate threads, so the exceptions should never leak
+          # and if that happens, it means that something really bad happened and we should stop
+          # the whole process
+          Thread
+            .new { listener.call }
+            .tap { |thread| thread.abort_on_exception = true }
+        end
 
-    private
-
-    # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
-    def listeners
-      @listeners ||= App.consumer_groups.active.map do |consumer_group|
-        Karafka::Connection::Listener.new(consumer_group)
+        # We aggregate threads here for a supervised shutdown process
+        threads.each { |thread| Karafka::Server.consumer_threads << thread }
+        threads.each(&:join)
+      # If anything crashes here, we need to raise the error and crash the runner because it means
+      # that something terrible happened
+      rescue StandardError => e
+        Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
+        Karafka::App.stop!
+        raise e
       end
-    end
 
-    # @return [Proc] proc that should be processed when a messages arrive
-    # @yieldparam messages [Array<Kafka::FetchedMessage>] messages from kafka (raw)
-    def processor
-      lambda do |group_id, messages|
-        Karafka::Connection::Processor.process(group_id, messages)
+      private
+
+      # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
+      def listeners
+        @listeners ||= App.consumer_groups.active.map do |consumer_group|
+          Karafka::Connection::Listener.new(consumer_group)
+        end
       end
     end
   end
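
With the fetcher refactored to class-level methods, the server presumably switches from building an instance to a bare class call, and the listener threads are now registered in Karafka::Server.consumer_threads so the supervised shutdown can wait on them. A sketch of the invocation change (the 1.1 caller is inferred from the removed instance method, not shown in this excerpt):

# karafka 1.1 (instance-based, with an explicit processor lambda):
#   Karafka::Fetcher.new.fetch_loop
# karafka 1.2 (class-based; listeners now delegate messages themselves):
Karafka::Fetcher.call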

data/lib/karafka/helpers/class_matcher.rb
@@ -4,20 +4,20 @@ module Karafka
   module Helpers
     # Class used to autodetect corresponding classes that are internally inside Karafka framework
     # It is used among others to match:
-    #   controller => responder
+    #   consumer => responder
    class ClassMatcher
-      # Regexp used to remove any non class-like characters that might be in the controller
+      # Regexp used to remove any non class-like characters that might be in the consumer
       # class name (if defined dynamically, etc)
       CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}
 
       # @param klass [Class] class to which we want to find a corresponding class
       # @param from [String] what type of object is it (based on postfix name part)
       # @param to [String] what are we looking for (based on a postfix name part)
-      # @example Controller that has a corresponding responder
-      #   matcher = Karafka::Helpers::ClassMatcher.new(SuperController, 'Controller', 'Responder')
+      # @example Consumer that has a corresponding responder
+      #   matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, 'Consumer', 'Responder')
       #   matcher.match #=> SuperResponder
-      # @example Controller without a corresponding responder
-      #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Controller, 'Controller', 'Responder')
+      # @example Consumer without a corresponding responder
+      #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Consumer, 'Consumer', 'Responder')
       #   matcher.match #=> nil
       def initialize(klass, from:, to:)
         @klass = klass
@@ -36,9 +36,9 @@ module Karafka
 
       # @return [String] name of a new class that we're looking for
       # @note This method returns name of a class without a namespace
-      # @example From SuperController matching responder
+      # @example From SuperConsumer matching responder
       #   matcher.name #=> 'SuperResponder'
-      # @example From Namespaced::Super2Controller matching responder
+      # @example From Namespaced::Super2Consumer matching responder
       #   matcher.name #=> Super2Responder
       def name
         inflected = @klass.to_s.split('::').last.to_s

data/lib/karafka/helpers/config_retriever.rb
@@ -33,9 +33,9 @@ module Karafka
         return current_value unless current_value.nil?
 
         value = if Karafka::App.config.respond_to?(attribute)
-          Karafka::App.config.public_send(attribute)
+          Karafka::App.config.send(attribute)
         else
-          Karafka::App.config.kafka.public_send(attribute)
+          Karafka::App.config.kafka.send(attribute)
         end
 
         instance_variable_set(:"@#{attribute}", value)
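
For context (an assumption-based sketch, since the surrounding config_retriever_for helper is outside this hunk): the helper defines a memoized reader that falls back to the root app config and then to the kafka-scoped config. Switching public_send to send lets that fallback also reach attributes with non-public readers:

# Hypothetical, simplified version of the fallback chain
def resolve(attribute)
  current_value = instance_variable_get(:"@#{attribute}")
  return current_value unless current_value.nil?

  value = if Karafka::App.config.respond_to?(attribute)
    Karafka::App.config.send(attribute)       # root-level setting
  else
    Karafka::App.config.kafka.send(attribute) # kafka-scoped setting
  end

  instance_variable_set(:"@#{attribute}", value)
end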

data/lib/karafka/instrumentation/listener.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Instrumentation
+    # Default listener that hooks up to our instrumentation and uses its events for logging
+    # It can be removed/replaced or anything without any harm to the Karafka app flow
+    module Listener
+      # Log levels that we use in this particular listener
+      USED_LOG_LEVELS = %i[
+        debug
+        info
+        error
+        fatal
+      ].freeze
+
+      # Injects WaterDrop listener logger actions
+      extend WaterDrop::Instrumentation::Listener
+
+      class << self
+        # Logs details about incoming messages and with which consumer we will consume them
+        # @param event [Dry::Events::Event] event details including payload
+        def on_connection_delegator_call(event)
+          consumer = event[:consumer]
+          topic = consumer.topic.name
+          kafka_messages = event[:kafka_messages]
+          info "#{kafka_messages.count} messages on #{topic} topic delegated to #{consumer.class}"
+        end
+
+        # Logs details about each received message value parsing
+        # @param event [Dry::Events::Event] event details including payload
+        def on_params_params_parse(event)
+          # Keep in mind, that a caller here is a param object not a consumer,
+          # so it returns a topic as a string, not a routing topic
+          debug "Params parsing for #{event[:caller].topic} topic successful in #{event[:time]} ms"
+        end
+
+        # Logs unsuccessful parsing attempts of incoming data
+        # @param event [Dry::Events::Event] event details including payload
+        def on_params_params_parse_error(event)
+          error "Params parsing error for #{event[:caller].topic} topic: #{event[:error]}"
+        end
+
+        # Logs errors that occurred in a listener fetch loop
+        # @param event [Dry::Events::Event] event details including payload
+        # @note It's an error, as we can recover from it, not a fatal
+        def on_connection_listener_fetch_loop_error(event)
+          error "Listener fetch loop error: #{event[:error]}"
+        end
+
+        # Logs errors that are related to the connection itself
+        # @note Karafka will attempt to reconnect, so it's an error, not a fatal
+        # @param event [Dry::Events::Event] event details including payload
+        def on_connection_client_fetch_loop_error(event)
+          error "Client fetch loop error: #{event[:error]}"
+        end
+
+        # Logs info about a crashed fetcher
+        # @note If this happens, Karafka will shut down as it means a critical error
+        #   in one of the threads
+        # @param event [Dry::Events::Event] event details including payload
+        def on_fetcher_call_error(event)
+          fatal "Fetcher crash due to an error: #{event[:error]}"
+        end
+
+        # Logs info about processing of a certain dataset with an inline backend
+        # @param event [Dry::Events::Event] event details including payload
+        def on_backends_inline_process(event)
+          count = event[:caller].send(:params_batch).to_a.size
+          topic = event[:caller].topic.name
+          time = event[:time]
+          info "Inline processing of topic #{topic} with #{count} messages took #{time} ms"
+        end
+
+        # Logs info about system signals that Karafka received
+        # @param event [Dry::Events::Event] event details including payload
+        def on_process_notice_signal(event)
+          info "Received #{event[:signal]} system signal"
+        end
+
+        # Logs info about responder usage within a consumer flow
+        # @param event [Dry::Events::Event] event details including payload
+        def on_consumers_responders_respond_with(event)
+          calling = event[:caller].class
+          responder = calling.topic.responder
+          data = event[:data]
+          info "Responded from #{calling} using #{responder} with following data #{data}"
+        end
+
+        USED_LOG_LEVELS.each do |log_level|
+          define_method log_level do |*args|
+            Karafka.logger.send(log_level, *args)
+          end
+        end
+      end
+    end
+  end
+end
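
Because each on_* method name mirrors an event name (dots replaced with underscores), the whole module can be attached as a dry-monitor object listener; the 1.2 karafka.rb template presumably wires it up with a single line:

# Attach the default logging listener; dry-monitor routes e.g.
# 'connection.delegator.call' to #on_connection_delegator_call
Karafka.monitor.subscribe(Karafka::Instrumentation::Listener)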

data/lib/karafka/instrumentation/logger.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Instrumentation
+    # Default logger for Event Delegator
+    # @note It uses ::Logger features - providing basic logging
+    class Logger < ::Logger
+      include Singleton
+
+      # Map containing information about log level for given environment
+      ENV_MAP = {
+        'production' => ::Logger::ERROR,
+        'test' => ::Logger::ERROR,
+        'development' => ::Logger::INFO,
+        'debug' => ::Logger::DEBUG,
+        'default' => ::Logger::INFO
+      }.freeze
+
+      # Creates a new instance of logger ensuring that it has a place to write to
+      def initialize(*_args)
+        ensure_dir_exists
+        super(target)
+        self.level = ENV_MAP[Karafka.env] || ENV_MAP['default']
+      end
+
+      private
+
+      # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
+      #   to which we will be writing logs
+      # We use this approach to log stuff to file and to the STDOUT at the same time
+      def target
+        Karafka::Helpers::MultiDelegator
+          .delegate(:write, :close)
+          .to(STDOUT, file)
+      end
+
+      # Makes sure the log directory exists
+      def ensure_dir_exists
+        dir = File.dirname(log_path)
+        FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
+      end
+
+      # @return [Pathname] Path to a file to which we should log
+      def log_path
+        @log_path ||= Karafka::App.root.join("log/#{Karafka.env}.log")
+      end
+
+      # @return [File] file to which we want to write our logs
+      # @note File is being opened in append mode ('a')
+      def file
+        @file ||= File.open(log_path, 'a')
+      end
+    end
+  end
+end
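
The logger resolves its level from ENV_MAP and, through the MultiDelegator target, writes every entry both to STDOUT and to log/<env>.log. Usage stays the plain ::Logger API on the singleton:

# In development this logs at INFO level to STDOUT and log/development.log
Karafka::Instrumentation::Logger.instance.info('Karafka app booted')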

data/lib/karafka/instrumentation/monitor.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Namespace for all the things related with Karafka instrumentation process
+  module Instrumentation
+    # Monitor is used to hook up external monitoring services to monitor how Karafka works
+    # It provides a standardized API for checking incoming messages/enqueueing etc
+    # Since it is a pub-sub based on dry-monitor, you can use as many subscribers/loggers at the
+    # same time, which means that you might have for example file logging and newrelic at the same
+    # time
+    # @note This class acts as a singleton because we are only permitted to have a single monitor
+    #   per running process (just as logger)
+    class Monitor < Dry::Monitor::Notifications
+      include Singleton
+
+      # List of events that we support in the system and to which a monitor client can hook up
+      # @note The non-error ones support timestamp benchmarking
+      # @note Depending on Karafka extensions and additional engines, this might not be the
+      #   complete list of all the events. Please use the #available_events on fully loaded
+      #   Karafka system to determine all of the events you can use.
+      #   Last 4 events are from WaterDrop but for convenience we use the same monitor for the
+      #   whole karafka ecosystem
+      BASE_EVENTS = %w[
+        params.params.parse
+        params.params.parse.error
+        connection.listener.fetch_loop.error
+        connection.client.fetch_loop.error
+        connection.delegator.call
+        fetcher.call.error
+        backends.inline.process
+        process.notice_signal
+        consumers.responders.respond_with
+        async_producer.call.error
+        async_producer.call.retry
+        sync_producer.call.error
+        sync_producer.call.retry
+      ].freeze
+
+      private_constant :BASE_EVENTS
+
+      # @return [Karafka::Instrumentation::Monitor] monitor instance for system instrumentation
+      def initialize
+        super(:karafka)
+        BASE_EVENTS.each(&method(:register_event))
+      end
+
+      # Allows us to subscribe to events with a code that will be yielded upon events
+      # @param event_name_or_listener [String, Object] name of the event we want to subscribe to
+      #   or a listener if we decide to go with object listener
+      def subscribe(event_name_or_listener)
+        return super unless event_name_or_listener.is_a?(String)
+        return super if available_events.include?(event_name_or_listener)
+        raise Errors::UnregisteredMonitorEvent, event_name_or_listener
+      end
+
+      # @return [Array<String>] names of available events to which we can subscribe
+      def available_events
+        __bus__.events.keys
+      end
+    end
+  end
+end
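
Subscription with a string event name is validated against the registered list, while object listeners (like the logging listener above) skip the check and are delegated straight to dry-monitor. A short sketch (the error tracker is hypothetical):

# Block subscription - the name must be registered
Karafka.monitor.subscribe('connection.listener.fetch_loop.error') do |event|
  MyErrorTracker.notify(event[:error])
end

# Introspect everything currently registered
Karafka.monitor.available_events #=> ['params.params.parse', ...]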

data/lib/karafka/loader.rb
@@ -5,7 +5,6 @@ module Karafka
   module Loader
     # Order in which we want to load app files
     DIRS = %w[
-      config/initializers
       lib
       app
     ].freeze

data/lib/karafka/params/{params.rb → dsl.rb}
@@ -3,18 +3,28 @@
 module Karafka
   # Params namespace encapsulating all the logic that is directly related to params handling
   module Params
-    # Class-wrapper for hash with indifferent access with additional lazy loading feature
+    # Dsl for Karafka params. We don't provide the params class here as we want to allow users to
+    # use either hash (default) or Rails hash with indifferent access as a base for their params
+    #
+    # We do that because both of them have their own advantages and we don't want to enforce users
+    # to handle things differently if they already use any of those
+    #
     # It provides lazy loading not only until the first usage, but also allows us to skip
     # using parser until we execute our logic. That way we can operate with
     # heavy-parsing data without slowing down the whole application.
-    class Params < HashWithIndifferentAccess
-      # Kafka::FetchedMessage attributes that we want to use inside of params
-      KAFKA_MESSAGE_ATTRIBUTES = %i[
+    module Dsl
+      # Params keys that are "our" and internal. We use this list for additional backends
+      # that don't allow symbols to be transferred, to remap the interchanged params
+      # back into a valid form
+      SYSTEM_KEYS = %i[
+        parser
         value
         partition
         offset
         key
         create_time
+        receive_time
+        topic
       ].freeze
 
       # Params attributes that should be available via a method call invocation for Kafka
@@ -22,15 +32,19 @@ module Karafka
       # Kafka passes internally Kafka::FetchedMessage object and the ruby-kafka consumer
       # uses those fields via method calls, so in order to be able to pass there our params
       # objects, they have to have the same api.
-      PARAMS_METHOD_ATTRIBUTES = %i[
+      METHOD_ATTRIBUTES = %i[
         topic
         partition
         offset
         key
         create_time
+        receive_time
       ].freeze
 
-      class << self
+      private_constant :METHOD_ATTRIBUTES
+
+      # Class methods required by params to work
+      module ClassMethods
         # We allow building instances only via the #build method
 
         # @param message [Kafka::FetchedMessage, Hash] message that we get out of Kafka
@@ -44,38 +58,27 @@ module Karafka
         # @example Build params instance from a Kafka::FetchedMessage object
         #   Karafka::Params::Params.build(message) #=> params object
         def build(message, parser)
-          # Hash case happens inside backends that interchange data
-          if message.is_a?(Hash)
-            new(parser: parser).send(:merge!, message)
-          else
-            # This happens inside Kafka::FetchedProcessor
-            new(
-              parser: parser,
-              parsed: false,
-              received_at: Time.now
-            ).tap do |instance|
-              KAFKA_MESSAGE_ATTRIBUTES.each do |attribute|
-                instance[attribute] = message.send(attribute)
-              end
+          instance = new
+          instance[:parser] = parser
 
-              # When we get raw messages, they might have a topic, that was modified by a
-              # topic mapper. We need to "reverse" this change and map back to the non-modified
-              # format, so our internal flow is not corrupted with the mapping
-              instance[:topic] = Karafka::App.config.topic_mapper.incoming(message.topic)
-            end
+          # Non kafka fetched message can happen when we interchange data with an
+          # additional backend
+          if message.is_a?(Kafka::FetchedMessage)
+            instance[:value] = message.value
+            instance[:partition] = message.partition
+            instance[:offset] = message.offset
+            instance[:key] = message.key
+            instance[:create_time] = message.create_time
+            instance[:receive_time] = Time.now
+            # When we get raw messages, they might have a topic, that was modified by a
+            # topic mapper. We need to "reverse" this change and map back to the non-modified
+            # format, so our internal flow is not corrupted with the mapping
+            instance[:topic] = Karafka::App.config.topic_mapper.incoming(message.topic)
+          else
+            instance.send(:merge!, message)
           end
-        end
 
-        # Defines a method call accessor to a particular hash field.
-        # @note Won't work for complex key names that contain spaces, etc
-        # @param key [Symbol] name of a field that we want to retrieve with a method call
-        # @example
-        #   key_attr_reader :example
-        #   params.example #=> 'my example value'
-        def key_attr_reader(key)
-          define_method key do
-            self[key]
-          end
+          instance
         end
       end
 
@@ -85,18 +88,41 @@ module Karafka
       #   parse it again.
       def retrieve!
        return self if self[:parsed]
+        self[:parsed] = true
 
         merge!(parse(delete(:value)))
       end
 
-      PARAMS_METHOD_ATTRIBUTES.each(&method(:key_attr_reader))
+      # Includes and extends the base params klass with everything that is needed by Karafka to
+      # fully work in any conditions.
+      # @param params_klass [Karafka::Params::Params] initialized params class that we will
+      #   use for a given Karafka process
+      def self.included(params_klass)
+        params_klass.extend(Dsl::ClassMethods)
+
+        METHOD_ATTRIBUTES.each do |attr|
+          # Defines a method call accessor to a particular hash field.
+          # @note Won't work for complex key names that contain spaces, etc
+          # @param key [Symbol] name of a field that we want to retrieve with a method call
+          # @example
+          #   key_attr_reader :example
+          #   params.example #=> 'my example value'
+          params_klass.send :define_method, attr do
+            self[attr]
+          end
+        end
+
+        params_klass.send :private, :merge!
+        params_klass.send :private, :parse
+      end
 
       private
 
       # Overwritten merge! method - it behaves differently for keys that are the same in our hash
       # and in the other_hash - it will not replace keys that are the same in our hash
-      # and in the other one
-      # @param other_hash [Hash, HashWithIndifferentAccess] hash that we want to merge into current
+      # and in the other one. This protects some important Karafka params keys that cannot be
+      # replaced with custom values from incoming Kafka message
+      # @param other_hash [Hash] hash that we want to merge into current
       # @return [Karafka::Params::Params] our parameters hash with merged values
       # @example Merge with hash without same keys
       #   new(a: 1, b: 2).merge!(c: 3) #=> { a: 1, b: 2, c: 3 }
@@ -110,18 +136,17 @@ module Karafka
         super(other_hash) { |_key, base_value, _new_value| base_value }
       end
 
-      # @param value [String] Raw data that we want to parse using controller's parser
+      # @param value [String] Raw data that we want to parse using consumer parser
       # @note If something goes wrong, it will return raw data in a hash with a message key
       # @return [Hash] parsed data or a hash with message key containing raw data if something
       #   went wrong during parsing
       def parse(value)
-        self[:parser].parse(value)
-      # We catch both of them, because for default JSON - we use JSON parser directly
+        Karafka.monitor.instrument('params.params.parse', caller: self) do
+          self[:parser].parse(value)
+        end
       rescue ::Karafka::Errors::ParserError => e
-        Karafka.monitor.notice_error(self.class, e)
+        Karafka.monitor.instrument('params.params.parse.error', caller: self, error: e)
         raise e
-      ensure
-        self[:parsed] = true
      end
    end
  end
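
With params reduced to this DSL, the concrete class is supplied during setup (the new setup/configurators/params.rb in the file list presumably builds a default one). A hedged sketch of the resulting contract; the message and parser names below are hypothetical placeholders:

# Plain Hash by default; a Rails app could use HashWithIndifferentAccess instead
class Params < Hash
  include Karafka::Params::Dsl
end

params = Params.build(kafka_fetched_message, MyParser) # parser must respond to #parse
params.topic      # method accessor generated from METHOD_ATTRIBUTES
params.retrieve!  # lazy parsing happens only here, instrumented as 'params.params.parse'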