karafka 1.2.11

Files changed (88)
  1. checksums.yaml +7 -0
  2. data/.coditsu.yml +3 -0
  3. data/.console_irbrc +13 -0
  4. data/.gitignore +68 -0
  5. data/.rspec +1 -0
  6. data/.ruby-gemset +1 -0
  7. data/.ruby-version +1 -0
  8. data/.travis.yml +49 -0
  9. data/CHANGELOG.md +458 -0
  10. data/CODE_OF_CONDUCT.md +46 -0
  11. data/CONTRIBUTING.md +41 -0
  12. data/Gemfile +15 -0
  13. data/Gemfile.lock +126 -0
  14. data/MIT-LICENCE +18 -0
  15. data/README.md +102 -0
  16. data/bin/karafka +19 -0
  17. data/config/errors.yml +6 -0
  18. data/karafka.gemspec +42 -0
  19. data/lib/karafka.rb +79 -0
  20. data/lib/karafka/app.rb +45 -0
  21. data/lib/karafka/attributes_map.rb +69 -0
  22. data/lib/karafka/backends/inline.rb +16 -0
  23. data/lib/karafka/base_consumer.rb +68 -0
  24. data/lib/karafka/base_responder.rb +208 -0
  25. data/lib/karafka/callbacks.rb +30 -0
  26. data/lib/karafka/callbacks/config.rb +22 -0
  27. data/lib/karafka/callbacks/dsl.rb +16 -0
  28. data/lib/karafka/cli.rb +54 -0
  29. data/lib/karafka/cli/base.rb +78 -0
  30. data/lib/karafka/cli/console.rb +29 -0
  31. data/lib/karafka/cli/flow.rb +46 -0
  32. data/lib/karafka/cli/info.rb +29 -0
  33. data/lib/karafka/cli/install.rb +42 -0
  34. data/lib/karafka/cli/server.rb +66 -0
  35. data/lib/karafka/connection/api_adapter.rb +148 -0
  36. data/lib/karafka/connection/builder.rb +16 -0
  37. data/lib/karafka/connection/client.rb +107 -0
  38. data/lib/karafka/connection/delegator.rb +46 -0
  39. data/lib/karafka/connection/listener.rb +60 -0
  40. data/lib/karafka/consumers/callbacks.rb +54 -0
  41. data/lib/karafka/consumers/includer.rb +51 -0
  42. data/lib/karafka/consumers/responders.rb +24 -0
  43. data/lib/karafka/consumers/single_params.rb +15 -0
  44. data/lib/karafka/errors.rb +50 -0
  45. data/lib/karafka/fetcher.rb +44 -0
  46. data/lib/karafka/helpers/class_matcher.rb +78 -0
  47. data/lib/karafka/helpers/config_retriever.rb +46 -0
  48. data/lib/karafka/helpers/multi_delegator.rb +33 -0
  49. data/lib/karafka/instrumentation/listener.rb +112 -0
  50. data/lib/karafka/instrumentation/logger.rb +55 -0
  51. data/lib/karafka/instrumentation/monitor.rb +64 -0
  52. data/lib/karafka/loader.rb +28 -0
  53. data/lib/karafka/params/dsl.rb +158 -0
  54. data/lib/karafka/params/params_batch.rb +46 -0
  55. data/lib/karafka/parsers/json.rb +38 -0
  56. data/lib/karafka/patches/dry_configurable.rb +33 -0
  57. data/lib/karafka/patches/ruby_kafka.rb +34 -0
  58. data/lib/karafka/persistence/client.rb +25 -0
  59. data/lib/karafka/persistence/consumer.rb +38 -0
  60. data/lib/karafka/persistence/topic.rb +29 -0
  61. data/lib/karafka/process.rb +62 -0
  62. data/lib/karafka/responders/builder.rb +36 -0
  63. data/lib/karafka/responders/topic.rb +57 -0
  64. data/lib/karafka/routing/builder.rb +61 -0
  65. data/lib/karafka/routing/consumer_group.rb +61 -0
  66. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  67. data/lib/karafka/routing/proxy.rb +37 -0
  68. data/lib/karafka/routing/router.rb +29 -0
  69. data/lib/karafka/routing/topic.rb +60 -0
  70. data/lib/karafka/routing/topic_mapper.rb +55 -0
  71. data/lib/karafka/schemas/config.rb +24 -0
  72. data/lib/karafka/schemas/consumer_group.rb +78 -0
  73. data/lib/karafka/schemas/consumer_group_topic.rb +18 -0
  74. data/lib/karafka/schemas/responder_usage.rb +39 -0
  75. data/lib/karafka/schemas/server_cli_options.rb +43 -0
  76. data/lib/karafka/server.rb +85 -0
  77. data/lib/karafka/setup/config.rb +193 -0
  78. data/lib/karafka/setup/configurators/base.rb +29 -0
  79. data/lib/karafka/setup/configurators/params.rb +25 -0
  80. data/lib/karafka/setup/configurators/water_drop.rb +32 -0
  81. data/lib/karafka/setup/dsl.rb +22 -0
  82. data/lib/karafka/status.rb +25 -0
  83. data/lib/karafka/templates/application_consumer.rb.example +6 -0
  84. data/lib/karafka/templates/application_responder.rb.example +11 -0
  85. data/lib/karafka/templates/karafka.rb.example +54 -0
  86. data/lib/karafka/version.rb +7 -0
  87. data/log/.gitkeep +0 -0
  88. metadata +303 -0
data/lib/karafka/helpers/class_matcher.rb
@@ -0,0 +1,78 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Helpers
+     # Class used to autodetect corresponding classes within the Karafka framework
+     # It is used among others to match:
+     #   consumer => responder
+     class ClassMatcher
+       # Regexp used to remove any non class-like characters that might be in the consumer
+       # class name (if defined dynamically, etc.)
+       CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}
+
+       # @param klass [Class] class for which we want to find a corresponding class
+       # @param from [String] what type of object it is (based on the postfix name part)
+       # @param to [String] what we are looking for (based on the postfix name part)
+       # @example Consumer that has a corresponding responder
+       #   matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, from: 'Consumer', to: 'Responder')
+       #   matcher.match #=> SuperResponder
+       # @example Consumer without a corresponding responder
+       #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Consumer, from: 'Consumer', to: 'Responder')
+       #   matcher.match #=> nil
+       def initialize(klass, from:, to:)
+         @klass = klass
+         @from = from
+         @to = to
+       end
+
+       # @return [Class] matched class
+       # @return [nil] nil if we couldn't find a matching class
+       def match
+         return nil if name.empty?
+         return nil unless scope.const_defined?(name)
+         matching = scope.const_get(name)
+         same_scope?(matching) ? matching : nil
+       end
+
+       # @return [String] name of the class that we're looking for
+       # @note This method returns the name of a class without its namespace
+       # @example From SuperConsumer matching responder
+       #   matcher.name #=> 'SuperResponder'
+       # @example From Namespaced::Super2Consumer matching responder
+       #   matcher.name #=> 'Super2Responder'
+       def name
+         inflected = @klass.to_s.split('::').last.to_s
+         inflected.gsub!(@from, @to)
+         inflected.gsub!(CONSTANT_REGEXP, '')
+         inflected
+       end
+
+       # @return [Class, Module] class or module in which we're looking for a match
+       def scope
+         scope_of(@klass)
+       end
+
+       private
+
+       # @param klass [Class] class for which we want to extract its enclosing class/module
+       # @return [Class, Module] enclosing class/module
+       # @return [::Object] Object if it was a root class
+       #
+       # @example Non-namespaced class
+       #   scope_of(SuperClass) #=> Object
+       # @example Namespaced class
+       #   scope_of(Abc::SuperClass) #=> Abc
+       def scope_of(klass)
+         enclosing = klass.to_s.split('::')[0...-1]
+         return ::Object if enclosing.empty?
+         ::Object.const_get(enclosing.join('::'))
+       end
+
+       # @param matching [Class] class whose scope we want to check
+       # @return [Boolean] true if the scope of the class is the same as the scope of matching
+       def same_scope?(matching)
+         scope == scope_of(matching)
+       end
+     end
+   end
+ end
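Usage sketch (not part of the gem itself): the classes below are hypothetical and only illustrate how the matcher resolves, and fails to resolve, a counterpart, following the @example docs above.

    # Hypothetical classes, for illustration only
    class SuperConsumer; end
    class SuperResponder; end

    matcher = Karafka::Helpers::ClassMatcher.new(
      SuperConsumer, from: 'Consumer', to: 'Responder'
    )
    matcher.name  #=> 'SuperResponder'
    matcher.scope #=> Object
    matcher.match #=> SuperResponder

    # Without a counterpart defined, matching returns nil instead of raising
    class LonelyConsumer; end
    Karafka::Helpers::ClassMatcher.new(
      LonelyConsumer, from: 'Consumer', to: 'Responder'
    ).match #=> nil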
data/lib/karafka/helpers/config_retriever.rb
@@ -0,0 +1,46 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Helpers
+     # A helper that allows us to build methods that first try to get a given
+     # attribute from the instance value and, if that fails, fall back to
+     # the default config or config.kafka value for that attribute.
+     # It is used to simplify these checks.
+     # @note Worth noting that the value might be equal to false, and even
+     #   then we need to return it. That's why we check for nil?
+     # @example Define a config-retrieved attribute for start_from_beginning
+     #   class Test
+     #     extend Karafka::Helpers::ConfigRetriever
+     #     config_retriever_for :start_from_beginning
+     #   end
+     #
+     #   Test.new.start_from_beginning #=> false
+     #   test_instance = Test.new
+     #   test_instance.start_from_beginning = true
+     #   test_instance.start_from_beginning #=> true
+     module ConfigRetriever
+       # Builds proper methods for setting and retrieving (with fallback) a given attribute value
+       # @param attribute [Symbol] attribute name based on which we will build
+       #   an accessor with fallback
+       def config_retriever_for(attribute)
+         attr_writer attribute unless method_defined? :"#{attribute}="
+
+         # Don't redefine if we already have an accessor for a given element
+         return if method_defined? attribute
+
+         define_method attribute do
+           current_value = instance_variable_get(:"@#{attribute}")
+           return current_value unless current_value.nil?
+
+           value = if Karafka::App.config.respond_to?(attribute)
+                     Karafka::App.config.send(attribute)
+                   else
+                     Karafka::App.config.kafka.send(attribute)
+                   end
+
+           instance_variable_set(:"@#{attribute}", value)
+         end
+       end
+     end
+   end
+ end
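A short fallback sketch (ExampleTopic is hypothetical and a booted Karafka app with a configured start_from_beginning is assumed):

    class ExampleTopic
      extend Karafka::Helpers::ConfigRetriever
      config_retriever_for :start_from_beginning
    end

    topic = ExampleTopic.new
    # No instance value yet, so the reader falls back to
    # Karafka::App.config (or Karafka::App.config.kafka)
    topic.start_from_beginning #=> whatever the app config holds

    # An explicit instance value wins over the fallback - even false,
    # which is why the implementation checks nil? rather than truthiness
    topic.start_from_beginning = false
    topic.start_from_beginning #=> false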
data/lib/karafka/helpers/multi_delegator.rb
@@ -0,0 +1,33 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Module containing classes and methods that provide additional functionalities
+   module Helpers
+     # @note Taken from http://stackoverflow.com/questions/6407141
+     # MultiDelegator is used to delegate calls to multiple targets
+     class MultiDelegator
+       # @param targets [Array<Object>] targets to which we want to delegate methods
+       def initialize(*targets)
+         @targets = targets
+       end
+
+       class << self
+         # @param methods [Array<Symbol>] names of methods that should be delegated
+         # @example Delegate write and close to STDOUT and a file
+         #   Logger.new MultiDelegator.delegate(:write, :close).to(STDOUT, log_file)
+         def delegate(*methods)
+           methods.each do |m|
+             define_method(m) do |*args|
+               @targets.map { |t| t.send(m, *args) }
+             end
+           end
+
+           self
+         end
+
+         alias to new
+       end
+     end
+   end
+ end
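A runnable sketch of the delegation (the file name is illustrative): one logger device whose writes fan out to both STDOUT and a file, as in the @example above.

    require 'logger'

    log_file = File.open('example.log', 'a')

    # Every #write and #close call on the delegator is forwarded
    # to both targets
    logger = Logger.new(
      Karafka::Helpers::MultiDelegator.delegate(:write, :close).to(STDOUT, log_file)
    )
    logger.info('goes to STDOUT and example.log at the same time')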
data/lib/karafka/instrumentation/listener.rb
@@ -0,0 +1,112 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Instrumentation
+     # Default listener that hooks up to our instrumentation and uses its events for logging
+     # It can be removed or replaced without any harm to the Karafka app flow
+     module Listener
+       # Log levels that we use in this particular listener
+       USED_LOG_LEVELS = %i[
+         debug
+         info
+         error
+         fatal
+       ].freeze
+
+       # Injects WaterDrop listener logger actions
+       extend WaterDrop::Instrumentation::Listener
+
+       class << self
+         # Logs details about incoming messages and with which consumer we will consume them
+         # @param event [Dry::Events::Event] event details including payload
+         def on_connection_delegator_call(event)
+           consumer = event[:consumer]
+           topic = consumer.topic.name
+           kafka_messages = event[:kafka_messages]
+           info "#{kafka_messages.count} messages on #{topic} topic delegated to #{consumer.class}"
+         end
+
+         # Logs details about the parsing of each received message value
+         # @param event [Dry::Events::Event] event details including payload
+         def on_params_params_parse(event)
+           # Keep in mind that the caller here is a params object, not a controller,
+           # so it returns a topic as a string, not a routing topic
+           debug "Params parsing for #{event[:caller].topic} topic successful in #{event[:time]} ms"
+         end
+
+         # Logs unsuccessful parsing attempts of incoming data
+         # @param event [Dry::Events::Event] event details including payload
+         def on_params_params_parse_error(event)
+           error "Params parsing error for #{event[:caller].topic} topic: #{event[:error]}"
+         end
+
+         # Logs errors that occurred in a listener fetch loop
+         # @param event [Dry::Events::Event] event details including payload
+         # @note It's an error, not a fatal, as we can recover from it
+         def on_connection_listener_fetch_loop_error(event)
+           error "Listener fetch loop error: #{event[:error]}"
+         end
+
+         # Logs errors that are related to the connection itself
+         # @note Karafka will attempt to reconnect, so this is an error, not a fatal
+         # @param event [Dry::Events::Event] event details including payload
+         def on_connection_client_fetch_loop_error(event)
+           error "Client fetch loop error: #{event[:error]}"
+         end
+
+         # Logs info about a crashed fetcher
+         # @note If this happens, Karafka will shut down, as it means a critical error
+         #   in one of the threads
+         # @param event [Dry::Events::Event] event details including payload
+         def on_fetcher_call_error(event)
+           fatal "Fetcher crash due to an error: #{event[:error]}"
+         end
+
+         # Logs info about the processing of a certain dataset with an inline backend
+         # @param event [Dry::Events::Event] event details including payload
+         def on_backends_inline_process(event)
+           count = event[:caller].send(:params_batch).to_a.size
+           topic = event[:caller].topic.name
+           time = event[:time]
+           info "Inline processing of topic #{topic} with #{count} messages took #{time} ms"
+         end
+
+         # Logs info about system signals that Karafka received
+         # @param event [Dry::Events::Event] event details including payload
+         def on_process_notice_signal(event)
+           info "Received #{event[:signal]} system signal"
+         end
+
+         # Logs info about responder usage within a controller flow
+         # @param event [Dry::Events::Event] event details including payload
+         def on_consumers_responders_respond_with(event)
+           calling = event[:caller].class
+           responder = calling.topic.responder
+           data = event[:data]
+           info "Responded from #{calling} using #{responder} with following data #{data}"
+         end
+
+         # Logs info that we're going to stop the Karafka server
+         # @param _event [Dry::Events::Event] event details including payload
+         def on_server_stop(_event)
+           # We use a separate thread as logging can't be called from a trap context
+           Thread.new { info "Stopping Karafka server #{::Process.pid}" }
+         end
+
+         # Logs an error that Karafka was unable to stop the server gracefully and had to do a
+         # forced exit
+         # @param _event [Dry::Events::Event] event details including payload
+         def on_server_stop_error(_event)
+           # We use a separate thread as logging can't be called from a trap context
+           Thread.new { error "Forceful Karafka server #{::Process.pid} stop" }
+         end
+
+         USED_LOG_LEVELS.each do |log_level|
+           define_method log_level do |*args|
+             Karafka.logger.send(log_level, *args)
+           end
+         end
+       end
+     end
+   end
+ end
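The listener only becomes active once it is subscribed to the monitor; the generated karafka.rb template does this, and the sketch below shows the idea (the block subscription is illustrative):

    # Subscribe the whole listener module - its on_* methods map to event names
    Karafka.monitor.subscribe(Karafka::Instrumentation::Listener)

    # Individual events can also be subscribed to with a block
    Karafka.monitor.subscribe('connection.delegator.call') do |event|
      puts "#{event[:kafka_messages].count} messages delegated"
    end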
data/lib/karafka/instrumentation/logger.rb
@@ -0,0 +1,55 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Instrumentation
+     # Default logger for the event delegator
+     # @note It uses ::Logger features - providing basic logging
+     class Logger < ::Logger
+       include Singleton
+
+       # Map containing information about the log level for a given environment
+       ENV_MAP = {
+         'production' => ::Logger::ERROR,
+         'test' => ::Logger::ERROR,
+         'development' => ::Logger::INFO,
+         'debug' => ::Logger::DEBUG,
+         'default' => ::Logger::INFO
+       }.freeze
+
+       # Creates a new instance of logger, ensuring that it has a place to write to
+       def initialize(*_args)
+         ensure_dir_exists
+         super(target)
+         self.level = ENV_MAP[Karafka.env] || ENV_MAP['default']
+       end
+
+       private
+
+       # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
+       #   to which we will be writing logs
+       # We use this approach to log to a file and to STDOUT at the same time
+       def target
+         Karafka::Helpers::MultiDelegator
+           .delegate(:write, :close)
+           .to(STDOUT, file)
+       end
+
+       # Makes sure the log directory exists
+       def ensure_dir_exists
+         dir = File.dirname(log_path)
+         FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
+       end
+
+       # @return [Pathname] path to the file to which we should log
+       def log_path
+         @log_path ||= Karafka::App.root.join("log/#{Karafka.env}.log")
+       end
+
+       # @return [File] file to which we want to write our logs
+       # @note File is being opened in append mode ('a')
+       def file
+         @file ||= File.open(log_path, 'a')
+       end
+     end
+   end
+ end
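A sketch of the resulting behavior, assuming KARAFKA_ENV is set to development and that the app uses this logger as Karafka.logger (the default wiring):

    Karafka.env #=> 'development'

    # Level comes from ENV_MAP; unknown environments use the 'default' entry
    Karafka.logger.level #=> ::Logger::INFO

    # Thanks to the MultiDelegator target, this line ends up both on
    # STDOUT and in log/development.log
    Karafka.logger.info('hello from Karafka')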
data/lib/karafka/instrumentation/monitor.rb
@@ -0,0 +1,64 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Namespace for all things related to the Karafka instrumentation process
+   module Instrumentation
+     # Monitor is used to hook up external monitoring services to monitor how Karafka works
+     # It provides a standardized API for instrumenting incoming messages, enqueueing, etc.
+     # Since it is pub-sub based on dry-monitor, you can use as many subscribers/loggers at the
+     # same time as you want, which means that you might have, for example, file logging and
+     # NewRelic at the same time
+     # @note This class acts as a singleton because we are only permitted to have a single
+     #   monitor per running process (just as with the logger)
+     class Monitor < Dry::Monitor::Notifications
+       include Singleton
+
+       # List of events that we support in the system and to which a monitor client can hook up
+       # @note The non-error ones support timestamp benchmarking
+       # @note Depending on Karafka extensions and additional engines, this might not be the
+       #   complete list of all the events. Please use #available_events on a fully loaded
+       #   Karafka system to determine all of the events you can use.
+       # The producer events come from WaterDrop, but for convenience we use the same monitor
+       # for the whole Karafka ecosystem
+       BASE_EVENTS = %w[
+         params.params.parse
+         params.params.parse.error
+         connection.listener.fetch_loop.error
+         connection.client.fetch_loop.error
+         connection.delegator.call
+         fetcher.call.error
+         backends.inline.process
+         process.notice_signal
+         consumers.responders.respond_with
+         async_producer.call.error
+         async_producer.call.retry
+         sync_producer.call.error
+         sync_producer.call.retry
+         server.stop
+         server.stop.error
+       ].freeze
+
+       private_constant :BASE_EVENTS
+
+       # @return [Karafka::Instrumentation::Monitor] monitor instance for system instrumentation
+       def initialize
+         super(:karafka)
+         BASE_EVENTS.each(&method(:register_event))
+       end
+
+       # Allows us to subscribe to events with code that will be yielded upon those events
+       # @param event_name_or_listener [String, Object] name of the event we want to subscribe to
+       #   or a listener if we decide to go with an object listener
+       def subscribe(event_name_or_listener)
+         return super unless event_name_or_listener.is_a?(String)
+         return super if available_events.include?(event_name_or_listener)
+         raise Errors::UnregisteredMonitorEvent, event_name_or_listener
+       end
+
+       # @return [Array<String>] names of available events to which we can subscribe
+       def available_events
+         __bus__.events.keys
+       end
+     end
+   end
+ end
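A subscription sketch; note that, per the #subscribe guard above, a misspelled string event name raises at subscription time instead of silently never firing:

    # Valid, registered event - subscription succeeds
    Karafka.monitor.subscribe('params.params.parse') do |event|
      puts "parsing #{event[:caller].topic} took #{event[:time]} ms"
    end

    # Misspelled event name (deliberate typo for illustration) - raises
    # Karafka::Errors::UnregisteredMonitorEvent
    Karafka.monitor.subscribe('params.params.parze')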
data/lib/karafka/loader.rb
@@ -0,0 +1,28 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Loader for requiring all the files in a proper order
+   module Loader
+     # Order in which we want to load app files
+     DIRS = %w[
+       lib
+       app
+     ].freeze
+
+     # Will load files in a proper order (based on DIRS)
+     # @param root [String] path from which we want to start
+     def self.load(root)
+       DIRS.each do |dir|
+         path = File.join(root, dir)
+         next unless File.exist?(path)
+         load!(path)
+       end
+     end
+
+     # Requires all the Ruby files from one path in a proper order
+     # @param path [String] path (dir) from which we want to load Ruby files in a proper order
+     def self.load!(path)
+       require_all(path)
+     end
+   end
+ end
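For example, given an app root containing lib/ and app/ directories, boot code can require the whole tree in order; require_all here comes from the require_all gem that Karafka depends on:

    # lib/ is loaded before app/, per DIRS; missing directories are skipped
    Karafka::Loader.load(Karafka::App.root)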
data/lib/karafka/params/dsl.rb
@@ -0,0 +1,158 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Params namespace encapsulating all the logic that is directly related to params handling
+   module Params
+     # Dsl for Karafka params. We don't provide the params class here, as we want to allow
+     # users to use either a hash (default) or a Rails hash with indifferent access as a base
+     # for their params
+     #
+     # We do that because both of them have their own advantages and we don't want to force
+     # users to handle things differently if they already use one of them
+     #
+     # It provides lazy loading: not only until the first usage, but it also allows us to skip
+     # using the parser until we execute our logic. That way we can operate with
+     # heavy-parsing data without slowing down the whole application.
+     module Dsl
+       # Params keys that are "ours" and internal. We use this list for additional backends
+       # that operate on those keys
+       SYSTEM_KEYS = %w[
+         parser
+         value
+         partition
+         offset
+         key
+         create_time
+         receive_time
+         topic
+         parsed
+       ].freeze
+
+       # Params attributes that should be available via a method call invocation for Kafka
+       # client compatibility.
+       # ruby-kafka internally passes around Kafka::FetchedMessage objects and the consumer
+       # uses those fields via method calls, so in order to be able to pass our params
+       # objects there, they have to have the same API.
+       METHOD_ATTRIBUTES = %w[
+         topic
+         partition
+         offset
+         key
+         create_time
+         receive_time
+       ].freeze
+
+       private_constant :METHOD_ATTRIBUTES
+
+       # Class methods required by params to work
+       module ClassMethods
+         # We allow building instances only via the #build method
+
+         # @param message [Kafka::FetchedMessage, Hash] message that we get out of Kafka
+         #   in case of building params inside the main Karafka process in
+         #   Karafka::Connection::Consumer, or a hash when we retrieve data that is
+         #   already parsed
+         # @param parser [Class] parser class that we will use to parse the data
+         # @return [Karafka::Params::Params] Karafka params object whose parser has not yet
+         #   been run on the data that we've got from Kafka
+         # @example Build params instance from a hash
+         #   Karafka::Params::Params.build({ key: 'value' }, parser) #=> params object
+         # @example Build params instance from a Kafka::FetchedMessage object
+         #   Karafka::Params::Params.build(message, parser) #=> params object
+         def build(message, parser)
+           instance = new
+           instance['parser'] = parser
+
+           # A non Kafka-fetched message can happen when we interchange data with an
+           # additional backend
+           if message.is_a?(Kafka::FetchedMessage)
+             instance.send(
+               :merge!,
+               'value' => message.value,
+               'partition' => message.partition,
+               'offset' => message.offset,
+               'key' => message.key,
+               'create_time' => message.create_time,
+               'receive_time' => Time.now,
+               # When we get raw messages, they might have a topic that was modified by a
+               # topic mapper. We need to "reverse" this change and map back to the
+               # non-modified format, so our internal flow is not corrupted by the mapping
+               'topic' => Karafka::App.config.topic_mapper.incoming(message.topic)
+             )
+           else
+             instance.send(:merge!, message)
+           end
+
+           instance
+         end
+       end
+
+       # @return [Karafka::Params::Params] this will trigger parser execution. When we decide
+       #   to retrieve data, the parser will be executed to parse it. The output of parsing
+       #   will be merged into the current object, and the object will be marked as already
+       #   parsed, so we won't parse it again.
+       def retrieve!
+         return self if self['parsed']
+         self['parsed'] = true
+
+         parsed_data = parse(self['value'])
+         delete('value')
+         merge!(parsed_data)
+       end
+
+       # Includes and extends the base params klass with everything that is needed by Karafka
+       # to fully work in any conditions.
+       # @param params_klass [Karafka::Params::Params] initialized params class that we will
+       #   use for a given Karafka process
+       def self.included(params_klass)
+         params_klass.extend(Dsl::ClassMethods)
+
+         METHOD_ATTRIBUTES.each do |attr|
+           # Defines a method call accessor to a particular hash field.
+           # @note Won't work for complex key names that contain spaces, etc.
+           # @param key [Symbol] name of a field that we want to retrieve with a method call
+           # @example
+           #   key_attr_reader :example
+           #   params.example #=> 'my example value'
+           params_klass.send :define_method, attr do
+             self[attr]
+           end
+         end
+
+         params_klass.send :private, :merge!
+         params_klass.send :private, :parse
+       end
+
+       private
+
+       # Overwritten merge! method - it behaves differently for keys that are present both in
+       # our hash and in the other_hash: it will not replace keys that already exist in our
+       # hash. This protects important Karafka params keys that cannot be replaced with
+       # custom values from an incoming Kafka message
+       # @param other_hash [Hash] hash that we want to merge into the current one
+       # @return [Karafka::Params::Params] our parameters hash with merged values
+       # @example Merge with a hash without same keys
+       #   new(a: 1, b: 2).merge!(c: 3) #=> { a: 1, b: 2, c: 3 }
+       # @example Merge with a hash with same keys (symbol based)
+       #   new(a: 1).merge!(a: 2) #=> { a: 1 }
+       # @example Merge with a hash with same keys (string based)
+       #   new(a: 1).merge!('a' => 2) #=> { a: 1 }
+       # @example Merge with a hash with same keys (current string based)
+       #   new('a' => 1).merge!(a: 2) #=> { a: 1 }
+       def merge!(other_hash)
+         super(other_hash) { |_key, base_value, _new_value| base_value }
+       end
+
+       # @param value [String] raw data that we want to parse using the consumer parser
+       # @note If something goes wrong, it will return raw data in a hash with a message key
+       # @return [Hash] parsed data, or a hash with a message key containing raw data if
+       #   something went wrong during parsing
+       def parse(value)
+         Karafka.monitor.instrument('params.params.parse', caller: self) do
+           self['parser'].parse(value)
+         end
+       rescue ::Karafka::Errors::ParserError => e
+         Karafka.monitor.instrument('params.params.parse.error', caller: self, error: e)
+         raise e
+       end
+     end
+   end
+ end
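A lazy-parsing sketch, assuming a booted Karafka app and a params class built on a plain Hash (MyParams is hypothetical; a HashWithIndifferentAccess base behaves the same way):

    class MyParams < Hash
      include Karafka::Params::Dsl
    end

    params = MyParams.build(
      { 'value' => '{"number":1}' },
      Karafka::Parsers::Json
    )

    params['parsed'] #=> nil - the raw payload still sits under 'value'
    params.retrieve! # runs the parser once, merges the result, drops 'value'
    params['number'] #=> 1
    params.retrieve! # subsequent calls are no-ops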