karafka 1.2.2 → 1.4.0.rc1

Sign up to get free protection for your applications and to get access to all the features.
Files changed (113) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +52 -0
  11. data/.gitignore +1 -0
  12. data/.ruby-version +1 -1
  13. data/CHANGELOG.md +157 -13
  14. data/CODE_OF_CONDUCT.md +1 -1
  15. data/CONTRIBUTING.md +1 -1
  16. data/Gemfile +5 -2
  17. data/Gemfile.lock +95 -79
  18. data/README.md +15 -3
  19. data/bin/karafka +1 -1
  20. data/certs/mensfeld.pem +25 -0
  21. data/config/errors.yml +38 -5
  22. data/docker-compose.yml +17 -0
  23. data/karafka.gemspec +19 -13
  24. data/lib/karafka.rb +10 -16
  25. data/lib/karafka/app.rb +14 -6
  26. data/lib/karafka/attributes_map.rb +13 -18
  27. data/lib/karafka/base_consumer.rb +19 -30
  28. data/lib/karafka/base_responder.rb +51 -29
  29. data/lib/karafka/cli.rb +2 -2
  30. data/lib/karafka/cli/console.rb +11 -9
  31. data/lib/karafka/cli/flow.rb +9 -7
  32. data/lib/karafka/cli/info.rb +4 -2
  33. data/lib/karafka/cli/install.rb +30 -6
  34. data/lib/karafka/cli/server.rb +11 -6
  35. data/lib/karafka/code_reloader.rb +67 -0
  36. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +62 -21
  37. data/lib/karafka/connection/batch_delegator.rb +55 -0
  38. data/lib/karafka/connection/builder.rb +18 -0
  39. data/lib/karafka/connection/client.rb +40 -40
  40. data/lib/karafka/connection/listener.rb +26 -15
  41. data/lib/karafka/connection/message_delegator.rb +36 -0
  42. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  43. data/lib/karafka/consumers/callbacks.rb +32 -15
  44. data/lib/karafka/consumers/includer.rb +31 -18
  45. data/lib/karafka/consumers/responders.rb +2 -2
  46. data/lib/karafka/contracts.rb +10 -0
  47. data/lib/karafka/contracts/config.rb +21 -0
  48. data/lib/karafka/contracts/consumer_group.rb +206 -0
  49. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  50. data/lib/karafka/contracts/responder_usage.rb +54 -0
  51. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  52. data/lib/karafka/errors.rb +17 -19
  53. data/lib/karafka/fetcher.rb +28 -30
  54. data/lib/karafka/helpers/class_matcher.rb +12 -2
  55. data/lib/karafka/helpers/config_retriever.rb +1 -1
  56. data/lib/karafka/helpers/inflector.rb +26 -0
  57. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  58. data/lib/karafka/instrumentation/logger.rb +9 -6
  59. data/lib/karafka/instrumentation/monitor.rb +15 -9
  60. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  61. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  62. data/lib/karafka/params/batch_metadata.rb +26 -0
  63. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  64. data/lib/karafka/params/builders/params.rb +38 -0
  65. data/lib/karafka/params/builders/params_batch.rb +25 -0
  66. data/lib/karafka/params/metadata.rb +20 -0
  67. data/lib/karafka/params/params.rb +50 -0
  68. data/lib/karafka/params/params_batch.rb +35 -21
  69. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  70. data/lib/karafka/persistence/client.rb +15 -11
  71. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  72. data/lib/karafka/persistence/topics.rb +48 -0
  73. data/lib/karafka/process.rb +0 -4
  74. data/lib/karafka/responders/builder.rb +1 -1
  75. data/lib/karafka/responders/topic.rb +6 -8
  76. data/lib/karafka/routing/builder.rb +36 -8
  77. data/lib/karafka/routing/consumer_group.rb +1 -1
  78. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  79. data/lib/karafka/routing/proxy.rb +10 -1
  80. data/lib/karafka/routing/topic.rb +5 -3
  81. data/lib/karafka/routing/topic_mapper.rb +16 -18
  82. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  83. data/lib/karafka/serialization/json/serializer.rb +31 -0
  84. data/lib/karafka/server.rb +34 -49
  85. data/lib/karafka/setup/config.rb +74 -40
  86. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  87. data/lib/karafka/setup/dsl.rb +0 -1
  88. data/lib/karafka/status.rb +7 -3
  89. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  90. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  91. data/lib/karafka/templates/karafka.rb.erb +92 -0
  92. data/lib/karafka/version.rb +1 -1
  93. metadata +97 -73
  94. metadata.gz.sig +4 -0
  95. data/.travis.yml +0 -13
  96. data/lib/karafka/callbacks.rb +0 -30
  97. data/lib/karafka/callbacks/config.rb +0 -22
  98. data/lib/karafka/callbacks/dsl.rb +0 -16
  99. data/lib/karafka/connection/delegator.rb +0 -46
  100. data/lib/karafka/instrumentation/listener.rb +0 -112
  101. data/lib/karafka/loader.rb +0 -28
  102. data/lib/karafka/params/dsl.rb +0 -156
  103. data/lib/karafka/parsers/json.rb +0 -38
  104. data/lib/karafka/patches/dry_configurable.rb +0 -35
  105. data/lib/karafka/persistence/topic.rb +0 -29
  106. data/lib/karafka/schemas/config.rb +0 -24
  107. data/lib/karafka/schemas/consumer_group.rb +0 -77
  108. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  109. data/lib/karafka/schemas/responder_usage.rb +0 -39
  110. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  111. data/lib/karafka/setup/configurators/base.rb +0 -29
  112. data/lib/karafka/setup/configurators/params.rb +0 -25
  113. data/lib/karafka/templates/karafka.rb.example +0 -54
@@ -8,7 +8,9 @@ module Karafka
8
8
  class ClassMatcher
9
9
  # Regexp used to remove any non classy like characters that might be in the consumer
10
10
  # class name (if defined dynamically, etc)
11
- CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}
11
+ CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}.freeze
12
+
13
+ private_constant :CONSTANT_REGEXP
12
14
 
13
15
  # @param klass [Class] class to which we want to find a corresponding class
14
16
  # @param from [String] what type of object is it (based on postfix name part)
@@ -30,6 +32,7 @@ module Karafka
30
32
  def match
31
33
  return nil if name.empty?
32
34
  return nil unless scope.const_defined?(name)
35
+
33
36
  matching = scope.const_get(name)
34
37
  same_scope?(matching) ? matching : nil
35
38
  end
@@ -41,7 +44,13 @@ module Karafka
41
44
  # @example From Namespaced::Super2Consumer matching responder
42
45
  # matcher.name #=> Super2Responder
43
46
  def name
44
- inflected = @klass.to_s.split('::').last.to_s
47
+ inflected = +@klass.to_s.split('::').last.to_s
48
+ # We inject the from into the name just in case it is missing as in a situation like
49
+ # that it would just sanitize the name without adding the "to" postfix.
50
+ # It could create cases when we want to build for example a responder to a consumer
51
+ # that does not have the "Consumer" postfix and would do nothing returning the same name.
52
+ # That would be bad as the matching classes shouldn't be matched to themselves.
53
+ inflected << @from unless inflected.include?(@from)
45
54
  inflected.gsub!(@from, @to)
46
55
  inflected.gsub!(CONSTANT_REGEXP, '')
47
56
  inflected
@@ -65,6 +74,7 @@ module Karafka
65
74
  def scope_of(klass)
66
75
  enclosing = klass.to_s.split('::')[0...-1]
67
76
  return ::Object if enclosing.empty?
77
+
68
78
  ::Object.const_get(enclosing.join('::'))
69
79
  end
70
80
 
@@ -5,7 +5,7 @@ module Karafka
5
5
  # A helper method that allows us to build methods that try to get a given
6
6
  # attribute from its instance value and if it fails, will fallback to
7
7
  # the default config or config.kafka value for a given attribute.
8
- # It is used to simplify the checkings.
8
+ # It is used to simplify the checks.
9
9
  # @note Worth noticing, that the value might be equal to false, so even
10
10
  # then we need to return it. That's why we check for nil?
11
11
  # @example Define config retried attribute for start_from_beginning
@@ -0,0 +1,26 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Helpers
5
+ # Inflector provides inflection for the whole Karafka framework with additional inflection
6
+ # caching (due to the fact, that Dry::Inflector is slow)
7
+ module Inflector
8
+ # What inflection engine do we want to use
9
+ ENGINE = Dry::Inflector.new
10
+
11
+ @map = Concurrent::Hash.new
12
+
13
+ private_constant :ENGINE
14
+
15
+ class << self
16
+ # @param string [String] string that we want to convert to our underscore format
17
+ # @return [String] inflected string
18
+ # @example
19
+ # Karafka::Helpers::Inflector.map('Module/ControllerName') #=> 'module_controller_name'
20
+ def map(string)
21
+ @map[string] ||= ENGINE.underscore(string).tr('/', '_')
22
+ end
23
+ end
24
+ end
25
+ end
26
+ end
@@ -7,7 +7,6 @@ module Karafka
7
7
  # Multidelegator is used to delegate calls to multiple targets
8
8
  class MultiDelegator
9
9
  # @param targets to which we want to delegate methods
10
- #
11
10
  def initialize(*targets)
12
11
  @targets = targets
13
12
  end
@@ -5,8 +5,6 @@ module Karafka
5
5
  # Default logger for Event Delegator
6
6
  # @note It uses ::Logger features - providing basic logging
7
7
  class Logger < ::Logger
8
- include Singleton
9
-
10
8
  # Map containing information about log level for given environment
11
9
  ENV_MAP = {
12
10
  'production' => ::Logger::ERROR,
@@ -16,7 +14,11 @@ module Karafka
16
14
  'default' => ::Logger::INFO
17
15
  }.freeze
18
16
 
17
+ private_constant :ENV_MAP
18
+
19
19
  # Creates a new instance of logger ensuring that it has a place to write to
20
+ # @param _args Any arguments that we don't care about but that are needed in order to
21
+ # make this logger compatible with the default Ruby one
20
22
  def initialize(*_args)
21
23
  ensure_dir_exists
22
24
  super(target)
@@ -26,7 +28,7 @@ module Karafka
26
28
  private
27
29
 
28
30
  # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
29
- # to which we will be writtng logs
31
+ # to which we will be writing logs
30
32
  # We use this approach to log stuff to file and to the STDOUT at the same time
31
33
  def target
32
34
  Karafka::Helpers::MultiDelegator
@@ -34,10 +36,11 @@ module Karafka
34
36
  .to(STDOUT, file)
35
37
  end
36
38
 
37
- # Makes sure the log directory exists
39
+ # Makes sure the log directory exists as long as we can write to it
38
40
  def ensure_dir_exists
39
- dir = File.dirname(log_path)
40
- FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
41
+ FileUtils.mkdir_p(File.dirname(log_path))
42
+ rescue Errno::EACCES
43
+ nil
41
44
  end
42
45
 
43
46
  # @return [Pathname] Path to a file to which we should log
@@ -6,13 +6,11 @@ module Karafka
6
6
  # Monitor is used to hookup external monitoring services to monitor how Karafka works
7
7
  # It provides a standardized API for checking incoming messages/enqueueing etc
8
8
  # Since it is a pub-sub based on dry-monitor, you can use as many subscribers/loggers at the
9
- # same time, which means that you might have for example file logging and newrelic at the same
9
+ # same time, which means that you might have for example file logging and NewRelic at the same
10
10
  # time
11
11
  # @note This class acts as a singleton because we are only permitted to have single monitor
12
12
  # per running process (just as logger)
13
13
  class Monitor < Dry::Monitor::Notifications
14
- include Singleton
15
-
16
14
  # List of events that we support in the system and to which a monitor client can hook up
17
15
  # @note The non-error once support timestamp benchmarking
18
16
  # @note Depending on Karafka extensions and additional engines, this might not be the
@@ -21,11 +19,14 @@ module Karafka
21
19
  # Last 4 events are from WaterDrop but for convenience we use the same monitor for the
22
20
  # whole karafka ecosystem
23
21
  BASE_EVENTS = %w[
24
- params.params.parse
25
- params.params.parse.error
22
+ params.params.deserialize
23
+ params.params.deserialize.error
24
+ connection.listener.before_fetch_loop
25
+ connection.listener.fetch_loop
26
26
  connection.listener.fetch_loop.error
27
27
  connection.client.fetch_loop.error
28
- connection.delegator.call
28
+ connection.batch_delegator.call
29
+ connection.message_delegator.call
29
30
  fetcher.call.error
30
31
  backends.inline.process
31
32
  process.notice_signal
@@ -34,8 +35,12 @@ module Karafka
34
35
  async_producer.call.retry
35
36
  sync_producer.call.error
36
37
  sync_producer.call.retry
37
- server.stop
38
- server.stop.error
38
+ app.initializing
39
+ app.initialized
40
+ app.running
41
+ app.stopping
42
+ app.stopping.error
43
+ app.stopped
39
44
  ].freeze
40
45
 
41
46
  private_constant :BASE_EVENTS
@@ -52,7 +57,8 @@ module Karafka
52
57
  def subscribe(event_name_or_listener)
53
58
  return super unless event_name_or_listener.is_a?(String)
54
59
  return super if available_events.include?(event_name_or_listener)
55
- raise Errors::UnregisteredMonitorEvent, event_name_or_listener
60
+
61
+ raise Errors::UnregisteredMonitorEventError, event_name_or_listener
56
62
  end
57
63
 
58
64
  # @return [Array<String>] names of available events to which we can subscribe
@@ -0,0 +1,36 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Instrumentation
5
+ # Listener that sets a proc title with a nice descriptive value
6
+ class ProctitleListener
7
+ # Updates proc title to an initializing one
8
+ # @param _event [Dry::Events::Event] event details including payload
9
+ def on_app_initializing(_event)
10
+ setproctitle('initializing')
11
+ end
12
+
13
+ # Updates proc title to a running one
14
+ # @param _event [Dry::Events::Event] event details including payload
15
+ def on_app_running(_event)
16
+ setproctitle('running')
17
+ end
18
+
19
+ # Updates proc title to a stopping one
20
+ # @param _event [Dry::Events::Event] event details including payload
21
+ def on_app_stopping(_event)
22
+ setproctitle('stopping')
23
+ end
24
+
25
+ private
26
+
27
+ # Sets a proper proc title with our constant prefix
28
+ # @param status [String] any status we want to set
29
+ def setproctitle(status)
30
+ ::Process.setproctitle(
31
+ "karafka #{Karafka::App.config.client_id} (#{status})"
32
+ )
33
+ end
34
+ end
35
+ end
36
+ end
@@ -0,0 +1,140 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Instrumentation
5
+ # Default listener that hooks up to our instrumentation and uses its events for logging
6
+ # It can be removed/replaced or anything without any harm to the Karafka app flow
7
+ class StdoutListener
8
+ # Log levels that we use in this particular listener
9
+ USED_LOG_LEVELS = %i[
10
+ debug
11
+ info
12
+ error
13
+ fatal
14
+ ].freeze
15
+
16
+ # Logs details about incoming batches and with which consumer we will consume them
17
+ # @param event [Dry::Events::Event] event details including payload
18
+ def on_connection_batch_delegator_call(event)
19
+ consumer = event[:consumer]
20
+ topic = consumer.topic.name
21
+ kafka_messages = event[:kafka_batch].messages
22
+ info(
23
+ <<~MSG.chomp.tr("\n", ' ')
24
+ #{kafka_messages.count} messages
25
+ on #{topic} topic
26
+ delegated to #{consumer.class}
27
+ MSG
28
+ )
29
+ end
30
+
31
+ # Logs details about incoming message and with which consumer we will consume it
32
+ # @param event [Dry::Events::Event] event details including payload
33
+ def on_connection_message_delegator_call(event)
34
+ consumer = event[:consumer]
35
+ topic = consumer.topic.name
36
+ info "1 message on #{topic} topic delegated to #{consumer.class}"
37
+ end
38
+
39
+ # Logs details about each received message value deserialization
40
+ # @param event [Dry::Events::Event] event details including payload
41
+ def on_params_params_deserialize(event)
42
+ # Keep in mind, that a caller here is a param object not a controller,
43
+ # so it returns a topic as a string, not a routing topic
44
+ debug(
45
+ <<~MSG.chomp.tr("\n", ' ')
46
+ Params deserialization for #{event[:caller].metadata.topic} topic
47
+ successful in #{event[:time]} ms
48
+ MSG
49
+ )
50
+ end
51
+
52
+ # Logs unsuccessful deserialization attempts of incoming data
53
+ # @param event [Dry::Events::Event] event details including payload
54
+ def on_params_params_deserialize_error(event)
55
+ topic = event[:caller].metadata.topic
56
+ error = event[:error]
57
+ error "Params deserialization error for #{topic} topic: #{error}"
58
+ end
59
+
60
+ # Logs errors that occurred in a listener fetch loop
61
+ # @param event [Dry::Events::Event] event details including payload
62
+ # @note It's an error as we can recover from it not a fatal
63
+ def on_connection_listener_fetch_loop_error(event)
64
+ error "Listener fetch loop error: #{event[:error]}"
65
+ end
66
+
67
+ # Logs errors that are related to the connection itself
68
+ # @param event [Dry::Events::Event] event details including payload
69
+ # @note Karafka will attempt to reconnect, so an error not a fatal
70
+ def on_connection_client_fetch_loop_error(event)
71
+ error "Client fetch loop error: #{event[:error]}"
72
+ end
73
+
74
+ # Logs info about crashed fetcher
75
+ # @param event [Dry::Events::Event] event details including payload
76
+ # @note If this happens, Karafka will shutdown as it means a critical error
77
+ # in one of the threads
78
+ def on_fetcher_call_error(event)
79
+ fatal "Fetcher crash due to an error: #{event[:error]}"
80
+ end
81
+
82
+ # Logs info about processing of a certain dataset with an inline backend
83
+ # @param event [Dry::Events::Event] event details including payload
84
+ def on_backends_inline_process(event)
85
+ count = event[:caller].send(:params_batch).to_a.size
86
+ topic = event[:caller].topic.name
87
+ time = event[:time]
88
+ info "Inline processing of topic #{topic} with #{count} messages took #{time} ms"
89
+ end
90
+
91
+ # Logs info about system signals that Karafka received
92
+ # @param event [Dry::Events::Event] event details including payload
93
+ def on_process_notice_signal(event)
94
+ info "Received #{event[:signal]} system signal"
95
+ end
96
+
97
+ # Logs info about responder usage within a controller flow
98
+ # @param event [Dry::Events::Event] event details including payload
99
+ def on_consumers_responders_respond_with(event)
100
+ calling = event[:caller]
101
+ responder = calling.topic.responder
102
+ data = event[:data]
103
+ info "Responded from #{calling.class} using #{responder} with following data #{data}"
104
+ end
105
+
106
+ # Logs info that we're initializing Karafka app
107
+ # @param _event [Dry::Events::Event] event details including payload
108
+ def on_app_initializing(_event)
109
+ info "Initializing Karafka server #{::Process.pid}"
110
+ end
111
+
112
+ # Logs info that we're running Karafka app
113
+ # @param _event [Dry::Events::Event] event details including payload
114
+ def on_app_running(_event)
115
+ info "Running Karafka server #{::Process.pid}"
116
+ end
117
+
118
+ # Logs info that we're going to stop the Karafka server
119
+ # @param _event [Dry::Events::Event] event details including payload
120
+ def on_app_stopping(_event)
121
+ # We use a separate thread as logging can't be called from trap context
122
+ Thread.new { info "Stopping Karafka server #{::Process.pid}" }
123
+ end
124
+
125
+ # Logs an error that Karafka was unable to stop the server gracefully and it had to do a
126
+ # forced exit
127
+ # @param _event [Dry::Events::Event] event details including payload
128
+ def on_app_stopping_error(_event)
129
+ # We use a separate thread as logging can't be called from trap context
130
+ Thread.new { error "Forceful Karafka server #{::Process.pid} stop" }
131
+ end
132
+
133
+ USED_LOG_LEVELS.each do |log_level|
134
+ define_method log_level do |*args|
135
+ Karafka.logger.send(log_level, *args)
136
+ end
137
+ end
138
+ end
139
+ end
140
+ end
@@ -0,0 +1,26 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Params
5
+ # Simple batch metadata object that stores all non-message information received from Kafka
6
+ # cluster while fetching the data
7
+ # @note This metadata object refers to per batch metadata, not `#params.metadata`
8
+ BatchMetadata = Struct.new(
9
+ :batch_size,
10
+ :first_offset,
11
+ :highwater_mark_offset,
12
+ :unknown_last_offset,
13
+ :last_offset,
14
+ :offset_lag,
15
+ :deserializer,
16
+ :partition,
17
+ :topic,
18
+ keyword_init: true
19
+ ) do
20
+ # @return [Boolean] is the last offset known or unknown
21
+ def unknown_last_offset?
22
+ unknown_last_offset
23
+ end
24
+ end
25
+ end
26
+ end
@@ -0,0 +1,30 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Params
5
+ module Builders
6
+ # Builder for creating batch metadata object based on the batch information
7
+ module BatchMetadata
8
+ class << self
9
+ # Creates metadata based on the kafka batch data
10
+ # @param kafka_batch [Kafka::FetchedBatch] kafka batch details
11
+ # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
12
+ # @return [Karafka::Params::BatchMetadata] batch metadata object
13
+ def from_kafka_batch(kafka_batch, topic)
14
+ Karafka::Params::BatchMetadata.new(
15
+ batch_size: kafka_batch.messages.count,
16
+ first_offset: kafka_batch.first_offset,
17
+ highwater_mark_offset: kafka_batch.highwater_mark_offset,
18
+ unknown_last_offset: kafka_batch.unknown_last_offset?,
19
+ last_offset: kafka_batch.last_offset,
20
+ offset_lag: kafka_batch.offset_lag,
21
+ deserializer: topic.deserializer,
22
+ partition: kafka_batch.partition,
23
+ topic: topic.name
24
+ ).freeze
25
+ end
26
+ end
27
+ end
28
+ end
29
+ end
30
+ end
@@ -0,0 +1,38 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Params
5
+ # Due to the fact that we create params-related objects in a couple of contexts / places,
6
+ # plus backends can build them up their own way, we have this namespace.
7
+ # It allows us to isolate actual params objects from their building process that can be
8
+ # context dependent.
9
+ module Builders
10
+ # Builder for params
11
+ module Params
12
+ class << self
13
+ # @param kafka_message [Kafka::FetchedMessage] message fetched from Kafka
14
+ # @param topic [Karafka::Routing::Topic] topic for which this message was fetched
15
+ # @return [Karafka::Params::Params] params object with payload and message metadata
16
+ def from_kafka_message(kafka_message, topic)
17
+ metadata = Karafka::Params::Metadata.new(
18
+ create_time: kafka_message.create_time,
19
+ headers: kafka_message.headers || {},
20
+ is_control_record: kafka_message.is_control_record,
21
+ key: kafka_message.key,
22
+ offset: kafka_message.offset,
23
+ deserializer: topic.deserializer,
24
+ partition: kafka_message.partition,
25
+ receive_time: Time.now,
26
+ topic: topic.name
27
+ ).freeze
28
+
29
+ Karafka::Params::Params.new(
30
+ kafka_message.value,
31
+ metadata
32
+ )
33
+ end
34
+ end
35
+ end
36
+ end
37
+ end
38
+ end