karafka 1.2.8 → 1.4.0

Files changed (113)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +52 -0
  11. data/.gitignore +1 -0
  12. data/.ruby-version +1 -1
  13. data/CHANGELOG.md +134 -14
  14. data/CODE_OF_CONDUCT.md +1 -1
  15. data/CONTRIBUTING.md +1 -1
  16. data/Gemfile +4 -5
  17. data/Gemfile.lock +92 -81
  18. data/README.md +9 -12
  19. data/bin/karafka +1 -1
  20. data/certs/mensfeld.pem +25 -0
  21. data/config/errors.yml +38 -5
  22. data/docker-compose.yml +17 -0
  23. data/karafka.gemspec +18 -17
  24. data/lib/karafka.rb +10 -16
  25. data/lib/karafka/app.rb +14 -6
  26. data/lib/karafka/attributes_map.rb +5 -10
  27. data/lib/karafka/base_consumer.rb +19 -30
  28. data/lib/karafka/base_responder.rb +45 -27
  29. data/lib/karafka/cli.rb +2 -2
  30. data/lib/karafka/cli/console.rb +11 -9
  31. data/lib/karafka/cli/flow.rb +9 -7
  32. data/lib/karafka/cli/info.rb +4 -2
  33. data/lib/karafka/cli/install.rb +30 -6
  34. data/lib/karafka/cli/server.rb +11 -6
  35. data/lib/karafka/code_reloader.rb +67 -0
  36. data/lib/karafka/connection/api_adapter.rb +22 -9
  37. data/lib/karafka/connection/batch_delegator.rb +55 -0
  38. data/lib/karafka/connection/builder.rb +5 -3
  39. data/lib/karafka/connection/client.rb +31 -31
  40. data/lib/karafka/connection/listener.rb +26 -15
  41. data/lib/karafka/connection/message_delegator.rb +36 -0
  42. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  43. data/lib/karafka/consumers/callbacks.rb +32 -15
  44. data/lib/karafka/consumers/includer.rb +31 -18
  45. data/lib/karafka/consumers/responders.rb +2 -2
  46. data/lib/karafka/contracts.rb +10 -0
  47. data/lib/karafka/contracts/config.rb +21 -0
  48. data/lib/karafka/contracts/consumer_group.rb +206 -0
  49. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  50. data/lib/karafka/contracts/responder_usage.rb +54 -0
  51. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  52. data/lib/karafka/errors.rb +17 -16
  53. data/lib/karafka/fetcher.rb +28 -30
  54. data/lib/karafka/helpers/class_matcher.rb +12 -2
  55. data/lib/karafka/helpers/config_retriever.rb +1 -1
  56. data/lib/karafka/helpers/inflector.rb +26 -0
  57. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  58. data/lib/karafka/instrumentation/logger.rb +9 -6
  59. data/lib/karafka/instrumentation/monitor.rb +15 -9
  60. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  61. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  62. data/lib/karafka/params/batch_metadata.rb +26 -0
  63. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  64. data/lib/karafka/params/builders/params.rb +38 -0
  65. data/lib/karafka/params/builders/params_batch.rb +25 -0
  66. data/lib/karafka/params/metadata.rb +20 -0
  67. data/lib/karafka/params/params.rb +54 -0
  68. data/lib/karafka/params/params_batch.rb +35 -21
  69. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  70. data/lib/karafka/persistence/client.rb +15 -11
  71. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  72. data/lib/karafka/persistence/topics.rb +48 -0
  73. data/lib/karafka/process.rb +0 -2
  74. data/lib/karafka/responders/builder.rb +1 -1
  75. data/lib/karafka/responders/topic.rb +6 -8
  76. data/lib/karafka/routing/builder.rb +36 -8
  77. data/lib/karafka/routing/consumer_group.rb +1 -1
  78. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  79. data/lib/karafka/routing/proxy.rb +10 -1
  80. data/lib/karafka/routing/topic.rb +5 -3
  81. data/lib/karafka/routing/topic_mapper.rb +16 -18
  82. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  83. data/lib/karafka/serialization/json/serializer.rb +31 -0
  84. data/lib/karafka/server.rb +29 -28
  85. data/lib/karafka/setup/config.rb +67 -37
  86. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  87. data/lib/karafka/setup/dsl.rb +0 -1
  88. data/lib/karafka/status.rb +7 -3
  89. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  90. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  91. data/lib/karafka/templates/karafka.rb.erb +92 -0
  92. data/lib/karafka/version.rb +1 -1
  93. metadata +94 -72
  94. metadata.gz.sig +0 -0
  95. data/.travis.yml +0 -21
  96. data/lib/karafka/callbacks.rb +0 -30
  97. data/lib/karafka/callbacks/config.rb +0 -22
  98. data/lib/karafka/callbacks/dsl.rb +0 -16
  99. data/lib/karafka/connection/delegator.rb +0 -46
  100. data/lib/karafka/instrumentation/listener.rb +0 -112
  101. data/lib/karafka/loader.rb +0 -28
  102. data/lib/karafka/params/dsl.rb +0 -156
  103. data/lib/karafka/parsers/json.rb +0 -38
  104. data/lib/karafka/patches/dry_configurable.rb +0 -35
  105. data/lib/karafka/persistence/topic.rb +0 -29
  106. data/lib/karafka/schemas/config.rb +0 -24
  107. data/lib/karafka/schemas/consumer_group.rb +0 -78
  108. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  109. data/lib/karafka/schemas/responder_usage.rb +0 -39
  110. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  111. data/lib/karafka/setup/configurators/base.rb +0 -29
  112. data/lib/karafka/setup/configurators/params.rb +0 -25
  113. data/lib/karafka/templates/karafka.rb.example +0 -54
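The renames in this list sketch the whole 1.2 → 1.4 migration: `schemas/*` (dry-validation schemas) become `contracts/*`, `parsers/json.rb` splits into `serialization/json/serializer.rb` and `serialization/json/deserializer.rb`, the single `connection/delegator.rb` splits into `batch_delegator.rb` and `message_delegator.rb`, and the module-level `instrumentation/listener.rb` becomes the subscribable `instrumentation/stdout_listener.rb`. For orientation before the removed-file hunks below, here is a minimal sketch of a 1.4-style boot file; the broker address, class and topic names are illustrative, and the generated original lives in `data/lib/karafka/templates/karafka.rb.erb`.

```ruby
# Minimal 1.4-style karafka.rb sketch (names are illustrative)
require 'karafka'

class ExampleConsumer < Karafka::BaseConsumer
  def consume; end
end

class KarafkaApp < Karafka::App
  setup do |config|
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    config.client_id = 'example_app'
  end

  consumer_groups.draw do
    topic :example do
      consumer ExampleConsumer
    end
  end
end

KarafkaApp.boot!
```

The hunks below show several of the files removed outright in this range.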
data/lib/karafka/callbacks/dsl.rb
@@ -1,16 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Callbacks
-     # App level dsl to define callbacks
-     module Dsl
-       Callbacks::TYPES.each do |callback_type|
-         # Allows us to define a block, that will be executed for a given moment
-         # @param [Block] block that should be executed after the initialization process
-         define_method callback_type do |&block|
-           config.callbacks.send(callback_type).push block
-         end
-       end
-     end
-   end
- end
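No direct replacement ships for this DSL in 1.4; app lifecycle hooks moved onto the instrumentation bus. A hedged sketch of the replacement pattern (the `app.initialized` event name is assumed from the 1.3+ monitor, not shown in this diff):

```ruby
# Hedged sketch: lifecycle hook via the 1.3+ instrumentation bus
# instead of the removed config.callbacks DSL
Karafka.monitor.subscribe('app.initialized') do |_event|
  Karafka.logger.info('Karafka app initialized')
end
```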
data/lib/karafka/connection/delegator.rb
@@ -1,46 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Connection
-     # Class that delegates processing of messages for which we listen to a proper processor
-     module Delegator
-       class << self
-         # Delegates messages (does something with them)
-         # It will either schedule or run a proper processor action for messages
-         # @note This should be looped to obtain a constant delegating of new messages
-         # @note We catch all the errors here, to make sure that none failures
-         #   for a given consumption will affect other consumed messages
-         #   If we wouldn't catch it, it would propagate up until killing the thread
-         # @note It is a one huge method, because of performance reasons. It is much faster then
-         #   using send or invoking additional methods
-         # @param group_id [String] group_id of a group from which a given message came
-         # @param kafka_messages [Array<Kafka::FetchedMessage>] raw messages fetched from kafka
-         def call(group_id, kafka_messages)
-           # @note We always get messages by topic and partition so we can take topic from the
-           #   first one and it will be valid for all the messages
-           topic = Persistence::Topic.fetch(group_id, kafka_messages[0].topic)
-           consumer = Persistence::Consumer.fetch(topic, kafka_messages[0].partition)
-
-           Karafka.monitor.instrument(
-             'connection.delegator.call',
-             caller: self,
-             consumer: consumer,
-             kafka_messages: kafka_messages
-           ) do
-             # Depending on a case (persisted or not) we might use new consumer instance per
-             # each batch, or use the same one for all of them (for implementing buffering, etc.)
-             if topic.batch_consuming
-               consumer.params_batch = kafka_messages
-               consumer.call
-             else
-               kafka_messages.each do |kafka_message|
-                 consumer.params_batch = [kafka_message]
-                 consumer.call
-               end
-             end
-           end
-         end
-       end
-     end
-   end
- end
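The 1.4 file list above shows this module split into `connection/batch_delegator.rb` and `connection/message_delegator.rb`, but the consumer-facing contract is unchanged: with `batch_consuming true` the consumer receives the whole batch once, otherwise it is invoked per message with a one-element batch. A sketch of a consumer that behaves the same under either mode (`EventsConsumer` is illustrative):

```ruby
# Works under both delegation modes: iterate the batch regardless of size
class EventsConsumer < Karafka::BaseConsumer
  def consume
    params_batch.each do |params|
      # params.topic / params.payload per the 1.4 params API (assumption)
      Karafka.logger.info("#{params.topic}: #{params.payload.inspect}")
    end
  end
end
```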
data/lib/karafka/instrumentation/listener.rb
@@ -1,112 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Instrumentation
-     # Default listener that hooks up to our instrumentation and uses its events for logging
-     # It can be removed/replaced or anything without any harm to the Karafka app flow
-     module Listener
-       # Log levels that we use in this particular listener
-       USED_LOG_LEVELS = %i[
-         debug
-         info
-         error
-         fatal
-       ].freeze
-
-       # Injects WaterDrop listener logger actions
-       extend WaterDrop::Instrumentation::Listener
-
-       class << self
-         # Logs details about incoming messages and with which consumer we will consume them
-         # @param event [Dry::Events::Event] event details including payload
-         def on_connection_delegator_call(event)
-           consumer = event[:consumer]
-           topic = consumer.topic.name
-           kafka_messages = event[:kafka_messages]
-           info "#{kafka_messages.count} messages on #{topic} topic delegated to #{consumer.class}"
-         end
-
-         # Logs details about each received message value parsing
-         # @param event [Dry::Events::Event] event details including payload
-         def on_params_params_parse(event)
-           # Keep in mind, that a caller here is a param object not a controller,
-           # so it returns a topic as a string, not a routing topic
-           debug "Params parsing for #{event[:caller].topic} topic successful in #{event[:time]} ms"
-         end
-
-         # Logs unsuccessful parsing attempts of incoming data
-         # @param event [Dry::Events::Event] event details including payload
-         def on_params_params_parse_error(event)
-           error "Params parsing error for #{event[:caller].topic} topic: #{event[:error]}"
-         end
-
-         # Logs errors that occured in a listener fetch loop
-         # @param event [Dry::Events::Event] event details including payload
-         # @note It's an error as we can recover from it not a fatal
-         def on_connection_listener_fetch_loop_error(event)
-           error "Listener fetch loop error: #{event[:error]}"
-         end
-
-         # Logs errors that are related to the connection itself
-         # @note Karafka will attempt to reconnect, so an error not a fatal
-         # @param event [Dry::Events::Event] event details including payload
-         def on_connection_client_fetch_loop_error(event)
-           error "Client fetch loop error: #{event[:error]}"
-         end
-
-         # Logs info about crashed fetcher
-         # @note If this happens, Karafka will shutdown as it means a critical error
-         #   in one of the threads
-         # @param event [Dry::Events::Event] event details including payload
-         def on_fetcher_call_error(event)
-           fatal "Fetcher crash due to an error: #{event[:error]}"
-         end
-
-         # Logs info about processing of a certain dataset with an inline backend
-         # @param event [Dry::Events::Event] event details including payload
-         def on_backends_inline_process(event)
-           count = event[:caller].send(:params_batch).to_a.size
-           topic = event[:caller].topic.name
-           time = event[:time]
-           info "Inline processing of topic #{topic} with #{count} messages took #{time} ms"
-         end
-
-         # Logs info about system signals that Karafka received
-         # @param event [Dry::Events::Event] event details including payload
-         def on_process_notice_signal(event)
-           info "Received #{event[:signal]} system signal"
-         end
-
-         # Logs info about responder usage withing a controller flow
-         # @param event [Dry::Events::Event] event details including payload
-         def on_consumers_responders_respond_with(event)
-           calling = event[:caller].class
-           responder = calling.topic.responder
-           data = event[:data]
-           info "Responded from #{calling} using #{responder} with following data #{data}"
-         end
-
-         # Logs info that we're going to stop the Karafka server
-         # @param _event [Dry::Events::Event] event details including payload
-         def on_server_stop(_event)
-           # We use a separate thread as logging can't be called from trap context
-           Thread.new { info "Stopping Karafka server #{::Process.pid}" }
-         end
-
-         # Logs an error that Karafka was unable to stop the server gracefully and it had to do a
-         # forced exit
-         # @param _event [Dry::Events::Event] event details including payload
-         def on_server_stop_error(_event)
-           # We use a separate thread as logging can't be called from trap context
-           Thread.new { error "Forceful Karafka server #{::Process.pid} stop" }
-         end
-
-         USED_LOG_LEVELS.each do |log_level|
-           define_method log_level do |*args|
-             Karafka.logger.send(log_level, *args)
-           end
-         end
-       end
-     end
-   end
- end
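In 1.4 this implicit module-level listener is replaced by the instantiable `instrumentation/stdout_listener.rb` (plus the new `proctitle_listener.rb`), which the generated `karafka.rb.erb` subscribes explicitly:

```ruby
# 1.4-style wiring: listeners are plain objects you subscribe yourself,
# so they can be swapped or dropped without patching Karafka
Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)
Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)
```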
data/lib/karafka/loader.rb
@@ -1,28 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Loader for requiring all the files in a proper order
-   module Loader
-     # Order in which we want to load app files
-     DIRS = %w[
-       lib
-       app
-     ].freeze
-
-     # Will load files in a proper order (based on DIRS)
-     # @param [String] root path from which we want to start
-     def self.load(root)
-       DIRS.each do |dir|
-         path = File.join(root, dir)
-         next unless File.exist?(path)
-         load!(path)
-       end
-     end
-
-     # Requires all the ruby files from one path in a proper order
-     # @param path [String] path (dir) from which we want to load ruby files in a proper order
-     def self.load!(path)
-       require_all(path)
-     end
-   end
- end
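The loader is removed without an in-gem replacement; 1.3+ leaves code loading to the host app (Rails autoloading, or an explicit loader in `karafka.rb`). A hedged sketch of one common substitute using Zeitwerk; the directory layout mirrors the removed DIRS list and is illustrative:

```ruby
# Hedged Zeitwerk-based replacement for the removed Karafka::Loader
require 'zeitwerk'

loader = Zeitwerk::Loader.new
%w[lib app].each do |dir|
  path = File.join(Dir.pwd, dir)
  loader.push_dir(path) if Dir.exist?(path)
end
loader.setup      # registers autoloads
loader.eager_load # mirrors the old eager require-all behaviour
```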
data/lib/karafka/params/dsl.rb
@@ -1,156 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Params namespace encapsulating all the logic that is directly related to params handling
-   module Params
-     # Dsl for Karafka params. We don't provide the params class here as we want to allow users to
-     # use either hash (default) or Rails hash with indifferent access as a base for their params
-     #
-     # We do that because both of them have their own advantages and we don't want to enforce users
-     # to handle things differently if they already use any of those
-     #
-     # It provides lazy loading not only until the first usage, but also allows us to skip
-     # using parser until we execute our logic. That way we can operate with
-     # heavy-parsing data without slowing down the whole application.
-     module Dsl
-       # Params keys that are "our" and internal. We use this list for additional backends
-       # that somehow operatae on those keys
-       SYSTEM_KEYS = %w[
-         parser
-         value
-         partition
-         offset
-         key
-         create_time
-         receive_time
-         topic
-         parsed
-       ].freeze
-
-       # Params attributes that should be available via a method call invocation for Kafka
-       # client compatibility.
-       # Kafka passes internally Kafka::FetchedMessage object and the ruby-kafka consumer
-       # uses those fields via method calls, so in order to be able to pass there our params
-       # objects, have to have same api.
-       METHOD_ATTRIBUTES = %w[
-         topic
-         partition
-         offset
-         key
-         create_time
-         receive_time
-       ].freeze
-
-       private_constant :METHOD_ATTRIBUTES
-
-       # Class methods required by params to work
-       module ClassMethods
-         # We allow building instances only via the #build method
-
-         # @param message [Kafka::FetchedMessage, Hash] message that we get out of Kafka
-         #   in case of building params inside main Karafka process in
-         #   Karafka::Connection::Consumer, or a hash when we retrieve data that is already parsed
-         # @param parser [Class] parser class that we will use to unparse data
-         # @return [Karafka::Params::Params] Karafka params object not yet used parser for
-         #   retrieving data that we've got from Kafka
-         # @example Build params instance from a hash
-         #   Karafka::Params::Params.build({ key: 'value' }) #=> params object
-         # @example Build params instance from a Kafka::FetchedMessage object
-         #   Karafka::Params::Params.build(message) #=> params object
-         def build(message, parser)
-           instance = new
-           instance['parser'] = parser
-
-           # Non kafka fetched message can happen when we interchange data with an
-           # additional backend
-           if message.is_a?(Kafka::FetchedMessage)
-             instance.send(
-               :merge!,
-               'value' => message.value,
-               'partition' => message.partition,
-               'offset' => message.offset,
-               'key' => message.key,
-               'create_time' => message.create_time,
-               'receive_time' => Time.now,
-               # When we get raw messages, they might have a topic, that was modified by a
-               # topic mapper. We need to "reverse" this change and map back to the non-modified
-               # format, so our internal flow is not corrupted with the mapping
-               'topic' => Karafka::App.config.topic_mapper.incoming(message.topic)
-             )
-           else
-             instance.send(:merge!, message)
-           end
-
-           instance
-         end
-       end
-
-       # @return [Karafka::Params::Params] this will trigger parser execution. If we decide to
-       #   retrieve data, parser will be executed to parse data. Output of parsing will be merged
-       #   to the current object. This object will be also marked as already parsed, so we won't
-       #   parse it again.
-       def retrieve!
-         return self if self['parsed']
-         self['parsed'] = true
-
-         merge!(parse(delete('value')))
-       end
-
-       # Includes and extends the base params klass with everything that is needed by Karafka to
-       # fully work in any conditions.
-       # @param params_klass [Karafka::Params::Params] initialized params class that we will
-       #   use for a given Karafka process
-       def self.included(params_klass)
-         params_klass.extend(Dsl::ClassMethods)
-
-         METHOD_ATTRIBUTES.each do |attr|
-           # Defines a method call accessor to a particular hash field.
-           # @note Won't work for complex key names that contain spaces, etc
-           # @param key [Symbol] name of a field that we want to retrieve with a method call
-           # @example
-           #   key_attr_reader :example
-           #   params.example #=> 'my example value'
-           params_klass.send :define_method, attr do
-             self[attr]
-           end
-         end
-
-         params_klass.send :private, :merge!
-         params_klass.send :private, :parse
-       end
-
-       private
-
-       # Overwritten merge! method - it behaves differently for keys that are the same in our hash
-       # and in a other_hash - it will not replace keys that are the same in our hash
-       # and in the other one. This protects some important Karafka params keys that cannot be
-       # replaced with custom values from incoming Kafka message
-       # @param other_hash [Hash] hash that we want to merge into current
-       # @return [Karafka::Params::Params] our parameters hash with merged values
-       # @example Merge with hash without same keys
-       #   new(a: 1, b: 2).merge!(c: 3) #=> { a: 1, b: 2, c: 3 }
-       # @example Merge with hash with same keys (symbol based)
-       #   new(a: 1).merge!(a: 2) #=> { a: 1 }
-       # @example Merge with hash with same keys (string based)
-       #   new(a: 1).merge!('a' => 2) #=> { a: 1 }
-       # @example Merge with hash with same keys (current string based)
-       #   new('a' => 1).merge!(a: 2) #=> { a: 1 }
-       def merge!(other_hash)
-         super(other_hash) { |_key, base_value, _new_value| base_value }
-       end
-
-       # @param value [String] Raw data that we want to parse using consumer parser
-       # @note If something goes wrong, it will return raw data in a hash with a message key
-       # @return [Hash] parsed data or a hash with message key containing raw data if something
-       #   went wrong during parsing
-       def parse(value)
-         Karafka.monitor.instrument('params.params.parse', caller: self) do
-           self['parser'].parse(value)
-         end
-       rescue ::Karafka::Errors::ParserError => e
-         Karafka.monitor.instrument('params.params.parse.error', caller: self, error: e)
-         raise e
-       end
-     end
-   end
- end
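In 1.4 this hash mix-in becomes a set of plain classes with dedicated builders (`params/params.rb`, `params/metadata.rb`, `params/builders/*`). Deserialization stays lazy, but instead of `retrieve!` plus hash access it hides behind reader methods. A hedged sketch of the consumer-side difference (method names per the 1.4 params API):

```ruby
# Consumer-side view of the 1.4 params object (sketch)
def consume
  params_batch.each do |params|
    params.raw_payload # raw Kafka value; touching it triggers no parsing
    params.payload     # deserialized lazily on first access, then memoized
    params.offset      # metadata stays available as plain readers
  end
end
```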
data/lib/karafka/parsers/json.rb
@@ -1,38 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Module for all supported by default parsers for incoming/outgoing data
-   module Parsers
-     # Default Karafka Json parser for serializing and deserializing data
-     class Json
-       # @param content [String] content based on which we want to get our hash
-       # @return [Hash] hash with parsed JSON data
-       # @example
-       #   Json.parse("{\"a\":1}") #=> { 'a' => 1 }
-       def self.parse(content)
-         ::MultiJson.load(content)
-       rescue ::MultiJson::ParseError => e
-         raise ::Karafka::Errors::ParserError, e
-       end
-
-       # @param content [Object] any object that we want to convert to a json string
-       # @return [String] Valid JSON string containing serialized data
-       # @raise [Karafka::Errors::ParserError] raised when we don't have a way to parse
-       #   given content to a json string format
-       # @note When string is passed to this method, we assume that it is already a json
-       #   string and we don't serialize it again. This allows us to serialize data before
-       #   it is being forwarded to a parser if we want to have a custom (not that simple)
-       #   json serialization
-       #
-       # @example From an ActiveRecord object
-       #   Json.generate(Repository.first) #=> "{\"repository\":{\"id\":\"04b504e0\"}}"
-       # @example From a string (no changes)
-       #   Json.generate("{\"a\":1}") #=> "{\"a\":1}"
-       def self.generate(content)
-         return content if content.is_a?(String)
-         return content.to_json if content.respond_to?(:to_json)
-         raise Karafka::Errors::ParserError, content
-       end
-     end
-   end
- end
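This two-way parser splits in 1.4 into `serialization/json/serializer.rb` (responders) and `serialization/json/deserializer.rb` (consumers), each a small callable rather than a class with two module functions. A hedged approximation of the deserializer half; the real 1.4 class may differ in detail, and `DeserializationError` is assumed from the reworked `errors.rb`:

```ruby
require 'json'

# Hedged approximation of the 1.4 deserializer shape: a callable object
# that receives the params and reads its raw payload
class JsonDeserializer
  def call(params)
    params.raw_payload.nil? ? nil : ::JSON.parse(params.raw_payload)
  rescue ::JSON::ParserError => e
    raise ::Karafka::Errors::DeserializationError, e
  end
end
```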
data/lib/karafka/patches/dry_configurable.rb
@@ -1,35 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Namespace for patches of external gems/libraries
-   module Patches
-     # Patch that will allow to use proc based lazy evaluated settings with Dry Configurable
-     # @see https://github.com/dry-rb/dry-configurable/blob/master/lib/dry/configurable.rb
-     module DryConfigurable
-       # We overwrite ::Dry::Configurable::Config to change on proc behaviour
-       # Unfortunately it does not provide an on call proc evaluation, so
-       # this feature had to be added here on demand/
-       # @param args Any arguments that DryConfigurable::Config accepts
-       def initialize(*args)
-         super
-
-         @config.each_key(&method(:rebuild))
-       end
-
-       private
-
-       # Method that rebuilds a given accessor, so when it consists a proc value, it will
-       # evaluate it upon return for blocks that don't require any arguments, otherwise
-       # it will return the block
-       # @param method_name [Symbol] name of an accessor that we want to rebuild
-       def rebuild(method_name)
-         define_singleton_method method_name do
-           value = super()
-           return value unless value.is_a?(Proc)
-           return value unless value.parameters.empty?
-           value.call
-         end
-       end
-     end
-   end
- end
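The semantics this patch bolted onto dry-configurable are easy to state in isolation: a zero-arity proc setting evaluates on read, anything else passes through. A standalone illustration of exactly that logic (no Karafka or dry-configurable involved):

```ruby
# Standalone illustration of the behaviour the removed patch provided
def read_setting(value)
  value.is_a?(Proc) && value.parameters.empty? ? value.call : value
end

read_setting(42)              # => 42 (plain values pass through)
read_setting(-> { Time.now }) # => an evaluated Time, not the proc
read_setting(->(x) { x * 2 }) # => the proc itself (it takes arguments)
```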
data/lib/karafka/persistence/topic.rb
@@ -1,29 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Persistence
-     # Local cache for routing topics
-     # We use it in order not to build string instances and remap incoming topic upon each
-     # message / message batches received
-     class Topic
-       # Thread.current scope under which we store topics data
-       PERSISTENCE_SCOPE = :topics
-
-       # @param group_id [String] group id for which we fetch a topic representation
-       # @param raw_topic_name [String] raw topic name (before remapping) for which we fetch a
-       #   topic representation
-       # @return [Karafka::Routing::Topic] remapped topic representation that can be used further
-       #   on when working with given parameters
-       def self.fetch(group_id, raw_topic_name)
-         Thread.current[PERSISTENCE_SCOPE] ||= Hash.new { |hash, key| hash[key] = {} }
-
-         Thread.current[PERSISTENCE_SCOPE][group_id][raw_topic_name] ||= begin
-           # We map from incoming topic name, as it might be namespaced, etc.
-           # @see topic_mapper internal docs
-           mapped_topic_name = Karafka::App.config.topic_mapper.incoming(raw_topic_name)
-           Routing::Router.find("#{group_id}_#{mapped_topic_name}")
-         end
-       end
-     end
-   end
- end
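The fetch-and-memoize idea survives the upgrade as `persistence/topics.rb` (note the pluralized name in the file list, +48 lines). The call shape below is an assumption carried over from the removed class; only the module name is confirmed by this diff:

```ruby
# Assumed 1.4 equivalent of the removed Topic.fetch above
topic = Karafka::Persistence::Topics.fetch('example_group', 'example_topic')
topic # => memoized Karafka::Routing::Topic for the mapped topic name
```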