karafka 1.2.13 → 1.3.0.rc1

Files changed (107)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/{.coditsu.yml → .coditsu/ci.yml} +1 -1
  5. data/.console_irbrc +1 -3
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/.travis.yml +4 -15
  12. data/CHANGELOG.md +56 -4
  13. data/CONTRIBUTING.md +1 -1
  14. data/Gemfile +4 -4
  15. data/Gemfile.lock +55 -43
  16. data/README.md +10 -11
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +25 -0
  19. data/config/errors.yml +38 -5
  20. data/karafka.gemspec +12 -10
  21. data/lib/karafka.rb +7 -15
  22. data/lib/karafka/app.rb +14 -6
  23. data/lib/karafka/attributes_map.rb +3 -4
  24. data/lib/karafka/base_consumer.rb +19 -30
  25. data/lib/karafka/base_responder.rb +45 -27
  26. data/lib/karafka/cli.rb +1 -1
  27. data/lib/karafka/cli/console.rb +11 -9
  28. data/lib/karafka/cli/flow.rb +0 -1
  29. data/lib/karafka/cli/info.rb +3 -1
  30. data/lib/karafka/cli/install.rb +28 -6
  31. data/lib/karafka/cli/server.rb +11 -6
  32. data/lib/karafka/code_reloader.rb +67 -0
  33. data/lib/karafka/connection/api_adapter.rb +11 -4
  34. data/lib/karafka/connection/batch_delegator.rb +51 -0
  35. data/lib/karafka/connection/builder.rb +1 -1
  36. data/lib/karafka/connection/client.rb +30 -20
  37. data/lib/karafka/connection/listener.rb +22 -11
  38. data/lib/karafka/connection/message_delegator.rb +36 -0
  39. data/lib/karafka/consumers/callbacks.rb +32 -15
  40. data/lib/karafka/consumers/includer.rb +30 -18
  41. data/lib/karafka/consumers/metadata.rb +10 -0
  42. data/lib/karafka/consumers/responders.rb +2 -2
  43. data/lib/karafka/contracts.rb +10 -0
  44. data/lib/karafka/contracts/config.rb +21 -0
  45. data/lib/karafka/contracts/consumer_group.rb +206 -0
  46. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  47. data/lib/karafka/contracts/responder_usage.rb +54 -0
  48. data/lib/karafka/contracts/server_cli_options.rb +29 -0
  49. data/lib/karafka/errors.rb +17 -16
  50. data/lib/karafka/fetcher.rb +28 -30
  51. data/lib/karafka/helpers/class_matcher.rb +5 -1
  52. data/lib/karafka/helpers/config_retriever.rb +1 -1
  53. data/lib/karafka/helpers/inflector.rb +26 -0
  54. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  55. data/lib/karafka/instrumentation/logger.rb +5 -3
  56. data/lib/karafka/instrumentation/monitor.rb +15 -9
  57. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  58. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  59. data/lib/karafka/params/builders/metadata.rb +33 -0
  60. data/lib/karafka/params/builders/params.rb +36 -0
  61. data/lib/karafka/params/builders/params_batch.rb +25 -0
  62. data/lib/karafka/params/metadata.rb +35 -0
  63. data/lib/karafka/params/params.rb +68 -0
  64. data/lib/karafka/params/params_batch.rb +35 -20
  65. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  66. data/lib/karafka/persistence/client.rb +15 -11
  67. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +19 -12
  68. data/lib/karafka/persistence/topics.rb +48 -0
  69. data/lib/karafka/process.rb +0 -2
  70. data/lib/karafka/responders/topic.rb +6 -8
  71. data/lib/karafka/routing/builder.rb +35 -7
  72. data/lib/karafka/routing/consumer_group.rb +1 -1
  73. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  74. data/lib/karafka/routing/proxy.rb +10 -1
  75. data/lib/karafka/routing/topic.rb +5 -3
  76. data/lib/karafka/routing/topic_mapper.rb +16 -18
  77. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  78. data/lib/karafka/serialization/json/serializer.rb +31 -0
  79. data/lib/karafka/server.rb +25 -27
  80. data/lib/karafka/setup/config.rb +63 -37
  81. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  82. data/lib/karafka/setup/dsl.rb +0 -1
  83. data/lib/karafka/status.rb +7 -3
  84. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  85. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  86. data/lib/karafka/templates/karafka.rb.erb +92 -0
  87. data/lib/karafka/version.rb +1 -1
  88. metadata +94 -61
  89. metadata.gz.sig +4 -0
  90. data/lib/karafka/callbacks.rb +0 -30
  91. data/lib/karafka/callbacks/config.rb +0 -22
  92. data/lib/karafka/callbacks/dsl.rb +0 -16
  93. data/lib/karafka/connection/delegator.rb +0 -46
  94. data/lib/karafka/instrumentation/listener.rb +0 -112
  95. data/lib/karafka/loader.rb +0 -28
  96. data/lib/karafka/params/dsl.rb +0 -158
  97. data/lib/karafka/parsers/json.rb +0 -38
  98. data/lib/karafka/patches/dry_configurable.rb +0 -33
  99. data/lib/karafka/persistence/topic.rb +0 -29
  100. data/lib/karafka/schemas/config.rb +0 -24
  101. data/lib/karafka/schemas/consumer_group.rb +0 -79
  102. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  103. data/lib/karafka/schemas/responder_usage.rb +0 -39
  104. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  105. data/lib/karafka/setup/configurators/base.rb +0 -29
  106. data/lib/karafka/setup/configurators/params.rb +0 -25
  107. data/lib/karafka/templates/karafka.rb.example +0 -54
data/lib/karafka/params/builders/metadata.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Params
+    module Builders
+      # Builder for creating metadata object based on the message or batch informations
+      # @note We have 2 ways of creating metadata based on the way ruby-kafka operates
+      module Metadata
+        class << self
+          # Creates metadata based on the kafka batch data
+          # @param kafka_batch [Kafka::FetchedBatch] kafka batch details
+          # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
+          # @return [Karafka::Params::Metadata] metadata object
+          def from_kafka_batch(kafka_batch, topic)
+            Karafka::Params::Metadata
+              .new
+              .merge!(
+                'batch_size' => kafka_batch.messages.count,
+                'first_offset' => kafka_batch.first_offset,
+                'highwater_mark_offset' => kafka_batch.highwater_mark_offset,
+                'last_offset' => kafka_batch.last_offset,
+                'offset_lag' => kafka_batch.offset_lag,
+                'deserializer' => topic.deserializer,
+                'partition' => kafka_batch.partition,
+                'topic' => kafka_batch.topic,
+                'unknown_last_offset' => kafka_batch.unknown_last_offset?
+              )
+          end
+        end
+      end
+    end
+  end
+end
data/lib/karafka/params/builders/params.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Params
+    # Due to the fact, that we create params related objects in couple contexts / places
+    # plus backends can build up them their own way we have this namespace.
+    # It allows to isolate actual params objects from their building process that can be
+    # context dependent.
+    module Builders
+      # Builder for params
+      module Params
+        class << self
+          # @param kafka_message [Kafka::FetchedMessage] message fetched from Kafka
+          # @param topic [Karafka::Routing::Topic] topic for which this message was fetched
+          # @return [Karafka::Params::Params] params object
+          def from_kafka_message(kafka_message, topic)
+            Karafka::Params::Params
+              .new
+              .merge!(
+                'create_time' => kafka_message.create_time,
+                'headers' => kafka_message.headers || {},
+                'is_control_record' => kafka_message.is_control_record,
+                'key' => kafka_message.key,
+                'offset' => kafka_message.offset,
+                'deserializer' => topic.deserializer,
+                'partition' => kafka_message.partition,
+                'receive_time' => Time.now,
+                'topic' => kafka_message.topic,
+                'payload' => kafka_message.value
+              )
+          end
+        end
+      end
+    end
+  end
+end
data/lib/karafka/params/builders/params_batch.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Params
+    module Builders
+      # Builder for creating params batch instances
+      module ParamsBatch
+        class << self
+          # Creates params batch with params inside based on the incoming messages
+          # and the topic from which it comes
+          # @param kafka_messages [Array<Kafka::FetchedMessage>] raw fetched messages
+          # @param topic [Karafka::Routing::Topic] topic for which we're received messages
+          # @return [Karafka::Params::ParamsBatch<Karafka::Params::Params>] batch with params
+          def from_kafka_messages(kafka_messages, topic)
+            params_array = kafka_messages.map! do |message|
+              Karafka::Params::Builders::Params.from_kafka_message(message, topic)
+            end
+
+            Karafka::Params::ParamsBatch.new(params_array)
+          end
+        end
+      end
+    end
+  end
+end
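
Taken together, these three builders form the pipeline that turns raw ruby-kafka structs into Karafka params objects. Below is a minimal sketch of that flow; it assumes karafka 1.3 is installed, and the `FakeMessage`/`FakeTopic` structs are hypothetical stand-ins for `Kafka::FetchedMessage` and `Karafka::Routing::Topic` (only the fields the builders actually read are provided). Note that `from_kafka_messages` uses `map!`, so it rewrites the passed array in place.

```ruby
require 'karafka'
require 'json'

# Hypothetical stand-ins - real code receives ruby-kafka and routing objects
FakeMessage = Struct.new(
  :value, :key, :offset, :partition, :topic,
  :create_time, :headers, :is_control_record
)
FakeTopic = Struct.new(:deserializer)

topic = FakeTopic.new(->(params) { JSON.parse(params['payload']) })
messages = [
  FakeMessage.new('{"id":1}', nil, 0, 0, 'users', Time.now, {}, false),
  FakeMessage.new('{"id":2}', nil, 1, 0, 'users', Time.now, {}, false)
]

batch = Karafka::Params::Builders::ParamsBatch.from_kafka_messages(messages, topic)
batch.size     #=> 2 - nothing has been deserialized yet
batch.payloads #=> [{"id"=>1}, {"id"=>2}] - triggers deserialization
```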
data/lib/karafka/params/metadata.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Params
+    # Simple metadata object that stores all non-message information received from Kafka cluster
+    # while fetching the data
+    class Metadata < Hash
+      # Attributes that should be accessible as methods as well (not only hash)
+      METHOD_ATTRIBUTES = %w[
+        batch_size
+        first_offset
+        highwater_mark_offset
+        last_offset
+        offset_lag
+        deserializer
+        partition
+        topic
+      ].freeze
+
+      private_constant :METHOD_ATTRIBUTES
+
+      METHOD_ATTRIBUTES.each do |attr|
+        # Defines a method call accessor to a particular hash field.
+        define_method(attr) do
+          self[attr]
+        end
+      end
+
+      # @return [Boolean] is the last offset known or unknown
+      def unknown_last_offset?
+        self['unknown_last_offset']
+      end
+    end
+  end
+end
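
The `Metadata < Hash` class above is a small reusable pattern: a hash that exposes a fixed whitelist of string keys as reader methods while keeping plain hash access. A standalone re-creation of the pattern (the `ReadableHash` name is illustrative, not part of karafka):

```ruby
# Re-creation of the Hash-with-method-readers pattern used by
# Karafka::Params::Metadata; only the attribute list differs.
class ReadableHash < Hash
  METHOD_ATTRIBUTES = %w[partition topic].freeze
  private_constant :METHOD_ATTRIBUTES

  METHOD_ATTRIBUTES.each do |attr|
    # Each whitelisted string key becomes a regular reader method
    define_method(attr) { self[attr] }
  end
end

meta = ReadableHash.new.merge!('partition' => 0, 'topic' => 'users')
meta.partition #=> 0
meta['topic']  #=> "users" - hash access keeps working
```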
data/lib/karafka/params/params.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Params namespace encapsulating all the logic that is directly related to params handling
+  module Params
+    # It provides lazy loading not only until the first usage, but also allows us to skip
+    # using deserializer until we execute our logic. That way we can operate with
+    # heavy-deserialization data without slowing down the whole application.
+    class Params < Hash
+      # Params attributes that should be available via a method call invocation for Kafka
+      # client compatibility.
+      # Kafka passes internally Kafka::FetchedMessage object and the ruby-kafka consumer
+      # uses those fields via method calls, so in order to be able to pass there our params
+      # objects, have to have same api.
+      METHOD_ATTRIBUTES = %w[
+        create_time
+        headers
+        is_control_record
+        key
+        offset
+        deserializer
+        deserialized
+        partition
+        receive_time
+        topic
+        payload
+      ].freeze
+
+      private_constant :METHOD_ATTRIBUTES
+
+      METHOD_ATTRIBUTES.each do |attr|
+        # Defines a method call accessor to a particular hash field.
+        # @note Won't work for complex key names that contain spaces, etc
+        # @param key [Symbol] name of a field that we want to retrieve with a method call
+        # @example
+        #   key_attr_reader :example
+        #   params.example #=> 'my example payload'
+        define_method(attr) do
+          self[attr]
+        end
+      end
+
+      # @return [Karafka::Params::Params] This method will trigger deserializer execution. If we
+      #   decide to retrieve data, deserializer will be executed to get data. Output of that will
+      #   be merged to the current object. This object will be also marked as already deserialized,
+      #   so we won't deserialize it again.
+      def deserialize!
+        return self if self['deserialized']
+
+        self['deserialized'] = true
+        self['payload'] = deserialize
+        self
+      end
+
+      private
+
+      # @return [Object] deserialized data
+      def deserialize
+        Karafka.monitor.instrument('params.params.deserialize', caller: self) do
+          self['deserializer'].call(self)
+        end
+      rescue ::StandardError => e
+        Karafka.monitor.instrument('params.params.deserialize.error', caller: self, error: e)
+        raise e
+      end
+    end
+  end
+end
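
The contract here is worth spelling out: the payload stays exactly as Kafka delivered it until `deserialize!` is called, the `'deserialized'` flag guarantees the deserializer runs at most once, and failures are instrumented before being re-raised. A minimal sketch of that behavior, assuming karafka 1.3 is installed; the inline lambda stands in for a real deserializer such as the new JSON one:

```ruby
require 'karafka'
require 'json'

# Keys mirror the ones set by Builders::Params above
params = Karafka::Params::Params.new.merge!(
  'payload' => '{"id":1}',
  'deserializer' => ->(p) { JSON.parse(p['payload']) }
)

params['payload']   #=> '{"id":1}' - still the raw Kafka value
params.deserialize! # runs the deserializer once and flips the flag
params.payload      #=> {"id"=>1}
params.deserialize! # no-op: 'deserialized' is already true
```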
data/lib/karafka/params/params_batch.rb
@@ -3,43 +3,58 @@
 module Karafka
   module Params
     # Params batch represents a set of messages received from Kafka.
-    # @note Params internally are lazy loaded before first use. That way we can skip parsing
-    #   process if we have after_fetch that rejects some incoming messages without using params
-    #   It can be also used when handling really heavy data (in terms of parsing).
+    # @note Params internally are lazy loaded before first use. That way we can skip
+    #   deserialization process if we have after_fetch that rejects some incoming messages
+    #   without using params It can be also used when handling really heavy data.
     class ParamsBatch
       include Enumerable
 
-      # Builds up a params batch based on raw kafka messages
-      # @param messages_batch [Array<Kafka::FetchedMessage>] messages batch
-      # @param topic_parser [Class] topic parser for unparsing messages values
-      def initialize(messages_batch, topic_parser)
-        @params_batch = messages_batch.map! do |message|
-          Karafka::Params::Params.build(message, topic_parser)
-        end
+      # @param params_array [Array<Karafka::Params::Params>] array with karafka params
+      # @return [Karafka::Params::ParamsBatch] lazy evaluated params batch object
+      def initialize(params_array)
+        @params_array = params_array
       end
 
-      # @yieldparam [Karafka::Params::Params] each parsed and loaded params instance
-      # @note Invocation of this method will cause loading and parsing each param after another.
-      #   If you want to get access without parsing, please access params_batch directly
+      # @yieldparam [Karafka::Params::Params] each deserialized and loaded params instance
+      # @note Invocation of this method will cause loading and deserializing each param after
+      #   another. If you want to get access without deserializing, please access params_array
+      #   directly
       def each
-        @params_batch.each { |param| yield(param.retrieve!) }
+        @params_array.each { |param| yield(param.deserialize!) }
      end
 
       # @return [Array<Karafka::Params::Params>] returns all the params in a loaded state, so they
       #   can be used for batch insert, etc. Without invoking all, up until first use, they won't
-      #   be parsed
-      def parsed
+      #   be deserialized
+      def deserialize!
         each(&:itself)
       end
 
-      # @return [Karafka::Params::Params] last element after the unparsing process
+      # @return [Array<Object>] array with deserialized payloads. This method can be useful when
+      #   we don't care about metadata and just want to extract all the data payloads from the
+      #   batch
+      def payloads
+        deserialize!.map(&:payload)
+      end
+
+      # @return [Karafka::Params::Params] first element after the deserialization process
+      def first
+        @params_array.first.deserialize!
+      end
+
+      # @return [Karafka::Params::Params] last element after the deserialization process
       def last
-        @params_batch.last.retrieve!
+        @params_array.last.deserialize!
       end
 
-      # @return [Array<Karafka::Params::Params>] pure array with params (not parsed)
       def to_a
-        @params_batch
+      # @return [Array<Karafka::Params::Params>] pure array with params (not deserialized)
+        @params_array
+      end
+
+      # @return [Integer] number of messages in the batch
+      def size
+        @params_array.size
       end
     end
   end
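
For consumer code, the practical upshot of this API is a choice between the eager and the lazy path. A hedged sketch of how a batch consumer might use both (the consumer class and its helper methods are illustrative, and batch consuming is assumed to be enabled for the route):

```ruby
# Hypothetical batch consumer showing the two access styles of the new API
class UsersConsumer < Karafka::BaseConsumer
  def consume
    # Eager: deserialize everything and work with plain payloads
    params_batch.payloads.each { |payload| import(payload) }

    # Lazy: to_a returns raw params without deserializing, which is enough
    # when only metadata such as offsets or partitions is needed
    params_batch.to_a.each { |params| track_offset(params['offset']) }
  end

  private

  # Illustrative helpers, not part of karafka
  def import(payload); end

  def track_offset(offset); end
end
```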
data/lib/karafka/patches/ruby_kafka.rb
@@ -1,34 +1,47 @@
 # frozen_string_literal: true
 
 module Karafka
+  # Namespace for various other libs patches
   module Patches
     # Patches for Ruby Kafka gem
     module RubyKafka
       # This patch allows us to inject business logic in between fetches and before the consumer
       # stop, so we can perform stop commit or anything else that we need since
       # ruby-kafka fetch loop does not allow that directly
-      # We don't wan't to use poll ruby-kafka api as it brings many more problems that we would
+      # We don't won't to use poll ruby-kafka api as it brings many more problems that we would
       # have to take care of. That way, nothing like that ever happens but we get the control
       # over the stopping process that we need (since we're the once that initiate it for each
       # thread)
       def consumer_loop
         super do
-          consumers = Karafka::Persistence::Consumer
-                      .all
+          consumers = Karafka::Persistence::Consumers
+                      .current
                       .values
                       .flat_map(&:values)
-                      .select { |ctrl| ctrl.respond_to?(:run_callbacks) }
+                      .select { |consumer| consumer.class.respond_to?(:after_fetch) }
 
-          if Karafka::App.stopped?
-            consumers.each { |ctrl| ctrl.run_callbacks :before_stop }
+          if Karafka::App.stopping?
+            publish_event(consumers, 'before_stop')
             Karafka::Persistence::Client.read.stop
           else
-            consumers.each { |ctrl| ctrl.run_callbacks :before_poll }
+            publish_event(consumers, 'before_poll')
             yield
-            consumers.each { |ctrl| ctrl.run_callbacks :after_poll }
+            publish_event(consumers, 'after_poll')
          end
        end
      end
+
+      private
+
+      # Notifies consumers about particular events happening
+      # @param consumers [Array<Object>] all consumers that want to be notified about an event
+      # @param event_name [String] name of the event that happened
+      def publish_event(consumers, event_name)
+        consumers.each do |consumer|
+          key = "consumers.#{Helpers::Inflector.map(consumer.class.to_s)}.#{event_name}"
+          Karafka::App.monitor.instrument(key, context: consumer)
+        end
+      end
     end
   end
 end
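
Instead of ActiveSupport-style `run_callbacks`, the 1.3 patch publishes monitor events whose keys embed the underscored consumer class name (via the new `Helpers::Inflector`), and the reworked `Karafka::Consumers::Callbacks` module (changed above) subscribes consumers to exactly those keys. A hedged sketch of the consumer-facing side, based on my reading of the callbacks module; the consumer name is illustrative:

```ruby
# Hypothetical consumer hooking into the events published by consumer_loop
class AuditConsumer < Karafka::BaseConsumer
  include Karafka::Consumers::Callbacks

  # Assumed to fire on the 'consumers.audit_consumer.before_poll' event
  # before each fetch-loop tick
  before_poll { Karafka.logger.debug('About to poll') }

  # Assumed to fire on 'consumers.audit_consumer.before_stop' once the
  # server enters the stopping state
  before_stop { Karafka.logger.info('Stopping, flushing local state') }

  def consume; end
end
```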
data/lib/karafka/persistence/client.rb
@@ -7,18 +7,22 @@ module Karafka
       # Thread.current key under which we store current thread messages consumer client
       PERSISTENCE_SCOPE = :client
 
-      # @param client [Karafka::Connection::Client] messages consumer client of
-      #   a current thread
-      # @return [Karafka::Connection::Client] persisted messages consumer client
-      def self.write(client)
-        Thread.current[PERSISTENCE_SCOPE] = client
-      end
+      private_constant :PERSISTENCE_SCOPE
+
+      class << self
+        # @param client [Karafka::Connection::Client] messages consumer client of
+        #   a current thread
+        # @return [Karafka::Connection::Client] persisted messages consumer client
+        def write(client)
+          Thread.current[PERSISTENCE_SCOPE] = client
+        end
 
-      # @return [Karafka::Connection::Client] persisted messages consumer client
-      # @raise [Karafka::Errors::MissingConsumer] raised when no thread messages consumer
-      #   client but we try to use it anyway
-      def self.read
-        Thread.current[PERSISTENCE_SCOPE] || raise(Errors::MissingClient)
+        # @return [Karafka::Connection::Client] persisted messages consumer client
+        # @raise [Karafka::Errors::MissingClientError] raised when no thread messages consumer
+        #   client but we try to use it anyway
+        def read
+          Thread.current[PERSISTENCE_SCOPE] || raise(Errors::MissingClientError)
+        end
       end
     end
   end
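
The client registry is plain thread-local storage: each listener thread writes its own connection client once and reads it back later, and reading before writing is a hard error. A standalone re-creation of the pattern (the `Registry` name and error class are illustrative):

```ruby
# Re-creation of the thread-local registry behind Karafka::Persistence::Client
class Registry
  SCOPE = :my_client
  private_constant :SCOPE

  MissingClientError = Class.new(StandardError)

  class << self
    def write(client)
      Thread.current[SCOPE] = client
    end

    def read
      Thread.current[SCOPE] || raise(MissingClientError)
    end
  end
end

Registry.write(:connection_a)
Registry.read #=> :connection_a

# Other threads have their own (empty) slot
other = Thread.new { Registry.read rescue :missing }
other.value #=> :missing
```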
data/lib/karafka/persistence/{consumer.rb → consumers.rb}
@@ -7,16 +7,18 @@ module Karafka
     # Module used to provide a persistent cache across batch requests for a given
     # topic and partition to store some additional details when the persistent mode
     # for a given topic is turned on
-    class Consumer
+    class Consumers
       # Thread.current scope under which we store consumers data
       PERSISTENCE_SCOPE = :consumers
 
+      private_constant :PERSISTENCE_SCOPE
+
       class << self
-        # @return [Hash] current thread persistence scope hash with all the consumers
-        def all
-          # @note This does not need to be threadsafe (Hash) as it is always executed in a
-          #   current thread context
-          Thread.current[PERSISTENCE_SCOPE] ||= Hash.new { |hash, key| hash[key] = {} }
+        # @return [Hash] current thread's persistence scope hash with all the consumers
+        def current
+          Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |hash, key|
+            hash[key] = Concurrent::Hash.new
+          end
         end
 
         # Used to build (if block given) and/or fetch a current consumer instance that will be
@@ -25,12 +27,17 @@ module Karafka
         # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
         # @param partition [Integer] number of partition for which we want to cache
         def fetch(topic, partition)
-          # We always store a current instance for callback reasons
-          if topic.persistent
-            all[topic][partition] ||= topic.consumer.new
-          else
-            all[topic][partition] = topic.consumer.new
-          end
+          current[topic][partition] ||= topic.consumer.new(topic)
         end
+
+        # Removes all persisted instances of consumers from the consumer cache
+        # @note This is used to reload consumers instances when code reloading in development mode
+        #   is present. This should not be used in production.
+        def clear
+          Thread
+            .list
+            .select { |thread| thread[PERSISTENCE_SCOPE] }
+            .each { |thread| thread[PERSISTENCE_SCOPE].clear }
        end
      end
    end
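
Two behavior changes hide in this hunk: the `topic.persistent` branch is gone, so the cache now always memoizes, and consumers receive their topic at construction time (`topic.consumer.new(topic)`). The auto-vivifying two-level `Concurrent::Hash` is the core mechanism; a standalone sketch:

```ruby
require 'concurrent'

# Two-level cache: first key is the topic, second the partition
cache = Concurrent::Hash.new { |hash, key| hash[key] = Concurrent::Hash.new }

build_consumer = lambda do |topic, partition|
  # ||= means the expensive instantiation happens once per pair
  cache[topic][partition] ||= "consumer instance for #{topic}/#{partition}"
end

build_consumer.call('users', 0) # builds and stores the instance
build_consumer.call('users', 0) # returns the memoized one
cache.clear                     # what Consumers.clear does per thread
```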
data/lib/karafka/persistence/topics.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Persistence
+    # Local cache for routing topics
+    # We use it in order not to build string instances and remap incoming topic upon each
+    # message / message batches received
+    class Topics
+      # Thread.current scope under which we store topics data
+      PERSISTENCE_SCOPE = :topics
+
+      private_constant :PERSISTENCE_SCOPE
+
+      class << self
+        # @return [Concurrent::Hash] hash with all the topics from given groups
+        def current
+          Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |hash, key|
+            hash[key] = Concurrent::Hash.new
+          end
+        end
+
+        # @param group_id [String] group id for which we fetch a topic representation
+        # @param raw_topic_name [String] raw topic name (before remapping) for which we fetch a
+        #   topic representation
+        # @return [Karafka::Routing::Topics] remapped topic representation that can be used further
+        #   on when working with given parameters
+        def fetch(group_id, raw_topic_name)
+          current[group_id][raw_topic_name] ||= begin
+            # We map from incoming topic name, as it might be namespaced, etc.
+            # @see topic_mapper internal docs
+            mapped_topic_name = Karafka::App.config.topic_mapper.incoming(raw_topic_name)
+            Routing::Router.find("#{group_id}_#{mapped_topic_name}")
+          end
+        end
+
+        # Clears the whole topics cache for all the threads
+        # This is used for in-development code reloading as we need to get rid of all the
+        # preloaded and cached instances of objects to make it work
+        def clear
+          Thread
+            .list
+            .select { |thread| thread[PERSISTENCE_SCOPE] }
+            .each { |thread| thread[PERSISTENCE_SCOPE].clear }
+        end
+      end
+    end
+  end
+end
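
`Topics.fetch` memoizes the topic-mapper translation plus router lookup per (group, raw topic) pair, and `clear` wipes that cache in every live thread, which is what the new code reloader relies on. A standalone sketch of the cross-thread wipe (the `SCOPE` key and worker setup are illustrative):

```ruby
# Re-creation of the Thread.list walk done by Topics.clear / Consumers.clear
SCOPE = :demo_topics

worker = Thread.new do
  Thread.current[SCOPE] = { 'users' => :cached_topic }
  sleep # park the thread so its thread-local cache stays alive
end

sleep 0.01 until worker[SCOPE] # wait for the worker to populate its cache

# Any thread may trigger the wipe; every thread holding the scope is cleared
Thread.list
      .select { |thread| thread[SCOPE] }
      .each { |thread| thread[SCOPE].clear }

worker[SCOPE] #=> {}
worker.kill
```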