karafka 1.3.0
- checksums.yaml +7 -0
- checksums.yaml.gz.sig +2 -0
- data.tar.gz.sig +0 -0
- data/.coditsu/ci.yml +3 -0
- data/.console_irbrc +11 -0
- data/.github/FUNDING.yml +3 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.gitignore +69 -0
- data/.rspec +1 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/.travis.yml +36 -0
- data/CHANGELOG.md +520 -0
- data/CODE_OF_CONDUCT.md +46 -0
- data/CONTRIBUTING.md +41 -0
- data/Gemfile +12 -0
- data/Gemfile.lock +137 -0
- data/MIT-LICENCE +18 -0
- data/README.md +101 -0
- data/bin/karafka +19 -0
- data/certs/mensfeld.pem +25 -0
- data/config/errors.yml +39 -0
- data/karafka.gemspec +44 -0
- data/lib/karafka.rb +71 -0
- data/lib/karafka/app.rb +53 -0
- data/lib/karafka/attributes_map.rb +68 -0
- data/lib/karafka/backends/inline.rb +16 -0
- data/lib/karafka/base_consumer.rb +57 -0
- data/lib/karafka/base_responder.rb +226 -0
- data/lib/karafka/cli.rb +54 -0
- data/lib/karafka/cli/base.rb +78 -0
- data/lib/karafka/cli/console.rb +31 -0
- data/lib/karafka/cli/flow.rb +45 -0
- data/lib/karafka/cli/info.rb +31 -0
- data/lib/karafka/cli/install.rb +64 -0
- data/lib/karafka/cli/server.rb +71 -0
- data/lib/karafka/code_reloader.rb +67 -0
- data/lib/karafka/connection/api_adapter.rb +155 -0
- data/lib/karafka/connection/batch_delegator.rb +51 -0
- data/lib/karafka/connection/builder.rb +16 -0
- data/lib/karafka/connection/client.rb +117 -0
- data/lib/karafka/connection/listener.rb +71 -0
- data/lib/karafka/connection/message_delegator.rb +36 -0
- data/lib/karafka/consumers/callbacks.rb +71 -0
- data/lib/karafka/consumers/includer.rb +63 -0
- data/lib/karafka/consumers/metadata.rb +10 -0
- data/lib/karafka/consumers/responders.rb +24 -0
- data/lib/karafka/consumers/single_params.rb +15 -0
- data/lib/karafka/contracts.rb +10 -0
- data/lib/karafka/contracts/config.rb +21 -0
- data/lib/karafka/contracts/consumer_group.rb +206 -0
- data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
- data/lib/karafka/contracts/responder_usage.rb +54 -0
- data/lib/karafka/contracts/server_cli_options.rb +29 -0
- data/lib/karafka/errors.rb +51 -0
- data/lib/karafka/fetcher.rb +42 -0
- data/lib/karafka/helpers/class_matcher.rb +88 -0
- data/lib/karafka/helpers/config_retriever.rb +46 -0
- data/lib/karafka/helpers/inflector.rb +26 -0
- data/lib/karafka/helpers/multi_delegator.rb +32 -0
- data/lib/karafka/instrumentation/logger.rb +57 -0
- data/lib/karafka/instrumentation/monitor.rb +70 -0
- data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
- data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
- data/lib/karafka/params/builders/metadata.rb +33 -0
- data/lib/karafka/params/builders/params.rb +36 -0
- data/lib/karafka/params/builders/params_batch.rb +25 -0
- data/lib/karafka/params/metadata.rb +35 -0
- data/lib/karafka/params/params.rb +68 -0
- data/lib/karafka/params/params_batch.rb +61 -0
- data/lib/karafka/patches/ruby_kafka.rb +47 -0
- data/lib/karafka/persistence/client.rb +29 -0
- data/lib/karafka/persistence/consumers.rb +45 -0
- data/lib/karafka/persistence/topics.rb +48 -0
- data/lib/karafka/process.rb +60 -0
- data/lib/karafka/responders/builder.rb +36 -0
- data/lib/karafka/responders/topic.rb +55 -0
- data/lib/karafka/routing/builder.rb +89 -0
- data/lib/karafka/routing/consumer_group.rb +61 -0
- data/lib/karafka/routing/consumer_mapper.rb +34 -0
- data/lib/karafka/routing/proxy.rb +46 -0
- data/lib/karafka/routing/router.rb +29 -0
- data/lib/karafka/routing/topic.rb +62 -0
- data/lib/karafka/routing/topic_mapper.rb +53 -0
- data/lib/karafka/serialization/json/deserializer.rb +27 -0
- data/lib/karafka/serialization/json/serializer.rb +31 -0
- data/lib/karafka/server.rb +83 -0
- data/lib/karafka/setup/config.rb +221 -0
- data/lib/karafka/setup/configurators/water_drop.rb +36 -0
- data/lib/karafka/setup/dsl.rb +21 -0
- data/lib/karafka/status.rb +29 -0
- data/lib/karafka/templates/application_consumer.rb.erb +7 -0
- data/lib/karafka/templates/application_responder.rb.erb +11 -0
- data/lib/karafka/templates/karafka.rb.erb +92 -0
- data/lib/karafka/version.rb +7 -0
- data/log/.gitkeep +0 -0
- metadata +336 -0
- metadata.gz.sig +0 -0
data/lib/karafka/persistence/client.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Persistence
+    # Persistence layer to store current thread messages consumer client for further use
+    class Client
+      # Thread.current key under which we store current thread messages consumer client
+      PERSISTENCE_SCOPE = :client
+
+      private_constant :PERSISTENCE_SCOPE
+
+      class << self
+        # @param client [Karafka::Connection::Client] messages consumer client of
+        #   a current thread
+        # @return [Karafka::Connection::Client] persisted messages consumer client
+        def write(client)
+          Thread.current[PERSISTENCE_SCOPE] = client
+        end
+
+        # @return [Karafka::Connection::Client] persisted messages consumer client
+        # @raise [Karafka::Errors::MissingClientError] raised when no thread messages consumer
+        #   client but we try to use it anyway
+        def read
+          Thread.current[PERSISTENCE_SCOPE] || raise(Errors::MissingClientError)
+        end
+      end
+    end
+  end
+end
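A minimal usage sketch for this thread-local store; the client construction below is assumed for illustration and is not part of this diff (in Karafka itself the connection listener writes the client before messages are delegated within the same thread):

    client = Karafka::Connection::Client.new(consumer_group) # `consumer_group` assumed to be already built
    Karafka::Persistence::Client.write(client)

    Karafka::Persistence::Client.read # => the client stored for the current thread
    # In a thread that never called `write`, `read` raises Karafka::Errors::MissingClientError.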
data/lib/karafka/persistence/consumers.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Module used to provide a persistent cache layer for Karafka components that need to be
+  # shared inside of a same thread
+  module Persistence
+    # Module used to provide a persistent cache across batch requests for a given
+    # topic and partition to store some additional details when the persistent mode
+    # for a given topic is turned on
+    class Consumers
+      # Thread.current scope under which we store consumers data
+      PERSISTENCE_SCOPE = :consumers
+
+      private_constant :PERSISTENCE_SCOPE
+
+      class << self
+        # @return [Hash] current thread's persistence scope hash with all the consumers
+        def current
+          Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |hash, key|
+            hash[key] = Concurrent::Hash.new
+          end
+        end
+
+        # Used to build (if block given) and/or fetch a current consumer instance that will be
+        # used to process messages from a given topic and partition
+        # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
+        # @param partition [Integer] number of partition for which we want to cache
+        # @return [Karafka::BaseConsumer] base consumer descendant
+        def fetch(topic, partition)
+          current[topic][partition] ||= topic.consumer.new(topic)
+        end
+
+        # Removes all persisted instances of consumers from the consumer cache
+        # @note This is used to reload consumers instances when code reloading in development mode
+        #   is present. This should not be used in production.
+        def clear
+          Thread
+            .list
+            .select { |thread| thread[PERSISTENCE_SCOPE] }
+            .each { |thread| thread[PERSISTENCE_SCOPE].clear }
+        end
+      end
+    end
+  end
+end
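A sketch of the per-topic/partition memoization this cache provides; `topic` here stands for a hypothetical Karafka::Routing::Topic whose `consumer` points at a consumer class:

    first  = Karafka::Persistence::Consumers.fetch(topic, 0)
    second = Karafka::Persistence::Consumers.fetch(topic, 0)
    first.equal?(second) # => true, the same consumer instance is reused across batches

    Karafka::Persistence::Consumers.clear # drops cached instances (development code reload only)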
data/lib/karafka/persistence/topics.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Persistence
+    # Local cache for routing topics
+    # We use it in order not to build string instances and remap incoming topic upon each
+    # message / message batches received
+    class Topics
+      # Thread.current scope under which we store topics data
+      PERSISTENCE_SCOPE = :topics
+
+      private_constant :PERSISTENCE_SCOPE
+
+      class << self
+        # @return [Concurrent::Hash] hash with all the topics from given groups
+        def current
+          Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |hash, key|
+            hash[key] = Concurrent::Hash.new
+          end
+        end
+
+        # @param group_id [String] group id for which we fetch a topic representation
+        # @param raw_topic_name [String] raw topic name (before remapping) for which we fetch a
+        #   topic representation
+        # @return [Karafka::Routing::Topics] remapped topic representation that can be used further
+        #   on when working with given parameters
+        def fetch(group_id, raw_topic_name)
+          current[group_id][raw_topic_name] ||= begin
+            # We map from incoming topic name, as it might be namespaced, etc.
+            # @see topic_mapper internal docs
+            mapped_topic_name = Karafka::App.config.topic_mapper.incoming(raw_topic_name)
+            Routing::Router.find("#{group_id}_#{mapped_topic_name}")
+          end
+        end
+
+        # Clears the whole topics cache for all the threads
+        # This is used for in-development code reloading as we need to get rid of all the
+        # preloaded and cached instances of objects to make it work
+        def clear
+          Thread
+            .list
+            .select { |thread| thread[PERSISTENCE_SCOPE] }
+            .each { |thread| thread[PERSISTENCE_SCOPE].clear }
+        end
+      end
+    end
+  end
+end
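A sketch of the lookup flow; the group id and raw topic name below are hypothetical (the raw name is what Kafka delivers, before the configured topic_mapper translates it back to the routing name):

    topic = Karafka::Persistence::Topics.fetch('example_app_videos', 'namespaced.videos')
    # First call: maps the raw name and asks Routing::Router.find for the routing topic.
    # Subsequent calls with the same pair return the cached object without re-mapping.
    Karafka::Persistence::Topics.fetch('example_app_videos', 'namespaced.videos').equal?(topic) # => true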
data/lib/karafka/process.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Class used to catch signals from ruby Signal class in order to manage Karafka stop
+  # @note There might be only one process - this class is a singleton
+  class Process
+    # Signal types that we handle
+    HANDLED_SIGNALS = %i[
+      SIGINT
+      SIGQUIT
+      SIGTERM
+    ].freeze
+
+    HANDLED_SIGNALS.each do |signal|
+      # Assigns a callback that will happen when a certain signal is sent
+      # to Karafka server instance
+      # @note It does not define the callback itself - it needs to be passed in a block
+      # @example Define an action that should be taken on_sigint
+      #   process.on_sigint do
+      #     Karafka.logger.info('Log something here')
+      #     exit
+      #   end
+      define_method :"on_#{signal.to_s.downcase}" do |&block|
+        @callbacks[signal] << block
+      end
+    end
+
+    # Creates an instance of process and creates empty hash for callbacks
+    def initialize
+      @callbacks = Hash.new { |hsh, key| hsh[key] = [] }
+    end
+
+    # Method catches all HANDLED_SIGNALS and performs appropriate callbacks (if defined)
+    # @note If there are no callbacks, this method will just ignore a given signal that was sent
+    def supervise
+      HANDLED_SIGNALS.each { |signal| trap_signal(signal) }
+    end
+
+    private
+
+    # Traps a single signal and performs callbacks (if any) or just ignores this signal
+    # @param [Symbol] signal type that we want to catch
+    def trap_signal(signal)
+      trap(signal) do
+        notice_signal(signal)
+        (@callbacks[signal] || []).each(&:call)
+      end
+    end
+
+    # Informs monitoring about trapped signal
+    # @param [Symbol] signal type that we received
+    # @note We cannot perform logging from trap context, that's why
+    #   we have to spin up a new thread to do this
+    def notice_signal(signal)
+      Thread.new do
+        Karafka.monitor.instrument('process.notice_signal', caller: self, signal: signal)
+      end
+    end
+  end
+end
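A usage sketch following the @example in the code above; the shutdown call in the callback is an assumption, not part of this diff:

    process = Karafka::Process.new

    process.on_sigint do
      Karafka::App.stop! # assumed teardown; any shutdown logic could go here
    end

    process.supervise # traps SIGINT, SIGQUIT and SIGTERM and runs the callbacks when one arrives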
data/lib/karafka/responders/builder.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Responders namespace encapsulates all the internal responder implementation parts
+  module Responders
+    # Responders builder is used for finding (based on the consumer class name) a responder
+    # that matches the consumer. We use it when the user does not provide a responder inside
+    # routing, but still names the responder with the same convention (and namespaces) as the consumer
+    #
+    # @example Matching responder exists
+    #   Karafka::Responder::Builder(NewEventsConsumer).build #=> NewEventsResponder
+    # @example Matching responder does not exist
+    #   Karafka::Responder::Builder(NewBuildsConsumer).build #=> nil
+    class Builder
+      # @param consumer_class [Karafka::BaseConsumer, nil] descendant of
+      #   Karafka::BaseConsumer
+      # @example Tries to find a responder that matches a given consumer. If nothing found,
+      #   will return nil (nil is accepted, because it means that a given consumer doesn't
+      #   pipe stuff further on)
+      def initialize(consumer_class)
+        @consumer_class = consumer_class
+      end
+
+      # Tries to figure out a responder based on a consumer class name
+      # @return [Class] Responder class (not an instance)
+      # @return [nil] or nil if there's no matching responding class
+      def build
+        Helpers::ClassMatcher.new(
+          @consumer_class,
+          from: 'Consumer',
+          to: 'Responder'
+        ).match
+      end
+    end
+  end
+end
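A sketch of the naming convention the builder relies on; the classes below are hypothetical and only their names matter:

    class NewVideosConsumer < Karafka::BaseConsumer; end
    class NewVideosResponder < Karafka::BaseResponder; end
    class OrphanConsumer < Karafka::BaseConsumer; end

    Karafka::Responders::Builder.new(NewVideosConsumer).build # => NewVideosResponder
    Karafka::Responders::Builder.new(OrphanConsumer).build    # => nil, no OrphanResponder defined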
data/lib/karafka/responders/topic.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Responders
+    # Topic describes a single topic on which we want to respond with responding requirements
+    # @example Define topic (required by default)
+    #   Karafka::Responders::Topic.new(:topic_name, {}) #=> #<Karafka::Responders::Topic...
+    # @example Define optional topic
+    #   Karafka::Responders::Topic.new(:topic_name, required: false)
+    class Topic
+      # Name of the topic on which we want to respond
+      attr_reader :name
+
+      # @param name [Symbol, String] name of a topic on which we want to respond
+      # @param options [Hash] non-default options for this topic
+      # @return [Karafka::Responders::Topic] topic description object
+      def initialize(name, options)
+        @name = name.to_s
+        @options = options
+      end
+
+      # @return [Boolean] is this a required topic (if not, it is optional)
+      def required?
+        @options.key?(:required) ? @options[:required] : true
+      end
+
+      # @return [Boolean] was usage of this topic registered or not
+      def registered?
+        @options[:registered] == true
+      end
+
+      # @return [Class] Class to use to serialize messages for this topic
+      def serializer
+        @options[:serializer]
+      end
+
+      # @return [Boolean] do we want to use async producer. Defaults to false as the sync producer
+      #   is safer and introduces less problems
+      def async?
+        @options.key?(:async) ? @options[:async] : false
+      end
+
+      # @return [Hash] hash with this topic attributes and options
+      def to_h
+        {
+          name: name,
+          required: required?,
+          registered: registered?,
+          serializer: serializer,
+          async: async?
+        }
+      end
+    end
+  end
+end
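The defaults in action, derived directly from the code above (the topic name is hypothetical):

    topic = Karafka::Responders::Topic.new(:videos_created, registered: true)
    topic.required? # => true  (topics are required unless explicitly marked otherwise)
    topic.async?    # => false (sync producer by default)

    Karafka::Responders::Topic.new(:videos_created, required: false, registered: true).to_h
    # => { name: 'videos_created', required: false, registered: true, serializer: nil, async: false }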
data/lib/karafka/routing/builder.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    # Builder used as a DSL layer for building consumers and telling them which topics to consume
+    # @example Build a simple (most common) route
+    #   consumers do
+    #     topic :new_videos do
+    #       consumer NewVideosConsumer
+    #     end
+    #   end
+    class Builder < Concurrent::Array
+      # Consumer group consistency checking contract
+      CONTRACT = Karafka::Contracts::ConsumerGroup.new.freeze
+
+      private_constant :CONTRACT
+
+      def initialize
+        @draws = Concurrent::Array.new
+      end
+
+      # Used to draw routes for Karafka
+      # @param block [Proc] block we will evaluate within the builder context
+      # @yield Evaluates provided block in a builder context so we can describe routes
+      # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
+      #   doesn't match with the config contract
+      # @note After it is done drawing it will store and validate all the routes to make sure that
+      #   they are correct and that there are no topic/group duplications (this is forbidden)
+      # @example
+      #   draw do
+      #     topic :xyz do
+      #     end
+      #   end
+      def draw(&block)
+        @draws << block
+
+        instance_eval(&block)
+
+        each do |consumer_group|
+          hashed_group = consumer_group.to_h
+          validation_result = CONTRACT.call(hashed_group)
+          next if validation_result.success?
+
+          raise Errors::InvalidConfigurationError, validation_result.errors.to_h
+        end
+      end
+
+      # @return [Array<Karafka::Routing::ConsumerGroup>] only active consumer groups that
+      #   we want to use. Since Karafka supports multi-process setup, we need to be able
+      #   to pick only those consumer groups that should be active in our given process context
+      def active
+        select(&:active?)
+      end
+
+      # Clears the builder and the draws memory
+      def clear
+        @draws.clear
+        super
+      end
+
+      # Redraws all the routes for the in-process code reloading.
+      # @note This won't allow registration of new topics without process restart but will trigger
+      #   cache invalidation so all the classes, etc are re-fetched after code reload
+      def reload
+        draws = @draws.dup
+        clear
+        draws.each { |block| draw(&block) }
+      end
+
+      private
+
+      # Builds and saves given consumer group
+      # @param group_id [String, Symbol] name for consumer group
+      # @param block [Proc] proc that should be executed in the proxy context
+      def consumer_group(group_id, &block)
+        consumer_group = ConsumerGroup.new(group_id.to_s)
+        self << Proxy.new(consumer_group, &block).target
+      end
+
+      # @param topic_name [String, Symbol] name of a topic from which we want to consume
+      # @param block [Proc] proc we want to evaluate in the topic context
+      def topic(topic_name, &block)
+        consumer_group(topic_name) do
+          topic(topic_name, &block).tap(&:build)
+        end
+      end
+    end
+  end
+end
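A sketch of drawing routes through this builder; the consumer class is hypothetical and a real application typically reaches this through the app-level routing DSL rather than instantiating the builder directly:

    builder = Karafka::Routing::Builder.new

    builder.draw do
      consumer_group :videos do
        topic :new_videos do
          consumer NewVideosConsumer
        end
      end
    end

    builder.active # => only the consumer groups this process should actually run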
data/lib/karafka/routing/consumer_group.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    # Object used to describe a single consumer group that is going to subscribe to
+    # given topics
+    # It is a part of Karafka's DSL
+    class ConsumerGroup
+      extend Helpers::ConfigRetriever
+
+      attr_reader :topics
+      attr_reader :id
+      attr_reader :name
+
+      # @param name [String, Symbol] raw name of this consumer group. Raw means that it does not
+      #   yet have an application client_id namespace, this will be added here by default.
+      #   We add it to make a multi-system development easier for people that don't use
+      #   kafka and don't understand the concept of consumer groups.
+      def initialize(name)
+        @name = name
+        @id = Karafka::App.config.consumer_mapper.call(name)
+        @topics = []
+      end
+
+      # @return [Boolean] true if this consumer group should be active in our current process
+      def active?
+        Karafka::Server.consumer_groups.include?(name)
+      end
+
+      # Builds a topic representation inside of a current consumer group route
+      # @param name [String, Symbol] name of topic to which we want to subscribe
+      # @param block [Proc] block that we want to evaluate in the topic context
+      # @return [Karafka::Routing::Topic] newly built topic instance
+      def topic=(name, &block)
+        topic = Topic.new(name, self)
+        @topics << Proxy.new(topic, &block).target.tap(&:build)
+        @topics.last
+      end
+
+      Karafka::AttributesMap.consumer_group.each do |attribute|
+        config_retriever_for(attribute)
+      end
+
+      # Hashed version of consumer group that can be used for validation purposes
+      # @return [Hash] hash with consumer group attributes including serialized to hash
+      #   topics inside of it.
+      def to_h
+        result = {
+          topics: topics.map(&:to_h),
+          id: id
+        }
+
+        Karafka::AttributesMap.consumer_group.each do |attribute|
+          result[attribute] = public_send(attribute)
+        end
+
+        result
+      end
+    end
+  end
+end
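A sketch of building a group by hand (the routing builder normally does this through its proxy); the group name is hypothetical:

    group = Karafka::Routing::ConsumerGroup.new('videos')
    group.id      # => the mapped id, e.g. "example_app_videos" with the default consumer mapper
    group.topics  # => [] until topics are added through the routing proxy
    group.active? # => true only when this process was started for (or without limiting) this group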
data/lib/karafka/routing/consumer_mapper.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Routing
+    # Default consumer mapper that builds consumer ids based on app id and consumer group name
+    # Different mapper can be used in case of preexisting consumer names or for applying
+    # other naming conventions not compatible with Karafka client_id + consumer name concept
+    #
+    # @example Mapper for using consumer groups without a client_id prefix
+    #   class MyMapper
+    #     def call(raw_consumer_group_name)
+    #       raw_consumer_group_name
+    #     end
+    #   end
+    #
+    # @example Mapper for replacing "_" with "." in topic names
+    #   class MyMapper
+    #     def call(raw_consumer_group_name)
+    #       [
+    #         Karafka::Helpers::Inflector.map(Karafka::App.config.client_id.to_s),
+    #         raw_consumer_group_name
+    #       ].join('_').gsub('_', '.')
+    #     end
+    #   end
+    class ConsumerMapper
+      # @param raw_consumer_group_name [String, Symbol] string or symbolized consumer group name
+      # @return [String] remapped final consumer group name
+      def call(raw_consumer_group_name)
+        client_name = Karafka::Helpers::Inflector.map(Karafka::App.config.client_id.to_s)
+        "#{client_name}_#{raw_consumer_group_name}"
+      end
+    end
+  end
+end
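A sketch of the default mapping and a drop-in replacement; the client_id value and setup wiring below are assumptions for illustration:

    Karafka::Routing::ConsumerMapper.new.call('videos')
    # => "<client_id>_videos", e.g. "example_app_videos" when client_id is "ExampleApp"

    # A mapper that keeps pre-existing consumer group names untouched:
    class RawConsumerMapper
      def call(raw_consumer_group_name)
        raw_consumer_group_name
      end
    end

    # karafka.rb, inside the setup block (assumed):
    #   config.consumer_mapper = RawConsumerMapper.new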