karafka 1.2.9 → 1.3.0
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +2 -0
- data.tar.gz.sig +0 -0
- data/.coditsu/ci.yml +3 -0
- data/.console_irbrc +1 -3
- data/.github/FUNDING.yml +3 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.gitignore +1 -0
- data/.ruby-version +1 -1
- data/.travis.yml +30 -10
- data/CHANGELOG.md +69 -1
- data/CONTRIBUTING.md +1 -1
- data/Gemfile +2 -5
- data/Gemfile.lock +67 -56
- data/README.md +10 -11
- data/bin/karafka +1 -1
- data/certs/mensfeld.pem +25 -0
- data/config/errors.yml +38 -5
- data/karafka.gemspec +16 -14
- data/lib/karafka.rb +8 -15
- data/lib/karafka/app.rb +14 -6
- data/lib/karafka/attributes_map.rb +7 -6
- data/lib/karafka/base_consumer.rb +19 -30
- data/lib/karafka/base_responder.rb +45 -27
- data/lib/karafka/cli.rb +1 -1
- data/lib/karafka/cli/console.rb +11 -9
- data/lib/karafka/cli/flow.rb +0 -1
- data/lib/karafka/cli/info.rb +3 -1
- data/lib/karafka/cli/install.rb +28 -6
- data/lib/karafka/cli/server.rb +11 -6
- data/lib/karafka/code_reloader.rb +67 -0
- data/lib/karafka/connection/api_adapter.rb +11 -4
- data/lib/karafka/connection/batch_delegator.rb +51 -0
- data/lib/karafka/connection/builder.rb +1 -1
- data/lib/karafka/connection/client.rb +30 -20
- data/lib/karafka/connection/listener.rb +22 -11
- data/lib/karafka/connection/message_delegator.rb +36 -0
- data/lib/karafka/consumers/callbacks.rb +32 -15
- data/lib/karafka/consumers/includer.rb +30 -18
- data/lib/karafka/consumers/metadata.rb +10 -0
- data/lib/karafka/consumers/responders.rb +2 -2
- data/lib/karafka/contracts.rb +10 -0
- data/lib/karafka/contracts/config.rb +21 -0
- data/lib/karafka/contracts/consumer_group.rb +206 -0
- data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
- data/lib/karafka/contracts/responder_usage.rb +54 -0
- data/lib/karafka/contracts/server_cli_options.rb +29 -0
- data/lib/karafka/errors.rb +17 -16
- data/lib/karafka/fetcher.rb +28 -30
- data/lib/karafka/helpers/class_matcher.rb +11 -1
- data/lib/karafka/helpers/config_retriever.rb +1 -1
- data/lib/karafka/helpers/inflector.rb +26 -0
- data/lib/karafka/helpers/multi_delegator.rb +0 -1
- data/lib/karafka/instrumentation/logger.rb +5 -3
- data/lib/karafka/instrumentation/monitor.rb +15 -9
- data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
- data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
- data/lib/karafka/params/builders/metadata.rb +33 -0
- data/lib/karafka/params/builders/params.rb +36 -0
- data/lib/karafka/params/builders/params_batch.rb +25 -0
- data/lib/karafka/params/metadata.rb +35 -0
- data/lib/karafka/params/params.rb +68 -0
- data/lib/karafka/params/params_batch.rb +35 -20
- data/lib/karafka/patches/ruby_kafka.rb +21 -8
- data/lib/karafka/persistence/client.rb +15 -11
- data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
- data/lib/karafka/persistence/topics.rb +48 -0
- data/lib/karafka/process.rb +0 -2
- data/lib/karafka/responders/builder.rb +1 -1
- data/lib/karafka/responders/topic.rb +6 -8
- data/lib/karafka/routing/builder.rb +36 -8
- data/lib/karafka/routing/consumer_group.rb +1 -1
- data/lib/karafka/routing/consumer_mapper.rb +9 -9
- data/lib/karafka/routing/proxy.rb +10 -1
- data/lib/karafka/routing/topic.rb +5 -3
- data/lib/karafka/routing/topic_mapper.rb +16 -18
- data/lib/karafka/serialization/json/deserializer.rb +27 -0
- data/lib/karafka/serialization/json/serializer.rb +31 -0
- data/lib/karafka/server.rb +25 -27
- data/lib/karafka/setup/config.rb +65 -37
- data/lib/karafka/setup/configurators/water_drop.rb +7 -3
- data/lib/karafka/setup/dsl.rb +0 -1
- data/lib/karafka/status.rb +7 -3
- data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
- data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
- data/lib/karafka/templates/karafka.rb.erb +92 -0
- data/lib/karafka/version.rb +1 -1
- metadata +94 -61
- metadata.gz.sig +0 -0
- data/lib/karafka/callbacks.rb +0 -30
- data/lib/karafka/callbacks/config.rb +0 -22
- data/lib/karafka/callbacks/dsl.rb +0 -16
- data/lib/karafka/connection/delegator.rb +0 -46
- data/lib/karafka/instrumentation/listener.rb +0 -112
- data/lib/karafka/loader.rb +0 -28
- data/lib/karafka/params/dsl.rb +0 -158
- data/lib/karafka/parsers/json.rb +0 -38
- data/lib/karafka/patches/dry_configurable.rb +0 -35
- data/lib/karafka/persistence/topic.rb +0 -29
- data/lib/karafka/schemas/config.rb +0 -24
- data/lib/karafka/schemas/consumer_group.rb +0 -78
- data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
- data/lib/karafka/schemas/responder_usage.rb +0 -39
- data/lib/karafka/schemas/server_cli_options.rb +0 -43
- data/lib/karafka/setup/configurators/base.rb +0 -29
- data/lib/karafka/setup/configurators/params.rb +0 -25
- data/lib/karafka/templates/karafka.rb.example +0 -54
data/lib/karafka/process.rb
CHANGED
@@ -4,8 +4,6 @@ module Karafka
   # Class used to catch signals from ruby Signal class in order to manage Karafka stop
   # @note There might be only one process - this class is a singleton
   class Process
-    include Singleton
-
     # Signal types that we handle
     HANDLED_SIGNALS = %i[
       SIGINT
data/lib/karafka/responders/builder.rb
CHANGED
@@ -3,7 +3,7 @@
 module Karafka
   # Responders namespace encapsulates all the internal responder implementation parts
   module Responders
-    # Responders builder is used
+    # Responders builder is used for finding (based on the consumer class name) a responder
     # that match the consumer. We use it when user does not provide a responder inside routing,
     # but he still names responder with the same convention (and namespaces) as consumer
     #
data/lib/karafka/responders/topic.rb
CHANGED
@@ -7,8 +7,6 @@ module Karafka
    #     Karafka::Responders::Topic.new(:topic_name, {}) #=> #<Karafka::Responders::Topic...
    #   @example Define optional topic
    #     Karafka::Responders::Topic.new(:topic_name, required: false)
-   #   @example Define topic that on which we want to respond multiple times
-   #     Karafka::Responders::Topic.new(:topic_name, multiple_usage: true)
    class Topic
      # Name of the topic on which we want to respond
      attr_reader :name
@@ -26,16 +24,16 @@ module Karafka
        @options.key?(:required) ? @options[:required] : true
      end
 
-     # @return [Boolean] do we expect to use it multiple times in a single respond flow
-     def multiple_usage?
-       @options[:multiple_usage] || false
-     end
-
      # @return [Boolean] was usage of this topic registered or not
      def registered?
        @options[:registered] == true
      end
 
+     # @return [Class] Class to use to serialize messages for this topic
+     def serializer
+       @options[:serializer]
+     end
+
      # @return [Boolean] do we want to use async producer. Defaults to false as the sync producer
      #   is safer and introduces less problems
      def async?
@@ -46,9 +44,9 @@ module Karafka
      def to_h
        {
          name: name,
-         multiple_usage: multiple_usage?,
          required: required?,
          registered: registered?,
+         serializer: serializer,
          async: async?
        }
      end
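Taken together, these hunks drop `multiple_usage` from responder topics and let each topic carry its own serializer. A minimal sketch of how a responder might use the new option (this is not code from the gem; `ApplicationResponder` comes from the generated template and `ExampleResponder`/`:example_topic` are made-up names, assuming the per-topic `serializer:` option is passed through `BaseResponder.topic`):

```ruby
# Hypothetical responder using the per-topic serializer option shown above.
# Any object responding to #call (such as the Serialization::Json::Serializer
# added in this release) should be usable as a serializer here.
class ExampleResponder < ApplicationResponder
  topic :example_topic, serializer: Karafka::Serialization::Json::Serializer.new

  # @param event [Object] payload we want to push to Kafka
  def respond(event)
    respond_to :example_topic, event
  end
end
```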
data/lib/karafka/routing/builder.rb
CHANGED
@@ -9,26 +9,39 @@ module Karafka
    #     consumer NewVideosConsumer
    #   end
    # end
-   class Builder < Array
-
+   class Builder < Concurrent::Array
+     # Consumer group consistency checking contract
+     CONTRACT = Karafka::Contracts::ConsumerGroup.new.freeze
+
+     private_constant :CONTRACT
+
+     def initialize
+       @draws = Concurrent::Array.new
+     end
 
      # Used to draw routes for Karafka
+     # @param block [Proc] block we will evaluate within the builder context
+     # @yield Evaluates provided block in a builder context so we can describe routes
+     # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
+     #   doesn't match with the config contract
      # @note After it is done drawing it will store and validate all the routes to make sure that
      #   they are correct and that there are no topic/group duplications (this is forbidden)
-     # @yield Evaluates provided block in a builder context so we can describe routes
      # @example
      #   draw do
      #     topic :xyz do
      #     end
      #   end
      def draw(&block)
+       @draws << block
+
        instance_eval(&block)
 
        each do |consumer_group|
          hashed_group = consumer_group.to_h
-         validation_result =
-
-
+         validation_result = CONTRACT.call(hashed_group)
+         next if validation_result.success?
+
+         raise Errors::InvalidConfigurationError, validation_result.errors.to_h
        end
      end
 
@@ -39,18 +52,33 @@ module Karafka
        select(&:active?)
      end
 
+     # Clears the builder and the draws memory
+     def clear
+       @draws.clear
+       super
+     end
+
+     # Redraws all the routes for the in-process code reloading.
+     # @note This won't allow registration of new topics without process restart but will trigger
+     #   cache invalidation so all the classes, etc are re-fetched after code reload
+     def reload
+       draws = @draws.dup
+       clear
+       draws.each { |block| draw(&block) }
+     end
+
      private
 
      # Builds and saves given consumer group
      # @param group_id [String, Symbol] name for consumer group
-     # @
+     # @param block [Proc] proc that should be executed in the proxy context
      def consumer_group(group_id, &block)
        consumer_group = ConsumerGroup.new(group_id.to_s)
        self << Proxy.new(consumer_group, &block).target
      end
 
      # @param topic_name [String, Symbol] name of a topic from which we want to consumer
-     # @
+     # @param block [Proc] proc we want to evaluate in the topic context
      def topic(topic_name, &block)
        consumer_group(topic_name) do
          topic(topic_name, &block).tap(&:build)
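In practice the builder change means every `draw` block is memoized in `@draws` and each resulting consumer group is validated against the new `ConsumerGroup` contract. A rough usage sketch (not from the gem; the group, topic, and `ExampleConsumer` names are placeholders, and the `Karafka::App.consumer_groups` accessor is the one used by the generated `karafka.rb`):

```ruby
# Routes that fail the ConsumerGroup contract raise
# Karafka::Errors::InvalidConfigurationError at draw time.
Karafka::App.consumer_groups.draw do
  consumer_group :example_group do
    topic :example_topic do
      consumer ExampleConsumer
    end
  end
end

# Because each block is kept in @draws, the routes can be redrawn after an
# in-process code reload (used by the new Karafka::CodeReloader) without
# restarting the server.
Karafka::App.consumer_groups.reload
```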
data/lib/karafka/routing/consumer_group.rb
CHANGED
@@ -29,7 +29,7 @@ module Karafka
 
     # Builds a topic representation inside of a current consumer group route
     # @param name [String, Symbol] name of topic to which we want to subscribe
-    # @
+    # @param block [Proc] block that we want to evaluate in the topic context
     # @return [Karafka::Routing::Topic] newly built topic instance
     def topic=(name, &block)
       topic = Topic.new(name, self)
data/lib/karafka/routing/consumer_mapper.rb
CHANGED
@@ -4,29 +4,29 @@ module Karafka
   module Routing
     # Default consumer mapper that builds consumer ids based on app id and consumer group name
     # Different mapper can be used in case of preexisting consumer names or for applying
-    # other naming conventions not compatible
+    # other naming conventions not compatible with Karafka client_id + consumer name concept
     #
     # @example Mapper for using consumer groups without a client_id prefix
-    #
-    #   def
+    #   class MyMapper
+    #     def call(raw_consumer_group_name)
    #       raw_consumer_group_name
    #     end
    #   end
    #
    # @example Mapper for replacing "_" with "." in topic names
-    #
-    #   def
+    #   class MyMapper
+    #     def call(raw_consumer_group_name)
    #       [
-    #
+    #         Karafka::Helpers::Inflector.map(Karafka::App.config.client_id.to_s),
    #         raw_consumer_group_name
    #       ].join('_').gsub('_', '.')
    #     end
    #   end
-
+    class ConsumerMapper
      # @param raw_consumer_group_name [String, Symbol] string or symbolized consumer group name
      # @return [String] remapped final consumer group name
-     def
-       client_name =
+     def call(raw_consumer_group_name)
+       client_name = Karafka::Helpers::Inflector.map(Karafka::App.config.client_id.to_s)
        "#{client_name}_#{raw_consumer_group_name}"
      end
    end
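The updated docs point at call-able mapper objects. A hedged sketch of wiring such a mapper in during setup (the mapper class is invented for illustration; `consumer_mapper` is the setup key as I read the 1.3 configuration):

```ruby
# Custom mapper that skips the client_id prefix entirely.
class NoPrefixConsumerMapper
  # @param raw_consumer_group_name [String, Symbol] group name from the routing
  # @return [String] name that will actually be used with Kafka
  def call(raw_consumer_group_name)
    raw_consumer_group_name.to_s
  end
end

Karafka::App.setup do |config|
  # Assumption: the setting expects an instance responding to #call
  config.consumer_mapper = NoPrefixConsumerMapper.new
end
```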
data/lib/karafka/routing/proxy.rb
CHANGED
@@ -14,22 +14,31 @@ module Karafka
        !
      ].freeze
 
+     private_constant :IGNORED_POSTFIXES
+
      # @param target [Object] target object to which we proxy any DSL call
-     # @
+     # @param block [Proc] block that we want to evaluate in the proxy context
      def initialize(target, &block)
        @target = target
        instance_eval(&block)
      end
 
      # Translates the no "=" DSL of routing into elements assignments on target
+     # @param method_name [Symbol] name of the missing method
+     # @param arguments [Array] array with it's arguments
+     # @param block [Proc] block provided to the method
      def method_missing(method_name, *arguments, &block)
        return super unless respond_to_missing?(method_name)
+
        @target.public_send(:"#{method_name}=", *arguments, &block)
      end
 
      # Tells whether or not a given element exists on the target
+     # @param method_name [Symbol] name of the missing method
+     # @param include_private [Boolean] should we include private in the check as well
      def respond_to_missing?(method_name, include_private = false)
        return false if IGNORED_POSTFIXES.any? { |postfix| method_name.to_s.end_with?(postfix) }
+
        @target.respond_to?(:"#{method_name}=", include_private) || super
      end
    end
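For context, this proxy is what turns the assignment-free routing DSL into setter calls on the underlying topic or consumer group. A simplified plain-Ruby illustration of the same idea (not gem code; it skips the ignored-postfix handling):

```ruby
# Minimal stand-in for Karafka::Routing::Proxy: calls without "=" inside the
# block are forwarded to the target as assignments.
class TinyProxy
  def initialize(target, &block)
    @target = target
    instance_eval(&block)
  end

  def method_missing(name, *args, &block)
    @target.public_send(:"#{name}=", *args, &block)
  end

  def respond_to_missing?(name, include_private = false)
    @target.respond_to?(:"#{name}=", include_private) || super
  end
end

Topic = Struct.new(:consumer)
topic = Topic.new

TinyProxy.new(topic) do
  consumer 'ExampleConsumer' # forwarded as topic.consumer = 'ExampleConsumer'
end

topic.consumer #=> "ExampleConsumer"
```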
data/lib/karafka/routing/topic.rb
CHANGED
@@ -7,10 +7,13 @@ module Karafka
    # It is a part of Karafka's DSL
    class Topic
      extend Helpers::ConfigRetriever
+     extend Forwardable
 
      attr_reader :id, :consumer_group
      attr_accessor :consumer
 
+     def_delegator :@consumer_group, :batch_fetching
+
      # @param [String, Symbol] name of a topic on which we want to listen
      # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
      def initialize(name, consumer_group)
@@ -19,7 +22,7 @@ module Karafka
        @attributes = {}
        # @note We use identifier related to the consumer group that owns a topic, because from
        #   Karafka 0.6 we can handle multiple Kafka instances with the same process and we can
-       #   have same topic name across
+       #   have same topic name across multiple Kafkas
        @id = "#{consumer_group.id}_#{@name}"
      end
 
@@ -29,12 +32,11 @@ module Karafka
      #   example for Sidekiq
      def build
        Karafka::AttributesMap.topic.each { |attr| send(attr) }
-       consumer&.topic = self
        self
      end
 
      # @return [Class, nil] Class (not an instance) of a responder that should respond from
-     #   consumer back to Kafka (
+     #   consumer back to Kafka (useful for piping data flows)
      def responder
        @responder ||= Karafka::Responders::Builder.new(consumer).build
      end
data/lib/karafka/routing/topic_mapper.rb
CHANGED
@@ -8,7 +8,7 @@ module Karafka
    #   routes and responders
    #
    # @example Mapper for mapping prefixed topics
-    #
+    #   class MyMapper
    #     PREFIX = "my_user_name."
    #
    #     def incoming(topic)
@@ -21,7 +21,7 @@ module Karafka
    #   end
    #
    # @example Mapper for replacing "." with "_" in topic names
-    #
+    #   class MyMapper
    #     PREFIX = "my_user_name."
    #
    #     def incoming(topic)
@@ -32,23 +32,21 @@ module Karafka
    #       topic.to_s.gsub('_', '.')
    #     end
    #   end
-
-
-
-
-
-
-
-
-      end
+    class TopicMapper
+      # @param topic [String, Symbol] topic
+      # @return [String, Symbol] same topic as on input
+      # @example
+      #   incoming('topic_name') #=> 'topic_name'
+      def incoming(topic)
+        topic
+      end
 
-
-
-
-
-
-
-      end
+      # @param topic [String, Symbol] topic
+      # @return [String, Symbol] same topic as on input
+      # @example
+      #   outgoing('topic_name') #=> 'topic_name'
+      def outgoing(topic)
+        topic
      end
    end
  end
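As with the consumer mapper, a custom topic mapper can be swapped in during setup. A hedged sketch (the mapper class below is invented to mirror the `@example` blocks above; `topic_mapper` is the setup key as I understand the configuration):

```ruby
# Strips a personal prefix on the way in and re-adds it on the way out.
class PrefixedTopicMapper
  PREFIX = 'my_user_name.'

  def incoming(topic)
    topic.to_s.sub(PREFIX, '')
  end

  def outgoing(topic)
    "#{PREFIX}#{topic}"
  end
end

Karafka::App.setup do |config|
  config.topic_mapper = PrefixedTopicMapper.new
end
```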
data/lib/karafka/serialization/json/deserializer.rb
ADDED
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Module for all supported by default serialization and deserialization ways
+  module Serialization
+    # Namespace for json ser/der
+    module Json
+      # Default Karafka Json deserializer for loading JSON data
+      class Deserializer
+        # @param params [Karafka::Params::Params] Full params object that we want to deserialize
+        # @return [Hash] hash with deserialized JSON data
+        # @example
+        #   params = {
+        #     'payload' => "{\"a\":1}",
+        #     'topic' => 'my-topic',
+        #     'headers' => { 'message_type' => :test }
+        #   }
+        #   Deserializer.call(params) #=> { 'a' => 1 }
+        def call(params)
+          ::MultiJson.load(params['payload'])
+        rescue ::MultiJson::ParseError => e
+          raise ::Karafka::Errors::DeserializationError, e
+        end
+      end
+    end
+  end
+end
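Since the deserializer receives the full params object, a custom one only needs to respond to `#call(params)`. A hedged sketch of plugging one in per topic (the XML handling and consumer name are purely illustrative, `Hash.from_xml` assumes ActiveSupport is loaded, and the topic-level `deserializer` routing option is how I read this release):

```ruby
# Any object responding to #call(params) can act as a deserializer.
class XmlDeserializer
  def call(params)
    # Hash.from_xml comes from ActiveSupport - an assumption for this sketch
    Hash.from_xml(params['payload'])
  end
end

Karafka::App.consumer_groups.draw do
  topic :example_xml_topic do
    consumer ExampleXmlConsumer
    deserializer XmlDeserializer.new
  end
end
```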
data/lib/karafka/serialization/json/serializer.rb
ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Module for all supported by default serialization and deserialization ways
+  module Serialization
+    module Json
+      # Default Karafka Json serializer for serializing data
+      class Serializer
+        # @param content [Object] any object that we want to convert to a json string
+        # @return [String] Valid JSON string containing serialized data
+        # @raise [Karafka::Errors::SerializationError] raised when we don't have a way to
+        #   serialize provided data to json
+        # @note When string is passed to this method, we assume that it is already a json
+        #   string and we don't serialize it again. This allows us to serialize data before
+        #   it is being forwarded to this serializer if we want to have a custom (not that simple)
+        #   json serialization
+        #
+        # @example From an ActiveRecord object
+        #   Serializer.call(Repository.first) #=> "{\"repository\":{\"id\":\"04b504e0\"}}"
+        # @example From a string (no changes)
+        #   Serializer.call("{\"a\":1}") #=> "{\"a\":1}"
+        def call(content)
+          return content if content.is_a?(String)
+          return content.to_json if content.respond_to?(:to_json)
+
+          raise Karafka::Errors::SerializationError, content
+        end
+      end
+    end
+  end
+end
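A quick round trip through the two new classes, assuming `json` and the gem's `multi_json` dependency are loadable in the session:

```ruby
require 'json'
require 'karafka'

serializer   = Karafka::Serialization::Json::Serializer.new
deserializer = Karafka::Serialization::Json::Deserializer.new

json = serializer.call(a: 1)          #=> "{\"a\":1}"
serializer.call(json)                 #=> "{\"a\":1}" (strings pass through untouched)

# The deserializer expects a params-like hash with a 'payload' key
deserializer.call('payload' => json)  #=> { "a" => 1 }
```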
data/lib/karafka/server.rb
CHANGED
@@ -6,9 +6,14 @@ module Karafka
    @consumer_threads = Concurrent::Array.new
 
    # How long should we sleep between checks on shutting down consumers
-   SUPERVISION_SLEEP = 1
+   SUPERVISION_SLEEP = 0.1
    # What system exit code should we use when we terminated forcefully
    FORCEFUL_EXIT_CODE = 2
+   # This factor allows us to calculate how many times we have to sleep before
+   #   a forceful shutdown
+   SUPERVISION_CHECK_FACTOR = (1 / SUPERVISION_SLEEP)
+
+   private_constant :SUPERVISION_SLEEP, :FORCEFUL_EXIT_CODE, :SUPERVISION_CHECK_FACTOR
 
    class << self
      # Set of consuming threads. Each consumer thread contains a single consumer
@@ -22,7 +27,7 @@ module Karafka
        process.on_sigint { stop_supervised }
        process.on_sigquit { stop_supervised }
        process.on_sigterm { stop_supervised }
-
+       run_supervised
      end
 
      # @return [Array<String>] array with names of consumer groups that should be consumed in a
@@ -36,49 +41,42 @@ module Karafka
 
      # @return [Karafka::Process] process wrapper instance used to catch system signal calls
      def process
-       Karafka::
+       Karafka::App.config.internal.process
      end
 
      # Starts Karafka with a supervision
      # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to
-      #   finish loop (and it won't happen until we
-      def
+      #   finish loop (and it won't happen until we explicitly want to stop)
+      def run_supervised
        process.supervise
        Karafka::App.run!
-       Karafka::
+       Karafka::App.config.internal.fetcher.call
      end
 
      # Stops Karafka with a supervision (as long as there is a shutdown timeout)
-      #   If consumers won't stop in a given
+      #   If consumers won't stop in a given time frame, it will force them to exit
      def stop_supervised
-       # Because this is called in the trap context, there is a chance that instrumentation
-       # listeners contain things that aren't allowed from within a trap context.
-       # To bypass that (instead of telling users not to do things they need to)
-       # we spin up a thread to instrument server.stop and server.stop.error and wait until
-       # they're finished
-       Thread.new { Karafka.monitor.instrument('server.stop', {}) }.join
-
        Karafka::App.stop!
-       # If there is no shutdown timeout, we don't exit and wait until all the consumers
-       #   had done their work
-       return unless Karafka::App.config.shutdown_timeout
 
-       #
-       #
-
-
-
+       # We check from time to time (for the timeout period) if all the threads finished
+       #   their work and if so, we can just return and normal shutdown process will take place
+       (Karafka::App.config.shutdown_timeout * SUPERVISION_CHECK_FACTOR).to_i.times do
+         if consumer_threads.count(&:alive?).zero?
+           Thread.new { Karafka.monitor.instrument('app.stopped') }.join
+           return
+         end
+
         sleep SUPERVISION_SLEEP
       end
 
-       raise Errors::
-      rescue Errors::
-       Thread.new { Karafka.monitor.instrument('
+       raise Errors::ForcefulShutdownError
+     rescue Errors::ForcefulShutdownError => e
+       Thread.new { Karafka.monitor.instrument('app.stopping.error', error: e) }.join
       # We're done waiting, lets kill them!
       consumer_threads.each(&:terminate)
 
-       # exit is not within the instrumentation as it would not trigger due to exit
-       Kernel.exit FORCEFUL_EXIT_CODE
+       # exit! is not within the instrumentation as it would not trigger due to exit
+       Kernel.exit! FORCEFUL_EXIT_CODE
      end
    end
  end
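The supervision arithmetic: with `SUPERVISION_SLEEP` at 0.1 seconds, `SUPERVISION_CHECK_FACTOR` is 10, so a `shutdown_timeout` of N seconds becomes roughly N * 10 liveness checks before the forceful exit path fires. A standalone sketch of the same loop shape (not gem code; the thread and timeout values stand in for the real consumer threads and config):

```ruby
SUPERVISION_SLEEP = 0.1
SUPERVISION_CHECK_FACTOR = (1 / SUPERVISION_SLEEP) # => 10.0, ten checks per second

shutdown_timeout = 8                   # stands in for Karafka::App.config.shutdown_timeout
threads = [Thread.new { sleep 1 }]     # stands in for the consumer threads

(shutdown_timeout * SUPERVISION_CHECK_FACTOR).to_i.times do
  break if threads.count(&:alive?).zero? # all consumers finished - clean shutdown
  sleep SUPERVISION_SLEEP                # otherwise wait 0.1s and check again
end
```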