karafka 1.2.8 → 1.4.0
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data.tar.gz.sig +0 -0
- data/.coditsu/ci.yml +3 -0
- data/.console_irbrc +1 -3
- data/.diffend.yml +3 -0
- data/.github/FUNDING.yml +3 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.github/workflows/ci.yml +52 -0
- data/.gitignore +1 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +134 -14
- data/CODE_OF_CONDUCT.md +1 -1
- data/CONTRIBUTING.md +1 -1
- data/Gemfile +4 -5
- data/Gemfile.lock +92 -81
- data/README.md +9 -12
- data/bin/karafka +1 -1
- data/certs/mensfeld.pem +25 -0
- data/config/errors.yml +38 -5
- data/docker-compose.yml +17 -0
- data/karafka.gemspec +18 -17
- data/lib/karafka.rb +10 -16
- data/lib/karafka/app.rb +14 -6
- data/lib/karafka/attributes_map.rb +5 -10
- data/lib/karafka/base_consumer.rb +19 -30
- data/lib/karafka/base_responder.rb +45 -27
- data/lib/karafka/cli.rb +2 -2
- data/lib/karafka/cli/console.rb +11 -9
- data/lib/karafka/cli/flow.rb +9 -7
- data/lib/karafka/cli/info.rb +4 -2
- data/lib/karafka/cli/install.rb +30 -6
- data/lib/karafka/cli/server.rb +11 -6
- data/lib/karafka/code_reloader.rb +67 -0
- data/lib/karafka/connection/api_adapter.rb +22 -9
- data/lib/karafka/connection/batch_delegator.rb +55 -0
- data/lib/karafka/connection/builder.rb +5 -3
- data/lib/karafka/connection/client.rb +31 -31
- data/lib/karafka/connection/listener.rb +26 -15
- data/lib/karafka/connection/message_delegator.rb +36 -0
- data/lib/karafka/consumers/batch_metadata.rb +10 -0
- data/lib/karafka/consumers/callbacks.rb +32 -15
- data/lib/karafka/consumers/includer.rb +31 -18
- data/lib/karafka/consumers/responders.rb +2 -2
- data/lib/karafka/contracts.rb +10 -0
- data/lib/karafka/contracts/config.rb +21 -0
- data/lib/karafka/contracts/consumer_group.rb +206 -0
- data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
- data/lib/karafka/contracts/responder_usage.rb +54 -0
- data/lib/karafka/contracts/server_cli_options.rb +31 -0
- data/lib/karafka/errors.rb +17 -16
- data/lib/karafka/fetcher.rb +28 -30
- data/lib/karafka/helpers/class_matcher.rb +12 -2
- data/lib/karafka/helpers/config_retriever.rb +1 -1
- data/lib/karafka/helpers/inflector.rb +26 -0
- data/lib/karafka/helpers/multi_delegator.rb +0 -1
- data/lib/karafka/instrumentation/logger.rb +9 -6
- data/lib/karafka/instrumentation/monitor.rb +15 -9
- data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
- data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
- data/lib/karafka/params/batch_metadata.rb +26 -0
- data/lib/karafka/params/builders/batch_metadata.rb +30 -0
- data/lib/karafka/params/builders/params.rb +38 -0
- data/lib/karafka/params/builders/params_batch.rb +25 -0
- data/lib/karafka/params/metadata.rb +20 -0
- data/lib/karafka/params/params.rb +54 -0
- data/lib/karafka/params/params_batch.rb +35 -21
- data/lib/karafka/patches/ruby_kafka.rb +21 -8
- data/lib/karafka/persistence/client.rb +15 -11
- data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
- data/lib/karafka/persistence/topics.rb +48 -0
- data/lib/karafka/process.rb +0 -2
- data/lib/karafka/responders/builder.rb +1 -1
- data/lib/karafka/responders/topic.rb +6 -8
- data/lib/karafka/routing/builder.rb +36 -8
- data/lib/karafka/routing/consumer_group.rb +1 -1
- data/lib/karafka/routing/consumer_mapper.rb +9 -9
- data/lib/karafka/routing/proxy.rb +10 -1
- data/lib/karafka/routing/topic.rb +5 -3
- data/lib/karafka/routing/topic_mapper.rb +16 -18
- data/lib/karafka/serialization/json/deserializer.rb +27 -0
- data/lib/karafka/serialization/json/serializer.rb +31 -0
- data/lib/karafka/server.rb +29 -28
- data/lib/karafka/setup/config.rb +67 -37
- data/lib/karafka/setup/configurators/water_drop.rb +7 -3
- data/lib/karafka/setup/dsl.rb +0 -1
- data/lib/karafka/status.rb +7 -3
- data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
- data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
- data/lib/karafka/templates/karafka.rb.erb +92 -0
- data/lib/karafka/version.rb +1 -1
- metadata +94 -72
- metadata.gz.sig +0 -0
- data/.travis.yml +0 -21
- data/lib/karafka/callbacks.rb +0 -30
- data/lib/karafka/callbacks/config.rb +0 -22
- data/lib/karafka/callbacks/dsl.rb +0 -16
- data/lib/karafka/connection/delegator.rb +0 -46
- data/lib/karafka/instrumentation/listener.rb +0 -112
- data/lib/karafka/loader.rb +0 -28
- data/lib/karafka/params/dsl.rb +0 -156
- data/lib/karafka/parsers/json.rb +0 -38
- data/lib/karafka/patches/dry_configurable.rb +0 -35
- data/lib/karafka/persistence/topic.rb +0 -29
- data/lib/karafka/schemas/config.rb +0 -24
- data/lib/karafka/schemas/consumer_group.rb +0 -78
- data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
- data/lib/karafka/schemas/responder_usage.rb +0 -39
- data/lib/karafka/schemas/server_cli_options.rb +0 -43
- data/lib/karafka/setup/configurators/base.rb +0 -29
- data/lib/karafka/setup/configurators/params.rb +0 -25
- data/lib/karafka/templates/karafka.rb.example +0 -54
data/lib/karafka/contracts/consumer_group_topic.rb
ADDED
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Contracts
+    # Consumer group topic validation rules
+    class ConsumerGroupTopic < Dry::Validation::Contract
+      params do
+        required(:id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
+        required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
+        required(:backend).filled(included_in?: %i[inline sidekiq])
+        required(:consumer).filled
+        required(:deserializer).filled
+        required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
+        required(:start_from_beginning).filled(:bool?)
+        required(:batch_consuming).filled(:bool?)
+      end
+    end
+  end
+end
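For context (not part of the gem diff): the new Karafka::Contracts classes replace the removed Karafka::Schemas validations and are plain dry-validation contracts, so they can be exercised directly. Below is a minimal, illustrative sketch of calling the consumer group topic contract; all values are made up for the example.

# Illustrative sketch only - not part of the karafka 1.4.0 sources.
require 'karafka'

result = Karafka::Contracts::ConsumerGroupTopic.new.call(
  id: 'example_group_example_topic',    # illustrative group-prefixed id
  name: 'example_topic',                # illustrative topic name
  backend: :inline,
  consumer: Class.new,                  # any consumer class satisfies .filled
  deserializer: ->(params) { params },  # any non-nil deserializer satisfies .filled
  max_bytes_per_partition: 1_048_576,
  start_from_beginning: true,
  batch_consuming: true
)

result.success?    #=> true when the hash satisfies the contract
result.errors.to_h #=> {} on success, per-key error messages otherwise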
data/lib/karafka/contracts/responder_usage.rb
ADDED
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Contracts
+    # Validator to check responder topic usage
+    class ResponderUsageTopic < Dry::Validation::Contract
+      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
+
+      params do
+        required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
+        required(:required).filled(:bool?)
+        required(:usage_count).filled(:int?, gteq?: 0)
+        required(:registered).filled(eql?: true)
+        required(:async).filled(:bool?)
+        required(:serializer).filled
+      end
+
+      rule(:required, :usage_count) do
+        key(:name).failure(:required_usage_count) if values[:required] && values[:usage_count] < 1
+      end
+    end
+
+    # Validator to check that everything in a responder flow matches responder rules
+    class ResponderUsage < Dry::Validation::Contract
+      include Dry::Core::Constants
+
+      # Contract for verifying the topic usage details
+      TOPIC_CONTRACT = ResponderUsageTopic.new.freeze
+
+      private_constant :TOPIC_CONTRACT
+
+      params do
+        required(:used_topics)
+        required(:registered_topics)
+      end
+
+      rule(:used_topics) do
+        (value || EMPTY_ARRAY).each do |used_topic|
+          TOPIC_CONTRACT.call(used_topic).errors.each do |error|
+            key([:used_topics, used_topic, error.path[0]]).failure(error.text)
+          end
+        end
+      end
+
+      rule(:registered_topics) do
+        (value || EMPTY_ARRAY).each do |used_topic|
+          TOPIC_CONTRACT.call(used_topic).errors.each do |error|
+            key([:registered_topics, used_topic, error.path[0]]).failure(error.text)
+          end
+        end
+      end
+    end
+  end
+end
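For context (not part of the gem diff): responders feed their usage data into ResponderUsage as plain hashes shaped like the ResponderUsageTopic params above. A hedged, illustrative sketch of a direct call; the topic hash values are made up:

# Illustrative sketch only - not part of the karafka 1.4.0 sources.
require 'karafka'

# Hypothetical topic usage entry, shaped after the ResponderUsageTopic params
topic_usage = {
  name: 'example_topic',
  required: true,
  usage_count: 1,
  registered: true,
  async: false,
  serializer: Karafka::Serialization::Json::Serializer.new
}

Karafka::Contracts::ResponderUsage.new.call(
  used_topics: [topic_usage],
  registered_topics: [topic_usage]
).success? #=> true for this well-formed example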
data/lib/karafka/contracts/server_cli_options.rb
ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Contracts
+    # Contract for validating correctness of the server cli command options
+    # We validate some basics + the list of consumer_groups on which we want to use, to make
+    # sure that all of them are defined, plus that a pidfile does not exist
+    class ServerCliOptions < Dry::Validation::Contract
+      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
+
+      params do
+        optional(:pid).filled(:str?)
+        optional(:daemon).filled(:bool?)
+        optional(:consumer_groups).value(:array, :filled?)
+      end
+
+      rule(:pid) do
+        key(:pid).failure(:pid_already_exists) if value && File.exist?(value)
+      end
+
+      rule(:consumer_groups) do
+        # If there were no consumer_groups declared in the server cli, it means that we will
+        # run all of them and no need to validate them here at all
+        if !value.nil? &&
+           !(value - Karafka::App.config.internal.routing_builder.map(&:name)).empty?
+          key(:consumer_groups).failure(:consumer_groups_inclusion)
+        end
+      end
+    end
+  end
+end
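For context (not part of the gem diff): this contract is what the `karafka server` command validates its CLI options against before boot. A hedged sketch, assuming a booted application whose routing defines a consumer group named example_group:

# Illustrative sketch only - not part of the karafka 1.4.0 sources.
result = Karafka::Contracts::ServerCliOptions.new.call(
  pid: 'tmp/pids/karafka.pid',       # fails with :pid_already_exists if the file exists
  daemon: false,
  consumer_groups: %w[example_group] # must be a subset of the groups defined in routing
)

result.errors.to_h #=> {} when the pidfile is absent and all groups are known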
data/lib/karafka/errors.rb
CHANGED
@@ -6,17 +6,18 @@ module Karafka
     # Base class for all the Karafka internal errors
     BaseError = Class.new(StandardError)
 
-    # Should be raised when we
-
-
-
+    # Should be raised when we have that that we cannot serialize
+    SerializationError = Class.new(BaseError)
+
+    # Should be raised when we tried to deserialize incoming data but we failed
+    DeserializationError = Class.new(BaseError)
 
     # Raised when router receives topic name which does not correspond with any routes
     # This can only happen in a case when:
     # - you've received a message and we cannot match it with a consumer
     # - you've changed the routing, so router can no longer associate your topic to
     # any consumer
-    # - or in a case when you do a lot of
+    # - or in a case when you do a lot of meta-programming and you change routing/etc on runtime
     #
     # In case this happens, you will have to create a temporary route that will allow
     # you to "eat" everything from the Sidekiq queue.
@@ -25,26 +26,26 @@ module Karafka
 
     # Raised when we don't use or use responder not in the way it expected to based on the
     # topics usage definitions
-
+    InvalidResponderUsageError = Class.new(BaseError)
 
-    # Raised when options that we provide to the responder to respond aren't what the
+    # Raised when options that we provide to the responder to respond aren't what the contract
     # requires
-
+    InvalidResponderMessageOptionsError = Class.new(BaseError)
 
-    # Raised when configuration doesn't match with validation
-
+    # Raised when configuration doesn't match with validation contract
+    InvalidConfigurationError = Class.new(BaseError)
 
-    # Raised when we try to use Karafka CLI commands (except install) without a
-
+    # Raised when we try to use Karafka CLI commands (except install) without a boot file
+    MissingBootFileError = Class.new(BaseError)
 
     # Raised when we want to read a persisted thread messages consumer but it is unavailable
     # This should never happen and if it does, please contact us
-
+    MissingClientError = Class.new(BaseError)
 
     # Raised when want to hook up to an event that is not registered and supported
-
+    UnregisteredMonitorEventError = Class.new(BaseError)
 
-    # Raised when we've waited enough for shutting down
-
+    # Raised when we've waited enough for shutting down a non-responsive process
+    ForcefulShutdownError = Class.new(BaseError)
   end
 end
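For context (not part of the gem diff): every error class above still descends from Karafka::Errors::BaseError, so a single rescue clause covers both the old and the renamed classes. A minimal, illustrative sketch:

# Illustrative sketch only - not part of the karafka 1.4.0 sources.
require 'karafka'

begin
  # DeserializationError accompanies the parsers -> serialization/deserializers
  # move visible in the file list above
  raise Karafka::Errors::DeserializationError, 'malformed payload'
rescue Karafka::Errors::BaseError => e
  Karafka.logger.error("#{e.class}: #{e.message}")
end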
data/lib/karafka/fetcher.rb
CHANGED
@@ -5,39 +5,37 @@ module Karafka
   # @note Creating multiple fetchers will result in having multiple connections to the same
   # topics, which means that if there are no partitions, it won't use them.
   class Fetcher
-
-
-
-
-
-
-
-
-
-
-
-
-          .tap { |thread| thread.abort_on_exception = true }
-      end
-
-      # We aggregate threads here for a supervised shutdown process
-      threads.each { |thread| Karafka::Server.consumer_threads << thread }
-      threads.each(&:join)
-    # If anything crashes here, we need to raise the error and crush the runner because it means
-    # that something terrible happened
-    rescue StandardError => e
-      Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
-      Karafka::App.stop!
-      raise e
+    # Starts listening on all the listeners asynchronously
+    # Fetch loop should never end, which means that we won't create more actor clusters
+    # so we don't have to terminate them
+    def call
+      threads = listeners.map do |listener|
+        # We abort on exception because there should be an exception handling developed for
+        # each listener running in separate threads, so the exceptions should never leak
+        # and if that happens, it means that something really bad happened and we should stop
+        # the whole process
+        Thread
+          .new { listener.call }
+          .tap { |thread| thread.abort_on_exception = true }
       end
 
-
+      # We aggregate threads here for a supervised shutdown process
+      threads.each { |thread| Karafka::Server.consumer_threads << thread }
+      threads.each(&:join)
+    # If anything crashes here, we need to raise the error and crush the runner because it means
+    # that something terrible happened
+    rescue StandardError => e
+      Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
+      Karafka::App.stop!
+      raise e
+    end
+
+    private
 
-
-
-
-
-    end
+    # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
+    def listeners
+      @listeners ||= App.consumer_groups.active.map do |consumer_group|
+        Karafka::Connection::Listener.new(consumer_group)
       end
     end
   end
data/lib/karafka/helpers/class_matcher.rb
CHANGED
@@ -8,7 +8,9 @@ module Karafka
     class ClassMatcher
       # Regexp used to remove any non classy like characters that might be in the consumer
      # class name (if defined dynamically, etc)
-      CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}
+      CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}.freeze
+
+      private_constant :CONSTANT_REGEXP
 
       # @param klass [Class] class to which we want to find a corresponding class
       # @param from [String] what type of object is it (based on postfix name part)
@@ -30,6 +32,7 @@ module Karafka
       def match
         return nil if name.empty?
         return nil unless scope.const_defined?(name)
+
         matching = scope.const_get(name)
         same_scope?(matching) ? matching : nil
       end
@@ -41,7 +44,13 @@ module Karafka
       # @example From Namespaced::Super2Consumer matching responder
       # matcher.name #=> Super2Responder
       def name
-        inflected =
+        inflected = +@klass.to_s.split('::').last.to_s
+        # We inject the from into the name just in case it is missing as in a situation like
+        # that it would just sanitize the name without adding the "to" postfix.
+        # It could create cases when we want to build for example a responder to a consumer
+        # that does not have the "Consumer" postfix and would do nothing returning the same name.
+        # That would be bad as the matching classes shouldn't be matched to themselves.
+        inflected << @from unless inflected.include?(@from)
         inflected.gsub!(@from, @to)
         inflected.gsub!(CONSTANT_REGEXP, '')
         inflected
@@ -65,6 +74,7 @@ module Karafka
       def scope_of(klass)
         enclosing = klass.to_s.split('::')[0...-1]
         return ::Object if enclosing.empty?
+
         ::Object.const_get(enclosing.join('::'))
       end
 
data/lib/karafka/helpers/config_retriever.rb
CHANGED
@@ -5,7 +5,7 @@ module Karafka
     # A helper method that allows us to build methods that try to get a given
     # attribute from its instance value and if it fails, will fallback to
     # the default config or config.kafka value for a given attribute.
-    # It is used to simplify the
+    # It is used to simplify the checks.
     # @note Worth noticing, that the value might be equal to false, so even
     # then we need to return it. That's why we check for nil?
     # @example Define config retried attribute for start_from_beginning
data/lib/karafka/helpers/inflector.rb
ADDED
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Helpers
+    # Inflector provides inflection for the whole Karafka framework with additional inflection
+    # caching (due to the fact, that Dry::Inflector is slow)
+    module Inflector
+      # What inflection engine do we want to use
+      ENGINE = Dry::Inflector.new
+
+      @map = Concurrent::Hash.new
+
+      private_constant :ENGINE
+
+      class << self
+        # @param string [String] string that we want to convert to our underscore format
+        # @return [String] inflected string
+        # @example
+        #   Karafka::Helpers::Inflector.map('Module/ControllerName') #=> 'module_controller_name'
+        def map(string)
+          @map[string] ||= ENGINE.underscore(string).tr('/', '_')
+        end
+      end
+    end
+  end
+end
data/lib/karafka/instrumentation/logger.rb
CHANGED
@@ -5,8 +5,6 @@ module Karafka
     # Default logger for Event Delegator
     # @note It uses ::Logger features - providing basic logging
     class Logger < ::Logger
-      include Singleton
-
       # Map containing information about log level for given environment
       ENV_MAP = {
         'production' => ::Logger::ERROR,
@@ -16,7 +14,11 @@ module Karafka
         'default' => ::Logger::INFO
       }.freeze
 
+      private_constant :ENV_MAP
+
       # Creates a new instance of logger ensuring that it has a place to write to
+      # @param _args Any arguments that we don't care about but that are needed in order to
+      # make this logger compatible with the default Ruby one
       def initialize(*_args)
         ensure_dir_exists
         super(target)
@@ -26,7 +28,7 @@ module Karafka
       private
 
       # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
-      # to which we will be
+      # to which we will be writing logs
       # We use this approach to log stuff to file and to the STDOUT at the same time
       def target
         Karafka::Helpers::MultiDelegator
@@ -34,10 +36,11 @@ module Karafka
           .to(STDOUT, file)
       end
 
-      # Makes sure the log directory exists
+      # Makes sure the log directory exists as long as we can write to it
       def ensure_dir_exists
-
-
+        FileUtils.mkdir_p(File.dirname(log_path))
+      rescue Errno::EACCES
+        nil
       end
 
       # @return [Pathname] Path to a file to which we should log
data/lib/karafka/instrumentation/monitor.rb
CHANGED
@@ -6,13 +6,11 @@ module Karafka
     # Monitor is used to hookup external monitoring services to monitor how Karafka works
     # It provides a standardized API for checking incoming messages/enqueueing etc
     # Since it is a pub-sub based on dry-monitor, you can use as many subscribers/loggers at the
-    # same time, which means that you might have for example file logging and
+    # same time, which means that you might have for example file logging and NewRelic at the same
     # time
     # @note This class acts as a singleton because we are only permitted to have single monitor
     # per running process (just as logger)
     class Monitor < Dry::Monitor::Notifications
-      include Singleton
-
       # List of events that we support in the system and to which a monitor client can hook up
       # @note The non-error once support timestamp benchmarking
       # @note Depending on Karafka extensions and additional engines, this might not be the
@@ -21,11 +19,14 @@ module Karafka
       # Last 4 events are from WaterDrop but for convenience we use the same monitor for the
       # whole karafka ecosystem
       BASE_EVENTS = %w[
-        params.params.
-        params.params.
+        params.params.deserialize
+        params.params.deserialize.error
+        connection.listener.before_fetch_loop
+        connection.listener.fetch_loop
         connection.listener.fetch_loop.error
         connection.client.fetch_loop.error
-        connection.
+        connection.batch_delegator.call
+        connection.message_delegator.call
         fetcher.call.error
         backends.inline.process
         process.notice_signal
@@ -34,8 +35,12 @@ module Karafka
         async_producer.call.retry
         sync_producer.call.error
         sync_producer.call.retry
-
-
+        app.initializing
+        app.initialized
+        app.running
+        app.stopping
+        app.stopping.error
+        app.stopped
       ].freeze
 
       private_constant :BASE_EVENTS
@@ -52,7 +57,8 @@ module Karafka
       def subscribe(event_name_or_listener)
         return super unless event_name_or_listener.is_a?(String)
         return super if available_events.include?(event_name_or_listener)
-
+
+        raise Errors::UnregisteredMonitorEventError, event_name_or_listener
       end
 
       # @return [Array<String>] names of available events to which we can subscribe
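For context (not part of the gem diff): any event name from BASE_EVENTS can be subscribed to with a block, and unknown names now raise Karafka::Errors::UnregisteredMonitorEventError. A hedged, illustrative sketch using one of the newly added app.* events:

# Illustrative sketch only - not part of the karafka 1.4.0 sources.
Karafka.monitor.subscribe('app.stopped') do |event|
  # event is a Dry::Events::Event; its payload depends on the publisher
  Karafka.logger.info("Karafka stopped (#{event.payload.inspect})")
end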
data/lib/karafka/instrumentation/proctitle_listener.rb
ADDED
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Instrumentation
+    # Listener that sets a proc title with a nice descriptive value
+    class ProctitleListener
+      # Updates proc title to an initializing one
+      # @param _event [Dry::Events::Event] event details including payload
+      def on_app_initializing(_event)
+        setproctitle('initializing')
+      end
+
+      # Updates proc title to a running one
+      # @param _event [Dry::Events::Event] event details including payload
+      def on_app_running(_event)
+        setproctitle('running')
+      end
+
+      # Updates proc title to a stopping one
+      # @param _event [Dry::Events::Event] event details including payload
+      def on_app_stopping(_event)
+        setproctitle('stopping')
+      end
+
+      private
+
+      # Sets a proper proc title with our constant prefix
+      # @param status [String] any status we want to set
+      def setproctitle(status)
+        ::Process.setproctitle(
+          "karafka #{Karafka::App.config.client_id} (#{status})"
+        )
+      end
+    end
+  end
+end
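For context (not part of the gem diff): the listener is matched to monitor events by method name (app.initializing → on_app_initializing and so on), so wiring it up is a single subscribe call; the process title then reads something like `karafka <client_id> (running)` while the server is up.

# Illustrative sketch only - not part of the karafka 1.4.0 sources.
Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)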