karafka 1.4.0.rc1
- checksums.yaml +7 -0
- checksums.yaml.gz.sig +2 -0
- data.tar.gz.sig +0 -0
- data/.coditsu/ci.yml +3 -0
- data/.console_irbrc +11 -0
- data/.diffend.yml +3 -0
- data/.github/FUNDING.yml +3 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.github/workflows/ci.yml +52 -0
- data/.gitignore +69 -0
- data/.rspec +1 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/CHANGELOG.md +566 -0
- data/CODE_OF_CONDUCT.md +46 -0
- data/CONTRIBUTING.md +41 -0
- data/Gemfile +14 -0
- data/Gemfile.lock +139 -0
- data/MIT-LICENCE +18 -0
- data/README.md +99 -0
- data/bin/karafka +19 -0
- data/certs/mensfeld.pem +25 -0
- data/config/errors.yml +39 -0
- data/docker-compose.yml +17 -0
- data/karafka.gemspec +43 -0
- data/lib/karafka.rb +72 -0
- data/lib/karafka/app.rb +53 -0
- data/lib/karafka/attributes_map.rb +62 -0
- data/lib/karafka/backends/inline.rb +16 -0
- data/lib/karafka/base_consumer.rb +57 -0
- data/lib/karafka/base_responder.rb +226 -0
- data/lib/karafka/cli.rb +54 -0
- data/lib/karafka/cli/base.rb +78 -0
- data/lib/karafka/cli/console.rb +31 -0
- data/lib/karafka/cli/flow.rb +48 -0
- data/lib/karafka/cli/info.rb +31 -0
- data/lib/karafka/cli/install.rb +66 -0
- data/lib/karafka/cli/server.rb +71 -0
- data/lib/karafka/code_reloader.rb +67 -0
- data/lib/karafka/connection/api_adapter.rb +161 -0
- data/lib/karafka/connection/batch_delegator.rb +55 -0
- data/lib/karafka/connection/builder.rb +18 -0
- data/lib/karafka/connection/client.rb +117 -0
- data/lib/karafka/connection/listener.rb +71 -0
- data/lib/karafka/connection/message_delegator.rb +36 -0
- data/lib/karafka/consumers/batch_metadata.rb +10 -0
- data/lib/karafka/consumers/callbacks.rb +71 -0
- data/lib/karafka/consumers/includer.rb +64 -0
- data/lib/karafka/consumers/responders.rb +24 -0
- data/lib/karafka/consumers/single_params.rb +15 -0
- data/lib/karafka/contracts.rb +10 -0
- data/lib/karafka/contracts/config.rb +21 -0
- data/lib/karafka/contracts/consumer_group.rb +206 -0
- data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
- data/lib/karafka/contracts/responder_usage.rb +54 -0
- data/lib/karafka/contracts/server_cli_options.rb +31 -0
- data/lib/karafka/errors.rb +51 -0
- data/lib/karafka/fetcher.rb +42 -0
- data/lib/karafka/helpers/class_matcher.rb +88 -0
- data/lib/karafka/helpers/config_retriever.rb +46 -0
- data/lib/karafka/helpers/inflector.rb +26 -0
- data/lib/karafka/helpers/multi_delegator.rb +32 -0
- data/lib/karafka/instrumentation/logger.rb +58 -0
- data/lib/karafka/instrumentation/monitor.rb +70 -0
- data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
- data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
- data/lib/karafka/params/batch_metadata.rb +26 -0
- data/lib/karafka/params/builders/batch_metadata.rb +30 -0
- data/lib/karafka/params/builders/params.rb +38 -0
- data/lib/karafka/params/builders/params_batch.rb +25 -0
- data/lib/karafka/params/metadata.rb +20 -0
- data/lib/karafka/params/params.rb +50 -0
- data/lib/karafka/params/params_batch.rb +60 -0
- data/lib/karafka/patches/ruby_kafka.rb +47 -0
- data/lib/karafka/persistence/client.rb +29 -0
- data/lib/karafka/persistence/consumers.rb +45 -0
- data/lib/karafka/persistence/topics.rb +48 -0
- data/lib/karafka/process.rb +60 -0
- data/lib/karafka/responders/builder.rb +36 -0
- data/lib/karafka/responders/topic.rb +55 -0
- data/lib/karafka/routing/builder.rb +89 -0
- data/lib/karafka/routing/consumer_group.rb +61 -0
- data/lib/karafka/routing/consumer_mapper.rb +34 -0
- data/lib/karafka/routing/proxy.rb +46 -0
- data/lib/karafka/routing/router.rb +29 -0
- data/lib/karafka/routing/topic.rb +62 -0
- data/lib/karafka/routing/topic_mapper.rb +53 -0
- data/lib/karafka/serialization/json/deserializer.rb +27 -0
- data/lib/karafka/serialization/json/serializer.rb +31 -0
- data/lib/karafka/server.rb +86 -0
- data/lib/karafka/setup/config.rb +223 -0
- data/lib/karafka/setup/configurators/water_drop.rb +36 -0
- data/lib/karafka/setup/dsl.rb +21 -0
- data/lib/karafka/status.rb +29 -0
- data/lib/karafka/templates/application_consumer.rb.erb +7 -0
- data/lib/karafka/templates/application_responder.rb.erb +11 -0
- data/lib/karafka/templates/karafka.rb.erb +92 -0
- data/lib/karafka/version.rb +7 -0
- data/log/.gitkeep +0 -0
- metadata +325 -0
- metadata.gz.sig +4 -0
data/lib/karafka/contracts/responder_usage.rb
@@ -0,0 +1,54 @@
# frozen_string_literal: true

module Karafka
  module Contracts
    # Validator to check responder topic usage
    class ResponderUsageTopic < Dry::Validation::Contract
      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')

      params do
        required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
        required(:required).filled(:bool?)
        required(:usage_count).filled(:int?, gteq?: 0)
        required(:registered).filled(eql?: true)
        required(:async).filled(:bool?)
        required(:serializer).filled
      end

      rule(:required, :usage_count) do
        key(:name).failure(:required_usage_count) if values[:required] && values[:usage_count] < 1
      end
    end

    # Validator to check that everything in a responder flow matches responder rules
    class ResponderUsage < Dry::Validation::Contract
      include Dry::Core::Constants

      # Contract for verifying the topic usage details
      TOPIC_CONTRACT = ResponderUsageTopic.new.freeze

      private_constant :TOPIC_CONTRACT

      params do
        required(:used_topics)
        required(:registered_topics)
      end

      rule(:used_topics) do
        (value || EMPTY_ARRAY).each do |used_topic|
          TOPIC_CONTRACT.call(used_topic).errors.each do |error|
            key([:used_topics, used_topic, error.path[0]]).failure(error.text)
          end
        end
      end

      rule(:registered_topics) do
        (value || EMPTY_ARRAY).each do |used_topic|
          TOPIC_CONTRACT.call(used_topic).errors.each do |error|
            key([:registered_topics, used_topic, error.path[0]]).failure(error.text)
          end
        end
      end
    end
  end
end
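A minimal usage sketch for the contract above (not part of the gem's diff; the topic hash is made up and assumes dry-validation plus the gem's config/errors.yml messages are loaded):

contract = Karafka::Contracts::ResponderUsage.new

result = contract.call(
  registered_topics: [],
  used_topics: [
    {
      name: 'events',
      required: true,
      usage_count: 0, # a required topic that was never used triggers :required_usage_count
      registered: true,
      async: false,
      serializer: Karafka::Serialization::Json::Serializer.new
    }
  ]
)

result.success?    #=> false
result.errors.to_h #=> failures keyed by [:used_topics, <topic hash>, :name]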
data/lib/karafka/contracts/server_cli_options.rb
@@ -0,0 +1,31 @@
# frozen_string_literal: true

module Karafka
  module Contracts
    # Contract for validating correctness of the server cli command options
    # We validate some basics plus the list of consumer_groups that we want to use, to make
    # sure that all of them are defined, plus that a pidfile does not exist
    class ServerCliOptions < Dry::Validation::Contract
      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')

      params do
        optional(:pid).filled(:str?)
        optional(:daemon).filled(:bool?)
        optional(:consumer_groups).value(:array, :filled?)
      end

      rule(:pid) do
        key(:pid).failure(:pid_already_exists) if value && File.exist?(value)
      end

      rule(:consumer_groups) do
        # If there were no consumer_groups declared in the server cli, it means that we will
        # run all of them and there is no need to validate them here at all
        if !value.nil? &&
           !(value - Karafka::App.config.internal.routing_builder.map(&:name)).empty?
          key(:consumer_groups).failure(:consumer_groups_inclusion)
        end
      end
    end
  end
end
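A hedged sketch of how these CLI options might be validated inside a booted app (the option values below are illustrative; the consumer_groups check compares against the routing builder, so routes must already be drawn):

options = { pid: 'tmp/pids/karafka.pid', daemon: false, consumer_groups: %w[my_group] }

result = Karafka::Contracts::ServerCliOptions.new.call(options)
result.success?    #=> false unless 'my_group' is a defined consumer group and the pidfile is absent
result.errors.to_h #=> e.g. { consumer_groups: [...] } describing what failed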
data/lib/karafka/errors.rb
@@ -0,0 +1,51 @@
# frozen_string_literal: true

module Karafka
  # Namespace used to encapsulate all the internal errors of Karafka
  module Errors
    # Base class for all the Karafka internal errors
    BaseError = Class.new(StandardError)

    # Should be raised when we have data that we cannot serialize
    SerializationError = Class.new(BaseError)

    # Should be raised when we tried to deserialize incoming data but we failed
    DeserializationError = Class.new(BaseError)

    # Raised when router receives a topic name which does not correspond with any routes
    # This can only happen in a case when:
    # - you've received a message and we cannot match it with a consumer
    # - you've changed the routing, so router can no longer associate your topic to
    #   any consumer
    # - or in a case when you do a lot of meta-programming and you change routing/etc on runtime
    #
    # In case this happens, you will have to create a temporary route that will allow
    # you to "eat" everything from the Sidekiq queue.
    # @see https://github.com/karafka/karafka/issues/135
    NonMatchingRouteError = Class.new(BaseError)

    # Raised when we don't use a responder, or use it in a way that does not match the
    # topics usage definitions
    InvalidResponderUsageError = Class.new(BaseError)

    # Raised when options that we provide to the responder to respond aren't what the contract
    # requires
    InvalidResponderMessageOptionsError = Class.new(BaseError)

    # Raised when configuration doesn't match the validation contract
    InvalidConfigurationError = Class.new(BaseError)

    # Raised when we try to use Karafka CLI commands (except install) without a boot file
    MissingBootFileError = Class.new(BaseError)

    # Raised when we want to read a persisted thread messages consumer but it is unavailable
    # This should never happen and if it does, please contact us
    MissingClientError = Class.new(BaseError)

    # Raised when we want to hook up to an event that is not registered and supported
    UnregisteredMonitorEventError = Class.new(BaseError)

    # Raised when we've waited long enough for shutting down a non-responsive process
    ForcefulShutdownError = Class.new(BaseError)
  end
end
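Since every class above inherits from Karafka::Errors::BaseError, application code can rescue the whole family in one place; a small illustrative sketch (process_message and ErrorTracker are hypothetical names, not gem API):

begin
  process_message(params)  # hypothetical application code
rescue Karafka::Errors::BaseError => e
  ErrorTracker.notify(e)   # hypothetical reporting hook
  raise                    # re-raise so the framework can still shut down cleanly
end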
data/lib/karafka/fetcher.rb
@@ -0,0 +1,42 @@
# frozen_string_literal: true

module Karafka
  # Class used to run the Karafka consumer and handle shutting down, restarting etc
  # @note Creating multiple fetchers will result in having multiple connections to the same
  #   topics, which means that if there are not enough partitions, the extra connections won't be used.
  class Fetcher
    # Starts listening on all the listeners asynchronously
    # Fetch loop should never end, which means that we won't create more actor clusters
    # so we don't have to terminate them
    def call
      threads = listeners.map do |listener|
        # We abort on exception because there should be exception handling developed for
        # each listener running in a separate thread, so the exceptions should never leak
        # and if that happens, it means that something really bad happened and we should stop
        # the whole process
        Thread
          .new { listener.call }
          .tap { |thread| thread.abort_on_exception = true }
      end

      # We aggregate threads here for a supervised shutdown process
      threads.each { |thread| Karafka::Server.consumer_threads << thread }
      threads.each(&:join)
    # If anything crashes here, we need to raise the error and crash the runner because it means
    # that something terrible happened
    rescue StandardError => e
      Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
      Karafka::App.stop!
      raise e
    end

    private

    # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
    def listeners
      @listeners ||= App.consumer_groups.active.map do |consumer_group|
        Karafka::Connection::Listener.new(consumer_group)
      end
    end
  end
end
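A standalone sketch of the threading pattern used by the fetcher (plain Ruby, no Karafka required): each worker runs in its own thread with abort_on_exception enabled, so an unrescued error in any of them takes the whole process down instead of failing silently.

def do_work(id)
  sleep(0.01) # stand-in for a listener's fetch loop
end

workers = 3.times.map do |i|
  Thread
    .new { do_work(i) }
    .tap { |thread| thread.abort_on_exception = true } # unrescued errors abort the whole process
end

workers.each(&:join)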
data/lib/karafka/helpers/class_matcher.rb
@@ -0,0 +1,88 @@
# frozen_string_literal: true

module Karafka
  module Helpers
    # Class used to autodetect corresponding classes that are internally inside Karafka framework
    # It is used among others to match:
    #   consumer => responder
    class ClassMatcher
      # Regexp used to remove any non class-like characters that might be in the consumer
      # class name (if defined dynamically, etc)
      CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}.freeze

      private_constant :CONSTANT_REGEXP

      # @param klass [Class] class to which we want to find a corresponding class
      # @param from [String] what type of object is it (based on a postfix name part)
      # @param to [String] what are we looking for (based on a postfix name part)
      # @example Consumer that has a corresponding responder
      #   matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, from: 'Consumer', to: 'Responder')
      #   matcher.match #=> SuperResponder
      # @example Consumer without a corresponding responder
      #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Consumer, from: 'Consumer', to: 'Responder')
      #   matcher.match #=> nil
      def initialize(klass, from:, to:)
        @klass = klass
        @from = from
        @to = to
      end

      # @return [Class] matched class
      # @return [nil] nil if we couldn't find a matching class
      def match
        return nil if name.empty?
        return nil unless scope.const_defined?(name)

        matching = scope.const_get(name)
        same_scope?(matching) ? matching : nil
      end

      # @return [String] name of a new class that we're looking for
      # @note This method returns the name of a class without a namespace
      # @example From SuperConsumer matching responder
      #   matcher.name #=> 'SuperResponder'
      # @example From Namespaced::Super2Consumer matching responder
      #   matcher.name #=> 'Super2Responder'
      def name
        inflected = +@klass.to_s.split('::').last.to_s
        # We inject the from into the name just in case it is missing, as in such a situation
        # it would just sanitize the name without adding the "to" postfix.
        # It could create cases when we want to build for example a responder to a consumer
        # that does not have the "Consumer" postfix and would do nothing, returning the same name.
        # That would be bad, as the matching classes shouldn't be matched to themselves.
        inflected << @from unless inflected.include?(@from)
        inflected.gsub!(@from, @to)
        inflected.gsub!(CONSTANT_REGEXP, '')
        inflected
      end

      # @return [Class, Module] class or module in which we're looking for a matching
      def scope
        scope_of(@klass)
      end

      private

      # @param klass [Class] class for which we want to extract its enclosing class/module
      # @return [Class, Module] enclosing class/module
      # @return [::Object] object if it was a root class
      #
      # @example Non-namespaced class
      #   scope_of(SuperClass) #=> Object
      # @example Namespaced class
      #   scope_of(Abc::SuperClass) #=> Abc
      def scope_of(klass)
        enclosing = klass.to_s.split('::')[0...-1]
        return ::Object if enclosing.empty?

        ::Object.const_get(enclosing.join('::'))
      end

      # @param matching [Class] class of which scope we want to check
      # @return [Boolean] true if the scope of class is the same as scope of matching
      def same_scope?(matching)
        scope == scope_of(matching)
      end
    end
  end
end
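A runnable sketch of the consumer => responder matching described above; SuperConsumer, SuperResponder and LonelyConsumer are defined here only for the example.

class SuperConsumer; end
class SuperResponder; end

matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, from: 'Consumer', to: 'Responder')
matcher.name  #=> 'SuperResponder'
matcher.match #=> SuperResponder

# A consumer without a matching responder in the same scope simply yields nil
class LonelyConsumer; end
Karafka::Helpers::ClassMatcher.new(LonelyConsumer, from: 'Consumer', to: 'Responder').match #=> nil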
data/lib/karafka/helpers/config_retriever.rb
@@ -0,0 +1,46 @@
# frozen_string_literal: true

module Karafka
  module Helpers
    # A helper that allows us to build methods that try to get a given
    # attribute from its instance value and if it fails, will fall back to
    # the default config or config.kafka value for a given attribute.
    # It is used to simplify the checks.
    # @note Worth noticing, that the value might be equal to false, so even
    #   then we need to return it. That's why we check for nil?
    # @example Define a config-retrieved attribute for start_from_beginning
    #   class Test
    #     extend Karafka::Helpers::ConfigRetriever
    #     config_retriever_for :start_from_beginning
    #   end
    #
    #   Test.new.start_from_beginning #=> false
    #   test_instance = Test.new
    #   test_instance.start_from_beginning = true
    #   test_instance.start_from_beginning #=> true
    module ConfigRetriever
      # Builds proper methods for setting and retrieving (with fallback) a given attribute value
      # @param attribute [Symbol] attribute name based on which we will build
      #   an accessor with fallback
      def config_retriever_for(attribute)
        attr_writer attribute unless method_defined? :"#{attribute}="

        # Don't redefine if we already have an accessor for a given element
        return if method_defined? attribute

        define_method attribute do
          current_value = instance_variable_get(:"@#{attribute}")
          return current_value unless current_value.nil?

          value = if Karafka::App.config.respond_to?(attribute)
                    Karafka::App.config.send(attribute)
                  else
                    Karafka::App.config.kafka.send(attribute)
                  end

          instance_variable_set(:"@#{attribute}", value)
        end
      end
    end
  end
end
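A hedged sketch of the fallback behaviour: an explicitly set instance value wins (even a falsey one), otherwise the reader falls back to Karafka::App.config (or config.kafka). The Example class and the batch_fetching attribute are used here only for illustration and assume a booted Karafka app.

class Example
  extend Karafka::Helpers::ConfigRetriever

  config_retriever_for :batch_fetching
end

example = Example.new
example.batch_fetching         #=> falls back to Karafka::App.config.batch_fetching
example.batch_fetching = false
example.batch_fetching         #=> false (explicit value is kept even though it is falsey)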
data/lib/karafka/helpers/inflector.rb
@@ -0,0 +1,26 @@
# frozen_string_literal: true

module Karafka
  module Helpers
    # Inflector provides inflection for the whole Karafka framework with additional inflection
    # caching (due to the fact that Dry::Inflector is slow)
    module Inflector
      # What inflection engine do we want to use
      ENGINE = Dry::Inflector.new

      @map = Concurrent::Hash.new

      private_constant :ENGINE

      class << self
        # @param string [String] string that we want to convert to our underscore format
        # @return [String] inflected string
        # @example
        #   Karafka::Helpers::Inflector.map('Module/ControllerName') #=> 'module_controller_name'
        def map(string)
          @map[string] ||= ENGINE.underscore(string).tr('/', '_')
        end
      end
    end
  end
end
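A quick sketch of the cached inflection: the second call for the same string is served from the Concurrent::Hash cache instead of hitting Dry::Inflector again.

Karafka::Helpers::Inflector.map('Module/ControllerName') #=> 'module_controller_name'
Karafka::Helpers::Inflector.map('Module/ControllerName') #=> same result, returned from the cache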
data/lib/karafka/helpers/multi_delegator.rb
@@ -0,0 +1,32 @@
# frozen_string_literal: true

module Karafka
  # Module containing classes and methods that provide some additional functionalities
  module Helpers
    # @note Taken from http://stackoverflow.com/questions/6407141
    # MultiDelegator is used to delegate calls to multiple targets
    class MultiDelegator
      # @param targets to which we want to delegate methods
      def initialize(*targets)
        @targets = targets
      end

      class << self
        # @param methods names that should be delegated to
        # @example Delegate write and close to STDOUT and file
        #   Logger.new MultiDelegator.delegate(:write, :close).to(STDOUT, log_file)
        def delegate(*methods)
          methods.each do |m|
            define_method(m) do |*args|
              @targets.map { |t| t.send(m, *args) }
            end
          end

          self
        end

        alias to new
      end
    end
  end
end
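A sketch of the STDOUT-plus-file fan-out from the example comment above (the log file path is a placeholder; Karafka's own Instrumentation::Logger uses this same pattern internally, as shown in the next file).

require 'logger'

log_file = File.open('example.log', 'a')
logdev   = Karafka::Helpers::MultiDelegator.delegate(:write, :close).to(STDOUT, log_file)

logger = Logger.new(logdev)
logger.info('this line goes to both STDOUT and example.log')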
data/lib/karafka/instrumentation/logger.rb
@@ -0,0 +1,58 @@
# frozen_string_literal: true

module Karafka
  module Instrumentation
    # Default logger for Event Delegator
    # @note It uses ::Logger features - providing basic logging
    class Logger < ::Logger
      # Map containing information about log level for a given environment
      ENV_MAP = {
        'production' => ::Logger::ERROR,
        'test' => ::Logger::ERROR,
        'development' => ::Logger::INFO,
        'debug' => ::Logger::DEBUG,
        'default' => ::Logger::INFO
      }.freeze

      private_constant :ENV_MAP

      # Creates a new instance of logger ensuring that it has a place to write to
      # @param _args Any arguments that we don't care about but that are needed in order to
      #   make this logger compatible with the default Ruby one
      def initialize(*_args)
        ensure_dir_exists
        super(target)
        self.level = ENV_MAP[Karafka.env] || ENV_MAP['default']
      end

      private

      # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
      #   to which we will be writing logs
      # We use this approach to log stuff to file and to the STDOUT at the same time
      def target
        Karafka::Helpers::MultiDelegator
          .delegate(:write, :close)
          .to(STDOUT, file)
      end

      # Makes sure the log directory exists as long as we can write to it
      def ensure_dir_exists
        FileUtils.mkdir_p(File.dirname(log_path))
      rescue Errno::EACCES
        nil
      end

      # @return [Pathname] Path to a file to which we should log
      def log_path
        @log_path ||= Karafka::App.root.join("log/#{Karafka.env}.log")
      end

      # @return [File] file to which we want to write our logs
      # @note File is being opened in append mode ('a')
      def file
        @file ||= File.open(log_path, 'a')
      end
    end
  end
end
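A hedged usage sketch, assuming a booted Karafka app (the logger needs Karafka::App.root and Karafka.env): the level comes from ENV_MAP, so ERROR in production and INFO in development, and output goes to both STDOUT and log/<env>.log.

logger = Karafka::Instrumentation::Logger.new
logger.info('visible when the level allows INFO, e.g. in development')
logger.error('visible for every environment defined in ENV_MAP')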
data/lib/karafka/instrumentation/monitor.rb
@@ -0,0 +1,70 @@
# frozen_string_literal: true

module Karafka
  # Namespace for all the things related with Karafka instrumentation process
  module Instrumentation
    # Monitor is used to hookup external monitoring services to monitor how Karafka works
    # It provides a standardized API for checking incoming messages/enqueueing etc
    # Since it is a pub-sub based on dry-monitor, you can use as many subscribers/loggers at the
    # same time, which means that you might have for example file logging and NewRelic at the same
    # time
    # @note This class acts as a singleton because we are only permitted to have a single monitor
    #   per running process (just as logger)
    class Monitor < Dry::Monitor::Notifications
      # List of events that we support in the system and to which a monitor client can hook up
      # @note The non-error ones support timestamp benchmarking
      # @note Depending on Karafka extensions and additional engines, this might not be the
      #   complete list of all the events. Please use the #available_events on a fully loaded
      #   Karafka system to determine all of the events you can use.
      # The producer events come from WaterDrop but for convenience we use the same monitor for
      # the whole karafka ecosystem
      BASE_EVENTS = %w[
        params.params.deserialize
        params.params.deserialize.error
        connection.listener.before_fetch_loop
        connection.listener.fetch_loop
        connection.listener.fetch_loop.error
        connection.client.fetch_loop.error
        connection.batch_delegator.call
        connection.message_delegator.call
        fetcher.call.error
        backends.inline.process
        process.notice_signal
        consumers.responders.respond_with
        async_producer.call.error
        async_producer.call.retry
        sync_producer.call.error
        sync_producer.call.retry
        app.initializing
        app.initialized
        app.running
        app.stopping
        app.stopping.error
        app.stopped
      ].freeze

      private_constant :BASE_EVENTS

      # @return [Karafka::Instrumentation::Monitor] monitor instance for system instrumentation
      def initialize
        super(:karafka)
        BASE_EVENTS.each(&method(:register_event))
      end

      # Allows us to subscribe to events with a code block that will be yielded upon events
      # @param event_name_or_listener [String, Object] name of the event we want to subscribe to
      #   or a listener if we decide to go with an object listener
      def subscribe(event_name_or_listener)
        return super unless event_name_or_listener.is_a?(String)
        return super if available_events.include?(event_name_or_listener)

        raise Errors::UnregisteredMonitorEventError, event_name_or_listener
      end

      # @return [Array<String>] names of available events to which we can subscribe
      def available_events
        __bus__.events.keys
      end
    end
  end
end
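A short sketch of the two subscription styles handled by #subscribe above: a block for a registered event name, or a listener object (as Karafka does with its own StdoutListener). Subscribing to an unknown event name raises UnregisteredMonitorEventError.

# Block style, using one of the BASE_EVENTS names
Karafka.monitor.subscribe('app.initialized') do |_event|
  puts 'Karafka app booted'
end

# Listener-object style; the listener responds to on_* methods matching event names
Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)

# Karafka.monitor.subscribe('not.a.real.event') { } # => raises UnregisteredMonitorEventError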