karafka 1.0.0 → 1.2.0
This diff shows the content of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- checksums.yaml +5 -5
- data/.ruby-version +1 -1
- data/.travis.yml +3 -1
- data/CHANGELOG.md +90 -3
- data/CONTRIBUTING.md +5 -6
- data/Gemfile +1 -1
- data/Gemfile.lock +59 -64
- data/README.md +28 -57
- data/bin/karafka +13 -1
- data/config/errors.yml +6 -0
- data/karafka.gemspec +10 -9
- data/lib/karafka.rb +19 -10
- data/lib/karafka/app.rb +8 -15
- data/lib/karafka/attributes_map.rb +4 -4
- data/lib/karafka/backends/inline.rb +2 -3
- data/lib/karafka/base_consumer.rb +68 -0
- data/lib/karafka/base_responder.rb +41 -17
- data/lib/karafka/callbacks.rb +30 -0
- data/lib/karafka/callbacks/config.rb +22 -0
- data/lib/karafka/callbacks/dsl.rb +16 -0
- data/lib/karafka/cli/base.rb +2 -0
- data/lib/karafka/cli/flow.rb +1 -1
- data/lib/karafka/cli/info.rb +1 -2
- data/lib/karafka/cli/install.rb +2 -3
- data/lib/karafka/cli/server.rb +9 -12
- data/lib/karafka/connection/client.rb +117 -0
- data/lib/karafka/connection/config_adapter.rb +30 -14
- data/lib/karafka/connection/delegator.rb +46 -0
- data/lib/karafka/connection/listener.rb +22 -20
- data/lib/karafka/consumers/callbacks.rb +54 -0
- data/lib/karafka/consumers/includer.rb +51 -0
- data/lib/karafka/consumers/responders.rb +24 -0
- data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
- data/lib/karafka/errors.rb +19 -2
- data/lib/karafka/fetcher.rb +30 -28
- data/lib/karafka/helpers/class_matcher.rb +8 -8
- data/lib/karafka/helpers/config_retriever.rb +2 -2
- data/lib/karafka/instrumentation/listener.rb +112 -0
- data/lib/karafka/instrumentation/logger.rb +55 -0
- data/lib/karafka/instrumentation/monitor.rb +64 -0
- data/lib/karafka/loader.rb +0 -1
- data/lib/karafka/params/dsl.rb +156 -0
- data/lib/karafka/params/params_batch.rb +7 -2
- data/lib/karafka/patches/dry_configurable.rb +7 -7
- data/lib/karafka/patches/ruby_kafka.rb +34 -0
- data/lib/karafka/persistence/client.rb +25 -0
- data/lib/karafka/persistence/consumer.rb +38 -0
- data/lib/karafka/persistence/topic.rb +29 -0
- data/lib/karafka/process.rb +6 -5
- data/lib/karafka/responders/builder.rb +15 -14
- data/lib/karafka/responders/topic.rb +8 -1
- data/lib/karafka/routing/builder.rb +2 -2
- data/lib/karafka/routing/consumer_group.rb +1 -1
- data/lib/karafka/routing/consumer_mapper.rb +34 -0
- data/lib/karafka/routing/router.rb +1 -1
- data/lib/karafka/routing/topic.rb +5 -11
- data/lib/karafka/routing/{mapper.rb → topic_mapper.rb} +2 -2
- data/lib/karafka/schemas/config.rb +4 -5
- data/lib/karafka/schemas/consumer_group.rb +45 -24
- data/lib/karafka/schemas/consumer_group_topic.rb +18 -0
- data/lib/karafka/schemas/responder_usage.rb +1 -0
- data/lib/karafka/server.rb +39 -20
- data/lib/karafka/setup/config.rb +74 -51
- data/lib/karafka/setup/configurators/base.rb +6 -12
- data/lib/karafka/setup/configurators/params.rb +25 -0
- data/lib/karafka/setup/configurators/water_drop.rb +15 -14
- data/lib/karafka/setup/dsl.rb +22 -0
- data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.example} +2 -3
- data/lib/karafka/templates/karafka.rb.example +18 -5
- data/lib/karafka/version.rb +1 -1
- metadata +87 -63
- data/.github/ISSUE_TEMPLATE.md +0 -2
- data/Rakefile +0 -7
- data/lib/karafka/base_controller.rb +0 -118
- data/lib/karafka/connection/messages_consumer.rb +0 -106
- data/lib/karafka/connection/messages_processor.rb +0 -59
- data/lib/karafka/controllers/includer.rb +0 -51
- data/lib/karafka/controllers/responders.rb +0 -19
- data/lib/karafka/logger.rb +0 -53
- data/lib/karafka/monitor.rb +0 -98
- data/lib/karafka/params/params.rb +0 -101
- data/lib/karafka/persistence.rb +0 -18
- data/lib/karafka/setup/configurators/celluloid.rb +0 -22
data/lib/karafka/consumers/includer.rb
ADDED
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Additional functionalities for consumers
+  module Consumers
+    # Module used to inject functionalities into a given consumer class, based on the consumer
+    # topic and its settings
+    # We don't need all the behaviors in all the cases, so it is not worth having everything
+    # in all the cases all the time
+    module Includer
+      class << self
+        # @param consumer_class [Class] consumer class, that will get some functionalities
+        #   based on the topic under which it operates
+        def call(consumer_class)
+          topic = consumer_class.topic
+
+          bind_backend(consumer_class, topic)
+          bind_params(consumer_class, topic)
+          bind_responders(consumer_class, topic)
+        end
+
+        private
+
+        # Figures out backend for a given consumer class, based on the topic backend and
+        # includes it into the consumer class
+        # @param consumer_class [Class] consumer class
+        # @param topic [Karafka::Routing::Topic] topic of a consumer class
+        def bind_backend(consumer_class, topic)
+          backend = Kernel.const_get("::Karafka::Backends::#{topic.backend.to_s.capitalize}")
+          consumer_class.include backend
+        end
+
+        # Adds a single #params support for non batch processed topics
+        # @param consumer_class [Class] consumer class
+        # @param topic [Karafka::Routing::Topic] topic of a consumer class
+        def bind_params(consumer_class, topic)
+          return if topic.batch_consuming
+          consumer_class.include SingleParams
+        end
+
+        # Adds responders support for topics and consumers with responders defined for them
+        # @param consumer_class [Class] consumer class
+        # @param topic [Karafka::Routing::Topic] topic of a consumer class
+        def bind_responders(consumer_class, topic)
+          return unless topic.responder
+          consumer_class.include Responders
+        end
+      end
+    end
+  end
+end
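For orientation, here is a minimal sketch of how this includer plays out in a 1.2 app. The EventsConsumer/EventsResponder names and the :events topic are hypothetical, and App is assumed to be the class generated by the karafka.rb template:

App.consumer_groups.draw do
  topic :events do
    consumer EventsConsumer      # Includer.call runs against this class
    responder EventsResponder    # => Consumers::Responders is included
    batch_consuming false        # => Consumers::SingleParams is included
  end
end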
data/lib/karafka/consumers/responders.rb
ADDED
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Consumers
+    # Feature that allows us to use responders flow in consumer
+    module Responders
+      # Responds with given data using given responder. This allows us to have a similar way of
+      # defining flows like synchronous protocols
+      # @param data Anything we want to pass to responder based on which we want to trigger further
+      #   Kafka responding
+      def respond_with(*data)
+        Karafka.monitor.instrument(
+          'consumers.responders.respond_with',
+          caller: self,
+          data: data
+        ) do
+          # @note we build a new instance of responder each time, as long-running (persisted)
+          #   consumers can respond multiple times during the lifecycle
+          topic.responder.new(topic.parser).call(*data)
+        end
+      end
+    end
+  end
+end
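A responder that could back respond_with above might look like this; the class and topic names are made up:

class EventsResponder < Karafka::BaseResponder
  topic :events_processed

  def respond(event)
    respond_to :events_processed, event
  end
end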
data/lib/karafka/{controllers → consumers}/single_params.rb
RENAMED
@@ -1,12 +1,12 @@
 # frozen_string_literal: true
 
 module Karafka
-  module Controllers
-    # Params alias for single message controllers
+  module Consumers
+    # Params alias for single message consumption consumers
     module SingleParams
       private
 
-      # @return [Karafka::Params::Params] params instance for non batch controllers
+      # @return [Karafka::Params::Params] params instance for non batch consumption consumers
       def params
         params_batch.first
       end
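The rename does not change behavior: on a non-batched topic, #params is simply the head of #params_batch, as in this sketch:

class SingleMessageConsumer < Karafka::BaseConsumer
  def consume
    params             #=> Karafka::Params::Params of the sole message
    params_batch.first #=> the same object
  end
end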
data/lib/karafka/errors.rb
CHANGED
@@ -13,9 +13,9 @@ module Karafka
 
     # Raised when router receives topic name which does not correspond with any routes
     # This can only happen in a case when:
-    # - you've received a message and we cannot match it with a controller
+    # - you've received a message and we cannot match it with a consumer
     # - you've changed the routing, so router can no longer associate your topic to
-    #   any controller
+    #   any consumer
     # - or in a case when you do a lot of metaprogramming and you change routing/etc on runtime
     #
     # In case this happens, you will have to create a temporary route that will allow
@@ -27,7 +27,24 @@ module Karafka
     #   topics usage definitions
     InvalidResponderUsage = Class.new(BaseError)
 
+    # Raised when options that we provide to the responder to respond aren't what the schema
+    #   requires
+    InvalidResponderMessageOptions = Class.new(BaseError)
+
     # Raised when configuration doesn't match with validation schema
     InvalidConfiguration = Class.new(BaseError)
+
+    # Raised when we try to use Karafka CLI commands (except install) without a bootfile
+    MissingBootFile = Class.new(BaseError)
+
+    # Raised when we want to read a persisted thread messages consumer but it is unavailable
+    # This should never happen and if it does, please contact us
+    MissingClient = Class.new(BaseError)
+
+    # Raised when we attempt to pause a partition but the pause timeout is equal to 0
+    InvalidPauseTimeout = Class.new(BaseError)
+
+    # Raised when we want to hook up to an event that is not registered and supported
+    UnregisteredMonitorEvent = Class.new(BaseError)
   end
 end
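The last of these guards the new instrumentation layer (the Monitor class appears later in this diff): subscribing by name to an unregistered event raises instead of failing silently. A sketch with a deliberately bogus event name:

begin
  Karafka.monitor.subscribe('not.a.real.event') { |_event| }
rescue Karafka::Errors::UnregisteredMonitorEvent
  # only names listed by Karafka.monitor.available_events can be used here
end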
data/lib/karafka/fetcher.rb
CHANGED
@@ -5,37 +5,39 @@ module Karafka
   # @note Creating multiple fetchers will result in having multiple connections to the same
   #   topics, which means that if there are no partitions, it won't use them.
   class Fetcher
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      raise e
-    end
+    class << self
+      # Starts listening on all the listeners asynchronously
+      # Fetch loop should never end, which means that we won't create more actor clusters
+      #   so we don't have to terminate them
+      def call
+        threads = listeners.map do |listener|
+          # We abort on exception because there should be an exception handling developed for
+          # each listener running in separate threads, so the exceptions should never leak
+          # and if that happens, it means that something really bad happened and we should stop
+          # the whole process
+          Thread
+            .new { listener.call }
+            .tap { |thread| thread.abort_on_exception = true }
+        end
 
-
-
-
-
-
-
+        # We aggregate threads here for a supervised shutdown process
+        threads.each { |thread| Karafka::Server.consumer_threads << thread }
+        threads.each(&:join)
+      # If anything crashes here, we need to raise the error and crush the runner because it means
+      # that something terrible happened
+      rescue StandardError => e
+        Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
+        Karafka::App.stop!
+        raise e
       end
-    end
 
-
-
-
-
-
+      private
+
+      # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
+      def listeners
+        @listeners ||= App.consumer_groups.active.map do |consumer_group|
+          Karafka::Connection::Listener.new(consumer_group)
+        end
       end
     end
   end
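The rewrite replaces the Celluloid-based fetch loop from 1.0 (its removed lines were not preserved in this extract; note the removal of setup/configurators/celluloid.rb in the file list) with one plain Ruby thread per listener. A standalone sketch of the supervision idea, with sleep standing in for listener.call:

threads = 3.times.map do |i|
  Thread
    .new { sleep(i) } # stand-in for listener.call; an exception raised here...
    .tap { |thread| thread.abort_on_exception = true } # ...would abort the whole process
end
threads.each(&:join)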
data/lib/karafka/helpers/class_matcher.rb
CHANGED
@@ -4,20 +4,20 @@ module Karafka
   module Helpers
     # Class used to autodetect corresponding classes that are internally inside Karafka framework
     # It is used among others to match:
-    #   controller => responder
+    #   consumer => responder
     class ClassMatcher
-      # Regexp used to remove any non classy like characters that might be in the controller
+      # Regexp used to remove any non classy like characters that might be in the consumer
      #   class name (if defined dynamically, etc)
       CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}
 
       # @param klass [Class] class to which we want to find a corresponding class
       # @param from [String] what type of object is it (based on postfix name part)
       # @param to [String] what are we looking for (based on a postfix name part)
-      # @example Controller that has a corresponding responder
-      #   matcher = Karafka::Helpers::ClassMatcher.new(SuperController, from: 'Controller', to: 'Responder')
+      # @example Consumer that has a corresponding responder
+      #   matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, from: 'Consumer', to: 'Responder')
       #   matcher.match #=> SuperResponder
-      # @example Controller without a corresponding responder
-      #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Controller, from: 'Controller', to: 'Responder')
+      # @example Consumer without a corresponding responder
+      #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Consumer, from: 'Consumer', to: 'Responder')
       #   matcher.match #=> nil
       def initialize(klass, from:, to:)
         @klass = klass
@@ -36,9 +36,9 @@ module Karafka
 
       # @return [String] name of a new class that we're looking for
       # @note This method returns name of a class without a namespace
-      # @example From SuperController matching responder
+      # @example From SuperConsumer matching responder
       #   matcher.name #=> 'SuperResponder'
-      # @example From Namespaced::Super2Controller matching responder
+      # @example From Namespaced::Super2Consumer matching responder
       #   matcher.name #=> Super2Responder
       def name
         inflected = @klass.to_s.split('::').last.to_s
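Given the keyword signature in the context lines, usage follows the doc examples directly:

matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, from: 'Consumer', to: 'Responder')
matcher.match #=> SuperResponder when defined, nil otherwise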
data/lib/karafka/helpers/config_retriever.rb
CHANGED
@@ -33,9 +33,9 @@ module Karafka
       return current_value unless current_value.nil?
 
       value = if Karafka::App.config.respond_to?(attribute)
-                Karafka::App.config.public_send(attribute)
+                Karafka::App.config.send(attribute)
               else
-                Karafka::App.config.kafka.public_send(attribute)
+                Karafka::App.config.kafka.send(attribute)
               end
 
       instance_variable_set(:"@#{attribute}", value)
data/lib/karafka/instrumentation/listener.rb
ADDED
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Instrumentation
+    # Default listener that hooks up to our instrumentation and uses its events for logging
+    # It can be removed/replaced or anything without any harm to the Karafka app flow
+    module Listener
+      # Log levels that we use in this particular listener
+      USED_LOG_LEVELS = %i[
+        debug
+        info
+        error
+        fatal
+      ].freeze
+
+      # Injects WaterDrop listener logger actions
+      extend WaterDrop::Instrumentation::Listener
+
+      class << self
+        # Logs details about incoming messages and with which consumer we will consume them
+        # @param event [Dry::Events::Event] event details including payload
+        def on_connection_delegator_call(event)
+          consumer = event[:consumer]
+          topic = consumer.topic.name
+          kafka_messages = event[:kafka_messages]
+          info "#{kafka_messages.count} messages on #{topic} topic delegated to #{consumer.class}"
+        end
+
+        # Logs details about each received message value parsing
+        # @param event [Dry::Events::Event] event details including payload
+        def on_params_params_parse(event)
+          # Keep in mind, that a caller here is a param object not a controller,
+          # so it returns a topic as a string, not a routing topic
+          debug "Params parsing for #{event[:caller].topic} topic successful in #{event[:time]} ms"
+        end
+
+        # Logs unsuccessful parsing attempts of incoming data
+        # @param event [Dry::Events::Event] event details including payload
+        def on_params_params_parse_error(event)
+          error "Params parsing error for #{event[:caller].topic} topic: #{event[:error]}"
+        end
+
+        # Logs errors that occurred in a listener fetch loop
+        # @param event [Dry::Events::Event] event details including payload
+        # @note It's an error as we can recover from it not a fatal
+        def on_connection_listener_fetch_loop_error(event)
+          error "Listener fetch loop error: #{event[:error]}"
+        end
+
+        # Logs errors that are related to the connection itself
+        # @note Karafka will attempt to reconnect, so an error not a fatal
+        # @param event [Dry::Events::Event] event details including payload
+        def on_connection_client_fetch_loop_error(event)
+          error "Client fetch loop error: #{event[:error]}"
+        end
+
+        # Logs info about crashed fetcher
+        # @note If this happens, Karafka will shutdown as it means a critical error
+        #   in one of the threads
+        # @param event [Dry::Events::Event] event details including payload
+        def on_fetcher_call_error(event)
+          fatal "Fetcher crash due to an error: #{event[:error]}"
+        end
+
+        # Logs info about processing of a certain dataset with an inline backend
+        # @param event [Dry::Events::Event] event details including payload
+        def on_backends_inline_process(event)
+          count = event[:caller].send(:params_batch).to_a.size
+          topic = event[:caller].topic.name
+          time = event[:time]
+          info "Inline processing of topic #{topic} with #{count} messages took #{time} ms"
+        end
+
+        # Logs info about system signals that Karafka received
+        # @param event [Dry::Events::Event] event details including payload
+        def on_process_notice_signal(event)
+          info "Received #{event[:signal]} system signal"
+        end
+
+        # Logs info about responder usage within a controller flow
+        # @param event [Dry::Events::Event] event details including payload
+        def on_consumers_responders_respond_with(event)
+          calling = event[:caller].class
+          responder = calling.topic.responder
+          data = event[:data]
+          info "Responded from #{calling} using #{responder} with following data #{data}"
+        end
+
+        # Logs info that we're going to stop the Karafka server
+        # @param _event [Dry::Events::Event] event details including payload
+        def on_server_stop(_event)
+          # We use a separate thread as logging can't be called from trap context
+          Thread.new { info "Stopping Karafka server #{::Process.pid}" }
+        end
+
+        # Logs an error that Karafka was unable to stop the server gracefully and it had to do a
+        # forced exit
+        # @param _event [Dry::Events::Event] event details including payload
+        def on_server_stop_error(_event)
+          # We use a separate thread as logging can't be called from trap context
+          Thread.new { error "Forceful Karafka server #{::Process.pid} stop" }
+        end
+
+        USED_LOG_LEVELS.each do |log_level|
+          define_method log_level do |*args|
+            Karafka.logger.send(log_level, *args)
+          end
+        end
+      end
+    end
+  end
+end
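The listener does nothing on its own; the 1.2 boot file template activates it by subscribing it to the monitor, roughly:

Karafka.monitor.subscribe(Karafka::Instrumentation::Listener)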
data/lib/karafka/instrumentation/logger.rb
ADDED
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Instrumentation
+    # Default logger for Event Delegator
+    # @note It uses ::Logger features - providing basic logging
+    class Logger < ::Logger
+      include Singleton
+
+      # Map containing information about log level for given environment
+      ENV_MAP = {
+        'production' => ::Logger::ERROR,
+        'test' => ::Logger::ERROR,
+        'development' => ::Logger::INFO,
+        'debug' => ::Logger::DEBUG,
+        'default' => ::Logger::INFO
+      }.freeze
+
+      # Creates a new instance of logger ensuring that it has a place to write to
+      def initialize(*_args)
+        ensure_dir_exists
+        super(target)
+        self.level = ENV_MAP[Karafka.env] || ENV_MAP['default']
+      end
+
+      private
+
+      # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
+      #   to which we will be writing logs
+      # We use this approach to log stuff to file and to the STDOUT at the same time
+      def target
+        Karafka::Helpers::MultiDelegator
+          .delegate(:write, :close)
+          .to(STDOUT, file)
+      end
+
+      # Makes sure the log directory exists
+      def ensure_dir_exists
+        dir = File.dirname(log_path)
+        FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
+      end
+
+      # @return [Pathname] Path to a file to which we should log
+      def log_path
+        @log_path ||= Karafka::App.root.join("log/#{Karafka.env}.log")
+      end
+
+      # @return [File] file to which we want to write our logs
+      # @note File is being opened in append mode ('a')
+      def file
+        @file ||= File.open(log_path, 'a')
+      end
+    end
+  end
+end
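Since the logger is exposed as a setup setting, an app that does not want the file-plus-STDOUT default can swap in any ::Logger-compatible object; a sketch assuming the generated App class:

class App < Karafka::App
  setup do |config|
    config.logger = ::Logger.new($stdout)
  end
end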
data/lib/karafka/instrumentation/monitor.rb
ADDED
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Namespace for all the things related with Karafka instrumentation process
+  module Instrumentation
+    # Monitor is used to hookup external monitoring services to monitor how Karafka works
+    # It provides a standardized API for checking incoming messages/enqueueing etc
+    # Since it is a pub-sub based on dry-monitor, you can use as many subscribers/loggers at the
+    # same time, which means that you might have for example file logging and newrelic at the same
+    # time
+    # @note This class acts as a singleton because we are only permitted to have single monitor
+    #   per running process (just as logger)
+    class Monitor < Dry::Monitor::Notifications
+      include Singleton
+
+      # List of events that we support in the system and to which a monitor client can hook up
+      # @note The non-error ones support timestamp benchmarking
+      # @note Depending on Karafka extensions and additional engines, this might not be the
+      #   complete list of all the events. Please use the #available_events on fully loaded
+      #   Karafka system to determine all of the events you can use.
+      # Last 4 events are from WaterDrop but for convenience we use the same monitor for the
+      # whole karafka ecosystem
+      BASE_EVENTS = %w[
+        params.params.parse
+        params.params.parse.error
+        connection.listener.fetch_loop.error
+        connection.client.fetch_loop.error
+        connection.delegator.call
+        fetcher.call.error
+        backends.inline.process
+        process.notice_signal
+        consumers.responders.respond_with
+        async_producer.call.error
+        async_producer.call.retry
+        sync_producer.call.error
+        sync_producer.call.retry
+        server.stop
+        server.stop.error
+      ].freeze
+
+      private_constant :BASE_EVENTS
+
+      # @return [Karafka::Instrumentation::Monitor] monitor instance for system instrumentation
+      def initialize
+        super(:karafka)
+        BASE_EVENTS.each(&method(:register_event))
+      end
+
+      # Allows us to subscribe to events with a code that will be yielded upon events
+      # @param event_name_or_listener [String, Object] name of the event we want to subscribe to
+      #   or a listener if we decide to go with object listener
+      def subscribe(event_name_or_listener)
+        return super unless event_name_or_listener.is_a?(String)
+        return super if available_events.include?(event_name_or_listener)
+        raise Errors::UnregisteredMonitorEvent, event_name_or_listener
+      end
+
+      # @return [Array<String>] names of available events to which we can subscribe
+      def available_events
+        __bus__.events.keys
+      end
+    end
+  end
+end
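Putting the pub-sub pieces together, an app can attach block or object subscribers; ErrorTracker below is a made-up service name:

# Block subscriber for a single registered event
Karafka.monitor.subscribe('connection.listener.fetch_loop.error') do |event|
  ErrorTracker.notify(event[:error])
end

# Object subscriber: anything with on_* methods matching event names,
# like the Instrumentation::Listener above
Karafka.monitor.subscribe(Karafka::Instrumentation::Listener)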