karafka 1.1.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/.ruby-version +1 -1
- data/.travis.yml +1 -0
- data/CHANGELOG.md +46 -2
- data/CONTRIBUTING.md +5 -6
- data/Gemfile +1 -2
- data/Gemfile.lock +41 -29
- data/README.md +13 -19
- data/karafka.gemspec +6 -4
- data/lib/karafka.rb +17 -7
- data/lib/karafka/app.rb +8 -15
- data/lib/karafka/attributes_map.rb +1 -1
- data/lib/karafka/backends/inline.rb +1 -2
- data/lib/karafka/{base_controller.rb → base_consumer.rb} +19 -11
- data/lib/karafka/base_responder.rb +34 -15
- data/lib/karafka/callbacks.rb +30 -0
- data/lib/karafka/callbacks/config.rb +22 -0
- data/lib/karafka/callbacks/dsl.rb +16 -0
- data/lib/karafka/cli/install.rb +2 -3
- data/lib/karafka/cli/server.rb +0 -1
- data/lib/karafka/connection/{consumer.rb → client.rb} +32 -36
- data/lib/karafka/connection/config_adapter.rb +14 -6
- data/lib/karafka/connection/delegator.rb +46 -0
- data/lib/karafka/connection/listener.rb +22 -13
- data/lib/karafka/{controllers → consumers}/callbacks.rb +9 -9
- data/lib/karafka/consumers/includer.rb +51 -0
- data/lib/karafka/consumers/responders.rb +24 -0
- data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
- data/lib/karafka/errors.rb +10 -3
- data/lib/karafka/fetcher.rb +30 -34
- data/lib/karafka/helpers/class_matcher.rb +8 -8
- data/lib/karafka/helpers/config_retriever.rb +2 -2
- data/lib/karafka/instrumentation/listener.rb +112 -0
- data/lib/karafka/instrumentation/logger.rb +55 -0
- data/lib/karafka/instrumentation/monitor.rb +64 -0
- data/lib/karafka/loader.rb +0 -1
- data/lib/karafka/params/{params.rb → dsl.rb} +71 -43
- data/lib/karafka/params/params_batch.rb +7 -2
- data/lib/karafka/patches/dry_configurable.rb +6 -2
- data/lib/karafka/patches/ruby_kafka.rb +10 -10
- data/lib/karafka/persistence/client.rb +25 -0
- data/lib/karafka/persistence/consumer.rb +27 -14
- data/lib/karafka/persistence/topic.rb +29 -0
- data/lib/karafka/process.rb +5 -4
- data/lib/karafka/responders/builder.rb +15 -14
- data/lib/karafka/routing/builder.rb +1 -1
- data/lib/karafka/routing/consumer_mapper.rb +3 -2
- data/lib/karafka/routing/router.rb +1 -1
- data/lib/karafka/routing/topic.rb +5 -11
- data/lib/karafka/schemas/config.rb +3 -0
- data/lib/karafka/schemas/consumer_group.rb +15 -3
- data/lib/karafka/schemas/consumer_group_topic.rb +1 -1
- data/lib/karafka/server.rb +37 -5
- data/lib/karafka/setup/config.rb +47 -21
- data/lib/karafka/setup/configurators/base.rb +6 -12
- data/lib/karafka/setup/configurators/params.rb +25 -0
- data/lib/karafka/setup/configurators/water_drop.rb +6 -3
- data/lib/karafka/setup/dsl.rb +22 -0
- data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.example} +2 -3
- data/lib/karafka/templates/karafka.rb.example +17 -4
- data/lib/karafka/version.rb +1 -1
- metadata +58 -24
- data/.github/ISSUE_TEMPLATE.md +0 -2
- data/lib/karafka/connection/processor.rb +0 -61
- data/lib/karafka/controllers/includer.rb +0 -51
- data/lib/karafka/controllers/responders.rb +0 -19
- data/lib/karafka/logger.rb +0 -53
- data/lib/karafka/monitor.rb +0 -98
- data/lib/karafka/persistence/controller.rb +0 -38
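Before reading the removed files below, note the headline 1.1 → 1.2 rename visible throughout the listing above: "controllers" became "consumers" (base_controller.rb → base_consumer.rb, application_controller.rb.example → application_consumer.rb.example), and logging/monitoring moved under karafka/instrumentation. A rough before/after sketch of what that rename typically means for application code; the class names and method bodies are illustrative, not taken from this diff:

# karafka 1.1 - controllers with #perform
class PingController < ApplicationController
  def perform
    Karafka.logger.info params.inspect
  end
end

# karafka 1.2 - consumers with #consume
class PingConsumer < ApplicationConsumer
  def consume
    Karafka.logger.info params.inspect
  end
end

The removed files are shown in full below.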
data/lib/karafka/controllers/responders.rb
DELETED
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-module Karafka
-  module Controllers
-    # Feature that allows us to use responders flow in controller
-    module Responders
-      # Responds with given data using given responder. This allows us to have a similar way of
-      # defining flows like synchronous protocols
-      # @param data Anything we want to pass to responder based on which we want to trigger further
-      #   Kafka responding
-      def respond_with(*data)
-        Karafka.monitor.notice(self.class, data: data)
-        # @note we build a new instance of responder each time, as a long running (persisted)
-        #   controllers can respond multiple times during the lifecycle
-        topic.responder.new(topic.parser).call(*data)
-      end
-    end
-  end
-end
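The responders flow itself carries over to 1.2 - this mixin reappears as Karafka::Consumers::Responders (consumers/responders.rb in the listing above). A rough sketch of how #respond_with is typically wired up; the class and topic names are illustrative and not part of the gem:

class PongResponder < Karafka::BaseResponder
  # Register the topic this responder is allowed to deliver to
  topic :pongs

  # @param data [Object] whatever the consumer passed to #respond_with
  def respond(data)
    respond_to :pongs, data
  end
end

class PingConsumer < Karafka::BaseConsumer
  def consume
    # Under the hood this builds the topic's responder and calls it, just like
    # the removed Controllers::Responders#respond_with did
    respond_with count: params_batch.to_a.size
  end
end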
data/lib/karafka/logger.rb
DELETED
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-module Karafka
-  # Default logger for Event Delegator
-  # @note It uses ::Logger features - providing basic logging
-  class Logger < ::Logger
-    include Singleton
-
-    # Map containing informations about log level for given environment
-    ENV_MAP = {
-      'production' => ::Logger::ERROR,
-      'test' => ::Logger::ERROR,
-      'development' => ::Logger::INFO,
-      'debug' => ::Logger::DEBUG,
-      default: ::Logger::INFO
-    }.freeze
-
-    # Creates a new instance of logger ensuring that it has a place to write to
-    def initialize(*_args)
-      ensure_dir_exists
-      super(target)
-      self.level = ENV_MAP[Karafka.env] || ENV_MAP[:default]
-    end
-
-    private
-
-    # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
-    #   to which we will be writtng logs
-    # We use this approach to log stuff to file and to the STDOUT at the same time
-    def target
-      Karafka::Helpers::MultiDelegator
-        .delegate(:write, :close)
-        .to(STDOUT, file)
-    end
-
-    # Makes sure the log directory exists
-    def ensure_dir_exists
-      dir = File.dirname(log_path)
-      FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
-    end
-
-    # @return [Pathname] Path to a file to which we should log
-    def log_path
-      @log_path ||= Karafka::App.root.join("log/#{Karafka.env}.log")
-    end
-
-    # @return [File] file to which we want to write our logs
-    # @note File is being opened in append mode ('a')
-    def file
-      @file ||= File.open(log_path, 'a')
-    end
-  end
-end
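In 1.2 this file is superseded by data/lib/karafka/instrumentation/logger.rb (listed above). The sketch below shows how a custom logger could plug in, assuming the expanded setup/config.rb exposes a logger setting - treat config.logger as an assumption here; the rest follows the standard setup block from the karafka.rb template:

class KarafkaApp < Karafka::App
  setup do |config|
    # Illustrative broker list and client id
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    config.client_id = 'example_app'
    # Assumption: 1.2 allows swapping the bundled Instrumentation::Logger for
    # any object speaking the standard ::Logger API (level, info, error, ...)
    config.logger = ::Logger.new($stdout)
  end
end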
data/lib/karafka/monitor.rb
DELETED
@@ -1,98 +0,0 @@
-# frozen_string_literal: true
-
-module Karafka
-  # Monitor is used to hookup external monitoring services to monitor how Karafka works
-  # It provides a standarized API for checking incoming messages/enqueueing etc
-  # By default it implements logging functionalities but can be replaced with any more
-  # sophisticated logging/monitoring system like Errbit, Airbrake, NewRelic
-  # @note This class acts as a singleton because we are only permitted to have single monitor
-  #   per running process (just as logger)
-  # Keep in mind, that if you create your own monitor object, you will have to implement also
-  #   logging functionality (or just inherit, super and do whatever you want)
-  class Monitor
-    include Singleton
-
-    # This method is executed in many important places in the code (during data flow), like
-    # the moment before #consume_async, etc. For full list just grep for 'monitor.notice'
-    # @param caller_class [Class] class of object that executed this call
-    # @param options [Hash] hash with options that we passed to notice. It differs based
-    #   on of who and when is calling
-    # @note We don't provide a name of method in which this was called, because we can take
-    #   it directly from Ruby (see #caller_label method of this class for more details)
-    # @example Notice about consuming with controller_class
-    #   Karafka.monitor.notice(self.class, controller_class: controller_class)
-    # @example Notice about terminating with a signal
-    #   Karafka.monitor.notice(self.class, signal: signal)
-    def notice(caller_class, options = {})
-      logger.info("#{caller_class}##{caller_label} with #{options}")
-    end
-
-    # This method is executed when we want to notify about an error that happened somewhere
-    # in the system
-    # @param caller_class [Class] class of object that executed this call
-    # @param e [Exception] exception that was raised
-    # @note We don't provide a name of method in which this was called, because we can take
-    #   it directly from Ruby (see #caller_label method of this class for more details)
-    # @example Notify about error
-    #   Karafka.monitor.notice(self.class, e)
-    def notice_error(caller_class, e)
-      caller_exceptions_map.each do |level, types|
-        next unless types.include?(caller_class)
-
-        return logger.public_send(level, e)
-      end
-
-      logger.info(e)
-    end
-
-    private
-
-    # @return [Hash] Hash containing informations on which level of notification should
-    #   we use for exceptions that happen in certain parts of Karafka
-    # @note Keep in mind that any not handled here class should be logged with info
-    # @note Those are not maps of exceptions classes but of classes that were callers of this
-    #   particular exception
-    def caller_exceptions_map
-      @caller_exceptions_map ||= {
-        error: [
-          Karafka::Connection::Consumer,
-          Karafka::Connection::Listener,
-          Karafka::Params::Params
-        ],
-        fatal: [
-          Karafka::Fetcher
-        ]
-      }
-    end
-
-    # @return [String] label of method that invoked #notice or #notice_error
-    # @example Check label of method that invoked #notice
-    #   caller_label #=> 'fetch'
-    # @example Check label of method that invoked #notice in a block
-    #   caller_label #=> 'block in fetch'
-    # @example Check label of method that invoked #notice_error
-    #   caller_label #=> 'rescue in target'
-    def caller_label
-      # We need to calculate ancestors because if someone inherits
-      # from this class, caller chains is longer
-      index = self.class.ancestors.index(Karafka::Monitor)
-      # caller_locations has a differs in result whether it is a subclass of
-      # Karafka::Monitor, the basic Karafka::Monitor itself or a super for a subclass.
-      # So to cover all the cases we need to differentiate.
-      # @see https://github.com/karafka/karafka/issues/128
-      # @note It won't work if the monitor caller_label caller class is defined using
-      #   define method
-      super_execution = caller_locations(1, 2)[0].label == caller_locations(1, 2)[1].label
-
-      scope = super_execution ? 1 : nil
-      scope ||= index.positive? ? 0 : 1
-
-      caller_locations(index + 1, 2)[scope].label
-    end
-
-    # @return [Logger] logger instance
-    def logger
-      Karafka.logger
-    end
-  end
-end
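The notice/notice_error API above is replaced in 1.2 by the publish/subscribe monitor under data/lib/karafka/instrumentation/ (monitor.rb, listener.rb and logger.rb in the listing). A rough sketch of the subscription style, assuming the new monitor follows dry-monitor semantics; the event name is illustrative rather than a guaranteed 1.2 event id:

# Subscribe to a single instrumentation event with a block
Karafka.monitor.subscribe('connection.listener.fetch_loop.error') do |event|
  Karafka.logger.error("Fetch loop failure: #{event[:error]}")
end

# Or subscribe an entire listener object, which appears to be how the bundled
# Karafka::Instrumentation::Listener (instrumentation/listener.rb above) is hooked up
Karafka.monitor.subscribe(Karafka::Instrumentation::Listener)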
data/lib/karafka/persistence/controller.rb
DELETED
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-module Karafka
-  # Module used to provide a persistent cache layer for Karafka components that need to be
-  # shared inside of a same thread
-  module Persistence
-    # Module used to provide a persistent cache across batch requests for a given
-    # topic and partition to store some additional details when the persistent mode
-    # for a given topic is turned on
-    class Controller
-      # Thread.current scope under which we store controllers data
-      PERSISTENCE_SCOPE = :controllers
-
-      class << self
-        # @return [Hash] current thread persistence scope hash with all the controllers
-        def all
-          Thread.current[PERSISTENCE_SCOPE] ||= {}
-        end
-
-        # Used to build (if block given) and/or fetch a current controller instance that will be
-        # used to process messages from a given topic and partition
-        # @return [Karafka::BaseController] base controller descendant
-        # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
-        # @param partition [Integer] number of partition for which we want to cache
-        def fetch(topic, partition)
-          all[topic.id] ||= {}
-
-          # We always store a current instance
-          if topic.persistent
-            all[topic.id][partition] ||= yield
-          else
-            all[topic.id][partition] = yield
-          end
-        end
-      end
-    end
-  end
-end
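In 1.2 this class returns as Karafka::Persistence::Consumer (alongside the new persistence/client.rb and persistence/topic.rb above), but the underlying pattern is unchanged. A self-contained sketch of that pattern outside of Karafka - per-thread memoization keyed by topic and partition, with the block re-evaluated on every call when persistence is off; the module name is illustrative:

module PerThreadCache
  SCOPE = :consumers

  # @param topic_id [String] unique topic identifier
  # @param partition [Integer] partition number
  # @param persistent [Boolean] when false, rebuild the instance on every call
  def self.fetch(topic_id, partition, persistent: true)
    store = Thread.current[SCOPE] ||= {}
    store[topic_id] ||= {}

    if persistent
      store[topic_id][partition] ||= yield
    else
      store[topic_id][partition] = yield
    end
  end
end

# Usage: the block builds the instance only when it is not cached yet
consumer = PerThreadCache.fetch('pings', 0) { Object.new }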