karafka 1.2.10 → 1.3.1
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data.tar.gz.sig +0 -0
- data/{.coditsu.yml → .coditsu/ci.yml} +1 -1
- data/.console_irbrc +1 -3
- data/.github/FUNDING.yml +3 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.gitignore +1 -0
- data/.ruby-version +1 -1
- data/.travis.yml +4 -17
- data/CHANGELOG.md +73 -0
- data/CONTRIBUTING.md +1 -1
- data/Gemfile +2 -5
- data/Gemfile.lock +77 -66
- data/README.md +10 -11
- data/bin/karafka +1 -1
- data/certs/mensfeld.pem +25 -0
- data/config/errors.yml +38 -5
- data/karafka.gemspec +16 -14
- data/lib/karafka.rb +7 -15
- data/lib/karafka/app.rb +14 -6
- data/lib/karafka/attributes_map.rb +7 -6
- data/lib/karafka/base_consumer.rb +19 -30
- data/lib/karafka/base_responder.rb +45 -27
- data/lib/karafka/cli.rb +1 -1
- data/lib/karafka/cli/console.rb +11 -9
- data/lib/karafka/cli/flow.rb +0 -1
- data/lib/karafka/cli/info.rb +3 -1
- data/lib/karafka/cli/install.rb +30 -6
- data/lib/karafka/cli/server.rb +11 -6
- data/lib/karafka/code_reloader.rb +67 -0
- data/lib/karafka/connection/api_adapter.rb +11 -4
- data/lib/karafka/connection/batch_delegator.rb +51 -0
- data/lib/karafka/connection/builder.rb +1 -1
- data/lib/karafka/connection/client.rb +30 -20
- data/lib/karafka/connection/listener.rb +24 -13
- data/lib/karafka/connection/message_delegator.rb +36 -0
- data/lib/karafka/consumers/callbacks.rb +32 -15
- data/lib/karafka/consumers/includer.rb +30 -18
- data/lib/karafka/consumers/metadata.rb +10 -0
- data/lib/karafka/consumers/responders.rb +2 -2
- data/lib/karafka/contracts.rb +10 -0
- data/lib/karafka/contracts/config.rb +21 -0
- data/lib/karafka/contracts/consumer_group.rb +206 -0
- data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
- data/lib/karafka/contracts/responder_usage.rb +54 -0
- data/lib/karafka/contracts/server_cli_options.rb +29 -0
- data/lib/karafka/errors.rb +17 -16
- data/lib/karafka/fetcher.rb +28 -30
- data/lib/karafka/helpers/class_matcher.rb +11 -1
- data/lib/karafka/helpers/config_retriever.rb +1 -1
- data/lib/karafka/helpers/inflector.rb +26 -0
- data/lib/karafka/helpers/multi_delegator.rb +0 -1
- data/lib/karafka/instrumentation/logger.rb +9 -6
- data/lib/karafka/instrumentation/monitor.rb +15 -9
- data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
- data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
- data/lib/karafka/params/builders/metadata.rb +33 -0
- data/lib/karafka/params/builders/params.rb +36 -0
- data/lib/karafka/params/builders/params_batch.rb +25 -0
- data/lib/karafka/params/metadata.rb +35 -0
- data/lib/karafka/params/params.rb +68 -0
- data/lib/karafka/params/params_batch.rb +35 -20
- data/lib/karafka/patches/ruby_kafka.rb +21 -8
- data/lib/karafka/persistence/client.rb +15 -11
- data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
- data/lib/karafka/persistence/topics.rb +48 -0
- data/lib/karafka/process.rb +0 -2
- data/lib/karafka/responders/builder.rb +1 -1
- data/lib/karafka/responders/topic.rb +6 -8
- data/lib/karafka/routing/builder.rb +36 -8
- data/lib/karafka/routing/consumer_group.rb +1 -1
- data/lib/karafka/routing/consumer_mapper.rb +9 -9
- data/lib/karafka/routing/proxy.rb +10 -1
- data/lib/karafka/routing/topic.rb +5 -3
- data/lib/karafka/routing/topic_mapper.rb +16 -18
- data/lib/karafka/serialization/json/deserializer.rb +27 -0
- data/lib/karafka/serialization/json/serializer.rb +31 -0
- data/lib/karafka/server.rb +25 -27
- data/lib/karafka/setup/config.rb +65 -37
- data/lib/karafka/setup/configurators/water_drop.rb +7 -3
- data/lib/karafka/setup/dsl.rb +0 -1
- data/lib/karafka/status.rb +7 -3
- data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
- data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
- data/lib/karafka/templates/karafka.rb.erb +92 -0
- data/lib/karafka/version.rb +1 -1
- metadata +94 -61
- metadata.gz.sig +0 -0
- data/lib/karafka/callbacks.rb +0 -30
- data/lib/karafka/callbacks/config.rb +0 -22
- data/lib/karafka/callbacks/dsl.rb +0 -16
- data/lib/karafka/connection/delegator.rb +0 -46
- data/lib/karafka/instrumentation/listener.rb +0 -112
- data/lib/karafka/loader.rb +0 -28
- data/lib/karafka/params/dsl.rb +0 -158
- data/lib/karafka/parsers/json.rb +0 -38
- data/lib/karafka/patches/dry_configurable.rb +0 -35
- data/lib/karafka/persistence/topic.rb +0 -29
- data/lib/karafka/schemas/config.rb +0 -24
- data/lib/karafka/schemas/consumer_group.rb +0 -78
- data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
- data/lib/karafka/schemas/responder_usage.rb +0 -39
- data/lib/karafka/schemas/server_cli_options.rb +0 -43
- data/lib/karafka/setup/configurators/base.rb +0 -29
- data/lib/karafka/setup/configurators/params.rb +0 -25
- data/lib/karafka/templates/karafka.rb.example +0 -54
data/lib/karafka/serialization/json/deserializer.rb
ADDED
```diff
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Module for all supported by default serialization and deserialization ways
+  module Serialization
+    # Namespace for json ser/der
+    module Json
+      # Default Karafka Json deserializer for loading JSON data
+      class Deserializer
+        # @param params [Karafka::Params::Params] Full params object that we want to deserialize
+        # @return [Hash] hash with deserialized JSON data
+        # @example
+        #   params = {
+        #     'payload' => "{\"a\":1}",
+        #     'topic' => 'my-topic',
+        #     'headers' => { 'message_type' => :test }
+        #   }
+        #   Deserializer.call(params) #=> { 'a' => 1 }
+        def call(params)
+          ::MultiJson.load(params['payload'])
+        rescue ::MultiJson::ParseError => e
+          raise ::Karafka::Errors::DeserializationError, e
+        end
+      end
+    end
+  end
+end
```
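The deserializer receives the whole params object rather than a raw payload, so custom deserializers can branch on topic or headers. A minimal sketch of a drop-in replacement, assuming the per-topic `deserializer` routing option from this release (class, topic and consumer names are hypothetical):

```ruby
# frozen_string_literal: true

# Hypothetical deserializer that skips JSON parsing for plain-text payloads
class RawStringDeserializer
  # @param params [Karafka::Params::Params] full params object
  # @return [String] raw payload, untouched
  def call(params)
    params['payload']
  end
end

# Plugged in per topic inside the routing definition:
# consumer_groups.draw do
#   topic :raw_logs do
#     consumer RawLogsConsumer
#     deserializer RawStringDeserializer.new
#   end
# end
```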
data/lib/karafka/serialization/json/serializer.rb
ADDED
```diff
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Module for all supported by default serialization and deserialization ways
+  module Serialization
+    module Json
+      # Default Karafka Json serializer for serializing data
+      class Serializer
+        # @param content [Object] any object that we want to convert to a json string
+        # @return [String] Valid JSON string containing serialized data
+        # @raise [Karafka::Errors::SerializationError] raised when we don't have a way to
+        #   serialize provided data to json
+        # @note When string is passed to this method, we assume that it is already a json
+        #   string and we don't serialize it again. This allows us to serialize data before
+        #   it is being forwarded to this serializer if we want to have a custom (not that simple)
+        #   json serialization
+        #
+        # @example From an ActiveRecord object
+        #   Serializer.call(Repository.first) #=> "{\"repository\":{\"id\":\"04b504e0\"}}"
+        # @example From a string (no changes)
+        #   Serializer.call("{\"a\":1}") #=> "{\"a\":1}"
+        def call(content)
+          return content if content.is_a?(String)
+          return content.to_json if content.respond_to?(:to_json)
+
+          raise Karafka::Errors::SerializationError, content
+        end
+      end
+    end
+  end
+end
```
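Note the early return for String: pre-serialized payloads pass through untouched, and anything else must respond to #to_json. A quick behavioral sketch (run inside an app where JSON support is loaded):

```ruby
serializer = Karafka::Serialization::Json::Serializer.new

serializer.call('{"a":1}') #=> "{\"a\":1}" (Strings are assumed to be JSON already)
serializer.call(a: 1)      #=> "{\"a\":1}" (Hash responds to #to_json)
# Objects that are not Strings and do not respond to #to_json raise
# Karafka::Errors::SerializationError
```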
data/lib/karafka/server.rb
CHANGED
```diff
@@ -6,9 +6,14 @@ module Karafka
     @consumer_threads = Concurrent::Array.new

     # How long should we sleep between checks on shutting down consumers
-    SUPERVISION_SLEEP = 1
+    SUPERVISION_SLEEP = 0.1
     # What system exit code should we use when we terminated forcefully
     FORCEFUL_EXIT_CODE = 2
+    # This factor allows us to calculate how many times we have to sleep before
+    # a forceful shutdown
+    SUPERVISION_CHECK_FACTOR = (1 / SUPERVISION_SLEEP)
+
+    private_constant :SUPERVISION_SLEEP, :FORCEFUL_EXIT_CODE, :SUPERVISION_CHECK_FACTOR

     class << self
       # Set of consuming threads. Each consumer thread contains a single consumer
```
```diff
@@ -22,7 +27,7 @@ module Karafka
         process.on_sigint { stop_supervised }
         process.on_sigquit { stop_supervised }
         process.on_sigterm { stop_supervised }
-        start_supervised
+        run_supervised
       end

       # @return [Array<String>] array with names of consumer groups that should be consumed in a
```
```diff
@@ -36,49 +41,42 @@ module Karafka

       # @return [Karafka::Process] process wrapper instance used to catch system signal calls
       def process
-        Karafka::Process.instance
+        Karafka::App.config.internal.process
       end

       # Starts Karafka with a supervision
       # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to
-      #   finish loop (and it won't happen until we explicitily want to stop)
-      def start_supervised
+      #   finish loop (and it won't happen until we explicitly want to stop)
+      def run_supervised
         process.supervise
         Karafka::App.run!
-        Karafka::Fetcher.call
+        Karafka::App.config.internal.fetcher.call
       end

       # Stops Karafka with a supervision (as long as there is a shutdown timeout)
-      # If consumers won't stop in a given timeframe, it will force them to exit
+      # If consumers won't stop in a given time frame, it will force them to exit
       def stop_supervised
-        # Because this is called in the trap context, there is a chance that instrumentation
-        # listeners contain things that aren't allowed from within a trap context.
-        # To bypass that (instead of telling users not to do things they need to)
-        # we spin up a thread to instrument server.stop and server.stop.error and wait until
-        # they're finished
-        Thread.new { Karafka.monitor.instrument('server.stop', {}) }.join
-
         Karafka::App.stop!
-        # If there is no shutdown timeout, we don't exit and wait until all the consumers
-        #   had done their work
-        return unless Karafka::App.config.shutdown_timeout

-        # If there is a timeout, we check every 1 second (for the timeout period) if all
-        # the threads finished their work and if so, we can just return and normal
-        # shutdown process will take place
-        Karafka::App.config.shutdown_timeout.to_i.times do
-          return if consumer_threads.count(&:alive?).zero?
+        # We check from time to time (for the timeout period) if all the threads finished
+        # their work and if so, we can just return and normal shutdown process will take place
+        (Karafka::App.config.shutdown_timeout * SUPERVISION_CHECK_FACTOR).to_i.times do
+          if consumer_threads.count(&:alive?).zero?
+            Thread.new { Karafka.monitor.instrument('app.stopped') }.join
+            return
+          end
+
           sleep SUPERVISION_SLEEP
         end

-        raise Errors::ForcefullShutdown
-      rescue Errors::ForcefullShutdown => error
-        Thread.new { Karafka.monitor.instrument('server.stop.error', error: error) }.join
+        raise Errors::ForcefulShutdownError
+      rescue Errors::ForcefulShutdownError => e
+        Thread.new { Karafka.monitor.instrument('app.stopping.error', error: e) }.join
         # We're done waiting, lets kill them!
         consumer_threads.each(&:terminate)

-        # exit is not within the instrumentation as it would not trigger due to exit
-        Kernel.exit FORCEFUL_EXIT_CODE
+        # exit! is not within the instrumentation as it would not trigger due to exit
+        Kernel.exit! FORCEFUL_EXIT_CODE
       end
     end
   end
```
data/lib/karafka/setup/config.rb
CHANGED
```diff
@@ -8,12 +8,16 @@ module Karafka
     # @note If you want to do some configurations after all of this is done, please add to
     #   karafka/config a proper file (needs to inherit from Karafka::Setup::Configurators::Base
     #   and implement setup method) after that everything will happen automatically
-    # @note This config object allows to create a 1 level nestings (nodes) only. This should be
+    # @note This config object allows to create a 1 level nesting (nodes) only. This should be
     #   enough and will still keep the code simple
     # @see Karafka::Setup::Configurators::Base for more details about configurators api
     class Config
       extend Dry::Configurable

+      # Contract for checking the config provided by the user
+      CONTRACT = Karafka::Contracts::Config.new.freeze
+
+      private_constant :CONTRACT

       # Available settings
       # option client_id [String] kafka client_id - used to provide
```
```diff
@@ -22,21 +26,23 @@ module Karafka
       # What backend do we want to use to process messages
       setting :backend, :inline
       # option logger [Instance] logger that we want to use
-      setting :logger, ::Karafka::Instrumentation::Logger.instance
+      setting :logger, ::Karafka::Instrumentation::Logger.new
       # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
-      setting :monitor, ::Karafka::Instrumentation::Monitor.instance
+      setting :monitor, ::Karafka::Instrumentation::Monitor.new
       # Mapper used to remap consumer groups ids, so in case users migrate from other tools
       #   or they need to maintain their own internal consumer group naming conventions, they
       #   can easily do it, replacing the default client_id + consumer name pattern concept
-      setting :consumer_mapper, Routing::ConsumerMapper
-      # Mapper used to remap names of topics, so we can have a clean internal topic namings
+      setting :consumer_mapper, Routing::ConsumerMapper.new
+      # Mapper used to remap names of topics, so we can have a clean internal topic naming
       #   despite using any Kafka provider that uses namespacing, etc
       #   It needs to implement two methods:
       #   - #incoming - for remapping from the incoming message to our internal format
       #   - #outgoing - for remapping from internal topic name into outgoing message
-      setting :topic_mapper, Routing::TopicMapper
-      # Default parser for parsing and unparsing incoming and outgoing data
-      setting :parser, Karafka::Parsers::Json
+      setting :topic_mapper, Routing::TopicMapper.new
+      # Default serializer for converting whatever we want to send to kafka to json
+      setting :serializer, Karafka::Serialization::Json::Serializer.new
+      # Default deserializer for converting incoming data into ruby objects
+      setting :deserializer, Karafka::Serialization::Json::Deserializer.new
       # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
       # @note Fetching does not equal consuming, see batch_consuming description for details
       setting :batch_fetching, true
```
```diff
@@ -44,29 +50,15 @@ module Karafka
       #   #params_batch will contain params received from Kafka (may be more than 1) so we can
       #   process them in batches
       setting :batch_consuming, false
-      # Should we operate in a single consumer instance across multiple batches of messages,
-      #   from the same partition or should we build a new one for each incoming batch.
-      #   Disabling that can be useful when you want to create a new consumer instance for each
-      #   incoming batch. It's disabled by default, not to create more objects that needed
-      #   on each batch
-      setting :persistent, true
       # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
-      #   longer wait for the consumers to stop gracefully but instead we force
-      #   terminate everything.
-      # @note Keep in mind, that if your business logic
-      # @note If set to nil, it won't forcefully shutdown the process at all.
+      #   longer wait for the consumers to stop gracefully but instead we force terminate
+      #   everything.
       setting :shutdown_timeout, 60
-      # option params_base_class [Class] base class for params class initialization
-      #   This can be either a Hash or a HashWithIndifferentAccess depending on your
-      #   requirements. Note, that by using HashWithIndifferentAccess, you remove some of the
-      #   performance in favor of convenience. This can be useful especially if you already use
-      #   it with Rails, etc
-      setting :params_base_class, Hash

       # option kafka [Hash] - optional - kafka configuration options
       setting :kafka do
         # Array with at least one host
-        setting :seed_brokers
+        setting :seed_brokers, %w[kafka://127.0.0.1:9092]
         # option session_timeout [Integer] the number of seconds after which, if a client
         #   hasn't contacted the Kafka cluster, it will be kicked out of the group.
         setting :session_timeout, 30
```
```diff
@@ -75,6 +67,11 @@ module Karafka
         #   resolved and also "slows" things down, so it prevents from "eating" up all messages and
         #   consuming them with failed code. Use `nil` if you want to pause forever and never retry.
         setting :pause_timeout, 10
+        # option pause_max_timeout [Integer, nil] the maximum number of seconds to pause for,
+        #   or `nil` if no maximum should be enforced.
+        setting :pause_max_timeout, nil
+        # option pause_exponential_backoff [Boolean] whether to enable exponential backoff
+        setting :pause_exponential_backoff, false
         # option offset_commit_interval [Integer] the interval between offset commits,
         #   in seconds.
         setting :offset_commit_interval, 10
```
```diff
@@ -91,7 +88,7 @@ module Karafka
         # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
         #   are stored for further processing. Note, that each item in the queue represents a
         #   response from a single broker
-        setting :fetcher_max_queue_size, 100
+        setting :fetcher_max_queue_size, 10
         # option max_bytes_per_partition [Integer] the maximum amount of data fetched
         #   from a single partition at a time.
         setting :max_bytes_per_partition, 1_048_576
```
```diff
@@ -136,6 +133,8 @@ module Karafka
         # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
         #   certificate store
         setting :ssl_ca_certs_from_system, false
+        # option ssl_verify_hostname [Boolean] Verify the hostname for client certs
+        setting :ssl_verify_hostname, true
         # option ssl_client_cert [String, nil] SSL client certificate
         setting :ssl_client_cert, nil
         # option ssl_client_cert_key [String, nil] SSL client certificate password
```
```diff
@@ -156,10 +155,39 @@ module Karafka
         setting :sasl_scram_password, nil
         # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
         setting :sasl_scram_mechanism, nil
+        # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
+        setting :sasl_over_ssl, true
+        # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
+        setting :ssl_client_cert_chain, nil
+        # option ssl_client_cert_key_password [String, nil] the password required to read
+        #   the ssl_client_cert_key
+        setting :ssl_client_cert_key_password, nil
+        # @param sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
+        #   implements method token.
+        setting :sasl_oauth_token_provider, nil
+      end
+
+      # Namespace for internal settings that should not be modified
+      # It's a temporary step to "declassify" several things internally before we move to a
+      # non global state
+      setting :internal do
+        # option routing_builder [Karafka::Routing::Builder] builder instance
+        setting :routing_builder, Routing::Builder.new
+        # option status [Karafka::Status] app status
+        setting :status, Status.new
+        # option process [Karafka::Process] process status
+        # @note In the future, we need to have a single process representation for all the karafka
+        #   instances
+        setting :process, Process.new
+        # option fetcher [Karafka::Fetcher] fetcher instance
+        setting :fetcher, Fetcher.new
+        # option configurators [Array<Object>] all configurators that we want to run after
+        #   the setup
+        setting :configurators, [Configurators::WaterDrop.new]
       end

       class << self
-        # Configurating method
+        # Configuring method
         # @yield Runs a block of code providing a config singleton instance to it
         # @yieldparam [Karafka::Setup::Config] Karafka config instance
         def setup
```
```diff
@@ -170,22 +198,22 @@ module Karafka
         # Components are in karafka/config directory and are all loaded one by one
         # If you want to configure a next component, please add a proper file to config dir
         def setup_components
-          [
-            Configurators::Params,
-            Configurators::WaterDrop
-          ].each { |klass| klass.new(config).setup }
+          config
+            .internal
+            .configurators
+            .each { |configurator| configurator.call(config) }
         end

-        # Validate config based on ConfigurationSchema
+        # Validate config based on the config contract
         # @return [Boolean] true if configuration is valid
-        # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
-        #   doesn't match with ConfigurationSchema
+        # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
+        #   doesn't match with the config contract
         def validate!
-          validation_result = Karafka::Schemas::Config.call(config.to_h)
+          validation_result = CONTRACT.call(config.to_h)

           return true if validation_result.success?

-          raise Errors::InvalidConfiguration, validation_result.errors
+          raise Errors::InvalidConfigurationError, validation_result.errors.to_h
         end
       end
     end
```
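Taken together with the defaults above, a 1.3 setup block can opt into the new options explicitly. A sketch (values are illustrative, not recommendations):

```ruby
# frozen_string_literal: true

class KarafkaApp < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    # serializer/deserializer replace the 1.2 parser setting
    config.serializer = Karafka::Serialization::Json::Serializer.new
    config.deserializer = Karafka::Serialization::Json::Deserializer.new
    # the new pause controls live under the kafka namespace
    config.kafka.pause_max_timeout = 30
    config.kafka.pause_exponential_backoff = true
    config.kafka.ssl_verify_hostname = true
  end
end
```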
data/lib/karafka/setup/configurators/water_drop.rb
CHANGED
```diff
@@ -2,26 +2,30 @@

 module Karafka
   module Setup
-    class Configurators
+    # Configurators are used to post setup some of the components of Karafka after the core
+    # framework is initialized
+    module Configurators
       # Class responsible for setting up WaterDrop configuration
-      class WaterDrop < Base
+      class WaterDrop
         # Sets up a WaterDrop settings
         # @param config [Karafka::Setup::Config] Config we can user to setup things
         # @note This will also inject Karafka monitor as a default monitor into WaterDrop,
         #   so we have the same monitor within whole Karafka framework (same with logger)
-        def setup
+        def call(config)
           ::WaterDrop.setup do |water_config|
             water_config.deliver = true

             config.to_h.reject { |k, _v| k == :kafka }.each do |k, v|
               key_assignment = :"#{k}="
               next unless water_config.respond_to?(key_assignment)
+
               water_config.public_send(key_assignment, v)
             end

             config.kafka.to_h.each do |k, v|
               key_assignment = :"#{k}="
               next unless water_config.kafka.respond_to?(key_assignment)
+
               water_config.kafka.public_send(key_assignment, v)
             end
           end
```
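Configurators now expose a bare #call(config) and are collected in config.internal.configurators, so extra post-setup hooks can follow the same shape. A hypothetical sketch (MetricsConfigurator and MyMetrics are illustrative, and appending to the internal array is an assumption based on the Array setting above):

```ruby
# Hypothetical configurator mirroring the WaterDrop one
class MetricsConfigurator
  # @param config [Karafka::Setup::Config] root Karafka config
  def call(config)
    MyMetrics.setup do |metrics_config|
      metrics_config.client_id = config.client_id
      metrics_config.logger = config.logger
    end
  end
end

Karafka::App.config.internal.configurators << MetricsConfigurator.new
```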
data/lib/karafka/setup/dsl.rb
CHANGED
data/lib/karafka/status.rb
CHANGED
```diff
@@ -3,15 +3,16 @@
 module Karafka
   # App status monitor
   class Status
-    include Singleton
-
     # Available states and their transitions
     STATES = {
       initializing: :initialize!,
+      initialized: :initialized!,
       running: :run!,
-      stopped: :stop!
+      stopping: :stop!
     }.freeze

+    private_constant :STATES
+
     STATES.each do |state, transition|
       define_method :"#{state}?" do
         @status == state
```
```diff
@@ -19,6 +20,9 @@ module Karafka

       define_method transition do
         @status = state
+        # Trap context disallows to run certain things that we instrument
+        # so the state changes are executed from a separate thread
+        Thread.new { Karafka.monitor.instrument("app.#{state}") }.join
       end
     end
   end
```
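Each transition now both defines a predicate and emits an app.&lt;state&gt; event, so listeners can hook into lifecycle changes. A small sketch (the log line is illustrative):

```ruby
# The stopping transition instruments 'app.stopping'
Karafka.monitor.subscribe('app.stopping') do |_event|
  Karafka.logger.info('Karafka is shutting down')
end

# Predicates are generated from the STATES map
Karafka::App.running? #=> true once the fetcher is running
```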
data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb}
RENAMED
```diff
@@ -3,4 +3,5 @@
 # Application consumer from which all Karafka consumers should inherit
 # You can rename it if it would conflict with your current code base (in case you're integrating
 # Karafka with other frameworks)
-ApplicationConsumer = Class.new(Karafka::BaseConsumer)
+class ApplicationConsumer < Karafka::BaseConsumer
+end
```
data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb}
RENAMED
File without changes
data/lib/karafka/templates/karafka.rb.erb
ADDED
```diff
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+<% if rails? -%>
+ENV['RAILS_ENV'] ||= 'development'
+ENV['KARAFKA_ENV'] = ENV['RAILS_ENV']
+require ::File.expand_path('../config/environment', __FILE__)
+Rails.application.eager_load!
+
+# This lines will make Karafka print to stdout like puma or unicorn
+if Rails.env.development?
+  Rails.logger.extend(
+    ActiveSupport::Logger.broadcast(
+      ActiveSupport::Logger.new($stdout)
+    )
+  )
+end
+<% else -%>
+# This file is auto-generated during the install process.
+# If by any chance you've wanted a setup for Rails app, either run the `karafka:install`
+# command again or refer to the install templates available in the source codes
+
+ENV['RACK_ENV'] ||= 'development'
+ENV['KARAFKA_ENV'] ||= ENV['RACK_ENV']
+Bundler.require(:default, ENV['KARAFKA_ENV'])
+
+# Zeitwerk custom loader for loading the app components before the whole
+# Karafka framework configuration
+APP_LOADER = Zeitwerk::Loader.new
+APP_LOADER.enable_reloading
+
+%w[
+  lib
+  app/consumers
+  app/responders
+  app/workers
+].each(&APP_LOADER.method(:push_dir))
+
+APP_LOADER.setup
+APP_LOADER.eager_load
+<% end -%>
+
+class KarafkaApp < Karafka::App
+  setup do |config|
+    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
+    config.client_id = 'example_app'
+<% if rails? -%>
+    config.logger = Rails.logger
+<% end -%>
+  end
+
+  # Comment out this part if you are not using instrumentation and/or you are not
+  # interested in logging events for certain environments. Since instrumentation
+  # notifications add extra boilerplate, if you want to achieve max performance,
+  # listen to only what you really need for given environment.
+  Karafka.monitor.subscribe(WaterDrop::Instrumentation::StdoutListener.new)
+  Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)
+  Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)
+
+  # Uncomment that in order to achieve code reload in development mode
+  # Be aware, that this might have some side-effects. Please refer to the wiki
+  # for more details on benefits and downsides of the code reload in the
+  # development mode
+  #
+  # Karafka.monitor.subscribe(
+  #   Karafka::CodeReloader.new(
+  #     <%= rails? ? '*Rails.application.reloaders' : 'APP_LOADER' %>
+  #   )
+  # )
+
+  consumer_groups.draw do
+    # topic :example do
+    #   consumer ExampleConsumer
+    # end
+
+    # consumer_group :bigger_group do
+    #   topic :test do
+    #     consumer TestConsumer
+    #   end
+    #
+    #   topic :test2 do
+    #     consumer Test2Consumer
+    #   end
+    # end
+  end
+end
+
+Karafka.monitor.subscribe('app.initialized') do
+  # Put here all the things you want to do after the Karafka framework
+  # initialization
+end
+
+KarafkaApp.boot!
```