karafka 1.0.1 → 1.4.14
- checksums.yaml +5 -5
- checksums.yaml.gz.sig +0 -0
- data/.coditsu/ci.yml +3 -0
- data/.console_irbrc +1 -3
- data/.diffend.yml +3 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.github/workflows/ci.yml +76 -0
- data/.gitignore +1 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +286 -16
- data/CODE_OF_CONDUCT.md +1 -1
- data/CONTRIBUTING.md +6 -7
- data/Gemfile +5 -2
- data/Gemfile.lock +100 -103
- data/README.md +54 -74
- data/bin/karafka +1 -1
- data/certs/mensfeld.pem +26 -0
- data/config/errors.yml +40 -5
- data/docker-compose.yml +17 -0
- data/karafka.gemspec +31 -15
- data/lib/karafka/app.rb +19 -18
- data/lib/karafka/assignment_strategies/round_robin.rb +13 -0
- data/lib/karafka/attributes_map.rb +17 -21
- data/lib/karafka/backends/inline.rb +2 -3
- data/lib/karafka/base_consumer.rb +57 -0
- data/lib/karafka/base_responder.rb +77 -31
- data/lib/karafka/cli/base.rb +4 -4
- data/lib/karafka/cli/console.rb +11 -9
- data/lib/karafka/cli/flow.rb +9 -7
- data/lib/karafka/cli/info.rb +5 -4
- data/lib/karafka/cli/install.rb +32 -8
- data/lib/karafka/cli/missingno.rb +19 -0
- data/lib/karafka/cli/server.rb +18 -16
- data/lib/karafka/cli.rb +10 -2
- data/lib/karafka/code_reloader.rb +67 -0
- data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +71 -22
- data/lib/karafka/connection/batch_delegator.rb +55 -0
- data/lib/karafka/connection/builder.rb +23 -0
- data/lib/karafka/connection/client.rb +120 -0
- data/lib/karafka/connection/listener.rb +39 -26
- data/lib/karafka/connection/message_delegator.rb +36 -0
- data/lib/karafka/consumers/batch_metadata.rb +10 -0
- data/lib/karafka/consumers/callbacks.rb +71 -0
- data/lib/karafka/consumers/includer.rb +64 -0
- data/lib/karafka/consumers/responders.rb +24 -0
- data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
- data/lib/karafka/contracts/config.rb +21 -0
- data/lib/karafka/contracts/consumer_group.rb +211 -0
- data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
- data/lib/karafka/contracts/responder_usage.rb +54 -0
- data/lib/karafka/contracts/server_cli_options.rb +31 -0
- data/lib/karafka/contracts.rb +10 -0
- data/lib/karafka/errors.rb +27 -12
- data/lib/karafka/fetcher.rb +15 -15
- data/lib/karafka/helpers/class_matcher.rb +20 -10
- data/lib/karafka/helpers/config_retriever.rb +3 -3
- data/lib/karafka/helpers/inflector.rb +26 -0
- data/lib/karafka/helpers/multi_delegator.rb +0 -1
- data/lib/karafka/instrumentation/logger.rb +54 -0
- data/lib/karafka/instrumentation/monitor.rb +70 -0
- data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
- data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
- data/lib/karafka/params/batch_metadata.rb +26 -0
- data/lib/karafka/params/builders/batch_metadata.rb +30 -0
- data/lib/karafka/params/builders/params.rb +38 -0
- data/lib/karafka/params/builders/params_batch.rb +25 -0
- data/lib/karafka/params/metadata.rb +20 -0
- data/lib/karafka/params/params.rb +35 -107
- data/lib/karafka/params/params_batch.rb +38 -19
- data/lib/karafka/patches/ruby_kafka.rb +47 -0
- data/lib/karafka/persistence/client.rb +29 -0
- data/lib/karafka/persistence/consumers.rb +45 -0
- data/lib/karafka/persistence/topics.rb +48 -0
- data/lib/karafka/process.rb +6 -9
- data/lib/karafka/responders/builder.rb +15 -14
- data/lib/karafka/responders/topic.rb +14 -9
- data/lib/karafka/routing/builder.rb +38 -9
- data/lib/karafka/routing/consumer_group.rb +6 -4
- data/lib/karafka/routing/consumer_mapper.rb +10 -9
- data/lib/karafka/routing/proxy.rb +10 -1
- data/lib/karafka/routing/router.rb +1 -1
- data/lib/karafka/routing/topic.rb +8 -12
- data/lib/karafka/routing/topic_mapper.rb +16 -18
- data/lib/karafka/serialization/json/deserializer.rb +27 -0
- data/lib/karafka/serialization/json/serializer.rb +31 -0
- data/lib/karafka/server.rb +50 -39
- data/lib/karafka/setup/config.rb +138 -91
- data/lib/karafka/setup/configurators/water_drop.rb +21 -16
- data/lib/karafka/setup/dsl.rb +21 -0
- data/lib/karafka/status.rb +7 -3
- data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.erb} +2 -2
- data/lib/karafka/templates/karafka.rb.erb +92 -0
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +19 -15
- data.tar.gz.sig +0 -0
- metadata +119 -81
- metadata.gz.sig +5 -0
- data/.github/ISSUE_TEMPLATE.md +0 -2
- data/.travis.yml +0 -17
- data/Rakefile +0 -7
- data/lib/karafka/base_controller.rb +0 -117
- data/lib/karafka/connection/messages_consumer.rb +0 -106
- data/lib/karafka/connection/messages_processor.rb +0 -61
- data/lib/karafka/controllers/includer.rb +0 -51
- data/lib/karafka/controllers/responders.rb +0 -19
- data/lib/karafka/loader.rb +0 -29
- data/lib/karafka/logger.rb +0 -53
- data/lib/karafka/monitor.rb +0 -98
- data/lib/karafka/parsers/json.rb +0 -38
- data/lib/karafka/patches/dry_configurable.rb +0 -33
- data/lib/karafka/persistence/controller.rb +0 -23
- data/lib/karafka/schemas/config.rb +0 -31
- data/lib/karafka/schemas/consumer_group.rb +0 -64
- data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
- data/lib/karafka/schemas/responder_usage.rb +0 -38
- data/lib/karafka/schemas/server_cli_options.rb +0 -43
- data/lib/karafka/setup/configurators/base.rb +0 -35
- data/lib/karafka/setup/configurators/celluloid.rb +0 -19
- data/lib/karafka/templates/karafka.rb.example +0 -41
- /data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
data/lib/karafka/assignment_strategies/round_robin.rb
ADDED
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Strategies for Kafka partitions assignments
+  module AssignmentStrategies
+    # Standard RoundRobin strategy
+    class RoundRobin < SimpleDelegator
+      def initialize
+        super(Kafka::RoundRobinAssignmentStrategy.new)
+      end
+    end
+  end
+end
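The new strategy is a thin `SimpleDelegator` around ruby-kafka's round-robin assignor, and the updated attributes map (next diff) proxies an `assignment_strategy` key from the `kafka` config section to the ruby-kafka consumer. A minimal, hypothetical setup sketch; the app class name and broker address are assumptions, not part of this diff:

```ruby
# Hypothetical karafka.rb fragment; assumes karafka ~> 1.4 is loaded.
class KarafkaApp < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    # Proxied to ruby-kafka's consumer via api_adapter[:consumer] (see the next diff)
    config.kafka.assignment_strategy = Karafka::AssignmentStrategies::RoundRobin.new
  end
end
```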
data/lib/karafka/attributes_map.rb
CHANGED
@@ -11,34 +11,34 @@ module Karafka
   module AttributesMap
     class << self
       # What settings should go where in ruby-kafka
+      # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
       # @note All other settings will be passed to Kafka.new method invocation.
       #   All elements in this hash are just edge cases
-
-      def config_adapter
+      def api_adapter
         {
           consumer: %i[
             session_timeout offset_commit_interval offset_commit_threshold
-            offset_retention_time heartbeat_interval
+            offset_retention_time heartbeat_interval fetcher_max_queue_size
+            assignment_strategy
           ],
-
-
-
+          subscribe: %i[start_from_beginning max_bytes_per_partition],
+          consumption: %i[min_bytes max_bytes max_wait_time],
+          pause: %i[pause_timeout pause_max_timeout pause_exponential_backoff],
           # All the options that are under kafka config namespace, but are not used
           # directly with kafka api, but from the Karafka user perspective, they are
           # still related to kafka. They should not be proxied anywhere
-          ignored: %i[reconnect_timeout]
+          ignored: %i[reconnect_timeout automatically_mark_as_consumed]
         }
       end

       # @return [Array<Symbol>] properties that can be set on a per topic level
       def topic
-        (
+        (api_adapter[:subscribe] + %i[
          backend
          name
-
+          deserializer
          responder
-
-          persistent
+          batch_consuming
        ]).uniq
      end

@@ -48,17 +48,13 @@ module Karafka
       # Thanks to this solution, if any new setting is available for ruby-kafka, we just need
       # to add it to our configuration class and it will be handled automatically.
       def consumer_group
-        # @note We don't ignore the
+        # @note We don't ignore the api_adapter[:ignored] values as they should be ignored
         #   only when proxying details go ruby-kafka. We use ignored fields internally in karafka
-        ignored_settings =
-        defined_settings =
-        karafka_settings = %i[
-
-        dynamically_proxied = Karafka::Setup::Config
-          ._settings
-          .find { |s| s.name == :kafka }
-          .value
-          .instance_variable_get('@klass').settings
+        ignored_settings = api_adapter[:subscribe]
+        defined_settings = api_adapter.values.flatten
+        karafka_settings = %i[batch_fetching]
+
+        dynamically_proxied = Karafka::Setup::Config.config.kafka.to_h.keys

         (defined_settings + dynamically_proxied).uniq + karafka_settings - ignored_settings
       end
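The `api_adapter` sections above decide which `kafka.*` settings get forwarded to which ruby-kafka call (consumer construction, `#subscribe`, the consumption loop, pausing), while the `ignored` keys stay internal to Karafka. A rough, hypothetical illustration of that routing idea follows; this is not the gem's actual adapter code, and the config hash is made up:

```ruby
# Assumes karafka ~> 1.4 is loaded; Hash#slice requires Ruby >= 2.5.
kafka_settings = {
  session_timeout: 30,
  min_bytes: 1,
  max_bytes: 10_485_760,
  max_wait_time: 1,
  start_from_beginning: true,
  reconnect_timeout: 5 # listed under :ignored, so never proxied to ruby-kafka
}

consumer_args    = kafka_settings.slice(*Karafka::AttributesMap.api_adapter[:consumer])
consumption_args = kafka_settings.slice(*Karafka::AttributesMap.api_adapter[:consumption])
# consumer_args    => { session_timeout: 30 }
# consumption_args => { min_bytes: 1, max_bytes: 10_485_760, max_wait_time: 1 }
```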
data/lib/karafka/backends/inline.rb
CHANGED
@@ -7,10 +7,9 @@ module Karafka
     module Inline
       private

-      # Executes
+      # Executes consume code immediately (without enqueuing)
       def process
-        Karafka.monitor.
-          perform
+        Karafka.monitor.instrument('backends.inline.process', caller: self) { consume }
       end
     end
   end
data/lib/karafka/base_consumer.rb
ADDED
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+# Karafka module namespace
+module Karafka
+  # Base consumer from which all Karafka consumers should inherit
+  class BaseConsumer
+    extend Forwardable
+
+    # Allows us to mark messages as consumed for non-automatic mode without having
+    # to use consumer client directly. We do this that way, because most of the people should not
+    # mess with the client instance directly (just in case)
+    %i[
+      mark_as_consumed
+      mark_as_consumed!
+      trigger_heartbeat
+      trigger_heartbeat!
+    ].each do |delegated_method_name|
+      def_delegator :client, delegated_method_name
+
+      private delegated_method_name
+    end
+
+    # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
+    attr_reader :topic
+    # @return [Karafka::Params:ParamsBatch] current params batch
+    attr_accessor :params_batch
+
+    # Assigns a topic to a consumer and builds up proper consumer functionalities
+    # so that it can cooperate with the topic settings
+    # @param topic [Karafka::Routing::Topic]
+    def initialize(topic)
+      @topic = topic
+      Consumers::Includer.call(self)
+    end
+
+    # Executes the default consumer flow.
+    def call
+      process
+    end
+
+    private
+
+    # @return [Karafka::Connection::Client] messages consuming client that can be used to
+    #   commit manually offset or pause / stop consumer based on the business logic
+    def client
+      Persistence::Client.read
+    end
+
+    # Method that will perform business logic and on data received from Kafka (it will consume
+    #   the data)
+    # @note This method needs bo be implemented in a subclass. We stub it here as a failover if
+    #   someone forgets about it or makes on with typo
+    def consume
+      raise NotImplementedError, 'Implement this in a subclass'
+    end
+  end
+end
data/lib/karafka/base_responder.rb
CHANGED
@@ -39,7 +39,7 @@ module Karafka
   #
   # @example Multiple times used topic
   #   class Responder < BaseResponder
-  #     topic :required_topic
+  #     topic :required_topic
   #
   #     def respond(data)
   #       data.each do |subset|
@@ -48,6 +48,17 @@ module Karafka
   #       end
   #     end
   #
+  # @example Specify serializer for a topic
+  #   class Responder < BaseResponder
+  #     topic :xml_topic, serializer: MyXMLSerializer
+  #
+  #     def respond(data)
+  #       data.each do |subset|
+  #         respond_to :xml_topic, subset
+  #       end
+  #     end
+  #   end
+  #
   # @example Accept multiple arguments to a respond method
   #   class Responder < BaseResponder
   #     topic :users_actions
@@ -59,26 +70,35 @@ module Karafka
   #     end
   #   end
   class BaseResponder
-    #
-
+    # Responder usage contract
+    CONTRACT = Karafka::Contracts::ResponderUsage.new.freeze

-
+    private_constant :CONTRACT

     class << self
+      # Definitions of all topics that we want to be able to use in this responder should go here
+      attr_accessor :topics
+      # Contract that we can use to control and/or require some additional details upon options
+      # that are being passed to the producer. This can be in particular useful if we want to make
+      # sure that for example partition_key is always present.
+      attr_accessor :options_contract
+
       # Registers a topic as on to which we will be able to respond
       # @param topic_name [Symbol, String] name of topic to which we want to respond
       # @param options [Hash] hash with optional configuration details
       def topic(topic_name, options = {})
+        options[:serializer] ||= Karafka::App.config.serializer
+        options[:registered] = true
         self.topics ||= {}
-        topic_obj = Responders::Topic.new(topic_name, options
+        topic_obj = Responders::Topic.new(topic_name, options)
         self.topics[topic_obj.name] = topic_obj
       end

       # A simple alias for easier standalone responder usage.
-      # Instead of building it with new.call it allows (in case of
+      # Instead of building it with new.call it allows (in case of using JSON serializer)
       # to just run it directly from the class level
       # @param data Anything that we want to respond with
-      # @example Send user data with a responder
+      # @example Send user data with a responder
       #   UsersCreatedResponder.call(@created_user)
       def call(*data)
         # Just in case there were no topics defined for a responder, we initialize with
@@ -88,12 +108,11 @@ module Karafka
       end
     end

+    attr_reader :messages_buffer
+
     # Creates a responder object
-    # @param parser_class [Class] parser class that we can use to generate appropriate string
-    #   or nothing if we want to default to Karafka::Parsers::Json
     # @return [Karafka::BaseResponder] base responder descendant responder
-    def initialize
-      @parser_class = parser_class
+    def initialize
       @messages_buffer = {}
     end

@@ -102,13 +121,14 @@ module Karafka
     # @note We know that validators should be executed also before sending data to topics, however
     #   the implementation gets way more complicated then, that's why we check after everything
     #   was sent using responder
-    # @example Send user data with a responder
+    # @example Send user data with a responder
     #   UsersCreatedResponder.new.call(@created_user)
     # @example Send user data with a responder using non default Parser
     #   UsersCreatedResponder.new(MyParser).call(@created_user)
     def call(*data)
       respond(*data)
-
+      validate_usage!
+      validate_options!
       deliver!
     end

@@ -116,7 +136,7 @@ module Karafka

     # Checks if we met all the topics requirements. It will fail if we didn't send a message to
     #   a registered required topic, etc.
-    def
+    def validate_usage!
       registered_topics = self.class.topics.map do |name, topic|
         topic.to_h.merge!(
           usage_count: messages_buffer[name]&.count || 0
@@ -128,37 +148,48 @@ module Karafka
         topic.to_h.merge!(usage_count: usage.count)
       end

-      result =
+      result = CONTRACT.call(
         registered_topics: registered_topics,
         used_topics: used_topics
       )

       return if result.success?

-      raise Karafka::Errors::
+      raise Karafka::Errors::InvalidResponderUsageError, result.errors.to_h
+    end
+
+    # Checks if we met all the options requirements before sending them to the producer.
+    def validate_options!
+      return true unless self.class.options_contract
+
+      messages_buffer.each_value do |messages_set|
+        messages_set.each do |message_data|
+          result = self.class.options_contract.call(message_data.last)
+          next if result.success?
+
+          raise Karafka::Errors::InvalidResponderMessageOptionsError, result.errors.to_h
+        end
+      end
     end

     # Takes all the messages from the buffer and delivers them one by one
     # @note This method is executed after the validation, so we're sure that
     #   what we send is legit and it will go to a proper topics
     def deliver!
-      messages_buffer.
-
-
-
-
-
-
-        mapped_topic,
-        data,
-        options
-      ).send!
+      messages_buffer.each_value do |data_elements|
+        data_elements.each do |data, options|
+          # We map this topic name, so it will match namespaced/etc topic in Kafka
+          # @note By default will not change topic (if default mapper used)
+          mapped_topic = Karafka::App.config.topic_mapper.outgoing(options[:topic])
+          external_options = options.merge(topic: mapped_topic)
+          producer(options).call(data, external_options)
        end
      end
    end

     # Method that needs to be implemented in a subclass. It should handle responding
     #   on registered topics
+    # @param _data [Object] anything that we want to use to send to Kafka
     # @raise [NotImplementedError] This method needs to be implemented in a subclass
     def respond(*_data)
       raise NotImplementedError, 'Implement this in a subclass'
@@ -168,13 +199,28 @@ module Karafka
     #   as many times as we need. Especially when we have 1:n flow
     # @param topic [Symbol, String] topic to which we want to respond
     # @param data [String, Object] string or object that we want to send
-    # @param options [Hash] options for waterdrop (e.g. partition_key)
+    # @param options [Hash] options for waterdrop (e.g. partition_key).
     # @note Respond to does not accept multiple data arguments.
     def respond_to(topic, data, options = {})
-
+      # We normalize the format to string, as WaterDrop and Ruby-Kafka support only
+      # string topics
+      topic = topic.to_s
+
+      messages_buffer[topic] ||= []
+      messages_buffer[topic] << [
+        self.class.topics[topic].serializer.call(data),
+        options.merge(topic: topic)
+      ]
+    end

-
-
+    # @param options [Hash] options for waterdrop
+    # @return [Class] WaterDrop producer (sync or async based on the settings)
+    def producer(options)
+      if self.class.topics[options[:topic]].async?
+        WaterDrop::AsyncProducer
+      else
+        WaterDrop::SyncProducer
+      end
     end
   end
 end
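Two notable additions here are per-topic serializers and the class-level `options_contract` hook that `#validate_options!` runs against every message's delivery options. A hypothetical sketch of both follows; the responder name, topic, and contract are invented, and the contract only needs to return a result responding to `#success?` and `#errors`, which a `Dry::Validation::Contract` (a dependency Karafka 1.4 already carries for its own contracts) satisfies:

```ruby
# Hypothetical responder; assumes karafka ~> 1.4 with dry-validation available.
class OrderEventsResponder < Karafka::BaseResponder
  # Require a partition_key on every message handed to WaterDrop
  self.options_contract = Class.new(Dry::Validation::Contract) do
    params { required(:partition_key).filled(:string) }
  end.new

  # Serializer defaults to Karafka::App.config.serializer when not given explicitly
  topic :order_events, serializer: Karafka::Serialization::Json::Serializer.new

  def respond(order)
    respond_to :order_events, order, partition_key: order[:id].to_s
  end
end

# OrderEventsResponder.call(id: '1', total: 100) buffers, validates and delivers;
# dropping the partition_key from respond_to would raise
# Karafka::Errors::InvalidResponderMessageOptionsError per #validate_options! above.
```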
data/lib/karafka/cli/base.rb
CHANGED
@@ -43,16 +43,16 @@ module Karafka
        end

        # Allows to set description of a given cli command
-        # @param
-        def desc(
-          @desc ||=
+        # @param args [Array] All the arguments that Thor desc method accepts
+        def desc(*args)
+          @desc ||= args
        end

        # This method will bind a given Cli command into Karafka Cli
        # This method is a wrapper to way Thor defines its commands
        # @param cli_class [Karafka::Cli] Karafka cli_class
        def bind_to(cli_class)
-          cli_class.desc name,
+          cli_class.desc name, *@desc

          (@options || []).each { |option| cli_class.option(*option) }

data/lib/karafka/cli/console.rb
CHANGED
@@ -8,15 +8,17 @@ module Karafka
       desc 'Start the Karafka console (short-cut alias: "c")'
       option aliases: 'c'

-
-
-
-
-
-
-
-
-
+      class << self
+        # @return [String] Console executing command
+        # @example
+        #   Karafka::Cli::Console.command #=> 'KARAFKA_CONSOLE=true bundle exec irb...'
+        def command
+          envs = [
+            "IRBRC='#{Karafka.gem_root}/.console_irbrc'",
+            'KARAFKA_CONSOLE=true'
+          ]
+          "#{envs.join(' ')} bundle exec irb -r #{Karafka.boot_file}"
+        end
       end

       # Start the Karafka console
data/lib/karafka/cli/flow.rb
CHANGED
@@ -11,20 +11,22 @@ module Karafka
       def call
         topics.each do |topic|
           any_topics = !topic.responder&.topics.nil?
+          log_messages = []

           if any_topics
-
+            log_messages << "#{topic.name} =>"

             topic.responder.topics.each_value do |responder_topic|
               features = []
               features << (responder_topic.required? ? 'always' : 'conditionally')
-              features << (responder_topic.multiple_usage? ? 'one or more' : 'exactly once')

-
+              log_messages << format(responder_topic.name, "(#{features.join(', ')})")
             end
           else
-
+            log_messages << "#{topic.name} => (nothing)"
           end
+
+          Karafka.logger.info(log_messages.join("\n"))
         end
       end

@@ -35,11 +37,11 @@ module Karafka
         Karafka::App.consumer_groups.map(&:topics).flatten.sort_by(&:name)
       end

-      #
+      # Formats a given value with label in a nice way
       # @param label [String] label describing value
       # @param value [String] value that should be printed
-      def
-
+      def format(label, value)
+        " - #{label}: #{value}"
      end
    end
  end
data/lib/karafka/cli/info.rb
CHANGED
@@ -12,18 +12,19 @@ module Karafka
        config = Karafka::App.config

        info = [
-          "Karafka
+          "Karafka version: #{Karafka::VERSION}",
+          "Ruby version: #{RUBY_VERSION}",
+          "Ruby-kafka version: #{::Kafka::VERSION}",
          "Application client id: #{config.client_id}",
          "Backend: #{config.backend}",
+          "Batch fetching: #{config.batch_fetching}",
          "Batch consuming: #{config.batch_consuming}",
-          "Batch processing: #{config.batch_processing}",
-          "Number of threads: #{config.concurrency}",
          "Boot file: #{Karafka.boot_file}",
          "Environment: #{Karafka.env}",
          "Kafka seed brokers: #{config.kafka.seed_brokers}"
        ]

-
+        Karafka.logger.info(info.join("\n"))
      end
    end
  end
data/lib/karafka/cli/install.rb
CHANGED
@@ -1,5 +1,7 @@
 # frozen_string_literal: true

+require 'erb'
+
 module Karafka
   # Karafka framework Cli
   class Cli < Thor
@@ -9,21 +11,33 @@ module Karafka

       # Directories created by default
       INSTALL_DIRS = %w[
-        app/
-        app/controllers
+        app/consumers
         app/responders
+        app/workers
         config
+        lib
         log
         tmp/pids
       ].freeze

       # Where should we map proper files from templates
       INSTALL_FILES_MAP = {
-        'karafka.rb.
-        '
-        'application_responder.rb.
+        'karafka.rb.erb' => Karafka.boot_file.basename,
+        'application_consumer.rb.erb' => 'app/consumers/application_consumer.rb',
+        'application_responder.rb.erb' => 'app/responders/application_responder.rb'
       }.freeze

+      # @param args [Array] all the things that Thor CLI accepts
+      def initialize(*args)
+        super
+        dependencies = Bundler::LockfileParser.new(
+          Bundler.read_file(
+            Bundler.default_lockfile
+          )
+        ).dependencies
+        @rails = dependencies.key?('railties') || dependencies.key?('rails')
+      end
+
       # Install all required things for Karafka application in current directory
       def call
         INSTALL_DIRS.each do |dir|
@@ -32,12 +46,22 @@ module Karafka

         INSTALL_FILES_MAP.each do |source, target|
           target = Karafka.root.join(target)
-          next if File.exist?(target)

-
-
+          template = File.read(Karafka.core_root.join("templates/#{source}"))
+          # @todo Replace with the keyword argument version once we don't have to support
+          #   Ruby < 2.6
+          render = ::ERB.new(template, nil, '-').result(binding)
+
+          File.open(target, 'w') { |file| file.write(render) }
         end
       end
+
+      # @return [Boolean] true if we have Rails loaded
+      #   This allows us to generate customized karafka.rb template with some tweaks specific for
+      #   Rails
+      def rails?
+        @rails
+      end
     end
   end
 end
data/lib/karafka/cli/missingno.rb
ADDED
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Karafka
+  class Cli < Thor
+    # Command that gets invoked when no method is provided when running the CLI
+    # It allows us to exit with exit code 1 instead of default 0 to indicate that something
+    # was missing
+    # @see https://github.com/karafka/karafka/issues/619
+    class Missingno < Base
+      desc 'Hidden command that gets invoked when no command is provided', hide: true
+
+      # Prints an error about the lack of command (nothing selected)
+      def call
+        Karafka.logger.error('No command provided')
+        exit 1
+      end
+    end
+  end
+end
data/lib/karafka/cli/server.rb
CHANGED
@@ -5,6 +5,11 @@ module Karafka
   class Cli < Thor
     # Server Karafka Cli action
     class Server < Base
+      # Server config settings contract
+      CONTRACT = Contracts::ServerCliOptions.new.freeze
+
+      private_constant :CONTRACT
+
       desc 'Start the Karafka server (short-cut alias: "s")'
       option aliases: 's'
       option :daemon, default: false, type: :boolean, aliases: :d
@@ -13,31 +18,19 @@ module Karafka

       # Start the Karafka server
       def call
-        validate!
-
-        puts 'Starting Karafka server'
         cli.info

+        validate!
+
         if cli.options[:daemon]
           FileUtils.mkdir_p File.dirname(cli.options[:pid])
-          # For some reason Celluloid spins threads that break forking
-          # Threads are not shutdown immediately so deamonization will stale until
-          # those threads are killed by Celluloid manager (via timeout)
-          # There's nothing initialized here yet, so instead we shutdown celluloid
-          # and run it again when we need (after fork)
-          Celluloid.shutdown
           daemonize
-          Celluloid.boot
         end

         # We assign active topics on a server level, as only server is expected to listen on
         # part of the topics
         Karafka::Server.consumer_groups = cli.options[:consumer_groups]

-        # Remove pidfile on shutdown, just before the server instance is going to be GCed
-        ObjectSpace.define_finalizer(self, proc { send(:clean) })
-
-        # After we fork, we can boot celluloid again
         Karafka::Server.run
       end

@@ -46,9 +39,10 @@ module Karafka
       # Checks the server cli configuration
       # options validations in terms of app setup (topics, pid existence, etc)
       def validate!
-        result =
+        result = CONTRACT.call(cli.options)
         return if result.success?
-
+
+        raise Errors::InvalidConfigurationError, result.errors.to_h
       end

       # Detaches current process into background and writes its pidfile
@@ -58,6 +52,14 @@ module Karafka
           cli.options[:pid],
           'w'
         ) { |file| file.write(::Process.pid) }
+
+        # Remove pidfile on stop, just before the server instance is going to be GCed
+        # We want to delay the moment in which the pidfile is removed as much as we can,
+        # so instead of removing it after the server stops running, we rely on the gc moment
+        # when this object gets removed (it is a bit later), so it is closer to the actual
+        # system process end. We do that, so monitoring and deployment tools that rely on a pid
+        # won't alarm or start new system process up until the current one is finished
+        ObjectSpace.define_finalizer(self, proc { send(:clean) })
       end

       # Removes a pidfile (if exist)
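The `validate!` rewrite reflects the broader 1.0 → 1.4 move from `Karafka::Schemas` to `Karafka::Contracts` (dry-validation contracts): callers invoke the contract, check `result.success?`, and raise with `result.errors.to_h`. A rough sketch of that calling pattern; the option values are invented, and since the real contract also checks the consumer groups against the application's routing, a standalone call like this is only illustrative:

```ruby
# Assumes a booted karafka ~> 1.4 application (Karafka::App configured and routes drawn)
contract = Karafka::Contracts::ServerCliOptions.new

result = contract.call(
  daemon: false,
  pid: 'tmp/pids/karafka.pid',
  consumer_groups: %w[example_group] # must match groups defined in the routing
)

raise Karafka::Errors::InvalidConfigurationError, result.errors.to_h unless result.success?
```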