karafka 1.2.8 → 1.4.0

Files changed (113)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +52 -0
  11. data/.gitignore +1 -0
  12. data/.ruby-version +1 -1
  13. data/CHANGELOG.md +134 -14
  14. data/CODE_OF_CONDUCT.md +1 -1
  15. data/CONTRIBUTING.md +1 -1
  16. data/Gemfile +4 -5
  17. data/Gemfile.lock +92 -81
  18. data/README.md +9 -12
  19. data/bin/karafka +1 -1
  20. data/certs/mensfeld.pem +25 -0
  21. data/config/errors.yml +38 -5
  22. data/docker-compose.yml +17 -0
  23. data/karafka.gemspec +18 -17
  24. data/lib/karafka.rb +10 -16
  25. data/lib/karafka/app.rb +14 -6
  26. data/lib/karafka/attributes_map.rb +5 -10
  27. data/lib/karafka/base_consumer.rb +19 -30
  28. data/lib/karafka/base_responder.rb +45 -27
  29. data/lib/karafka/cli.rb +2 -2
  30. data/lib/karafka/cli/console.rb +11 -9
  31. data/lib/karafka/cli/flow.rb +9 -7
  32. data/lib/karafka/cli/info.rb +4 -2
  33. data/lib/karafka/cli/install.rb +30 -6
  34. data/lib/karafka/cli/server.rb +11 -6
  35. data/lib/karafka/code_reloader.rb +67 -0
  36. data/lib/karafka/connection/api_adapter.rb +22 -9
  37. data/lib/karafka/connection/batch_delegator.rb +55 -0
  38. data/lib/karafka/connection/builder.rb +5 -3
  39. data/lib/karafka/connection/client.rb +31 -31
  40. data/lib/karafka/connection/listener.rb +26 -15
  41. data/lib/karafka/connection/message_delegator.rb +36 -0
  42. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  43. data/lib/karafka/consumers/callbacks.rb +32 -15
  44. data/lib/karafka/consumers/includer.rb +31 -18
  45. data/lib/karafka/consumers/responders.rb +2 -2
  46. data/lib/karafka/contracts.rb +10 -0
  47. data/lib/karafka/contracts/config.rb +21 -0
  48. data/lib/karafka/contracts/consumer_group.rb +206 -0
  49. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  50. data/lib/karafka/contracts/responder_usage.rb +54 -0
  51. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  52. data/lib/karafka/errors.rb +17 -16
  53. data/lib/karafka/fetcher.rb +28 -30
  54. data/lib/karafka/helpers/class_matcher.rb +12 -2
  55. data/lib/karafka/helpers/config_retriever.rb +1 -1
  56. data/lib/karafka/helpers/inflector.rb +26 -0
  57. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  58. data/lib/karafka/instrumentation/logger.rb +9 -6
  59. data/lib/karafka/instrumentation/monitor.rb +15 -9
  60. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  61. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  62. data/lib/karafka/params/batch_metadata.rb +26 -0
  63. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  64. data/lib/karafka/params/builders/params.rb +38 -0
  65. data/lib/karafka/params/builders/params_batch.rb +25 -0
  66. data/lib/karafka/params/metadata.rb +20 -0
  67. data/lib/karafka/params/params.rb +54 -0
  68. data/lib/karafka/params/params_batch.rb +35 -21
  69. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  70. data/lib/karafka/persistence/client.rb +15 -11
  71. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  72. data/lib/karafka/persistence/topics.rb +48 -0
  73. data/lib/karafka/process.rb +0 -2
  74. data/lib/karafka/responders/builder.rb +1 -1
  75. data/lib/karafka/responders/topic.rb +6 -8
  76. data/lib/karafka/routing/builder.rb +36 -8
  77. data/lib/karafka/routing/consumer_group.rb +1 -1
  78. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  79. data/lib/karafka/routing/proxy.rb +10 -1
  80. data/lib/karafka/routing/topic.rb +5 -3
  81. data/lib/karafka/routing/topic_mapper.rb +16 -18
  82. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  83. data/lib/karafka/serialization/json/serializer.rb +31 -0
  84. data/lib/karafka/server.rb +29 -28
  85. data/lib/karafka/setup/config.rb +67 -37
  86. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  87. data/lib/karafka/setup/dsl.rb +0 -1
  88. data/lib/karafka/status.rb +7 -3
  89. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  90. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  91. data/lib/karafka/templates/karafka.rb.erb +92 -0
  92. data/lib/karafka/version.rb +1 -1
  93. metadata +94 -72
  94. metadata.gz.sig +0 -0
  95. data/.travis.yml +0 -21
  96. data/lib/karafka/callbacks.rb +0 -30
  97. data/lib/karafka/callbacks/config.rb +0 -22
  98. data/lib/karafka/callbacks/dsl.rb +0 -16
  99. data/lib/karafka/connection/delegator.rb +0 -46
  100. data/lib/karafka/instrumentation/listener.rb +0 -112
  101. data/lib/karafka/loader.rb +0 -28
  102. data/lib/karafka/params/dsl.rb +0 -156
  103. data/lib/karafka/parsers/json.rb +0 -38
  104. data/lib/karafka/patches/dry_configurable.rb +0 -35
  105. data/lib/karafka/persistence/topic.rb +0 -29
  106. data/lib/karafka/schemas/config.rb +0 -24
  107. data/lib/karafka/schemas/consumer_group.rb +0 -78
  108. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  109. data/lib/karafka/schemas/responder_usage.rb +0 -39
  110. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  111. data/lib/karafka/setup/configurators/base.rb +0 -29
  112. data/lib/karafka/setup/configurators/params.rb +0 -25
  113. data/lib/karafka/templates/karafka.rb.example +0 -54
data/lib/karafka/serialization/json/deserializer.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Module for all supported by default serialization and deserialization ways
+  module Serialization
+    # Namespace for json ser/der
+    module Json
+      # Default Karafka Json deserializer for loading JSON data
+      class Deserializer
+        # @param params [Karafka::Params::Params] Full params object that we want to deserialize
+        # @return [Hash] hash with deserialized JSON data
+        # @example
+        #   params = {
+        #     'payload' => "{\"a\":1}",
+        #     'topic' => 'my-topic',
+        #     'headers' => { 'message_type' => :test }
+        #   }
+        #   Deserializer.call(params) #=> { 'a' => 1 }
+        def call(params)
+          params.raw_payload.nil? ? nil : ::JSON.parse(params.raw_payload)
+        rescue ::JSON::ParserError => e
+          raise ::Karafka::Errors::DeserializationError, e
+        end
+      end
+    end
+  end
+end
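
Side note (not part of the diff): deserializers now receive the whole params object rather than a raw string, so a custom one only needs a #call method. A minimal usage sketch, assuming Karafka is loaded and using an OpenStruct as a stand-in for the params object:

    require 'karafka'
    require 'ostruct'

    # Stand-in for Karafka::Params::Params; only #raw_payload is needed here
    params = OpenStruct.new(raw_payload: '{"a":1}')

    deserializer = Karafka::Serialization::Json::Deserializer.new
    deserializer.call(params)                           #=> { "a" => 1 }
    # Nil payloads (e.g. tombstone messages) pass through untouched
    deserializer.call(OpenStruct.new(raw_payload: nil)) #=> nil
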
data/lib/karafka/serialization/json/serializer.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Module for all supported by default serialization and deserialization ways
+  module Serialization
+    module Json
+      # Default Karafka Json serializer for serializing data
+      class Serializer
+        # @param content [Object] any object that we want to convert to a json string
+        # @return [String] Valid JSON string containing serialized data
+        # @raise [Karafka::Errors::SerializationError] raised when we don't have a way to
+        #   serialize provided data to json
+        # @note When string is passed to this method, we assume that it is already a json
+        #   string and we don't serialize it again. This allows us to serialize data before
+        #   it is being forwarded to this serializer if we want to have a custom (not that simple)
+        #   json serialization
+        #
+        # @example From an ActiveRecord object
+        #   Serializer.call(Repository.first) #=> "{\"repository\":{\"id\":\"04b504e0\"}}"
+        # @example From a string (no changes)
+        #   Serializer.call("{\"a\":1}") #=> "{\"a\":1}"
+        def call(content)
+          return content if content.is_a?(String)
+          return content.to_json if content.respond_to?(:to_json)
+
+          raise Karafka::Errors::SerializationError, content
+        end
+      end
+    end
+  end
+end
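
Similarly for the new serializer (illustrative only; assumes Karafka is loaded): a string passes through as-is, anything responding to #to_json is converted, everything else raises:

    serializer = Karafka::Serialization::Json::Serializer.new

    serializer.call('{"a":1}') #=> '{"a":1}'  (strings are assumed to already be JSON)
    serializer.call(a: 1)      #=> '{"a":1}'  (anything responding to #to_json is converted)
    # Objects handled by neither branch raise Karafka::Errors::SerializationError
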
data/lib/karafka/server.rb
@@ -6,9 +6,14 @@ module Karafka
     @consumer_threads = Concurrent::Array.new
 
     # How long should we sleep between checks on shutting down consumers
-    SUPERVISION_SLEEP = 1
+    SUPERVISION_SLEEP = 0.1
     # What system exit code should we use when we terminated forcefully
     FORCEFUL_EXIT_CODE = 2
+    # This factor allows us to calculate how many times we have to sleep before
+    # a forceful shutdown
+    SUPERVISION_CHECK_FACTOR = (1 / SUPERVISION_SLEEP)
+
+    private_constant :SUPERVISION_SLEEP, :FORCEFUL_EXIT_CODE, :SUPERVISION_CHECK_FACTOR
 
     class << self
       # Set of consuming threads. Each consumer thread contains a single consumer
@@ -22,7 +27,7 @@ module Karafka
         process.on_sigint { stop_supervised }
         process.on_sigquit { stop_supervised }
         process.on_sigterm { stop_supervised }
-        start_supervised
+        run_supervised
       end
 
       # @return [Array<String>] array with names of consumer groups that should be consumed in a
@@ -36,49 +41,45 @@ module Karafka
 
       # @return [Karafka::Process] process wrapper instance used to catch system signal calls
       def process
-        Karafka::Process.instance
+        Karafka::App.config.internal.process
       end
 
       # Starts Karafka with a supervision
       # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to
-      #   finish loop (and it won't happen until we explicitily want to stop)
-      def start_supervised
+      #   finish loop (and it won't happen until we explicitly want to stop)
+      def run_supervised
         process.supervise
         Karafka::App.run!
-        Karafka::Fetcher.call
+        Karafka::App.config.internal.fetcher.call
       end
 
       # Stops Karafka with a supervision (as long as there is a shutdown timeout)
-      # If consumers won't stop in a given timeframe, it will force them to exit
+      # If consumers won't stop in a given time frame, it will force them to exit
       def stop_supervised
-        # Because this is called in the trap context, there is a chance that instrumentation
-        # listeners contain things that aren't allowed from within a trap context.
-        # To bypass that (instead of telling users not to do things they need to)
-        # we spin up a thread to instrument server.stop and server.stop.error and wait until
-        # they're finished
-        Thread.new { Karafka.monitor.instrument('server.stop', {}) }.join
-
         Karafka::App.stop!
-        # If there is no shutdown timeout, we don't exit and wait until all the consumers
-        # had done their work
-        return unless Karafka::App.config.shutdown_timeout
-
-        # If there is a timeout, we check every 1 second (for the timeout period) if all
-        # the threads finished their work and if so, we can just return and normal
-        # shutdown process will take place
-        Karafka::App.config.shutdown_timeout.to_i.times do
-          return if consumer_threads.count(&:alive?).zero?
+
+        # Temporary patch until https://github.com/dry-rb/dry-configurable/issues/93 is fixed
+        timeout = Thread.new { Karafka::App.config.shutdown_timeout }.join.value
+
+        # We check from time to time (for the timeout period) if all the threads finished
+        # their work and if so, we can just return and normal shutdown process will take place
+        (timeout * SUPERVISION_CHECK_FACTOR).to_i.times do
+          if consumer_threads.count(&:alive?).zero?
+            Thread.new { Karafka.monitor.instrument('app.stopped') }.join
+            return
+          end
+
           sleep SUPERVISION_SLEEP
         end
 
-        raise Errors::ForcefulShutdown
-      rescue Errors::ForcefulShutdown => error
-        Thread.new { Karafka.monitor.instrument('server.stop.error', error: error) }.join
+        raise Errors::ForcefulShutdownError
+      rescue Errors::ForcefulShutdownError => e
+        Thread.new { Karafka.monitor.instrument('app.stopping.error', error: e) }.join
        # We're done waiting, lets kill them!
        consumer_threads.each(&:terminate)
 
-        # exit is not within the instrumentation as it would not trigger due to exit
-        Kernel.exit FORCEFUL_EXIT_CODE
+        # exit! is not within the instrumentation as it would not trigger due to exit
+        Kernel.exit! FORCEFUL_EXIT_CODE
      end
    end
  end
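
A quick sanity check on the new supervision constants (a worked example, not taken from the diff): lowering SUPERVISION_SLEEP to 0.1 s and multiplying the timeout by SUPERVISION_CHECK_FACTOR keeps the same overall shutdown budget while reacting to finished consumers up to ten times sooner.

    SUPERVISION_SLEEP = 0.1
    SUPERVISION_CHECK_FACTOR = (1 / SUPERVISION_SLEEP) #=> 10.0

    timeout = 60                              # the default shutdown_timeout
    (timeout * SUPERVISION_CHECK_FACTOR).to_i #=> 600 checks, 0.1 s apart, ~60 s in total
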
data/lib/karafka/setup/config.rb
@@ -8,12 +8,16 @@ module Karafka
     # @note If you want to do some configurations after all of this is done, please add to
     #   karafka/config a proper file (needs to inherit from Karafka::Setup::Configurators::Base
     #   and implement setup method) after that everything will happen automatically
-    # @note This config object allows to create a 1 level nestings (nodes) only. This should be
+    # @note This config object allows to create a 1 level nesting (nodes) only. This should be
     #   enough and will still keep the code simple
     # @see Karafka::Setup::Configurators::Base for more details about configurators api
     class Config
       extend Dry::Configurable
-      extend Callbacks::Config
+
+      # Contract for checking the config provided by the user
+      CONTRACT = Karafka::Contracts::Config.new.freeze
+
+      private_constant :CONTRACT
 
       # Available settings
       # option client_id [String] kafka client_id - used to provide
@@ -22,21 +26,23 @@ module Karafka
       # What backend do we want to use to process messages
       setting :backend, :inline
       # option logger [Instance] logger that we want to use
-      setting :logger, -> { ::Karafka::Instrumentation::Logger.instance }
+      setting :logger, ::Karafka::Instrumentation::Logger.new
       # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
-      setting :monitor, -> { ::Karafka::Instrumentation::Monitor.instance }
+      setting :monitor, ::Karafka::Instrumentation::Monitor.new
       # Mapper used to remap consumer groups ids, so in case users migrate from other tools
       # or they need to maintain their own internal consumer group naming conventions, they
       # can easily do it, replacing the default client_id + consumer name pattern concept
-      setting :consumer_mapper, -> { Routing::ConsumerMapper }
-      # Mapper used to remap names of topics, so we can have a clean internal topic namings
+      setting :consumer_mapper, Routing::ConsumerMapper.new
+      # Mapper used to remap names of topics, so we can have a clean internal topic naming
       # despite using any Kafka provider that uses namespacing, etc
       # It needs to implement two methods:
       #   - #incoming - for remapping from the incoming message to our internal format
       #   - #outgoing - for remapping from internal topic name into outgoing message
-      setting :topic_mapper, -> { Routing::TopicMapper }
-      # Default parser for parsing and unparsing incoming and outgoing data
-      setting :parser, -> { Karafka::Parsers::Json }
+      setting :topic_mapper, Routing::TopicMapper.new
+      # Default serializer for converting whatever we want to send to kafka to json
+      setting :serializer, Karafka::Serialization::Json::Serializer.new
+      # Default deserializer for converting incoming data into ruby objects
+      setting :deserializer, Karafka::Serialization::Json::Deserializer.new
       # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
       # @note Fetching does not equal consuming, see batch_consuming description for details
       setting :batch_fetching, true
@@ -44,29 +50,15 @@ module Karafka
       #   #params_batch will contain params received from Kafka (may be more than 1) so we can
       #   process them in batches
       setting :batch_consuming, false
-      # Should we operate in a single consumer instance across multiple batches of messages,
-      # from the same partition or should we build a new one for each incoming batch.
-      # Disabling that can be useful when you want to create a new consumer instance for each
-      # incoming batch. It's disabled by default, not to create more objects that needed
-      # on each batch
-      setting :persistent, true
       # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
-      #   longer wait for the consumers to stop gracefully but instead we force
-      #   terminate everything.
-      # @note Keep in mind, that if your business logic
-      # @note If set to nil, it won't forcefully shutdown the process at all.
+      #   longer wait for the consumers to stop gracefully but instead we force terminate
+      #   everything.
       setting :shutdown_timeout, 60
-      # option params_base_class [Class] base class for params class initialization
-      #   This can be either a Hash or a HashWithIndifferentAccess depending on your
-      #   requirements. Note, that by using HashWithIndifferentAccess, you remove some of the
-      #   performance in favor of convenience. This can be useful especially if you already use
-      #   it with Rails, etc
-      setting :params_base_class, Hash
 
       # option kafka [Hash] - optional - kafka configuration options
       setting :kafka do
         # Array with at least one host
-        setting :seed_brokers
+        setting :seed_brokers, %w[kafka://127.0.0.1:9092]
         # option session_timeout [Integer] the number of seconds after which, if a client
         #   hasn't contacted the Kafka cluster, it will be kicked out of the group.
         setting :session_timeout, 30
@@ -75,6 +67,11 @@ module Karafka
         #   resolved and also "slows" things down, so it prevents from "eating" up all messages and
         #   consuming them with failed code. Use `nil` if you want to pause forever and never retry.
         setting :pause_timeout, 10
+        # option pause_max_timeout [Integer, nil] the maximum number of seconds to pause for,
+        #   or `nil` if no maximum should be enforced.
+        setting :pause_max_timeout, nil
+        # option pause_exponential_backoff [Boolean] whether to enable exponential backoff
+        setting :pause_exponential_backoff, false
         # option offset_commit_interval [Integer] the interval between offset commits,
         #   in seconds.
         setting :offset_commit_interval, 10
@@ -91,7 +88,7 @@ module Karafka
         # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
         #   are stored for further processing. Note, that each item in the queue represents a
         #   response from a single broker
-        setting :fetcher_max_queue_size, 100
+        setting :fetcher_max_queue_size, 10
         # option max_bytes_per_partition [Integer] the maximum amount of data fetched
         #   from a single partition at a time.
         setting :max_bytes_per_partition, 1_048_576
@@ -127,6 +124,8 @@ module Karafka
         #   will be killed. Note that some Kafka operations are by definition long-running, such as
         #   waiting for new messages to arrive in a partition, so don't set this value too low
         setting :socket_timeout, 30
+        # option partitioner [Object, nil] the partitioner that should be used by the client
+        setting :partitioner, nil
 
         # SSL authentication related settings
         # option ca_cert [String, nil] SSL CA certificate
@@ -136,6 +135,8 @@ module Karafka
         # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
         #   certificate store
         setting :ssl_ca_certs_from_system, false
+        # option ssl_verify_hostname [Boolean] Verify the hostname for client certs
+        setting :ssl_verify_hostname, true
         # option ssl_client_cert [String, nil] SSL client certificate
         setting :ssl_client_cert, nil
         # option ssl_client_cert_key [String, nil] SSL client certificate password
@@ -156,10 +157,39 @@ module Karafka
         setting :sasl_scram_password, nil
         # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
         setting :sasl_scram_mechanism, nil
+        # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
+        setting :sasl_over_ssl, true
+        # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
+        setting :ssl_client_cert_chain, nil
+        # option ssl_client_cert_key_password [String, nil] the password required to read
+        #   the ssl_client_cert_key
+        setting :ssl_client_cert_key_password, nil
+        # @param sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
+        #   implements method token.
+        setting :sasl_oauth_token_provider, nil
+      end
+
+      # Namespace for internal settings that should not be modified
+      # It's a temporary step to "declassify" several things internally before we move to a
+      # non global state
+      setting :internal do
+        # option routing_builder [Karafka::Routing::Builder] builder instance
+        setting :routing_builder, Routing::Builder.new
+        # option status [Karafka::Status] app status
+        setting :status, Status.new
+        # option process [Karafka::Process] process status
+        # @note In the future, we need to have a single process representation for all the karafka
+        #   instances
+        setting :process, Process.new
+        # option fetcher [Karafka::Fetcher] fetcher instance
+        setting :fetcher, Fetcher.new
+        # option configurators [Array<Object>] all configurators that we want to run after
+        #   the setup
+        setting :configurators, [Configurators::WaterDrop.new]
       end
 
       class << self
-        # Configurating method
+        # Configuring method
         # @yield Runs a block of code providing a config singleton instance to it
         # @yieldparam [Karafka::Setup::Config] Karafka config instance
         def setup
@@ -170,22 +200,22 @@ module Karafka
         # Components are in karafka/config directory and are all loaded one by one
         # If you want to configure a next component, please add a proper file to config dir
         def setup_components
-          [
-            Configurators::Params,
-            Configurators::WaterDrop
-          ].each { |klass| klass.setup(config) }
+          config
+            .internal
+            .configurators
+            .each { |configurator| configurator.call(config) }
         end
 
-        # Validate config based on ConfigurationSchema
+        # Validate config based on the config contract
         # @return [Boolean] true if configuration is valid
-        # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
-        #   doesn't match with ConfigurationSchema
+        # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
+        #   doesn't match with the config contract
         def validate!
-          validation_result = Karafka::Schemas::Config.call(config.to_h)
+          validation_result = CONTRACT.call(config.to_h)
 
           return true if validation_result.success?
 
-          raise Errors::InvalidConfiguration, validation_result.errors
+          raise Errors::InvalidConfigurationError, validation_result.errors.to_h
        end
      end
    end
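
For orientation (not part of the diff): lambdas around defaults are gone, the former :parser setting is split into :serializer and :deserializer, validation goes through a dry-validation contract, and seed_brokers now has a default. A minimal setup sketch against the new API; the class name and broker addresses below are illustrative:

    class ExampleApp < Karafka::App
      setup do |config|
        config.client_id = 'example_app'
        # seed_brokers now defaults to %w[kafka://127.0.0.1:9092]; override for real clusters
        config.kafka.seed_brokers = %w[kafka://broker1:9092 kafka://broker2:9092]
        # serializer and deserializer replace the former single :parser setting
        config.serializer = Karafka::Serialization::Json::Serializer.new
        config.deserializer = Karafka::Serialization::Json::Deserializer.new
        # new pause-related knobs
        config.kafka.pause_max_timeout = 120
        config.kafka.pause_exponential_backoff = true
      end
    end
    # Invalid settings now raise Karafka::Errors::InvalidConfigurationError with the
    # contract's error hash instead of the old schema error.
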
data/lib/karafka/setup/configurators/water_drop.rb
@@ -2,26 +2,30 @@
 
 module Karafka
   module Setup
-    class Configurators
+    # Configurators are used to post setup some of the components of Karafka after the core
+    # framework is initialized
+    module Configurators
       # Class responsible for setting up WaterDrop configuration
-      class WaterDrop < Base
+      class WaterDrop
         # Sets up a WaterDrop settings
         # @param config [Karafka::Setup::Config] Config we can user to setup things
         # @note This will also inject Karafka monitor as a default monitor into WaterDrop,
         #   so we have the same monitor within whole Karafka framework (same with logger)
-        def self.setup(config)
+        def call(config)
           ::WaterDrop.setup do |water_config|
             water_config.deliver = true
 
             config.to_h.reject { |k, _v| k == :kafka }.each do |k, v|
               key_assignment = :"#{k}="
               next unless water_config.respond_to?(key_assignment)
+
               water_config.public_send(key_assignment, v)
             end
 
             config.kafka.to_h.each do |k, v|
               key_assignment = :"#{k}="
               next unless water_config.kafka.respond_to?(key_assignment)
+
               water_config.kafka.public_send(key_assignment, v)
             end
           end
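
The configurator change in practice (hypothetical example; MyConfigurator and MyMetrics are not part of the gem): configurators are now plain instances exposing #call(config) and are collected in config.internal.configurators instead of inheriting from the removed Configurators::Base.

    class MyConfigurator
      # Invoked by Karafka::Setup::Config.setup_components after the framework setup
      def call(config)
        MyMetrics.client_id = config.client_id
      end
    end

    # Registered alongside the built-in WaterDrop configurator
    Karafka::App.config.internal.configurators << MyConfigurator.new
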
data/lib/karafka/setup/dsl.rb
@@ -10,7 +10,6 @@ module Karafka
      # @param [Block] block configuration block
      def setup(&block)
        Setup::Config.setup(&block)
-        initialize!
      end
 
      # @return [Karafka::Config] config instance
data/lib/karafka/status.rb
@@ -3,15 +3,16 @@
 module Karafka
   # App status monitor
   class Status
-    include Singleton
-
     # Available states and their transitions
     STATES = {
       initializing: :initialize!,
+      initialized: :initialized!,
       running: :run!,
-      stopped: :stop!
+      stopping: :stop!
     }.freeze
 
+    private_constant :STATES
+
     STATES.each do |state, transition|
       define_method :"#{state}?" do
         @status == state
@@ -19,6 +20,9 @@ module Karafka
 
       define_method transition do
         @status = state
+        # Trap context disallows to run certain things that we instrument
+        # so the state changes are executed from a separate thread
+        Thread.new { Karafka.monitor.instrument("app.#{state}") }.join
       end
     end
   end
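
Because every state transition now publishes an app.<state> event from a separate thread (to stay safe in trap context), lifecycle hooks can simply subscribe to the monitor. A small sketch, using event names derived from the STATES map above:

    Karafka.monitor.subscribe('app.initialized') do |_event|
      # runs once the framework has finished initializing
    end

    Karafka.monitor.subscribe('app.stopping') do |_event|
      # runs when shutdown begins, e.g. to flush buffers or close connections
    end
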
data/lib/karafka/templates/application_consumer.rb.erb
@@ -3,4 +3,5 @@
 # Application consumer from which all Karafka consumers should inherit
 # You can rename it if it would conflict with your current code base (in case you're integrating
 # Karafka with other frameworks)
-ApplicationConsumer = Class.new(Karafka::BaseConsumer)
+class ApplicationConsumer < Karafka::BaseConsumer
+end
data/lib/karafka/templates/karafka.rb.erb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+<% if rails? -%>
+ENV['RAILS_ENV'] ||= 'development'
+ENV['KARAFKA_ENV'] = ENV['RAILS_ENV']
+require ::File.expand_path('../config/environment', __FILE__)
+Rails.application.eager_load!
+
+# This lines will make Karafka print to stdout like puma or unicorn
+if Rails.env.development?
+  Rails.logger.extend(
+    ActiveSupport::Logger.broadcast(
+      ActiveSupport::Logger.new($stdout)
+    )
+  )
+end
+<% else -%>
+# This file is auto-generated during the install process.
+# If by any chance you've wanted a setup for Rails app, either run the `karafka:install`
+# command again or refer to the install templates available in the source codes
+
+ENV['RACK_ENV'] ||= 'development'
+ENV['KARAFKA_ENV'] ||= ENV['RACK_ENV']
+Bundler.require(:default, ENV['KARAFKA_ENV'])
+
+# Zeitwerk custom loader for loading the app components before the whole
+# Karafka framework configuration
+APP_LOADER = Zeitwerk::Loader.new
+APP_LOADER.enable_reloading
+
+%w[
+  lib
+  app/consumers
+  app/responders
+  app/workers
+].each(&APP_LOADER.method(:push_dir))
+
+APP_LOADER.setup
+APP_LOADER.eager_load
+<% end -%>
+
+class KarafkaApp < Karafka::App
+  setup do |config|
+    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
+    config.client_id = 'example_app'
+<% if rails? -%>
+    config.logger = Rails.logger
+<% end -%>
+  end
+
+  # Comment out this part if you are not using instrumentation and/or you are not
+  # interested in logging events for certain environments. Since instrumentation
+  # notifications add extra boilerplate, if you want to achieve max performance,
+  # listen to only what you really need for given environment.
+  Karafka.monitor.subscribe(WaterDrop::Instrumentation::StdoutListener.new)
+  Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)
+  Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)
+
+  # Uncomment that in order to achieve code reload in development mode
+  # Be aware, that this might have some side-effects. Please refer to the wiki
+  # for more details on benefits and downsides of the code reload in the
+  # development mode
+  #
+  # Karafka.monitor.subscribe(
+  #   Karafka::CodeReloader.new(
+  #     <%= rails? ? '*Rails.application.reloaders' : 'APP_LOADER' %>
+  #   )
+  # )
+
+  consumer_groups.draw do
+    # topic :example do
+    #   consumer ExampleConsumer
+    # end
+
+    # consumer_group :bigger_group do
+    #   topic :test do
+    #     consumer TestConsumer
+    #   end
+    #
+    #   topic :test2 do
+    #     consumer Test2Consumer
+    #   end
+    # end
+  end
+end
+
+Karafka.monitor.subscribe('app.initialized') do
+  # Put here all the things you want to do after the Karafka framework
+  # initialization
+end
+
+KarafkaApp.boot!