karafka 1.1.0 → 1.3.0

Files changed (114)
  1. checksums.yaml +5 -5
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/.travis.yml +35 -16
  12. data/CHANGELOG.md +151 -2
  13. data/CONTRIBUTING.md +6 -7
  14. data/Gemfile +3 -3
  15. data/Gemfile.lock +96 -70
  16. data/README.md +29 -23
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +25 -0
  19. data/config/errors.yml +38 -5
  20. data/karafka.gemspec +19 -10
  21. data/lib/karafka.rb +15 -12
  22. data/lib/karafka/app.rb +19 -18
  23. data/lib/karafka/attributes_map.rb +15 -14
  24. data/lib/karafka/backends/inline.rb +1 -2
  25. data/lib/karafka/base_consumer.rb +57 -0
  26. data/lib/karafka/base_responder.rb +72 -31
  27. data/lib/karafka/cli.rb +1 -1
  28. data/lib/karafka/cli/console.rb +11 -9
  29. data/lib/karafka/cli/flow.rb +0 -1
  30. data/lib/karafka/cli/info.rb +3 -1
  31. data/lib/karafka/cli/install.rb +29 -8
  32. data/lib/karafka/cli/server.rb +11 -7
  33. data/lib/karafka/code_reloader.rb +67 -0
  34. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +67 -24
  35. data/lib/karafka/connection/batch_delegator.rb +51 -0
  36. data/lib/karafka/connection/builder.rb +16 -0
  37. data/lib/karafka/connection/client.rb +117 -0
  38. data/lib/karafka/connection/listener.rb +37 -17
  39. data/lib/karafka/connection/message_delegator.rb +36 -0
  40. data/lib/karafka/consumers/callbacks.rb +71 -0
  41. data/lib/karafka/consumers/includer.rb +63 -0
  42. data/lib/karafka/consumers/metadata.rb +10 -0
  43. data/lib/karafka/consumers/responders.rb +24 -0
  44. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  45. data/lib/karafka/contracts.rb +10 -0
  46. data/lib/karafka/contracts/config.rb +21 -0
  47. data/lib/karafka/contracts/consumer_group.rb +206 -0
  48. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  49. data/lib/karafka/contracts/responder_usage.rb +54 -0
  50. data/lib/karafka/contracts/server_cli_options.rb +29 -0
  51. data/lib/karafka/errors.rb +23 -15
  52. data/lib/karafka/fetcher.rb +6 -12
  53. data/lib/karafka/helpers/class_matcher.rb +19 -9
  54. data/lib/karafka/helpers/config_retriever.rb +3 -3
  55. data/lib/karafka/helpers/inflector.rb +26 -0
  56. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  57. data/lib/karafka/instrumentation/logger.rb +57 -0
  58. data/lib/karafka/instrumentation/monitor.rb +70 -0
  59. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  60. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  61. data/lib/karafka/params/builders/metadata.rb +33 -0
  62. data/lib/karafka/params/builders/params.rb +36 -0
  63. data/lib/karafka/params/builders/params_batch.rb +25 -0
  64. data/lib/karafka/params/metadata.rb +35 -0
  65. data/lib/karafka/params/params.rb +35 -95
  66. data/lib/karafka/params/params_batch.rb +38 -18
  67. data/lib/karafka/patches/ruby_kafka.rb +25 -12
  68. data/lib/karafka/persistence/client.rb +29 -0
  69. data/lib/karafka/persistence/consumers.rb +45 -0
  70. data/lib/karafka/persistence/topics.rb +48 -0
  71. data/lib/karafka/process.rb +5 -8
  72. data/lib/karafka/responders/builder.rb +15 -14
  73. data/lib/karafka/responders/topic.rb +6 -8
  74. data/lib/karafka/routing/builder.rb +37 -9
  75. data/lib/karafka/routing/consumer_group.rb +1 -1
  76. data/lib/karafka/routing/consumer_mapper.rb +10 -9
  77. data/lib/karafka/routing/proxy.rb +10 -1
  78. data/lib/karafka/routing/router.rb +1 -1
  79. data/lib/karafka/routing/topic.rb +8 -12
  80. data/lib/karafka/routing/topic_mapper.rb +16 -18
  81. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  82. data/lib/karafka/serialization/json/serializer.rb +31 -0
  83. data/lib/karafka/server.rb +45 -24
  84. data/lib/karafka/setup/config.rb +95 -37
  85. data/lib/karafka/setup/configurators/water_drop.rb +12 -5
  86. data/lib/karafka/setup/dsl.rb +21 -0
  87. data/lib/karafka/status.rb +7 -3
  88. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.erb} +2 -2
  89. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  90. data/lib/karafka/templates/karafka.rb.erb +92 -0
  91. data/lib/karafka/version.rb +1 -1
  92. metadata +126 -57
  93. metadata.gz.sig +0 -0
  94. data/.github/ISSUE_TEMPLATE.md +0 -2
  95. data/lib/karafka/base_controller.rb +0 -60
  96. data/lib/karafka/connection/consumer.rb +0 -121
  97. data/lib/karafka/connection/processor.rb +0 -61
  98. data/lib/karafka/controllers/callbacks.rb +0 -54
  99. data/lib/karafka/controllers/includer.rb +0 -51
  100. data/lib/karafka/controllers/responders.rb +0 -19
  101. data/lib/karafka/loader.rb +0 -29
  102. data/lib/karafka/logger.rb +0 -53
  103. data/lib/karafka/monitor.rb +0 -98
  104. data/lib/karafka/parsers/json.rb +0 -38
  105. data/lib/karafka/patches/dry_configurable.rb +0 -31
  106. data/lib/karafka/persistence/consumer.rb +0 -25
  107. data/lib/karafka/persistence/controller.rb +0 -38
  108. data/lib/karafka/schemas/config.rb +0 -21
  109. data/lib/karafka/schemas/consumer_group.rb +0 -65
  110. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  111. data/lib/karafka/schemas/responder_usage.rb +0 -39
  112. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  113. data/lib/karafka/setup/configurators/base.rb +0 -35
  114. data/lib/karafka/templates/karafka.rb.example +0 -41
data/lib/karafka/base_consumer.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+# Karafka module namespace
+module Karafka
+  # Base consumer from which all Karafka consumers should inherit
+  class BaseConsumer
+    extend Forwardable
+
+    # Allows us to mark messages as consumed for non-automatic mode without having
+    # to use the consumer client directly. We do it that way, because most people should not
+    # mess with the client instance directly (just in case)
+    %i[
+      mark_as_consumed
+      mark_as_consumed!
+      trigger_heartbeat
+      trigger_heartbeat!
+    ].each do |delegated_method_name|
+      def_delegator :client, delegated_method_name
+
+      private delegated_method_name
+    end
+
+    # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
+    attr_reader :topic
+    # @return [Karafka::Params::ParamsBatch] current params batch
+    attr_accessor :params_batch
+
+    # Assigns a topic to a consumer and builds up proper consumer functionalities
+    #   so that it can cooperate with the topic settings
+    # @param topic [Karafka::Routing::Topic]
+    def initialize(topic)
+      @topic = topic
+      Consumers::Includer.call(self)
+    end
+
+    # Executes the default consumer flow.
+    def call
+      process
+    end
+
+    private
+
+    # @return [Karafka::Connection::Client] messages consuming client that can be used to
+    #   commit offsets manually or pause / stop the consumer based on the business logic
+    def client
+      Persistence::Client.read
+    end
+
+    # Method that will perform business logic on data received from Kafka (it will consume
+    #   the data)
+    # @note This method needs to be implemented in a subclass. We stub it here as a failover
+    #   if someone forgets about it or makes a typo
+    def consume
+      raise NotImplementedError, 'Implement this in a subclass'
+    end
+  end
+end
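This new base class replaces Karafka::BaseController (removed in entry 95 of the file list). An application consumer inherits from it and implements #consume; a minimal sketch, with the consumer name and payload handling being illustrative only:

    # Hypothetical consumer built on top of the new BaseConsumer API
    class EventsConsumer < Karafka::BaseConsumer
      def consume
        # params_batch is assigned by the framework before #consume runs
        params_batch.each do |params|
          puts params
        end
      end
    end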
data/lib/karafka/base_responder.rb
@@ -39,7 +39,7 @@ module Karafka
   #
   # @example Multiple times used topic
   #   class Responder < BaseResponder
-  #     topic :required_topic, multiple_usage: true
+  #     topic :required_topic
   #
   #     def respond(data)
   #       data.each do |subset|
@@ -48,6 +48,17 @@ module Karafka
   #       end
   #     end
   #
+  # @example Specify serializer for a topic
+  #   class Responder < BaseResponder
+  #     topic :xml_topic, serializer: MyXMLSerializer
+  #
+  #     def respond(data)
+  #       data.each do |subset|
+  #         respond_to :xml_topic, subset
+  #       end
+  #     end
+  #   end
+  #
   # @example Accept multiple arguments to a respond method
   #   class Responder < BaseResponder
   #     topic :users_actions
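As the respond_to hunk further down shows, a serializer registered this way only has to respond to #call and return a string (the default comes from Karafka::App.config.serializer). A sketch of the MyXMLSerializer referenced in the example above, where the #to_xml call is an assumption about the payload:

    # Hypothetical serializer; the only required interface is #call returning a String
    class MyXMLSerializer
      def call(data)
        data.to_xml # assumes the payload object implements #to_xml
      end
    end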
@@ -59,26 +70,35 @@ module Karafka
   #       end
   #     end
   class BaseResponder
-    # Definitions of all topics that we want to be able to use in this responder should go here
-    class_attribute :topics
+    # Responder usage contract
+    CONTRACT = Karafka::Contracts::ResponderUsage.new.freeze

-    attr_reader :messages_buffer
+    private_constant :CONTRACT

     class << self
+      # Definitions of all topics that we want to be able to use in this responder should go here
+      attr_accessor :topics
+      # Contract that we can use to control and/or require some additional details upon options
+      # that are being passed to the producer. This can be particularly useful if we want to
+      # make sure that for example partition_key is always present.
+      attr_accessor :options_contract
+
       # Registers a topic as one to which we will be able to respond
       # @param topic_name [Symbol, String] name of topic to which we want to respond
       # @param options [Hash] hash with optional configuration details
       def topic(topic_name, options = {})
+        options[:serializer] ||= Karafka::App.config.serializer
+        options[:registered] = true
         self.topics ||= {}
-        topic_obj = Responders::Topic.new(topic_name, options.merge(registered: true))
+        topic_obj = Responders::Topic.new(topic_name, options)
         self.topics[topic_obj.name] = topic_obj
       end

       # A simple alias for easier standalone responder usage.
-      # Instead of building it with new.call it allows (in case of usin JSON parser)
+      # Instead of building it with new.call it allows (in case of using JSON serializer)
       # to just run it directly from the class level
       # @param data Anything that we want to respond with
-      # @example Send user data with a responder (uses default Karafka::Parsers::Json parser)
+      # @example Send user data with a responder
       #   UsersCreatedResponder.call(@created_user)
       def call(*data)
         # Just in case there were no topics defined for a responder, we initialize with
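The new options_contract accessor is checked by validate_options! (see the hunk below): each buffered message's options hash is run through the contract before delivery. A minimal dry-validation sketch enforcing the partition_key mentioned in the comment; the contract and responder names are illustrative:

    # Hypothetical contract requiring a partition_key on every outgoing message
    class PartitionKeyContract < Dry::Validation::Contract
      params do
        required(:partition_key).filled(:string)
      end
    end

    class UsersCreatedResponder < Karafka::BaseResponder
      self.options_contract = PartitionKeyContract.new

      topic :users_created

      def respond(user)
        respond_to :users_created, user, partition_key: user[:id].to_s
      end
    end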
@@ -88,12 +108,11 @@ module Karafka
       end
     end

+    attr_reader :messages_buffer
+
     # Creates a responder object
-    # @param parser_class [Class] parser class that we can use to generate appropriate string
-    #   or nothing if we want to default to Karafka::Parsers::Json
     # @return [Karafka::BaseResponder] base responder descendant responder
-    def initialize(parser_class = Karafka::Parsers::Json)
-      @parser_class = parser_class
+    def initialize
       @messages_buffer = {}
     end

@@ -102,13 +121,14 @@ module Karafka
     # @note We know that validators should be executed also before sending data to topics, however
     #   the implementation gets way more complicated then, that's why we check after everything
     #   was sent using responder
-    # @example Send user data with a responder (uses default Karafka::Parsers::Json parser)
+    # @example Send user data with a responder
     #   UsersCreatedResponder.new.call(@created_user)
     # @example Send user data with a responder using non default Parser
     #   UsersCreatedResponder.new(MyParser).call(@created_user)
     def call(*data)
       respond(*data)
-      validate!
+      validate_usage!
+      validate_options!
       deliver!
     end

@@ -116,7 +136,7 @@ module Karafka

     # Checks if we met all the topics requirements. It will fail if we didn't send a message to
     #   a registered required topic, etc.
-    def validate!
+    def validate_usage!
       registered_topics = self.class.topics.map do |name, topic|
         topic.to_h.merge!(
           usage_count: messages_buffer[name]&.count || 0
@@ -128,36 +148,48 @@ module Karafka
         topic.to_h.merge!(usage_count: usage.count)
       end

-      result = Karafka::Schemas::ResponderUsage.call(
+      result = CONTRACT.call(
         registered_topics: registered_topics,
         used_topics: used_topics
       )

       return if result.success?

-      raise Karafka::Errors::InvalidResponderUsage, result.errors
+      raise Karafka::Errors::InvalidResponderUsageError, result.errors.to_h
+    end
+
+    # Checks if we met all the options requirements before sending them to the producer.
+    def validate_options!
+      return true unless self.class.options_contract
+
+      messages_buffer.each_value do |messages_set|
+        messages_set.each do |message_data|
+          result = self.class.options_contract.call(message_data.last)
+          next if result.success?
+
+          raise Karafka::Errors::InvalidResponderMessageOptionsError, result.errors.to_h
+        end
+      end
     end

     # Takes all the messages from the buffer and delivers them one by one
     # @note This method is executed after the validation, so we're sure that
     #   what we send is legit and it will go to proper topics
     def deliver!
-      messages_buffer.each do |topic, data_elements|
-        # We map this topic name, so it will match namespaced/etc topic in Kafka
-        # @note By default will not change topic (if default mapper used)
-        mapped_topic = Karafka::App.config.topic_mapper.outgoing(topic)
-
+      messages_buffer.each_value do |data_elements|
         data_elements.each do |data, options|
-          producer(options).call(
-            data,
-            options.merge(topic: mapped_topic)
-          )
+          # We map this topic name, so it will match namespaced/etc topic in Kafka
+          # @note By default will not change topic (if default mapper used)
+          mapped_topic = Karafka::App.config.topic_mapper.outgoing(options[:topic])
+          external_options = options.merge(topic: mapped_topic)
+          producer(options).call(data, external_options)
         end
       end
     end

     # Method that needs to be implemented in a subclass. It should handle responding
     #   on registered topics
+    # @param _data [Object] anything that we want to send to Kafka
     # @raise [NotImplementedError] This method needs to be implemented in a subclass
     def respond(*_data)
       raise NotImplementedError, 'Implement this in a subclass'
@@ -167,19 +199,28 @@ module Karafka
     #   as many times as we need. Especially when we have 1:n flow
     # @param topic [Symbol, String] topic to which we want to respond
     # @param data [String, Object] string or object that we want to send
-    # @param options [Hash] options for waterdrop (e.g. partition_key)
+    # @param options [Hash] options for waterdrop (e.g. partition_key).
     # @note Respond to does not accept multiple data arguments.
     def respond_to(topic, data, options = {})
-      Karafka.monitor.notice(self.class, topic: topic, data: data, options: options)
-
-      messages_buffer[topic.to_s] ||= []
-      messages_buffer[topic.to_s] << [@parser_class.generate(data), options]
+      # We normalize the format to string, as WaterDrop and Ruby-Kafka support only
+      # string topics
+      topic = topic.to_s
+
+      messages_buffer[topic] ||= []
+      messages_buffer[topic] << [
+        self.class.topics[topic].serializer.call(data),
+        options.merge(topic: topic)
+      ]
     end

     # @param options [Hash] options for waterdrop
     # @return [Class] WaterDrop producer (sync or async based on the settings)
     def producer(options)
-      options[:async] ? WaterDrop::AsyncProducer : WaterDrop::SyncProducer
+      if self.class.topics[options[:topic]].async?
+        WaterDrop::AsyncProducer
+      else
+        WaterDrop::SyncProducer
+      end
     end
   end
 end
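Sync vs async delivery is now decided per registered topic (via Topic#async?) rather than per respond_to call. Assuming the flag is passed at registration time, usage would look like this sketch:

    # Hypothetical responder registering a topic for asynchronous delivery
    class EventsResponder < Karafka::BaseResponder
      topic :events, async: true

      def respond(event)
        respond_to :events, event
      end
    end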
data/.console_irbrc
@@ -37,7 +37,7 @@ end
 # This is kinda trick - since we don't have an autoload and other magic stuff
 # like Rails does, so instead this method allows us to replace currently running
 # console with a new one via Kernel.exec. It will start console with new code loaded
-# Yes we know that it is not turbofast, however it is turbo convinient and small
+# Yes, we know that it is not turbo fast, however it is turbo convenient and small
 #
 # Also - the KARAFKA_CONSOLE is used to detect that we're executing the irb session
 # so this method is only available when the Karafka console is running
data/lib/karafka/cli/console.rb
@@ -8,15 +8,17 @@ module Karafka
       desc 'Start the Karafka console (short-cut alias: "c")'
       option aliases: 'c'

-      # @return [String] Console executing command
-      # @example
-      #   Karafka::Cli::Console.command #=> 'KARAFKA_CONSOLE=true bundle exec irb...'
-      def self.command
-        envs = [
-          "IRBRC='#{Karafka.gem_root}/.console_irbrc'",
-          'KARAFKA_CONSOLE=true'
-        ]
-        "#{envs.join(' ')} bundle exec irb"
+      class << self
+        # @return [String] Console executing command
+        # @example
+        #   Karafka::Cli::Console.command #=> 'KARAFKA_CONSOLE=true bundle exec irb...'
+        def command
+          envs = [
+            "IRBRC='#{Karafka.gem_root}/.console_irbrc'",
+            'KARAFKA_CONSOLE=true'
+          ]
+          "#{envs.join(' ')} bundle exec irb -r #{Karafka.boot_file}"
+        end
       end

       # Start the Karafka console
data/lib/karafka/cli/flow.rb
@@ -18,7 +18,6 @@ module Karafka
         topic.responder.topics.each_value do |responder_topic|
           features = []
           features << (responder_topic.required? ? 'always' : 'conditionally')
-          features << (responder_topic.multiple_usage? ? 'one or more' : 'exactly once')

           print responder_topic.name, "(#{features.join(', ')})"
         end
data/lib/karafka/cli/info.rb
@@ -12,7 +12,9 @@ module Karafka
         config = Karafka::App.config

         info = [
-          "Karafka framework version: #{Karafka::VERSION}",
+          "Karafka version: #{Karafka::VERSION}",
+          "Ruby version: #{RUBY_VERSION}",
+          "Ruby-kafka version: #{::Kafka::VERSION}",
           "Application client id: #{config.client_id}",
           "Backend: #{config.backend}",
           "Batch fetching: #{config.batch_fetching}",
data/lib/karafka/cli/install.rb
@@ -1,5 +1,7 @@
 # frozen_string_literal: true

+require 'erb'
+
 module Karafka
   # Karafka framework Cli
   class Cli < Thor
@@ -9,8 +11,7 @@ module Karafka

       # Directories created by default
       INSTALL_DIRS = %w[
-        app/models
-        app/controllers
+        app/consumers
         app/responders
         config
         log
@@ -19,11 +20,21 @@ module Karafka

       # Where should we map proper files from templates
       INSTALL_FILES_MAP = {
-        'karafka.rb.example' => Karafka.boot_file.basename,
-        'application_controller.rb.example' => 'app/controllers/application_controller.rb',
-        'application_responder.rb.example' => 'app/responders/application_responder.rb'
+        'karafka.rb.erb' => Karafka.boot_file.basename,
+        'application_consumer.rb.erb' => 'app/consumers/application_consumer.rb',
+        'application_responder.rb.erb' => 'app/responders/application_responder.rb'
       }.freeze

+      # @param args [Array] all the things that Thor CLI accepts
+      def initialize(*args)
+        super
+        @rails = Bundler::LockfileParser.new(
+          Bundler.read_file(
+            Bundler.default_lockfile
+          )
+        ).dependencies.key?('rails')
+      end
+
       # Install all required things for Karafka application in current directory
       def call
         INSTALL_DIRS.each do |dir|
@@ -32,12 +43,22 @@ module Karafka

         INSTALL_FILES_MAP.each do |source, target|
           target = Karafka.root.join(target)
-          next if File.exist?(target)

-          source = Karafka.core_root.join("templates/#{source}")
-          FileUtils.cp_r(source, target)
+          template = File.read(Karafka.core_root.join("templates/#{source}"))
+          # @todo Replace with the keyword argument version once we don't have to support
+          #   Ruby < 2.6
+          render = ::ERB.new(template, nil, '-').result(binding)
+
+          File.open(target, 'w') { |file| file.write(render) }
         end
       end
+
+      # @return [Boolean] true if we have Rails loaded
+      #   This allows us to generate a customized karafka.rb template with some tweaks
+      #   specific for Rails
+      def rails?
+        @rails
+      end
     end
   end
 end
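The @todo above refers to ERB's API change: the positional safe_level and trim_mode arguments were deprecated in Ruby 2.6 in favor of keyword arguments, so the eventual replacement should read:

    # Keyword-argument form, available from Ruby 2.6 onwards
    render = ::ERB.new(template, trim_mode: '-').result(binding)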
data/lib/karafka/cli/server.rb
@@ -5,6 +5,11 @@ module Karafka
   class Cli < Thor
     # Server Karafka Cli action
     class Server < Base
+      # Server config settings contract
+      CONTRACT = Contracts::ServerCliOptions.new.freeze
+
+      private_constant :CONTRACT
+
       desc 'Start the Karafka server (short-cut alias: "s")'
       option aliases: 's'
       option :daemon, default: false, type: :boolean, aliases: :d
@@ -13,11 +18,10 @@ module Karafka

       # Start the Karafka server
       def call
-        validate!
-
-        puts 'Starting Karafka server'
         cli.info

+        validate!
+
         if cli.options[:daemon]
           FileUtils.mkdir_p File.dirname(cli.options[:pid])
           daemonize
@@ -31,11 +35,10 @@ module Karafka
         # We want to delay the moment in which the pidfile is removed as much as we can,
         # so instead of removing it after the server stops running, we rely on the gc moment
         # when this object gets removed (it is a bit later), so it is closer to the actual
-        # system process end. We do that, so monitoring and deployment tools that rely on pids
+        # system process end. We do that, so monitoring and deployment tools that rely on a pid
         # won't alarm or start new system process up until the current one is finished
         ObjectSpace.define_finalizer(self, proc { send(:clean) })

-        # After we fork, we can boot celluloid again
         Karafka::Server.run
       end

@@ -44,9 +47,10 @@ module Karafka

       # Checks the server cli configuration
       # options validations in terms of app setup (topics, pid existence, etc)
       def validate!
-        result = Schemas::ServerCliOptions.call(cli.options)
+        result = CONTRACT.call(cli.options)
         return if result.success?
-        raise Errors::InvalidConfiguration, result.errors
+
+        raise Errors::InvalidConfigurationError, result.errors.to_h
       end

       # Detaches current process into background and writes its pidfile
@@ -0,0 +1,67 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ # Special type of a listener, that is not an instrumentation one, but one that triggers
5
+ # code reload in the development mode after each fetched batch (or message)
6
+ #
7
+ # Please refer to the development code reload sections for details on the benefits and downsides
8
+ # of the in-process code reloading
9
+ class CodeReloader
10
+ # This mutex is needed as we might have an application that has multiple consumer groups
11
+ # running in separate threads and we should not trigger reload before fully reloading the app
12
+ # in previous thread
13
+ MUTEX = Mutex.new
14
+
15
+ private_constant :MUTEX
16
+
17
+ # @param reloaders [Array<Object>] any code loaders that we use in this app. Whether it is
18
+ # the Rails loader, Zeitwerk or anything else that allows reloading triggering
19
+ # @param block [Proc] yields given block just before reloading. This can be used to hook custom
20
+ # reloading stuff, that ain't reloaders (for example for resetting dry-events registry)
21
+ def initialize(*reloaders, &block)
22
+ @reloaders = reloaders
23
+ @block = block
24
+ end
25
+
26
+ # Binds to the instrumentation events and triggers reload
27
+ # @param _event [Dry::Event] empty dry event
28
+ # @note Since we de-register all the user defined objects and redraw routes, it means that
29
+ # we won't be able to do a multi-batch buffering in the development mode as each of the
30
+ # batches will be buffered on a newly created "per fetch" instance.
31
+ def on_connection_listener_fetch_loop(_event)
32
+ reload
33
+ end
34
+
35
+ private
36
+
37
+ # Triggers reload of both standard and Rails reloaders as well as expires all internals of
38
+ # Karafka, so it can be rediscovered and rebuilt
39
+ def reload
40
+ MUTEX.synchronize do
41
+ if @reloaders[0].respond_to?(:execute)
42
+ reload_with_rails
43
+ else
44
+ reload_without_rails
45
+ end
46
+ end
47
+ end
48
+
49
+ # Rails reloading procedure
50
+ def reload_with_rails
51
+ updatable = @reloaders.select(&:updated?)
52
+
53
+ return if updatable.empty?
54
+
55
+ updatable.each(&:execute)
56
+ @block&.call
57
+ Karafka::App.reload
58
+ end
59
+
60
+ # Zeitwerk and other reloaders
61
+ def reload_without_rails
62
+ @reloaders.each(&:reload)
63
+ @block&.call
64
+ Karafka::App.reload
65
+ end
66
+ end
67
+ end
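The reloader works by being subscribed to the Karafka monitor, which then invokes on_connection_listener_fetch_loop before each fetch loop iteration. A sketch of how a development-mode karafka.rb might wire it up; the Rails.application.reloaders part assumes a Rails app, whose reloaders respond to #updated? and #execute, matching the reload_with_rails branch above:

    # Development only: reload application code between fetched batches
    Karafka.monitor.subscribe(
      Karafka::CodeReloader.new(*Rails.application.reloaders)
    )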