karafka 1.4.0 → 2.0.10

Sign up to get free protection for your applications and to get access to all the features.
Files changed (172) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +89 -18
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +365 -1
  6. data/CONTRIBUTING.md +10 -19
  7. data/Gemfile +6 -0
  8. data/Gemfile.lock +56 -112
  9. data/LICENSE +17 -0
  10. data/LICENSE-COMM +89 -0
  11. data/LICENSE-LGPL +165 -0
  12. data/README.md +61 -68
  13. data/bin/benchmarks +85 -0
  14. data/bin/create_token +22 -0
  15. data/bin/integrations +272 -0
  16. data/bin/karafka +10 -0
  17. data/bin/scenario +29 -0
  18. data/bin/stress_many +13 -0
  19. data/bin/stress_one +13 -0
  20. data/certs/cert_chain.pem +26 -0
  21. data/certs/karafka-pro.pem +11 -0
  22. data/config/errors.yml +59 -38
  23. data/docker-compose.yml +10 -3
  24. data/karafka.gemspec +18 -21
  25. data/lib/active_job/karafka.rb +21 -0
  26. data/lib/active_job/queue_adapters/karafka_adapter.rb +26 -0
  27. data/lib/karafka/active_job/consumer.rb +26 -0
  28. data/lib/karafka/active_job/dispatcher.rb +38 -0
  29. data/lib/karafka/active_job/job_extensions.rb +34 -0
  30. data/lib/karafka/active_job/job_options_contract.rb +21 -0
  31. data/lib/karafka/active_job/routing/extensions.rb +33 -0
  32. data/lib/karafka/admin.rb +63 -0
  33. data/lib/karafka/app.rb +15 -20
  34. data/lib/karafka/base_consumer.rb +197 -31
  35. data/lib/karafka/cli/info.rb +44 -10
  36. data/lib/karafka/cli/install.rb +22 -12
  37. data/lib/karafka/cli/server.rb +17 -42
  38. data/lib/karafka/cli.rb +4 -3
  39. data/lib/karafka/connection/client.rb +379 -89
  40. data/lib/karafka/connection/listener.rb +250 -38
  41. data/lib/karafka/connection/listeners_batch.rb +24 -0
  42. data/lib/karafka/connection/messages_buffer.rb +84 -0
  43. data/lib/karafka/connection/pauses_manager.rb +46 -0
  44. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  45. data/lib/karafka/connection/rebalance_manager.rb +78 -0
  46. data/lib/karafka/contracts/base.rb +17 -0
  47. data/lib/karafka/contracts/config.rb +88 -11
  48. data/lib/karafka/contracts/consumer_group.rb +21 -184
  49. data/lib/karafka/contracts/consumer_group_topic.rb +35 -11
  50. data/lib/karafka/contracts/server_cli_options.rb +19 -18
  51. data/lib/karafka/contracts.rb +1 -1
  52. data/lib/karafka/env.rb +46 -0
  53. data/lib/karafka/errors.rb +21 -21
  54. data/lib/karafka/helpers/async.rb +33 -0
  55. data/lib/karafka/helpers/colorize.rb +20 -0
  56. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  57. data/lib/karafka/instrumentation/callbacks/error.rb +40 -0
  58. data/lib/karafka/instrumentation/callbacks/statistics.rb +41 -0
  59. data/lib/karafka/instrumentation/logger.rb +6 -10
  60. data/lib/karafka/instrumentation/logger_listener.rb +174 -0
  61. data/lib/karafka/instrumentation/monitor.rb +13 -61
  62. data/lib/karafka/instrumentation/notifications.rb +53 -0
  63. data/lib/karafka/instrumentation/proctitle_listener.rb +3 -3
  64. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  65. data/lib/karafka/instrumentation/vendors/datadog/listener.rb +232 -0
  66. data/lib/karafka/instrumentation.rb +21 -0
  67. data/lib/karafka/licenser.rb +75 -0
  68. data/lib/karafka/messages/batch_metadata.rb +45 -0
  69. data/lib/karafka/messages/builders/batch_metadata.rb +39 -0
  70. data/lib/karafka/messages/builders/message.rb +39 -0
  71. data/lib/karafka/messages/builders/messages.rb +34 -0
  72. data/lib/karafka/{params/params.rb → messages/message.rb} +7 -12
  73. data/lib/karafka/messages/messages.rb +64 -0
  74. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  75. data/lib/karafka/messages/seek.rb +9 -0
  76. data/lib/karafka/patches/rdkafka/consumer.rb +22 -0
  77. data/lib/karafka/pro/active_job/consumer.rb +46 -0
  78. data/lib/karafka/pro/active_job/dispatcher.rb +61 -0
  79. data/lib/karafka/pro/active_job/job_options_contract.rb +32 -0
  80. data/lib/karafka/pro/base_consumer.rb +107 -0
  81. data/lib/karafka/pro/contracts/base.rb +21 -0
  82. data/lib/karafka/pro/contracts/consumer_group.rb +34 -0
  83. data/lib/karafka/pro/contracts/consumer_group_topic.rb +69 -0
  84. data/lib/karafka/pro/loader.rb +76 -0
  85. data/lib/karafka/pro/performance_tracker.rb +80 -0
  86. data/lib/karafka/pro/processing/coordinator.rb +85 -0
  87. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +38 -0
  88. data/lib/karafka/pro/processing/jobs_builder.rb +32 -0
  89. data/lib/karafka/pro/processing/partitioner.rb +58 -0
  90. data/lib/karafka/pro/processing/scheduler.rb +56 -0
  91. data/lib/karafka/pro/routing/builder_extensions.rb +30 -0
  92. data/lib/karafka/pro/routing/topic_extensions.rb +74 -0
  93. data/lib/karafka/pro.rb +13 -0
  94. data/lib/karafka/process.rb +1 -0
  95. data/lib/karafka/processing/coordinator.rb +103 -0
  96. data/lib/karafka/processing/coordinators_buffer.rb +54 -0
  97. data/lib/karafka/processing/executor.rb +126 -0
  98. data/lib/karafka/processing/executors_buffer.rb +88 -0
  99. data/lib/karafka/processing/jobs/base.rb +55 -0
  100. data/lib/karafka/processing/jobs/consume.rb +47 -0
  101. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  102. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  103. data/lib/karafka/processing/jobs_builder.rb +29 -0
  104. data/lib/karafka/processing/jobs_queue.rb +144 -0
  105. data/lib/karafka/processing/partitioner.rb +22 -0
  106. data/lib/karafka/processing/result.rb +37 -0
  107. data/lib/karafka/processing/scheduler.rb +22 -0
  108. data/lib/karafka/processing/worker.rb +91 -0
  109. data/lib/karafka/processing/workers_batch.rb +27 -0
  110. data/lib/karafka/railtie.rb +127 -0
  111. data/lib/karafka/routing/builder.rb +26 -23
  112. data/lib/karafka/routing/consumer_group.rb +37 -17
  113. data/lib/karafka/routing/consumer_mapper.rb +1 -2
  114. data/lib/karafka/routing/proxy.rb +9 -16
  115. data/lib/karafka/routing/router.rb +1 -1
  116. data/lib/karafka/routing/subscription_group.rb +53 -0
  117. data/lib/karafka/routing/subscription_groups_builder.rb +54 -0
  118. data/lib/karafka/routing/topic.rb +65 -24
  119. data/lib/karafka/routing/topics.rb +38 -0
  120. data/lib/karafka/runner.rb +51 -0
  121. data/lib/karafka/serialization/json/deserializer.rb +6 -15
  122. data/lib/karafka/server.rb +67 -26
  123. data/lib/karafka/setup/config.rb +153 -175
  124. data/lib/karafka/status.rb +14 -5
  125. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  126. data/lib/karafka/templates/karafka.rb.erb +17 -55
  127. data/lib/karafka/time_trackers/base.rb +19 -0
  128. data/lib/karafka/time_trackers/pause.rb +92 -0
  129. data/lib/karafka/time_trackers/poll.rb +65 -0
  130. data/lib/karafka/version.rb +1 -1
  131. data/lib/karafka.rb +46 -16
  132. data.tar.gz.sig +0 -0
  133. metadata +145 -171
  134. metadata.gz.sig +0 -0
  135. data/.github/FUNDING.yml +0 -3
  136. data/MIT-LICENCE +0 -18
  137. data/certs/mensfeld.pem +0 -25
  138. data/lib/karafka/attributes_map.rb +0 -62
  139. data/lib/karafka/backends/inline.rb +0 -16
  140. data/lib/karafka/base_responder.rb +0 -226
  141. data/lib/karafka/cli/flow.rb +0 -48
  142. data/lib/karafka/code_reloader.rb +0 -67
  143. data/lib/karafka/connection/api_adapter.rb +0 -161
  144. data/lib/karafka/connection/batch_delegator.rb +0 -55
  145. data/lib/karafka/connection/builder.rb +0 -18
  146. data/lib/karafka/connection/message_delegator.rb +0 -36
  147. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  148. data/lib/karafka/consumers/callbacks.rb +0 -71
  149. data/lib/karafka/consumers/includer.rb +0 -64
  150. data/lib/karafka/consumers/responders.rb +0 -24
  151. data/lib/karafka/consumers/single_params.rb +0 -15
  152. data/lib/karafka/contracts/responder_usage.rb +0 -54
  153. data/lib/karafka/fetcher.rb +0 -42
  154. data/lib/karafka/helpers/class_matcher.rb +0 -88
  155. data/lib/karafka/helpers/config_retriever.rb +0 -46
  156. data/lib/karafka/helpers/inflector.rb +0 -26
  157. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  158. data/lib/karafka/params/batch_metadata.rb +0 -26
  159. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  160. data/lib/karafka/params/builders/params.rb +0 -38
  161. data/lib/karafka/params/builders/params_batch.rb +0 -25
  162. data/lib/karafka/params/params_batch.rb +0 -60
  163. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  164. data/lib/karafka/persistence/client.rb +0 -29
  165. data/lib/karafka/persistence/consumers.rb +0 -45
  166. data/lib/karafka/persistence/topics.rb +0 -48
  167. data/lib/karafka/responders/builder.rb +0 -36
  168. data/lib/karafka/responders/topic.rb +0 -55
  169. data/lib/karafka/routing/topic_mapper.rb +0 -53
  170. data/lib/karafka/serialization/json/serializer.rb +0 -31
  171. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  172. data/lib/karafka/templates/application_responder.rb.erb +0 -11
@@ -4,54 +4,220 @@
4
4
  module Karafka
5
5
  # Base consumer from which all Karafka consumers should inherit
6
6
  class BaseConsumer
7
- extend Forwardable
7
+ # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
8
+ attr_accessor :topic
9
+ # @return [Karafka::Messages::Messages] current messages batch
10
+ attr_accessor :messages
11
+ # @return [Karafka::Connection::Client] kafka connection client
12
+ attr_accessor :client
13
+ # @return [Karafka::Processing::Coordinator] coordinator
14
+ attr_accessor :coordinator
15
+ # @return [WaterDrop::Producer] producer instance
16
+ attr_accessor :producer
8
17
 
9
- # Allows us to mark messages as consumed for non-automatic mode without having
10
- # to use consumer client directly. We do this that way, because most of the people should not
11
- # mess with the client instance directly (just in case)
12
- %i[
13
- mark_as_consumed
14
- mark_as_consumed!
15
- trigger_heartbeat
16
- trigger_heartbeat!
17
- ].each do |delegated_method_name|
18
- def_delegator :client, delegated_method_name
18
+ # Can be used to run preparation code prior to the job being enqueued
19
+ #
20
+ # @private
21
+ # @note This should not be used by the end users as it is part of the lifecycle of things and
22
+ # not as a part of the public api. This should not perform any extensive operations as it is
23
+ # blocking and running in the listener thread.
24
+ def on_before_enqueue; end
19
25
 
20
- private delegated_method_name
26
+ # Can be used to run preparation code in the worker
27
+ #
28
+ # @private
29
+ # @note This should not be used by the end users as it is part of the lifecycle of things and
30
+ # not as part of the public api. This can act as a hook when creating non-blocking
31
+ # consumers and doing other advanced stuff
32
+ def on_before_consume
33
+ messages.metadata.processed_at = Time.now
34
+ messages.metadata.freeze
21
35
  end
22
36
 
23
- # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
24
- attr_reader :topic
25
- # @return [Karafka::Params:ParamsBatch] current params batch
26
- attr_accessor :params_batch
37
+ # Executes the default consumer flow.
38
+ #
39
+ # @return [Boolean] true if there was no exception, otherwise false.
40
+ #
41
+ # @note We keep the seek offset tracking, and use it to compensate for async offset flushing
42
+ # that may not yet kick in when error occurs. That way we pause always on the last processed
43
+ # message.
44
+ def on_consume
45
+ Karafka.monitor.instrument('consumer.consumed', caller: self) do
46
+ consume
47
+ end
27
48
 
28
- # Assigns a topic to a consumer and builds up proper consumer functionalities
29
- # so that it can cooperate with the topic settings
30
- # @param topic [Karafka::Routing::Topic]
31
- def initialize(topic)
32
- @topic = topic
33
- Consumers::Includer.call(self)
49
+ coordinator.consumption(self).success!
50
+ rescue StandardError => e
51
+ coordinator.consumption(self).failure!(e)
52
+
53
+ Karafka.monitor.instrument(
54
+ 'error.occurred',
55
+ error: e,
56
+ caller: self,
57
+ type: 'consumer.consume.error'
58
+ )
59
+ ensure
60
+ # We need to decrease number of jobs that this coordinator coordinates as it has finished
61
+ coordinator.decrement
34
62
  end
35
63
 
36
- # Executes the default consumer flow.
37
- def call
38
- process
64
+ # @private
65
+ # @note This should not be used by the end users as it is part of the lifecycle of things but
66
+ # not as part of the public api.
67
+ def on_after_consume
68
+ return if revoked?
69
+
70
+ if coordinator.success?
71
+ coordinator.pause_tracker.reset
72
+
73
+ # Mark as consumed only if manual offset management is not on
74
+ return if topic.manual_offset_management?
75
+
76
+ # We use the non-blocking one here. If someone needs the blocking one, can implement it
77
+ # with manual offset management
78
+ mark_as_consumed(messages.last)
79
+ else
80
+ pause(coordinator.seek_offset)
81
+ end
39
82
  end
40
83
 
41
- private
84
+ # Trigger method for running on partition revocation.
85
+ #
86
+ # @private
87
+ def on_revoked
88
+ # We need to always un-pause the processing in case we have lost a given partition.
89
+ # Otherwise the underlying librdkafka would not know we may want to continue processing and
90
+ # the pause could in theory last forever
91
+ resume
42
92
 
43
- # @return [Karafka::Connection::Client] messages consuming client that can be used to
44
- # commit manually offset or pause / stop consumer based on the business logic
45
- def client
46
- Persistence::Client.read
93
+ coordinator.revoke
94
+
95
+ Karafka.monitor.instrument('consumer.revoked', caller: self) do
96
+ revoked
97
+ end
98
+ rescue StandardError => e
99
+ Karafka.monitor.instrument(
100
+ 'error.occurred',
101
+ error: e,
102
+ caller: self,
103
+ type: 'consumer.revoked.error'
104
+ )
105
+ end
106
+
107
+ # Trigger method for running on shutdown.
108
+ #
109
+ # @private
110
+ def on_shutdown
111
+ Karafka.monitor.instrument('consumer.shutdown', caller: self) do
112
+ shutdown
113
+ end
114
+ rescue StandardError => e
115
+ Karafka.monitor.instrument(
116
+ 'error.occurred',
117
+ error: e,
118
+ caller: self,
119
+ type: 'consumer.shutdown.error'
120
+ )
47
121
  end
48
122
 
123
+ private
124
+
49
125
  # Method that will perform business logic and on data received from Kafka (it will consume
50
126
  # the data)
51
- # @note This method needs bo be implemented in a subclass. We stub it here as a failover if
127
+ # @note This method needs to be implemented in a subclass. We stub it here as a failover if
52
128
  # someone forgets about it or makes on with typo
53
129
  def consume
54
130
  raise NotImplementedError, 'Implement this in a subclass'
55
131
  end
132
+
133
+ # Method that will be executed when a given topic partition is revoked. You can use it for
134
+ # some teardown procedures (closing file handler, etc).
135
+ def revoked; end
136
+
137
+ # Method that will be executed when the process is shutting down. You can use it for
138
+ # some teardown procedures (closing file handler, etc).
139
+ def shutdown; end
140
+
141
+ # Marks message as consumed in an async way.
142
+ #
143
+ # @param message [Messages::Message] last successfully processed message.
144
+ # @return [Boolean] true if we were able to mark the offset, false otherwise. False indicates
145
+ # that we were not able and that we have lost the partition.
146
+ #
147
+ # @note We keep track of this offset in case we would mark as consumed and got error when
148
+ # processing another message. In case like this we do not pause on the message we've already
149
+ # processed but rather at the next one. This applies to both sync and async versions of this
150
+ # method.
151
+ def mark_as_consumed(message)
152
+ unless client.mark_as_consumed(message)
153
+ coordinator.revoke
154
+
155
+ return false
156
+ end
157
+
158
+ coordinator.seek_offset = message.offset + 1
159
+
160
+ true
161
+ end
162
+
163
+ # Marks message as consumed in a sync way.
164
+ #
165
+ # @param message [Messages::Message] last successfully processed message.
166
+ # @return [Boolean] true if we were able to mark the offset, false otherwise. False indicates
167
+ # that we were not able and that we have lost the partition.
168
+ def mark_as_consumed!(message)
169
+ unless client.mark_as_consumed!(message)
170
+ coordinator.revoke
171
+
172
+ return false
173
+ end
174
+
175
+ coordinator.seek_offset = message.offset + 1
176
+
177
+ true
178
+ end
179
+
180
+ # Pauses processing on a given offset for the current topic partition
181
+ #
182
+ # After given partition is resumed, it will continue processing from the given offset
183
+ # @param offset [Integer] offset from which we want to restart the processing
184
+ # @param timeout [Integer, nil] how long in milliseconds do we want to pause or nil to use the
185
+ # default exponential pausing strategy defined for retries
186
+ def pause(offset, timeout = nil)
187
+ timeout ? coordinator.pause_tracker.pause(timeout) : coordinator.pause_tracker.pause
188
+
189
+ client.pause(
190
+ messages.metadata.topic,
191
+ messages.metadata.partition,
192
+ offset
193
+ )
194
+ end
195
+
196
+ # Resumes processing of the current topic partition
197
+ def resume
198
+ # This is sufficient to expire a partition pause, as with it, the partition will be resumed by the listener
199
+ # thread before the next poll.
200
+ coordinator.pause_tracker.expire
201
+ end
202
+
203
+ # Seeks in the context of current topic and partition
204
+ #
205
+ # @param offset [Integer] offset where we want to seek
206
+ def seek(offset)
207
+ client.seek(
208
+ Karafka::Messages::Seek.new(
209
+ messages.metadata.topic,
210
+ messages.metadata.partition,
211
+ offset
212
+ )
213
+ )
214
+ end
215
+
216
+ # @return [Boolean] true if partition was revoked from the current consumer
217
+ # @note We know that partition got revoked because when we try to mark message as consumed,
218
+ # unless it is successful, it will return false
219
+ def revoked?
220
+ coordinator.revoked?
221
+ end
56
222
  end
57
223
  end
@@ -7,24 +7,58 @@ module Karafka
7
7
  class Info < Base
8
8
  desc 'Print configuration details and other options of your application'
9
9
 
10
+ # Nice karafka banner
11
+ BANNER = <<~BANNER
12
+
13
+ @@@ @@@@@ @@@
14
+ @@@ @@@ @@@
15
+ @@@ @@@ @@@@@@@@@ @@@ @@@ @@@@@@@@@ @@@@@@@@ @@@ @@@@ @@@@@@@@@
16
+ @@@@@@ @@@ @@@ @@@@@ @@@ @@@ @@@ @@@@@@@ @@@ @@@
17
+ @@@@@@@ @@@ @@@ @@@ @@@@ @@@ @@@ @@@@@@@ @@@ @@@
18
+ @@@ @@@@ @@@@@@@@@@ @@@ @@@@@@@@@@ @@@ @@@ @@@@ @@@@@@@@@@
19
+
20
+ BANNER
21
+
10
22
  # Print configuration details and other options of your application
11
23
  def call
24
+ Karafka.logger.info(BANNER)
25
+ Karafka.logger.info((core_info + license_info).join("\n"))
26
+ end
27
+
28
+ private
29
+
30
+ # @return [Array<String>] core framework related info
31
+ def core_info
12
32
  config = Karafka::App.config
13
33
 
14
- info = [
15
- "Karafka version: #{Karafka::VERSION}",
16
- "Ruby version: #{RUBY_VERSION}",
17
- "Ruby-kafka version: #{::Kafka::VERSION}",
34
+ postfix = Karafka.pro? ? ' + Pro' : ''
35
+
36
+ [
37
+ "Karafka version: #{Karafka::VERSION}#{postfix}",
38
+ "Ruby version: #{RUBY_DESCRIPTION}",
39
+ "Rdkafka version: #{::Rdkafka::VERSION}",
40
+ "Subscription groups count: #{Karafka::App.subscription_groups.size}",
41
+ "Workers count: #{Karafka::App.config.concurrency}",
18
42
  "Application client id: #{config.client_id}",
19
- "Backend: #{config.backend}",
20
- "Batch fetching: #{config.batch_fetching}",
21
- "Batch consuming: #{config.batch_consuming}",
22
43
  "Boot file: #{Karafka.boot_file}",
23
- "Environment: #{Karafka.env}",
24
- "Kafka seed brokers: #{config.kafka.seed_brokers}"
44
+ "Environment: #{Karafka.env}"
25
45
  ]
46
+ end
47
+
48
+ # @return [Array<String>] license related info
49
+ def license_info
50
+ config = Karafka::App.config
26
51
 
27
- Karafka.logger.info(info.join("\n"))
52
+ if Karafka.pro?
53
+ [
54
+ 'License: Commercial',
55
+ "License entity: #{config.license.entity}"
56
+ ]
57
+ else
58
+ [
59
+ 'License: LGPL-3.0'
60
+ ]
61
+ end
28
62
  end
29
63
  end
30
64
  end
@@ -7,34 +7,35 @@ module Karafka
7
7
  class Cli < Thor
8
8
  # Install Karafka Cli action
9
9
  class Install < Base
10
+ include Helpers::Colorize
11
+
10
12
  desc 'Install all required things for Karafka application in current directory'
11
13
 
12
14
  # Directories created by default
13
15
  INSTALL_DIRS = %w[
14
16
  app/consumers
15
- app/responders
16
- app/workers
17
17
  config
18
- lib
19
18
  log
20
- tmp/pids
19
+ lib
21
20
  ].freeze
22
21
 
23
22
  # Where should we map proper files from templates
24
23
  INSTALL_FILES_MAP = {
25
24
  'karafka.rb.erb' => Karafka.boot_file.basename,
26
25
  'application_consumer.rb.erb' => 'app/consumers/application_consumer.rb',
27
- 'application_responder.rb.erb' => 'app/responders/application_responder.rb'
26
+ 'example_consumer.rb.erb' => 'app/consumers/example_consumer.rb'
28
27
  }.freeze
29
28
 
30
29
  # @param args [Array] all the things that Thor CLI accepts
31
30
  def initialize(*args)
32
31
  super
33
- @rails = Bundler::LockfileParser.new(
32
+ dependencies = Bundler::LockfileParser.new(
34
33
  Bundler.read_file(
35
34
  Bundler.default_lockfile
36
35
  )
37
- ).dependencies.key?('rails')
36
+ ).dependencies
37
+
38
+ @rails = dependencies.key?('railties') || dependencies.key?('rails')
38
39
  end
39
40
 
40
41
  # Install all required things for Karafka application in current directory
@@ -43,16 +44,25 @@ module Karafka
43
44
  FileUtils.mkdir_p Karafka.root.join(dir)
44
45
  end
45
46
 
47
+ puts
48
+ puts 'Installing Karafka framework...'
49
+ puts 'Ruby on Rails detected...' if rails?
50
+ puts
51
+
46
52
  INSTALL_FILES_MAP.each do |source, target|
47
- target = Karafka.root.join(target)
53
+ pathed_target = Karafka.root.join(target)
48
54
 
49
55
  template = File.read(Karafka.core_root.join("templates/#{source}"))
50
- # @todo Replace with the keyword argument version once we don't have to support
51
- # Ruby < 2.6
52
- render = ::ERB.new(template, nil, '-').result(binding)
56
+ render = ::ERB.new(template, trim_mode: '-').result(binding)
57
+
58
+ File.open(pathed_target, 'w') { |file| file.write(render) }
53
59
 
54
- File.open(target, 'w') { |file| file.write(render) }
60
+ puts "#{green('Created')} #{target}"
55
61
  end
62
+
63
+ puts
64
+ puts("Installation #{green('completed')}. Have fun!")
65
+ puts
56
66
  end
57
67
 
58
68
  # @return [Boolean] true if we have Rails loaded
@@ -5,66 +5,41 @@ module Karafka
5
5
  class Cli < Thor
6
6
  # Server Karafka Cli action
7
7
  class Server < Base
8
- # Server config settings contract
9
- CONTRACT = Contracts::ServerCliOptions.new.freeze
10
-
11
- private_constant :CONTRACT
8
+ include Helpers::Colorize
12
9
 
13
10
  desc 'Start the Karafka server (short-cut alias: "s")'
14
11
  option aliases: 's'
15
- option :daemon, default: false, type: :boolean, aliases: :d
16
- option :pid, default: 'tmp/pids/karafka', type: :string, aliases: :p
17
12
  option :consumer_groups, type: :array, default: nil, aliases: :g
18
13
 
19
14
  # Start the Karafka server
20
15
  def call
21
- cli.info
22
-
23
- validate!
16
+ # Print our banner and info in the dev mode
17
+ print_marketing_info if Karafka::App.env.development?
24
18
 
25
- if cli.options[:daemon]
26
- FileUtils.mkdir_p File.dirname(cli.options[:pid])
27
- daemonize
28
- end
19
+ Contracts::ServerCliOptions.new.validate!(cli.options)
29
20
 
30
21
  # We assign active topics on a server level, as only server is expected to listen on
31
22
  # part of the topics
32
23
  Karafka::Server.consumer_groups = cli.options[:consumer_groups]
33
24
 
34
- # Remove pidfile on stop, just before the server instance is going to be GCed
35
- # We want to delay the moment in which the pidfile is removed as much as we can,
36
- # so instead of removing it after the server stops running, we rely on the gc moment
37
- # when this object gets removed (it is a bit later), so it is closer to the actual
38
- # system process end. We do that, so monitoring and deployment tools that rely on a pid
39
- # won't alarm or start new system process up until the current one is finished
40
- ObjectSpace.define_finalizer(self, proc { send(:clean) })
41
-
42
25
  Karafka::Server.run
43
26
  end
44
27
 
45
28
  private
46
29
 
47
- # Checks the server cli configuration
48
- # options validations in terms of app setup (topics, pid existence, etc)
49
- def validate!
50
- result = CONTRACT.call(cli.options)
51
- return if result.success?
52
-
53
- raise Errors::InvalidConfigurationError, result.errors.to_h
54
- end
55
-
56
- # Detaches current process into background and writes its pidfile
57
- def daemonize
58
- ::Process.daemon(true)
59
- File.open(
60
- cli.options[:pid],
61
- 'w'
62
- ) { |file| file.write(::Process.pid) }
63
- end
64
-
65
- # Removes a pidfile (if exist)
66
- def clean
67
- FileUtils.rm_f(cli.options[:pid]) if cli.options[:pid]
30
+ # Prints marketing info
31
+ def print_marketing_info
32
+ Karafka.logger.info Info::BANNER
33
+
34
+ if Karafka.pro?
35
+ Karafka.logger.info(
36
+ green('Thank you for using Karafka Pro!')
37
+ )
38
+ else
39
+ Karafka.logger.info(
40
+ red('Upgrade to Karafka Pro for more features and support: https://karafka.io')
41
+ )
42
+ end
68
43
  end
69
44
  end
70
45
  end
data/lib/karafka/cli.rb CHANGED
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Karafka
4
4
  # Karafka framework Cli
5
+ #
5
6
  # If you want to add/modify command that belongs to CLI, please review all commands
6
7
  # available in cli/ directory inside Karafka source code.
7
8
  #
@@ -11,9 +12,9 @@ module Karafka
11
12
  package_name 'Karafka'
12
13
 
13
14
  class << self
14
- # Loads all Cli commands into Thor framework
15
+ # Loads all Cli commands into Thor framework.
15
16
  # This method should be executed before we run Karafka::Cli.start, otherwise we won't
16
- # have any Cli commands available
17
+ # have any Cli commands available.
17
18
  def prepare
18
19
  cli_commands.each do |action|
19
20
  action.bind_to(self)
@@ -34,7 +35,7 @@ module Karafka
34
35
  end
35
36
  end
36
37
 
37
- # This is kinda trick - since we don't have a autoload and other magic stuff
38
+ # This is kinda tricky - since we don't have an autoload and other magic stuff
38
39
  # like Rails does, so instead this method allows us to replace currently running
39
40
  # console with a new one via Kernel.exec. It will start console with new code loaded
40
41
  # Yes, we know that it is not turbo fast, however it is turbo convenient and small