karafka 1.4.13 → 2.0.0

Files changed (170)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +3 -3
  3. data/.github/workflows/ci.yml +85 -30
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +268 -7
  6. data/CONTRIBUTING.md +10 -19
  7. data/Gemfile +6 -0
  8. data/Gemfile.lock +44 -87
  9. data/LICENSE +17 -0
  10. data/LICENSE-COMM +89 -0
  11. data/LICENSE-LGPL +165 -0
  12. data/README.md +44 -48
  13. data/bin/benchmarks +85 -0
  14. data/bin/create_token +22 -0
  15. data/bin/integrations +237 -0
  16. data/bin/karafka +4 -0
  17. data/bin/scenario +29 -0
  18. data/bin/stress_many +13 -0
  19. data/bin/stress_one +13 -0
  20. data/bin/wait_for_kafka +20 -0
  21. data/certs/karafka-pro.pem +11 -0
  22. data/config/errors.yml +55 -40
  23. data/docker-compose.yml +39 -3
  24. data/karafka.gemspec +11 -17
  25. data/lib/active_job/karafka.rb +21 -0
  26. data/lib/active_job/queue_adapters/karafka_adapter.rb +26 -0
  27. data/lib/karafka/active_job/consumer.rb +26 -0
  28. data/lib/karafka/active_job/dispatcher.rb +38 -0
  29. data/lib/karafka/active_job/job_extensions.rb +34 -0
  30. data/lib/karafka/active_job/job_options_contract.rb +21 -0
  31. data/lib/karafka/active_job/routing/extensions.rb +31 -0
  32. data/lib/karafka/app.rb +15 -20
  33. data/lib/karafka/base_consumer.rb +181 -31
  34. data/lib/karafka/cli/base.rb +4 -4
  35. data/lib/karafka/cli/info.rb +43 -9
  36. data/lib/karafka/cli/install.rb +19 -10
  37. data/lib/karafka/cli/server.rb +17 -42
  38. data/lib/karafka/cli.rb +4 -11
  39. data/lib/karafka/connection/client.rb +385 -90
  40. data/lib/karafka/connection/listener.rb +246 -38
  41. data/lib/karafka/connection/listeners_batch.rb +24 -0
  42. data/lib/karafka/connection/messages_buffer.rb +84 -0
  43. data/lib/karafka/connection/pauses_manager.rb +46 -0
  44. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  45. data/lib/karafka/connection/rebalance_manager.rb +78 -0
  46. data/lib/karafka/contracts/base.rb +17 -0
  47. data/lib/karafka/contracts/config.rb +88 -11
  48. data/lib/karafka/contracts/consumer_group.rb +21 -189
  49. data/lib/karafka/contracts/consumer_group_topic.rb +34 -11
  50. data/lib/karafka/contracts/server_cli_options.rb +19 -18
  51. data/lib/karafka/contracts.rb +1 -1
  52. data/lib/karafka/env.rb +46 -0
  53. data/lib/karafka/errors.rb +21 -21
  54. data/lib/karafka/helpers/async.rb +33 -0
  55. data/lib/karafka/helpers/colorize.rb +20 -0
  56. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  57. data/lib/karafka/instrumentation/callbacks/error.rb +40 -0
  58. data/lib/karafka/instrumentation/callbacks/statistics.rb +41 -0
  59. data/lib/karafka/instrumentation/logger_listener.rb +164 -0
  60. data/lib/karafka/instrumentation/monitor.rb +13 -61
  61. data/lib/karafka/instrumentation/notifications.rb +52 -0
  62. data/lib/karafka/instrumentation/proctitle_listener.rb +3 -3
  63. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  64. data/lib/karafka/instrumentation/vendors/datadog/listener.rb +232 -0
  65. data/lib/karafka/instrumentation.rb +21 -0
  66. data/lib/karafka/licenser.rb +75 -0
  67. data/lib/karafka/messages/batch_metadata.rb +45 -0
  68. data/lib/karafka/messages/builders/batch_metadata.rb +40 -0
  69. data/lib/karafka/messages/builders/message.rb +39 -0
  70. data/lib/karafka/messages/builders/messages.rb +32 -0
  71. data/lib/karafka/{params/params.rb → messages/message.rb} +7 -12
  72. data/lib/karafka/messages/messages.rb +64 -0
  73. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  74. data/lib/karafka/messages/seek.rb +9 -0
  75. data/lib/karafka/patches/rdkafka/consumer.rb +22 -0
  76. data/lib/karafka/pro/active_job/consumer.rb +46 -0
  77. data/lib/karafka/pro/active_job/dispatcher.rb +61 -0
  78. data/lib/karafka/pro/active_job/job_options_contract.rb +32 -0
  79. data/lib/karafka/pro/base_consumer.rb +82 -0
  80. data/lib/karafka/pro/contracts/base.rb +21 -0
  81. data/lib/karafka/pro/contracts/consumer_group.rb +34 -0
  82. data/lib/karafka/pro/contracts/consumer_group_topic.rb +33 -0
  83. data/lib/karafka/pro/loader.rb +76 -0
  84. data/lib/karafka/pro/performance_tracker.rb +80 -0
  85. data/lib/karafka/pro/processing/coordinator.rb +72 -0
  86. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +37 -0
  87. data/lib/karafka/pro/processing/jobs_builder.rb +32 -0
  88. data/lib/karafka/pro/processing/partitioner.rb +60 -0
  89. data/lib/karafka/pro/processing/scheduler.rb +56 -0
  90. data/lib/karafka/pro/routing/builder_extensions.rb +30 -0
  91. data/lib/karafka/pro/routing/topic_extensions.rb +38 -0
  92. data/lib/karafka/pro.rb +13 -0
  93. data/lib/karafka/process.rb +1 -0
  94. data/lib/karafka/processing/coordinator.rb +88 -0
  95. data/lib/karafka/processing/coordinators_buffer.rb +54 -0
  96. data/lib/karafka/processing/executor.rb +118 -0
  97. data/lib/karafka/processing/executors_buffer.rb +88 -0
  98. data/lib/karafka/processing/jobs/base.rb +51 -0
  99. data/lib/karafka/processing/jobs/consume.rb +42 -0
  100. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  101. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  102. data/lib/karafka/processing/jobs_builder.rb +29 -0
  103. data/lib/karafka/processing/jobs_queue.rb +144 -0
  104. data/lib/karafka/processing/partitioner.rb +22 -0
  105. data/lib/karafka/processing/result.rb +29 -0
  106. data/lib/karafka/processing/scheduler.rb +22 -0
  107. data/lib/karafka/processing/worker.rb +88 -0
  108. data/lib/karafka/processing/workers_batch.rb +27 -0
  109. data/lib/karafka/railtie.rb +113 -0
  110. data/lib/karafka/routing/builder.rb +15 -24
  111. data/lib/karafka/routing/consumer_group.rb +11 -19
  112. data/lib/karafka/routing/consumer_mapper.rb +1 -2
  113. data/lib/karafka/routing/router.rb +1 -1
  114. data/lib/karafka/routing/subscription_group.rb +53 -0
  115. data/lib/karafka/routing/subscription_groups_builder.rb +53 -0
  116. data/lib/karafka/routing/topic.rb +61 -24
  117. data/lib/karafka/routing/topics.rb +38 -0
  118. data/lib/karafka/runner.rb +51 -0
  119. data/lib/karafka/serialization/json/deserializer.rb +6 -15
  120. data/lib/karafka/server.rb +67 -26
  121. data/lib/karafka/setup/config.rb +147 -175
  122. data/lib/karafka/status.rb +14 -5
  123. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  124. data/lib/karafka/templates/karafka.rb.erb +15 -51
  125. data/lib/karafka/time_trackers/base.rb +19 -0
  126. data/lib/karafka/time_trackers/pause.rb +92 -0
  127. data/lib/karafka/time_trackers/poll.rb +65 -0
  128. data/lib/karafka/version.rb +1 -1
  129. data/lib/karafka.rb +38 -17
  130. data.tar.gz.sig +0 -0
  131. metadata +118 -120
  132. metadata.gz.sig +0 -0
  133. data/MIT-LICENCE +0 -18
  134. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  135. data/lib/karafka/attributes_map.rb +0 -63
  136. data/lib/karafka/backends/inline.rb +0 -16
  137. data/lib/karafka/base_responder.rb +0 -226
  138. data/lib/karafka/cli/flow.rb +0 -48
  139. data/lib/karafka/cli/missingno.rb +0 -19
  140. data/lib/karafka/code_reloader.rb +0 -67
  141. data/lib/karafka/connection/api_adapter.rb +0 -158
  142. data/lib/karafka/connection/batch_delegator.rb +0 -55
  143. data/lib/karafka/connection/builder.rb +0 -23
  144. data/lib/karafka/connection/message_delegator.rb +0 -36
  145. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  146. data/lib/karafka/consumers/callbacks.rb +0 -71
  147. data/lib/karafka/consumers/includer.rb +0 -64
  148. data/lib/karafka/consumers/responders.rb +0 -24
  149. data/lib/karafka/consumers/single_params.rb +0 -15
  150. data/lib/karafka/contracts/responder_usage.rb +0 -54
  151. data/lib/karafka/fetcher.rb +0 -42
  152. data/lib/karafka/helpers/class_matcher.rb +0 -88
  153. data/lib/karafka/helpers/config_retriever.rb +0 -46
  154. data/lib/karafka/helpers/inflector.rb +0 -26
  155. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  156. data/lib/karafka/params/batch_metadata.rb +0 -26
  157. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  158. data/lib/karafka/params/builders/params.rb +0 -38
  159. data/lib/karafka/params/builders/params_batch.rb +0 -25
  160. data/lib/karafka/params/params_batch.rb +0 -60
  161. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  162. data/lib/karafka/persistence/client.rb +0 -29
  163. data/lib/karafka/persistence/consumers.rb +0 -45
  164. data/lib/karafka/persistence/topics.rb +0 -48
  165. data/lib/karafka/responders/builder.rb +0 -36
  166. data/lib/karafka/responders/topic.rb +0 -55
  167. data/lib/karafka/routing/topic_mapper.rb +0 -53
  168. data/lib/karafka/serialization/json/serializer.rb +0 -31
  169. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  170. data/lib/karafka/templates/application_responder.rb.erb +0 -11

data/lib/active_job/karafka.rb ADDED
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+begin
+  require 'active_job'
+  require_relative 'queue_adapters/karafka_adapter'
+
+  module ActiveJob
+    # Namespace for usage simplification outside of Rails where Railtie will not kick in.
+    # That way a require 'active_job/karafka' should be enough to use it
+    module Karafka
+    end
+  end
+
+  # We extend routing builder by adding a simple wrapper for easier jobs topics defining
+  # This needs to be extended here as it is going to be used in karafka routes, hence doing that
+  # in the railtie initializer would be too late
+  ::Karafka::Routing::Builder.include ::Karafka::ActiveJob::Routing::Extensions
+  ::Karafka::Routing::Proxy.include ::Karafka::ActiveJob::Routing::Extensions
+rescue LoadError
+  # We extend ActiveJob stuff in the railtie
+end

data/lib/active_job/queue_adapters/karafka_adapter.rb ADDED
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+# ActiveJob components to allow for jobs consumption with Karafka
+module ActiveJob
+  # ActiveJob queue adapters
+  module QueueAdapters
+    # Karafka adapter for enqueuing jobs
+    # This is here for ease of integration with ActiveJob.
+    class KarafkaAdapter
+      # Enqueues the job using the configured dispatcher
+      #
+      # @param job [Object] job that should be enqueued
+      def enqueue(job)
+        ::Karafka::App.config.internal.active_job.dispatcher.call(job)
+      end
+
+      # Raises an error to indicate that the Karafka backend does not support scheduling jobs
+      #
+      # @param _job [Object] job we cannot enqueue
+      # @param _timestamp [Time] time when job should run
+      def enqueue_at(_job, _timestamp)
+        raise NotImplementedError, 'This queueing backend does not support scheduling jobs.'
+      end
+    end
+  end
+end
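
Since the adapter plugs into ActiveJob's standard queue adapter lookup, enabling it in Rails is a one-line setting. A minimal sketch (the application name is a placeholder):

# config/application.rb
module ExampleApp
  class Application < Rails::Application
    # ActiveJob resolves :karafka to ActiveJob::QueueAdapters::KarafkaAdapter
    config.active_job.queue_adapter = :karafka
  end
end

Note that because enqueue_at raises NotImplementedError, perform_later works but delayed jobs (set(wait: ...)) do not.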

data/lib/karafka/active_job/consumer.rb ADDED
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Karafka
+  module ActiveJob
+    # This is the consumer for ActiveJob that consumes the messages enqueued with it one after
+    # another. It marks the offset after each message, so we make sure that none of the jobs is
+    # executed twice
+    class Consumer < ::Karafka::BaseConsumer
+      # Executes the ActiveJob logic
+      # @note ActiveJob does not support batches, so we just run one message after another
+      def consume
+        messages.each do |message|
+          break if Karafka::App.stopping?
+
+          ::ActiveJob::Base.execute(
+            # We technically speaking could set this as deserializer and reference it from the
+            # message instead of using the `#raw_payload`. This is not done on purpose to simplify
+            # the ActiveJob setup here
+            ::ActiveSupport::JSON.decode(message.raw_payload)
+          )
+
+          mark_as_consumed(message)
+        end
+      end
+    end
+  end
+end
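
For context, the consumer above executes ordinary ActiveJob classes; the job itself needs nothing Karafka specific. A sketch with a hypothetical job:

class WelcomeEmailJob < ActiveJob::Base
  # The queue name doubles as the Kafka topic name (see the dispatcher below)
  queue_as :default

  def perform(user_id)
    Karafka.logger.info("Delivering welcome email to user #{user_id}")
  end
end

WelcomeEmailJob.perform_later(42) # serialized and produced to the `default` topic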

data/lib/karafka/active_job/dispatcher.rb ADDED
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Karafka
+  module ActiveJob
+    # Dispatcher that sends the ActiveJob job to a proper topic based on the queue name
+    class Dispatcher
+      # Defaults for dispatching
+      # They can be updated by using `#karafka_options` on the job
+      DEFAULTS = {
+        dispatch_method: :produce_async
+      }.freeze
+
+      private_constant :DEFAULTS
+
+      # @param job [ActiveJob::Base] job
+      def call(job)
+        ::Karafka.producer.public_send(
+          fetch_option(job, :dispatch_method, DEFAULTS),
+          topic: job.queue_name,
+          payload: ::ActiveSupport::JSON.encode(job.serialize)
+        )
+      end
+
+      private
+
+      # @param job [ActiveJob::Base] job
+      # @param key [Symbol] key we want to fetch
+      # @param defaults [Hash]
+      # @return [Object] options we are interested in
+      def fetch_option(job, key, defaults)
+        job
+          .class
+          .karafka_options
+          .fetch(key, defaults.fetch(key))
+      end
+    end
+  end
+end
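
Given those defaults, dispatching a job enqueued on the `default` queue is roughly equivalent to the direct producer call below (a sketch; `job` stands for the ActiveJob instance):

::Karafka.producer.produce_async(
  topic: 'default', # job.queue_name
  payload: ::ActiveSupport::JSON.encode(job.serialize)
)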

data/lib/karafka/active_job/job_extensions.rb ADDED
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Karafka
+  module ActiveJob
+    # Allows for setting karafka specific options in ActiveJob jobs
+    module JobExtensions
+      class << self
+        # Defines all the needed accessors and sets defaults
+        # @param klass [ActiveJob::Base] active job base
+        def extended(klass)
+          klass.class_attribute :_karafka_options
+          klass._karafka_options = {}
+        end
+      end
+
+      # @param new_options [Hash] additional options that allow for jobs Karafka related options
+      #   customization
+      # @return [Hash] karafka options
+      def karafka_options(new_options = {})
+        return _karafka_options if new_options.empty?
+
+        # Make sure, that karafka options that someone wants to use are valid before assigning
+        # them
+        App.config.internal.active_job.job_options_contract.validate!(new_options)
+
+        new_options.each do |name, value|
+          _karafka_options[name] = value
+        end
+
+        _karafka_options
+      end
+    end
+  end
+end
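
Once the integration is loaded, these options can be customized per job class. A sketch (the job class is illustrative); the options pass through the contract shown below before being stored:

class PaymentJob < ActiveJob::Base
  queue_as :payments

  # Switch this job to blocking delivery; validated against JobOptionsContract
  karafka_options(dispatch_method: :produce_sync)
end

PaymentJob.karafka_options # => { dispatch_method: :produce_sync }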

data/lib/karafka/active_job/job_options_contract.rb ADDED
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Karafka
+  module ActiveJob
+    # Contract for validating the options that can be altered with `#karafka_options` per job class
+    # @note We keep this in the `Karafka::ActiveJob` namespace instead of `Karafka::Contracts` as
+    #   we want to keep ActiveJob related Karafka components outside of the core Karafka code and
+    #   all in the same place
+    class JobOptionsContract < Contracts::Base
+      configure do |config|
+        config.error_messages = YAML.safe_load(
+          File.read(
+            File.join(Karafka.gem_root, 'config', 'errors.yml')
+          )
+        ).fetch('en').fetch('validations').fetch('job_options')
+      end
+
+      optional(:dispatch_method) { |val| %i[produce_async produce_sync].include?(val) }
+    end
+  end
+end
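
The contract only accepts :dispatch_method with :produce_async or :produce_sync, so any other value fails at class definition time. A sketch (the job class is hypothetical):

class BrokenJob < ActiveJob::Base
  karafka_options(dispatch_method: :produce_later) # raises a validation error
end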

data/lib/karafka/active_job/routing/extensions.rb ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Karafka
+  # ActiveJob related Karafka stuff
+  module ActiveJob
+    # Karafka routing ActiveJob related components
+    module Routing
+      # Routing extensions for ActiveJob
+      module Extensions
+        # This method simplifies routes definition for ActiveJob topics / queues by auto-injecting
+        # the consumer class
+        # @param name [String, Symbol] name of the topic where ActiveJob jobs should go
+        # @param block [Proc] block that we can use for some extra configuration
+        def active_job_topic(name, &block)
+          topic(name) do
+            consumer App.config.internal.active_job.consumer_class
+
+            next unless block
+
+            instance_eval(&block)
+
+            # This is handled by our custom ActiveJob consumer
+            # Without this, default behaviour would cause messages to skip upon shutdown as the
+            # offset would be committed for the last message
+            manual_offset_management true
+          end
+        end
+      end
+    end
+  end
+end
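
A routing sketch using this extension (consumer and topic names are illustrative); `active_job_topic` injects the ActiveJob consumer class and enables manual offset management automatically:

class KarafkaApp < Karafka::App
  routes.draw do
    topic :orders do
      consumer OrdersConsumer
    end

    # Jobs enqueued on the `default` queue are consumed from this topic
    active_job_topic :default
  end
end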
data/lib/karafka/app.rb CHANGED
@@ -6,31 +6,24 @@ module Karafka
     extend Setup::Dsl
 
     class << self
-      # Sets up all the internal components and bootstrap whole app
-      # We need to know details about consumers in order to setup components,
-      # that's why we don't setup them after std setup is done
-      # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
-      #   doesn't match with the config contract
-      def boot!
-        initialize!
-        Setup::Config.validate!
-        Setup::Config.setup_components
-        initialized!
-      end
-
-      # @return [Karafka::Routing::Builder] consumers builder instance
+      # @return [Karafka::Routing::Builder] consumers builder instance alias
       def consumer_groups
-        config.internal.routing_builder
+        config
+          .internal
+          .routing
+          .builder
       end
 
-      # Triggers reload of all cached Karafka app components, so we can use in-process
-      # in-development hot code reloading without Karafka process restart
-      def reload
-        Karafka::Persistence::Consumers.clear
-        Karafka::Persistence::Topics.clear
-        config.internal.routing_builder.reload
+      # @return [Array<Karafka::Routing::SubscriptionGroup>] active subscription groups
+      def subscription_groups
+        consumer_groups
+          .active
+          .flat_map(&:subscription_groups)
       end
 
+      # Just a nicer name for the consumer groups
+      alias routes consumer_groups
+
       Status.instance_methods(false).each do |delegated|
         define_method(delegated) do
           App.config.internal.status.send(delegated)
@@ -42,7 +35,9 @@ module Karafka
         root
         env
         logger
+        producer
         monitor
+        pro?
       ].each do |delegated|
         define_method(delegated) do
           Karafka.send(delegated)
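
The new readers make the routing introspectable at runtime, for example:

Karafka::App.consumer_groups     # all defined consumer groups
Karafka::App.routes              # alias of the above
Karafka::App.subscription_groups # subscription groups of the active consumer groups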

data/lib/karafka/base_consumer.rb CHANGED
@@ -4,48 +4,108 @@
 module Karafka
   # Base consumer from which all Karafka consumers should inherit
   class BaseConsumer
-    extend Forwardable
+    # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
+    attr_accessor :topic
+    # @return [Karafka::Messages::Messages] current messages batch
+    attr_accessor :messages
+    # @return [Karafka::Connection::Client] kafka connection client
+    attr_accessor :client
+    # @return [Karafka::Processing::Coordinator] coordinator
+    attr_accessor :coordinator
+    # @return [Waterdrop::Producer] producer instance
+    attr_accessor :producer
 
-    # Allows us to mark messages as consumed for non-automatic mode without having
-    # to use consumer client directly. We do this that way, because most of the people should not
-    # mess with the client instance directly (just in case)
-    %i[
-      mark_as_consumed
-      mark_as_consumed!
-      trigger_heartbeat
-      trigger_heartbeat!
-    ].each do |delegated_method_name|
-      def_delegator :client, delegated_method_name
+    # Can be used to run preparation code
+    #
+    # @private
+    # @note This should not be used by the end users as it is part of the lifecycle of things but
+    #   not as part of the public api. This can act as a hook when creating non-blocking
+    #   consumers and doing other advanced stuff
+    def on_before_consume; end
 
-      private delegated_method_name
-    end
+    # Executes the default consumer flow.
+    #
+    # @return [Boolean] true if there was no exception, otherwise false.
+    #
+    # @note We keep the seek offset tracking, and use it to compensate for async offset flushing
+    #   that may not yet kick in when an error occurs. That way we always pause on the last
+    #   processed message.
+    def on_consume
+      Karafka.monitor.instrument('consumer.consumed', caller: self) do
+        consume
+      end
 
-    # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
-    attr_reader :topic
-    # @return [Karafka::Params:ParamsBatch] current params batch
-    attr_accessor :params_batch
+      coordinator.consumption(self).success!
+    rescue StandardError => e
+      coordinator.consumption(self).failure!
 
-    # Assigns a topic to a consumer and builds up proper consumer functionalities
-    # so that it can cooperate with the topic settings
-    # @param topic [Karafka::Routing::Topic]
-    def initialize(topic)
-      @topic = topic
-      Consumers::Includer.call(self)
+      Karafka.monitor.instrument(
+        'error.occurred',
+        error: e,
+        caller: self,
+        type: 'consumer.consume.error'
+      )
+    ensure
+      # We need to decrease the number of jobs that this coordinator coordinates as it has
+      # finished
+      coordinator.decrement
     end
 
-    # Executes the default consumer flow.
-    def call
-      process
+    # @private
+    # @note This should not be used by the end users as it is part of the lifecycle of things but
+    #   not as part of the public api.
+    def on_after_consume
+      return if revoked?
+
+      if coordinator.success?
+        coordinator.pause_tracker.reset
+
+        # Mark as consumed only if manual offset management is not on
+        return if topic.manual_offset_management?
+
+        # We use the non-blocking one here. If someone needs the blocking one, they can implement
+        # it with manual offset management
+        mark_as_consumed(messages.last)
+      else
+        pause(@seek_offset || messages.first.offset)
+      end
     end
 
-    private
+    # Trigger method for running on partition revocation.
+    #
+    # @private
+    def on_revoked
+      coordinator.revoke
 
-    # @return [Karafka::Connection::Client] messages consuming client that can be used to
-    #   commit manually offset or pause / stop consumer based on the business logic
-    def client
-      Persistence::Client.read
+      Karafka.monitor.instrument('consumer.revoked', caller: self) do
+        revoked
+      end
+    rescue StandardError => e
+      Karafka.monitor.instrument(
+        'error.occurred',
+        error: e,
+        caller: self,
+        type: 'consumer.revoked.error'
+      )
     end
 
+    # Trigger method for running on shutdown.
+    #
+    # @private
+    def on_shutdown
+      Karafka.monitor.instrument('consumer.shutdown', caller: self) do
+        shutdown
+      end
+    rescue StandardError => e
+      Karafka.monitor.instrument(
+        'error.occurred',
+        error: e,
+        caller: self,
+        type: 'consumer.shutdown.error'
+      )
+    end
+
+    private
+
     # Method that will perform business logic on data received from Kafka (it will consume
     # the data)
     # @note This method needs to be implemented in a subclass. We stub it here as a failover if
@@ -53,5 +113,95 @@ module Karafka
     def consume
       raise NotImplementedError, 'Implement this in a subclass'
     end
+
+    # Method that will be executed when a given topic partition is revoked. You can use it for
+    # some teardown procedures (closing file handler, etc).
+    def revoked; end
+
+    # Method that will be executed when the process is shutting down. You can use it for
+    # some teardown procedures (closing file handler, etc).
+    def shutdown; end
+
+    # Marks message as consumed in an async way.
+    #
+    # @param message [Messages::Message] last successfully processed message.
+    # @return [Boolean] true if we were able to mark the offset, false otherwise. False indicates
+    #   that we were not able and that we have lost the partition.
+    #
+    # @note We keep track of this offset in case we would mark as consumed and get an error when
+    #   processing another message. In a case like this we do not pause on the message we've
+    #   already processed but rather at the next one. This applies to both sync and async
+    #   versions of this method.
+    def mark_as_consumed(message)
+      unless client.mark_as_consumed(message)
+        coordinator.revoke
+
+        return false
+      end
+
+      @seek_offset = message.offset + 1
+
+      true
+    end
+
+    # Marks message as consumed in a sync way.
+    #
+    # @param message [Messages::Message] last successfully processed message.
+    # @return [Boolean] true if we were able to mark the offset, false otherwise. False indicates
+    #   that we were not able and that we have lost the partition.
+    def mark_as_consumed!(message)
+      unless client.mark_as_consumed!(message)
+        coordinator.revoke
+
+        return false
+      end
+
+      @seek_offset = message.offset + 1
+
+      true
+    end
+
+    # Pauses processing on a given offset for the current topic partition
+    #
+    # After given partition is resumed, it will continue processing from the given offset
+    # @param offset [Integer] offset from which we want to restart the processing
+    # @param timeout [Integer, nil] how long in milliseconds do we want to pause or nil to use the
+    #   default exponential pausing strategy defined for retries
+    def pause(offset, timeout = nil)
+      timeout ? coordinator.pause_tracker.pause(timeout) : coordinator.pause_tracker.pause
+
+      client.pause(
+        messages.metadata.topic,
+        messages.metadata.partition,
+        offset
+      )
+    end
+
+    # Resumes processing of the current topic partition
+    def resume
+      # This is sufficient to expire a partition pause, as once expired, the partition will be
+      # resumed by the listener thread before the next poll.
+      coordinator.pause_tracker.expire
+    end
+
+    # Seeks in the context of current topic and partition
+    #
+    # @param offset [Integer] offset where we want to seek
+    def seek(offset)
+      client.seek(
+        Karafka::Messages::Seek.new(
+          messages.metadata.topic,
+          messages.metadata.partition,
+          offset
+        )
+      )
+    end
+
+    # @return [Boolean] true if partition was revoked from the current consumer
+    # @note We know that a partition got revoked because when we try to mark a message as
+    #   consumed, unless it is successful, it will return false
+    def revoked?
+      coordinator.revoked?
+    end
   end
 end
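
Putting the new consumer API together, a sketch of a custom consumer (class and model names are hypothetical):

class EventsConsumer < Karafka::BaseConsumer
  def consume
    messages.each do |message|
      Event.create!(message.payload) # hypothetical persistence

      # Optional with automatic offset management; required when
      # manual_offset_management is enabled for the topic
      mark_as_consumed(message)
    end
  end

  def revoked
    # Partition was taken over by another process; release per-partition resources
  end

  def shutdown
    # Process is stopping; flush buffers, close handles
  end
end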

data/lib/karafka/cli/base.rb CHANGED
@@ -43,16 +43,16 @@ module Karafka
     end
 
     # Allows to set description of a given cli command
-    # @param args [Array] All the arguments that Thor desc method accepts
-    def desc(*args)
-      @desc ||= args
+    # @param desc [String] Description of a given cli command
+    def desc(desc)
+      @desc ||= desc
     end
 
     # This method will bind a given Cli command into Karafka Cli
     # This method is a wrapper to the way Thor defines its commands
     # @param cli_class [Karafka::Cli] Karafka cli_class
     def bind_to(cli_class)
-      cli_class.desc name, *@desc
+      cli_class.desc name, @desc
 
       (@options || []).each { |option| cli_class.option(*option) }
 
data/lib/karafka/cli/info.rb CHANGED
@@ -7,24 +7,58 @@ module Karafka
     class Info < Base
       desc 'Print configuration details and other options of your application'
 
+      # Nice karafka banner
+      BANNER = <<~BANNER
+
+        @@@                                             @@@@@  @@@
+        @@@                                            @@@     @@@
+        @@@  @@@  @@@@@@@@@   @@@ @@@   @@@@@@@@@ @@@@@@@@@@@  @@@  @@@@  @@@@@@@@@
+        @@@@@@    @@@   @@@   @@@@@    @@@   @@@    @@@        @@@@@@@    @@@   @@@
+        @@@@@@@@  @@@@@@@@@   @@@      @@@@@@@@@    @@@        @@@@@@@@   @@@@@@@@@
+        @@@  @@@@ @@@@@@@@@@  @@@      @@@@@@@@@@   @@@        @@@  @@@@ @@@@@@@@@@
+
+      BANNER
+
       # Print configuration details and other options of your application
       def call
+        Karafka.logger.info(BANNER)
+        Karafka.logger.info((core_info + license_info).join("\n"))
+      end
+
+      private
+
+      # @return [Array<String>] core framework related info
+      def core_info
         config = Karafka::App.config
 
-        info = [
-          "Karafka version: #{Karafka::VERSION}",
+        postfix = Karafka.pro? ? ' + Pro' : ''
+
+        [
+          "Karafka version: #{Karafka::VERSION}#{postfix}",
           "Ruby version: #{RUBY_VERSION}",
-          "Ruby-kafka version: #{::Kafka::VERSION}",
+          "Rdkafka version: #{::Rdkafka::VERSION}",
+          "Subscription groups count: #{Karafka::App.subscription_groups.size}",
+          "Workers count: #{Karafka::App.config.concurrency}",
           "Application client id: #{config.client_id}",
-          "Backend: #{config.backend}",
-          "Batch fetching: #{config.batch_fetching}",
-          "Batch consuming: #{config.batch_consuming}",
           "Boot file: #{Karafka.boot_file}",
-          "Environment: #{Karafka.env}",
-          "Kafka seed brokers: #{config.kafka.seed_brokers}"
+          "Environment: #{Karafka.env}"
         ]
+      end
+
+      # @return [Array<String>] license related info
+      def license_info
+        config = Karafka::App.config
 
-        Karafka.logger.info(info.join("\n"))
+        if Karafka.pro?
+          [
+            'License: Commercial',
+            "License entity: #{config.license.entity}"
+          ]
+        else
+          [
+            'License: LGPL-3.0'
+          ]
+        end
       end
     end
   end

data/lib/karafka/cli/install.rb CHANGED
@@ -7,24 +7,23 @@ module Karafka
   class Cli < Thor
     # Install Karafka Cli action
     class Install < Base
+      include Helpers::Colorize
+
       desc 'Install all required things for Karafka application in current directory'
 
       # Directories created by default
       INSTALL_DIRS = %w[
         app/consumers
-        app/responders
-        app/workers
         config
-        lib
         log
-        tmp/pids
+        lib
       ].freeze
 
       # Where should we map proper files from templates
       INSTALL_FILES_MAP = {
         'karafka.rb.erb' => Karafka.boot_file.basename,
         'application_consumer.rb.erb' => 'app/consumers/application_consumer.rb',
-        'application_responder.rb.erb' => 'app/responders/application_responder.rb'
+        'example_consumer.rb.erb' => 'app/consumers/example_consumer.rb'
       }.freeze
 
       # @param args [Array] all the things that Thor CLI accepts
@@ -35,6 +34,7 @@ module Karafka
             Bundler.default_lockfile
           )
         ).dependencies
+
         @rails = dependencies.key?('railties') || dependencies.key?('rails')
       end
 
@@ -44,16 +44,25 @@ module Karafka
           FileUtils.mkdir_p Karafka.root.join(dir)
         end
 
+        puts
+        puts 'Installing Karafka framework...'
+        puts 'Ruby on Rails detected...' if rails?
+        puts
+
        INSTALL_FILES_MAP.each do |source, target|
-          target = Karafka.root.join(target)
+          pathed_target = Karafka.root.join(target)
 
           template = File.read(Karafka.core_root.join("templates/#{source}"))
-          # @todo Replace with the keyword argument version once we don't have to support
-          #   Ruby < 2.6
-          render = ::ERB.new(template, nil, '-').result(binding)
+          render = ::ERB.new(template, trim_mode: '-').result(binding)
+
+          File.open(pathed_target, 'w') { |file| file.write(render) }
 
-          File.open(target, 'w') { |file| file.write(render) }
+          puts "#{green('Created')} #{target}"
         end
+
+        puts
+        puts("Installation #{green('completed')}. Have fun!")
+        puts
       end
 
       # @return [Boolean] true if we have Rails loaded