karafka 1.1.0 → 1.3.0

Files changed (114)
  1. checksums.yaml +5 -5
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/.travis.yml +35 -16
  12. data/CHANGELOG.md +151 -2
  13. data/CONTRIBUTING.md +6 -7
  14. data/Gemfile +3 -3
  15. data/Gemfile.lock +96 -70
  16. data/README.md +29 -23
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +25 -0
  19. data/config/errors.yml +38 -5
  20. data/karafka.gemspec +19 -10
  21. data/lib/karafka.rb +15 -12
  22. data/lib/karafka/app.rb +19 -18
  23. data/lib/karafka/attributes_map.rb +15 -14
  24. data/lib/karafka/backends/inline.rb +1 -2
  25. data/lib/karafka/base_consumer.rb +57 -0
  26. data/lib/karafka/base_responder.rb +72 -31
  27. data/lib/karafka/cli.rb +1 -1
  28. data/lib/karafka/cli/console.rb +11 -9
  29. data/lib/karafka/cli/flow.rb +0 -1
  30. data/lib/karafka/cli/info.rb +3 -1
  31. data/lib/karafka/cli/install.rb +29 -8
  32. data/lib/karafka/cli/server.rb +11 -7
  33. data/lib/karafka/code_reloader.rb +67 -0
  34. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +67 -24
  35. data/lib/karafka/connection/batch_delegator.rb +51 -0
  36. data/lib/karafka/connection/builder.rb +16 -0
  37. data/lib/karafka/connection/client.rb +117 -0
  38. data/lib/karafka/connection/listener.rb +37 -17
  39. data/lib/karafka/connection/message_delegator.rb +36 -0
  40. data/lib/karafka/consumers/callbacks.rb +71 -0
  41. data/lib/karafka/consumers/includer.rb +63 -0
  42. data/lib/karafka/consumers/metadata.rb +10 -0
  43. data/lib/karafka/consumers/responders.rb +24 -0
  44. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  45. data/lib/karafka/contracts.rb +10 -0
  46. data/lib/karafka/contracts/config.rb +21 -0
  47. data/lib/karafka/contracts/consumer_group.rb +206 -0
  48. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  49. data/lib/karafka/contracts/responder_usage.rb +54 -0
  50. data/lib/karafka/contracts/server_cli_options.rb +29 -0
  51. data/lib/karafka/errors.rb +23 -15
  52. data/lib/karafka/fetcher.rb +6 -12
  53. data/lib/karafka/helpers/class_matcher.rb +19 -9
  54. data/lib/karafka/helpers/config_retriever.rb +3 -3
  55. data/lib/karafka/helpers/inflector.rb +26 -0
  56. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  57. data/lib/karafka/instrumentation/logger.rb +57 -0
  58. data/lib/karafka/instrumentation/monitor.rb +70 -0
  59. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  60. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  61. data/lib/karafka/params/builders/metadata.rb +33 -0
  62. data/lib/karafka/params/builders/params.rb +36 -0
  63. data/lib/karafka/params/builders/params_batch.rb +25 -0
  64. data/lib/karafka/params/metadata.rb +35 -0
  65. data/lib/karafka/params/params.rb +35 -95
  66. data/lib/karafka/params/params_batch.rb +38 -18
  67. data/lib/karafka/patches/ruby_kafka.rb +25 -12
  68. data/lib/karafka/persistence/client.rb +29 -0
  69. data/lib/karafka/persistence/consumers.rb +45 -0
  70. data/lib/karafka/persistence/topics.rb +48 -0
  71. data/lib/karafka/process.rb +5 -8
  72. data/lib/karafka/responders/builder.rb +15 -14
  73. data/lib/karafka/responders/topic.rb +6 -8
  74. data/lib/karafka/routing/builder.rb +37 -9
  75. data/lib/karafka/routing/consumer_group.rb +1 -1
  76. data/lib/karafka/routing/consumer_mapper.rb +10 -9
  77. data/lib/karafka/routing/proxy.rb +10 -1
  78. data/lib/karafka/routing/router.rb +1 -1
  79. data/lib/karafka/routing/topic.rb +8 -12
  80. data/lib/karafka/routing/topic_mapper.rb +16 -18
  81. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  82. data/lib/karafka/serialization/json/serializer.rb +31 -0
  83. data/lib/karafka/server.rb +45 -24
  84. data/lib/karafka/setup/config.rb +95 -37
  85. data/lib/karafka/setup/configurators/water_drop.rb +12 -5
  86. data/lib/karafka/setup/dsl.rb +21 -0
  87. data/lib/karafka/status.rb +7 -3
  88. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.erb} +2 -2
  89. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  90. data/lib/karafka/templates/karafka.rb.erb +92 -0
  91. data/lib/karafka/version.rb +1 -1
  92. metadata +126 -57
  93. metadata.gz.sig +0 -0
  94. data/.github/ISSUE_TEMPLATE.md +0 -2
  95. data/lib/karafka/base_controller.rb +0 -60
  96. data/lib/karafka/connection/consumer.rb +0 -121
  97. data/lib/karafka/connection/processor.rb +0 -61
  98. data/lib/karafka/controllers/callbacks.rb +0 -54
  99. data/lib/karafka/controllers/includer.rb +0 -51
  100. data/lib/karafka/controllers/responders.rb +0 -19
  101. data/lib/karafka/loader.rb +0 -29
  102. data/lib/karafka/logger.rb +0 -53
  103. data/lib/karafka/monitor.rb +0 -98
  104. data/lib/karafka/parsers/json.rb +0 -38
  105. data/lib/karafka/patches/dry_configurable.rb +0 -31
  106. data/lib/karafka/persistence/consumer.rb +0 -25
  107. data/lib/karafka/persistence/controller.rb +0 -38
  108. data/lib/karafka/schemas/config.rb +0 -21
  109. data/lib/karafka/schemas/consumer_group.rb +0 -65
  110. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  111. data/lib/karafka/schemas/responder_usage.rb +0 -39
  112. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  113. data/lib/karafka/setup/configurators/base.rb +0 -35
  114. data/lib/karafka/templates/karafka.rb.example +0 -41
data/lib/karafka/contracts/responder_usage.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Contracts
+    # Validator to check responder topic usage
+    class ResponderUsageTopic < Dry::Validation::Contract
+      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
+
+      params do
+        required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
+        required(:required).filled(:bool?)
+        required(:usage_count).filled(:int?, gteq?: 0)
+        required(:registered).filled(eql?: true)
+        required(:async).filled(:bool?)
+        required(:serializer).filled
+      end
+
+      rule(:required, :usage_count) do
+        key(:name).failure(:required_usage_count) if values[:required] && values[:usage_count] < 1
+      end
+    end
+
+    # Validator to check that everything in a responder flow matches responder rules
+    class ResponderUsage < Dry::Validation::Contract
+      include Dry::Core::Constants
+
+      # Contract for verifying the topic usage details
+      TOPIC_CONTRACT = ResponderUsageTopic.new.freeze
+
+      private_constant :TOPIC_CONTRACT
+
+      params do
+        required(:used_topics)
+        required(:registered_topics)
+      end
+
+      rule(:used_topics) do
+        (value || EMPTY_ARRAY).each do |used_topic|
+          TOPIC_CONTRACT.call(used_topic).errors.each do |error|
+            key([:used_topics, used_topic, error.path[0]]).failure(error.text)
+          end
+        end
+      end
+
+      rule(:registered_topics) do
+        (value || EMPTY_ARRAY).each do |used_topic|
+          TOPIC_CONTRACT.call(used_topic).errors.each do |error|
+            key([:registered_topics, used_topic, error.path[0]]).failure(error.text)
+          end
+        end
+      end
+    end
+  end
+end
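For reference, a minimal sketch of how this contract could be exercised on its own; the topic attributes below are made up for illustration, while in the gem the input is normally assembled and validated by the responder itself:

  usage = Karafka::Contracts::ResponderUsage.new.call(
    registered_topics: [
      { name: 'events', required: true, usage_count: 1,
        registered: true, async: false, serializer: ->(obj) { obj.to_json } }
    ],
    used_topics: [
      { name: 'events', required: true, usage_count: 1,
        registered: true, async: false, serializer: ->(obj) { obj.to_json } }
    ]
  )
  # Succeeds only when every used topic satisfies the ResponderUsageTopic rules
  usage.success? #=> true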
data/lib/karafka/contracts/server_cli_options.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Contracts
+    # Contract for validating correctness of the server cli command options
+    # We validate some basics + the list of consumer_groups on which we want to use, to make
+    # sure that all of them are defined, plus that a pidfile does not exist
+    class ServerCliOptions < Dry::Validation::Contract
+      params do
+        optional(:pid).filled(:str?)
+        optional(:daemon).filled(:bool?)
+        optional(:consumer_groups).value(:array, :filled?)
+      end
+
+      rule(:pid) do
+        key(:pid).failure(:pid_already_exists) if value && File.exist?(value)
+      end
+
+      rule(:consumer_groups) do
+        # If there were no consumer_groups declared in the server cli, it means that we will
+        # run all of them and no need to validate them here at all
+        if !value.nil? &&
+           !(value - Karafka::App.config.internal.routing_builder.map(&:name)).empty?
+          key(:consumer_groups).failure(:consumer_groups_inclusion)
+        end
+      end
+    end
+  end
+end
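A rough sketch of invoking this contract directly, with made-up option values (normally the `karafka server` CLI command builds and validates this hash itself):

  result = Karafka::Contracts::ServerCliOptions.new.call(
    pid: 'tmp/pids/karafka.pid',
    daemon: true,
    consumer_groups: %w[batched_group]
  )
  # Fails when the pidfile already exists or when a listed group is not defined in the routing
  result.success?
  result.errors.to_h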
data/lib/karafka/errors.rb
@@ -6,17 +6,18 @@ module Karafka
     # Base class for all the Karafka internal errors
     BaseError = Class.new(StandardError)
 
-    # Should be raised when we attemp to parse incoming params but parsing fails
-    # If this error (or its descendant) is detected, we will pass the raw message
-    # into params and proceed further
-    ParserError = Class.new(BaseError)
+    # Should be raised when we have data that we cannot serialize
+    SerializationError = Class.new(BaseError)
+
+    # Should be raised when we tried to deserialize incoming data but we failed
+    DeserializationError = Class.new(BaseError)
 
     # Raised when router receives topic name which does not correspond with any routes
     # This can only happen in a case when:
-    #   - you've received a message and we cannot match it with a controller
+    #   - you've received a message and we cannot match it with a consumer
     #   - you've changed the routing, so router can no longer associate your topic to
-    #     any controller
-    #   - or in a case when you do a lot of metaprogramming and you change routing/etc on runtime
+    #     any consumer
+    #   - or in a case when you do a lot of meta-programming and you change routing/etc on runtime
     #
     # In case this happens, you will have to create a temporary route that will allow
     # you to "eat" everything from the Sidekiq queue.
@@ -25,19 +26,26 @@ module Karafka
 
     # Raised when we don't use or use responder not in the way it expected to based on the
     # topics usage definitions
-    InvalidResponderUsage = Class.new(BaseError)
+    InvalidResponderUsageError = Class.new(BaseError)
+
+    # Raised when options that we provide to the responder to respond aren't what the contract
+    # requires
+    InvalidResponderMessageOptionsError = Class.new(BaseError)
 
-    # Raised when configuration doesn't match with validation schema
-    InvalidConfiguration = Class.new(BaseError)
+    # Raised when configuration doesn't match with validation contract
+    InvalidConfigurationError = Class.new(BaseError)
 
-    # Raised when we try to use Karafka CLI commands (except install) without a bootfile
-    MissingBootFile = Class.new(BaseError)
+    # Raised when we try to use Karafka CLI commands (except install) without a boot file
+    MissingBootFileError = Class.new(BaseError)
 
     # Raised when we want to read a persisted thread messages consumer but it is unavailable
     # This should never happen and if it does, please contact us
-    MissingConsumer = Class.new(BaseError)
+    MissingClientError = Class.new(BaseError)
+
+    # Raised when we want to hook up to an event that is not registered and supported
+    UnregisteredMonitorEventError = Class.new(BaseError)
 
-    # Raised when we attemp to pause a partition but the pause timeout is equal to 0
-    InvalidPauseTimeout = Class.new(BaseError)
+    # Raised when we've waited enough for shutting down a non-responsive process
+    ForcefulShutdownError = Class.new(BaseError)
   end
 end
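With ParserError gone, application code that guarded against malformed payloads needs the new names; a hedged sketch of what that might look like in a consumer (the consumer class and the logging it does are illustrative, and ApplicationConsumer is assumed to come from the generated application skeleton):

  class EventsConsumer < ApplicationConsumer
    def consume
      params_batch.each do |params|
        # Deserialization is lazy in 1.3, so a malformed payload surfaces on #payload access
        Karafka.logger.info("Consumed: #{params.payload}")
      rescue Karafka::Errors::DeserializationError => e
        Karafka.logger.error("Skipping message that could not be deserialized: #{e}")
      end
    end
  end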
data/lib/karafka/fetcher.rb
@@ -8,22 +8,24 @@ module Karafka
     # Starts listening on all the listeners asynchronously
     # Fetch loop should never end, which means that we won't create more actor clusters
     # so we don't have to terminate them
-    def fetch_loop
+    def call
      threads = listeners.map do |listener|
        # We abort on exception because there should be an exception handling developed for
        # each listener running in separate threads, so the exceptions should never leak
        # and if that happens, it means that something really bad happened and we should stop
        # the whole process
        Thread
-          .new { listener.fetch_loop(processor) }
+          .new { listener.call }
          .tap { |thread| thread.abort_on_exception = true }
      end
 
+      # We aggregate threads here for a supervised shutdown process
+      threads.each { |thread| Karafka::Server.consumer_threads << thread }
      threads.each(&:join)
      # If anything crashes here, we need to raise the error and crush the runner because it means
-      # that something really bad happened
+      # that something terrible happened
    rescue StandardError => e
-      Karafka.monitor.notice_error(self.class, e)
+      Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
      Karafka::App.stop!
      raise e
    end
@@ -36,13 +38,5 @@ module Karafka
        Karafka::Connection::Listener.new(consumer_group)
      end
    end
-
-    # @return [Proc] proc that should be processed when a messages arrive
-    # @yieldparam messages [Array<Kafka::FetchedMessage>] messages from kafka (raw)
-    def processor
-      lambda do |group_id, messages|
-        Karafka::Connection::Processor.process(group_id, messages)
-      end
-    end
  end
end
data/lib/karafka/helpers/class_matcher.rb
@@ -4,20 +4,22 @@ module Karafka
   module Helpers
     # Class used to autodetect corresponding classes that are internally inside Karafka framework
     # It is used among others to match:
-    #   controller => responder
+    #   consumer => responder
     class ClassMatcher
-      # Regexp used to remove any non classy like characters that might be in the controller
+      # Regexp used to remove any non classy like characters that might be in the consumer
       # class name (if defined dynamically, etc)
-      CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}
+      CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}.freeze
+
+      private_constant :CONSTANT_REGEXP
 
      # @param klass [Class] class to which we want to find a corresponding class
      # @param from [String] what type of object is it (based on postfix name part)
      # @param to [String] what are we looking for (based on a postfix name part)
-      # @example Controller that has a corresponding responder
-      #   matcher = Karafka::Helpers::ClassMatcher.new(SuperController, 'Controller', 'Responder')
+      # @example Consumer that has a corresponding responder
+      #   matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, 'Consumer', 'Responder')
      #   matcher.match #=> SuperResponder
-      # @example Controller without a corresponding responder
-      #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Controller, 'Controller', 'Responder')
+      # @example Consumer without a corresponding responder
+      #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Consumer, 'Consumer', 'Responder')
      #   matcher.match #=> nil
      def initialize(klass, from:, to:)
        @klass = klass
@@ -30,18 +32,25 @@ module Karafka
      def match
        return nil if name.empty?
        return nil unless scope.const_defined?(name)
+
        matching = scope.const_get(name)
        same_scope?(matching) ? matching : nil
      end
 
      # @return [String] name of a new class that we're looking for
      # @note This method returns name of a class without a namespace
-      # @example From SuperController matching responder
+      # @example From SuperConsumer matching responder
      #   matcher.name #=> 'SuperResponder'
-      # @example From Namespaced::Super2Controller matching responder
+      # @example From Namespaced::Super2Consumer matching responder
      #   matcher.name #=> Super2Responder
      def name
        inflected = @klass.to_s.split('::').last.to_s
+        # We inject the from into the name just in case it is missing as in a situation like
+        # that it would just sanitize the name without adding the "to" postfix.
+        # It could create cases when we want to build for example a responder to a consumer
+        # that does not have the "Consumer" postfix and would do nothing returning the same name.
+        # That would be bad as the matching classes shouldn't be matched to themselves.
+        inflected << @from unless inflected.include?(@from)
        inflected.gsub!(@from, @to)
        inflected.gsub!(CONSTANT_REGEXP, '')
        inflected
@@ -65,6 +74,7 @@ module Karafka
      def scope_of(klass)
        enclosing = klass.to_s.split('::')[0...-1]
        return ::Object if enclosing.empty?
+
        ::Object.const_get(enclosing.join('::'))
      end
 
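The injected @from postfix changes how non-standard class names resolve; a quick sketch based directly on the logic above (the Events class is hypothetical):

  matcher = Karafka::Helpers::ClassMatcher.new(Events, from: 'Consumer', to: 'Responder')
  # 'Consumer' is appended before the substitution, so the matcher now looks for
  # EventsResponder instead of returning the unchanged 'Events' name
  matcher.name #=> 'EventsResponder'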
data/lib/karafka/helpers/config_retriever.rb
@@ -5,7 +5,7 @@ module Karafka
     # A helper method that allows us to build methods that try to get a given
     # attribute from its instance value and if it fails, will fallback to
     # the default config or config.kafka value for a given attribute.
-    # It is used to simplify the checkings.
+    # It is used to simplify the checks.
     # @note Worth noticing, that the value might be equal to false, so even
     #   then we need to return it. That's why we check for nil?
     # @example Define config retried attribute for start_from_beginning
@@ -33,9 +33,9 @@ module Karafka
         return current_value unless current_value.nil?
 
         value = if Karafka::App.config.respond_to?(attribute)
-                  Karafka::App.config.public_send(attribute)
+                  Karafka::App.config.send(attribute)
                 else
-                  Karafka::App.config.kafka.public_send(attribute)
+                  Karafka::App.config.kafka.send(attribute)
                 end
 
         instance_variable_set(:"@#{attribute}", value)
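For context, a heavily hedged sketch of how this helper is meant to be used, assuming it is extended into a class the way routing topics use it (the class below is hypothetical):

  class ExampleTopic
    extend Karafka::Helpers::ConfigRetriever

    # Defines #start_from_beginning, preferring an instance value and falling back
    # to Karafka::App.config.kafka.start_from_beginning when none was set
    config_retriever_for :start_from_beginning
  end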
data/lib/karafka/helpers/inflector.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Helpers
+    # Inflector provides inflection for the whole Karafka framework with additional inflection
+    # caching (due to the fact, that Dry::Inflector is slow)
+    module Inflector
+      # What inflection engine do we want to use
+      ENGINE = Dry::Inflector.new
+
+      @map = Concurrent::Hash.new
+
+      private_constant :ENGINE
+
+      class << self
+        # @param string [String] string that we want to convert to our underscore format
+        # @return [String] inflected string
+        # @example
+        #   Karafka::Helpers::Inflector.map('Module/ControllerName') #=> 'module_controller_name'
+        def map(string)
+          @map[string] ||= ENGINE.underscore(string).tr('/', '_')
+        end
+      end
+    end
+  end
+end
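Because results are memoized in the Concurrent::Hash, repeated lookups of the same string skip Dry::Inflector entirely; for example:

  Karafka::Helpers::Inflector.map('Videos::DetailsConsumer') #=> 'videos_details_consumer'
  # The second call for the same string is just a hash lookup
  Karafka::Helpers::Inflector.map('Videos::DetailsConsumer') #=> 'videos_details_consumer'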
data/lib/karafka/helpers/multi_delegator.rb
@@ -7,7 +7,6 @@ module Karafka
     # Multidelegator is used to delegate calls to multiple targets
     class MultiDelegator
       # @param targets to which we want to delegate methods
-      #
       def initialize(*targets)
         @targets = targets
       end
data/lib/karafka/instrumentation/logger.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Instrumentation
+    # Default logger for Event Delegator
+    # @note It uses ::Logger features - providing basic logging
+    class Logger < ::Logger
+      # Map containing information about log level for given environment
+      ENV_MAP = {
+        'production' => ::Logger::ERROR,
+        'test' => ::Logger::ERROR,
+        'development' => ::Logger::INFO,
+        'debug' => ::Logger::DEBUG,
+        'default' => ::Logger::INFO
+      }.freeze
+
+      private_constant :ENV_MAP
+
+      # Creates a new instance of logger ensuring that it has a place to write to
+      # @param _args Any arguments that we don't care about but that are needed in order to
+      #   make this logger compatible with the default Ruby one
+      def initialize(*_args)
+        ensure_dir_exists
+        super(target)
+        self.level = ENV_MAP[Karafka.env] || ENV_MAP['default']
+      end
+
+      private
+
+      # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
+      #   to which we will be writing logs
+      # We use this approach to log stuff to file and to the STDOUT at the same time
+      def target
+        Karafka::Helpers::MultiDelegator
+          .delegate(:write, :close)
+          .to(STDOUT, file)
+      end
+
+      # Makes sure the log directory exists
+      def ensure_dir_exists
+        dir = File.dirname(log_path)
+        FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
+      end
+
+      # @return [Pathname] Path to a file to which we should log
+      def log_path
+        @log_path ||= Karafka::App.root.join("log/#{Karafka.env}.log")
+      end
+
+      # @return [File] file to which we want to write our logs
+      # @note File is being opened in append mode ('a')
+      def file
+        @file ||= File.open(log_path, 'a')
+      end
+    end
+  end
+end
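Since the logger now lives under instrumentation and is exposed as a plain config setting, it can be swapped for any ::Logger-compatible object; a sketch assuming the standard setup block from the generated karafka.rb:

  class KarafkaApp < Karafka::App
    setup do |config|
      # Replace the default file + STDOUT multi-delegating logger
      config.logger = Logger.new(STDOUT)
    end
  end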
data/lib/karafka/instrumentation/monitor.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Namespace for all the things related with Karafka instrumentation process
+  module Instrumentation
+    # Monitor is used to hookup external monitoring services to monitor how Karafka works
+    # It provides a standardized API for checking incoming messages/enqueueing etc
+    # Since it is a pub-sub based on dry-monitor, you can use as many subscribers/loggers at the
+    # same time, which means that you might have for example file logging and NewRelic at the same
+    # time
+    # @note This class acts as a singleton because we are only permitted to have single monitor
+    #   per running process (just as logger)
+    class Monitor < Dry::Monitor::Notifications
+      # List of events that we support in the system and to which a monitor client can hook up
+      # @note The non-error ones support timestamp benchmarking
+      # @note Depending on Karafka extensions and additional engines, this might not be the
+      #   complete list of all the events. Please use the #available_events on fully loaded
+      #   Karafka system to determine all of the events you can use.
+      # Last 4 events are from WaterDrop but for convenience we use the same monitor for the
+      # whole karafka ecosystem
+      BASE_EVENTS = %w[
+        params.params.deserialize
+        params.params.deserialize.error
+        connection.listener.before_fetch_loop
+        connection.listener.fetch_loop
+        connection.listener.fetch_loop.error
+        connection.client.fetch_loop.error
+        connection.batch_delegator.call
+        connection.message_delegator.call
+        fetcher.call.error
+        backends.inline.process
+        process.notice_signal
+        consumers.responders.respond_with
+        async_producer.call.error
+        async_producer.call.retry
+        sync_producer.call.error
+        sync_producer.call.retry
+        app.initializing
+        app.initialized
+        app.running
+        app.stopping
+        app.stopping.error
+        app.stopped
+      ].freeze
+
+      private_constant :BASE_EVENTS
+
+      # @return [Karafka::Instrumentation::Monitor] monitor instance for system instrumentation
+      def initialize
+        super(:karafka)
+        BASE_EVENTS.each(&method(:register_event))
+      end
+
+      # Allows us to subscribe to events with a code that will be yielded upon events
+      # @param event_name_or_listener [String, Object] name of the event we want to subscribe to
+      #   or a listener if we decide to go with object listener
+      def subscribe(event_name_or_listener)
+        return super unless event_name_or_listener.is_a?(String)
+        return super if available_events.include?(event_name_or_listener)
+
+        raise Errors::UnregisteredMonitorEventError, event_name_or_listener
+      end
+
+      # @return [Array<String>] names of available events to which we can subscribe
+      def available_events
+        __bus__.events.keys
+      end
+    end
+  end
+end
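A sketch of hooking into the new instrumentation bus; subscribing to a name outside the registered list raises the new error, so typos are caught early instead of silently never firing:

  # Block subscription to a registered event
  Karafka.monitor.subscribe('app.stopping') do |event|
    Karafka.logger.warn("Shutting down, payload: #{event.payload}")
  end

  Karafka.monitor.subscribe('app.stoping')
  #=> raises Karafka::Errors::UnregisteredMonitorEventError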
data/lib/karafka/instrumentation/proctitle_listener.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Instrumentation
+    # Listener that sets a proc title with a nice descriptive value
+    class ProctitleListener
+      # Updates proc title to an initializing one
+      # @param _event [Dry::Events::Event] event details including payload
+      def on_app_initializing(_event)
+        setproctitle('initializing')
+      end
+
+      # Updates proc title to a running one
+      # @param _event [Dry::Events::Event] event details including payload
+      def on_app_running(_event)
+        setproctitle('running')
+      end
+
+      # Updates proc title to a stopping one
+      # @param _event [Dry::Events::Event] event details including payload
+      def on_app_stopping(_event)
+        setproctitle('stopping')
+      end
+
+      private
+
+      # Sets a proper proc title with our constant prefix
+      # @param status [String] any status we want to set
+      def setproctitle(status)
+        ::Process.setproctitle(
+          "karafka #{Karafka::App.config.client_id} (#{status})"
+        )
+      end
+    end
+  end
+end
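Object listeners go through the same #subscribe call; the generated karafka.rb wires this one up roughly like so:

  # Listener methods are matched by naming convention: on_app_running handles 'app.running', etc.
  Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)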