karafka 1.1.2 → 1.2.0.beta1

Files changed (67)
  1. checksums.yaml +5 -5
  2. data/.ruby-version +1 -1
  3. data/.travis.yml +1 -0
  4. data/CHANGELOG.md +34 -0
  5. data/Gemfile +1 -2
  6. data/Gemfile.lock +35 -22
  7. data/README.md +4 -3
  8. data/karafka.gemspec +5 -3
  9. data/lib/karafka.rb +4 -5
  10. data/lib/karafka/app.rb +8 -15
  11. data/lib/karafka/attributes_map.rb +1 -1
  12. data/lib/karafka/backends/inline.rb +1 -2
  13. data/lib/karafka/{base_controller.rb → base_consumer.rb} +19 -11
  14. data/lib/karafka/base_responder.rb +33 -14
  15. data/lib/karafka/callbacks.rb +30 -0
  16. data/lib/karafka/callbacks/config.rb +22 -0
  17. data/lib/karafka/callbacks/dsl.rb +16 -0
  18. data/lib/karafka/cli/install.rb +2 -3
  19. data/lib/karafka/cli/server.rb +0 -1
  20. data/lib/karafka/connection/{consumer.rb → client.rb} +25 -33
  21. data/lib/karafka/connection/config_adapter.rb +14 -6
  22. data/lib/karafka/connection/delegator.rb +46 -0
  23. data/lib/karafka/connection/listener.rb +22 -13
  24. data/lib/karafka/{controllers → consumers}/callbacks.rb +9 -9
  25. data/lib/karafka/consumers/includer.rb +51 -0
  26. data/lib/karafka/consumers/responders.rb +24 -0
  27. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  28. data/lib/karafka/errors.rb +10 -3
  29. data/lib/karafka/fetcher.rb +30 -34
  30. data/lib/karafka/helpers/class_matcher.rb +8 -8
  31. data/lib/karafka/helpers/config_retriever.rb +2 -2
  32. data/lib/karafka/instrumentation/listener.rb +97 -0
  33. data/lib/karafka/instrumentation/logger.rb +55 -0
  34. data/lib/karafka/instrumentation/monitor.rb +62 -0
  35. data/lib/karafka/loader.rb +0 -1
  36. data/lib/karafka/params/{params.rb → dsl.rb} +69 -44
  37. data/lib/karafka/params/params_batch.rb +2 -2
  38. data/lib/karafka/patches/dry_configurable.rb +6 -2
  39. data/lib/karafka/patches/ruby_kafka.rb +10 -10
  40. data/lib/karafka/persistence/client.rb +25 -0
  41. data/lib/karafka/persistence/consumer.rb +27 -14
  42. data/lib/karafka/persistence/topic.rb +29 -0
  43. data/lib/karafka/process.rb +5 -4
  44. data/lib/karafka/responders/builder.rb +15 -14
  45. data/lib/karafka/routing/builder.rb +1 -1
  46. data/lib/karafka/routing/consumer_mapper.rb +3 -2
  47. data/lib/karafka/routing/router.rb +1 -1
  48. data/lib/karafka/routing/topic.rb +5 -5
  49. data/lib/karafka/schemas/config.rb +3 -0
  50. data/lib/karafka/schemas/consumer_group.rb +14 -2
  51. data/lib/karafka/schemas/consumer_group_topic.rb +1 -1
  52. data/lib/karafka/server.rb +33 -5
  53. data/lib/karafka/setup/config.rb +45 -21
  54. data/lib/karafka/setup/configurators/base.rb +6 -12
  55. data/lib/karafka/setup/configurators/params.rb +25 -0
  56. data/lib/karafka/setup/configurators/water_drop.rb +6 -3
  57. data/lib/karafka/setup/dsl.rb +22 -0
  58. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.example} +2 -3
  59. data/lib/karafka/templates/karafka.rb.example +14 -3
  60. data/lib/karafka/version.rb +1 -1
  61. metadata +58 -23
  62. data/lib/karafka/connection/processor.rb +0 -61
  63. data/lib/karafka/controllers/includer.rb +0 -51
  64. data/lib/karafka/controllers/responders.rb +0 -19
  65. data/lib/karafka/logger.rb +0 -53
  66. data/lib/karafka/monitor.rb +0 -98
  67. data/lib/karafka/persistence/controller.rb +0 -38
data/lib/karafka/params/params_batch.rb

@@ -4,7 +4,7 @@ module Karafka
   module Params
     # Params batch represents a set of messages received from Kafka.
     # @note Params internally are lazy loaded before first use. That way we can skip parsing
-    #   process if we have after_fetched that rejects some incoming messages without using params
+    #   process if we have after_fetch that rejects some incoming messages without using params
     #   It can be also used when handling really heavy data (in terms of parsing).
     class ParamsBatch
       include Enumerable
@@ -13,7 +13,7 @@ module Karafka
     # @param messages_batch [Array<Kafka::FetchedMessage>] messages batch
     # @param topic_parser [Class] topic parser for unparsing messages values
     def initialize(messages_batch, topic_parser)
-      @params_batch = messages_batch.map do |message|
+      @params_batch = messages_batch.map! do |message|
        Karafka::Params::Params.build(message, topic_parser)
      end
    end
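
The switch from map to map! mutates the fetched batch in place instead of allocating a second array of the same size. A minimal sketch of the difference (plain Ruby, no Karafka involved):

    messages = ['a', 'b', 'c']

    copy = messages.map { |m| m.upcase } # allocates a brand new array
    messages.map! { |m| m.upcase }       # rewrites the receiver in place

    messages # => ["A", "B", "C"] - the original batch array now holds the built params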
data/lib/karafka/patches/dry_configurable.rb

@@ -19,11 +19,15 @@ module Karafka
       private

       # Method that rebuilds a given accessor, so when it consists a proc value, it will
-      #   evaluate it upon return
+      #   evaluate it upon return for blocks that don't require any arguments, otherwise
+      #   it will return the block
       # @param method_name [Symbol] name of an accessor that we want to rebuild
       def rebuild(method_name)
         define_singleton_method method_name do
-          super().is_a?(Proc) ? super().call : super()
+          value = super()
+          return value unless value.is_a?(Proc)
+          return value unless value.parameters.empty?
+          value.call
         end
       end
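
Besides calling super() only once per read, the patched accessor now encodes a rule: a zero-argument proc is treated as a lazily evaluated setting and is called on access, while a proc that expects arguments (for example a parser stored in config) is handed back untouched. A standalone sketch of that rule (the resolve helper is illustrative, not part of Karafka):

    # Illustrative helper mirroring the patched accessor logic above
    def resolve(value)
      return value unless value.is_a?(Proc)
      return value unless value.parameters.empty?

      value.call
    end

    lazy_setting = -> { Time.now.to_i }   # no parameters => evaluated on access
    parser       = ->(raw) { raw.upcase } # expects an argument => returned as-is

    resolve(lazy_setting) # => a concrete integer value
    resolve(parser)       # => the proc itself, still callable later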
data/lib/karafka/patches/ruby_kafka.rb

@@ -2,7 +2,7 @@

 module Karafka
   module Patches
-    # Batches for Ruby Kafka gem
+    # Patches for Ruby Kafka gem
     module RubyKafka
       # This patch allows us to inject business logic in between fetches and before the consumer
       # stop, so we can perform stop commit or anything else that we need since
@@ -13,19 +13,19 @@ module Karafka
       #   thread)
       def consumer_loop
         super do
-          controllers = Karafka::Persistence::Controller
-                        .all
-                        .values
-                        .flat_map(&:values)
-                        .select { |ctrl| ctrl.respond_to?(:run_callbacks) }
+          consumers = Karafka::Persistence::Consumer
+                      .all
+                      .values
+                      .flat_map(&:values)
+                      .select { |ctrl| ctrl.respond_to?(:run_callbacks) }

           if Karafka::App.stopped?
-            controllers.each { |ctrl| ctrl.run_callbacks :before_stop }
-            Karafka::Persistence::Consumer.read.stop
+            consumers.each { |ctrl| ctrl.run_callbacks :before_stop }
+            Karafka::Persistence::Client.read.stop
           else
-            controllers.each { |ctrl| ctrl.run_callbacks :before_poll }
+            consumers.each { |ctrl| ctrl.run_callbacks :before_poll }
             yield
-            controllers.each { |ctrl| ctrl.run_callbacks :after_poll }
+            consumers.each { |ctrl| ctrl.run_callbacks :after_poll }
           end
         end
       end
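
The hook order this patch enforces per poll cycle: before_poll on every live consumer, then the actual fetch (the yield), then after_poll; on shutdown each consumer gets before_stop before the persisted client is stopped. A rough, self-contained simulation of that ordering (plain Ruby standing in for the real callback machinery):

    # Toy stand-in showing the callback ordering enforced above
    class FakeConsumer
      def run_callbacks(kind)
        puts "callback: #{kind}"
      end
    end

    consumers = [FakeConsumer.new]
    stopped   = false

    2.times do
      if stopped
        consumers.each { |c| c.run_callbacks(:before_stop) }
        # here the real patch also stops the persisted client
      else
        consumers.each { |c| c.run_callbacks(:before_poll) }
        puts 'fetching messages...' # stands in for the yielded ruby-kafka fetch
        consumers.each { |c| c.run_callbacks(:after_poll) }
        stopped = true # pretend a shutdown signal arrived
      end
    end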
data/lib/karafka/persistence/client.rb (new file)

@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Persistence
+    # Persistence layer to store current thread messages consumer client for further use
+    class Client
+      # Thread.current key under which we store current thread messages consumer client
+      PERSISTENCE_SCOPE = :client
+
+      # @param client [Karafka::Connection::Client] messages consumer client of
+      #   a current thread
+      # @return [Karafka::Connection::Client] persisted messages consumer client
+      def self.write(client)
+        Thread.current[PERSISTENCE_SCOPE] = client
+      end
+
+      # @return [Karafka::Connection::Client] persisted messages consumer client
+      # @raise [Karafka::Errors::MissingConsumer] raised when no thread messages consumer
+      #   client but we try to use it anyway
+      def self.read
+        Thread.current[PERSISTENCE_SCOPE] || raise(Errors::MissingClient)
+      end
+    end
+  end
+end
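
A hedged usage sketch of this new thread-local store (method names come from the file above; kafka_client is an assumed stand-in for the real Karafka::Connection::Client instance):

    # Inside a consumer thread, the listener writes its client once...
    Karafka::Persistence::Client.write(kafka_client)

    # ...and anything running later on the same thread can read it back
    Karafka::Persistence::Client.read # => kafka_client

    # A thread that never wrote a client raises Errors::MissingClient on read
    Thread.new { Karafka::Persistence::Client.read }.join # => raises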
data/lib/karafka/persistence/consumer.rb

@@ -1,24 +1,37 @@
 # frozen_string_literal: true

 module Karafka
+  # Module used to provide a persistent cache layer for Karafka components that need to be
+  # shared inside of a same thread
   module Persistence
-    # Persistence layer to store current thread messages consumer for further use
+    # Module used to provide a persistent cache across batch requests for a given
+    # topic and partition to store some additional details when the persistent mode
+    # for a given topic is turned on
     class Consumer
-      # Thread.current key under which we store current thread messages consumer
-      PERSISTENCE_SCOPE = :consumer
+      # Thread.current scope under which we store consumers data
+      PERSISTENCE_SCOPE = :consumers

-      # @param consumer [Karafka::Connection::Consumer] messages consumer of
-      #   a current thread
-      # @return [Karafka::Connection::Consumer] persisted messages consumer
-      def self.write(consumer)
-        Thread.current[PERSISTENCE_SCOPE] = consumer
-      end
+      class << self
+        # @return [Hash] current thread persistence scope hash with all the consumers
+        def all
+          # @note This does not need to be threadsafe (Hash) as it is always executed in a
+          #   current thread context
+          Thread.current[PERSISTENCE_SCOPE] ||= Hash.new { |hash, key| hash[key] = {} }
+        end

-      # @return [Karafka::Connection::Consumer] persisted messages consumer
-      # @raise [Karafka::Errors::MissingConsumer] raised when no thread messages consumer
-      #   but we try to use it anyway
-      def self.read
-        Thread.current[PERSISTENCE_SCOPE] || raise(Errors::MissingConsumer)
+        # Used to build (if block given) and/or fetch a current consumer instance that will be
+        #   used to process messages from a given topic and partition
+        # @return [Karafka::BaseConsumer] base consumer descendant
+        # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
+        # @param partition [Integer] number of partition for which we want to cache
+        def fetch(topic, partition)
+          # We always store a current instance for callback reasons
+          if topic.persistent
+            all[topic][partition] ||= topic.consumer.new
+          else
+            all[topic][partition] = topic.consumer.new
+          end
+        end
       end
     end
   end
 end
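
In effect, fetch gives each topic/partition pair its own consumer instance, and the topic's persistent flag decides whether that instance survives between batches. Behavior sketch, assuming Karafka is loaded (the Topic struct is a test double, not real routing code):

    # Pretend topic object exposing only what fetch actually reads
    Topic = Struct.new(:persistent, :consumer)
    topic = Topic.new(true, Class.new)

    first  = Karafka::Persistence::Consumer.fetch(topic, 0)
    second = Karafka::Persistence::Consumer.fetch(topic, 0)
    first.equal?(second) # => true: persistent topics reuse the instance

    topic.persistent = false
    third = Karafka::Persistence::Consumer.fetch(topic, 0)
    third.equal?(second) # => false: a fresh consumer per batch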
data/lib/karafka/persistence/topic.rb (new file)

@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Persistence
+    # Local cache for routing topics
+    # We use it in order not to build string instances and remap incoming topic upon each
+    #   message / message batches received
+    class Topic
+      # Thread.current scope under which we store topics data
+      PERSISTENCE_SCOPE = :topics
+
+      # @param group_id [String] group id for which we fetch a topic representation
+      # @param raw_topic_name [String] raw topic name (before remapping) for which we fetch a
+      #   topic representation
+      # @return [Karafka::Routing::Topic] remapped topic representation that can be used further
+      #   on when working with given parameters
+      def self.fetch(group_id, raw_topic_name)
+        Thread.current[PERSISTENCE_SCOPE] ||= Hash.new { |hash, key| hash[key] = {} }
+
+        Thread.current[PERSISTENCE_SCOPE][group_id][raw_topic_name] ||= begin
+          # We map from incoming topic name, as it might be namespaced, etc.
+          # @see topic_mapper internal docs
+          mapped_topic_name = Karafka::App.config.topic_mapper.incoming(raw_topic_name)
+          Routing::Router.find("#{group_id}_#{mapped_topic_name}")
+        end
+      end
+    end
+  end
+end
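
The caching idiom here - a nested default-block hash plus ||= begin ... end - runs the expensive mapper/router lookup once per (group_id, raw_topic_name) pair and serves the cached object afterwards. A stripped-down illustration (expensive_lookup is an invented name for the example):

    cache = Hash.new { |hash, key| hash[key] = {} }

    def expensive_lookup(group, name)
      puts "looking up #{group}/#{name}" # printed only on a cache miss
      "topic:#{group}_#{name}"
    end

    cache['group_a']['events'] ||= expensive_lookup('group_a', 'events') # miss: computes
    cache['group_a']['events'] ||= expensive_lookup('group_a', 'events') # hit: cached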
data/lib/karafka/process.rb

@@ -8,7 +8,9 @@ module Karafka

     # Signal types that we handle
     HANDLED_SIGNALS = %i[
-      SIGINT SIGQUIT SIGTERM
+      SIGINT
+      SIGQUIT
+      SIGTERM
     ].freeze

     HANDLED_SIGNALS.each do |signal|
@@ -27,8 +29,7 @@ module Karafka

     # Creates an instance of process and creates empty hash for callbacks
     def initialize
-      @callbacks = {}
-      HANDLED_SIGNALS.each { |signal| @callbacks[signal] = [] }
+      @callbacks = Hash.new { |hsh, key| hsh[key] = [] }
    end

    # Method catches all HANDLED_SIGNALS and performs appropriate callbacks (if defined)
@@ -56,7 +57,7 @@ module Karafka
     #   we have to spin up a new thread to do this
     def notice_signal(signal)
       Thread.new do
-        Karafka.monitor.notice(self.class, signal: signal)
+        Karafka.monitor.instrument('process.notice_signal', caller: self, signal: signal)
       end
     end
   end
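
This reflects the switch from the old custom Karafka::Monitor to the new instrumentation layer (see the new lib/karafka/instrumentation/* files in the list above). Assuming the 1.2 monitor exposes name-based subscriptions, a subscriber could hook this event roughly like so:

    # Hedged sketch: subscribing to the instrumented event by name
    Karafka.monitor.subscribe('process.notice_signal') do |event|
      puts "received signal: #{event[:signal]}"
    end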
data/lib/karafka/responders/builder.rb

@@ -3,30 +3,31 @@
 module Karafka
   # Responders namespace encapsulates all the internal responder implementation parts
   module Responders
-    # Responders builder is used to find (based on the controller class name) a responder that
-    #   match the controller. This is used when user does not provide a responder inside routing
-    #   but he still names responder with the same convention (and namespaces) as controller
+    # Responders builder is used to finding (based on the consumer class name) a responder
+    #   that match the consumer. We use it when user does not provide a responder inside routing,
+    #   but he still names responder with the same convention (and namespaces) as consumer
+    #
     # @example Matching responder exists
-    #   Karafka::Responder::Builder(NewEventsController).build #=> NewEventsResponder
+    #   Karafka::Responder::Builder(NewEventsConsumer).build #=> NewEventsResponder
     # @example Matching responder does not exist
-    #   Karafka::Responder::Builder(NewBuildsController).build #=> nil
+    #   Karafka::Responder::Builder(NewBuildsConsumer).build #=> nil
     class Builder
-      # @param controller_class [Karafka::BaseController, nil] descendant of
-      #   Karafka::BaseController
-      # @example Tries to find a responder that matches a given controller. If nothing found,
-      #   will return nil (nil is accepted, because it means that a given controller don't
+      # @param consumer_class [Karafka::BaseConsumer, nil] descendant of
+      #   Karafka::BaseConsumer
+      # @example Tries to find a responder that matches a given consumer. If nothing found,
+      #   will return nil (nil is accepted, because it means that a given consumer don't
       #   pipe stuff further on)
-      def initialize(controller_class)
-        @controller_class = controller_class
+      def initialize(consumer_class)
+        @consumer_class = consumer_class
       end

-      # Tries to figure out a responder based on a controller class name
+      # Tries to figure out a responder based on a consumer class name
       # @return [Class] Responder class (not an instance)
       # @return [nil] or nil if there's no matching responding class
       def build
         Helpers::ClassMatcher.new(
-          @controller_class,
-          from: 'Controller',
+          @consumer_class,
+          from: 'Consumer',
           to: 'Responder'
         ).match
       end
data/lib/karafka/routing/builder.rb

@@ -6,7 +6,7 @@ module Karafka
   #   @example Build a simple (most common) route
   #     consumers do
   #       topic :new_videos do
-  #         controller NewVideosController
+  #         consumer NewVideosConsumer
   #       end
   #     end
   class Builder < Array
data/lib/karafka/routing/consumer_mapper.rb

@@ -17,7 +17,7 @@ module Karafka
   #   module MyMapper
   #     def self.call(raw_consumer_group_name)
   #       [
-  #         Karafka::App.config.client_id.to_s.underscope,
+  #         Dry::Inflector.new.underscore(Karafka::App.config.client_id.to_s),
   #         raw_consumer_group_name
   #       ].join('_').gsub('_', '.')
   #     end
@@ -26,7 +26,8 @@ module Karafka
     # @param raw_consumer_group_name [String, Symbol] string or symbolized consumer group name
     # @return [String] remapped final consumer group name
     def self.call(raw_consumer_group_name)
-      "#{Karafka::App.config.client_id.to_s.underscore}_#{raw_consumer_group_name}"
+      client_name = Dry::Inflector.new.underscore(Karafka::App.config.client_id.to_s)
+      "#{client_name}_#{raw_consumer_group_name}"
    end
  end
end
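
This drops the ActiveSupport-style String#underscore dependency (the old example doc even misspelled it as underscope) in favor of the dry-inflector gem. What the new call produces:

    require 'dry/inflector'

    inflector = Dry::Inflector.new
    inflector.underscore('MyFancyApp') # => "my_fancy_app"

    # So for client_id "MyFancyApp" and group "events", the mapped
    # consumer group name becomes:
    "#{inflector.underscore('MyFancyApp')}_events" # => "my_fancy_app_events"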
data/lib/karafka/routing/router.rb

@@ -3,7 +3,7 @@
 module Karafka
   # Namespace for all elements related to requests routing
   module Routing
-    # Karafka framework Router for routing incoming messages to proper controllers
+    # Karafka framework Router for routing incoming messages to proper consumers
     # @note Since Kafka does not provide namespaces or modules for topics, they all have "flat"
     #   structure so all the routes are being stored in a single level array
     module Router
data/lib/karafka/routing/topic.rb

@@ -9,7 +9,7 @@ module Karafka
     extend Helpers::ConfigRetriever

     attr_reader :id, :consumer_group
-    attr_accessor :controller
+    attr_accessor :consumer

     # @param [String, Symbol] name of a topic on which we want to listen
     # @param consumer_group [Karafka::Routing::ConsumerGroup] owning consumer group of this topic
@@ -29,14 +29,14 @@ module Karafka
     #   example for Sidekiq
     def build
       Karafka::AttributesMap.topic.each { |attr| send(attr) }
-      controller&.topic = self
+      consumer&.topic = self
       self
     end

     # @return [Class, nil] Class (not an instance) of a responder that should respond from
-    #   controller back to Kafka (usefull for piping dataflows)
+    #   consumer back to Kafka (usefull for piping dataflows)
     def responder
-      @responder ||= Karafka::Responders::Builder.new(controller).build
+      @responder ||= Karafka::Responders::Builder.new(consumer).build
     end

     Karafka::AttributesMap.topic.each do |attribute|
@@ -52,7 +52,7 @@ module Karafka

     Hash[map].merge!(
       id: id,
-      controller: controller
+      consumer: consumer
     )
   end
 end
data/lib/karafka/schemas/config.rb

@@ -13,8 +13,11 @@ module Karafka
     #   so we validate all of that once all the routes are defined and ready
     Config = Dry::Validation.Schema do
       required(:client_id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+      required(:shutdown_timeout) { none? | (int? & gteq?(0)) }
       required(:consumer_mapper)
       required(:topic_mapper)
+      required(:params_base_class).filled
+
       optional(:backend).filled
     end
   end
data/lib/karafka/schemas/consumer_group.rb

@@ -9,6 +9,9 @@ module Karafka
     #   but someetimes loads things twice
     URI_SCHEMES ||= %w[kafka kafka+ssl].freeze

+    # Available sasl scram mechanism of authentication (plus nil)
+    SASL_SCRAM_MECHANISMS ||= %w[sha256 sha512].freeze
+
     configure do
       config.messages_file = File.join(
         Karafka.gem_root, 'config', 'errors.yml'
@@ -36,6 +39,7 @@ module Karafka
       required(:connect_timeout).filled { (int? | float?) & gt?(0) }
       required(:socket_timeout).filled { (int? | float?) & gt?(0) }
       required(:min_bytes).filled(:int?, gt?: 0)
+      required(:max_bytes).filled(:int?, gt?: 0)
       required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
       required(:batch_fetching).filled(:bool?)
       required(:topics).filled { each { schema(ConsumerGroupTopic) } }
@@ -52,14 +56,22 @@ module Karafka
         ssl_ca_cert_file_path
         ssl_client_cert
         ssl_client_cert_key
+        sasl_gssapi_principal
+        sasl_gssapi_keytab
         sasl_plain_authzid
         sasl_plain_username
         sasl_plain_password
-        sasl_gssapi_principal
-        sasl_gssapi_keytab
+        sasl_scram_username
+        sasl_scram_password
       ].each do |encryption_attribute|
         optional(encryption_attribute).maybe(:str?)
       end
+
+      optional(:ssl_ca_certs_from_system).maybe(:bool?)
+
+      # It's not with other encryptions as it has some more rules
+      optional(:sasl_scram_mechanism)
+        .maybe(:str?, included_in?: Karafka::Schemas::SASL_SCRAM_MECHANISMS)
     end
   end
 end
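
Taken together, the schema now admits SCRAM credentials alongside PLAIN and GSSAPI, plus system CA certs. A hedged setup sketch using the newly validated keys (assuming they live under config.kafka like the other per-group Kafka options; values are placeholders):

    class KarafkaApp < Karafka::App
      setup do |config|
        # New in 1.2.0.beta1 - validated by the schema above
        config.kafka.sasl_scram_username = 'app_user'
        config.kafka.sasl_scram_password = ENV['KAFKA_SASL_PASSWORD']
        config.kafka.sasl_scram_mechanism = 'sha512' # only 'sha256'/'sha512' pass validation
        config.kafka.ssl_ca_certs_from_system = true
      end
    end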
data/lib/karafka/schemas/consumer_group_topic.rb

@@ -7,7 +7,7 @@ module Karafka
       required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
       required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
       required(:backend).filled(included_in?: %i[inline sidekiq])
-      required(:controller).filled
+      required(:consumer).filled
       required(:parser).filled
       required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
       required(:start_from_beginning).filled(:bool?)
data/lib/karafka/server.rb

@@ -3,6 +3,13 @@
 module Karafka
   # Karafka consuming server class
   class Server
+    @consumer_threads = Concurrent::Array.new
+
+    # How long should we sleep between checks on shutting down consumers
+    SUPERVISION_SLEEP = 1
+    # What system exit code should we use when we terminated forcefully
+    FORCEFUL_EXIT_CODE = 2
+
     class << self
       # Set of consuming threads. Each consumer thread contains a single consumer
       attr_accessor :consumer_threads
@@ -12,7 +19,6 @@ module Karafka

       # Method which runs app
       def run
-        @consumer_threads = Concurrent::Array.new
         bind_on_sigint
         bind_on_sigquit
         bind_on_sigterm
@@ -35,17 +41,17 @@ module Karafka

       # What should happen when we decide to quit with sigint
       def bind_on_sigint
-        process.on_sigint { Karafka::App.stop! }
+        process.on_sigint { stop_supervised }
       end

       # What should happen when we decide to quit with sigquit
       def bind_on_sigquit
-        process.on_sigquit { Karafka::App.stop! }
+        process.on_sigquit { stop_supervised }
       end

       # What should happen when we decide to quit with sigterm
       def bind_on_sigterm
-        process.on_sigterm { Karafka::App.stop! }
+        process.on_sigterm { stop_supervised }
       end

       # Starts Karafka with a supervision
@@ -54,8 +60,30 @@ module Karafka
       def start_supervised
         process.supervise do
           Karafka::App.run!
-          Karafka::Fetcher.new.fetch_loop
+          Karafka::Fetcher.call
+        end
+      end
+
+      # Stops Karafka with a supervision (as long as there is a shutdown timeout)
+      # If consumers won't stop in a given timeframe, it will force them to exit
+      def stop_supervised
+        Karafka::App.stop!
+
+        # If there is no shutdown timeout, we don't exit and wait until all the consumers
+        # had done their work
+        return unless Karafka::App.config.shutdown_timeout
+
+        # If there is a timeout, we check every 1 second (for the timeout period) if all
+        # the threads finished their work and if so, we can just return and normal
+        # shutdown process will take place
+        Karafka::App.config.shutdown_timeout.to_i.times do
+          return if consumer_threads.count(&:alive?).zero?
+          sleep SUPERVISION_SLEEP
         end
+
+        # We're done waiting, lets kill them!
+        consumer_threads.each(&:terminate)
+        Kernel.exit FORCEFUL_EXIT_CODE
      end
    end
  end
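
The shutdown contract this adds, paired with the new shutdown_timeout config key validated in schemas/config.rb above: with the timeout set to N, the server polls thread liveness once per second for up to N seconds, returns early if all consumers finished, and otherwise terminates the stragglers and exits with code 2; with a nil timeout it waits indefinitely. A standalone sketch of that wait-or-kill loop (worker_threads is an illustrative name):

    SUPERVISION_SLEEP = 1
    FORCEFUL_EXIT_CODE = 2

    def wait_or_kill(worker_threads, shutdown_timeout)
      return unless shutdown_timeout # nil timeout => wait indefinitely

      shutdown_timeout.to_i.times do
        return if worker_threads.count(&:alive?).zero? # graceful exit path
        sleep SUPERVISION_SLEEP
      end

      worker_threads.each(&:terminate) # out of patience
      Kernel.exit FORCEFUL_EXIT_CODE
    end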