karafka 1.2.2 → 1.4.0.rc1

Sign up to get free protection for your applications and to get access to all the features.
Files changed (113) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +52 -0
  11. data/.gitignore +1 -0
  12. data/.ruby-version +1 -1
  13. data/CHANGELOG.md +157 -13
  14. data/CODE_OF_CONDUCT.md +1 -1
  15. data/CONTRIBUTING.md +1 -1
  16. data/Gemfile +5 -2
  17. data/Gemfile.lock +95 -79
  18. data/README.md +15 -3
  19. data/bin/karafka +1 -1
  20. data/certs/mensfeld.pem +25 -0
  21. data/config/errors.yml +38 -5
  22. data/docker-compose.yml +17 -0
  23. data/karafka.gemspec +19 -13
  24. data/lib/karafka.rb +10 -16
  25. data/lib/karafka/app.rb +14 -6
  26. data/lib/karafka/attributes_map.rb +13 -18
  27. data/lib/karafka/base_consumer.rb +19 -30
  28. data/lib/karafka/base_responder.rb +51 -29
  29. data/lib/karafka/cli.rb +2 -2
  30. data/lib/karafka/cli/console.rb +11 -9
  31. data/lib/karafka/cli/flow.rb +9 -7
  32. data/lib/karafka/cli/info.rb +4 -2
  33. data/lib/karafka/cli/install.rb +30 -6
  34. data/lib/karafka/cli/server.rb +11 -6
  35. data/lib/karafka/code_reloader.rb +67 -0
  36. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +62 -21
  37. data/lib/karafka/connection/batch_delegator.rb +55 -0
  38. data/lib/karafka/connection/builder.rb +18 -0
  39. data/lib/karafka/connection/client.rb +40 -40
  40. data/lib/karafka/connection/listener.rb +26 -15
  41. data/lib/karafka/connection/message_delegator.rb +36 -0
  42. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  43. data/lib/karafka/consumers/callbacks.rb +32 -15
  44. data/lib/karafka/consumers/includer.rb +31 -18
  45. data/lib/karafka/consumers/responders.rb +2 -2
  46. data/lib/karafka/contracts.rb +10 -0
  47. data/lib/karafka/contracts/config.rb +21 -0
  48. data/lib/karafka/contracts/consumer_group.rb +206 -0
  49. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  50. data/lib/karafka/contracts/responder_usage.rb +54 -0
  51. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  52. data/lib/karafka/errors.rb +17 -19
  53. data/lib/karafka/fetcher.rb +28 -30
  54. data/lib/karafka/helpers/class_matcher.rb +12 -2
  55. data/lib/karafka/helpers/config_retriever.rb +1 -1
  56. data/lib/karafka/helpers/inflector.rb +26 -0
  57. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  58. data/lib/karafka/instrumentation/logger.rb +9 -6
  59. data/lib/karafka/instrumentation/monitor.rb +15 -9
  60. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  61. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  62. data/lib/karafka/params/batch_metadata.rb +26 -0
  63. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  64. data/lib/karafka/params/builders/params.rb +38 -0
  65. data/lib/karafka/params/builders/params_batch.rb +25 -0
  66. data/lib/karafka/params/metadata.rb +20 -0
  67. data/lib/karafka/params/params.rb +50 -0
  68. data/lib/karafka/params/params_batch.rb +35 -21
  69. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  70. data/lib/karafka/persistence/client.rb +15 -11
  71. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  72. data/lib/karafka/persistence/topics.rb +48 -0
  73. data/lib/karafka/process.rb +0 -4
  74. data/lib/karafka/responders/builder.rb +1 -1
  75. data/lib/karafka/responders/topic.rb +6 -8
  76. data/lib/karafka/routing/builder.rb +36 -8
  77. data/lib/karafka/routing/consumer_group.rb +1 -1
  78. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  79. data/lib/karafka/routing/proxy.rb +10 -1
  80. data/lib/karafka/routing/topic.rb +5 -3
  81. data/lib/karafka/routing/topic_mapper.rb +16 -18
  82. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  83. data/lib/karafka/serialization/json/serializer.rb +31 -0
  84. data/lib/karafka/server.rb +34 -49
  85. data/lib/karafka/setup/config.rb +74 -40
  86. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  87. data/lib/karafka/setup/dsl.rb +0 -1
  88. data/lib/karafka/status.rb +7 -3
  89. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  90. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  91. data/lib/karafka/templates/karafka.rb.erb +92 -0
  92. data/lib/karafka/version.rb +1 -1
  93. metadata +97 -73
  94. metadata.gz.sig +4 -0
  95. data/.travis.yml +0 -13
  96. data/lib/karafka/callbacks.rb +0 -30
  97. data/lib/karafka/callbacks/config.rb +0 -22
  98. data/lib/karafka/callbacks/dsl.rb +0 -16
  99. data/lib/karafka/connection/delegator.rb +0 -46
  100. data/lib/karafka/instrumentation/listener.rb +0 -112
  101. data/lib/karafka/loader.rb +0 -28
  102. data/lib/karafka/params/dsl.rb +0 -156
  103. data/lib/karafka/parsers/json.rb +0 -38
  104. data/lib/karafka/patches/dry_configurable.rb +0 -35
  105. data/lib/karafka/persistence/topic.rb +0 -29
  106. data/lib/karafka/schemas/config.rb +0 -24
  107. data/lib/karafka/schemas/consumer_group.rb +0 -77
  108. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  109. data/lib/karafka/schemas/responder_usage.rb +0 -39
  110. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  111. data/lib/karafka/setup/configurators/base.rb +0 -29
  112. data/lib/karafka/setup/configurators/params.rb +0 -25
  113. data/lib/karafka/templates/karafka.rb.example +0 -54
# frozen_string_literal: true

module Karafka
  module Contracts
    # Contract for single full route (consumer group + topics) validation.
    class ConsumerGroup < Dry::Validation::Contract
      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')

      # Valid URI schemes of a Kafka broker URL
      # The ||= is due to the behavior of require_all that resolves dependencies
      # but sometimes loads things twice
      URI_SCHEMES ||= %w[kafka kafka+ssl plaintext ssl].freeze

      # Available SASL SCRAM mechanisms of authentication (plus nil)
      SASL_SCRAM_MECHANISMS ||= %w[sha256 sha512].freeze

      # Internal contract for sub-validating topics schema
      TOPIC_CONTRACT = ConsumerGroupTopic.new.freeze

      private_constant :TOPIC_CONTRACT

      params do
        required(:id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
        required(:topics).value(:array, :filled?)
        required(:seed_brokers).value(:array, :filled?)
        required(:session_timeout).filled { int? | float? }
        # `>` is the dry-logic implication operator: "if filled, then gteq 0"
        required(:pause_timeout).maybe(%i[integer float]) { filled? > gteq?(0) }
        required(:pause_max_timeout).maybe(%i[integer float]) { filled? > gteq?(0) }
        required(:pause_exponential_backoff).filled(:bool?)
        required(:offset_commit_interval) { int? | float? }
        required(:offset_commit_threshold).filled(:int?)
        required(:offset_retention_time).maybe(:integer)
        required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
        required(:fetcher_max_queue_size).filled(:int?, gt?: 0)
        required(:connect_timeout).filled { (int? | float?) & gt?(0) }
        required(:reconnect_timeout).filled { (int? | float?) & gteq?(0) }
        required(:socket_timeout).filled { (int? | float?) & gt?(0) }
        required(:min_bytes).filled(:int?, gt?: 0)
        required(:max_bytes).filled(:int?, gt?: 0)
        required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
        required(:batch_fetching).filled(:bool?)

        # All of these share the same "optional, maybe string" shape
        %i[
          ssl_ca_cert
          ssl_ca_cert_file_path
          ssl_client_cert
          ssl_client_cert_key
          ssl_client_cert_chain
          ssl_client_cert_key_password
          sasl_gssapi_principal
          sasl_gssapi_keytab
          sasl_plain_authzid
          sasl_plain_username
          sasl_plain_password
          sasl_scram_username
          sasl_scram_password
        ].each do |encryption_attribute|
          optional(encryption_attribute).maybe(:str?)
        end

        optional(:ssl_verify_hostname).maybe(:bool?)
        optional(:ssl_ca_certs_from_system).maybe(:bool?)
        optional(:sasl_over_ssl).maybe(:bool?)
        optional(:sasl_oauth_token_provider).value(:any)

        # It's not with other encryptions as it has some more rules
        optional(:sasl_scram_mechanism)
          .maybe(:str?, included_in?: SASL_SCRAM_MECHANISMS)
      end

      # Uri rule to check if uri is in a Karafka acceptable format
      rule(:seed_brokers) do
        if value.is_a?(Array) && !value.all?(&method(:kafka_uri?))
          key.failure(:invalid_broker_schema)
        end
      end

      # Topic names within a single consumer group have to be unique
      rule(:topics) do
        if value.is_a?(Array)
          names = value.map { |topic| topic[:name] }

          key.failure(:topics_names_not_unique) if names.size != names.uniq.size
        end
      end

      # Delegate per-topic validation to the topic sub-contract and re-key
      # its failures under the topic's index
      rule(:topics) do
        if value.is_a?(Array)
          value.each_with_index do |topic, index|
            TOPIC_CONTRACT.call(topic).errors.each do |error|
              key([:topics, index, error.path[0]]).failure(error.text)
            end
          end
        end
      end

      # A client cert without its key is unusable
      rule(:ssl_client_cert, :ssl_client_cert_key) do
        if values[:ssl_client_cert] && !values[:ssl_client_cert_key]
          key(:ssl_client_cert_key).failure(:ssl_client_cert_with_ssl_client_cert_key)
        end
      end

      # A client cert key without the cert itself is unusable
      rule(:ssl_client_cert, :ssl_client_cert_key) do
        if values[:ssl_client_cert_key] && !values[:ssl_client_cert]
          key(:ssl_client_cert).failure(:ssl_client_cert_key_with_ssl_client_cert)
        end
      end

      # A cert chain requires the client cert it chains from
      rule(:ssl_client_cert, :ssl_client_cert_chain) do
        if values[:ssl_client_cert_chain] && !values[:ssl_client_cert]
          key(:ssl_client_cert).failure(:ssl_client_cert_chain_with_ssl_client_cert)
        end
      end

      # A cert chain also requires the client cert key
      # NOTE(review): the original checked ssl_client_cert here, duplicating the
      # chain-with-cert rule above and never enforcing the chain/key pairing this
      # rule declares — fixed to validate ssl_client_cert_key
      rule(:ssl_client_cert_chain, :ssl_client_cert_key) do
        if values[:ssl_client_cert_chain] && !values[:ssl_client_cert_key]
          key(:ssl_client_cert_key).failure(:ssl_client_cert_chain_with_ssl_client_cert_key)
        end
      end

      # A key password is meaningless without the key it protects
      rule(:ssl_client_cert_key_password, :ssl_client_cert_key) do
        if values[:ssl_client_cert_key_password] && !values[:ssl_client_cert_key]
          key(:ssl_client_cert_key).failure(:ssl_client_cert_key_password_with_ssl_client_cert_key)
        end
      end

      rule(:ssl_ca_cert) do
        key.failure(:invalid_certificate) if value && !valid_certificate?(value)
      end

      rule(:ssl_client_cert) do
        key.failure(:invalid_certificate) if value && !valid_certificate?(value)
      end

      # The CA cert file has to exist and contain a parseable X509 certificate
      rule(:ssl_ca_cert_file_path) do
        if value
          if File.exist?(value)
            key.failure(:invalid_certificate_from_path) unless valid_certificate?(File.read(value))
          else
            key.failure(:does_not_exist)
          end
        end
      end

      rule(:ssl_client_cert_key) do
        key.failure(:invalid_private_key) if value && !valid_private_key?(value)
      end

      rule(:ssl_client_cert_chain) do
        key.failure(:invalid_certificate) if value && !valid_certificate?(value)
      end

      # ruby-kafka expects the provider to respond to #token
      rule(:sasl_oauth_token_provider) do
        key.failure(:does_not_respond_to_token) if value && !value.respond_to?(:token)
      end

      # Waiting longer than the socket timeout would always trip the socket first
      rule(:max_wait_time, :socket_timeout) do
        max_wait_time = values[:max_wait_time]
        socket_timeout = values[:socket_timeout]

        if socket_timeout.is_a?(Numeric) &&
           max_wait_time.is_a?(Numeric) &&
           max_wait_time > socket_timeout

          key(:max_wait_time).failure(:max_wait_time_limit)
        end
      end

      # With exponential backoff the base pause cannot exceed the cap
      # (to_i maps nils to 0, so unset values never trigger the failure)
      rule(:pause_timeout, :pause_max_timeout, :pause_exponential_backoff) do
        if values[:pause_exponential_backoff]
          if values[:pause_timeout].to_i > values[:pause_max_timeout].to_i
            key(:pause_max_timeout).failure(:max_timeout_size_for_exponential)
          end
        end
      end

      private

      # @param value [String] potential RSA key value
      # @return [Boolean] is the given string a valid RSA key
      def valid_private_key?(value)
        OpenSSL::PKey.read(value)
        true
      rescue OpenSSL::PKey::PKeyError
        false
      end

      # @param value [String] potential X509 cert value
      # @return [Boolean] is the given string a valid X509 cert
      def valid_certificate?(value)
        OpenSSL::X509::Certificate.new(value)
        true
      rescue OpenSSL::X509::CertificateError
        false
      end

      # @param value [String] potential kafka uri
      # @return [Boolean] true if it is a kafka uri (accepted scheme + port), otherwise false
      def kafka_uri?(value)
        uri = URI.parse(value)
        URI_SCHEMES.include?(uri.scheme) && !uri.port.nil?
      rescue URI::InvalidURIError
        false
      end
    end
  end
end
# frozen_string_literal: true

module Karafka
  module Contracts
    # Consumer group topic validation rules
    class ConsumerGroupTopic < Dry::Validation::Contract
      params do
        # Both the route id and the topic name must match the acceptable
        # Kafka topic format
        %i[id name].each do |topic_reference|
          required(topic_reference).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
        end

        required(:backend).filled(included_in?: %i[inline sidekiq])
        required(:consumer).filled
        required(:deserializer).filled
        required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
        required(:start_from_beginning).filled(:bool?)
        required(:batch_consuming).filled(:bool?)
      end
    end
  end
end
# frozen_string_literal: true

module Karafka
  module Contracts
    # Validator to check responder topic usage
    class ResponderUsageTopic < Dry::Validation::Contract
      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')

      params do
        required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
        required(:required).filled(:bool?)
        required(:usage_count).filled(:int?, gteq?: 0)
        required(:registered).filled(eql?: true)
        required(:async).filled(:bool?)
        required(:serializer).filled
      end

      # A topic marked as required must have been used at least once
      rule(:required, :usage_count) do
        key(:name).failure(:required_usage_count) if values[:required] && values[:usage_count] < 1
      end
    end

    # Validator to check that everything in a responder flow matches responder rules
    class ResponderUsage < Dry::Validation::Contract
      include Dry::Core::Constants

      # Contract for verifying the topic usage details
      TOPIC_CONTRACT = ResponderUsageTopic.new.freeze

      private_constant :TOPIC_CONTRACT

      params do
        required(:used_topics)
        required(:registered_topics)
      end

      # Each used topic has to satisfy the per-topic contract; sub-contract
      # failures are re-keyed under the topic they belong to
      rule(:used_topics) do
        (value || EMPTY_ARRAY).each do |used_topic|
          result = TOPIC_CONTRACT.call(used_topic)

          result.errors.each do |error|
            key([:used_topics, used_topic, error.path[0]]).failure(error.text)
          end
        end
      end

      # Same per-topic validation, applied to the registered topics set
      rule(:registered_topics) do
        (value || EMPTY_ARRAY).each do |registered_topic|
          result = TOPIC_CONTRACT.call(registered_topic)

          result.errors.each do |error|
            key([:registered_topics, registered_topic, error.path[0]]).failure(error.text)
          end
        end
      end
    end
  end
end
# frozen_string_literal: true

module Karafka
  module Contracts
    # Contract for validating correctness of the server cli command options
    # We validate some basics + the list of consumer_groups on which we want to use, to make
    # sure that all of them are defined, plus that a pidfile does not exist
    class ServerCliOptions < Dry::Validation::Contract
      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')

      params do
        optional(:pid).filled(:str?)
        optional(:daemon).filled(:bool?)
        optional(:consumer_groups).value(:array, :filled?)
      end

      # Refuse to start when the pidfile is already present on disk
      rule(:pid) do
        key.failure(:pid_already_exists) if value && File.exist?(value)
      end

      # Every requested consumer group has to be one of the defined routes
      rule(:consumer_groups) do
        # If there were no consumer_groups declared in the server cli, it means that we will
        # run all of them and no need to validate them here at all
        unless value.nil?
          known_groups = Karafka::App.config.internal.routing_builder.map(&:name)

          key(:consumer_groups).failure(:consumer_groups_inclusion) unless (value - known_groups).empty?
        end
      end
    end
  end
end
@@ -6,17 +6,18 @@ module Karafka
6
6
  # Base class for all the Karafka internal errors
7
7
  BaseError = Class.new(StandardError)
8
8
 
9
- # Should be raised when we attemp to parse incoming params but parsing fails
10
- # If this error (or its descendant) is detected, we will pass the raw message
11
- # into params and proceed further
12
- ParserError = Class.new(BaseError)
9
+ # Should be raised when we have data that we cannot serialize
10
+ SerializationError = Class.new(BaseError)
11
+
12
+ # Should be raised when we tried to deserialize incoming data but we failed
13
+ DeserializationError = Class.new(BaseError)
13
14
 
14
15
  # Raised when router receives topic name which does not correspond with any routes
15
16
  # This can only happen in a case when:
16
17
  # - you've received a message and we cannot match it with a consumer
17
18
  # - you've changed the routing, so router can no longer associate your topic to
18
19
  # any consumer
19
- # - or in a case when you do a lot of metaprogramming and you change routing/etc on runtime
20
+ # - or in a case when you do a lot of meta-programming and you change routing/etc on runtime
20
21
  #
21
22
  # In case this happens, you will have to create a temporary route that will allow
22
23
  # you to "eat" everything from the Sidekiq queue.
@@ -25,29 +26,26 @@ module Karafka
25
26
 
26
27
  # Raised when we don't use or use responder not in the way it expected to based on the
27
28
  # topics usage definitions
28
- InvalidResponderUsage = Class.new(BaseError)
29
+ InvalidResponderUsageError = Class.new(BaseError)
29
30
 
30
- # Raised when options that we provide to the responder to respond aren't what the schema
31
+ # Raised when options that we provide to the responder to respond aren't what the contract
31
32
  # requires
32
- InvalidResponderMessageOptions = Class.new(BaseError)
33
+ InvalidResponderMessageOptionsError = Class.new(BaseError)
33
34
 
34
- # Raised when configuration doesn't match with validation schema
35
- InvalidConfiguration = Class.new(BaseError)
35
+ # Raised when configuration doesn't match with validation contract
36
+ InvalidConfigurationError = Class.new(BaseError)
36
37
 
37
- # Raised when we try to use Karafka CLI commands (except install) without a bootfile
38
- MissingBootFile = Class.new(BaseError)
38
+ # Raised when we try to use Karafka CLI commands (except install) without a boot file
39
+ MissingBootFileError = Class.new(BaseError)
39
40
 
40
41
  # Raised when we want to read a persisted thread messages consumer but it is unavailable
41
42
  # This should never happen and if it does, please contact us
42
- MissingClient = Class.new(BaseError)
43
-
44
- # Raised when we attemp to pause a partition but the pause timeout is equal to 0
45
- InvalidPauseTimeout = Class.new(BaseError)
43
+ MissingClientError = Class.new(BaseError)
46
44
 
47
45
  # Raised when want to hook up to an event that is not registered and supported
48
- UnregisteredMonitorEvent = Class.new(BaseError)
46
+ UnregisteredMonitorEventError = Class.new(BaseError)
49
47
 
50
- # Raised when we've waited enough for shutting down an unresponding process
51
- ForcefulShutdown = Class.new(BaseError)
48
+ # Raised when we've waited enough for shutting down a non-responsive process
49
+ ForcefulShutdownError = Class.new(BaseError)
52
50
  end
53
51
  end
@@ -5,39 +5,37 @@ module Karafka
5
5
  # @note Creating multiple fetchers will result in having multiple connections to the same
6
6
  # topics, which means that if there are no partitions, it won't use them.
7
7
  class Fetcher
8
- class << self
9
- # Starts listening on all the listeners asynchronously
10
- # Fetch loop should never end, which means that we won't create more actor clusters
11
- # so we don't have to terminate them
12
- def call
13
- threads = listeners.map do |listener|
14
- # We abort on exception because there should be an exception handling developed for
15
- # each listener running in separate threads, so the exceptions should never leak
16
- # and if that happens, it means that something really bad happened and we should stop
17
- # the whole process
18
- Thread
19
- .new { listener.call }
20
- .tap { |thread| thread.abort_on_exception = true }
21
- end
22
-
23
- # We aggregate threads here for a supervised shutdown process
24
- threads.each { |thread| Karafka::Server.consumer_threads << thread }
25
- threads.each(&:join)
26
- # If anything crashes here, we need to raise the error and crush the runner because it means
27
- # that something terrible happened
28
- rescue StandardError => e
29
- Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
30
- Karafka::App.stop!
31
- raise e
8
+ # Starts listening on all the listeners asynchronously
9
+ # Fetch loop should never end, which means that we won't create more actor clusters
10
+ # so we don't have to terminate them
11
+ def call
12
+ threads = listeners.map do |listener|
13
+ # We abort on exception because there should be an exception handling developed for
14
+ # each listener running in separate threads, so the exceptions should never leak
15
+ # and if that happens, it means that something really bad happened and we should stop
16
+ # the whole process
17
+ Thread
18
+ .new { listener.call }
19
+ .tap { |thread| thread.abort_on_exception = true }
32
20
  end
33
21
 
34
- private
22
+ # We aggregate threads here for a supervised shutdown process
23
+ threads.each { |thread| Karafka::Server.consumer_threads << thread }
24
+ threads.each(&:join)
25
+ # If anything crashes here, we need to raise the error and crash the runner because it means
26
+ # that something terrible happened
27
+ rescue StandardError => e
28
+ Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
29
+ Karafka::App.stop!
30
+ raise e
31
+ end
32
+
33
+ private
35
34
 
36
- # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
37
- def listeners
38
- @listeners ||= App.consumer_groups.active.map do |consumer_group|
39
- Karafka::Connection::Listener.new(consumer_group)
40
- end
35
+ # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
36
+ def listeners
37
+ @listeners ||= App.consumer_groups.active.map do |consumer_group|
38
+ Karafka::Connection::Listener.new(consumer_group)
41
39
  end
42
40
  end
43
41
  end