karafka 1.0.1 → 1.4.14

Sign up to get free protection for your applications and to get access to all the features.
Files changed (121) hide show
  1. checksums.yaml +5 -5
  2. checksums.yaml.gz.sig +0 -0
  3. data/.coditsu/ci.yml +3 -0
  4. data/.console_irbrc +1 -3
  5. data/.diffend.yml +3 -0
  6. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  7. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  8. data/.github/workflows/ci.yml +76 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/CHANGELOG.md +286 -16
  12. data/CODE_OF_CONDUCT.md +1 -1
  13. data/CONTRIBUTING.md +6 -7
  14. data/Gemfile +5 -2
  15. data/Gemfile.lock +100 -103
  16. data/README.md +54 -74
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +26 -0
  19. data/config/errors.yml +40 -5
  20. data/docker-compose.yml +17 -0
  21. data/karafka.gemspec +31 -15
  22. data/lib/karafka/app.rb +19 -18
  23. data/lib/karafka/assignment_strategies/round_robin.rb +13 -0
  24. data/lib/karafka/attributes_map.rb +17 -21
  25. data/lib/karafka/backends/inline.rb +2 -3
  26. data/lib/karafka/base_consumer.rb +57 -0
  27. data/lib/karafka/base_responder.rb +77 -31
  28. data/lib/karafka/cli/base.rb +4 -4
  29. data/lib/karafka/cli/console.rb +11 -9
  30. data/lib/karafka/cli/flow.rb +9 -7
  31. data/lib/karafka/cli/info.rb +5 -4
  32. data/lib/karafka/cli/install.rb +32 -8
  33. data/lib/karafka/cli/missingno.rb +19 -0
  34. data/lib/karafka/cli/server.rb +18 -16
  35. data/lib/karafka/cli.rb +10 -2
  36. data/lib/karafka/code_reloader.rb +67 -0
  37. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +71 -22
  38. data/lib/karafka/connection/batch_delegator.rb +55 -0
  39. data/lib/karafka/connection/builder.rb +23 -0
  40. data/lib/karafka/connection/client.rb +120 -0
  41. data/lib/karafka/connection/listener.rb +39 -26
  42. data/lib/karafka/connection/message_delegator.rb +36 -0
  43. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  44. data/lib/karafka/consumers/callbacks.rb +71 -0
  45. data/lib/karafka/consumers/includer.rb +64 -0
  46. data/lib/karafka/consumers/responders.rb +24 -0
  47. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  48. data/lib/karafka/contracts/config.rb +21 -0
  49. data/lib/karafka/contracts/consumer_group.rb +211 -0
  50. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  51. data/lib/karafka/contracts/responder_usage.rb +54 -0
  52. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  53. data/lib/karafka/contracts.rb +10 -0
  54. data/lib/karafka/errors.rb +27 -12
  55. data/lib/karafka/fetcher.rb +15 -15
  56. data/lib/karafka/helpers/class_matcher.rb +20 -10
  57. data/lib/karafka/helpers/config_retriever.rb +3 -3
  58. data/lib/karafka/helpers/inflector.rb +26 -0
  59. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  60. data/lib/karafka/instrumentation/logger.rb +54 -0
  61. data/lib/karafka/instrumentation/monitor.rb +70 -0
  62. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  63. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  64. data/lib/karafka/params/batch_metadata.rb +26 -0
  65. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  66. data/lib/karafka/params/builders/params.rb +38 -0
  67. data/lib/karafka/params/builders/params_batch.rb +25 -0
  68. data/lib/karafka/params/metadata.rb +20 -0
  69. data/lib/karafka/params/params.rb +35 -107
  70. data/lib/karafka/params/params_batch.rb +38 -19
  71. data/lib/karafka/patches/ruby_kafka.rb +47 -0
  72. data/lib/karafka/persistence/client.rb +29 -0
  73. data/lib/karafka/persistence/consumers.rb +45 -0
  74. data/lib/karafka/persistence/topics.rb +48 -0
  75. data/lib/karafka/process.rb +6 -9
  76. data/lib/karafka/responders/builder.rb +15 -14
  77. data/lib/karafka/responders/topic.rb +14 -9
  78. data/lib/karafka/routing/builder.rb +38 -9
  79. data/lib/karafka/routing/consumer_group.rb +6 -4
  80. data/lib/karafka/routing/consumer_mapper.rb +10 -9
  81. data/lib/karafka/routing/proxy.rb +10 -1
  82. data/lib/karafka/routing/router.rb +1 -1
  83. data/lib/karafka/routing/topic.rb +8 -12
  84. data/lib/karafka/routing/topic_mapper.rb +16 -18
  85. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  86. data/lib/karafka/serialization/json/serializer.rb +31 -0
  87. data/lib/karafka/server.rb +50 -39
  88. data/lib/karafka/setup/config.rb +138 -91
  89. data/lib/karafka/setup/configurators/water_drop.rb +21 -16
  90. data/lib/karafka/setup/dsl.rb +21 -0
  91. data/lib/karafka/status.rb +7 -3
  92. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.erb} +2 -2
  93. data/lib/karafka/templates/karafka.rb.erb +92 -0
  94. data/lib/karafka/version.rb +1 -1
  95. data/lib/karafka.rb +19 -15
  96. data.tar.gz.sig +0 -0
  97. metadata +119 -81
  98. metadata.gz.sig +5 -0
  99. data/.github/ISSUE_TEMPLATE.md +0 -2
  100. data/.travis.yml +0 -17
  101. data/Rakefile +0 -7
  102. data/lib/karafka/base_controller.rb +0 -117
  103. data/lib/karafka/connection/messages_consumer.rb +0 -106
  104. data/lib/karafka/connection/messages_processor.rb +0 -61
  105. data/lib/karafka/controllers/includer.rb +0 -51
  106. data/lib/karafka/controllers/responders.rb +0 -19
  107. data/lib/karafka/loader.rb +0 -29
  108. data/lib/karafka/logger.rb +0 -53
  109. data/lib/karafka/monitor.rb +0 -98
  110. data/lib/karafka/parsers/json.rb +0 -38
  111. data/lib/karafka/patches/dry_configurable.rb +0 -33
  112. data/lib/karafka/persistence/controller.rb +0 -23
  113. data/lib/karafka/schemas/config.rb +0 -31
  114. data/lib/karafka/schemas/consumer_group.rb +0 -64
  115. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  116. data/lib/karafka/schemas/responder_usage.rb +0 -38
  117. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  118. data/lib/karafka/setup/configurators/base.rb +0 -35
  119. data/lib/karafka/setup/configurators/celluloid.rb +0 -19
  120. data/lib/karafka/templates/karafka.rb.example +0 -41
  121. /data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
@@ -0,0 +1,64 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ # Additional functionalities for consumers
5
+ module Consumers
6
+ # Module used to inject functionalities into a given consumer instance, based on the consumer
7
+ # topic and its settings
8
+ # We don't need all the behaviors in all the cases, so it is not worth having everything
9
+ # in all the cases all the time
10
+ module Includer
11
+ class << self
12
+ # @param consumer [Karafka::BaseConsumer] consumer instance, that will get some
13
+ # functionalities based on the topic under which it operates
14
+ def call(consumer)
15
+ topic = consumer.topic
16
+
17
+ bind_backend(consumer, topic)
18
+ bind_params(consumer, topic)
19
+ bind_batch_metadata(consumer, topic)
20
+ bind_responders(consumer, topic)
21
+ end
22
+
23
+ private
24
+
25
+ # Figures out backend for a given consumer class, based on the topic backend and
26
+ # includes it into the consumer class
27
+ # @param consumer [Karafka::BaseConsumer] consumer instance
28
+ # @param topic [Karafka::Routing::Topic] topic of a consumer class
29
+ def bind_backend(consumer, topic)
30
+ backend = Kernel.const_get("::Karafka::Backends::#{topic.backend.to_s.capitalize}")
31
+ consumer.extend(backend)
32
+ end
33
+
34
+ # Adds a single #params support for non batch processed topics
35
+ # @param consumer [Karafka::BaseConsumer] consumer instance
36
+ # @param topic [Karafka::Routing::Topic] topic of a consumer class
37
+ def bind_params(consumer, topic)
38
+ return if topic.batch_consuming
39
+
40
+ consumer.extend(SingleParams)
41
+ end
42
+
43
+ # Adds an option to work with batch metadata for consumer instances that have
44
+ # batch fetching enabled
45
+ # @param consumer [Karafka::BaseConsumer] consumer instance
46
+ # @param topic [Karafka::Routing::Topic] topic of a consumer class
47
+ def bind_batch_metadata(consumer, topic)
48
+ return unless topic.batch_fetching
49
+
50
+ consumer.extend(BatchMetadata)
51
+ end
52
+
53
+ # Adds responders support for topics and consumers with responders defined for them
54
+ # @param consumer [Karafka::BaseConsumer] consumer instance
55
+ # @param topic [Karafka::Routing::Topic] topic of a consumer class
56
+ def bind_responders(consumer, topic)
57
+ return unless topic.responder
58
+
59
+ consumer.extend(Responders)
60
+ end
61
+ end
62
+ end
63
+ end
64
+ end
@@ -0,0 +1,24 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Consumers
5
+ # Feature that allows us to use responders flow in consumer
6
+ module Responders
7
+ # Responds with given data using given responder. This allows us to have a similar way of
8
+ # defining flows like synchronous protocols
9
+ # @param data Anything we want to pass to responder based on which we want to trigger further
10
+ # Kafka responding
11
+ def respond_with(*data)
12
+ Karafka.monitor.instrument(
13
+ 'consumers.responders.respond_with',
14
+ caller: self,
15
+ data: data
16
+ ) do
17
+ # @note we build a new instance of responder each time, as a long-running (persisted)
18
+ # consumers can respond multiple times during the life-cycle
19
+ topic.responder.new.call(*data)
20
+ end
21
+ end
22
+ end
23
+ end
24
+ end
@@ -1,12 +1,12 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Karafka
4
- module Controllers
5
- # Params alias for single message processing controllers
4
+ module Consumers
5
+ # Params alias for single message consumption consumers
6
6
  module SingleParams
7
7
  private
8
8
 
9
- # @return [Karafka::Params::Params] params instance for non batch processed controllers
9
+ # @return [Karafka::Params::Params] params instance for non batch consumption consumers
10
10
  def params
11
11
  params_batch.first
12
12
  end
@@ -0,0 +1,21 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Contracts
5
+ # Contract with validation rules for Karafka configuration details
6
+ # @note There are many more configuration options inside of the
7
+ # Karafka::Setup::Config model, but we don't validate them here as they are
8
+ # validated per each route (topic + consumer_group) because they can be overwritten,
9
+ # so we validate all of that once all the routes are defined and ready
10
+ class Config < Dry::Validation::Contract
11
+ params do
12
+ required(:client_id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
13
+ required(:shutdown_timeout) { (int? & gt?(0)) }
14
+ required(:consumer_mapper)
15
+ required(:topic_mapper)
16
+
17
+ optional(:backend).filled
18
+ end
19
+ end
20
+ end
21
+ end
@@ -0,0 +1,211 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Contracts
5
+ # Contract for single full route (consumer group + topics) validation.
6
+ class ConsumerGroup < Dry::Validation::Contract
7
+ config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
8
+
9
+ # Valid uri schemas of Kafka broker url
10
+ # The ||= is due to the behavior of require_all that resolves dependencies
11
+ # but sometimes loads things twice
12
+ URI_SCHEMES ||= %w[kafka kafka+ssl plaintext ssl].freeze
13
+
14
+ # Available sasl scram mechanism of authentication (plus nil)
15
+ SASL_SCRAM_MECHANISMS ||= %w[sha256 sha512].freeze
16
+
17
+ # Internal contract for sub-validating topics schema
18
+ TOPIC_CONTRACT = ConsumerGroupTopic.new.freeze
19
+
20
+ private_constant :TOPIC_CONTRACT
21
+
22
+ params do
23
+ required(:id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
24
+ required(:topics).value(:array, :filled?)
25
+ required(:seed_brokers).value(:array, :filled?)
26
+ required(:session_timeout).filled { int? | float? }
27
+ required(:pause_timeout).maybe(%i[integer float]) { filled? > gteq?(0) }
28
+ required(:pause_max_timeout).maybe(%i[integer float]) { filled? > gteq?(0) }
29
+ required(:pause_exponential_backoff).filled(:bool?)
30
+ required(:offset_commit_interval) { int? | float? }
31
+ required(:offset_commit_threshold).filled(:int?)
32
+ required(:offset_retention_time).maybe(:integer)
33
+ required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
34
+ required(:fetcher_max_queue_size).filled(:int?, gt?: 0)
35
+ required(:assignment_strategy).value(:any)
36
+ required(:connect_timeout).filled { (int? | float?) & gt?(0) }
37
+ required(:reconnect_timeout).filled { (int? | float?) & gteq?(0) }
38
+ required(:socket_timeout).filled { (int? | float?) & gt?(0) }
39
+ required(:min_bytes).filled(:int?, gt?: 0)
40
+ required(:max_bytes).filled(:int?, gt?: 0)
41
+ required(:max_wait_time).filled { (int? | float?) & gteq?(0) }
42
+ required(:batch_fetching).filled(:bool?)
43
+
44
+ %i[
45
+ ssl_ca_cert
46
+ ssl_ca_cert_file_path
47
+ ssl_client_cert
48
+ ssl_client_cert_key
49
+ ssl_client_cert_chain
50
+ ssl_client_cert_key_password
51
+ sasl_gssapi_principal
52
+ sasl_gssapi_keytab
53
+ sasl_plain_authzid
54
+ sasl_plain_username
55
+ sasl_plain_password
56
+ sasl_scram_username
57
+ sasl_scram_password
58
+ ].each do |encryption_attribute|
59
+ optional(encryption_attribute).maybe(:str?)
60
+ end
61
+
62
+ optional(:ssl_verify_hostname).maybe(:bool?)
63
+ optional(:ssl_ca_certs_from_system).maybe(:bool?)
64
+ optional(:sasl_over_ssl).maybe(:bool?)
65
+ optional(:sasl_oauth_token_provider).value(:any)
66
+
67
+ # It's not with other encryptions as it has some more rules
68
+ optional(:sasl_scram_mechanism)
69
+ .maybe(:str?, included_in?: SASL_SCRAM_MECHANISMS)
70
+ end
71
+
72
+ # Uri rule to check if uri is in a Karafka acceptable format
73
+ rule(:seed_brokers) do
74
+ if value.is_a?(Array) && !value.all?(&method(:kafka_uri?))
75
+ key.failure(:invalid_broker_schema)
76
+ end
77
+ end
78
+
79
+ rule(:topics) do
80
+ if value.is_a?(Array)
81
+ names = value.map { |topic| topic[:name] }
82
+
83
+ key.failure(:topics_names_not_unique) if names.size != names.uniq.size
84
+ end
85
+ end
86
+
87
+ rule(:topics) do
88
+ if value.is_a?(Array)
89
+ value.each_with_index do |topic, index|
90
+ TOPIC_CONTRACT.call(topic).errors.each do |error|
91
+ key([:topics, index, error.path[0]]).failure(error.text)
92
+ end
93
+ end
94
+ end
95
+ end
96
+
97
+ rule(:assignment_strategy) do
98
+ key.failure(:does_not_respond_to_call) unless value.respond_to?(:call)
99
+ end
100
+
101
+ rule(:ssl_client_cert, :ssl_client_cert_key) do
102
+ if values[:ssl_client_cert] && !values[:ssl_client_cert_key]
103
+ key(:ssl_client_cert_key).failure(:ssl_client_cert_with_ssl_client_cert_key)
104
+ end
105
+ end
106
+
107
+ rule(:ssl_client_cert, :ssl_client_cert_key) do
108
+ if values[:ssl_client_cert_key] && !values[:ssl_client_cert]
109
+ key(:ssl_client_cert).failure(:ssl_client_cert_key_with_ssl_client_cert)
110
+ end
111
+ end
112
+
113
+ rule(:ssl_client_cert, :ssl_client_cert_chain) do
114
+ if values[:ssl_client_cert_chain] && !values[:ssl_client_cert]
115
+ key(:ssl_client_cert).failure(:ssl_client_cert_chain_with_ssl_client_cert)
116
+ end
117
+ end
118
+
119
+ rule(:ssl_client_cert_chain, :ssl_client_cert_key) do
120
+ if values[:ssl_client_cert_chain] && !values[:ssl_client_cert]
121
+ key(:ssl_client_cert).failure(:ssl_client_cert_chain_with_ssl_client_cert_key)
122
+ end
123
+ end
124
+
125
+ rule(:ssl_client_cert_key_password, :ssl_client_cert_key) do
126
+ if values[:ssl_client_cert_key_password] && !values[:ssl_client_cert_key]
127
+ key(:ssl_client_cert_key).failure(:ssl_client_cert_key_password_with_ssl_client_cert_key)
128
+ end
129
+ end
130
+
131
+ rule(:ssl_ca_cert) do
132
+ key.failure(:invalid_certificate) if value && !valid_certificate?(value)
133
+ end
134
+
135
+ rule(:ssl_client_cert) do
136
+ key.failure(:invalid_certificate) if value && !valid_certificate?(value)
137
+ end
138
+
139
+ rule(:ssl_ca_cert_file_path) do
140
+ if value
141
+ if File.exist?(value)
142
+ key.failure(:invalid_certificate_from_path) unless valid_certificate?(File.read(value))
143
+ else
144
+ key.failure(:does_not_exist)
145
+ end
146
+ end
147
+ end
148
+
149
+ rule(:ssl_client_cert_key) do
150
+ key.failure(:invalid_private_key) if value && !valid_private_key?(value)
151
+ end
152
+
153
+ rule(:ssl_client_cert_chain) do
154
+ key.failure(:invalid_certificate) if value && !valid_certificate?(value)
155
+ end
156
+
157
+ rule(:sasl_oauth_token_provider) do
158
+ key.failure(:does_not_respond_to_token) if value && !value.respond_to?(:token)
159
+ end
160
+
161
+ rule(:max_wait_time, :socket_timeout) do
162
+ max_wait_time = values[:max_wait_time]
163
+ socket_timeout = values[:socket_timeout]
164
+
165
+ if socket_timeout.is_a?(Numeric) &&
166
+ max_wait_time.is_a?(Numeric) &&
167
+ max_wait_time > socket_timeout
168
+
169
+ key(:max_wait_time).failure(:max_wait_time_limit)
170
+ end
171
+ end
172
+
173
+ rule(:pause_timeout, :pause_max_timeout, :pause_exponential_backoff) do
174
+ if values[:pause_exponential_backoff]
175
+ if values[:pause_timeout].to_i > values[:pause_max_timeout].to_i
176
+ key(:pause_max_timeout).failure(:max_timeout_size_for_exponential)
177
+ end
178
+ end
179
+ end
180
+
181
+ private
182
+
183
+ # @param value [String] potential RSA key value
184
+ # @return [Boolean] is the given string a valid RSA key
185
+ def valid_private_key?(value)
186
+ OpenSSL::PKey.read(value)
187
+ true
188
+ rescue OpenSSL::PKey::PKeyError
189
+ false
190
+ end
191
+
192
+ # @param value [String] potential X509 cert value
193
+ # @return [Boolean] is the given string a valid X509 cert
194
+ def valid_certificate?(value)
195
+ OpenSSL::X509::Certificate.new(value)
196
+ true
197
+ rescue OpenSSL::X509::CertificateError
198
+ false
199
+ end
200
+
201
+ # @param value [String] potential kafka uri
202
+ # @return [Boolean] true if it is a kafka uri, otherwise false
203
+ def kafka_uri?(value)
204
+ uri = URI.parse(value)
205
+ URI_SCHEMES.include?(uri.scheme) && uri.port
206
+ rescue URI::InvalidURIError
207
+ false
208
+ end
209
+ end
210
+ end
211
+ end
@@ -0,0 +1,19 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Contracts
5
+ # Consumer group topic validation rules
6
+ class ConsumerGroupTopic < Dry::Validation::Contract
7
+ params do
8
+ required(:id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
9
+ required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
10
+ required(:backend).filled(included_in?: %i[inline sidekiq])
11
+ required(:consumer).filled
12
+ required(:deserializer).filled
13
+ required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
14
+ required(:start_from_beginning).filled(:bool?)
15
+ required(:batch_consuming).filled(:bool?)
16
+ end
17
+ end
18
+ end
19
+ end
@@ -0,0 +1,54 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Contracts
5
+ # Validator to check responder topic usage
6
+ class ResponderUsageTopic < Dry::Validation::Contract
7
+ config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
8
+
9
+ params do
10
+ required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
11
+ required(:required).filled(:bool?)
12
+ required(:usage_count).filled(:int?, gteq?: 0)
13
+ required(:registered).filled(eql?: true)
14
+ required(:async).filled(:bool?)
15
+ required(:serializer).filled
16
+ end
17
+
18
+ rule(:required, :usage_count) do
19
+ key(:name).failure(:required_usage_count) if values[:required] && values[:usage_count] < 1
20
+ end
21
+ end
22
+
23
+ # Validator to check that everything in a responder flow matches responder rules
24
+ class ResponderUsage < Dry::Validation::Contract
25
+ include Dry::Core::Constants
26
+
27
+ # Contract for verifying the topic usage details
28
+ TOPIC_CONTRACT = ResponderUsageTopic.new.freeze
29
+
30
+ private_constant :TOPIC_CONTRACT
31
+
32
+ params do
33
+ required(:used_topics)
34
+ required(:registered_topics)
35
+ end
36
+
37
+ rule(:used_topics) do
38
+ (value || EMPTY_ARRAY).each do |used_topic|
39
+ TOPIC_CONTRACT.call(used_topic).errors.each do |error|
40
+ key([:used_topics, used_topic, error.path[0]]).failure(error.text)
41
+ end
42
+ end
43
+ end
44
+
45
+ rule(:registered_topics) do
46
+ (value || EMPTY_ARRAY).each do |used_topic|
47
+ TOPIC_CONTRACT.call(used_topic).errors.each do |error|
48
+ key([:registered_topics, used_topic, error.path[0]]).failure(error.text)
49
+ end
50
+ end
51
+ end
52
+ end
53
+ end
54
+ end
@@ -0,0 +1,31 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Contracts
5
+ # Contract for validating correctness of the server cli command options
6
+ # We validate some basics + the list of consumer_groups on which we want to use, to make
7
+ # sure that all of them are defined, plus that a pidfile does not exist
8
+ class ServerCliOptions < Dry::Validation::Contract
9
+ config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
10
+
11
+ params do
12
+ optional(:pid).filled(:str?)
13
+ optional(:daemon).filled(:bool?)
14
+ optional(:consumer_groups).value(:array, :filled?)
15
+ end
16
+
17
+ rule(:pid) do
18
+ key(:pid).failure(:pid_already_exists) if value && File.exist?(value)
19
+ end
20
+
21
+ rule(:consumer_groups) do
22
+ # If there were no consumer_groups declared in the server cli, it means that we will
23
+ # run all of them and no need to validate them here at all
24
+ if !value.nil? &&
25
+ !(value - Karafka::App.config.internal.routing_builder.map(&:name)).empty?
26
+ key(:consumer_groups).failure(:consumer_groups_inclusion)
27
+ end
28
+ end
29
+ end
30
+ end
31
+ end
@@ -0,0 +1,10 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ # Namespace for all the validation contracts that we use to check input
5
+ module Contracts
6
+ # Regexp for validating format of groups and topics
7
+ # @note It is not nested inside of the contracts, as it is used by couple of them
8
+ TOPIC_REGEXP = /\A(\w|-|\.)+\z/.freeze
9
+ end
10
+ end
@@ -6,17 +6,18 @@ module Karafka
6
6
  # Base class for all the Karafka internal errors
7
7
  BaseError = Class.new(StandardError)
8
8
 
9
- # Should be raised when we attemp to parse incoming params but parsing fails
10
- # If this error (or its descendant) is detected, we will pass the raw message
11
- # into params and proceed further
12
- ParserError = Class.new(BaseError)
9
+ # Should be raised when we have data that we cannot serialize
10
+ SerializationError = Class.new(BaseError)
11
+
12
+ # Should be raised when we tried to deserialize incoming data but we failed
13
+ DeserializationError = Class.new(BaseError)
13
14
 
14
15
  # Raised when router receives topic name which does not correspond with any routes
15
16
  # This can only happen in a case when:
16
- # - you've received a message and we cannot match it with a controller
17
+ # - you've received a message and we cannot match it with a consumer
17
18
  # - you've changed the routing, so router can no longer associate your topic to
18
- # any controller
19
- # - or in a case when you do a lot of metaprogramming and you change routing/etc on runtime
19
+ # any consumer
20
+ # - or in a case when you do a lot of meta-programming and you change routing/etc on runtime
20
21
  #
21
22
  # In case this happens, you will have to create a temporary route that will allow
22
23
  # you to "eat" everything from the Sidekiq queue.
@@ -25,12 +26,26 @@ module Karafka
25
26
 
26
27
  # Raised when we don't use or use responder not in the way it expected to based on the
27
28
  # topics usage definitions
28
- InvalidResponderUsage = Class.new(BaseError)
29
+ InvalidResponderUsageError = Class.new(BaseError)
30
+
31
+ # Raised when options that we provide to the responder to respond aren't what the contract
32
+ # requires
33
+ InvalidResponderMessageOptionsError = Class.new(BaseError)
34
+
35
+ # Raised when configuration doesn't match with validation contract
36
+ InvalidConfigurationError = Class.new(BaseError)
37
+
38
+ # Raised when we try to use Karafka CLI commands (except install) without a boot file
39
+ MissingBootFileError = Class.new(BaseError)
40
+
41
+ # Raised when we want to read a persisted thread messages consumer but it is unavailable
42
+ # This should never happen and if it does, please contact us
43
+ MissingClientError = Class.new(BaseError)
29
44
 
30
- # Raised when configuration doesn't match with validation schema
31
- InvalidConfiguration = Class.new(BaseError)
45
+ # Raised when we want to hook up to an event that is not registered and supported
46
+ UnregisteredMonitorEventError = Class.new(BaseError)
32
47
 
33
- # Raised when we try to use Karafka CLI commands (except install) without a bootfile
34
- MissingBootFile = Class.new(BaseError)
48
+ # Raised when we've waited enough for shutting down a non-responsive process
49
+ ForcefulShutdownError = Class.new(BaseError)
35
50
  end
36
51
  end
@@ -8,16 +8,24 @@ module Karafka
8
8
  # Starts listening on all the listeners asynchronously
9
9
  # Fetch loop should never end, which means that we won't create more actor clusters
10
10
  # so we don't have to terminate them
11
- def fetch_loop
12
- futures = listeners.map do |listener|
13
- listener.future.public_send(:fetch_loop, processor)
11
+ def call
12
+ threads = listeners.map do |listener|
13
+ # We abort on exception because there should be an exception handling developed for
14
+ # each listener running in separate threads, so the exceptions should never leak
15
+ # and if that happens, it means that something really bad happened and we should stop
16
+ # the whole process
17
+ Thread
18
+ .new { listener.call }
19
+ .tap { |thread| thread.abort_on_exception = true }
14
20
  end
15
21
 
16
- futures.map(&:value)
22
+ # We aggregate threads here for a supervised shutdown process
23
+ threads.each { |thread| Karafka::Server.consumer_threads << thread }
24
+ threads.each(&:join)
17
25
  # If anything crashes here, we need to raise the error and crash the runner because it means
18
- # that something really bad happened
19
- rescue => e
20
- Karafka.monitor.notice_error(self.class, e)
26
+ # that something terrible happened
27
+ rescue StandardError => e
28
+ Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
21
29
  Karafka::App.stop!
22
30
  raise e
23
31
  end
@@ -30,13 +38,5 @@ module Karafka
30
38
  Karafka::Connection::Listener.new(consumer_group)
31
39
  end
32
40
  end
33
-
34
- # @return [Proc] proc that should be processed when a messages arrive
35
- # @yieldparam messages [Array<Kafka::FetchedMessage>] messages from kafka (raw)
36
- def processor
37
- lambda do |group_id, messages|
38
- Karafka::Connection::MessagesProcessor.process(group_id, messages)
39
- end
40
- end
41
41
  end
42
42
  end
@@ -4,20 +4,22 @@ module Karafka
4
4
  module Helpers
5
5
  # Class used to autodetect corresponding classes that are internally inside Karafka framework
6
6
  # It is used among others to match:
7
- # controller => responder
7
+ # consumer => responder
8
8
  class ClassMatcher
9
- # Regexp used to remove any non classy like characters that might be in the controller
9
+ # Regexp used to remove any non classy like characters that might be in the consumer
10
10
  # class name (if defined dynamically, etc)
11
- CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}
11
+ CONSTANT_REGEXP = %r{[?!=+\-*/\^|&\[\]<>%~\#:\s()]}.freeze
12
+
13
+ private_constant :CONSTANT_REGEXP
12
14
 
13
15
  # @param klass [Class] class to which we want to find a corresponding class
14
16
  # @param from [String] what type of object is it (based on postfix name part)
15
17
  # @param to [String] what are we looking for (based on a postfix name part)
16
- # @example Controller that has a corresponding responder
17
- # matcher = Karafka::Helpers::ClassMatcher.new(SuperController, 'Controller', 'Responder')
18
+ # @example Consumer that has a corresponding responder
19
+ # matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, 'Consumer', 'Responder')
18
20
  # matcher.match #=> SuperResponder
19
- # @example Controller without a corresponding responder
20
- # matcher = Karafka::Helpers::ClassMatcher.new(Super2Controller, 'Controller', 'Responder')
21
+ # @example Consumer without a corresponding responder
22
+ # matcher = Karafka::Helpers::ClassMatcher.new(Super2Consumer, 'Consumer', 'Responder')
21
23
  # matcher.match #=> nil
22
24
  def initialize(klass, from:, to:)
23
25
  @klass = klass
@@ -30,18 +32,25 @@ module Karafka
30
32
  def match
31
33
  return nil if name.empty?
32
34
  return nil unless scope.const_defined?(name)
35
+
33
36
  matching = scope.const_get(name)
34
37
  same_scope?(matching) ? matching : nil
35
38
  end
36
39
 
37
40
  # @return [String] name of a new class that we're looking for
38
41
  # @note This method returns name of a class without a namespace
39
- # @example From SuperController matching responder
42
+ # @example From SuperConsumer matching responder
40
43
  # matcher.name #=> 'SuperResponder'
41
- # @example From Namespaced::Super2Controller matching responder
44
+ # @example From Namespaced::Super2Consumer matching responder
42
45
  # matcher.name #=> Super2Responder
43
46
  def name
44
- inflected = @klass.to_s.split('::').last.to_s
47
+ inflected = +@klass.to_s.split('::').last.to_s
48
+ # We inject the from into the name just in case it is missing as in a situation like
49
+ # that it would just sanitize the name without adding the "to" postfix.
50
+ # It could create cases when we want to build for example a responder to a consumer
51
+ # that does not have the "Consumer" postfix and would do nothing returning the same name.
52
+ # That would be bad as the matching classes shouldn't be matched to themselves.
53
+ inflected << @from unless inflected.include?(@from)
45
54
  inflected.gsub!(@from, @to)
46
55
  inflected.gsub!(CONSTANT_REGEXP, '')
47
56
  inflected
@@ -65,6 +74,7 @@ module Karafka
65
74
  def scope_of(klass)
66
75
  enclosing = klass.to_s.split('::')[0...-1]
67
76
  return ::Object if enclosing.empty?
77
+
68
78
  ::Object.const_get(enclosing.join('::'))
69
79
  end
70
80
 
@@ -5,7 +5,7 @@ module Karafka
5
5
  # A helper method that allows us to build methods that try to get a given
6
6
  # attribute from its instance value and if it fails, will fallback to
7
7
  # the default config or config.kafka value for a given attribute.
8
- # It is used to simplify the checkings.
8
+ # It is used to simplify the checks.
9
9
  # @note Worth noticing, that the value might be equal to false, so even
10
10
  # then we need to return it. That's why we check for nil?
11
11
  # @example Define config retried attribute for start_from_beginning
@@ -33,9 +33,9 @@ module Karafka
33
33
  return current_value unless current_value.nil?
34
34
 
35
35
  value = if Karafka::App.config.respond_to?(attribute)
36
- Karafka::App.config.public_send(attribute)
36
+ Karafka::App.config.send(attribute)
37
37
  else
38
- Karafka::App.config.kafka.public_send(attribute)
38
+ Karafka::App.config.kafka.send(attribute)
39
39
  end
40
40
 
41
41
  instance_variable_set(:"@#{attribute}", value)
@@ -0,0 +1,26 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Helpers
5
+ # Inflector provides inflection for the whole Karafka framework with additional inflection
6
+ # caching (due to the fact, that Dry::Inflector is slow)
7
+ module Inflector
8
+ # What inflection engine do we want to use
9
+ ENGINE = Dry::Inflector.new
10
+
11
+ @map = Concurrent::Hash.new
12
+
13
+ private_constant :ENGINE
14
+
15
+ class << self
16
+ # @param string [String] string that we want to convert to our underscore format
17
+ # @return [String] inflected string
18
+ # @example
19
+ # Karafka::Helpers::Inflector.map('Module/ControllerName') #=> 'module_controller_name'
20
+ def map(string)
21
+ @map[string] ||= ENGINE.underscore(string).tr('/', '_')
22
+ end
23
+ end
24
+ end
25
+ end
26
+ end