karafka 1.4.0 → 2.0.10

Files changed (172)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +89 -18
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +365 -1
  6. data/CONTRIBUTING.md +10 -19
  7. data/Gemfile +6 -0
  8. data/Gemfile.lock +56 -112
  9. data/LICENSE +17 -0
  10. data/LICENSE-COMM +89 -0
  11. data/LICENSE-LGPL +165 -0
  12. data/README.md +61 -68
  13. data/bin/benchmarks +85 -0
  14. data/bin/create_token +22 -0
  15. data/bin/integrations +272 -0
  16. data/bin/karafka +10 -0
  17. data/bin/scenario +29 -0
  18. data/bin/stress_many +13 -0
  19. data/bin/stress_one +13 -0
  20. data/certs/cert_chain.pem +26 -0
  21. data/certs/karafka-pro.pem +11 -0
  22. data/config/errors.yml +59 -38
  23. data/docker-compose.yml +10 -3
  24. data/karafka.gemspec +18 -21
  25. data/lib/active_job/karafka.rb +21 -0
  26. data/lib/active_job/queue_adapters/karafka_adapter.rb +26 -0
  27. data/lib/karafka/active_job/consumer.rb +26 -0
  28. data/lib/karafka/active_job/dispatcher.rb +38 -0
  29. data/lib/karafka/active_job/job_extensions.rb +34 -0
  30. data/lib/karafka/active_job/job_options_contract.rb +21 -0
  31. data/lib/karafka/active_job/routing/extensions.rb +33 -0
  32. data/lib/karafka/admin.rb +63 -0
  33. data/lib/karafka/app.rb +15 -20
  34. data/lib/karafka/base_consumer.rb +197 -31
  35. data/lib/karafka/cli/info.rb +44 -10
  36. data/lib/karafka/cli/install.rb +22 -12
  37. data/lib/karafka/cli/server.rb +17 -42
  38. data/lib/karafka/cli.rb +4 -3
  39. data/lib/karafka/connection/client.rb +379 -89
  40. data/lib/karafka/connection/listener.rb +250 -38
  41. data/lib/karafka/connection/listeners_batch.rb +24 -0
  42. data/lib/karafka/connection/messages_buffer.rb +84 -0
  43. data/lib/karafka/connection/pauses_manager.rb +46 -0
  44. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  45. data/lib/karafka/connection/rebalance_manager.rb +78 -0
  46. data/lib/karafka/contracts/base.rb +17 -0
  47. data/lib/karafka/contracts/config.rb +88 -11
  48. data/lib/karafka/contracts/consumer_group.rb +21 -184
  49. data/lib/karafka/contracts/consumer_group_topic.rb +35 -11
  50. data/lib/karafka/contracts/server_cli_options.rb +19 -18
  51. data/lib/karafka/contracts.rb +1 -1
  52. data/lib/karafka/env.rb +46 -0
  53. data/lib/karafka/errors.rb +21 -21
  54. data/lib/karafka/helpers/async.rb +33 -0
  55. data/lib/karafka/helpers/colorize.rb +20 -0
  56. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  57. data/lib/karafka/instrumentation/callbacks/error.rb +40 -0
  58. data/lib/karafka/instrumentation/callbacks/statistics.rb +41 -0
  59. data/lib/karafka/instrumentation/logger.rb +6 -10
  60. data/lib/karafka/instrumentation/logger_listener.rb +174 -0
  61. data/lib/karafka/instrumentation/monitor.rb +13 -61
  62. data/lib/karafka/instrumentation/notifications.rb +53 -0
  63. data/lib/karafka/instrumentation/proctitle_listener.rb +3 -3
  64. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  65. data/lib/karafka/instrumentation/vendors/datadog/listener.rb +232 -0
  66. data/lib/karafka/instrumentation.rb +21 -0
  67. data/lib/karafka/licenser.rb +75 -0
  68. data/lib/karafka/messages/batch_metadata.rb +45 -0
  69. data/lib/karafka/messages/builders/batch_metadata.rb +39 -0
  70. data/lib/karafka/messages/builders/message.rb +39 -0
  71. data/lib/karafka/messages/builders/messages.rb +34 -0
  72. data/lib/karafka/{params/params.rb → messages/message.rb} +7 -12
  73. data/lib/karafka/messages/messages.rb +64 -0
  74. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  75. data/lib/karafka/messages/seek.rb +9 -0
  76. data/lib/karafka/patches/rdkafka/consumer.rb +22 -0
  77. data/lib/karafka/pro/active_job/consumer.rb +46 -0
  78. data/lib/karafka/pro/active_job/dispatcher.rb +61 -0
  79. data/lib/karafka/pro/active_job/job_options_contract.rb +32 -0
  80. data/lib/karafka/pro/base_consumer.rb +107 -0
  81. data/lib/karafka/pro/contracts/base.rb +21 -0
  82. data/lib/karafka/pro/contracts/consumer_group.rb +34 -0
  83. data/lib/karafka/pro/contracts/consumer_group_topic.rb +69 -0
  84. data/lib/karafka/pro/loader.rb +76 -0
  85. data/lib/karafka/pro/performance_tracker.rb +80 -0
  86. data/lib/karafka/pro/processing/coordinator.rb +85 -0
  87. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +38 -0
  88. data/lib/karafka/pro/processing/jobs_builder.rb +32 -0
  89. data/lib/karafka/pro/processing/partitioner.rb +58 -0
  90. data/lib/karafka/pro/processing/scheduler.rb +56 -0
  91. data/lib/karafka/pro/routing/builder_extensions.rb +30 -0
  92. data/lib/karafka/pro/routing/topic_extensions.rb +74 -0
  93. data/lib/karafka/pro.rb +13 -0
  94. data/lib/karafka/process.rb +1 -0
  95. data/lib/karafka/processing/coordinator.rb +103 -0
  96. data/lib/karafka/processing/coordinators_buffer.rb +54 -0
  97. data/lib/karafka/processing/executor.rb +126 -0
  98. data/lib/karafka/processing/executors_buffer.rb +88 -0
  99. data/lib/karafka/processing/jobs/base.rb +55 -0
  100. data/lib/karafka/processing/jobs/consume.rb +47 -0
  101. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  102. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  103. data/lib/karafka/processing/jobs_builder.rb +29 -0
  104. data/lib/karafka/processing/jobs_queue.rb +144 -0
  105. data/lib/karafka/processing/partitioner.rb +22 -0
  106. data/lib/karafka/processing/result.rb +37 -0
  107. data/lib/karafka/processing/scheduler.rb +22 -0
  108. data/lib/karafka/processing/worker.rb +91 -0
  109. data/lib/karafka/processing/workers_batch.rb +27 -0
  110. data/lib/karafka/railtie.rb +127 -0
  111. data/lib/karafka/routing/builder.rb +26 -23
  112. data/lib/karafka/routing/consumer_group.rb +37 -17
  113. data/lib/karafka/routing/consumer_mapper.rb +1 -2
  114. data/lib/karafka/routing/proxy.rb +9 -16
  115. data/lib/karafka/routing/router.rb +1 -1
  116. data/lib/karafka/routing/subscription_group.rb +53 -0
  117. data/lib/karafka/routing/subscription_groups_builder.rb +54 -0
  118. data/lib/karafka/routing/topic.rb +65 -24
  119. data/lib/karafka/routing/topics.rb +38 -0
  120. data/lib/karafka/runner.rb +51 -0
  121. data/lib/karafka/serialization/json/deserializer.rb +6 -15
  122. data/lib/karafka/server.rb +67 -26
  123. data/lib/karafka/setup/config.rb +153 -175
  124. data/lib/karafka/status.rb +14 -5
  125. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  126. data/lib/karafka/templates/karafka.rb.erb +17 -55
  127. data/lib/karafka/time_trackers/base.rb +19 -0
  128. data/lib/karafka/time_trackers/pause.rb +92 -0
  129. data/lib/karafka/time_trackers/poll.rb +65 -0
  130. data/lib/karafka/version.rb +1 -1
  131. data/lib/karafka.rb +46 -16
  132. data.tar.gz.sig +0 -0
  133. metadata +145 -171
  134. metadata.gz.sig +0 -0
  135. data/.github/FUNDING.yml +0 -3
  136. data/MIT-LICENCE +0 -18
  137. data/certs/mensfeld.pem +0 -25
  138. data/lib/karafka/attributes_map.rb +0 -62
  139. data/lib/karafka/backends/inline.rb +0 -16
  140. data/lib/karafka/base_responder.rb +0 -226
  141. data/lib/karafka/cli/flow.rb +0 -48
  142. data/lib/karafka/code_reloader.rb +0 -67
  143. data/lib/karafka/connection/api_adapter.rb +0 -161
  144. data/lib/karafka/connection/batch_delegator.rb +0 -55
  145. data/lib/karafka/connection/builder.rb +0 -18
  146. data/lib/karafka/connection/message_delegator.rb +0 -36
  147. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  148. data/lib/karafka/consumers/callbacks.rb +0 -71
  149. data/lib/karafka/consumers/includer.rb +0 -64
  150. data/lib/karafka/consumers/responders.rb +0 -24
  151. data/lib/karafka/consumers/single_params.rb +0 -15
  152. data/lib/karafka/contracts/responder_usage.rb +0 -54
  153. data/lib/karafka/fetcher.rb +0 -42
  154. data/lib/karafka/helpers/class_matcher.rb +0 -88
  155. data/lib/karafka/helpers/config_retriever.rb +0 -46
  156. data/lib/karafka/helpers/inflector.rb +0 -26
  157. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  158. data/lib/karafka/params/batch_metadata.rb +0 -26
  159. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  160. data/lib/karafka/params/builders/params.rb +0 -38
  161. data/lib/karafka/params/builders/params_batch.rb +0 -25
  162. data/lib/karafka/params/params_batch.rb +0 -60
  163. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  164. data/lib/karafka/persistence/client.rb +0 -29
  165. data/lib/karafka/persistence/consumers.rb +0 -45
  166. data/lib/karafka/persistence/topics.rb +0 -48
  167. data/lib/karafka/responders/builder.rb +0 -36
  168. data/lib/karafka/responders/topic.rb +0 -55
  169. data/lib/karafka/routing/topic_mapper.rb +0 -53
  170. data/lib/karafka/serialization/json/serializer.rb +0 -31
  171. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  172. data/lib/karafka/templates/application_responder.rb.erb +0 -11
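The deletions below cover the whole 1.4 processing stack: delegators, backends, responders, callbacks and the params/params_batch API. The most visible consumer-facing change is the move from `params_batch` to `messages`; a minimal before/after sketch (consumer and parent class names are illustrative):

    # karafka 1.4: messages arrive as params_batch / params
    class EventsConsumer < ApplicationConsumer
      def consume
        params_batch.each do |params|
          puts params.payload
        end
      end
    end

    # karafka 2.0: the same flow uses the messages API
    class EventsConsumer < ApplicationConsumer
      def consume
        messages.each do |message|
          puts message.payload
        end
      end
    end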
data/lib/karafka/connection/message_delegator.rb
@@ -1,36 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Connection
-     # Module that delegates processing of a single received message for which we listen
-     # to a proper processor
-     module MessageDelegator
-       class << self
-         # Delegates the message (does something with it)
-         # It will either schedule or run a proper processor action for the incoming message
-         # @param group_id [String] group_id of the group from which a given message came
-         # @param kafka_message [<Kafka::FetchedMessage>] raw message from kafka
-         # @note This should be looped to obtain constant delegation of new messages
-         def call(group_id, kafka_message)
-           topic = Persistence::Topics.fetch(group_id, kafka_message.topic)
-           consumer = Persistence::Consumers.fetch(topic, kafka_message.partition)
-
-           Karafka.monitor.instrument(
-             'connection.message_delegator.call',
-             caller: self,
-             consumer: consumer,
-             kafka_message: kafka_message
-           ) do
-             # @note We always get a single message within a single delegator, which means
-             #   that we don't care whether the user marked it as batch consumed or not
-             consumer.params_batch = Params::Builders::ParamsBatch.from_kafka_messages(
-               [kafka_message],
-               topic
-             )
-             consumer.call
-           end
-         end
-       end
-     end
-   end
- end
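Because the delegator wrapped its work in `Karafka.monitor.instrument`, 1.4 apps could subscribe to its event key; a small sketch against the key shown above (the `time` entry is the timing that the instrumentation layer adds to the payload):

    # Hypothetical 1.4-era subscription to the delegation event
    Karafka.monitor.subscribe('connection.message_delegator.call') do |event|
      Karafka.logger.debug(
        "Delegated 1 message to #{event[:consumer].class} in #{event[:time]} ms"
      )
    end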
data/lib/karafka/consumers/batch_metadata.rb
@@ -1,10 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Consumers
-     # Brings the batch metadata into consumers that support batch_fetching
-     module BatchMetadata
-       attr_accessor :batch_metadata
-     end
-   end
- end
data/lib/karafka/consumers/callbacks.rb
@@ -1,71 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Consumers
-     # Additional callbacks that can be used to trigger some actions at certain moments like
-     # manual offset management, committing or anything else outside of the standard messages flow
-     # They are not included by default, as we don't want to provide functionalities that
-     # users do not require
-     # Please refer to the wiki callbacks page for more details on how to use them
-     module Callbacks
-       # Types of events on which we run callbacks
-       TYPES = %i[
-         after_fetch
-         after_poll
-         before_poll
-         before_stop
-       ].freeze
-
-       private_constant :TYPES
-
-       # Class methods needed to make callbacks run
-       module ClassMethods
-         TYPES.each do |type|
-           # Creates a callback wrapper
-           #
-           # @param method_name [Symbol, String] method name or nil if we plan to provide a block
-           # @yield A block with code that should be executed before scheduling
-           # @note We don't have to optimize the key fetching here as those are class methods
-           #   that are evaluated once upon start
-           define_method(type) do |method_name = nil, &block|
-             key = "consumers.#{Helpers::Inflector.map(to_s)}.#{type}"
-             Karafka::App.monitor.register_event(key)
-
-             Karafka::App.monitor.subscribe(key) do |event|
-               context = event[:context]
-
-               if method_name
-                 context.send(method_name)
-               else
-                 context.instance_eval(&block)
-               end
-             end
-           end
-         end
-       end
-
-       class << self
-         # @param consumer_class [Class] consumer class that we extend with callbacks
-         def included(consumer_class)
-           consumer_class.class_eval do
-             extend ClassMethods
-           end
-         end
-       end
-
-       # Executes the default consumer flow, runs callbacks and, if not halted, will call the
-       # process method of a proper backend. It is here because it interacts with the default
-       # Karafka call flow and needs to be overwritten to support callbacks
-       def call
-         if self.class.respond_to?(:after_fetch)
-           Karafka::App.monitor.instrument(
-             "consumers.#{Helpers::Inflector.map(self.class.to_s)}.after_fetch",
-             context: self
-           )
-         end
-
-         process
-       end
-     end
-   end
- end
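For context, this removed DSL was activated by including the module and declaring hooks either by method name or with a block; a sketch under those rules (consumer and method names are illustrative):

    class EventsConsumer < Karafka::BaseConsumer
      include Karafka::Consumers::Callbacks

      # Block form: instance_eval'ed in the consumer context
      after_fetch do
        # runs when the after_fetch event is instrumented in #call
      end

      # Method-name form: dispatched via context.send
      before_stop :flush_buffers

      private

      def flush_buffers
        # cleanup work before shutdown
      end
    end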
data/lib/karafka/consumers/includer.rb
@@ -1,64 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Additional functionalities for consumers
-   module Consumers
-     # Module used to inject functionalities into a given consumer instance, based on the
-     # consumer topic and its settings
-     # We don't need all the behaviors in all the cases, so it is not worth including
-     # everything in every consumer all the time
-     module Includer
-       class << self
-         # @param consumer [Karafka::BaseConsumer] consumer instance that will get some
-         #   functionalities based on the topic under which it operates
-         def call(consumer)
-           topic = consumer.topic
-
-           bind_backend(consumer, topic)
-           bind_params(consumer, topic)
-           bind_batch_metadata(consumer, topic)
-           bind_responders(consumer, topic)
-         end
-
-         private
-
-         # Figures out the backend for a given consumer class, based on the topic backend,
-         # and includes it into the consumer class
-         # @param consumer [Karafka::BaseConsumer] consumer instance
-         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-         def bind_backend(consumer, topic)
-           backend = Kernel.const_get("::Karafka::Backends::#{topic.backend.to_s.capitalize}")
-           consumer.extend(backend)
-         end
-
-         # Adds single #params support for non-batch processed topics
-         # @param consumer [Karafka::BaseConsumer] consumer instance
-         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-         def bind_params(consumer, topic)
-           return if topic.batch_consuming
-
-           consumer.extend(SingleParams)
-         end
-
-         # Adds an option to work with batch metadata for consumer instances that have
-         # batch fetching enabled
-         # @param consumer [Karafka::BaseConsumer] consumer instance
-         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-         def bind_batch_metadata(consumer, topic)
-           return unless topic.batch_fetching
-
-           consumer.extend(BatchMetadata)
-         end
-
-         # Adds responders support for topics and consumers with responders defined for them
-         # @param consumer [Karafka::BaseConsumer] consumer instance
-         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-         def bind_responders(consumer, topic)
-           return unless topic.responder
-
-           consumer.extend(Responders)
-         end
-       end
-     end
-   end
- end
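Which modules the Includer binds follows directly from routing settings; a hedged 1.4-style routing sketch (app class, group and topic names are illustrative):

    KarafkaApp.consumer_groups.draw do
      consumer_group :example do
        batch_fetching true         # consumers gain #batch_metadata
        topic :events do
          consumer EventsConsumer
          batch_consuming false     # consumers gain the single #params alias
        end
      end
    end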
data/lib/karafka/consumers/responders.rb
@@ -1,24 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Consumers
-     # Feature that allows us to use the responders flow in consumers
-     module Responders
-       # Responds with given data using the given responder. This allows us to define flows
-       # in a way similar to synchronous protocols
-       # @param data Anything we want to pass to the responder, based on which we want to
-       #   trigger further Kafka responding
-       def respond_with(*data)
-         Karafka.monitor.instrument(
-           'consumers.responders.respond_with',
-           caller: self,
-           data: data
-         ) do
-           # @note We build a new responder instance each time, as long-running (persisted)
-           #   consumers can respond multiple times during their life-cycle
-           topic.responder.new.call(*data)
-         end
-       end
-     end
-   end
- end
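A sketch of the removed responder flow end to end, assuming a topic with a responder registered in routing (class names are illustrative):

    class EventsResponder < ApplicationResponder
      topic :events_processed

      def respond(payloads)
        respond_to :events_processed, payloads
      end
    end

    class EventsConsumer < ApplicationConsumer
      def consume
        # Triggers EventsResponder via the topic's registered responder
        respond_with params_batch.payloads
      end
    end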
data/lib/karafka/consumers/single_params.rb
@@ -1,15 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Consumers
-     # Params alias for single message consumption consumers
-     module SingleParams
-       private
-
-       # @return [Karafka::Params::Params] params instance for non batch consumption consumers
-       def params
-         params_batch.first
-       end
-     end
-   end
- end
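With this alias, a non-batch consumer reads exactly one message per #consume call:

    def consume
      # params is simply params_batch.first
      puts params.payload
    end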
data/lib/karafka/contracts/responder_usage.rb
@@ -1,54 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Contracts
-     # Validator to check responder topic usage
-     class ResponderUsageTopic < Dry::Validation::Contract
-       config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
-
-       params do
-         required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
-         required(:required).filled(:bool?)
-         required(:usage_count).filled(:int?, gteq?: 0)
-         required(:registered).filled(eql?: true)
-         required(:async).filled(:bool?)
-         required(:serializer).filled
-       end
-
-       rule(:required, :usage_count) do
-         key(:name).failure(:required_usage_count) if values[:required] && values[:usage_count] < 1
-       end
-     end
-
-     # Validator to check that everything in a responder flow matches responder rules
-     class ResponderUsage < Dry::Validation::Contract
-       include Dry::Core::Constants
-
-       # Contract for verifying the topic usage details
-       TOPIC_CONTRACT = ResponderUsageTopic.new.freeze
-
-       private_constant :TOPIC_CONTRACT
-
-       params do
-         required(:used_topics)
-         required(:registered_topics)
-       end
-
-       rule(:used_topics) do
-         (value || EMPTY_ARRAY).each do |used_topic|
-           TOPIC_CONTRACT.call(used_topic).errors.each do |error|
-             key([:used_topics, used_topic, error.path[0]]).failure(error.text)
-           end
-         end
-       end
-
-       rule(:registered_topics) do
-         (value || EMPTY_ARRAY).each do |used_topic|
-           TOPIC_CONTRACT.call(used_topic).errors.each do |error|
-             key([:registered_topics, used_topic, error.path[0]]).failure(error.text)
-           end
-         end
-       end
-     end
-   end
- end
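It runs like any dry-validation contract; a hedged sketch of a failing check under the rules above (a registered topic marked as required but never used):

    result = Karafka::Contracts::ResponderUsage.new.call(
      used_topics: [],
      registered_topics: [
        {
          name: 'events',
          required: true,
          usage_count: 0,
          registered: true,
          async: false,
          serializer: Karafka::Serialization::Json::Serializer.new
        }
      ]
    )

    result.success? # => false (required_usage_count failure on :name)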
data/lib/karafka/fetcher.rb
@@ -1,42 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Class used to run the Karafka consumer and handle shutting down, restarting, etc.
-   # @note Creating multiple fetchers will result in having multiple connections to the same
-   #   topics; if there are no free partitions, the extra connections won't be used.
-   class Fetcher
-     # Starts listening on all the listeners asynchronously
-     # The fetch loop should never end, which means that we won't create more actor clusters,
-     # so we don't have to terminate them
-     def call
-       threads = listeners.map do |listener|
-         # We abort on exception because there should be exception handling developed for
-         # each listener running in a separate thread, so exceptions should never leak;
-         # if that happens, it means that something really bad happened and we should stop
-         # the whole process
-         Thread
-           .new { listener.call }
-           .tap { |thread| thread.abort_on_exception = true }
-       end
-
-       # We aggregate threads here for a supervised shutdown process
-       threads.each { |thread| Karafka::Server.consumer_threads << thread }
-       threads.each(&:join)
-     # If anything crashes here, we need to raise the error and crash the runner because it
-     # means that something terrible happened
-     rescue StandardError => e
-       Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
-       Karafka::App.stop!
-       raise e
-     end
-
-     private
-
-     # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
-     def listeners
-       @listeners ||= App.consumer_groups.active.map do |consumer_group|
-         Karafka::Connection::Listener.new(consumer_group)
-       end
-     end
-   end
- end
data/lib/karafka/helpers/class_matcher.rb
@@ -1,88 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Helpers
-     # Class used to autodetect corresponding classes that live inside the Karafka framework
-     # It is used, among other things, to match:
-     #   consumer => responder
-     class ClassMatcher
-       # Regexp used to remove any non-class-like characters that might be in the consumer
-       # class name (if defined dynamically, etc.)
-       CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}.freeze
-
-       private_constant :CONSTANT_REGEXP
-
-       # @param klass [Class] class for which we want to find a corresponding class
-       # @param from [String] what type of object it is (based on the postfix name part)
-       # @param to [String] what we are looking for (based on the postfix name part)
-       # @example Consumer that has a corresponding responder
-       #   matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, 'Consumer', 'Responder')
-       #   matcher.match #=> SuperResponder
-       # @example Consumer without a corresponding responder
-       #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Consumer, 'Consumer', 'Responder')
-       #   matcher.match #=> nil
-       def initialize(klass, from:, to:)
-         @klass = klass
-         @from = from
-         @to = to
-       end
-
-       # @return [Class] matched class
-       # @return [nil] nil if we couldn't find a matching class
-       def match
-         return nil if name.empty?
-         return nil unless scope.const_defined?(name)
-
-         matching = scope.const_get(name)
-         same_scope?(matching) ? matching : nil
-       end
-
-       # @return [String] name of the new class that we're looking for
-       # @note This method returns the name of a class without a namespace
-       # @example From SuperConsumer matching responder
-       #   matcher.name #=> 'SuperResponder'
-       # @example From Namespaced::Super2Consumer matching responder
-       #   matcher.name #=> 'Super2Responder'
-       def name
-         inflected = +@klass.to_s.split('::').last.to_s
-         # We inject the from into the name just in case it is missing, as in such a situation
-         # it would just sanitize the name without adding the "to" postfix.
-         # It could create cases where we want to build, for example, a responder to a consumer
-         # that does not have the "Consumer" postfix and would do nothing, returning the same name.
-         # That would be bad, as the matching classes shouldn't be matched to themselves.
-         inflected << @from unless inflected.include?(@from)
-         inflected.gsub!(@from, @to)
-         inflected.gsub!(CONSTANT_REGEXP, '')
-         inflected
-       end
-
-       # @return [Class, Module] class or module in which we're looking for a match
-       def scope
-         scope_of(@klass)
-       end
-
-       private
-
-       # @param klass [Class] class for which we want to extract its enclosing class/module
-       # @return [Class, Module] enclosing class/module
-       # @return [::Object] object if it was a root class
-       #
-       # @example Non-namespaced class
-       #   scope_of(SuperClass) #=> Object
-       # @example Namespaced class
-       #   scope_of(Abc::SuperClass) #=> Abc
-       def scope_of(klass)
-         enclosing = klass.to_s.split('::')[0...-1]
-         return ::Object if enclosing.empty?
-
-         ::Object.const_get(enclosing.join('::'))
-       end
-
-       # @param matching [Class] class whose scope we want to check
-       # @return [Boolean] true if the scope of the class is the same as the scope of matching
-       def same_scope?(matching)
-         scope == scope_of(matching)
-       end
-     end
-   end
- end
data/lib/karafka/helpers/config_retriever.rb
@@ -1,46 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Helpers
-     # A helper module that allows us to build methods that try to get a given
-     # attribute from its instance value and, if that fails, fall back to
-     # the default config or config.kafka value for the given attribute.
-     # It is used to simplify the checks.
-     # @note Worth noting that the value might be equal to false, so even
-     #   then we need to return it. That's why we check for nil?
-     # @example Define a config-retrieved attribute for start_from_beginning
-     #   class Test
-     #     extend Karafka::Helpers::ConfigRetriever
-     #     config_retriever_for :start_from_beginning
-     #   end
-     #
-     #   Test.new.start_from_beginning #=> false
-     #   test_instance = Test.new
-     #   test_instance.start_from_beginning = true
-     #   test_instance.start_from_beginning #=> true
-     module ConfigRetriever
-       # Builds proper methods for setting and retrieving (with fallback) a given attribute value
-       # @param attribute [Symbol] attribute name based on which we will build
-       #   an accessor with fallback
-       def config_retriever_for(attribute)
-         attr_writer attribute unless method_defined? :"#{attribute}="
-
-         # Don't redefine if we already have an accessor for a given element
-         return if method_defined? attribute
-
-         define_method attribute do
-           current_value = instance_variable_get(:"@#{attribute}")
-           return current_value unless current_value.nil?
-
-           value = if Karafka::App.config.respond_to?(attribute)
-                     Karafka::App.config.send(attribute)
-                   else
-                     Karafka::App.config.kafka.send(attribute)
-                   end
-
-           instance_variable_set(:"@#{attribute}", value)
-         end
-       end
-     end
-   end
- end
data/lib/karafka/helpers/inflector.rb
@@ -1,26 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Helpers
-     # Inflector provides inflection for the whole Karafka framework with additional inflection
-     # caching (since Dry::Inflector is slow)
-     module Inflector
-       # What inflection engine we want to use
-       ENGINE = Dry::Inflector.new
-
-       @map = Concurrent::Hash.new
-
-       private_constant :ENGINE
-
-       class << self
-         # @param string [String] string that we want to convert to our underscore format
-         # @return [String] inflected string
-         # @example
-         #   Karafka::Helpers::Inflector.map('Module/ControllerName') #=> 'module_controller_name'
-         def map(string)
-           @map[string] ||= ENGINE.underscore(string).tr('/', '_')
-         end
-       end
-     end
-   end
- end
data/lib/karafka/instrumentation/stdout_listener.rb
@@ -1,140 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Instrumentation
-     # Default listener that hooks up to our instrumentation and uses its events for logging
-     # It can be removed/replaced without any harm to the Karafka app flow
-     class StdoutListener
-       # Log levels that we use in this particular listener
-       USED_LOG_LEVELS = %i[
-         debug
-         info
-         error
-         fatal
-       ].freeze
-
-       # Logs details about incoming batches and with which consumer we will consume them
-       # @param event [Dry::Events::Event] event details including payload
-       def on_connection_batch_delegator_call(event)
-         consumer = event[:consumer]
-         topic = consumer.topic.name
-         kafka_messages = event[:kafka_batch].messages
-         info(
-           <<~MSG.chomp.tr("\n", ' ')
-             #{kafka_messages.count} messages
-             on #{topic} topic
-             delegated to #{consumer.class}
-           MSG
-         )
-       end
-
-       # Logs details about an incoming message and with which consumer we will consume it
-       # @param event [Dry::Events::Event] event details including payload
-       def on_connection_message_delegator_call(event)
-         consumer = event[:consumer]
-         topic = consumer.topic.name
-         info "1 message on #{topic} topic delegated to #{consumer.class}"
-       end
-
-       # Logs details about each received message value deserialization
-       # @param event [Dry::Events::Event] event details including payload
-       def on_params_params_deserialize(event)
-         # Keep in mind that the caller here is a params object, not a controller,
-         # so it returns the topic as a string, not a routing topic
-         debug(
-           <<~MSG.chomp.tr("\n", ' ')
-             Params deserialization for #{event[:caller].metadata.topic} topic
-             successful in #{event[:time]} ms
-           MSG
-         )
-       end
-
-       # Logs unsuccessful deserialization attempts of incoming data
-       # @param event [Dry::Events::Event] event details including payload
-       def on_params_params_deserialize_error(event)
-         topic = event[:caller].metadata.topic
-         error = event[:error]
-         error "Params deserialization error for #{topic} topic: #{error}"
-       end
-
-       # Logs errors that occurred in a listener fetch loop
-       # @param event [Dry::Events::Event] event details including payload
-       # @note It's an error, not a fatal, as we can recover from it
-       def on_connection_listener_fetch_loop_error(event)
-         error "Listener fetch loop error: #{event[:error]}"
-       end
-
-       # Logs errors that are related to the connection itself
-       # @param event [Dry::Events::Event] event details including payload
-       # @note Karafka will attempt to reconnect, so it's an error, not a fatal
-       def on_connection_client_fetch_loop_error(event)
-         error "Client fetch loop error: #{event[:error]}"
-       end
-
-       # Logs info about a crashed fetcher
-       # @param event [Dry::Events::Event] event details including payload
-       # @note If this happens, Karafka will shut down as it means a critical error
-       #   in one of the threads
-       def on_fetcher_call_error(event)
-         fatal "Fetcher crash due to an error: #{event[:error]}"
-       end
-
-       # Logs info about processing of a certain dataset with an inline backend
-       # @param event [Dry::Events::Event] event details including payload
-       def on_backends_inline_process(event)
-         count = event[:caller].send(:params_batch).to_a.size
-         topic = event[:caller].topic.name
-         time = event[:time]
-         info "Inline processing of topic #{topic} with #{count} messages took #{time} ms"
-       end
-
-       # Logs info about system signals that Karafka received
-       # @param event [Dry::Events::Event] event details including payload
-       def on_process_notice_signal(event)
-         info "Received #{event[:signal]} system signal"
-       end
-
-       # Logs info about responder usage within a controller flow
-       # @param event [Dry::Events::Event] event details including payload
-       def on_consumers_responders_respond_with(event)
-         calling = event[:caller]
-         responder = calling.topic.responder
-         data = event[:data]
-         info "Responded from #{calling.class} using #{responder} with the following data #{data}"
-       end
-
-       # Logs info that we're initializing the Karafka app
-       # @param _event [Dry::Events::Event] event details including payload
-       def on_app_initializing(_event)
-         info "Initializing Karafka server #{::Process.pid}"
-       end
-
-       # Logs info that we're running the Karafka app
-       # @param _event [Dry::Events::Event] event details including payload
-       def on_app_running(_event)
-         info "Running Karafka server #{::Process.pid}"
-       end
-
-       # Logs info that we're going to stop the Karafka server
-       # @param _event [Dry::Events::Event] event details including payload
-       def on_app_stopping(_event)
-         # We use a separate thread as logging can't be called from a trap context
-         Thread.new { info "Stopping Karafka server #{::Process.pid}" }
-       end
-
-       # Logs an error that Karafka was unable to stop the server gracefully and had to do a
-       # forced exit
-       # @param _event [Dry::Events::Event] event details including payload
-       def on_app_stopping_error(_event)
-         # We use a separate thread as logging can't be called from a trap context
-         Thread.new { error "Forceful Karafka server #{::Process.pid} stop" }
-       end
-
-       USED_LOG_LEVELS.each do |log_level|
-         define_method log_level do |*args|
-           Karafka.logger.send(log_level, *args)
-         end
-       end
-     end
-   end
- end
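In 1.4 this listener was attached in karafka.rb through the monitor; in 2.0 the same pattern applies to the new LoggerListener from the files-changed list above:

    # karafka 1.4 (old karafka.rb template)
    Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)

    # karafka 2.0 replacement
    Karafka.monitor.subscribe(Karafka::Instrumentation::LoggerListener.new)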
data/lib/karafka/params/batch_metadata.rb
@@ -1,26 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Params
-     # Simple batch metadata object that stores all non-message information received from the
-     # Kafka cluster while fetching the data
-     # @note This metadata object refers to per-batch metadata, not `#params.metadata`
-     BatchMetadata = Struct.new(
-       :batch_size,
-       :first_offset,
-       :highwater_mark_offset,
-       :unknown_last_offset,
-       :last_offset,
-       :offset_lag,
-       :deserializer,
-       :partition,
-       :topic,
-       keyword_init: true
-     ) do
-       # @return [Boolean] is the last offset known or unknown
-       def unknown_last_offset?
-         unknown_last_offset
-       end
-     end
-   end
- end
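Inside a 1.4 batch-fetching consumer this struct was exposed as #batch_metadata (via the Consumers::BatchMetadata mix-in above); a small sketch:

    def consume
      meta = batch_metadata # only set when batch_fetching is enabled
      Karafka.logger.info(
        "#{meta.batch_size} messages from #{meta.topic}##{meta.partition}, " \
        "offset lag: #{meta.offset_lag}"
      )
    end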
data/lib/karafka/params/builders/batch_metadata.rb
@@ -1,30 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Params
-     module Builders
-       # Builder for creating a batch metadata object based on the batch information
-       module BatchMetadata
-         class << self
-           # Creates metadata based on the kafka batch data
-           # @param kafka_batch [Kafka::FetchedBatch] kafka batch details
-           # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
-           # @return [Karafka::Params::BatchMetadata] batch metadata object
-           def from_kafka_batch(kafka_batch, topic)
-             Karafka::Params::BatchMetadata.new(
-               batch_size: kafka_batch.messages.count,
-               first_offset: kafka_batch.first_offset,
-               highwater_mark_offset: kafka_batch.highwater_mark_offset,
-               unknown_last_offset: kafka_batch.unknown_last_offset?,
-               last_offset: kafka_batch.last_offset,
-               offset_lag: kafka_batch.offset_lag,
-               deserializer: topic.deserializer,
-               partition: kafka_batch.partition,
-               topic: topic.name
-             ).freeze
-           end
-         end
-       end
-     end
-   end
- end
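The builder was invoked right before batch consumption; a hypothetical reconstruction of the call site (in 1.4 it lived in the also-removed Connection::BatchDelegator):

    # kafka_batch is a Kafka::FetchedBatch from ruby-kafka,
    # topic a Karafka::Routing::Topic resolved for the consumer group
    consumer.batch_metadata = Karafka::Params::Builders::BatchMetadata.from_kafka_batch(
      kafka_batch,
      topic
    )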