karafka 1.4.9 → 2.0.0.alpha1

Files changed (127)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +3 -0
  4. data/.github/workflows/ci.yml +78 -26
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +46 -0
  7. data/Gemfile +6 -0
  8. data/Gemfile.lock +39 -49
  9. data/LICENSE +14 -0
  10. data/LICENSE-COMM +89 -0
  11. data/LICENSE-LGPL +165 -0
  12. data/README.md +16 -48
  13. data/bin/benchmarks +85 -0
  14. data/bin/create_token +28 -0
  15. data/bin/integrations +160 -0
  16. data/bin/stress +13 -0
  17. data/certs/karafka-pro.pem +11 -0
  18. data/config/errors.yml +4 -38
  19. data/docker-compose.yml +11 -3
  20. data/karafka.gemspec +17 -17
  21. data/lib/active_job/consumer.rb +22 -0
  22. data/lib/active_job/karafka.rb +18 -0
  23. data/lib/active_job/queue_adapters/karafka_adapter.rb +29 -0
  24. data/lib/active_job/routing_extensions.rb +15 -0
  25. data/lib/karafka/app.rb +13 -20
  26. data/lib/karafka/base_consumer.rb +103 -34
  27. data/lib/karafka/cli/base.rb +4 -4
  28. data/lib/karafka/cli/info.rb +43 -8
  29. data/lib/karafka/cli/install.rb +3 -8
  30. data/lib/karafka/cli/server.rb +17 -30
  31. data/lib/karafka/cli.rb +4 -11
  32. data/lib/karafka/connection/client.rb +279 -93
  33. data/lib/karafka/connection/listener.rb +137 -38
  34. data/lib/karafka/connection/messages_buffer.rb +57 -0
  35. data/lib/karafka/connection/pauses_manager.rb +46 -0
  36. data/lib/karafka/connection/rebalance_manager.rb +62 -0
  37. data/lib/karafka/contracts/config.rb +25 -7
  38. data/lib/karafka/contracts/consumer_group.rb +0 -173
  39. data/lib/karafka/contracts/consumer_group_topic.rb +17 -7
  40. data/lib/karafka/contracts/server_cli_options.rb +1 -9
  41. data/lib/karafka/contracts.rb +1 -1
  42. data/lib/karafka/env.rb +46 -0
  43. data/lib/karafka/errors.rb +14 -18
  44. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  45. data/lib/karafka/instrumentation/callbacks/error.rb +40 -0
  46. data/lib/karafka/instrumentation/callbacks/statistics.rb +42 -0
  47. data/lib/karafka/instrumentation/monitor.rb +14 -21
  48. data/lib/karafka/instrumentation/stdout_listener.rb +64 -91
  49. data/lib/karafka/instrumentation.rb +21 -0
  50. data/lib/karafka/licenser.rb +65 -0
  51. data/lib/karafka/{params → messages}/batch_metadata.rb +7 -13
  52. data/lib/karafka/messages/builders/batch_metadata.rb +30 -0
  53. data/lib/karafka/messages/builders/message.rb +38 -0
  54. data/lib/karafka/messages/builders/messages.rb +40 -0
  55. data/lib/karafka/{params/params.rb → messages/message.rb} +7 -12
  56. data/lib/karafka/messages/messages.rb +64 -0
  57. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  58. data/lib/karafka/messages/seek.rb +9 -0
  59. data/lib/karafka/patches/rdkafka/consumer.rb +22 -0
  60. data/lib/karafka/processing/executor.rb +96 -0
  61. data/lib/karafka/processing/executors_buffer.rb +49 -0
  62. data/lib/karafka/processing/jobs/base.rb +18 -0
  63. data/lib/karafka/processing/jobs/consume.rb +28 -0
  64. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  65. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  66. data/lib/karafka/processing/jobs_queue.rb +121 -0
  67. data/lib/karafka/processing/worker.rb +57 -0
  68. data/lib/karafka/processing/workers_batch.rb +22 -0
  69. data/lib/karafka/railtie.rb +65 -0
  70. data/lib/karafka/routing/builder.rb +15 -14
  71. data/lib/karafka/routing/consumer_group.rb +10 -18
  72. data/lib/karafka/routing/consumer_mapper.rb +1 -2
  73. data/lib/karafka/routing/router.rb +1 -1
  74. data/lib/karafka/routing/subscription_group.rb +53 -0
  75. data/lib/karafka/routing/subscription_groups_builder.rb +51 -0
  76. data/lib/karafka/routing/topic.rb +47 -25
  77. data/lib/karafka/runner.rb +59 -0
  78. data/lib/karafka/serialization/json/deserializer.rb +6 -15
  79. data/lib/karafka/server.rb +62 -25
  80. data/lib/karafka/setup/config.rb +86 -159
  81. data/lib/karafka/status.rb +13 -3
  82. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  83. data/lib/karafka/templates/karafka.rb.erb +14 -50
  84. data/lib/karafka/time_trackers/base.rb +19 -0
  85. data/lib/karafka/time_trackers/pause.rb +84 -0
  86. data/lib/karafka/time_trackers/poll.rb +65 -0
  87. data/lib/karafka/version.rb +1 -1
  88. data/lib/karafka.rb +30 -13
  89. data.tar.gz.sig +0 -0
  90. metadata +78 -108
  91. metadata.gz.sig +0 -0
  92. data/MIT-LICENCE +0 -18
  93. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  94. data/lib/karafka/attributes_map.rb +0 -63
  95. data/lib/karafka/backends/inline.rb +0 -16
  96. data/lib/karafka/base_responder.rb +0 -226
  97. data/lib/karafka/cli/flow.rb +0 -48
  98. data/lib/karafka/cli/missingno.rb +0 -19
  99. data/lib/karafka/code_reloader.rb +0 -67
  100. data/lib/karafka/connection/api_adapter.rb +0 -158
  101. data/lib/karafka/connection/batch_delegator.rb +0 -55
  102. data/lib/karafka/connection/builder.rb +0 -23
  103. data/lib/karafka/connection/message_delegator.rb +0 -36
  104. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  105. data/lib/karafka/consumers/callbacks.rb +0 -71
  106. data/lib/karafka/consumers/includer.rb +0 -64
  107. data/lib/karafka/consumers/responders.rb +0 -24
  108. data/lib/karafka/consumers/single_params.rb +0 -15
  109. data/lib/karafka/contracts/responder_usage.rb +0 -54
  110. data/lib/karafka/fetcher.rb +0 -42
  111. data/lib/karafka/helpers/class_matcher.rb +0 -88
  112. data/lib/karafka/helpers/config_retriever.rb +0 -46
  113. data/lib/karafka/helpers/inflector.rb +0 -26
  114. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  115. data/lib/karafka/params/builders/params.rb +0 -38
  116. data/lib/karafka/params/builders/params_batch.rb +0 -25
  117. data/lib/karafka/params/params_batch.rb +0 -60
  118. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  119. data/lib/karafka/persistence/client.rb +0 -29
  120. data/lib/karafka/persistence/consumers.rb +0 -45
  121. data/lib/karafka/persistence/topics.rb +0 -48
  122. data/lib/karafka/responders/builder.rb +0 -36
  123. data/lib/karafka/responders/topic.rb +0 -55
  124. data/lib/karafka/routing/topic_mapper.rb +0 -53
  125. data/lib/karafka/serialization/json/serializer.rb +0 -31
  126. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  127. data/lib/karafka/templates/application_responder.rb.erb +0 -11
data/lib/karafka/consumers/callbacks.rb
@@ -1,71 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Consumers
-     # Additional callbacks that can be used to trigger some actions on certain moments like
-     # manual offset management, committing or anything else outside of a standard messages flow
-     # They are not included by default, as we don't want to provide functionalities that are
-     # not required by users by default
-     # Please refer to the wiki callbacks page for more details on how to use them
-     module Callbacks
-       # Types of events on which we run callbacks
-       TYPES = %i[
-         after_fetch
-         after_poll
-         before_poll
-         before_stop
-       ].freeze
-
-       private_constant :TYPES
-
-       # Class methods needed to make callbacks run
-       module ClassMethods
-         TYPES.each do |type|
-           # Creates a callback wrapper
-           #
-           # @param method_name [Symbol, String] method name or nil if we plan to provide a block
-           # @yield A block with a code that should be executed before scheduling
-           # @note We don't have to optimize the key fetching here as those are class methods that
-           #   are evaluated once upon start
-           define_method(type) do |method_name = nil, &block|
-             key = "consumers.#{Helpers::Inflector.map(to_s)}.#{type}"
-             Karafka::App.monitor.register_event(key)
-
-             Karafka::App.monitor.subscribe(key) do |event|
-               context = event[:context]
-
-               if method_name
-                 context.send(method_name)
-               else
-                 context.instance_eval(&block)
-               end
-             end
-           end
-         end
-       end
-
-       class << self
-         # @param consumer_class [Class] consumer class that we extend with callbacks
-         def included(consumer_class)
-           consumer_class.class_eval do
-             extend ClassMethods
-           end
-         end
-       end
-
-       # Executes the default consumer flow, runs callbacks and if not halted will call process
-       # method of a proper backend. It is here because it interacts with the default Karafka
-       # call flow and needs to be overwritten to support callbacks
-       def call
-         if self.class.respond_to?(:after_fetch)
-           Karafka::App.monitor.instrument(
-             "consumers.#{Helpers::Inflector.map(self.class.to_s)}.after_fetch",
-             context: self
-           )
-         end
-
-         process
-       end
-     end
-   end
- end
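
For context on what this removal drops: in Karafka 1.4 these callbacks were declared on the consumer class, either by method name or with a block (the block is instance_eval-ed in the consumer context, per the wrapper above). A minimal sketch of the old usage; the consumer class and method names here are illustrative, not from this diff:

    class EventsConsumer < Karafka::BaseConsumer
      include Karafka::Consumers::Callbacks

      # Method-name variant
      before_poll :log_poll

      # Block variant, evaluated in the consumer instance context
      after_fetch do
        # e.g. inspect or filter params_batch before #consume runs
      end

      def consume; end

      private

      def log_poll
        Karafka.logger.debug("About to poll #{topic.name}")
      end
    end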
data/lib/karafka/consumers/includer.rb
@@ -1,64 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Additional functionalities for consumers
-   module Consumers
-     # Module used to inject functionalities into a given consumer instance, based on the consumer
-     # topic and its settings
-     # We don't need all the behaviors in all the cases, so it is not worth having everything
-     # in all the cases all the time
-     module Includer
-       class << self
-         # @param consumer [Karafka::BaseConsumer] consumer instance, that will get some
-         #   functionalities based on the topic under which it operates
-         def call(consumer)
-           topic = consumer.topic
-
-           bind_backend(consumer, topic)
-           bind_params(consumer, topic)
-           bind_batch_metadata(consumer, topic)
-           bind_responders(consumer, topic)
-         end
-
-         private
-
-         # Figures out backend for a given consumer class, based on the topic backend and
-         # includes it into the consumer class
-         # @param consumer [Karafka::BaseConsumer] consumer instance
-         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-         def bind_backend(consumer, topic)
-           backend = Kernel.const_get("::Karafka::Backends::#{topic.backend.to_s.capitalize}")
-           consumer.extend(backend)
-         end
-
-         # Adds a single #params support for non batch processed topics
-         # @param consumer [Karafka::BaseConsumer] consumer instance
-         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-         def bind_params(consumer, topic)
-           return if topic.batch_consuming
-
-           consumer.extend(SingleParams)
-         end
-
-         # Adds an option to work with batch metadata for consumer instances that have
-         # batch fetching enabled
-         # @param consumer [Karafka::BaseConsumer] consumer instance
-         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-         def bind_batch_metadata(consumer, topic)
-           return unless topic.batch_fetching
-
-           consumer.extend(BatchMetadata)
-         end
-
-         # Adds responders support for topics and consumers with responders defined for them
-         # @param consumer [Karafka::BaseConsumer] consumer instance
-         # @param topic [Karafka::Routing::Topic] topic of a consumer class
-         def bind_responders(consumer, topic)
-           return unless topic.responder
-
-           consumer.extend(Responders)
-         end
-       end
-     end
-   end
- end
data/lib/karafka/consumers/responders.rb
@@ -1,24 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Consumers
-     # Feature that allows us to use responders flow in consumer
-     module Responders
-       # Responds with given data using given responder. This allows us to have a similar way of
-       # defining flows like synchronous protocols
-       # @param data Anything we want to pass to responder based on which we want to trigger further
-       #   Kafka responding
-       def respond_with(*data)
-         Karafka.monitor.instrument(
-           'consumers.responders.respond_with',
-           caller: self,
-           data: data
-         ) do
-           # @note we build a new instance of responder each time, as long-running (persisted)
-           #   consumers can respond multiple times during the life-cycle
-           topic.responder.new.call(*data)
-         end
-       end
-     end
-   end
- end
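
Since the responders flow is removed entirely in 2.0 (see also base_responder.rb in the file list above), here is a hedged sketch of the 1.4-era pairing that #respond_with supported; the class names are illustrative:

    class PingResponder < Karafka::BaseResponder
      topic :pong

      def respond(data)
        respond_to :pong, data
      end
    end

    class PingConsumer < Karafka::BaseConsumer
      def consume
        # Instruments 'consumers.responders.respond_with' and invokes a fresh
        # PingResponder#call with the data, as in the module above
        respond_with(event: 'pong')
      end
    end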
data/lib/karafka/consumers/single_params.rb
@@ -1,15 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Consumers
-     # Params alias for single message consumption consumers
-     module SingleParams
-       private
-
-       # @return [Karafka::Params::Params] params instance for non batch consumption consumers
-       def params
-         params_batch.first
-       end
-     end
-   end
- end
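
The alias above means a non-batch consumer treats the first (and only) element of params_batch as its message. A sketch, assuming a topic configured with batch_consuming disabled so Consumers::Includer extends the consumer with SingleParams (names illustrative):

    class UserConsumer < Karafka::BaseConsumer
      def consume
        # #params is just params_batch.first
        user = params.payload
        Karafka.logger.info("User event: #{user['id']}")
      end
    end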
data/lib/karafka/contracts/responder_usage.rb
@@ -1,54 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Contracts
-     # Validator to check responder topic usage
-     class ResponderUsageTopic < Dry::Validation::Contract
-       config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
-
-       params do
-         required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
-         required(:required).filled(:bool?)
-         required(:usage_count).filled(:int?, gteq?: 0)
-         required(:registered).filled(eql?: true)
-         required(:async).filled(:bool?)
-         required(:serializer).filled
-       end
-
-       rule(:required, :usage_count) do
-         key(:name).failure(:required_usage_count) if values[:required] && values[:usage_count] < 1
-       end
-     end
-
-     # Validator to check that everything in a responder flow matches responder rules
-     class ResponderUsage < Dry::Validation::Contract
-       include Dry::Core::Constants
-
-       # Contract for verifying the topic usage details
-       TOPIC_CONTRACT = ResponderUsageTopic.new.freeze
-
-       private_constant :TOPIC_CONTRACT
-
-       params do
-         required(:used_topics)
-         required(:registered_topics)
-       end
-
-       rule(:used_topics) do
-         (value || EMPTY_ARRAY).each do |used_topic|
-           TOPIC_CONTRACT.call(used_topic).errors.each do |error|
-             key([:used_topics, used_topic, error.path[0]]).failure(error.text)
-           end
-         end
-       end
-
-       rule(:registered_topics) do
-         (value || EMPTY_ARRAY).each do |used_topic|
-           TOPIC_CONTRACT.call(used_topic).errors.each do |error|
-             key([:registered_topics, used_topic, error.path[0]]).failure(error.text)
-           end
-         end
-       end
-     end
-   end
- end
data/lib/karafka/fetcher.rb
@@ -1,42 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Class used to run the Karafka consumer and handle shutting down, restarting etc
-   # @note Creating multiple fetchers will result in having multiple connections to the same
-   #   topics, which means that if there are no partitions, it won't use them.
-   class Fetcher
-     # Starts listening on all the listeners asynchronously
-     # Fetch loop should never end, which means that we won't create more actor clusters
-     # so we don't have to terminate them
-     def call
-       threads = listeners.map do |listener|
-         # We abort on exception because there should be an exception handling developed for
-         # each listener running in separate threads, so the exceptions should never leak
-         # and if that happens, it means that something really bad happened and we should stop
-         # the whole process
-         Thread
-           .new { listener.call }
-           .tap { |thread| thread.abort_on_exception = true }
-       end
-
-       # We aggregate threads here for a supervised shutdown process
-       threads.each { |thread| Karafka::Server.consumer_threads << thread }
-       threads.each(&:join)
-     # If anything crashes here, we need to raise the error and crash the runner because it means
-     # that something terrible happened
-     rescue StandardError => e
-       Karafka.monitor.instrument('fetcher.call.error', caller: self, error: e)
-       Karafka::App.stop!
-       raise e
-     end
-
-     private
-
-     # @return [Array<Karafka::Connection::Listener>] listeners that will consume messages
-     def listeners
-       @listeners ||= App.consumer_groups.active.map do |consumer_group|
-         Karafka::Connection::Listener.new(consumer_group)
-       end
-     end
-   end
- end
data/lib/karafka/helpers/class_matcher.rb
@@ -1,88 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Helpers
-     # Class used to autodetect corresponding classes that are internally inside Karafka framework
-     # It is used among others to match:
-     #   consumer => responder
-     class ClassMatcher
-       # Regexp used to remove any non classy like characters that might be in the consumer
-       # class name (if defined dynamically, etc)
-       CONSTANT_REGEXP = %r{[?!=+\-*/\^|&\[\]<>%~\#:\s()]}.freeze
-
-       private_constant :CONSTANT_REGEXP
-
-       # @param klass [Class] class to which we want to find a corresponding class
-       # @param from [String] what type of object is it (based on postfix name part)
-       # @param to [String] what are we looking for (based on a postfix name part)
-       # @example Consumer that has a corresponding responder
-       #   matcher = Karafka::Helpers::ClassMatcher.new(SuperConsumer, from: 'Consumer', to: 'Responder')
-       #   matcher.match #=> SuperResponder
-       # @example Consumer without a corresponding responder
-       #   matcher = Karafka::Helpers::ClassMatcher.new(Super2Consumer, from: 'Consumer', to: 'Responder')
-       #   matcher.match #=> nil
-       def initialize(klass, from:, to:)
-         @klass = klass
-         @from = from
-         @to = to
-       end
-
-       # @return [Class] matched class
-       # @return [nil] nil if we couldn't find matching class
-       def match
-         return nil if name.empty?
-         return nil unless scope.const_defined?(name)
-
-         matching = scope.const_get(name)
-         same_scope?(matching) ? matching : nil
-       end
-
-       # @return [String] name of a new class that we're looking for
-       # @note This method returns name of a class without a namespace
-       # @example From SuperConsumer matching responder
-       #   matcher.name #=> 'SuperResponder'
-       # @example From Namespaced::Super2Consumer matching responder
-       #   matcher.name #=> 'Super2Responder'
-       def name
-         inflected = +@klass.to_s.split('::').last.to_s
-         # We inject the from into the name just in case it is missing as in a situation like
-         # that it would just sanitize the name without adding the "to" postfix.
-         # It could create cases when we want to build for example a responder to a consumer
-         # that does not have the "Consumer" postfix and would do nothing returning the same name.
-         # That would be bad as the matching classes shouldn't be matched to themselves.
-         inflected << @from unless inflected.include?(@from)
-         inflected.gsub!(@from, @to)
-         inflected.gsub!(CONSTANT_REGEXP, '')
-         inflected
-       end
-
-       # @return [Class, Module] class or module in which we're looking for a matching
-       def scope
-         scope_of(@klass)
-       end
-
-       private
-
-       # @param klass [Class] class for which we want to extract its enclosing class/module
-       # @return [Class, Module] enclosing class/module
-       # @return [::Object] object if it was a root class
-       #
-       # @example Non-namespaced class
-       #   scope_of(SuperClass) #=> Object
-       # @example Namespaced class
-       #   scope_of(Abc::SuperClass) #=> Abc
-       def scope_of(klass)
-         enclosing = klass.to_s.split('::')[0...-1]
-         return ::Object if enclosing.empty?
-
-         ::Object.const_get(enclosing.join('::'))
-       end
-
-       # @param matching [Class] class of which scope we want to check
-       # @return [Boolean] true if the scope of class is the same as scope of matching
-       def same_scope?(matching)
-         scope == scope_of(matching)
-       end
-     end
-   end
- end
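
Putting the pieces above together, matching is both name-based (#name swaps the postfix) and scope-based (#match only returns a constant defined in the same namespace). A usage sketch; Videos::DetailsConsumer is an illustrative class, not from this diff:

    matcher = Karafka::Helpers::ClassMatcher.new(
      Videos::DetailsConsumer,
      from: 'Consumer',
      to: 'Responder'
    )

    matcher.name  #=> 'DetailsResponder' (namespace stripped)
    matcher.scope #=> Videos
    # Returns Videos::DetailsResponder if it is defined in the same scope,
    # otherwise nil
    matcher.match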
data/lib/karafka/helpers/config_retriever.rb
@@ -1,46 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Helpers
-     # A helper that allows us to build methods that try to get a given
-     # attribute from its instance value and if it fails, will fallback to
-     # the default config or config.kafka value for a given attribute.
-     # It is used to simplify the checks.
-     # @note Worth noticing, that the value might be equal to false, so even
-     #   then we need to return it. That's why we check for nil?
-     # @example Define config retrieved attribute for start_from_beginning
-     #   class Test
-     #     extend Karafka::Helpers::ConfigRetriever
-     #     config_retriever_for :start_from_beginning
-     #   end
-     #
-     #   Test.new.start_from_beginning #=> false
-     #   test_instance = Test.new
-     #   test_instance.start_from_beginning = true
-     #   test_instance.start_from_beginning #=> true
-     module ConfigRetriever
-       # Builds proper methods for setting and retrieving (with fallback) given attribute value
-       # @param attribute [Symbol] attribute name based on which we will build
-       #   accessor with fallback
-       def config_retriever_for(attribute)
-         attr_writer attribute unless method_defined? :"#{attribute}="
-
-         # Don't redefine if we already have accessor for a given element
-         return if method_defined? attribute
-
-         define_method attribute do
-           current_value = instance_variable_get(:"@#{attribute}")
-           return current_value unless current_value.nil?
-
-           value = if Karafka::App.config.respond_to?(attribute)
-                     Karafka::App.config.send(attribute)
-                   else
-                     Karafka::App.config.kafka.send(attribute)
-                   end
-
-           instance_variable_set(:"@#{attribute}", value)
-         end
-       end
-     end
-   end
- end
data/lib/karafka/helpers/inflector.rb
@@ -1,26 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Helpers
-     # Inflector provides inflection for the whole Karafka framework with additional inflection
-     # caching (due to the fact that Dry::Inflector is slow)
-     module Inflector
-       # What inflection engine do we want to use
-       ENGINE = Dry::Inflector.new
-
-       @map = Concurrent::Hash.new
-
-       private_constant :ENGINE
-
-       class << self
-         # @param string [String] string that we want to convert to our underscore format
-         # @return [String] inflected string
-         # @example
-         #   Karafka::Helpers::Inflector.map('Module/ControllerName') #=> 'module_controller_name'
-         def map(string)
-           @map[string] ||= ENGINE.underscore(string).tr('/', '_')
-         end
-       end
-     end
-   end
- end
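
The memoized #map is what builds the monitor event keys used by the removed Callbacks module above and the ruby-kafka patch further down in this diff. For example (Videos::DetailsConsumer is an illustrative class name):

    Karafka::Helpers::Inflector.map('Videos::DetailsConsumer')
    #=> 'videos_details_consumer'

    # As used for event keys:
    "consumers.#{Karafka::Helpers::Inflector.map('Videos::DetailsConsumer')}.after_fetch"
    #=> 'consumers.videos_details_consumer.after_fetch'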
data/lib/karafka/params/builders/batch_metadata.rb
@@ -1,30 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Params
-     module Builders
-       # Builder for creating batch metadata object based on the batch information
-       module BatchMetadata
-         class << self
-           # Creates metadata based on the kafka batch data
-           # @param kafka_batch [Kafka::FetchedBatch] kafka batch details
-           # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
-           # @return [Karafka::Params::BatchMetadata] batch metadata object
-           def from_kafka_batch(kafka_batch, topic)
-             Karafka::Params::BatchMetadata.new(
-               batch_size: kafka_batch.messages.count,
-               first_offset: kafka_batch.first_offset,
-               highwater_mark_offset: kafka_batch.highwater_mark_offset,
-               unknown_last_offset: kafka_batch.unknown_last_offset?,
-               last_offset: kafka_batch.last_offset,
-               offset_lag: kafka_batch.offset_lag,
-               deserializer: topic.deserializer,
-               partition: kafka_batch.partition,
-               topic: topic.name
-             ).freeze
-           end
-         end
-       end
-     end
-   end
- end
data/lib/karafka/params/builders/params.rb
@@ -1,38 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Params
-     # Because we create params-related objects in a couple of contexts / places,
-     # and backends can build them up their own way, we have this namespace.
-     # It allows us to isolate actual params objects from their building process that can be
-     # context dependent.
-     module Builders
-       # Builder for params
-       module Params
-         class << self
-           # @param kafka_message [Kafka::FetchedMessage] message fetched from Kafka
-           # @param topic [Karafka::Routing::Topic] topic for which this message was fetched
-           # @return [Karafka::Params::Params] params object with payload and message metadata
-           def from_kafka_message(kafka_message, topic)
-             metadata = Karafka::Params::Metadata.new(
-               create_time: kafka_message.create_time,
-               headers: kafka_message.headers || {},
-               is_control_record: kafka_message.is_control_record,
-               key: kafka_message.key,
-               offset: kafka_message.offset,
-               deserializer: topic.deserializer,
-               partition: kafka_message.partition,
-               receive_time: Time.now,
-               topic: topic.name
-             ).freeze
-
-             Karafka::Params::Params.new(
-               kafka_message.value,
-               metadata
-             )
-           end
-         end
-       end
-     end
-   end
- end
data/lib/karafka/params/builders/params_batch.rb
@@ -1,25 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Params
-     module Builders
-       # Builder for creating params batch instances
-       module ParamsBatch
-         class << self
-           # Creates params batch with params inside based on the incoming messages
-           # and the topic from which it comes
-           # @param kafka_messages [Array<Kafka::FetchedMessage>] raw fetched messages
-           # @param topic [Karafka::Routing::Topic] topic for which we've received messages
-           # @return [Karafka::Params::ParamsBatch<Karafka::Params::Params>] batch with params
-           def from_kafka_messages(kafka_messages, topic)
-             params_array = kafka_messages.map do |message|
-               Karafka::Params::Builders::Params.from_kafka_message(message, topic)
-             end
-
-             Karafka::Params::ParamsBatch.new(params_array).freeze
-           end
-         end
-       end
-     end
-   end
- end
data/lib/karafka/params/params_batch.rb
@@ -1,60 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Params
-     # Params batch represents a set of messages received from Kafka.
-     # @note Params internally are lazy loaded before first use. That way we can skip the
-     #   deserialization process if we have an after_fetch that rejects some incoming messages
-     #   without using params. It can also be used when handling really heavy data.
-     class ParamsBatch
-       include Enumerable
-
-       # @param params_array [Array<Karafka::Params::Params>] array with karafka params
-       # @return [Karafka::Params::ParamsBatch] lazy evaluated params batch object
-       def initialize(params_array)
-         @params_array = params_array
-       end
-
-       # @yieldparam [Karafka::Params::Params] each params instance
-       # @note Invocation of this method will not cause loading and deserializing each param after
-       #   another.
-       def each
-         @params_array.each { |param| yield(param) }
-       end
-
-       # @return [Array<Karafka::Params::Params>] returns all the params in a loaded state, so they
-       #   can be used for batch insert, etc. Without invoking all, up until first use, they won't
-       #   be deserialized
-       def deserialize!
-         each(&:payload)
-       end
-
-       # @return [Array<Object>] array with deserialized payloads. This method can be useful when
-       #   we don't care about metadata and just want to extract all the data payloads from the
-       #   batch
-       def payloads
-         map(&:payload)
-       end
-
-       # @return [Karafka::Params::Params] first element
-       def first
-         @params_array.first
-       end
-
-       # @return [Karafka::Params::Params] last element
-       def last
-         @params_array.last
-       end
-
-       # @return [Integer] number of messages in the batch
-       def size
-         @params_array.size
-       end
-
-       # @return [Array<Karafka::Params::Params>] pure array with params
-       def to_a
-         @params_array
-       end
-     end
-   end
- end
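
The laziness matters in practice: iterating only touches metadata, while #payloads forces deserialization of the whole batch. A sketch of the old batch-consumer pattern; the consumer class and the store_all helper are illustrative:

    class LogsConsumer < Karafka::BaseConsumer
      def consume
        # Iteration alone does not deserialize payloads
        params_batch.each { |param| puts param.metadata.offset }

        # #payloads deserializes everything at once, e.g. for a bulk insert
        store_all(params_batch.payloads)
      end
    end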
data/lib/karafka/patches/ruby_kafka.rb
@@ -1,47 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Namespace for various other libs patches
-   module Patches
-     # Patches for Ruby Kafka gem
-     module RubyKafka
-       # This patch allows us to inject business logic in between fetches and before the consumer
-       # stop, so we can perform stop commit or anything else that we need since
-       # ruby-kafka fetch loop does not allow that directly
-       # We don't want to use the poll ruby-kafka API as it brings many more problems that we would
-       # have to take care of. That way, nothing like that ever happens but we get the control
-       # over the stopping process that we need (since we're the ones that initiate it for each
-       # thread)
-       def consumer_loop
-         super do
-           consumers = Karafka::Persistence::Consumers
-                       .current
-                       .values
-                       .flat_map(&:values)
-                       .select { |consumer| consumer.class.respond_to?(:after_fetch) }
-
-           if Karafka::App.stopping?
-             publish_event(consumers, 'before_stop')
-             Karafka::Persistence::Client.read.stop
-           else
-             publish_event(consumers, 'before_poll')
-             yield
-             publish_event(consumers, 'after_poll')
-           end
-         end
-       end
-
-       private
-
-       # Notifies consumers about particular events happening
-       # @param consumers [Array<Object>] all consumers that want to be notified about an event
-       # @param event_name [String] name of the event that happened
-       def publish_event(consumers, event_name)
-         consumers.each do |consumer|
-           key = "consumers.#{Helpers::Inflector.map(consumer.class.to_s)}.#{event_name}"
-           Karafka::App.monitor.instrument(key, context: consumer)
-         end
-       end
-     end
-   end
- end
data/lib/karafka/persistence/client.rb
@@ -1,29 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Persistence
-     # Persistence layer to store current thread messages consumer client for further use
-     class Client
-       # Thread.current key under which we store current thread messages consumer client
-       PERSISTENCE_SCOPE = :client
-
-       private_constant :PERSISTENCE_SCOPE
-
-       class << self
-         # @param client [Karafka::Connection::Client] messages consumer client of
-         #   a current thread
-         # @return [Karafka::Connection::Client] persisted messages consumer client
-         def write(client)
-           Thread.current[PERSISTENCE_SCOPE] = client
-         end
-
-         # @return [Karafka::Connection::Client] persisted messages consumer client
-         # @raise [Karafka::Errors::MissingClientError] raised when there is no thread messages
-         #   consumer client but we try to use it anyway
-         def read
-           Thread.current[PERSISTENCE_SCOPE] || raise(Errors::MissingClientError)
-         end
-       end
-     end
-   end
- end
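
A sketch of the thread-local flow this class enabled; the client variable is assumed to be a Karafka::Connection::Client built by the listener thread:

    # The listener thread stores its connection client once...
    Karafka::Persistence::Client.write(client)

    # ...so other code on the same thread can reach it later; e.g. the
    # ruby-kafka patch above stops it during shutdown:
    Karafka::Persistence::Client.read.stop

    # Calling .read before any .write raises Karafka::Errors::MissingClientError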