karafka 1.4.15 → 2.0.0.alpha1

Files changed (128)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +3 -0
  4. data/.github/workflows/ci.yml +74 -24
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +38 -39
  7. data/Gemfile +6 -0
  8. data/Gemfile.lock +50 -52
  9. data/LICENSE +14 -0
  10. data/LICENSE-COMM +89 -0
  11. data/LICENSE-LGPL +165 -0
  12. data/README.md +59 -14
  13. data/bin/benchmarks +85 -0
  14. data/bin/create_token +28 -0
  15. data/bin/integrations +160 -0
  16. data/bin/stress +13 -0
  17. data/certs/karafka-pro.pem +11 -0
  18. data/certs/mensfeld.pem +23 -24
  19. data/config/errors.yml +4 -38
  20. data/docker-compose.yml +11 -3
  21. data/karafka.gemspec +10 -20
  22. data/lib/active_job/consumer.rb +22 -0
  23. data/lib/active_job/karafka.rb +18 -0
  24. data/lib/active_job/queue_adapters/karafka_adapter.rb +29 -0
  25. data/lib/active_job/routing_extensions.rb +15 -0
  26. data/lib/karafka/app.rb +13 -20
  27. data/lib/karafka/base_consumer.rb +103 -34
  28. data/lib/karafka/cli/base.rb +4 -4
  29. data/lib/karafka/cli/info.rb +43 -8
  30. data/lib/karafka/cli/install.rb +3 -8
  31. data/lib/karafka/cli/server.rb +17 -30
  32. data/lib/karafka/cli.rb +4 -11
  33. data/lib/karafka/connection/client.rb +279 -93
  34. data/lib/karafka/connection/listener.rb +137 -38
  35. data/lib/karafka/connection/messages_buffer.rb +57 -0
  36. data/lib/karafka/connection/pauses_manager.rb +46 -0
  37. data/lib/karafka/connection/rebalance_manager.rb +62 -0
  38. data/lib/karafka/contracts/config.rb +25 -7
  39. data/lib/karafka/contracts/consumer_group.rb +0 -173
  40. data/lib/karafka/contracts/consumer_group_topic.rb +17 -7
  41. data/lib/karafka/contracts/server_cli_options.rb +1 -9
  42. data/lib/karafka/contracts.rb +1 -1
  43. data/lib/karafka/env.rb +46 -0
  44. data/lib/karafka/errors.rb +14 -18
  45. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  46. data/lib/karafka/instrumentation/callbacks/error.rb +40 -0
  47. data/lib/karafka/instrumentation/callbacks/statistics.rb +42 -0
  48. data/lib/karafka/instrumentation/monitor.rb +14 -21
  49. data/lib/karafka/instrumentation/stdout_listener.rb +64 -91
  50. data/lib/karafka/instrumentation.rb +21 -0
  51. data/lib/karafka/licenser.rb +65 -0
  52. data/lib/karafka/{params → messages}/batch_metadata.rb +7 -13
  53. data/lib/karafka/messages/builders/batch_metadata.rb +30 -0
  54. data/lib/karafka/messages/builders/message.rb +38 -0
  55. data/lib/karafka/messages/builders/messages.rb +40 -0
  56. data/lib/karafka/{params/params.rb → messages/message.rb} +7 -12
  57. data/lib/karafka/messages/messages.rb +64 -0
  58. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  59. data/lib/karafka/messages/seek.rb +9 -0
  60. data/lib/karafka/patches/rdkafka/consumer.rb +22 -0
  61. data/lib/karafka/processing/executor.rb +96 -0
  62. data/lib/karafka/processing/executors_buffer.rb +49 -0
  63. data/lib/karafka/processing/jobs/base.rb +18 -0
  64. data/lib/karafka/processing/jobs/consume.rb +28 -0
  65. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  66. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  67. data/lib/karafka/processing/jobs_queue.rb +121 -0
  68. data/lib/karafka/processing/worker.rb +57 -0
  69. data/lib/karafka/processing/workers_batch.rb +22 -0
  70. data/lib/karafka/railtie.rb +65 -0
  71. data/lib/karafka/routing/builder.rb +15 -14
  72. data/lib/karafka/routing/consumer_group.rb +10 -18
  73. data/lib/karafka/routing/consumer_mapper.rb +1 -2
  74. data/lib/karafka/routing/router.rb +1 -1
  75. data/lib/karafka/routing/subscription_group.rb +53 -0
  76. data/lib/karafka/routing/subscription_groups_builder.rb +51 -0
  77. data/lib/karafka/routing/topic.rb +47 -25
  78. data/lib/karafka/runner.rb +59 -0
  79. data/lib/karafka/serialization/json/deserializer.rb +6 -15
  80. data/lib/karafka/server.rb +62 -25
  81. data/lib/karafka/setup/config.rb +86 -159
  82. data/lib/karafka/status.rb +13 -3
  83. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  84. data/lib/karafka/templates/karafka.rb.erb +14 -50
  85. data/lib/karafka/time_trackers/base.rb +19 -0
  86. data/lib/karafka/time_trackers/pause.rb +84 -0
  87. data/lib/karafka/time_trackers/poll.rb +65 -0
  88. data/lib/karafka/version.rb +1 -1
  89. data/lib/karafka.rb +30 -44
  90. data.tar.gz.sig +0 -0
  91. metadata +96 -132
  92. metadata.gz.sig +0 -0
  93. data/MIT-LICENCE +0 -18
  94. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  95. data/lib/karafka/attributes_map.rb +0 -63
  96. data/lib/karafka/backends/inline.rb +0 -16
  97. data/lib/karafka/base_responder.rb +0 -226
  98. data/lib/karafka/cli/flow.rb +0 -48
  99. data/lib/karafka/cli/missingno.rb +0 -19
  100. data/lib/karafka/code_reloader.rb +0 -67
  101. data/lib/karafka/connection/api_adapter.rb +0 -158
  102. data/lib/karafka/connection/batch_delegator.rb +0 -55
  103. data/lib/karafka/connection/builder.rb +0 -23
  104. data/lib/karafka/connection/message_delegator.rb +0 -36
  105. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  106. data/lib/karafka/consumers/callbacks.rb +0 -71
  107. data/lib/karafka/consumers/includer.rb +0 -64
  108. data/lib/karafka/consumers/responders.rb +0 -24
  109. data/lib/karafka/consumers/single_params.rb +0 -15
  110. data/lib/karafka/contracts/responder_usage.rb +0 -54
  111. data/lib/karafka/fetcher.rb +0 -42
  112. data/lib/karafka/helpers/class_matcher.rb +0 -88
  113. data/lib/karafka/helpers/config_retriever.rb +0 -46
  114. data/lib/karafka/helpers/inflector.rb +0 -26
  115. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  116. data/lib/karafka/params/builders/params.rb +0 -38
  117. data/lib/karafka/params/builders/params_batch.rb +0 -25
  118. data/lib/karafka/params/params_batch.rb +0 -60
  119. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  120. data/lib/karafka/persistence/client.rb +0 -29
  121. data/lib/karafka/persistence/consumers.rb +0 -45
  122. data/lib/karafka/persistence/topics.rb +0 -48
  123. data/lib/karafka/responders/builder.rb +0 -36
  124. data/lib/karafka/responders/topic.rb +0 -55
  125. data/lib/karafka/routing/topic_mapper.rb +0 -53
  126. data/lib/karafka/serialization/json/serializer.rb +0 -31
  127. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  128. data/lib/karafka/templates/application_responder.rb.erb +0 -11
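The list above already tells the headline story of 2.0: the `Karafka::Params` namespace (files #115-118) is replaced by `Karafka::Messages` (files #52-59), responders and the ruby-kafka connection layer are removed outright, and new `processing/` and ActiveJob components appear. As a rough sketch of what the params-to-messages rename means for consumer code — inferred from the renamed files, not from the diff bodies, so treat the exact accessor names as assumptions:

  # 1.4.15: consumers iterate over params_batch
  class EventsConsumer < ApplicationConsumer
    def consume
      params_batch.each do |params|
        puts params.payload # deserialized message payload
      end
    end
  end

  # 2.0.0.alpha1 (assumed from the params -> messages rename):
  class EventsConsumer < ApplicationConsumer
    def consume
      messages.each do |message|
        puts message.payload
      end
    end
  end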
data/lib/karafka/base_responder.rb
@@ -1,226 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Base responder from which all Karafka responders should inherit
-   # Similar to Rails responders concept. It allows us to design flow from one app to another
-   # by isolating what responses should be sent (and where) based on a given action
-   # It differs from Rails responders in the way it works: in std http request we can have one
-   # response, here we can have unlimited number of them
-   #
-   # It has a simple API for defining where should we respond (and if it is required)
-   #
-   # @example Basic usage (each registered topic is required to be used by default)
-   #   class Responder < BaseResponder
-   #     topic :new_action
-   #
-   #     def respond(data)
-   #       respond_to :new_action, data
-   #     end
-   #   end
-   #
-   # @example Responding to a topic with extra options
-   #   class Responder < BaseResponder
-   #     topic :new_action
-   #
-   #     def respond(data)
-   #       respond_to :new_action, data, partition_key: 'thing'
-   #     end
-   #   end
-   #
-   # @example Marking topic as not required (we won't have to use it)
-   #   class Responder < BaseResponder
-   #     topic :required_topic
-   #     topic :new_action, required: false
-   #
-   #     def respond(data)
-   #       respond_to :required_topic, data
-   #     end
-   #   end
-   #
-   # @example Multiple times used topic
-   #   class Responder < BaseResponder
-   #     topic :required_topic
-   #
-   #     def respond(data)
-   #       data.each do |subset|
-   #         respond_to :required_topic, subset
-   #       end
-   #     end
-   #   end
-   #
-   # @example Specify serializer for a topic
-   #   class Responder < BaseResponder
-   #     topic :xml_topic, serializer: MyXMLSerializer
-   #
-   #     def respond(data)
-   #       data.each do |subset|
-   #         respond_to :xml_topic, subset
-   #       end
-   #     end
-   #   end
-   #
-   # @example Accept multiple arguments to a respond method
-   #   class Responder < BaseResponder
-   #     topic :users_actions
-   #     topic :articles_viewed
-   #
-   #     def respond(user, article)
-   #       respond_to :users_actions, user
-   #       respond_to :articles_viewed, article
-   #     end
-   #   end
-   class BaseResponder
-     # Responder usage contract
-     CONTRACT = Karafka::Contracts::ResponderUsage.new.freeze
-
-     private_constant :CONTRACT
-
-     class << self
-       # Definitions of all topics that we want to be able to use in this responder should go here
-       attr_accessor :topics
-       # Contract that we can use to control and/or require some additional details upon options
-       # that are being passed to the producer. This can be in particular useful if we want to make
-       # sure that for example partition_key is always present.
-       attr_accessor :options_contract
-
-       # Registers a topic as on to which we will be able to respond
-       # @param topic_name [Symbol, String] name of topic to which we want to respond
-       # @param options [Hash] hash with optional configuration details
-       def topic(topic_name, options = {})
-         options[:serializer] ||= Karafka::App.config.serializer
-         options[:registered] = true
-         self.topics ||= {}
-         topic_obj = Responders::Topic.new(topic_name, options)
-         self.topics[topic_obj.name] = topic_obj
-       end
-
-       # A simple alias for easier standalone responder usage.
-       # Instead of building it with new.call it allows (in case of using JSON serializer)
-       # to just run it directly from the class level
-       # @param data Anything that we want to respond with
-       # @example Send user data with a responder
-       #   UsersCreatedResponder.call(@created_user)
-       def call(*data)
-         # Just in case there were no topics defined for a responder, we initialize with
-         # empty hash not to handle a nil case
-         self.topics ||= {}
-         new.call(*data)
-       end
-     end
-
-     attr_reader :messages_buffer
-
-     # Creates a responder object
-     # @return [Karafka::BaseResponder] base responder descendant responder
-     def initialize
-       @messages_buffer = {}
-     end
-
-     # Performs respond and validates that all the response requirement were met
-     # @param data Anything that we want to respond with
-     # @note We know that validators should be executed also before sending data to topics, however
-     #   the implementation gets way more complicated then, that's why we check after everything
-     #   was sent using responder
-     # @example Send user data with a responder
-     #   UsersCreatedResponder.new.call(@created_user)
-     # @example Send user data with a responder using non default Parser
-     #   UsersCreatedResponder.new(MyParser).call(@created_user)
-     def call(*data)
-       respond(*data)
-       validate_usage!
-       validate_options!
-       deliver!
-     end
-
-     private
-
-     # Checks if we met all the topics requirements. It will fail if we didn't send a message to
-     # a registered required topic, etc.
-     def validate_usage!
-       registered_topics = self.class.topics.map do |name, topic|
-         topic.to_h.merge!(
-           usage_count: messages_buffer[name]&.count || 0
-         )
-       end
-
-       used_topics = messages_buffer.map do |name, usage|
-         topic = self.class.topics[name] || Responders::Topic.new(name, registered: false)
-         topic.to_h.merge!(usage_count: usage.count)
-       end
-
-       result = CONTRACT.call(
-         registered_topics: registered_topics,
-         used_topics: used_topics
-       )
-
-       return if result.success?
-
-       raise Karafka::Errors::InvalidResponderUsageError, result.errors.to_h
-     end
-
-     # Checks if we met all the options requirements before sending them to the producer.
-     def validate_options!
-       return true unless self.class.options_contract
-
-       messages_buffer.each_value do |messages_set|
-         messages_set.each do |message_data|
-           result = self.class.options_contract.call(message_data.last)
-           next if result.success?
-
-           raise Karafka::Errors::InvalidResponderMessageOptionsError, result.errors.to_h
-         end
-       end
-     end
-
-     # Takes all the messages from the buffer and delivers them one by one
-     # @note This method is executed after the validation, so we're sure that
-     #   what we send is legit and it will go to a proper topics
-     def deliver!
-       messages_buffer.each_value do |data_elements|
-         data_elements.each do |data, options|
-           # We map this topic name, so it will match namespaced/etc topic in Kafka
-           # @note By default will not change topic (if default mapper used)
-           mapped_topic = Karafka::App.config.topic_mapper.outgoing(options[:topic])
-           external_options = options.merge(topic: mapped_topic)
-           producer(options).call(data, external_options)
-         end
-       end
-     end
-
-     # Method that needs to be implemented in a subclass. It should handle responding
-     # on registered topics
-     # @param _data [Object] anything that we want to use to send to Kafka
-     # @raise [NotImplementedError] This method needs to be implemented in a subclass
-     def respond(*_data)
-       raise NotImplementedError, 'Implement this in a subclass'
-     end
-
-     # This method allow us to respond to a single topic with a given data. It can be used
-     # as many times as we need. Especially when we have 1:n flow
-     # @param topic [Symbol, String] topic to which we want to respond
-     # @param data [String, Object] string or object that we want to send
-     # @param options [Hash] options for waterdrop (e.g. partition_key).
-     # @note Respond to does not accept multiple data arguments.
-     def respond_to(topic, data, options = {})
-       # We normalize the format to string, as WaterDrop and Ruby-Kafka support only
-       # string topics
-       topic = topic.to_s
-
-       messages_buffer[topic] ||= []
-       messages_buffer[topic] << [
-         self.class.topics[topic].serializer.call(data),
-         options.merge(topic: topic)
-       ]
-     end
-
-     # @param options [Hash] options for waterdrop
-     # @return [Class] WaterDrop producer (sync or async based on the settings)
-     def producer(options)
-       if self.class.topics[options[:topic]].async?
-         WaterDrop::AsyncProducer
-       else
-         WaterDrop::SyncProducer
-       end
-     end
-   end
- end
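The responder layer above disappears completely in 2.0, together with its supporting files (#108, #110, #114, #123-124), and nothing in this diff replaces it directly. A minimal sketch of the equivalent 1:n produce flow, assuming the WaterDrop 2 producer API that 2.0 builds on — `Karafka.producer` and `produce_sync` are assumptions, not shown in this changeset:

  # 1.4.15: sending through a responder
  class UsersCreatedResponder < ApplicationResponder
    topic :users_created

    def respond(user)
      respond_to :users_created, user
    end
  end

  UsersCreatedResponder.call(user)

  # 2.0.0.alpha1 (assumed): producing directly, no responder in between
  Karafka.producer.produce_sync(
    topic: 'users_created',
    payload: user.to_json
  )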
data/lib/karafka/cli/flow.rb
@@ -1,48 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Karafka framework Cli
-   class Cli < Thor
-     # Description of topics flow (incoming/outgoing)
-     class Flow < Base
-       desc 'Print application data flow (incoming => outgoing)'
-
-       # Print out all defined routes in alphabetical order
-       def call
-         topics.each do |topic|
-           any_topics = !topic.responder&.topics.nil?
-           log_messages = []
-
-           if any_topics
-             log_messages << "#{topic.name} =>"
-
-             topic.responder.topics.each_value do |responder_topic|
-               features = []
-               features << (responder_topic.required? ? 'always' : 'conditionally')
-
-               log_messages << format(responder_topic.name, "(#{features.join(', ')})")
-             end
-           else
-             log_messages << "#{topic.name} => (nothing)"
-           end
-
-           Karafka.logger.info(log_messages.join("\n"))
-         end
-       end
-
-       private
-
-       # @return [Array<Karafka::Routing::Topic>] all topics sorted in alphabetical order
-       def topics
-         Karafka::App.consumer_groups.map(&:topics).flatten.sort_by(&:name)
-       end
-
-       # Formats a given value with label in a nice way
-       # @param label [String] label describing value
-       # @param value [String] value that should be printed
-       def format(label, value)
-         " - #{label}: #{value}"
-       end
-     end
-   end
- end
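For reference, with one responder-backed topic and one bare topic, the removed `karafka flow` command would log something like the following — derived from the formatting code above, not from captured output, and with hypothetical topic names:

  incoming_events =>
   - events_stored: (always)
   - events_replayed: (conditionally)
  audit_log => (nothing)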
data/lib/karafka/cli/missingno.rb
@@ -1,19 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   class Cli < Thor
-     # Command that gets invoked when no method is provided when running the CLI
-     # It allows us to exit with exit code 1 instead of default 0 to indicate that something
-     # was missing
-     # @see https://github.com/karafka/karafka/issues/619
-     class Missingno < Base
-       desc 'Hidden command that gets invoked when no command is provided', hide: true
-
-       # Prints an error about the lack of command (nothing selected)
-       def call
-         Karafka.logger.error('No command provided')
-         exit 1
-       end
-     end
-   end
- end
data/lib/karafka/code_reloader.rb
@@ -1,67 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Special type of a listener, that is not an instrumentation one, but one that triggers
-   # code reload in the development mode after each fetched batch (or message)
-   #
-   # Please refer to the development code reload sections for details on the benefits and downsides
-   # of the in-process code reloading
-   class CodeReloader
-     # This mutex is needed as we might have an application that has multiple consumer groups
-     # running in separate threads and we should not trigger reload before fully reloading the app
-     # in previous thread
-     MUTEX = Mutex.new
-
-     private_constant :MUTEX
-
-     # @param reloaders [Array<Object>] any code loaders that we use in this app. Whether it is
-     #   the Rails loader, Zeitwerk or anything else that allows reloading triggering
-     # @param block [Proc] yields given block just before reloading. This can be used to hook custom
-     #   reloading stuff, that ain't reloaders (for example for resetting dry-events registry)
-     def initialize(*reloaders, &block)
-       @reloaders = reloaders
-       @block = block
-     end
-
-     # Binds to the instrumentation events and triggers reload
-     # @param _event [Dry::Event] empty dry event
-     # @note Since we de-register all the user defined objects and redraw routes, it means that
-     #   we won't be able to do a multi-batch buffering in the development mode as each of the
-     #   batches will be buffered on a newly created "per fetch" instance.
-     def on_connection_listener_fetch_loop(_event)
-       reload
-     end
-
-     private
-
-     # Triggers reload of both standard and Rails reloaders as well as expires all internals of
-     # Karafka, so it can be rediscovered and rebuilt
-     def reload
-       MUTEX.synchronize do
-         if @reloaders[0].respond_to?(:execute)
-           reload_with_rails
-         else
-           reload_without_rails
-         end
-       end
-     end
-
-     # Rails reloading procedure
-     def reload_with_rails
-       updatable = @reloaders.select(&:updated?)
-
-       return if updatable.empty?
-
-       updatable.each(&:execute)
-       @block&.call
-       Karafka::App.reload
-     end
-
-     # Zeitwerk and other reloaders
-     def reload_without_rails
-       @reloaders.each(&:reload)
-       @block&.call
-       Karafka::App.reload
-     end
-   end
- end
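In 1.4 this reloader was subscribed from the generated karafka.rb boot file (itself slimmed down in this diff, file #84). A sketch of that wiring, reconstructed from the shape of the 1.4 template rather than from this changeset, so treat it as an assumption:

  # Development-only hook inside karafka.rb:
  if Rails.env.development?
    # Rails reloaders respond to #execute/#updated?, so reload_with_rails kicks in;
    # a plain Zeitwerk loader would go through reload_without_rails via #reload
    Karafka.monitor.subscribe(
      Karafka::CodeReloader.new(*Rails.application.reloaders)
    )
  end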
data/lib/karafka/connection/api_adapter.rb
@@ -1,158 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Namespace for all the things related to Kafka connection
-   module Connection
-     # Mapper used to convert our internal settings into ruby-kafka settings based on their
-     # API requirements.
-     # Since ruby-kafka has more and more options and there are few "levels" on which
-     # we have to apply them (despite the fact, that in Karafka you configure all of it
-     # in one place), we have to remap it into what ruby-kafka driver requires
-     # @note The good thing about Kafka.new method is that it ignores all options that
-     #   do nothing. So we don't have to worry about injecting our internal settings
-     #   into the client and breaking stuff
-     module ApiAdapter
-       class << self
-         # Builds all the configuration settings for Kafka.new method
-         # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
-         # @return [Array<Hash>] Array with all the client arguments including hash with all
-         #   the settings required by Kafka.new method
-         # @note We return array, so we can inject any arguments we want, in case of changes in the
-         #   raw driver
-         def client(consumer_group)
-           # This one is a default that takes all the settings except special
-           # cases defined in the map
-           settings = {
-             logger: ::Karafka.logger,
-             client_id: ::Karafka::App.config.client_id
-           }
-
-           kafka_configs.each_key do |setting_name|
-             # All options for config adapter should be ignored as we're just interested
-             # in what is left, as we want to pass all the options that are "typical"
-             # and not listed in the api_adapter special cases mapping. All the values
-             # from the api_adapter mapping go somewhere else, not to the client directly
-             next if AttributesMap.api_adapter.values.flatten.include?(setting_name)
-
-             # Settings for each consumer group are either defined per consumer group or are
-             # inherited from the global/general settings level, thus we don't have to fetch them
-             # from the kafka settings as they are already on a consumer group level
-             settings[setting_name] = consumer_group.public_send(setting_name)
-           end
-
-           settings_hash = sanitize(settings)
-
-           # Normalization for the way Kafka::Client accepts arguments from 0.5.3
-           [settings_hash.delete(:seed_brokers), settings_hash]
-         end
-
-         # Builds all the configuration settings for kafka#consumer method
-         # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
-         # @return [Hash] all the consumer keyword arguments including hash with all
-         #   the settings required by Kafka#consumer
-         def consumer(consumer_group)
-           settings = { group_id: consumer_group.id }
-           settings = fetch_for(:consumer, consumer_group, settings)
-           sanitize(settings)
-         end
-
-         # Builds all the configuration settings for kafka consumer consume_each_batch and
-         # consume_each_message methods
-         # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
-         # @return [Hash] hash with all the arguments required by consuming method
-         #   including all the settings required by
-         #   Kafka::Consumer#consume_each_message and Kafka::Consumer#consume_each_batch method
-         def consumption(consumer_group)
-           sanitize(
-             fetch_for(
-               :consumption,
-               consumer_group,
-               automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
-             )
-           )
-         end
-
-         # Builds all the configuration settings for kafka consumer#subscribe method
-         # @param topic [Karafka::Routing::Topic] topic that holds details for a given subscription
-         # @return [Hash] hash with all the settings required by kafka consumer#subscribe method
-         def subscribe(topic)
-           settings = fetch_for(:subscribe, topic)
-           [Karafka::App.config.topic_mapper.outgoing(topic.name), sanitize(settings)]
-         end
-
-         # Builds all the configuration settings required by kafka consumer#pause method
-         # @param topic [String] topic that we want to pause
-         # @param partition [Integer] number partition that we want to pause
-         # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
-         # @return [Hash] hash with all the details required to pause kafka consumer
-         def pause(topic, partition, consumer_group)
-           {
-             args: [Karafka::App.config.topic_mapper.outgoing(topic), partition],
-             kwargs: {
-               timeout: consumer_group.pause_timeout,
-               max_timeout: consumer_group.pause_max_timeout,
-               exponential_backoff: consumer_group.pause_exponential_backoff
-             }
-           }
-         end
-
-         # Remaps topic details taking the topic mapper feature into consideration.
-         # @param params [Karafka::Params::Params] params instance
-         # @return [Array] array with all the details needed by ruby-kafka to mark message
-         #   as processed
-         # @note When default empty topic mapper is used, no need for any conversion as the
-         #   internal and external format are exactly the same
-         def mark_message_as_processed(params)
-           # Majority of users don't use custom topic mappers. No need to change anything when it
-           # is a default mapper that does not change anything. Only some cloud providers require
-           # topics to be remapped
-           return [params.metadata] if Karafka::App.config.topic_mapper.is_a?(
-             Karafka::Routing::TopicMapper
-           )
-
-           # @note We don't use tap as it is around 13% slower than non-dup version
-           dupped = params.metadata.dup
-           dupped['topic'] = Karafka::App.config.topic_mapper.outgoing(params.metadata.topic)
-           [dupped]
-         end
-
-         private
-
-         # Fetches proper settings for a given map namespace
-         # @param namespace_key [Symbol] namespace from attributes map config adapter hash
-         # @param route_layer [Object] route topic or consumer group
-         # @param preexisting_settings [Hash] hash with some preexisting settings that might have
-         #   been loaded in a different way
-         def fetch_for(namespace_key, route_layer, preexisting_settings = {})
-           kafka_configs.each_key do |setting_name|
-             # Ignore settings that are not related to our namespace
-             next unless AttributesMap.api_adapter[namespace_key].include?(setting_name)
-
-             # Ignore settings that are already initialized
-             # In case they are in preexisting settings fetched differently
-             next if preexisting_settings.key?(setting_name)
-
-             # Fetch all the settings from a given layer object. Objects can handle the fallback
-             # to the kafka settings, so
-             preexisting_settings[setting_name] = route_layer.send(setting_name)
-           end
-
-           preexisting_settings
-         end
-
-         # Removes nil containing keys from the final settings so it can use Kafkas driver
-         # defaults for those
-         # @param settings [Hash] settings that may contain nil values
-         # @return [Hash] settings without nil using keys (non of karafka options should be nil)
-         def sanitize(settings)
-           settings.reject { |_key, value| value.nil? }
-         end
-
-         # @return [Hash] Kafka config details as a hash
-         def kafka_configs
-           ::Karafka::App.config.kafka.to_h
-         end
-       end
-     end
-   end
- end
data/lib/karafka/connection/batch_delegator.rb
@@ -1,55 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Connection
-     # Class that delegates processing of batch received messages for which we listen to
-     # a proper processor
-     module BatchDelegator
-       class << self
-         # Delegates messages (does something with them)
-         # It will either schedule or run a proper processor action for messages
-         # @param group_id [String] group_id of a group from which a given message came
-         # @param kafka_batch [<Kafka::FetchedBatch>] raw messages fetched batch
-         # @note This should be looped to obtain a constant delegating of new messages
-         def call(group_id, kafka_batch)
-           topic = Persistence::Topics.fetch(group_id, kafka_batch.topic)
-           consumer = Persistence::Consumers.fetch(topic, kafka_batch.partition)
-
-           Karafka.monitor.instrument(
-             'connection.batch_delegator.call',
-             caller: self,
-             consumer: consumer,
-             kafka_batch: kafka_batch
-           ) do
-             # Due to how ruby-kafka is built, we have the metadata that is stored on the batch
-             # level only available for batch consuming
-             consumer.batch_metadata = Params::Builders::BatchMetadata.from_kafka_batch(
-               kafka_batch,
-               topic
-             )
-
-             kafka_messages = kafka_batch.messages
-
-             # Depending on a case (persisted or not) we might use new consumer instance per
-             # each batch, or use the same one for all of them (for implementing buffering, etc.)
-             if topic.batch_consuming
-               consumer.params_batch = Params::Builders::ParamsBatch.from_kafka_messages(
-                 kafka_messages,
-                 topic
-               )
-               consumer.call
-             else
-               kafka_messages.each do |kafka_message|
-                 consumer.params_batch = Params::Builders::ParamsBatch.from_kafka_messages(
-                   [kafka_message],
-                   topic
-                 )
-                 consumer.call
-               end
-             end
-           end
-         end
-       end
-     end
-   end
- end
data/lib/karafka/connection/builder.rb
@@ -1,23 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Connection
-     # Builder used to construct Kafka client
-     module Builder
-       class << self
-         # Builds a Kafka::Client instance that we use to work with Kafka cluster
-         # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group for which we want
-         #   to have a new Kafka client
-         # @return [::Kafka::Client] returns a Kafka client
-         def call(consumer_group)
-           settings = ApiAdapter.client(consumer_group)
-
-           Kafka.new(
-             settings[0],
-             **settings[1]
-           )
-         end
-       end
-     end
-   end
- end
data/lib/karafka/connection/message_delegator.rb
@@ -1,36 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Connection
-     # Class that delegates processing of a single received message for which we listen to
-     # a proper processor
-     module MessageDelegator
-       class << self
-         # Delegates message (does something with it)
-         # It will either schedule or run a proper processor action for the incoming message
-         # @param group_id [String] group_id of a group from which a given message came
-         # @param kafka_message [<Kafka::FetchedMessage>] raw message from kafka
-         # @note This should be looped to obtain a constant delegating of new messages
-         def call(group_id, kafka_message)
-           topic = Persistence::Topics.fetch(group_id, kafka_message.topic)
-           consumer = Persistence::Consumers.fetch(topic, kafka_message.partition)
-
-           Karafka.monitor.instrument(
-             'connection.message_delegator.call',
-             caller: self,
-             consumer: consumer,
-             kafka_message: kafka_message
-           ) do
-             # @note We always get a single message within single delegator, which means that
-             #   we don't care if user marked it as a batch consumed or not.
-             consumer.params_batch = Params::Builders::ParamsBatch.from_kafka_messages(
-               [kafka_message],
-               topic
-             )
-             consumer.call
-           end
-         end
-       end
-     end
-   end
- end
data/lib/karafka/consumers/batch_metadata.rb
@@ -1,10 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Consumers
-     # Brings the batch metadata into consumers that support batch_fetching
-     module BatchMetadata
-       attr_accessor :batch_metadata
-     end
-   end
- end
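This mixin is the last piece of the batch metadata story on the 1.4 side; on the 2.0 side the equivalent data lives in messages/batch_metadata.rb and its builder (files #52-53). A hedged before/after, with the 2.0 accessor inferred from the new file layout rather than taken from this diff:

  # 1.4.15: metadata mixed into the consumer itself
  class EventsConsumer < ApplicationConsumer
    def consume
      puts batch_metadata.topic
    end
  end

  # 2.0.0.alpha1 (assumed): metadata hangs off the messages batch
  class EventsConsumer < ApplicationConsumer
    def consume
      puts messages.metadata.topic
    end
  end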