karafka 1.3.0

Files changed (99)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +11 -0
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +69 -0
  10. data/.rspec +1 -0
  11. data/.ruby-gemset +1 -0
  12. data/.ruby-version +1 -0
  13. data/.travis.yml +36 -0
  14. data/CHANGELOG.md +520 -0
  15. data/CODE_OF_CONDUCT.md +46 -0
  16. data/CONTRIBUTING.md +41 -0
  17. data/Gemfile +12 -0
  18. data/Gemfile.lock +137 -0
  19. data/MIT-LICENCE +18 -0
  20. data/README.md +101 -0
  21. data/bin/karafka +19 -0
  22. data/certs/mensfeld.pem +25 -0
  23. data/config/errors.yml +39 -0
  24. data/karafka.gemspec +44 -0
  25. data/lib/karafka.rb +71 -0
  26. data/lib/karafka/app.rb +53 -0
  27. data/lib/karafka/attributes_map.rb +68 -0
  28. data/lib/karafka/backends/inline.rb +16 -0
  29. data/lib/karafka/base_consumer.rb +57 -0
  30. data/lib/karafka/base_responder.rb +226 -0
  31. data/lib/karafka/cli.rb +54 -0
  32. data/lib/karafka/cli/base.rb +78 -0
  33. data/lib/karafka/cli/console.rb +31 -0
  34. data/lib/karafka/cli/flow.rb +45 -0
  35. data/lib/karafka/cli/info.rb +31 -0
  36. data/lib/karafka/cli/install.rb +64 -0
  37. data/lib/karafka/cli/server.rb +71 -0
  38. data/lib/karafka/code_reloader.rb +67 -0
  39. data/lib/karafka/connection/api_adapter.rb +155 -0
  40. data/lib/karafka/connection/batch_delegator.rb +51 -0
  41. data/lib/karafka/connection/builder.rb +16 -0
  42. data/lib/karafka/connection/client.rb +117 -0
  43. data/lib/karafka/connection/listener.rb +71 -0
  44. data/lib/karafka/connection/message_delegator.rb +36 -0
  45. data/lib/karafka/consumers/callbacks.rb +71 -0
  46. data/lib/karafka/consumers/includer.rb +63 -0
  47. data/lib/karafka/consumers/metadata.rb +10 -0
  48. data/lib/karafka/consumers/responders.rb +24 -0
  49. data/lib/karafka/consumers/single_params.rb +15 -0
  50. data/lib/karafka/contracts.rb +10 -0
  51. data/lib/karafka/contracts/config.rb +21 -0
  52. data/lib/karafka/contracts/consumer_group.rb +206 -0
  53. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  54. data/lib/karafka/contracts/responder_usage.rb +54 -0
  55. data/lib/karafka/contracts/server_cli_options.rb +29 -0
  56. data/lib/karafka/errors.rb +51 -0
  57. data/lib/karafka/fetcher.rb +42 -0
  58. data/lib/karafka/helpers/class_matcher.rb +88 -0
  59. data/lib/karafka/helpers/config_retriever.rb +46 -0
  60. data/lib/karafka/helpers/inflector.rb +26 -0
  61. data/lib/karafka/helpers/multi_delegator.rb +32 -0
  62. data/lib/karafka/instrumentation/logger.rb +57 -0
  63. data/lib/karafka/instrumentation/monitor.rb +70 -0
  64. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  65. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  66. data/lib/karafka/params/builders/metadata.rb +33 -0
  67. data/lib/karafka/params/builders/params.rb +36 -0
  68. data/lib/karafka/params/builders/params_batch.rb +25 -0
  69. data/lib/karafka/params/metadata.rb +35 -0
  70. data/lib/karafka/params/params.rb +68 -0
  71. data/lib/karafka/params/params_batch.rb +61 -0
  72. data/lib/karafka/patches/ruby_kafka.rb +47 -0
  73. data/lib/karafka/persistence/client.rb +29 -0
  74. data/lib/karafka/persistence/consumers.rb +45 -0
  75. data/lib/karafka/persistence/topics.rb +48 -0
  76. data/lib/karafka/process.rb +60 -0
  77. data/lib/karafka/responders/builder.rb +36 -0
  78. data/lib/karafka/responders/topic.rb +55 -0
  79. data/lib/karafka/routing/builder.rb +89 -0
  80. data/lib/karafka/routing/consumer_group.rb +61 -0
  81. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  82. data/lib/karafka/routing/proxy.rb +46 -0
  83. data/lib/karafka/routing/router.rb +29 -0
  84. data/lib/karafka/routing/topic.rb +62 -0
  85. data/lib/karafka/routing/topic_mapper.rb +53 -0
  86. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  87. data/lib/karafka/serialization/json/serializer.rb +31 -0
  88. data/lib/karafka/server.rb +83 -0
  89. data/lib/karafka/setup/config.rb +221 -0
  90. data/lib/karafka/setup/configurators/water_drop.rb +36 -0
  91. data/lib/karafka/setup/dsl.rb +21 -0
  92. data/lib/karafka/status.rb +29 -0
  93. data/lib/karafka/templates/application_consumer.rb.erb +7 -0
  94. data/lib/karafka/templates/application_responder.rb.erb +11 -0
  95. data/lib/karafka/templates/karafka.rb.erb +92 -0
  96. data/lib/karafka/version.rb +7 -0
  97. data/log/.gitkeep +0 -0
  98. metadata +336 -0
  99. metadata.gz.sig +0 -0
data/karafka.gemspec
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ lib = File.expand_path('lib', __dir__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+
+ require 'karafka/version'
+
+ # rubocop:disable Metrics/BlockLength
+ Gem::Specification.new do |spec|
+   spec.name = 'karafka'
+   spec.version = ::Karafka::VERSION
+   spec.platform = Gem::Platform::RUBY
+   spec.authors = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
+   spec.email = %w[maciej@coditsu.io pavlo.vavruk@gmail.com adam99g@gmail.com]
+   spec.homepage = 'https://github.com/karafka/karafka'
+   spec.summary = 'Ruby based framework for working with Apache Kafka'
+   spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
+   spec.license = 'MIT'
+
+   spec.add_dependency 'dry-configurable', '~> 0.8'
+   spec.add_dependency 'dry-inflector', '~> 0.1'
+   spec.add_dependency 'dry-monitor', '~> 0.3'
+   spec.add_dependency 'dry-validation', '~> 1.2'
+   spec.add_dependency 'envlogic', '~> 1.1'
+   spec.add_dependency 'irb', '~> 1.0'
+   spec.add_dependency 'multi_json', '>= 1.12'
+   spec.add_dependency 'rake', '>= 11.3'
+   spec.add_dependency 'ruby-kafka', '>= 0.7.8'
+   spec.add_dependency 'thor', '~> 0.20'
+   spec.add_dependency 'waterdrop', '~> 1.3.0'
+   spec.add_dependency 'zeitwerk', '~> 2.1'
+
+   spec.required_ruby_version = '>= 2.4.0'
+
+   if $PROGRAM_NAME.end_with?('gem')
+     spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
+   end
+
+   spec.cert_chain = %w[certs/mensfeld.pem]
+   spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
+   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+   spec.require_paths = %w[lib]
+ end
+ # rubocop:enable Metrics/BlockLength
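To consume this release in an application, a minimal Gemfile entry would pin the version published at the top of this page (the pessimistic '~>' constraint here is a suggestion, not part of this diff):

  # Gemfile
  source 'https://rubygems.org'

  gem 'karafka', '~> 1.3.0'

After `bundle install`, the bundled CLI (see data/lib/karafka/cli/install.rb above) can generate the boot file with `bundle exec karafka install`.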
data/lib/karafka.rb
@@ -0,0 +1,71 @@
+ # frozen_string_literal: true
+
+ %w[
+   English
+   waterdrop
+   kafka
+   envlogic
+   thor
+   forwardable
+   fileutils
+   multi_json
+   dry-configurable
+   dry-validation
+   dry/events/publisher
+   dry/inflector
+   dry/monitor/notifications
+   dry/core/constants
+   zeitwerk
+ ].each(&method(:require))
+
+ # Karafka library
+ module Karafka
+   extend Envlogic
+
+   class << self
+     # @return [Logger] logger that we want to use. Will use ::Karafka::Logger by default
+     def logger
+       @logger ||= App.config.logger
+     end
+
+     # @return [::Karafka::Monitor] monitor that we want to use
+     def monitor
+       @monitor ||= App.config.monitor
+     end
+
+     # @return [Pathname] root path of this gem
+     def gem_root
+       Pathname.new(File.expand_path('..', __dir__))
+     end
+
+     # @return [Pathname] Karafka app root path (user application path)
+     def root
+       Pathname.new(ENV['KARAFKA_ROOT_DIR'] || File.dirname(ENV['BUNDLE_GEMFILE']))
+     end
+
+     # @return [Pathname] path to the Karafka gem root core
+     def core_root
+       Pathname.new(File.expand_path('karafka', __dir__))
+     end
+
+     # @return [Pathname] path to a default file that contains the booting procedure etc.
+     # @note By default it is a file called 'karafka.rb' but it can be specified as you wish
+     #   if you have Karafka merged into a Sinatra/Rails app and karafka.rb is taken.
+     #   It will be used for console/consumers/etc
+     # @example Standard only-Karafka case
+     #   Karafka.boot_file #=> '/home/app_path/karafka.rb'
+     # @example Non standard case
+     #   KARAFKA_BOOT_FILE='/home/app_path/app.rb'
+     #   Karafka.boot_file #=> '/home/app_path/app.rb'
+     def boot_file
+       Pathname.new(ENV['KARAFKA_BOOT_FILE'] || File.join(Karafka.root, 'karafka.rb'))
+     end
+   end
+ end
+
+ Zeitwerk::Loader
+   .for_gem
+   .tap(&:setup)
+   .tap(&:eager_load)
+
+ Kafka::Consumer.prepend(Karafka::Patches::RubyKafka)
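Since `root` and `boot_file` above fall back to environment variables, their resolution can be sketched in a console session; the paths below are illustrative, assuming BUNDLE_GEMFILE points at /app/Gemfile and no overrides are set:

  Karafka.root      #=> #<Pathname:/app>
  Karafka.boot_file #=> #<Pathname:/app/karafka.rb>

  # Overriding the lookup via the environment variable read above
  ENV['KARAFKA_ROOT_DIR'] = '/srv/consumer'
  Karafka.root      #=> #<Pathname:/srv/consumer>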
data/lib/karafka/app.rb
@@ -0,0 +1,53 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # App class
+   class App
+     extend Setup::Dsl
+
+     class << self
+       # Sets up all the internal components and bootstraps the whole app
+       # We need to know details about consumers in order to setup components,
+       # that's why we don't set them up until the standard setup is done
+       # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
+       #   doesn't match the config contract
+       def boot!
+         initialize!
+         Setup::Config.validate!
+         Setup::Config.setup_components
+         initialized!
+       end
+
+       # @return [Karafka::Routing::Builder] consumers builder instance
+       def consumer_groups
+         config.internal.routing_builder
+       end
+
+       # Triggers reload of all cached Karafka app components, so we can use in-process
+       # in-development hot code reloading without a Karafka process restart
+       def reload
+         Karafka::Persistence::Consumers.clear
+         Karafka::Persistence::Topics.clear
+         config.internal.routing_builder.reload
+       end
+
+       Status.instance_methods(false).each do |delegated|
+         define_method(delegated) do
+           App.config.internal.status.send(delegated)
+         end
+       end
+
+       # Methods that should be delegated to the Karafka module
+       %i[
+         root
+         env
+         logger
+         monitor
+       ].each do |delegated|
+         define_method(delegated) do
+           Karafka.send(delegated)
+         end
+       end
+     end
+   end
+ end
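A typical boot file wires this class up by subclassing it, as the bundled data/lib/karafka/templates/karafka.rb.erb template does. A minimal sketch; the class name, broker address and topic/consumer names are illustrative, not taken from this diff:

  # karafka.rb
  class KarafkaApp < Karafka::App
    setup do |config|
      config.client_id = 'example_app'
      config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    end

    consumer_groups.draw do
      topic :example do
        consumer ExampleConsumer
      end
    end
  end

  KarafkaApp.boot!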
data/lib/karafka/attributes_map.rb
@@ -0,0 +1,68 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Both Karafka and Ruby-Kafka contain a lot of settings that can be applied on multiple
+   # levels. In Karafka that is on the consumer group and the topic level. In Ruby-Kafka it
+   # is on the consumer, subscription and consumption levels. In order to maintain order
+   # in managing those settings, this module was created. It contains details on where each
+   # setting should go and which layer (both in Karafka and Ruby-Kafka) is responsible for
+   # setting it and passing it forward
+   # @note Settings presented here cover all the settings that are being used across Karafka
+   module AttributesMap
+     class << self
+       # What settings should go where in ruby-kafka
+       # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
+       # @note All other settings will be passed to the Kafka.new method invocation.
+       #   All elements in this hash are just edge cases
+       def api_adapter
+         {
+           consumer: %i[
+             session_timeout offset_commit_interval offset_commit_threshold
+             offset_retention_time heartbeat_interval fetcher_max_queue_size
+           ],
+           subscribe: %i[start_from_beginning max_bytes_per_partition],
+           consumption: %i[min_bytes max_bytes max_wait_time],
+           pause: %i[pause_timeout pause_max_timeout pause_exponential_backoff],
+           # All the options that are under the kafka config namespace, but are not used
+           # directly with the kafka api, although from the Karafka user perspective they
+           # are still related to kafka. They should not be proxied anywhere
+           ignored: %i[reconnect_timeout automatically_mark_as_consumed]
+         }
+       end
+
+       # @return [Array<Symbol>] properties that can be set on a per topic level
+       def topic
+         (api_adapter[:subscribe] + %i[
+           backend
+           name
+           deserializer
+           responder
+           batch_consuming
+         ]).uniq
+       end
+
+       # @return [Array<Symbol>] properties that can be set on a per consumer group level
+       # @note Note that there are settings directly extracted from the config kafka namespace.
+       #   We do it that way, so we don't have to repeat the same setting keys over and over
+       #   again. Thanks to this solution, if any new setting is available for ruby-kafka, we
+       #   just need to add it to our configuration class and it will be handled automatically.
+       def consumer_group
+         # @note We don't ignore the api_adapter[:ignored] values as they should be ignored
+         #   only when proxying details to ruby-kafka. We use ignored fields internally in karafka
+         ignored_settings = api_adapter[:subscribe]
+         defined_settings = api_adapter.values.flatten
+         karafka_settings = %i[batch_fetching]
+         # This is a dirty and bad hack of dry-configurable to get keys before setting values
+         dynamically_proxied = Karafka::Setup::Config
+                               ._settings
+                               .settings
+                               .find { |s| s.name == :kafka }
+                               .value
+                               .names
+                               .to_a
+
+         (defined_settings + dynamically_proxied).uniq + karafka_settings - ignored_settings
+       end
+     end
+   end
+ end
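Because `topic` above is just `api_adapter[:subscribe]` plus five Karafka-only keys, its output is fully determined by this file and can be checked in a console:

  Karafka::AttributesMap.topic
  #=> [:start_from_beginning, :max_bytes_per_partition, :backend, :name,
  #    :deserializer, :responder, :batch_consuming]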
data/lib/karafka/backends/inline.rb
@@ -0,0 +1,16 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Namespace for all the different backends Karafka supports
+   module Backends
+     # Backend that just runs stuff asap without any scheduling
+     module Inline
+       private
+
+       # Executes consume code immediately (without enqueuing)
+       def process
+         Karafka.monitor.instrument('backends.inline.process', caller: self) { consume }
+       end
+     end
+   end
+ end
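The 'backends.inline.process' event name used above can be hooked from application code through the monitor. A hedged sketch, assuming the dry-monitor based subscribe API that Karafka.monitor exposes and that the instrumentation payload carries a :time entry, as the bundled stdout listener relies on:

  Karafka.monitor.subscribe('backends.inline.process') do |event|
    Karafka.logger.info "Inline consumption took #{event[:time]} ms"
  end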
data/lib/karafka/base_consumer.rb
@@ -0,0 +1,57 @@
+ # frozen_string_literal: true
+
+ # Karafka module namespace
+ module Karafka
+   # Base consumer from which all Karafka consumers should inherit
+   class BaseConsumer
+     extend Forwardable
+
+     # Allows us to mark messages as consumed for non-automatic mode without having
+     # to use the consumer client directly. We do it that way, because most people should
+     # not mess with the client instance directly (just in case)
+     %i[
+       mark_as_consumed
+       mark_as_consumed!
+       trigger_heartbeat
+       trigger_heartbeat!
+     ].each do |delegated_method_name|
+       def_delegator :client, delegated_method_name
+
+       private delegated_method_name
+     end
+
+     # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
+     attr_reader :topic
+     # @return [Karafka::Params::ParamsBatch] current params batch
+     attr_accessor :params_batch
+
+     # Assigns a topic to a consumer and builds up proper consumer functionalities
+     #   so that it can cooperate with the topic settings
+     # @param topic [Karafka::Routing::Topic]
+     def initialize(topic)
+       @topic = topic
+       Consumers::Includer.call(self)
+     end
+
+     # Executes the default consumer flow.
+     def call
+       process
+     end
+
+     private
+
+     # @return [Karafka::Connection::Client] messages consuming client that can be used to
+     #   manually commit offsets or pause / stop the consumer based on the business logic
+     def client
+       Persistence::Client.read
+     end
+
+     # Method that will perform the business logic on data received from Kafka (it will
+     #   consume the data)
+     # @note This method needs to be implemented in a subclass. We stub it here as a failover
+     #   in case someone forgets about it or makes a typo
+     def consume
+       raise NotImplementedError, 'Implement this in a subclass'
+     end
+   end
+ end
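A minimal consumer subclass overriding the `consume` stub above; the class name and logging body are illustrative, and `params.payload` assumes the deserialized payload accessor from this release's params API:

  class ExampleConsumer < Karafka::BaseConsumer
    def consume
      # params_batch is the accessor populated before #call / #consume runs
      params_batch.each do |params|
        Karafka.logger.info params.payload
      end
    end
  end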
data/lib/karafka/base_responder.rb
@@ -0,0 +1,226 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Base responder from which all Karafka responders should inherit
+   # Similar to the Rails responders concept. It allows us to design a flow from one app to
+   # another by isolating what responses should be sent (and where) based on a given action
+   # It differs from Rails responders in the way it works: in a std http request we can have
+   # one response, here we can have an unlimited number of them
+   #
+   # It has a simple API for defining where we should respond (and whether it is required)
+   #
+   # @example Basic usage (each registered topic is required to be used by default)
+   #   class Responder < BaseResponder
+   #     topic :new_action
+   #
+   #     def respond(data)
+   #       respond_to :new_action, data
+   #     end
+   #   end
+   #
+   # @example Responding to a topic with extra options
+   #   class Responder < BaseResponder
+   #     topic :new_action
+   #
+   #     def respond(data)
+   #       respond_to :new_action, data, partition_key: 'thing'
+   #     end
+   #   end
+   #
+   # @example Marking a topic as not required (we won't have to use it)
+   #   class Responder < BaseResponder
+   #     topic :required_topic
+   #     topic :new_action, required: false
+   #
+   #     def respond(data)
+   #       respond_to :required_topic, data
+   #     end
+   #   end
+   #
+   # @example Using the same topic multiple times
+   #   class Responder < BaseResponder
+   #     topic :required_topic
+   #
+   #     def respond(data)
+   #       data.each do |subset|
+   #         respond_to :required_topic, subset
+   #       end
+   #     end
+   #   end
+   #
+   # @example Specifying a serializer for a topic
+   #   class Responder < BaseResponder
+   #     topic :xml_topic, serializer: MyXMLSerializer
+   #
+   #     def respond(data)
+   #       data.each do |subset|
+   #         respond_to :xml_topic, subset
+   #       end
+   #     end
+   #   end
+   #
+   # @example Accepting multiple arguments in a respond method
+   #   class Responder < BaseResponder
+   #     topic :users_actions
+   #     topic :articles_viewed
+   #
+   #     def respond(user, article)
+   #       respond_to :users_actions, user
+   #       respond_to :articles_viewed, article
+   #     end
+   #   end
+   class BaseResponder
+     # Responder usage contract
+     CONTRACT = Karafka::Contracts::ResponderUsage.new.freeze
+
+     private_constant :CONTRACT
+
+     class << self
+       # Definitions of all topics that we want to be able to use in this responder should go here
+       attr_accessor :topics
+       # Contract that we can use to control and/or require some additional details upon options
+       # that are being passed to the producer. This can be particularly useful if we want to
+       # make sure that for example partition_key is always present.
+       attr_accessor :options_contract
+
+       # Registers a topic as one to which we will be able to respond
+       # @param topic_name [Symbol, String] name of the topic to which we want to respond
+       # @param options [Hash] hash with optional configuration details
+       def topic(topic_name, options = {})
+         options[:serializer] ||= Karafka::App.config.serializer
+         options[:registered] = true
+         self.topics ||= {}
+         topic_obj = Responders::Topic.new(topic_name, options)
+         self.topics[topic_obj.name] = topic_obj
+       end
+
+       # A simple alias for easier standalone responder usage.
+       # Instead of building it with new.call it allows (in case of using the JSON serializer)
+       # to just run it directly from the class level
+       # @param data Anything that we want to respond with
+       # @example Send user data with a responder
+       #   UsersCreatedResponder.call(@created_user)
+       def call(*data)
+         # Just in case there were no topics defined for a responder, we initialize with an
+         # empty hash so we don't have to handle a nil case
+         self.topics ||= {}
+         new.call(*data)
+       end
+     end
+
+     attr_reader :messages_buffer
+
+     # Creates a responder object
+     # @return [Karafka::BaseResponder] base responder descendant responder
+     def initialize
+       @messages_buffer = {}
+     end
+
+     # Performs the respond and validates that all the response requirements were met
+     # @param data Anything that we want to respond with
+     # @note We know that validators should be executed also before sending data to topics,
+     #   however the implementation gets way more complicated in that case, that's why we
+     #   check after everything was sent using the responder
+     # @example Send user data with a responder
+     #   UsersCreatedResponder.new.call(@created_user)
+     # @example Send user data with a responder using a non default Parser
+     #   UsersCreatedResponder.new(MyParser).call(@created_user)
+     def call(*data)
+       respond(*data)
+       validate_usage!
+       validate_options!
+       deliver!
+     end
+
+     private
+
+     # Checks if we met all the topics requirements. It will fail if we didn't send a message
+     #   to a registered required topic, etc.
+     def validate_usage!
+       registered_topics = self.class.topics.map do |name, topic|
+         topic.to_h.merge!(
+           usage_count: messages_buffer[name]&.count || 0
+         )
+       end
+
+       used_topics = messages_buffer.map do |name, usage|
+         topic = self.class.topics[name] || Responders::Topic.new(name, registered: false)
+         topic.to_h.merge!(usage_count: usage.count)
+       end
+
+       result = CONTRACT.call(
+         registered_topics: registered_topics,
+         used_topics: used_topics
+       )
+
+       return if result.success?
+
+       raise Karafka::Errors::InvalidResponderUsageError, result.errors.to_h
+     end
+
+     # Checks if we met all the options requirements before sending them to the producer.
+     def validate_options!
+       return true unless self.class.options_contract
+
+       messages_buffer.each_value do |messages_set|
+         messages_set.each do |message_data|
+           result = self.class.options_contract.call(message_data.last)
+           next if result.success?
+
+           raise Karafka::Errors::InvalidResponderMessageOptionsError, result.errors.to_h
+         end
+       end
+     end
+
+     # Takes all the messages from the buffer and delivers them one by one
+     # @note This method is executed after the validation, so we're sure that
+     #   what we send is legit and it will go to the proper topics
+     def deliver!
+       messages_buffer.each_value do |data_elements|
+         data_elements.each do |data, options|
+           # We map this topic name, so it will match the namespaced/etc topic in Kafka
+           # @note By default this will not change the topic (if the default mapper is used)
+           mapped_topic = Karafka::App.config.topic_mapper.outgoing(options[:topic])
+           external_options = options.merge(topic: mapped_topic)
+           producer(options).call(data, external_options)
+         end
+       end
+     end
+
+     # Method that needs to be implemented in a subclass. It should handle responding
+     #   on registered topics
+     # @param _data [Object] anything that we want to use to send to Kafka
+     # @raise [NotImplementedError] This method needs to be implemented in a subclass
+     def respond(*_data)
+       raise NotImplementedError, 'Implement this in a subclass'
+     end
+
+     # This method allows us to respond to a single topic with given data. It can be used
+     #   as many times as we need, especially when we have a 1:n flow
+     # @param topic [Symbol, String] topic to which we want to respond
+     # @param data [String, Object] string or object that we want to send
+     # @param options [Hash] options for waterdrop (e.g. partition_key)
+     # @note respond_to does not accept multiple data arguments.
+     def respond_to(topic, data, options = {})
+       # We normalize the format to string, as WaterDrop and Ruby-Kafka support only
+       # string topics
+       topic = topic.to_s
+
+       messages_buffer[topic] ||= []
+       messages_buffer[topic] << [
+         self.class.topics[topic].serializer.call(data),
+         options.merge(topic: topic)
+       ]
+     end
+
+     # @param options [Hash] options for waterdrop
+     # @return [Class] WaterDrop producer (sync or async, based on the settings)
+     def producer(options)
+       if self.class.topics[options[:topic]].async?
+         WaterDrop::AsyncProducer
+       else
+         WaterDrop::SyncProducer
+       end
+     end
+   end
+ end
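The `options_contract` hook above pairs naturally with dry-validation (a '~> 1.2' dependency in this gemspec). A hedged sketch that rejects any `respond_to` call missing a partition_key; the contract and responder names are illustrative, not part of this release:

  class PartitionKeyContract < Dry::Validation::Contract
    params do
      required(:partition_key).filled(:string)
    end
  end

  class UsersCreatedResponder < Karafka::BaseResponder
    self.options_contract = PartitionKeyContract.new

    topic :users_created

    def respond(user)
      respond_to :users_created, user, partition_key: user.fetch('id').to_s
    end
  end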