karafka 1.2.2 → 1.4.0.rc1

Files changed (113)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +2 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +52 -0
  11. data/.gitignore +1 -0
  12. data/.ruby-version +1 -1
  13. data/CHANGELOG.md +157 -13
  14. data/CODE_OF_CONDUCT.md +1 -1
  15. data/CONTRIBUTING.md +1 -1
  16. data/Gemfile +5 -2
  17. data/Gemfile.lock +95 -79
  18. data/README.md +15 -3
  19. data/bin/karafka +1 -1
  20. data/certs/mensfeld.pem +25 -0
  21. data/config/errors.yml +38 -5
  22. data/docker-compose.yml +17 -0
  23. data/karafka.gemspec +19 -13
  24. data/lib/karafka.rb +10 -16
  25. data/lib/karafka/app.rb +14 -6
  26. data/lib/karafka/attributes_map.rb +13 -18
  27. data/lib/karafka/base_consumer.rb +19 -30
  28. data/lib/karafka/base_responder.rb +51 -29
  29. data/lib/karafka/cli.rb +2 -2
  30. data/lib/karafka/cli/console.rb +11 -9
  31. data/lib/karafka/cli/flow.rb +9 -7
  32. data/lib/karafka/cli/info.rb +4 -2
  33. data/lib/karafka/cli/install.rb +30 -6
  34. data/lib/karafka/cli/server.rb +11 -6
  35. data/lib/karafka/code_reloader.rb +67 -0
  36. data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +62 -21
  37. data/lib/karafka/connection/batch_delegator.rb +55 -0
  38. data/lib/karafka/connection/builder.rb +18 -0
  39. data/lib/karafka/connection/client.rb +40 -40
  40. data/lib/karafka/connection/listener.rb +26 -15
  41. data/lib/karafka/connection/message_delegator.rb +36 -0
  42. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  43. data/lib/karafka/consumers/callbacks.rb +32 -15
  44. data/lib/karafka/consumers/includer.rb +31 -18
  45. data/lib/karafka/consumers/responders.rb +2 -2
  46. data/lib/karafka/contracts.rb +10 -0
  47. data/lib/karafka/contracts/config.rb +21 -0
  48. data/lib/karafka/contracts/consumer_group.rb +206 -0
  49. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  50. data/lib/karafka/contracts/responder_usage.rb +54 -0
  51. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  52. data/lib/karafka/errors.rb +17 -19
  53. data/lib/karafka/fetcher.rb +28 -30
  54. data/lib/karafka/helpers/class_matcher.rb +12 -2
  55. data/lib/karafka/helpers/config_retriever.rb +1 -1
  56. data/lib/karafka/helpers/inflector.rb +26 -0
  57. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  58. data/lib/karafka/instrumentation/logger.rb +9 -6
  59. data/lib/karafka/instrumentation/monitor.rb +15 -9
  60. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  61. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  62. data/lib/karafka/params/batch_metadata.rb +26 -0
  63. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  64. data/lib/karafka/params/builders/params.rb +38 -0
  65. data/lib/karafka/params/builders/params_batch.rb +25 -0
  66. data/lib/karafka/params/metadata.rb +20 -0
  67. data/lib/karafka/params/params.rb +50 -0
  68. data/lib/karafka/params/params_batch.rb +35 -21
  69. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  70. data/lib/karafka/persistence/client.rb +15 -11
  71. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  72. data/lib/karafka/persistence/topics.rb +48 -0
  73. data/lib/karafka/process.rb +0 -4
  74. data/lib/karafka/responders/builder.rb +1 -1
  75. data/lib/karafka/responders/topic.rb +6 -8
  76. data/lib/karafka/routing/builder.rb +36 -8
  77. data/lib/karafka/routing/consumer_group.rb +1 -1
  78. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  79. data/lib/karafka/routing/proxy.rb +10 -1
  80. data/lib/karafka/routing/topic.rb +5 -3
  81. data/lib/karafka/routing/topic_mapper.rb +16 -18
  82. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  83. data/lib/karafka/serialization/json/serializer.rb +31 -0
  84. data/lib/karafka/server.rb +34 -49
  85. data/lib/karafka/setup/config.rb +74 -40
  86. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  87. data/lib/karafka/setup/dsl.rb +0 -1
  88. data/lib/karafka/status.rb +7 -3
  89. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  90. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  91. data/lib/karafka/templates/karafka.rb.erb +92 -0
  92. data/lib/karafka/version.rb +1 -1
  93. metadata +97 -73
  94. metadata.gz.sig +4 -0
  95. data/.travis.yml +0 -13
  96. data/lib/karafka/callbacks.rb +0 -30
  97. data/lib/karafka/callbacks/config.rb +0 -22
  98. data/lib/karafka/callbacks/dsl.rb +0 -16
  99. data/lib/karafka/connection/delegator.rb +0 -46
  100. data/lib/karafka/instrumentation/listener.rb +0 -112
  101. data/lib/karafka/loader.rb +0 -28
  102. data/lib/karafka/params/dsl.rb +0 -156
  103. data/lib/karafka/parsers/json.rb +0 -38
  104. data/lib/karafka/patches/dry_configurable.rb +0 -35
  105. data/lib/karafka/persistence/topic.rb +0 -29
  106. data/lib/karafka/schemas/config.rb +0 -24
  107. data/lib/karafka/schemas/consumer_group.rb +0 -77
  108. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  109. data/lib/karafka/schemas/responder_usage.rb +0 -39
  110. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  111. data/lib/karafka/setup/configurators/base.rb +0 -29
  112. data/lib/karafka/setup/configurators/params.rb +0 -25
  113. data/lib/karafka/templates/karafka.rb.example +0 -54

data/lib/karafka/cli/flow.rb +9 -7
@@ -11,20 +11,22 @@ module Karafka
       def call
         topics.each do |topic|
           any_topics = !topic.responder&.topics.nil?
+          log_messages = []
 
           if any_topics
-            puts "#{topic.name} =>"
+            log_messages << "#{topic.name} =>"
 
             topic.responder.topics.each_value do |responder_topic|
               features = []
               features << (responder_topic.required? ? 'always' : 'conditionally')
-              features << (responder_topic.multiple_usage? ? 'one or more' : 'exactly once')
 
-              print responder_topic.name, "(#{features.join(', ')})"
+              log_messages << format(responder_topic.name, "(#{features.join(', ')})")
             end
           else
-            puts "#{topic.name} => (nothing)"
+            log_messages << "#{topic.name} => (nothing)"
          end
+
+          Karafka.logger.info(log_messages.join("\n"))
         end
       end
 
@@ -35,11 +37,11 @@ module Karafka
         Karafka::App.consumer_groups.map(&:topics).flatten.sort_by(&:name)
       end
 
-      # Prints a given value with label in a nice way
+      # Formats a given value with label in a nice way
       # @param label [String] label describing value
       # @param value [String] value that should be printed
-      def print(label, value)
-        printf "%-25s %s\n", " - #{label}:", value
+      def format(label, value)
+        " - #{label}: #{value}"
       end
     end
   end
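
Net effect of the change above: each topic's flow description is buffered and emitted as a single `Karafka.logger.info` entry instead of a series of STDOUT writes. A minimal sketch of one iteration, using hypothetical topic names (note that `format` here is the private helper defined in this class, shadowing `Kernel#format`):

    # Hypothetical names, for illustration only
    log_messages = []
    log_messages << 'users_events =>'
    log_messages << format('users_notifier', '(always)')
    Karafka.logger.info(log_messages.join("\n"))
    # Logged as:
    #   users_events =>
    #    - users_notifier: (always)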

data/lib/karafka/cli/info.rb +4 -2
@@ -12,7 +12,9 @@ module Karafka
         config = Karafka::App.config
 
         info = [
-          "Karafka framework version: #{Karafka::VERSION}",
+          "Karafka version: #{Karafka::VERSION}",
+          "Ruby version: #{RUBY_VERSION}",
+          "Ruby-kafka version: #{::Kafka::VERSION}",
           "Application client id: #{config.client_id}",
           "Backend: #{config.backend}",
           "Batch fetching: #{config.batch_fetching}",
@@ -22,7 +24,7 @@ module Karafka
           "Kafka seed brokers: #{config.kafka.seed_brokers}"
         ]
 
-        puts(info.join("\n"))
+        Karafka.logger.info(info.join("\n"))
       end
     end
   end

data/lib/karafka/cli/install.rb +30 -6
@@ -1,5 +1,7 @@
 # frozen_string_literal: true
 
+require 'erb'
+
 module Karafka
   # Karafka framework Cli
   class Cli < Thor
@@ -11,18 +13,30 @@ module Karafka
       INSTALL_DIRS = %w[
         app/consumers
         app/responders
+        app/workers
         config
+        lib
         log
         tmp/pids
       ].freeze
 
       # Where should we map proper files from templates
       INSTALL_FILES_MAP = {
-        'karafka.rb.example' => Karafka.boot_file.basename,
-        'application_consumer.rb.example' => 'app/consumers/application_consumer.rb',
-        'application_responder.rb.example' => 'app/responders/application_responder.rb'
+        'karafka.rb.erb' => Karafka.boot_file.basename,
+        'application_consumer.rb.erb' => 'app/consumers/application_consumer.rb',
+        'application_responder.rb.erb' => 'app/responders/application_responder.rb'
       }.freeze
 
+      # @param args [Array] all the things that Thor CLI accepts
+      def initialize(*args)
+        super
+        @rails = Bundler::LockfileParser.new(
+          Bundler.read_file(
+            Bundler.default_lockfile
+          )
+        ).dependencies.key?('rails')
+      end
+
       # Install all required things for Karafka application in current directory
       def call
         INSTALL_DIRS.each do |dir|
@@ -31,12 +45,22 @@ module Karafka
 
         INSTALL_FILES_MAP.each do |source, target|
           target = Karafka.root.join(target)
-          next if File.exist?(target)
 
-          source = Karafka.core_root.join("templates/#{source}")
-          FileUtils.cp_r(source, target)
+          template = File.read(Karafka.core_root.join("templates/#{source}"))
+          # @todo Replace with the keyword argument version once we don't have to support
+          #   Ruby < 2.6
+          render = ::ERB.new(template, nil, '-').result(binding)
+
+          File.open(target, 'w') { |file| file.write(render) }
         end
       end
+
+      # @return [Boolean] true if we have Rails loaded
+      #   This allows us to generate customized karafka.rb template with some tweaks specific for
+      #   Rails
+      def rails?
+        @rails
+      end
     end
   end
 end
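
For reference on the `@todo` above: `ERB.new(template, nil, '-')` uses the positional trim-mode argument that Ruby 2.6 deprecated. The keyword form it would eventually be replaced with looks like this — a sketch, not part of the diff, and Ruby >= 2.6 only (the template path is hypothetical):

    require 'erb'

    template = File.read('templates/karafka.rb.erb') # hypothetical path
    # trim_mode: '-' keeps the <%- and -%> trimming behavior of the old third argument
    render = ERB.new(template, trim_mode: '-').result(binding)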

data/lib/karafka/cli/server.rb +11 -6
@@ -5,6 +5,11 @@ module Karafka
   class Cli < Thor
     # Server Karafka Cli action
     class Server < Base
+      # Server config settings contract
+      CONTRACT = Contracts::ServerCliOptions.new.freeze
+
+      private_constant :CONTRACT
+
       desc 'Start the Karafka server (short-cut alias: "s")'
       option aliases: 's'
       option :daemon, default: false, type: :boolean, aliases: :d
@@ -13,11 +18,10 @@
 
       # Start the Karafka server
       def call
-        validate!
-
-        puts 'Starting Karafka server'
         cli.info
 
+        validate!
+
         if cli.options[:daemon]
           FileUtils.mkdir_p File.dirname(cli.options[:pid])
           daemonize
@@ -31,7 +35,7 @@
         # We want to delay the moment in which the pidfile is removed as much as we can,
         # so instead of removing it after the server stops running, we rely on the gc moment
         # when this object gets removed (it is a bit later), so it is closer to the actual
-        # system process end. We do that, so monitoring and deployment tools that rely on pids
+        # system process end. We do that, so monitoring and deployment tools that rely on a pid
         # won't alarm or start new system process up until the current one is finished
         ObjectSpace.define_finalizer(self, proc { send(:clean) })
 
@@ -43,9 +47,10 @@
       # Checks the server cli configuration
       # options validations in terms of app setup (topics, pid existence, etc)
       def validate!
-        result = Schemas::ServerCliOptions.call(cli.options)
+        result = CONTRACT.call(cli.options)
         return if result.success?
-        raise Errors::InvalidConfiguration, result.errors
+
+        raise Errors::InvalidConfigurationError, result.errors.to_h
      end
 
       # Detaches current process into background and writes its pidfile
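
The switch from `Schemas::ServerCliOptions` (dry-validation 0.x schemas) to a `Contracts::ServerCliOptions` instance follows the dry-validation 1.x contract API: `#call` returns a result object exposing `#success?` and `#errors`. A minimal sketch with an illustrative options hash (the exact keys the contract checks live in the new `contracts/server_cli_options.rb`):

    # Illustrative only; the real options come from Thor's cli.options
    result = Karafka::Contracts::ServerCliOptions.new.call(pid: 'tmp/pids/karafka')

    result.success?    # => true / false
    result.errors.to_h # => {} on success, or e.g. { pid: ['is missing'] } on failure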

data/lib/karafka/code_reloader.rb +67 -0 (new file)
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Special type of a listener, that is not an instrumentation one, but one that triggers
+  # code reload in the development mode after each fetched batch (or message)
+  #
+  # Please refer to the development code reload sections for details on the benefits and downsides
+  # of the in-process code reloading
+  class CodeReloader
+    # This mutex is needed as we might have an application that has multiple consumer groups
+    # running in separate threads and we should not trigger reload before fully reloading the app
+    # in previous thread
+    MUTEX = Mutex.new
+
+    private_constant :MUTEX
+
+    # @param reloaders [Array<Object>] any code loaders that we use in this app. Whether it is
+    #   the Rails loader, Zeitwerk or anything else that allows reloading triggering
+    # @param block [Proc] yields given block just before reloading. This can be used to hook custom
+    #   reloading stuff, that ain't reloaders (for example for resetting dry-events registry)
+    def initialize(*reloaders, &block)
+      @reloaders = reloaders
+      @block = block
+    end
+
+    # Binds to the instrumentation events and triggers reload
+    # @param _event [Dry::Event] empty dry event
+    # @note Since we de-register all the user defined objects and redraw routes, it means that
+    #   we won't be able to do a multi-batch buffering in the development mode as each of the
+    #   batches will be buffered on a newly created "per fetch" instance.
+    def on_connection_listener_fetch_loop(_event)
+      reload
+    end
+
+    private
+
+    # Triggers reload of both standard and Rails reloaders as well as expires all internals of
+    # Karafka, so it can be rediscovered and rebuilt
+    def reload
+      MUTEX.synchronize do
+        if @reloaders[0].respond_to?(:execute)
+          reload_with_rails
+        else
+          reload_without_rails
+        end
+      end
+    end
+
+    # Rails reloading procedure
+    def reload_with_rails
+      updatable = @reloaders.select(&:updated?)
+
+      return if updatable.empty?
+
+      updatable.each(&:execute)
+      @block&.call
+      Karafka::App.reload
+    end
+
+    # Zeitwerk and other reloaders
+    def reload_without_rails
+      @reloaders.each(&:reload)
+      @block&.call
+      Karafka::App.reload
+    end
+  end
+end
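
Since `CodeReloader` responds to `on_connection_listener_fetch_loop`, it can be subscribed to Karafka's monitor like any other listener; the reload then runs before each fetch loop iteration. A sketch of the development-mode wiring (the generated `karafka.rb.erb` template contains a variant of this for Rails apps):

    if Rails.env.development?
      Karafka.monitor.subscribe(
        # Rails exposes its reloaders; with plain Zeitwerk one would pass the loader instead
        Karafka::CodeReloader.new(*Rails.application.reloaders)
      )
    end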

data/lib/karafka/connection/{config_adapter.rb → api_adapter.rb} +62 -21
@@ -3,22 +3,23 @@
 module Karafka
   # Namespace for all the things related to Kafka connection
   module Connection
-    # Mapper used to convert our internal settings into ruby-kafka settings
+    # Mapper used to convert our internal settings into ruby-kafka settings based on their
+    # API requirements.
     # Since ruby-kafka has more and more options and there are few "levels" on which
     # we have to apply them (despite the fact, that in Karafka you configure all of it
     # in one place), we have to remap it into what ruby-kafka driver requires
     # @note The good thing about Kafka.new method is that it ignores all options that
     #   do nothing. So we don't have to worry about injecting our internal settings
     #   into the client and breaking stuff
-    module ConfigAdapter
+    module ApiAdapter
       class << self
         # Builds all the configuration settings for Kafka.new method
-        # @param _consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
+        # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
         # @return [Array<Hash>] Array with all the client arguments including hash with all
         #   the settings required by Kafka.new method
         # @note We return array, so we can inject any arguments we want, in case of changes in the
         #   raw driver
-        def client(_consumer_group)
+        def client(consumer_group)
           # This one is a default that takes all the settings except special
           # cases defined in the map
           settings = {
@@ -26,14 +27,17 @@ module Karafka
             client_id: ::Karafka::App.config.client_id
           }
 
-          kafka_configs.each do |setting_name, setting_value|
+          kafka_configs.each_key do |setting_name|
             # All options for config adapter should be ignored as we're just interested
             # in what is left, as we want to pass all the options that are "typical"
-            # and not listed in the config_adapter special cases mapping. All the values
-            # from the config_adapter mapping go somewhere else, not to the client directly
-            next if AttributesMap.config_adapter.values.flatten.include?(setting_name)
+            # and not listed in the api_adapter special cases mapping. All the values
+            # from the api_adapter mapping go somewhere else, not to the client directly
+            next if AttributesMap.api_adapter.values.flatten.include?(setting_name)
 
-            settings[setting_name] = setting_value
+            # Settings for each consumer group are either defined per consumer group or are
+            # inherited from the global/general settings level, thus we don't have to fetch them
+            # from the kafka settings as they are already on a consumer group level
+            settings[setting_name] = consumer_group.public_send(setting_name)
           end
 
           settings_hash = sanitize(settings)
@@ -58,26 +62,61 @@ module Karafka
         # @return [Array<Hash>] Array with all the arguments required by consuming method
         #   including hash with all the settings required by
         #   Kafka::Consumer#consume_each_message and Kafka::Consumer#consume_each_batch method
-        def consuming(consumer_group)
-          settings = {
-            automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
-          }
-          [sanitize(fetch_for(:consuming, consumer_group, settings))]
+        def consumption(consumer_group)
+          [
+            sanitize(
+              fetch_for(
+                :consumption,
+                consumer_group,
+                automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
+              )
+            )
+          ]
         end
 
         # Builds all the configuration settings for kafka consumer#subscribe method
         # @param topic [Karafka::Routing::Topic] topic that holds details for a given subscription
         # @return [Hash] hash with all the settings required by kafka consumer#subscribe method
-        def subscription(topic)
-          settings = fetch_for(:subscription, topic)
+        def subscribe(topic)
+          settings = fetch_for(:subscribe, topic)
           [Karafka::App.config.topic_mapper.outgoing(topic.name), sanitize(settings)]
         end
 
         # Builds all the configuration settings required by kafka consumer#pause method
+        # @param topic [String] topic that we want to pause
+        # @param partition [Integer] number partition that we want to pause
         # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
-        # @return [Hash] hash with all the settings required to pause kafka consumer
-        def pausing(consumer_group)
-          { timeout: consumer_group.pause_timeout }
+        # @return [Array] array with all the details required to pause kafka consumer
+        def pause(topic, partition, consumer_group)
+          [
+            Karafka::App.config.topic_mapper.outgoing(topic),
+            partition,
+            {
+              timeout: consumer_group.pause_timeout,
+              max_timeout: consumer_group.pause_max_timeout,
+              exponential_backoff: consumer_group.pause_exponential_backoff
+            }
+          ]
+        end
+
+        # Remaps topic details taking the topic mapper feature into consideration.
+        # @param params [Karafka::Params::Params] params instance
+        # @return [Array] array with all the details needed by ruby-kafka to mark message
+        #   as processed
+        # @note When default empty topic mapper is used, no need for any conversion as the
+        #   internal and external format are exactly the same
+        def mark_message_as_processed(params)
+          # Majority of users don't use custom topic mappers. No need to change anything when it
+          # is a default mapper that does not change anything. Only some cloud providers require
+          # topics to be remapped
+          return [params.metadata] if Karafka::App.config.topic_mapper.is_a?(
+            Karafka::Routing::TopicMapper
+          )
+
+          # @note We don't use tap as it is around 13% slower than non-dup version
+          dupped = params.metadata.dup
+          dupped['topic'] = Karafka::App.config.topic_mapper.outgoing(params.metadata.topic)
+          [dupped]
         end
 
         private
@@ -90,10 +129,12 @@ module Karafka
         def fetch_for(namespace_key, route_layer, preexisting_settings = {})
           kafka_configs.each_key do |setting_name|
             # Ignore settings that are not related to our namespace
-            next unless AttributesMap.config_adapter[namespace_key].include?(setting_name)
+            next unless AttributesMap.api_adapter[namespace_key].include?(setting_name)
+
             # Ignore settings that are already initialized
             # In case they are in preexisting settings fetched differently
-            next if preexisting_settings.keys.include?(setting_name)
+            next if preexisting_settings.key?(setting_name)
+
             # Fetch all the settings from a given layer object. Objects can handle the fallback
             # to the kafka settings, so
             preexisting_settings[setting_name] = route_layer.send(setting_name)
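
The reworked `pause` returns positional arguments plus an options hash, shaped for ruby-kafka's `Kafka::Consumer#pause(topic, partition, timeout:, max_timeout:, exponential_backoff:)`. A sketch of how a caller (the connection client) can splat it through, with illustrative topic and partition values:

    # Illustrative topic/partition; consumer_group comes from the routing layer
    kafka_consumer.pause(
      *Karafka::Connection::ApiAdapter.pause('users_events', 0, consumer_group)
    )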

data/lib/karafka/connection/batch_delegator.rb +55 -0 (new file)
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Connection
+    # Class that delegates processing of batch received messages for which we listen to
+    # a proper processor
+    module BatchDelegator
+      class << self
+        # Delegates messages (does something with them)
+        # It will either schedule or run a proper processor action for messages
+        # @param group_id [String] group_id of a group from which a given message came
+        # @param kafka_batch [<Kafka::FetchedBatch>] raw messages fetched batch
+        # @note This should be looped to obtain a constant delegating of new messages
+        def call(group_id, kafka_batch)
+          topic = Persistence::Topics.fetch(group_id, kafka_batch.topic)
+          consumer = Persistence::Consumers.fetch(topic, kafka_batch.partition)
+
+          Karafka.monitor.instrument(
+            'connection.batch_delegator.call',
+            caller: self,
+            consumer: consumer,
+            kafka_batch: kafka_batch
+          ) do
+            # Due to how ruby-kafka is built, we have the metadata that is stored on the batch
+            # level only available for batch consuming
+            consumer.batch_metadata = Params::Builders::BatchMetadata.from_kafka_batch(
+              kafka_batch,
+              topic
+            )
+
+            kafka_messages = kafka_batch.messages
+
+            # Depending on a case (persisted or not) we might use new consumer instance per
+            # each batch, or use the same one for all of them (for implementing buffering, etc.)
+            if topic.batch_consuming
+              consumer.params_batch = Params::Builders::ParamsBatch.from_kafka_messages(
+                kafka_messages,
+                topic
+              )
+              consumer.call
+            else
+              kafka_messages.each do |kafka_message|
+                consumer.params_batch = Params::Builders::ParamsBatch.from_kafka_messages(
+                  [kafka_message],
+                  topic
+                )
+                consumer.call
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
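
Together with the new `MessageDelegator`, this module replaces the removed `Connection::Delegator`. A sketch of how the connection listener is expected to route fetched data to it, assuming the client's fetch loop yields the raw data together with its type:

    # Sketch based on the listener/delegator split in this release
    client.fetch_loop do |raw_data, type|
      case type
      when :message then MessageDelegator.call(consumer_group.id, raw_data)
      when :batch   then BatchDelegator.call(consumer_group.id, raw_data)
      end
    end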

data/lib/karafka/connection/builder.rb +18 -0 (new file)
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Connection
+    # Builder used to construct Kafka client
+    module Builder
+      class << self
+        # Builds a Kafka::Client instance that we use to work with Kafka cluster
+        # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group for which we want
+        #   to have a new Kafka client
+        # @return [::Kafka::Client] returns a Kafka client
+        def call(consumer_group)
+          Kafka.new(*ApiAdapter.client(consumer_group))
+        end
+      end
+    end
+  end
+end
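
Usage is a one-liner wherever a raw client is needed; a sketch, taking the consumer group from the app routing (as seen earlier, `Karafka::App.consumer_groups` exposes the configured groups):

    consumer_group = Karafka::App.consumer_groups.first
    kafka_client = Karafka::Connection::Builder.call(consumer_group)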