karafka 0.5.0.3 → 0.6.0.rc1

Files changed (76)
  1. checksums.yaml +4 -4
  2. data/.console_irbrc +13 -0
  3. data/.github/ISSUE_TEMPLATE.md +2 -0
  4. data/.gitignore +1 -0
  5. data/CHANGELOG.md +59 -1
  6. data/CODE_OF_CONDUCT.md +46 -0
  7. data/CONTRIBUTING.md +67 -0
  8. data/Gemfile +2 -1
  9. data/Gemfile.lock +46 -147
  10. data/README.md +51 -952
  11. data/Rakefile +5 -14
  12. data/karafka.gemspec +19 -13
  13. data/lib/karafka.rb +7 -4
  14. data/lib/karafka/app.rb +10 -6
  15. data/lib/karafka/attributes_map.rb +67 -0
  16. data/lib/karafka/base_controller.rb +42 -52
  17. data/lib/karafka/base_responder.rb +30 -14
  18. data/lib/karafka/base_worker.rb +11 -26
  19. data/lib/karafka/cli.rb +2 -0
  20. data/lib/karafka/cli/base.rb +2 -0
  21. data/lib/karafka/cli/console.rb +7 -1
  22. data/lib/karafka/cli/flow.rb +13 -13
  23. data/lib/karafka/cli/info.rb +7 -4
  24. data/lib/karafka/cli/install.rb +4 -3
  25. data/lib/karafka/cli/server.rb +3 -1
  26. data/lib/karafka/cli/worker.rb +2 -0
  27. data/lib/karafka/connection/config_adapter.rb +103 -0
  28. data/lib/karafka/connection/listener.rb +16 -12
  29. data/lib/karafka/connection/messages_consumer.rb +86 -0
  30. data/lib/karafka/connection/messages_processor.rb +74 -0
  31. data/lib/karafka/errors.rb +15 -29
  32. data/lib/karafka/fetcher.rb +10 -8
  33. data/lib/karafka/helpers/class_matcher.rb +2 -0
  34. data/lib/karafka/helpers/config_retriever.rb +46 -0
  35. data/lib/karafka/helpers/multi_delegator.rb +2 -0
  36. data/lib/karafka/loader.rb +4 -2
  37. data/lib/karafka/logger.rb +37 -36
  38. data/lib/karafka/monitor.rb +3 -1
  39. data/lib/karafka/params/interchanger.rb +2 -0
  40. data/lib/karafka/params/params.rb +34 -41
  41. data/lib/karafka/params/params_batch.rb +46 -0
  42. data/lib/karafka/parsers/json.rb +4 -2
  43. data/lib/karafka/patches/dry_configurable.rb +2 -0
  44. data/lib/karafka/process.rb +4 -2
  45. data/lib/karafka/responders/builder.rb +2 -0
  46. data/lib/karafka/responders/topic.rb +14 -6
  47. data/lib/karafka/routing/builder.rb +22 -59
  48. data/lib/karafka/routing/consumer_group.rb +54 -0
  49. data/lib/karafka/routing/mapper.rb +2 -0
  50. data/lib/karafka/routing/proxy.rb +37 -0
  51. data/lib/karafka/routing/router.rb +18 -16
  52. data/lib/karafka/routing/topic.rb +78 -0
  53. data/lib/karafka/schemas/config.rb +36 -0
  54. data/lib/karafka/schemas/consumer_group.rb +56 -0
  55. data/lib/karafka/schemas/responder_usage.rb +38 -0
  56. data/lib/karafka/server.rb +5 -3
  57. data/lib/karafka/setup/config.rb +79 -32
  58. data/lib/karafka/setup/configurators/base.rb +2 -0
  59. data/lib/karafka/setup/configurators/celluloid.rb +2 -0
  60. data/lib/karafka/setup/configurators/sidekiq.rb +2 -0
  61. data/lib/karafka/setup/configurators/water_drop.rb +15 -3
  62. data/lib/karafka/status.rb +2 -0
  63. data/lib/karafka/templates/app.rb.example +15 -5
  64. data/lib/karafka/templates/application_worker.rb.example +0 -6
  65. data/lib/karafka/version.rb +2 -1
  66. data/lib/karafka/workers/builder.rb +2 -0
  67. metadata +109 -60
  68. data/lib/karafka/cli/routes.rb +0 -36
  69. data/lib/karafka/connection/consumer.rb +0 -33
  70. data/lib/karafka/connection/message.rb +0 -17
  71. data/lib/karafka/connection/topic_consumer.rb +0 -94
  72. data/lib/karafka/responders/usage_validator.rb +0 -60
  73. data/lib/karafka/routing/route.rb +0 -113
  74. data/lib/karafka/setup/config_schema.rb +0 -44
  75. data/lib/karafka/setup/configurators/worker_glass.rb +0 -13
  76. data/lib/karafka/templates/config.ru.example +0 -13
data/lib/karafka/cli.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Karafka
   # Karafka framework Cli
   # If you want to add/modify command that belongs to CLI, please review all commands
data/lib/karafka/cli/base.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Karafka
   class Cli < Thor
     # Base class for all the command that we want to define
data/lib/karafka/cli/console.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Karafka
   # Karafka framework Cli
   class Cli
@@ -10,7 +12,11 @@ module Karafka
       # @example
       #   Karafka::Cli::Console.command #=> 'KARAFKA_CONSOLE=true bundle exec irb...'
       def self.command
-        "KARAFKA_CONSOLE=true bundle exec irb -r #{Karafka.boot_file}"
+        envs = [
+          "IRBRC='#{Karafka.gem_root}/.console_irbrc'",
+          'KARAFKA_CONSOLE=true'
+        ]
+        "#{envs.join(' ')} bundle exec irb"
       end

       # Start the Karafka console
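Note: the console no longer loads the boot file via "irb -r"; that setup moved into the bundled .console_irbrc (added in this release, see the file list above). A sketch of the resulting command string, assuming the gem is installed under /gems/karafka:

    Karafka::Cli::Console.command
    #=> "IRBRC='/gems/karafka/.console_irbrc' KARAFKA_CONSOLE=true bundle exec irb"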
data/lib/karafka/cli/flow.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Karafka
   # Karafka framework Cli
   class Cli
@@ -7,32 +9,30 @@ module Karafka

       # Print out all defined routes in alphabetical order
       def call
-        routes.each do |route|
-          any_topics = !route.responder&.topics.nil?
+        topics.each do |topic|
+          any_topics = !topic.responder&.topics.nil?

           if any_topics
-            puts "#{route.topic} =>"
+            puts "#{topic.name} =>"

-            route.responder.topics.each do |_name, topic|
+            topic.responder.topics.each do |_name, responder_topic|
               features = []
-              features << (topic.required? ? 'always' : 'conditionally')
-              features << (topic.multiple_usage? ? 'one or more' : 'exactly once')
+              features << (responder_topic.required? ? 'always' : 'conditionally')
+              features << (responder_topic.multiple_usage? ? 'one or more' : 'exactly once')

-              print topic.name, "(#{features.join(', ')})"
+              print responder_topic.name, "(#{features.join(', ')})"
             end
           else
-            puts "#{route.topic} => (nothing)"
+            puts "#{topic.name} => (nothing)"
           end
         end
       end

       private

-      # @return [Array<Karafka::Routing::Route>] all routes sorted in alphabetical order
-      def routes
-        Karafka::App.routes.sort do |route1, route2|
-          route1.topic <=> route2.topic
-        end
+      # @return [Array<Karafka::Routing::Topic>] all topics sorted in alphabetical order
+      def topics
+        Karafka::App.consumer_groups.map(&:topics).flatten.sort_by(&:name)
       end

       # Prints a given value with label in a nice way
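For illustration, hypothetical "bundle exec karafka flow" output for a topic whose responder always replies to exactly one topic (names are made up; note that print appends no trailing newline to the responder line):

    incoming_events =>
    processed_events(always, exactly once)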
data/lib/karafka/cli/info.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Karafka
   # Karafka framework Cli
   class Cli
@@ -11,13 +13,14 @@ module Karafka

         info = [
           "Karafka framework version: #{Karafka::VERSION}",
-          "Application name: #{config.name}",
-          "Inline mode: #{config.inline_mode}",
-          "Batch mode: #{config.batch_mode}",
+          "Application client id: #{config.client_id}",
+          "Inline processing: #{config.inline_processing}",
+          "Batch consuming: #{config.batch_consuming}",
+          "Batch processing: #{config.batch_processing}",
           "Number of threads: #{config.concurrency}",
           "Boot file: #{Karafka.boot_file}",
           "Environment: #{Karafka.env}",
-          "Kafka hosts: #{config.kafka.hosts}",
+          "Kafka seed brokers: #{config.kafka.seed_brokers}",
           "Redis: #{config.redis.to_h}"
         ]

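A hypothetical "bundle exec karafka info" output with the renamed settings (all values are made up):

    Karafka framework version: 0.6.0.rc1
    Application client id: example_app
    Inline processing: false
    Batch consuming: true
    Batch processing: false
    Number of threads: 5
    Boot file: /app/karafka.rb
    Environment: development
    Kafka seed brokers: ["kafka://localhost:9092"]
    Redis: {:url=>"redis://localhost:6379"}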
data/lib/karafka/cli/install.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Karafka
   # Karafka framework Cli
   class Cli
@@ -6,7 +8,7 @@ module Karafka
       desc 'Install all required things for Karafka application in current directory'

       # Directories created by default
-      INSTALL_DIRS = %w(
+      INSTALL_DIRS = %w[
         app/models
         app/controllers
         app/responders
@@ -14,12 +16,11 @@ module Karafka
         config
         log
         tmp/pids
-      ).freeze
+      ].freeze

       # Where should we map proper files from templates
       INSTALL_FILES_MAP = {
         'app.rb.example' => Karafka.boot_file.basename,
-        'config.ru.example' => 'config.ru',
         'sidekiq.yml.example' => 'config/sidekiq.yml.example',
         'application_worker.rb.example' => 'app/workers/application_worker.rb',
         'application_controller.rb.example' => 'app/controllers/application_controller.rb',
data/lib/karafka/cli/server.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Karafka
   # Karafka framework Cli
   class Cli
@@ -26,7 +28,7 @@ module Karafka
         end

         # Remove pidfile on shutdown
-        ObjectSpace.define_finalizer('string', proc { send(:clean) })
+        ObjectSpace.define_finalizer(String.new, proc { send(:clean) })

         # After we fork, we can boot celluloid again
         Karafka::Server.run
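The switch to String.new is a frozen_string_literal side effect: with the magic comment, the literal 'string' is frozen, and Ruby refuses to attach a finalizer to a frozen object, so a mutable anchor object is needed. A minimal sketch of the failure mode:

    # frozen_string_literal: true
    ObjectSpace.define_finalizer('string', proc {})   # raises - the literal is frozen
    ObjectSpace.define_finalizer(String.new, proc {}) # fine - the anchor object is mutable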
data/lib/karafka/cli/worker.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Karafka
   # Karafka framework Cli
   class Cli
data/lib/karafka/connection/config_adapter.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Namespace for all the things related to Kafka connection
+  module Connection
+    # Mapper used to convert our internal settings into ruby-kafka settings
+    # Since ruby-kafka has more and more options and there are few "levels" on which
+    # we have to apply them (despite the fact that in Karafka you configure all of it
+    # in one place), we have to remap it into what the ruby-kafka driver requires
+    # @note The good thing about the Kafka.new method is that it ignores all options that
+    #   do nothing. So we don't have to worry about injecting our internal settings
+    #   into the client and breaking stuff
+    module ConfigAdapter
+      class << self
+        # Builds all the configuration settings for the Kafka.new method
+        # @param _consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
+        # @return [Hash] hash with all the settings required by the Kafka.new method
+        def client(_consumer_group)
+          # This one is a default that takes all the settings except special
+          # cases defined in the map
+          settings = {
+            logger: ::Karafka.logger,
+            client_id: ::Karafka::App.config.client_id
+          }
+
+          kafka_configs.each do |setting_name, setting_value|
+            # All options for the config adapter should be ignored as we're just interested
+            # in what is left, as we want to pass all the options that are "typical"
+            # and not listed in the config_adapter special cases mapping. All the values
+            # from the config_adapter mapping go somewhere else, not to the client directly
+            next if AttributesMap.config_adapter.values.flatten.include?(setting_name)
+
+            settings[setting_name] = setting_value
+          end
+
+          sanitize(settings)
+        end
+
+        # Builds all the configuration settings for the kafka#consumer method
+        # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
+        # @return [Hash] hash with all the settings required by the Kafka#consumer method
+        def consumer(consumer_group)
+          settings = { group_id: consumer_group.id }
+
+          kafka_configs.each do |setting_name, setting_value|
+            next unless AttributesMap.config_adapter[:consumer].include?(setting_name)
+            next if settings.keys.include?(setting_name)
+            settings[setting_name] = setting_value
+          end
+
+          sanitize(settings)
+        end
+
+        # Builds all the configuration settings for the kafka consumer consume_each_batch and
+        # consume_each_message methods
+        # @param _consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
+        # @return [Hash] hash with all the settings required by the
+        #   Kafka::Consumer#consume_each_message and Kafka::Consumer#consume_each_batch methods
+        def consuming(_consumer_group)
+          settings = {}
+
+          kafka_configs.each do |setting_name, setting_value|
+            next unless AttributesMap.config_adapter[:consuming].include?(setting_name)
+            next if settings.keys.include?(setting_name)
+            settings[setting_name] = setting_value
+          end
+
+          sanitize(settings)
+        end
+
+        # Builds all the configuration settings for the kafka consumer#subscribe method
+        # @param topic [Karafka::Routing::Topic] topic that holds details for a given subscription
+        # @return [Hash] hash with all the settings required by the kafka consumer#subscribe method
+        def subscription(topic)
+          settings = { start_from_beginning: topic.start_from_beginning }
+
+          kafka_configs.each do |setting_name, setting_value|
+            next unless AttributesMap.config_adapter[:subscription].include?(setting_name)
+            next if settings.keys.include?(setting_name)
+            settings[setting_name] = setting_value
+          end
+
+          [topic.name, sanitize(settings)]
+        end
+
+        private
+
+        # Removes nil-valued keys from the final settings so the Kafka driver
+        # defaults can be used for those
+        # @param settings [Hash] settings that may contain nil values
+        # @return [Hash] settings without nil-valued keys (none of the karafka options should be nil)
+        def sanitize(settings)
+          settings.reject { |_key, value| value.nil? }
+        end
+
+        # @return [Hash] Kafka config details as a hash
+        def kafka_configs
+          ::Karafka::App.config.kafka.to_h
+        end
+      end
+    end
+  end
+end
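A usage sketch of the adapter (assumes a booted app; the group, topic, and return values shown are illustrative):

    consumer_group = Karafka::App.consumer_groups.first

    # Everything not claimed by AttributesMap.config_adapter goes to the client
    Karafka::Connection::ConfigAdapter.client(consumer_group)
    #=> { logger: ..., client_id: 'example_app', seed_brokers: [...], ... }

    # Consumer-level keys only, plus the derived group id
    Karafka::Connection::ConfigAdapter.consumer(consumer_group)
    #=> { group_id: 'example_app_example_group', ... }

    # Positional arguments ready for Kafka::Consumer#subscribe
    topic = consumer_group.topics.first
    Karafka::Connection::ConfigAdapter.subscription(topic)
    #=> ['example_topic', { start_from_beginning: true, ... }]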
data/lib/karafka/connection/listener.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Karafka
   module Connection
     # A single listener that listens to incoming messages from a single route
@@ -9,16 +11,18 @@ module Karafka

       execute_block_on_receiver :fetch_loop

-      attr_reader :route
+      attr_reader :consumer_group

+      # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group that holds details
+      #   on what topics and with what settings we should listen
       # @return [Karafka::Connection::Listener] listener instance
-      def initialize(route)
-        @route = route
+      def initialize(consumer_group)
+        @consumer_group = consumer_group
       end

       # Opens connection, gets messages and calls a block for each of the incoming messages
-      # @yieldparam [Karafka::BaseController] base controller descendant
-      # @yieldparam [Kafka::FetchedMessage] kafka fetched message
+      # @yieldparam [String] consumer group id
+      # @yieldparam [Array<Kafka::FetchedMessage>] kafka fetched messages
       # @note This will yield with a raw message - no preprocessing or reformatting
       # @note We catch all the errors here, so they don't affect other listeners (or this one)
       #   so we will be able to listen and consume other incoming messages.
@@ -27,25 +31,25 @@ module Karafka
       #   Kafka connections / Internet connection issues / Etc. Business logic problems should not
       #   propagate this far
       def fetch_loop(block)
-        topic_consumer.fetch_loop do |raw_message|
-          block.call(raw_message)
+        messages_consumer.fetch_loop do |raw_messages|
+          block.call(consumer_group.id, raw_messages)
         end
       # This is on purpose - see the notes for this method
       # rubocop:disable RescueException
       rescue Exception => e
         # rubocop:enable RescueException
         Karafka.monitor.notice_error(self.class, e)
-        @topic_consumer&.stop
-        retry if @topic_consumer
+        @messages_consumer&.stop
+        retry if @messages_consumer
       end

       private

-      # @return [Karafka::Connection::TopicConsumer] wrapped kafka consumer for a given topic
+      # @return [Karafka::Connection::MessagesConsumer] wrapped kafka consumer for a given topic
       #   consumption
       # @note It adds consumer into Karafka::Server consumers pool for graceful shutdown on exit
-      def topic_consumer
-        @topic_consumer ||= TopicConsumer.new(@route).tap do |consumer|
+      def messages_consumer
+        @messages_consumer ||= MessagesConsumer.new(consumer_group).tap do |consumer|
           Karafka::Server.consumers << consumer if Karafka::Server.consumers
         end
       end
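A wiring sketch for the new contract (the fetcher normally supplies the block; the lambda here is illustrative):

    listener = Karafka::Connection::Listener.new(consumer_group)

    # fetch_loop receives the block as an explicit argument because of
    # Celluloid's execute_block_on_receiver :fetch_loop declaration
    listener.fetch_loop(
      lambda do |group_id, raw_messages|
        Karafka::Connection::MessagesProcessor.process(group_id, raw_messages)
      end
    )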
data/lib/karafka/connection/messages_consumer.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Connection
+    # Class used as a wrapper around Ruby-Kafka to simplify additional
+    # features that we provide/might provide in future
+    class MessagesConsumer
+      # Creates a queue consumer that will pull the data from Kafka
+      # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group for which
+      #   we create a client
+      # @return [Karafka::Connection::MessagesConsumer] group consumer that can subscribe to
+      #   multiple topics
+      def initialize(consumer_group)
+        @consumer_group = consumer_group
+      end
+
+      # Opens connection, gets messages and calls a block for each of the incoming messages
+      # @yieldparam [Array<Kafka::FetchedMessage>] kafka fetched messages
+      # @note This will yield with raw messages - no preprocessing or reformatting.
+      def fetch_loop
+        send(
+          consumer_group.batch_consuming ? :consume_each_batch : :consume_each_message
+        ) do |messages|
+          yield(messages)
+        end
+      end
+
+      # Gracefully stops topic consumption
+      def stop
+        @kafka_consumer&.stop
+        @kafka_consumer = nil
+      end
+
+      private
+
+      attr_reader :consumer_group
+
+      # Consumes messages from Kafka in batches
+      # @yieldparam [Array<Kafka::FetchedMessage>] kafka fetched messages
+      def consume_each_batch
+        kafka_consumer.each_batch(
+          ConfigAdapter.consuming(consumer_group)
+        ) do |batch|
+          yield(batch.messages)
+        end
+      end
+
+      # Consumes messages from Kafka one by one
+      # @yieldparam [Array<Kafka::FetchedMessage>] kafka fetched messages
+      def consume_each_message
+        kafka_consumer.each_message(
+          ConfigAdapter.consuming(consumer_group)
+        ) do |message|
+          # always yield an array of messages, so we have a consistent API (always a batch)
+          yield([message])
+        end
+      end
+
+      # @return [Kafka::Consumer] returns a ready to consume Kafka consumer
+      #   that is set up to consume from topics of a given consumer group
+      def kafka_consumer
+        @kafka_consumer ||= kafka.consumer(
+          ConfigAdapter.consumer(consumer_group)
+        ).tap do |consumer|
+          consumer_group.topics.each do |topic|
+            consumer.subscribe(*ConfigAdapter.subscription(topic))
+          end
+        end
+      rescue Kafka::ConnectionError
+        # If we didn't wait, it would totally spam the log file with failed
+        # attempts if Kafka is down
+        sleep(consumer_group.reconnect_timeout)
+        # We don't log and just re-raise - this will be logged
+        # down the road
+        raise
+      end
+
+      # @return [Kafka] returns a Kafka client instance
+      # @note We don't cache it internally because we cache kafka_consumer that uses the kafka
+      #   object instance
+      def kafka
+        Kafka.new(ConfigAdapter.client(consumer_group))
+      end
+    end
+  end
+end
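A minimal consumption sketch (normally the Listener owns this object; consumer_group is a Karafka::Routing::ConsumerGroup):

    consumer = Karafka::Connection::MessagesConsumer.new(consumer_group)

    consumer.fetch_loop do |raw_messages|
      # raw_messages is always an Array<Kafka::FetchedMessage>: with
      # batch_consuming disabled, each message arrives wrapped in a
      # one-element array, so the API stays batch-shaped
      raw_messages.each { |message| puts message.value }
    end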
data/lib/karafka/connection/messages_processor.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Connection
+    # Class that consumes messages for which we listen
+    module MessagesProcessor
+      class << self
+        # Processes messages (does something with them)
+        # It will either schedule or run a proper controller action for messages
+        # @note This should be looped to obtain constant listening
+        # @note We catch all the errors here, to make sure that no failures
+        #   for a given consumption will affect other consumed messages
+        #   If we didn't catch them, they would propagate up until killing the Celluloid actor
+        # @param group_id [String] group_id of the group from which a given message came
+        # @param kafka_messages [Array<Kafka::FetchedMessage>] raw messages fetched from kafka
+        def process(group_id, kafka_messages)
+          # @note We always get messages by topic and partition, so we can take the topic from
+          #   the first one and it will be valid for all the messages
+          #   We map from the incoming topic name, as it might be namespaced, etc.
+          # @see topic_mapper internal docs
+          mapped_topic = Karafka::App.config.topic_mapper.incoming(kafka_messages[0].topic)
+          # @note We search based on the topic id - that is a combination of group id and
+          #   topic name
+          controller = Karafka::Routing::Router.build("#{group_id}_#{mapped_topic}")
+          handler = controller.topic.batch_processing ? :process_batch : :process_each
+
+          send(handler, controller, mapped_topic, kafka_messages)
+        # This is on purpose - see the notes for this method
+        # rubocop:disable RescueException
+        rescue Exception => e
+          # rubocop:enable RescueException
+          Karafka.monitor.notice_error(self, e)
+        end
+
+        private

+        # Processes a whole batch in one request (all at once)
+        # @param controller [Karafka::BaseController] base controller descendant
+        # @param mapped_topic [String] mapped topic name
+        # @param kafka_messages [Array<Kafka::FetchedMessage>] raw messages from kafka
+        def process_batch(controller, mapped_topic, kafka_messages)
+          messages_batch = kafka_messages.map do |kafka_message|
+            # Since we support topic mapping (for Kafka providers that require namespaces)
+            # we have to overwrite the topic with our mapped topic version
+            # @note For the default mapper, it will be the same as the topic
+            # @note We have to use instance_variable_set, as Kafka::FetchedMessage does not
+            #   provide attribute writers
+            kafka_message.instance_variable_set(:'@topic', mapped_topic)
+            kafka_message
+          end
+
+          controller.params_batch = messages_batch
+
+          Karafka.monitor.notice(self, messages_batch)
+
+          controller.schedule
+        end
+
+        # Processes messages one by one (like with std http requests)
+        # @param controller [Karafka::BaseController] base controller descendant
+        # @param mapped_topic [String] mapped topic name
+        # @param kafka_messages [Array<Kafka::FetchedMessage>] raw messages from kafka
+        def process_each(controller, mapped_topic, kafka_messages)
+          kafka_messages.each do |kafka_message|
+            # @note This is a simple trick - we just process one after another, but in order
+            #   not to handle both cases (single vs batch) everywhere, we just "fake" batching
+            #   with a single-message batch for each
+            process_batch(controller, mapped_topic, [kafka_message])
+          end
+        end
+      end
+    end
+  end
+end
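To make the routing step concrete, a trace for a hypothetical group and topic (ids are made up):

    # group_id: 'example_app_example_group', raw topic: 'example_topic'
    Karafka::Connection::MessagesProcessor.process(group_id, kafka_messages)
    # 1. topic_mapper.incoming('example_topic') #=> 'example_topic' (default mapper)
    # 2. Router.build('example_app_example_group_example_topic') #=> controller
    # 3. topic.batch_processing ? :process_batch : :process_each
    # 4. controller.params_batch = messages; controller.schedule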