karafka 0.5.0.3 → 0.6.0.rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. checksums.yaml +4 -4
  2. data/.console_irbrc +13 -0
  3. data/.github/ISSUE_TEMPLATE.md +2 -0
  4. data/.gitignore +1 -0
  5. data/CHANGELOG.md +59 -1
  6. data/CODE_OF_CONDUCT.md +46 -0
  7. data/CONTRIBUTING.md +67 -0
  8. data/Gemfile +2 -1
  9. data/Gemfile.lock +46 -147
  10. data/README.md +51 -952
  11. data/Rakefile +5 -14
  12. data/karafka.gemspec +19 -13
  13. data/lib/karafka.rb +7 -4
  14. data/lib/karafka/app.rb +10 -6
  15. data/lib/karafka/attributes_map.rb +67 -0
  16. data/lib/karafka/base_controller.rb +42 -52
  17. data/lib/karafka/base_responder.rb +30 -14
  18. data/lib/karafka/base_worker.rb +11 -26
  19. data/lib/karafka/cli.rb +2 -0
  20. data/lib/karafka/cli/base.rb +2 -0
  21. data/lib/karafka/cli/console.rb +7 -1
  22. data/lib/karafka/cli/flow.rb +13 -13
  23. data/lib/karafka/cli/info.rb +7 -4
  24. data/lib/karafka/cli/install.rb +4 -3
  25. data/lib/karafka/cli/server.rb +3 -1
  26. data/lib/karafka/cli/worker.rb +2 -0
  27. data/lib/karafka/connection/config_adapter.rb +103 -0
  28. data/lib/karafka/connection/listener.rb +16 -12
  29. data/lib/karafka/connection/messages_consumer.rb +86 -0
  30. data/lib/karafka/connection/messages_processor.rb +74 -0
  31. data/lib/karafka/errors.rb +15 -29
  32. data/lib/karafka/fetcher.rb +10 -8
  33. data/lib/karafka/helpers/class_matcher.rb +2 -0
  34. data/lib/karafka/helpers/config_retriever.rb +46 -0
  35. data/lib/karafka/helpers/multi_delegator.rb +2 -0
  36. data/lib/karafka/loader.rb +4 -2
  37. data/lib/karafka/logger.rb +37 -36
  38. data/lib/karafka/monitor.rb +3 -1
  39. data/lib/karafka/params/interchanger.rb +2 -0
  40. data/lib/karafka/params/params.rb +34 -41
  41. data/lib/karafka/params/params_batch.rb +46 -0
  42. data/lib/karafka/parsers/json.rb +4 -2
  43. data/lib/karafka/patches/dry_configurable.rb +2 -0
  44. data/lib/karafka/process.rb +4 -2
  45. data/lib/karafka/responders/builder.rb +2 -0
  46. data/lib/karafka/responders/topic.rb +14 -6
  47. data/lib/karafka/routing/builder.rb +22 -59
  48. data/lib/karafka/routing/consumer_group.rb +54 -0
  49. data/lib/karafka/routing/mapper.rb +2 -0
  50. data/lib/karafka/routing/proxy.rb +37 -0
  51. data/lib/karafka/routing/router.rb +18 -16
  52. data/lib/karafka/routing/topic.rb +78 -0
  53. data/lib/karafka/schemas/config.rb +36 -0
  54. data/lib/karafka/schemas/consumer_group.rb +56 -0
  55. data/lib/karafka/schemas/responder_usage.rb +38 -0
  56. data/lib/karafka/server.rb +5 -3
  57. data/lib/karafka/setup/config.rb +79 -32
  58. data/lib/karafka/setup/configurators/base.rb +2 -0
  59. data/lib/karafka/setup/configurators/celluloid.rb +2 -0
  60. data/lib/karafka/setup/configurators/sidekiq.rb +2 -0
  61. data/lib/karafka/setup/configurators/water_drop.rb +15 -3
  62. data/lib/karafka/status.rb +2 -0
  63. data/lib/karafka/templates/app.rb.example +15 -5
  64. data/lib/karafka/templates/application_worker.rb.example +0 -6
  65. data/lib/karafka/version.rb +2 -1
  66. data/lib/karafka/workers/builder.rb +2 -0
  67. metadata +109 -60
  68. data/lib/karafka/cli/routes.rb +0 -36
  69. data/lib/karafka/connection/consumer.rb +0 -33
  70. data/lib/karafka/connection/message.rb +0 -17
  71. data/lib/karafka/connection/topic_consumer.rb +0 -94
  72. data/lib/karafka/responders/usage_validator.rb +0 -60
  73. data/lib/karafka/routing/route.rb +0 -113
  74. data/lib/karafka/setup/config_schema.rb +0 -44
  75. data/lib/karafka/setup/configurators/worker_glass.rb +0 -13
  76. data/lib/karafka/templates/config.ru.example +0 -13
data/Rakefile CHANGED
@@ -1,17 +1,8 @@
+ # frozen_string_literal: true
+
  require 'bundler'
  require 'rake'
- require 'polishgeeks-dev-tools'
-
- PolishGeeks::DevTools.setup do |config|
-   config.brakeman = false
-   config.haml_lint = false
- end
-
- desc 'Self check using polishgeeks-dev-tools'
- task :check do
-   PolishGeeks::DevTools::Runner.new.execute(
-     PolishGeeks::DevTools::Logger.new
-   )
- end
+ require 'rspec/core/rake_task'
 
- task default: :check
+ RSpec::Core::RakeTask.new(:spec)
+ task default: :spec
data/karafka.gemspec CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  lib = File.expand_path('../lib', __FILE__)
  $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 
@@ -8,28 +10,32 @@ Gem::Specification.new do |spec|
    spec.version = ::Karafka::VERSION
    spec.platform = Gem::Platform::RUBY
    spec.authors = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
-   spec.email = %w( maciej@coditsu.io pavlo.vavruk@gmail.com adam99g@gmail.com )
+   spec.email = %w[maciej@coditsu.io pavlo.vavruk@gmail.com adam99g@gmail.com]
    spec.homepage = 'https://github.com/karafka/karafka'
-   spec.summary = %q{ Ruby based framework for working with Apache Kafka }
-   spec.description = %q{ Framework used to simplify Apache Kafka based Ruby applications development }
+   spec.summary = 'Ruby based framework for working with Apache Kafka'
+   spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
    spec.license = 'MIT'
 
-   spec.add_development_dependency 'bundler', '~> 1.2'
-
-   spec.add_dependency 'ruby-kafka', '= 0.3.17'
+   spec.add_dependency 'ruby-kafka', '>= 0.4'
    spec.add_dependency 'sidekiq', '>= 4.2'
-   spec.add_dependency 'worker-glass', '~> 0.2'
-   spec.add_dependency 'celluloid', '~> 0.17'
+   spec.add_dependency 'celluloid'
    spec.add_dependency 'envlogic', '~> 1.0'
-   spec.add_dependency 'waterdrop', '~> 0.3.2.4'
-   spec.add_dependency 'rake', '~> 11.3'
+   spec.add_dependency 'waterdrop', '>= 0.4'
+   spec.add_dependency 'rake', '>= 11.3'
    spec.add_dependency 'thor', '~> 0.19'
-   spec.add_dependency 'activesupport', '~> 5.0'
-   spec.add_dependency 'dry-validation', '~> 0.10.6'
+   spec.add_dependency 'activesupport', '>= 5.0'
+   spec.add_dependency 'dry-validation', '~> 0.11'
    spec.add_dependency 'dry-configurable', '~> 0.7'
+   spec.add_dependency 'yajl-ruby', '>= 1.3.0'
+
+   spec.add_development_dependency 'bundler', '~> 1.2'
+   spec.add_development_dependency 'rspec', '>= 3.6'
+   spec.add_development_dependency 'simplecov', '>= 0.14'
+   spec.add_development_dependency 'byebug'
+
    spec.required_ruby_version = '>= 2.3.0'
 
    spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
    spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
-   spec.require_paths = %w( lib )
+   spec.require_paths = %w[lib]
  end
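The dependency story moves in two directions: runtime pins are loosened (ruby-kafka '>= 0.4', waterdrop '>= 0.4', activesupport '>= 5.0'), yajl-ruby is added for JSON handling, and the test tooling (rspec, simplecov, byebug) becomes explicit development dependencies. Since bundler skips prerelease versions by default, trying this RC requires an explicit pin; a minimal sketch:

# Gemfile - opting into the release candidate explicitly;
# a bare `gem 'karafka'` would still resolve to stable 0.5.x
source 'https://rubygems.org'

gem 'karafka', '0.6.0.rc1'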
data/lib/karafka.rb CHANGED
@@ -1,4 +1,6 @@
- %w(
+ # frozen_string_literal: true
+
+ %w[
    rake
    ostruct
    rubygems
@@ -11,10 +13,10 @@
    logger
    kafka
    sidekiq
-   worker_glass
    envlogic
    thor
    fileutils
+   yajl
    dry-configurable
    dry-validation
    active_support/callbacks
@@ -23,9 +25,10 @@
    active_support/descendants_tracker
    active_support/inflector
    karafka/loader
+   karafka/setup/config
    karafka/status
-   karafka/routing/route
- ).each { |lib| require lib }
+   karafka/routing/router
+ ].each(&method(:require))
 
  # Karafka library
  module Karafka
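The require loop's new closing line swaps the block for `&method(:require)`, which wraps `Kernel#require` in a Method object and passes it as the block. The two forms below are equivalent; the stdlib names are used purely as an example:

# method(:require) returns a Method object; the & prefix converts it
# into the block that #each yields each library name to
%w[json set].each { |lib| require lib }
%w[json set].each(&method(:require))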
data/lib/karafka/app.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Karafka
    # App class
    class App
@@ -10,7 +12,7 @@ module Karafka
      end
 
      # Sets up all the internal components and bootstrap whole app
-     # We need to know details about routes in order to setup components,
+     # We need to know details about consumers in order to setup components,
      # that's why we don't setup them after std setup is done
      # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
      #   doesn't match with ConfigurationSchema
@@ -24,8 +26,8 @@ module Karafka
        Setup::Config.config
      end
 
-     # @return [Karafka::Routing::Builder] routes builder instance
-     def routes
+     # @return [Karafka::Routing::Builder] consumers builder instance
+     def consumer_groups
        Routing::Builder.instance
      end
 
@@ -36,9 +38,11 @@ module Karafka
      end
 
      # Methods that should be delegated to Karafka module
-     %i(
-       root env logger monitor
-     ).each do |delegated|
+     %i[
+       root
+       env
+       logger monitor
+     ].each do |delegated|
        define_method(delegated) do
          Karafka.public_send(delegated)
        end
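The `routes` to `consumer_groups` rename mirrors the new routing layout, in which topics nest inside consumer groups (see the new routing/consumer_group.rb and routing/topic.rb in the file list). A sketch of how the renamed accessor is reached from a boot file; the exact DSL method names are an assumption based on those new files, not confirmed by this diff excerpt:

# Illustrative boot file - consumer_group/topic/controller DSL assumed
class App < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    config.kafka.seed_brokers = %w[127.0.0.1:9092]
  end
end

# Routing is now grouped: consumer groups contain topics
App.consumer_groups.draw do
  consumer_group :events do
    topic :user_events do
      controller UserEventsController
    end
  end
end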
data/lib/karafka/attributes_map.rb ADDED
@@ -0,0 +1,67 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Both Karafka and Ruby-Kafka contain a lot of settings that can be applied on multiple
+   # levels. In Karafka that is on the consumer group and on the topic level. In Ruby-Kafka it
+   # is on the consumer, subscription and consumption levels. In order to maintain order
+   # in managing those settings, this module was created. It contains details on where each
+   # setting should go and which layer (both in Karafka and Ruby-Kafka) is responsible for
+   # setting it and sending it forward
+   # @note Settings presented here cover all the settings that are being used across Karafka
+   module AttributesMap
+     class << self
+       # What settings should go where in ruby-kafka
+       # @note All other settings will be passed to the Kafka.new method invocation.
+       #   All elements in this hash are just edge cases
+       # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
+       def config_adapter
+         {
+           consumer: %i[
+             session_timeout offset_commit_interval offset_commit_threshold
+             offset_retention_time heartbeat_interval
+           ],
+           subscription: %i[start_from_beginning max_bytes_per_partition],
+           consuming: %i[min_bytes max_wait_time],
+           # All the options that are under the kafka config namespace, but are not used
+           # directly with the kafka api. From the Karafka user perspective they are
+           # still related to kafka. They should not be proxied anywhere
+           ignored: %i[reconnect_timeout]
+         }
+       end
+
+       # @return [Array<Symbol>] properties that can be set on a per topic level
+       def topic
+         (config_adapter[:subscription] + %i[
+           inline_processing
+           name
+           worker
+           parser
+           interchanger
+           responder
+           batch_processing
+         ]).uniq
+       end
+
+       # @return [Array<Symbol>] properties that can be set on a per consumer group level
+       # @note There are settings directly extracted from the config kafka namespace.
+       #   I did it that way, so I won't have to repeat the same setting keys over and over again
+       #   Thanks to this solution, if any new setting is available for ruby-kafka, we just need
+       #   to add it to our configuration class and it will be handled automatically.
+       def consumer_group
+         # @note We don't ignore the config_adapter[:ignored] values as they should be ignored
+         #   only when proxying details to ruby-kafka. We use ignored fields internally in karafka
+         ignored_settings = config_adapter[:subscription]
+         defined_settings = config_adapter.values.flatten
+         karafka_settings = %i[batch_consuming topic_mapper]
+         # This is a dirty and bad hack of dry-configurable to get keys before setting values
+         dynamically_proxied = Karafka::Setup::Config
+                               ._settings
+                               .find { |s| s.name == :kafka }
+                               .value
+                               .instance_variable_get('@klass').settings
+
+         (defined_settings + dynamically_proxied).uniq + karafka_settings - ignored_settings
+       end
+     end
+   end
+ end
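Because AttributesMap is a set of class-level methods over literal hashes, the mappings can be inspected directly in a console; the return values below are read straight off the definitions above:

require 'karafka'

# Per-topic settings: the subscription-level ruby-kafka options
# plus Karafka's own topic-level options
Karafka::AttributesMap.topic
# => [:start_from_beginning, :max_bytes_per_partition, :inline_processing,
#     :name, :worker, :parser, :interchanger, :responder, :batch_processing]

# Where a given option has to be proxied inside ruby-kafka
Karafka::AttributesMap.config_adapter[:consuming]
# => [:min_bytes, :max_wait_time]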
data/lib/karafka/base_controller.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Karafka module namespace
  module Karafka
    # Base controller from which all Karafka controllers should inherit
@@ -62,17 +64,10 @@ module Karafka
      # @see http://api.rubyonrails.org/classes/ActiveSupport/Callbacks/ClassMethods.html#method-i-get_callbacks
      define_callbacks :schedule
 
-     # This will be set based on routing settings
-     # From 0.4 a single controller can handle multiple topics jobs
-     # All the attributes are taken from route
-     Karafka::Routing::Route::ATTRIBUTES.each do |attr|
-       attr_reader attr
-
-       define_method(:"#{attr}=") do |new_attr_value|
-         instance_variable_set(:"@#{attr}", new_attr_value)
-         @params[attr] = new_attr_value if @params
-       end
-     end
+     # Each controller instance is always bound to a single topic. We don't place it on a class
+     # level because some programmers use the same controller for multiple topics
+     attr_accessor :topic
+     attr_accessor :params_batch
 
      class << self
        # Creates a callback that will be executed before scheduling to Sidekiq
@@ -91,30 +86,39 @@ module Karafka
        end
      end
 
-     # Creates lazy loaded params object
+     # Creates lazy loaded params batch object
      # @note Until first params usage, it won't parse data at all
-     # @param message [Karafka::Connection::Message, Hash] message with raw content or a hash
-     #   from Sidekiq that allows us to build params.
-     def params=(message)
-       @params = Karafka::Params::Params.build(message, self)
+     # @param messages [Array<Kafka::FetchedMessage>, Array<Hash>] messages with raw
+     #   content (from Kafka) or messages inside a hash (from Sidekiq, etc)
+     # @return [Karafka::Params::ParamsBatch] lazy loaded params batch
+     def params_batch=(messages)
+       @params_batch = Karafka::Params::ParamsBatch.new(messages, topic.parser)
+     end
+
+     # @return [Karafka::Params::Params] params instance for non batch processed controllers
+     # @raise [Karafka::Errors::ParamsMethodUnavailable] raised when we try to use the params
+     #   method in a batch_processed controller
+     def params
+       raise Karafka::Errors::ParamsMethodUnavailable if topic.batch_processing
+       params_batch.first
      end
 
      # Executes the default controller flow, runs callbacks and if not halted
-     # will schedule a perform task in sidekiq
+     # will schedule a call task in sidekiq
      def schedule
        run_callbacks :schedule do
-         inline_mode ? perform_inline : perform_async
+         topic.inline_processing ? call_inline : call_async
        end
      end
 
-     # @return [Hash] hash with all controller details - it works similar to #params method however
-     #   it won't parse data so it will return unparsed details about controller and its parameters
-     # @example Get data about ctrl
-     #   ctrl.to_h #=> { "worker"=>WorkerClass, "parsed"=>false, "content"=>"{}" }
-     def to_h
-       @params
+     # @note We want to leave the #perform method as a public API, but just in case we will do some
+     #   pre or post processing we use the call method instead of directly executing #perform
+     def call
+       perform
      end
 
+     private
+
      # Method that will perform business logic on data received from Kafka
      # @note This method needs to be implemented in a subclass. We stub it here as a failover if
      #   someone forgets about it or makes one with a typo
@@ -122,21 +126,10 @@ module Karafka
        raise NotImplementedError, 'Implement this in a subclass'
      end
 
-     private
-
-     # @return [Karafka::Params::Params] Karafka params that is a hash with indifferent access
-     # @note Params internally are lazy loaded before first use. That way we can skip parsing
-     #   process if we have before_enqueue that rejects some incoming messages without using params
-     #   It can be also used when handling really heavy data (in terms of parsing). Without direct
-     #   usage outside of worker scope, it will pass raw data into sidekiq, so we won't use Karafka
-     #   working time to parse this data. It will happen only in the worker (where it can take time)
-     #   that way Karafka will be able to process data really quickly. On the other hand, if we
-     #   decide to use params somewhere before it hits worker logic, it won't parse it again in
-     #   the worker - it will use already loaded data and pass it to Redis
-     # @note Invokation of this method will cause load all the data into params object. If you want
-     #   to get access without parsing, please access @params directly
-     def params
-       @params.retrieve
+     # @return [Karafka::BaseResponder] responder instance if defined
+     # @return [nil] nil if no responder for this controller
+     def responder
+       @responder ||= topic.responder&.new(topic.parser)
      end
 
      # Responds with given data using given responder. This allows us to have a similar way of
@@ -147,30 +140,27 @@ module Karafka
      #   but we still try to use this method
      def respond_with(*data)
        raise(Errors::ResponderMissing, self.class) unless responder
-
        Karafka.monitor.notice(self.class, data: data)
-       responder.new(parser).call(*data)
+       responder.call(*data)
      end
 
      # Executes perform code immediately (without enqueuing)
      # @note Despite the fact that workers won't be used, we still initialize all the
      #   classes and other framework elements
-     def perform_inline
-       Karafka.monitor.notice(self.class, to_h)
-       perform
+     def call_inline
+       Karafka.monitor.notice(self.class, params_batch)
+       call
      end
 
      # Enqueues the execution of perform method into a worker.
      # @note Each worker needs to have a class #perform_async method that will allow us to pass
-     #   parameters into it. We always pass topic as a first argument and this request params
+     #   parameters into it. We always pass topic as a first argument and this request params_batch
      #   as a second one (we pass topic to be able to build back the controller in the worker)
-     def perform_async
-       Karafka.monitor.notice(self.class, to_h)
-       # We use @params directly (instead of #params) because of lazy loading logic that is behind
-       # it. See Karafka::Params::Params class for more details about that
-       worker.perform_async(
-         topic,
-         interchanger.load(@params)
+     def call_async
+       Karafka.monitor.notice(self.class, params_batch)
+       topic.worker.perform_async(
+         topic.id,
+         topic.interchanger.load(params_batch.to_a)
        )
      end
    end
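Taken together, a 0.6-style controller touches these APIs roughly as follows. A hypothetical sketch for a non-batch-processed topic; the before_enqueue hook name follows the "callback executed before scheduling" comment above and pre-0.6 usage, and Event stands in for an application model:

class UserEventsController < Karafka::BaseController
  # Runs within the :schedule callbacks; throwing :abort halts the flow,
  # so the message is neither processed inline nor enqueued
  before_enqueue do
    throw(:abort) if params['event'].nil?
  end

  # With batch_processing enabled we would iterate params_batch instead,
  # and calling #params would raise ParamsMethodUnavailable
  def perform
    Event.create!(params['event'])
  end
end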
data/lib/karafka/base_responder.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Karafka
    # Base responder from which all Karafka responders should inherit
    # Similar to Rails responders concept. It allows us to design flow from one app to another
@@ -68,7 +70,7 @@
      # @param options [Hash] hash with optional configuration details
      def topic(topic_name, options = {})
        self.topics ||= {}
-       topic_obj = Responders::Topic.new(topic_name, options)
+       topic_obj = Responders::Topic.new(topic_name, options.merge(registered: true))
        self.topics[topic_obj.name] = topic_obj
      end
 
@@ -79,6 +81,9 @@
      # @example Send user data with a responder (uses default Karafka::Parsers::Json parser)
      #   UsersCreatedResponder.call(@created_user)
      def call(*data)
+       # Just in case there were no topics defined for a responder, we initialize with
+       # an empty hash so we don't have to handle a nil case
+       self.topics ||= {}
        new.call(*data)
      end
    end
@@ -109,6 +114,30 @@
 
    private
 
+   # Checks if we met all the topics requirements. It will fail if we didn't send a message to
+   #   a registered required topic, etc.
+   def validate!
+     registered_topics = self.class.topics.map do |name, topic|
+       topic.to_h.merge!(
+         usage_count: messages_buffer[name]&.count || 0
+       )
+     end
+
+     used_topics = messages_buffer.map do |name, usage|
+       topic = self.class.topics[name] || Responders::Topic.new(name, registered: false)
+       topic.to_h.merge!(usage_count: usage.count)
+     end
+
+     result = Karafka::Schemas::ResponderUsage.call(
+       registered_topics: registered_topics,
+       used_topics: used_topics
+     )
+
+     return if result.success?
+
+     raise Karafka::Errors::InvalidResponderUsage, result.errors
+   end
+
    # Method that needs to be implemented in a subclass. It should handle responding
    #   on registered topics
    # @raise [NotImplementedError] This method needs to be implemented in a subclass
@@ -129,19 +158,6 @@
      messages_buffer[topic.to_s] << [@parser_class.generate(data), options]
    end
 
-   # Checks if we met all the topics requirements. It will fail if we didn't send a message to
-   #   a registered required topic, etc.
-   def validate!
-     used_topics = messages_buffer.map do |key, data_elements|
-       Array.new(data_elements.count) { key }
-     end
-
-     Responders::UsageValidator.new(
-       self.class.topics || {},
-       used_topics.flatten
-     ).validate!
-   end
-
    # Takes all the messages from the buffer and delivers them one by one
    # @note This method is executed after the validation, so we're sure that
    #   what we send is legit and it will go to proper topics
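A sketch of a responder under the new registration and validation flow; the respond_to delivery helper matches the buffer-appending method shown above and pre-0.6 usage, so treat its exact signature as an assumption:

class UsersCreatedResponder < Karafka::BaseResponder
  # Registered via the class-level #topic method, so both carry
  # registered: true and are validated through Schemas::ResponderUsage
  topic :users_created
  topic :users_audit_log, required: false

  # .call(*data) runs #respond and then #validate! - skipping the
  # non-required audit topic passes; skipping :users_created would
  # raise InvalidResponderUsage
  def respond(user)
    respond_to :users_created, user
  end
end

UsersCreatedResponder.call(name: 'John')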
data/lib/karafka/base_worker.rb CHANGED
@@ -1,40 +1,25 @@
+ # frozen_string_literal: true
+
  module Karafka
    # Worker wrapper for Sidekiq workers
    class BaseWorker
      include Sidekiq::Worker
 
-     attr_accessor :params, :topic
-
      # Executes the logic that lies in #perform Karafka controller method
-     # @param topic [String] Topic that we will use to route to a proper controller
-     # @param params [Hash] params hash that we use to build Karafka params object
-     def perform(topic, params)
-       self.topic = topic
-       self.params = params
-       Karafka.monitor.notice(self.class, controller.to_h)
-       controller.perform
-     end
-
-     # What action should be taken when perform method fails
-     # @param topic [String] Topic that we will use to route to a proper controller
-     # @param params [Hash] params hash that we use to build Karafka params object
-     def after_failure(topic, params)
-       self.topic = topic
-       self.params = params
-
-       return unless controller.respond_to?(:after_failure)
-
-       Karafka.monitor.notice(self.class, controller.to_h)
-       controller.after_failure
+     # @param topic_id [String] Unique topic id that we will use to find a proper topic
+     # @param params_batch [Array] Array with messages batch
+     def perform(topic_id, params_batch)
+       Karafka.monitor.notice(self.class, params_batch)
+       controller(topic_id, params_batch).call
      end
 
      private
 
      # @return [Karafka::Controller] descendant of Karafka::BaseController that matches the topic
-     #   with params assigned already (controller is ready to use)
-     def controller
-       @controller ||= Karafka::Routing::Router.new(topic).build.tap do |ctrl|
-         ctrl.params = ctrl.interchanger.parse(params)
+     #   with params_batch assigned already (controller is ready to use)
+     def controller(topic_id, params_batch)
+       @controller ||= Karafka::Routing::Router.build(topic_id).tap do |ctrl|
+         ctrl.params_batch = ctrl.topic.interchanger.parse(params_batch)
        end
      end
    end
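With the attr_accessors gone, the worker is stateless: every job rebuilds its controller from the topic id. Sidekiq executes a job by instantiating the worker and invoking #perform, so the same flow can be exercised inline; the topic id and payload shape below are assumptions for illustration:

# Router.build('app_group_user_events') locates the routed topic,
# instantiates its controller and assigns the interchanged batch
Karafka::BaseWorker.new.perform(
  'app_group_user_events',
  [{ 'value' => '{"event":"user_created"}' }]
)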