karafka 1.1.2 → 1.2.0.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. checksums.yaml +5 -5
  2. data/.ruby-version +1 -1
  3. data/.travis.yml +1 -0
  4. data/CHANGELOG.md +34 -0
  5. data/Gemfile +1 -2
  6. data/Gemfile.lock +35 -22
  7. data/README.md +4 -3
  8. data/karafka.gemspec +5 -3
  9. data/lib/karafka.rb +4 -5
  10. data/lib/karafka/app.rb +8 -15
  11. data/lib/karafka/attributes_map.rb +1 -1
  12. data/lib/karafka/backends/inline.rb +1 -2
  13. data/lib/karafka/{base_controller.rb → base_consumer.rb} +19 -11
  14. data/lib/karafka/base_responder.rb +33 -14
  15. data/lib/karafka/callbacks.rb +30 -0
  16. data/lib/karafka/callbacks/config.rb +22 -0
  17. data/lib/karafka/callbacks/dsl.rb +16 -0
  18. data/lib/karafka/cli/install.rb +2 -3
  19. data/lib/karafka/cli/server.rb +0 -1
  20. data/lib/karafka/connection/{consumer.rb → client.rb} +25 -33
  21. data/lib/karafka/connection/config_adapter.rb +14 -6
  22. data/lib/karafka/connection/delegator.rb +46 -0
  23. data/lib/karafka/connection/listener.rb +22 -13
  24. data/lib/karafka/{controllers → consumers}/callbacks.rb +9 -9
  25. data/lib/karafka/consumers/includer.rb +51 -0
  26. data/lib/karafka/consumers/responders.rb +24 -0
  27. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  28. data/lib/karafka/errors.rb +10 -3
  29. data/lib/karafka/fetcher.rb +30 -34
  30. data/lib/karafka/helpers/class_matcher.rb +8 -8
  31. data/lib/karafka/helpers/config_retriever.rb +2 -2
  32. data/lib/karafka/instrumentation/listener.rb +97 -0
  33. data/lib/karafka/instrumentation/logger.rb +55 -0
  34. data/lib/karafka/instrumentation/monitor.rb +62 -0
  35. data/lib/karafka/loader.rb +0 -1
  36. data/lib/karafka/params/{params.rb → dsl.rb} +69 -44
  37. data/lib/karafka/params/params_batch.rb +2 -2
  38. data/lib/karafka/patches/dry_configurable.rb +6 -2
  39. data/lib/karafka/patches/ruby_kafka.rb +10 -10
  40. data/lib/karafka/persistence/client.rb +25 -0
  41. data/lib/karafka/persistence/consumer.rb +27 -14
  42. data/lib/karafka/persistence/topic.rb +29 -0
  43. data/lib/karafka/process.rb +5 -4
  44. data/lib/karafka/responders/builder.rb +15 -14
  45. data/lib/karafka/routing/builder.rb +1 -1
  46. data/lib/karafka/routing/consumer_mapper.rb +3 -2
  47. data/lib/karafka/routing/router.rb +1 -1
  48. data/lib/karafka/routing/topic.rb +5 -5
  49. data/lib/karafka/schemas/config.rb +3 -0
  50. data/lib/karafka/schemas/consumer_group.rb +14 -2
  51. data/lib/karafka/schemas/consumer_group_topic.rb +1 -1
  52. data/lib/karafka/server.rb +33 -5
  53. data/lib/karafka/setup/config.rb +45 -21
  54. data/lib/karafka/setup/configurators/base.rb +6 -12
  55. data/lib/karafka/setup/configurators/params.rb +25 -0
  56. data/lib/karafka/setup/configurators/water_drop.rb +6 -3
  57. data/lib/karafka/setup/dsl.rb +22 -0
  58. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.example} +2 -3
  59. data/lib/karafka/templates/karafka.rb.example +14 -3
  60. data/lib/karafka/version.rb +1 -1
  61. metadata +58 -23
  62. data/lib/karafka/connection/processor.rb +0 -61
  63. data/lib/karafka/controllers/includer.rb +0 -51
  64. data/lib/karafka/controllers/responders.rb +0 -19
  65. data/lib/karafka/logger.rb +0 -53
  66. data/lib/karafka/monitor.rb +0 -98
  67. data/lib/karafka/persistence/controller.rb +0 -38
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
-SHA1:
-  metadata.gz: c472e65702014af8a34972c3be5f9c552c71fbf7
-  data.tar.gz: e5664770c53adc058e6b41d0e6473022ac45276b
+SHA256:
+  metadata.gz: 3155a5ed8f8a5ebabcef64de6c30256d6555dfa4af20bbe48527a7080226fe35
+  data.tar.gz: 26c9e9adc3a23ce31848280d1d855069bd384e91f35e29e2ac6672c838082d01
 SHA512:
-  metadata.gz: bf3a7b57f04795b04987be56c8d1d805237efe32a6547f45bb32a15ad6184b62f8a920be96d67d45f431689f36b4c3d83ae3c52d426c5c99508da3cf7aa57672
-  data.tar.gz: e97ba211015095db1516c88e572c319013094ef90e22b6a0c1388ef3262c0417cefa170fa53fb669250bae59e3c6aea9358eb409cf640b8b7255798596c0f80e
+  metadata.gz: d9b791b522c88b9c594282679d9788d4fc76e7660525946c51e60d6c67857777bfb5d6b1fec60a5a9a695d1e5bee5ac8e19669d86f833f31b51f2055cb51dc16
+  data.tar.gz: a8aede80af84e97d26222908cb5b759e3c3e4d7da1bee802806511bdc9dcf013d03d902e8a77acdfcb38bbb46d7ba5dee5dffd2442a757b9781e4d41b2623580
data/.ruby-version CHANGED
@@ -1 +1 @@
-2.4.2
+2.5.0
data/.travis.yml CHANGED
@@ -9,6 +9,7 @@ rvm:
   - 2.4.0
   - 2.4.1
   - 2.4.2
+  - 2.5.0
   - jruby-head
 script: bundle exec rspec spec/
 env:
data/CHANGELOG.md CHANGED
@@ -1,5 +1,39 @@
 # Karafka framework changelog
 
+## 1.2.0-beta1
+- Spec improvements
+- #260 - Specs missing randomization
+- #251 - Shutdown upon non responding (unreachable) cluster is not possible
+- #258 - Investigate lowering requirements on activesupport
+- #246 - Alias consumer#mark_as_consumed on controller
+- #259 - Allow forcing key/partition key on responders
+- #267 - Styling inconsistency
+- #242 - Support setting the max bytes to fetch per request
+- #247 - Support SCRAM once released
+- #271 - Provide an after_init option to pass a configuration block
+- #262 - Error in the monitor code for NewRelic
+- #241 - Performance metrics
+- #274 - Rename controllers to consumers
+- #184 - Seek to
+- #284 - Dynamic Params parent class
+- #275 - ssl_ca_certs_from_system
+- Replaced some of the activesupport parts with dry-inflector
+- Lower ActiveSupport dependency
+- Remove configurators in favor of the after_init block configurator
+- Ruby 2.5.0 support
+- Renamed Karafka::Connection::Processor to Karafka::Connection::Delegator to match incoming naming conventions
+- Renamed Karafka::Connection::Consumer to Karafka::Connection::Client due to #274
+- Removed HashWithIndifferentAccess in favor of a regular hash
+- JSON parsing defaults now to string keys
+- Lower memory usage due to less params data internal details
+- Support multiple ```after_init``` blocks in favor of a single one
+- Renamed ```received_at``` to ```receive_time``` to follow ruby-kafka and WaterDrop conventions
+- Adjust internal setup to easier map Ruby-Kafka config changes
+- System callbacks reorganization
+- Added ```before_fetch_loop``` configuration block for early client usage (```#seek```, etc)
+- Renamed ```after_fetched``` to ```after_fetch``` to normalize the naming convention
+- Instrumentation on a connection delegator level
+
 ## 1.1.2
 - #256 - Default kafka.seed_brokers configuration is created in invalid format
 
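Several of the entries above change the boot-time API (configurators removed, multiple `after_init` blocks, the new `before_fetch_loop` hook). As a hedged illustration only (not part of this diff; the client id, broker address and log lines are placeholders), a `karafka.rb` boot file could wire these up roughly like so:

```ruby
# Hypothetical karafka.rb sketch for the 1.2.0.beta1 callback changes
class App < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
  end

  # Configurators are gone; several after_init blocks may now be registered
  after_init do |config|
    Karafka.logger.info("Booted #{config.client_id}")
  end

  after_init do |_config|
    # A second block (e.g. monitoring hookup) is also allowed in 1.2
  end

  # Runs before the fetch loop starts, giving early access to the client (#seek, etc.)
  before_fetch_loop do |consumer_group, _client|
    Karafka.logger.info("Fetch loop starting for #{consumer_group.id}")
  end
end
```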
data/Gemfile CHANGED
@@ -5,8 +5,7 @@ source 'https://rubygems.org'
 gemspec
 
 group :development, :test do
-  gem 'waterdrop'
-  gem 'timecop'
   gem 'rspec'
   gem 'simplecov'
+  gem 'timecop'
 end
data/Gemfile.lock CHANGED
@@ -1,30 +1,32 @@
 PATH
   remote: .
   specs:
-    karafka (1.1.2)
-      activesupport (>= 5.0)
+    karafka (1.2.0.beta1)
+      activesupport (>= 4.0)
       dry-configurable (~> 0.7)
+      dry-inflector (~> 0.1.1)
+      dry-monitor (~> 0.1)
       dry-validation (~> 0.11)
       envlogic (~> 1.0)
       multi_json (>= 1.12)
       rake (>= 11.3)
       require_all (>= 1.4)
-      ruby-kafka (>= 0.5)
+      ruby-kafka (>= 0.5.3)
       thor (~> 0.19)
-      waterdrop (>= 1.0.1)
+      waterdrop (>= 1.2.0.beta1)
 
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (5.1.4)
+    activesupport (5.1.5)
       concurrent-ruby (~> 1.0, >= 1.0.2)
       i18n (~> 0.7)
       minitest (~> 5.1)
       tzinfo (~> 1.1)
     concurrent-ruby (1.0.5)
-    delivery_boy (0.2.2)
+    delivery_boy (0.2.4)
       king_konf (~> 0.1.8)
-      ruby-kafka (~> 0.4)
+      ruby-kafka (~> 0.5.1)
     diff-lcs (1.3)
     docile (1.1.5)
     dry-configurable (0.7.0)
@@ -32,13 +34,23 @@ GEM
     dry-container (0.6.0)
       concurrent-ruby (~> 1.0)
       dry-configurable (~> 0.1, >= 0.1.3)
-    dry-core (0.4.1)
+    dry-core (0.4.4)
       concurrent-ruby (~> 1.0)
     dry-equalizer (0.2.0)
+    dry-events (0.1.0)
+      concurrent-ruby (~> 1.0)
+      dry-core (~> 0.4)
+      dry-equalizer (~> 0.2)
+    dry-inflector (0.1.1)
     dry-logic (0.4.2)
       dry-container (~> 0.2, >= 0.2.6)
       dry-core (~> 0.2)
       dry-equalizer (~> 0.2)
+    dry-monitor (0.1.2)
+      dry-configurable (~> 0.5)
+      dry-equalizer (~> 0.2)
+      dry-events (~> 0.1)
+      rouge (~> 2.0, >= 2.2.1)
     dry-types (0.12.2)
       concurrent-ruby (~> 1.0)
       dry-configurable (~> 0.1)
@@ -56,21 +68,22 @@ GEM
       dry-types (~> 0.12.0)
     envlogic (1.0.4)
       activesupport
-    i18n (0.9.1)
+    i18n (0.9.5)
       concurrent-ruby (~> 1.0)
     inflecto (0.0.2)
     json (2.1.0)
-    king_konf (0.1.8)
-    minitest (5.10.3)
-    multi_json (1.12.2)
+    king_konf (0.1.10)
+    minitest (5.11.3)
+    multi_json (1.13.1)
     null-logger (0.1.4)
     rake (12.3.0)
-    require_all (1.4.0)
+    require_all (1.5.0)
+    rouge (2.2.1)
     rspec (3.7.0)
       rspec-core (~> 3.7.0)
       rspec-expectations (~> 3.7.0)
       rspec-mocks (~> 3.7.0)
-    rspec-core (3.7.0)
+    rspec-core (3.7.1)
       rspec-support (~> 3.7.0)
     rspec-expectations (3.7.0)
       diff-lcs (>= 1.2.0, < 2.0)
@@ -78,8 +91,8 @@ GEM
     rspec-mocks (3.7.0)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.7.0)
-    rspec-support (3.7.0)
-    ruby-kafka (0.5.0)
+    rspec-support (3.7.1)
+    ruby-kafka (0.5.3)
     simplecov (0.15.1)
       docile (~> 1.1.0)
       json (>= 1.8, < 3)
@@ -88,14 +101,15 @@ GEM
     thor (0.20.0)
     thread_safe (0.3.6)
     timecop (0.9.1)
-    tzinfo (1.2.4)
+    tzinfo (1.2.5)
       thread_safe (~> 0.1)
-    waterdrop (1.0.1)
-      delivery_boy (>= 0.2.2)
+    waterdrop (1.2.0.beta1)
+      delivery_boy (>= 0.2.3)
       dry-configurable (~> 0.7)
+      dry-monitor (~> 0.1)
       dry-validation (~> 0.11)
       null-logger
-      ruby-kafka (>= 0.5)
+      ruby-kafka (>= 0.5.3)
 
 PLATFORMS
   ruby
@@ -105,7 +119,6 @@ DEPENDENCIES
   rspec
   simplecov
   timecop
-  waterdrop
 
 BUNDLED WITH
-   1.16.0
+   1.16.1
data/README.md CHANGED
@@ -14,10 +14,11 @@ Karafka provides a higher-level abstraction that allows you to focus on your bus
 
 ### Some things you might wonder about:
 
-- You can integrate Karafka with any Ruby based application.
+- You can integrate Karafka with **any** Ruby based application.
 - Karafka does **not** require Sidekiq or any other third party software (apart from Kafka itself).
-- Karafka works with Ruby on Rails but it is a standalone framework that can work without it.
-- Karafka has a minimal set of dependencies, so adding it won't be a huge burden for your already existing applications.
+- Karafka works with Ruby on Rails but it is a **standalone** framework that can work without it.
+- Karafka has a **minimal** set of dependencies, so adding it won't be a huge burden for your already existing applications.
+- Karafka processes can be executed for a **given subset** of consumer groups and/or topics, so you can fine tune it depending on your business logic.
 
 Karafka based applications can be easily deployed to any type of infrastructure, including those based on:
data/karafka.gemspec CHANGED
@@ -16,16 +16,18 @@ Gem::Specification.new do |spec|
   spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
   spec.license = 'MIT'
 
-  spec.add_dependency 'activesupport', '>= 5.0'
+  spec.add_dependency 'activesupport', '>= 4.0'
   spec.add_dependency 'dry-configurable', '~> 0.7'
+  spec.add_dependency 'dry-inflector', '~> 0.1.1'
+  spec.add_dependency 'dry-monitor', '~> 0.1'
   spec.add_dependency 'dry-validation', '~> 0.11'
   spec.add_dependency 'envlogic', '~> 1.0'
   spec.add_dependency 'multi_json', '>= 1.12'
   spec.add_dependency 'rake', '>= 11.3'
   spec.add_dependency 'require_all', '>= 1.4'
-  spec.add_dependency 'ruby-kafka', '>= 0.5'
+  spec.add_dependency 'ruby-kafka', '>= 0.5.3'
   spec.add_dependency 'thor', '~> 0.19'
-  spec.add_dependency 'waterdrop', '>= 1.0.1'
+  spec.add_dependency 'waterdrop', '>= 1.2.0.beta1'
 
   spec.required_ruby_version = '>= 2.3.0'
 
data/lib/karafka.rb CHANGED
@@ -11,10 +11,9 @@
   require_all
   dry-configurable
   dry-validation
+  dry/inflector
+  dry/monitor/notifications
   active_support/callbacks
-  active_support/core_ext/hash/indifferent_access
-  active_support/descendants_tracker
-  active_support/inflector
   karafka/loader
 ].each(&method(:require))
 
@@ -28,7 +27,7 @@ module Karafka
       @logger ||= App.config.logger
     end
 
-    # @return [::Karafka::Monitor] monitor that we want to use. Will use dummy monitor by default
+    # @return [::Karafka::Monitor] monitor that we want to use
    def monitor
      @monitor ||= App.config.monitor
    end
@@ -51,7 +50,7 @@ module Karafka
     # @return [String] path to a default file that contains booting procedure etc
     # @note By default it is a file called 'karafka.rb' but it can be specified as you wish if you
     #   have Karafka that is merged into a Sinatra/Rails app and karafka.rb is taken.
-    #   It will be used for console/controllers/etc
+    #   It will be used for console/consumers/etc
     # @example Standard only-Karafka case
     #   Karafka.boot_file #=> '/home/app_path/karafka.rb'
     # @example Non standard case
data/lib/karafka/app.rb CHANGED
@@ -3,14 +3,10 @@
 module Karafka
   # App class
   class App
-    class << self
-      # Sets up the whole configuration
-      # @param [Block] block configuration block
-      def setup(&block)
-        Setup::Config.setup(&block)
-        initialize!
-      end
+    extend Setup::Dsl
+    extend Callbacks::Dsl
 
+    class << self
       # Sets up all the internal components and bootstrap whole app
       # We need to know details about consumers in order to setup components,
       # that's why we don't setup them after std setup is done
@@ -19,11 +15,7 @@ module Karafka
       def boot!
         Setup::Config.validate!
         Setup::Config.setup_components
-      end
-
-      # @return [Karafka::Config] config instance
-      def config
-        Setup::Config.config
+        Callbacks.after_init(Karafka::App.config)
       end
 
       # @return [Karafka::Routing::Builder] consumers builder instance
@@ -33,7 +25,7 @@ module Karafka
 
       Status.instance_methods(false).each do |delegated|
         define_method(delegated) do
-          Status.instance.public_send(delegated)
+          Status.instance.send(delegated)
         end
       end
 
@@ -41,10 +33,11 @@ module Karafka
      %i[
        root
        env
-       logger monitor
+       logger
+       monitor
      ].each do |delegated|
        define_method(delegated) do
-         Karafka.public_send(delegated)
+         Karafka.send(delegated)
        end
      end
    end
data/lib/karafka/attributes_map.rb CHANGED
@@ -21,7 +21,7 @@ module Karafka
           offset_retention_time heartbeat_interval
         ],
         subscription: %i[start_from_beginning max_bytes_per_partition],
-        consuming: %i[min_bytes max_wait_time],
+        consuming: %i[min_bytes max_bytes max_wait_time],
         pausing: %i[pause_timeout],
         # All the options that are under kafka config namespace, but are not used
         # directly with kafka api, but from the Karafka user perspective, they are
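Since `max_bytes` now flows through the consuming attributes (#242), it can be set next to the other fetch tuning options. A hedged sketch (values are arbitrary placeholders):

```ruby
Karafka::App.setup do |config|
  config.kafka.min_bytes = 1            # return as soon as any data is available
  config.kafka.max_bytes = 10_485_760   # new in 1.2: cap a single fetch response (~10MB)
  config.kafka.max_wait_time = 1        # max seconds to wait for min_bytes
end
```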
data/lib/karafka/backends/inline.rb CHANGED
@@ -9,8 +9,7 @@ module Karafka
 
       # Executes consume code immediately (without enqueuing)
       def process
-        Karafka.monitor.notice(self.class, params_batch)
-        consume
+        Karafka.monitor.instrument('backends.inline.process', caller: self) { consume }
       end
     end
   end
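`Monitor#notice` is gone; the inline backend now emits a dry-monitor style event instead. Assuming the `#subscribe` API of the new `Karafka::Instrumentation::Monitor` (file 34 in the list above), observing this event might look like:

```ruby
# Illustrative subscriber for the event instrumented above
Karafka.monitor.subscribe('backends.inline.process') do |event|
  # :caller is the payload key passed to #instrument in the diff
  Karafka.logger.info("Inline processing done by #{event[:caller].class}")
end
```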
data/lib/karafka/{base_controller.rb → base_consumer.rb} RENAMED
@@ -2,24 +2,32 @@
 
 # Karafka module namespace
 module Karafka
-  # Base controller from which all Karafka controllers should inherit
-  class BaseController
+  # Base consumer from which all Karafka consumers should inherit
+  class BaseConsumer
     extend ActiveSupport::DescendantsTracker
+    extend Forwardable
+
+    # Allows us to mark messages as consumed for non-automatic mode without having
+    # to use consumer client directly. We do this that way, because most of the people should not
+    # mess with the client instance directly (just in case)
+    def_delegator :client, :mark_as_consumed
+
+    private :mark_as_consumed
 
     class << self
       attr_reader :topic
 
-      # Assigns a topic to a controller and build up proper controller functionalities, so it can
-      #   cooperate with the topic settings
+      # Assigns a topic to a consumer and builds up proper consumer functionalities
+      #   so that it can cooperate with the topic settings
       # @param topic [Karafka::Routing::Topic]
       # @return [Karafka::Routing::Topic] assigned topic
       def topic=(topic)
         @topic = topic
-        Controllers::Includer.call(self)
+        Consumers::Includer.call(self)
       end
     end
 
-    # @return [Karafka::Routing::Topic] topic to which a given controller is subscribed
+    # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
     def topic
       self.class.topic
     end
@@ -33,20 +41,20 @@ module Karafka
       @params_batch = Karafka::Params::ParamsBatch.new(messages, topic.parser)
     end
 
-    # Executes the default controller flow.
+    # Executes the default consumer flow.
     def call
       process
     end
 
     private
 
-    # We make it private as it should be accesible only from the inside of a controller
+    # We make it private as it should be accessible only from the inside of a consumer
     attr_reader :params_batch
 
-    # @return [Karafka::Connection::Consumer] messages consumer that can be used to
+    # @return [Karafka::Connection::Client] messages consuming client that can be used to
     #   commit manually offset or pause / stop consumer based on the business logic
-    def consumer
-      Persistence::Consumer.read
+    def client
+      Persistence::Client.read
     end
 
     # Method that will perform business logic and on data received from Kafka (it will consume
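With the `mark_as_consumed` delegation above, manual offset commits no longer require touching the client. An illustrative consumer (class, topic and the persistence call are placeholders):

```ruby
class EventsConsumer < ApplicationConsumer
  def consume
    params_batch.each { |params| EventStore.save!(params) } # hypothetical persistence
    # Private helper delegated to the client - commits the offset of the last message
    mark_as_consumed(params_batch.to_a.last)
  end
end
```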
data/lib/karafka/base_responder.rb CHANGED
@@ -62,6 +62,11 @@ module Karafka
     # Definitions of all topics that we want to be able to use in this responder should go here
     class_attribute :topics
 
+    # Schema that we can use to control and/or require some additional details upon options
+    # that are being passed to the producer. This can be in particular useful if we want to make
+    # sure that for example partition_key is always present.
+    class_attribute :options_schema
+
     attr_reader :messages_buffer
 
     class << self
@@ -108,7 +113,8 @@ module Karafka
     #   UsersCreatedResponder.new(MyParser).call(@created_user)
     def call(*data)
       respond(*data)
-      validate!
+      validate_usage!
+      validate_options!
       deliver!
     end
 
@@ -116,7 +122,7 @@ module Karafka
 
     # Checks if we met all the topics requirements. It will fail if we didn't send a message to
     #   a registered required topic, etc.
-    def validate!
+    def validate_usage!
       registered_topics = self.class.topics.map do |name, topic|
         topic.to_h.merge!(
           usage_count: messages_buffer[name]&.count || 0
@@ -138,20 +144,26 @@ module Karafka
       raise Karafka::Errors::InvalidResponderUsage, result.errors
     end
 
+    # Checks if we met all the options requirements before sending them to the producer.
+    def validate_options!
+      return true unless self.class.options_schema
+
+      messages_buffer.each_value do |messages_set|
+        messages_set.each do |message_data|
+          result = self.class.options_schema.call(message_data.last)
+          next if result.success?
+          raise Karafka::Errors::InvalidResponderMessageOptions, result.errors
+        end
+      end
+    end
+
     # Takes all the messages from the buffer and delivers them one by one
     # @note This method is executed after the validation, so we're sure that
     #   what we send is legit and it will go to a proper topics
     def deliver!
-      messages_buffer.each do |topic, data_elements|
-        # We map this topic name, so it will match namespaced/etc topic in Kafka
-        # @note By default will not change topic (if default mapper used)
-        mapped_topic = Karafka::App.config.topic_mapper.outgoing(topic)
-
+      messages_buffer.each_value do |data_elements|
         data_elements.each do |data, options|
-          producer(options).call(
-            data,
-            options.merge(topic: mapped_topic)
-          )
+          producer(options).call(data, options)
         end
       end
     end
@@ -170,10 +182,17 @@ module Karafka
     # @param options [Hash] options for waterdrop (e.g. partition_key)
     # @note Respond to does not accept multiple data arguments.
     def respond_to(topic, data, options = {})
-      Karafka.monitor.notice(self.class, topic: topic, data: data, options: options)
+      # We normalize the format to string, as WaterDrop and Ruby-Kafka support only
+      # string topics
+      topic = topic.to_s
 
-      messages_buffer[topic.to_s] ||= []
-      messages_buffer[topic.to_s] << [@parser_class.generate(data), options]
+      messages_buffer[topic] ||= []
+      messages_buffer[topic] << [
+        @parser_class.generate(data),
+        # We map this topic name, so it will match namespaced/etc topic in Kafka
+        # @note By default will not change topic (if default mapper used)
+        options.merge(topic: Karafka::App.config.topic_mapper.outgoing(topic))
+      ]
     end
 
     # @param options [Hash] options for waterdrop
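The new `options_schema` hook pairs with `validate_options!` above to enforce per-message delivery options such as a forced `partition_key` (#259). A sketch assuming the dry-validation 0.11 schema syntax this version depends on (class and topic names are placeholders):

```ruby
class UserCreatedResponder < ApplicationResponder
  topic :users_created

  # validate_options! runs this schema against the options of every buffered message
  self.options_schema = Dry::Validation.Schema do
    required(:partition_key).filled(:str?)
  end

  def respond(user)
    # Raises Karafka::Errors::InvalidResponderMessageOptions when partition_key is missing
    respond_to :users_created, user, partition_key: user['id'].to_s
  end
end
```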