karafka 1.2.13 → 1.3.4

Files changed (107)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +2 -0
  4. data/{.coditsu.yml → .coditsu/ci.yml} +1 -1
  5. data/.console_irbrc +1 -3
  6. data/.github/FUNDING.yml +3 -0
  7. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  8. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/.travis.yml +8 -21
  12. data/CHANGELOG.md +91 -18
  13. data/CONTRIBUTING.md +1 -1
  14. data/Gemfile +2 -5
  15. data/Gemfile.lock +79 -65
  16. data/README.md +10 -11
  17. data/bin/karafka +1 -1
  18. data/certs/mensfeld.pem +25 -0
  19. data/config/errors.yml +38 -5
  20. data/karafka.gemspec +14 -12
  21. data/lib/karafka.rb +8 -15
  22. data/lib/karafka/app.rb +14 -6
  23. data/lib/karafka/attributes_map.rb +5 -12
  24. data/lib/karafka/base_consumer.rb +19 -30
  25. data/lib/karafka/base_responder.rb +45 -27
  26. data/lib/karafka/cli.rb +1 -1
  27. data/lib/karafka/cli/console.rb +11 -9
  28. data/lib/karafka/cli/flow.rb +5 -2
  29. data/lib/karafka/cli/info.rb +3 -1
  30. data/lib/karafka/cli/install.rb +30 -6
  31. data/lib/karafka/cli/server.rb +11 -6
  32. data/lib/karafka/code_reloader.rb +67 -0
  33. data/lib/karafka/connection/api_adapter.rb +11 -4
  34. data/lib/karafka/connection/batch_delegator.rb +51 -0
  35. data/lib/karafka/connection/builder.rb +1 -1
  36. data/lib/karafka/connection/client.rb +30 -20
  37. data/lib/karafka/connection/listener.rb +24 -13
  38. data/lib/karafka/connection/message_delegator.rb +36 -0
  39. data/lib/karafka/consumers/callbacks.rb +32 -15
  40. data/lib/karafka/consumers/includer.rb +30 -18
  41. data/lib/karafka/consumers/metadata.rb +10 -0
  42. data/lib/karafka/consumers/responders.rb +2 -2
  43. data/lib/karafka/contracts.rb +10 -0
  44. data/lib/karafka/contracts/config.rb +21 -0
  45. data/lib/karafka/contracts/consumer_group.rb +206 -0
  46. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  47. data/lib/karafka/contracts/responder_usage.rb +54 -0
  48. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  49. data/lib/karafka/errors.rb +17 -16
  50. data/lib/karafka/fetcher.rb +28 -30
  51. data/lib/karafka/helpers/class_matcher.rb +6 -2
  52. data/lib/karafka/helpers/config_retriever.rb +1 -1
  53. data/lib/karafka/helpers/inflector.rb +26 -0
  54. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  55. data/lib/karafka/instrumentation/logger.rb +9 -6
  56. data/lib/karafka/instrumentation/monitor.rb +15 -9
  57. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  58. data/lib/karafka/instrumentation/stdout_listener.rb +138 -0
  59. data/lib/karafka/params/builders/metadata.rb +33 -0
  60. data/lib/karafka/params/builders/params.rb +36 -0
  61. data/lib/karafka/params/builders/params_batch.rb +25 -0
  62. data/lib/karafka/params/metadata.rb +35 -0
  63. data/lib/karafka/params/params.rb +68 -0
  64. data/lib/karafka/params/params_batch.rb +35 -20
  65. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  66. data/lib/karafka/persistence/client.rb +15 -11
  67. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  68. data/lib/karafka/persistence/topics.rb +48 -0
  69. data/lib/karafka/process.rb +0 -2
  70. data/lib/karafka/responders/topic.rb +6 -8
  71. data/lib/karafka/routing/builder.rb +36 -8
  72. data/lib/karafka/routing/consumer_group.rb +1 -1
  73. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  74. data/lib/karafka/routing/proxy.rb +10 -1
  75. data/lib/karafka/routing/topic.rb +5 -3
  76. data/lib/karafka/routing/topic_mapper.rb +16 -18
  77. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  78. data/lib/karafka/serialization/json/serializer.rb +31 -0
  79. data/lib/karafka/server.rb +25 -27
  80. data/lib/karafka/setup/config.rb +63 -37
  81. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  82. data/lib/karafka/setup/dsl.rb +0 -1
  83. data/lib/karafka/status.rb +7 -3
  84. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  85. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  86. data/lib/karafka/templates/karafka.rb.erb +92 -0
  87. data/lib/karafka/version.rb +1 -1
  88. metadata +90 -57
  89. metadata.gz.sig +0 -0
  90. data/lib/karafka/callbacks.rb +0 -30
  91. data/lib/karafka/callbacks/config.rb +0 -22
  92. data/lib/karafka/callbacks/dsl.rb +0 -16
  93. data/lib/karafka/connection/delegator.rb +0 -46
  94. data/lib/karafka/instrumentation/listener.rb +0 -112
  95. data/lib/karafka/loader.rb +0 -28
  96. data/lib/karafka/params/dsl.rb +0 -158
  97. data/lib/karafka/parsers/json.rb +0 -38
  98. data/lib/karafka/patches/dry_configurable.rb +0 -33
  99. data/lib/karafka/persistence/topic.rb +0 -29
  100. data/lib/karafka/schemas/config.rb +0 -24
  101. data/lib/karafka/schemas/consumer_group.rb +0 -79
  102. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  103. data/lib/karafka/schemas/responder_usage.rb +0 -39
  104. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  105. data/lib/karafka/setup/configurators/base.rb +0 -29
  106. data/lib/karafka/setup/configurators/params.rb +0 -25
  107. data/lib/karafka/templates/karafka.rb.example +0 -54
data/README.md CHANGED
@@ -2,6 +2,14 @@
 
  [![Build Status](https://travis-ci.org/karafka/karafka.svg?branch=master)](https://travis-ci.org/karafka/karafka)
 
+ **Note**: Documentation presented here refers to Karafka `1.3.0`.
+
+ If you're upgrading from `1.2.0`, please refer to our [Upgrade Notes article](https://mensfeld.pl/2019/09/karafka-framework-1-3-0-release-notes-ruby-kafka/).
+
+ If you are looking for the documentation for Karafka `1.2.*`, it can be found [here](https://github.com/karafka/wiki/tree/1.2).
+
+ ## About Karafka
+
  Framework used to simplify Apache Kafka based Ruby applications development.
 
  Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming and processing this data, without having to focus on things that are not your business domain.
@@ -27,15 +35,6 @@ Karafka based applications can be easily deployed to any type of infrastructure,
  * Docker
  * Terraform
 
- ## Kafka 0.10 or prior
-
- If you're using Kafka 0.10, please lock `ruby-kafka` gem in your Gemfile to version `0.6.8`:
-
- ```ruby
- gem 'karafka'
- gem 'ruby-kafka', '~> 0.6.8'
- ```
-
  ## Support
 
  Karafka has a [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup, and deployment along with other useful details on how to run Karafka.
@@ -52,7 +51,7 @@ If you're completely new to the subject, you can start with our "Kafka on Rails"
  If you want to get started with Kafka and Karafka as fast as possible, then the best idea is to just clone our example repository:
 
  ```bash
- git clone https://github.com/karafka/karafka-example-app ./example_app
+ git clone https://github.com/karafka/example-app ./example_app
  ```
 
  then, just bundle install all the dependencies:
@@ -62,7 +61,7 @@ cd ./example_app
  bundle install
  ```
 
- and follow the instructions from the [example app Wiki](https://github.com/karafka/karafka-example-app/blob/master/README.md).
+ and follow the instructions from the [example app Wiki](https://github.com/karafka/example-app/blob/master/README.md).
 
  **Note**: you need to ensure, that you have Kafka up and running and you need to configure Kafka seed_brokers in the ```karafka.rb``` file.
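To make the note above concrete, a minimal `karafka.rb` setup for 1.3 could look like the sketch below; the broker address and client id are placeholders, not values taken from this diff.

```ruby
# frozen_string_literal: true

# Minimal boot file sketch - seed_brokers and client_id are illustrative
# and must match your own Kafka cluster and application.
class KarafkaApp < Karafka::App
  setup do |config|
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    config.client_id = 'example_app'
  end
end

KarafkaApp.boot!
```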
data/bin/karafka CHANGED
@@ -10,7 +10,7 @@ else
    # However when it is unavailable, we still want to be able to run help command
    # and install command as they don't require configured app itself to run
    raise(
-     Karafka::Errors::MissingBootFile,
+     Karafka::Errors::MissingBootFileError,
      Karafka.boot_file
    ) unless %w[-h install].include?(ARGV[0])
  end
data/certs/mensfeld.pem ADDED
@@ -0,0 +1,25 @@
+ -----BEGIN CERTIFICATE-----
+ MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
+ ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMTkwNzMwMTQ1NDU0WhcNMjAwNzI5MTQ1
+ NDU0WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
+ CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC9fCwtaHZG2SyyNXiH8r0QbJQx/xxl
+ dkvwWz9QGJO+O8rEx20FB1Ab+MVkfOscwIv5jWpmk1U9whzDPl1uFtIbgu+sk+Zb
+ uQlZyK/DPN6c+/BbBL+RryTBRyvkPLoCVwm7uxc/JZ1n4AI6eF4cCZ2ieZ9QgQbU
+ MQs2QPqs9hT50Ez/40GnOdadVfiDDGz+NME2C4ms0BriXwZ1tcRTfJIHe2xjIbbb
+ y5qRGfsLKcgMzvLQR24olixyX1MR0s4+Wveq3QL/gBhL4veUcv+UABJA8IJR0kyB
+ seHHutusiwZ1v3SjjjW1xLLrc2ARV0mgCb0WaK2T4iA3oFTGLh6Ydz8LNl31KQFv
+ 94nRd8IhmJxrhQ6dQ/WT9IXoa5S9lfT5lPJeINemH4/6QPABzf9W2IZlCdI9wCdB
+ TBaw57MKneGAYZiKjw6OALSy2ltQUCl3RqFl3VP7n8uFy1U987Q5VIIQ3O1UUsQD
+ Oe/h+r7GUU4RSPKgPlrwvW9bD/UQ+zF51v8CAwEAAaN3MHUwCQYDVR0TBAIwADAL
+ BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFJNIBHdfEUD7TqHqIer2YhWaWhwcMB0GA1Ud
+ EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
+ c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKA4eqko6BTNhlysip6rfBkVTGri
+ ZXsL+kRb2hLvsQJS/kLyM21oMlu+LN0aPj3qEFR8mE/YeDD8rLAfruBRTltPNbR7
+ xA5eE1gkxY5LfExUtK3b2wPqfmo7mZgfcsMwfYg/tUXw1WpBCnrhAJodpGH6SXmp
+ A40qFUZst0vjiOoO+aTblIHPmMJXoZ3K42dTlNKlEiDKUWMRKSgpjjYGEYalFNWI
+ hHfCz2r8L2t+dYdMZg1JGbEkq4ADGsAA8ioZIpJd7V4hI17u5TCdi7X5wh/0gN0E
+ CgP+nLox3D+l2q0QuQEkayr+auFYkzTCkF+BmEk1D0Ru4mcf3F4CJvEmW4Pzbjqt
+ i1tsCWPtJ4E/UUKnKaWKqGbjrjHJ0MuShYzHkodox5IOiCXIQg+1+YSzfXUV6WEK
+ KJG/fhg1JV5vVDdVy6x+tv5SQ5ctU0feCsVfESi3rE3zRd+nvzE9HcZ5aXeL1UtJ
+ nT5Xrioegu2w1jPyVEgyZgTZC5rvD0nNS5sFNQ==
+ -----END CERTIFICATE-----
data/config/errors.yml CHANGED
@@ -1,6 +1,39 @@
  en:
-   errors:
-     broker_schema?: >
-       has an invalid format.
-       Expected schema, host and port number.
-       Example: kafka://127.0.0.1:9092 or kafka+ssl://127.0.0.1:9092
+   dry_validation:
+     errors:
+       invalid_broker_schema: >
+         has an invalid format
+         Expected schema, host and port number
+         Example: kafka://127.0.0.1:9092 or kafka+ssl://127.0.0.1:9092
+       invalid_certificate: >
+         is not a valid certificate
+       invalid_certificate_from_path: >
+         is not a valid certificate
+       invalid_private_key: >
+         is not a valid private key
+       max_timeout_size_for_exponential: >
+         pause_timeout cannot be more than pause_max_timeout
+       max_wait_time_limit:
+         max_wait_time cannot be more than socket_timeout
+       topics_names_not_unique: >
+         all topic names within a single consumer group must be unique
+       ssl_client_cert_with_ssl_client_cert_key: >
+         Both ssl_client_cert and ssl_client_cert_key need to be provided
+       ssl_client_cert_key_with_ssl_client_cert: >
+         Both ssl_client_cert_key and ssl_client_cert need to be provided
+       ssl_client_cert_chain_with_ssl_client_cert: >
+         Both ssl_client_cert_chain and ssl_client_cert need to be provided
+       ssl_client_cert_chain_with_ssl_client_cert_key: >
+         Both ssl_client_cert_chain and ssl_client_cert_key need to be provided
+       ssl_client_cert_key_password_with_ssl_client_cert_key: >
+         Both ssl_client_cert_key_password and ssl_client_cert_key need to be provided
+       does_not_respond_to_token: >
+         needs to respond to a #token method
+       required_usage_count: >
+         Given topic must be used at least once
+       pid_already_exists: >
+         Pidfile already exists
+       consumer_groups_inclusion: >
+         Unknown consumer group
+       does_not_exist:
+         Given file does not exist or cannot be read
data/karafka.gemspec CHANGED
@@ -5,6 +5,7 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 
  require 'karafka/version'
 
+ # rubocop:disable Metrics/BlockLength
  Gem::Specification.new do |spec|
    spec.name = 'karafka'
    spec.version = ::Karafka::VERSION
@@ -16,27 +17,28 @@ Gem::Specification.new do |spec|
    spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
    spec.license = 'MIT'
 
-   spec.add_dependency 'activesupport', '>= 4.0'
    spec.add_dependency 'dry-configurable', '~> 0.8'
    spec.add_dependency 'dry-inflector', '~> 0.1'
    spec.add_dependency 'dry-monitor', '~> 0.3'
-   spec.add_dependency 'dry-validation', '~> 0.11'
-   spec.add_dependency 'envlogic', '~> 1.0'
+   spec.add_dependency 'dry-validation', '~> 1.2'
+   spec.add_dependency 'envlogic', '~> 1.1'
+   spec.add_dependency 'irb', '~> 1.0'
    spec.add_dependency 'multi_json', '>= 1.12'
    spec.add_dependency 'rake', '>= 11.3'
-   spec.add_dependency 'require_all', '>= 1.4'
-   spec.add_dependency 'ruby-kafka', '>= 0.6'
-   spec.add_dependency 'thor', '~> 0.20'
-   spec.add_dependency 'waterdrop', '~> 1.2.4'
+   spec.add_dependency 'ruby-kafka', '>= 0.7.8'
+   spec.add_dependency 'thor', '>= 0.20'
+   spec.add_dependency 'waterdrop', '~> 1.3.0'
+   spec.add_dependency 'zeitwerk', '~> 2.1'
 
-   spec.post_install_message = <<~MSG
-     \e[93mWarning:\e[0m If you're using Kafka 0.10, please lock ruby-kafka in your Gemfile to version '0.6.8':
-       gem 'ruby-kafka', '~> 0.6.8'
-   MSG
+   spec.required_ruby_version = '>= 2.5.0'
 
-   spec.required_ruby_version = '>= 2.3.0'
+   if $PROGRAM_NAME.end_with?('gem')
+     spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
+   end
 
+   spec.cert_chain = %w[certs/mensfeld.pem]
    spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
    spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
    spec.require_paths = %w[lib]
  end
+ # rubocop:enable Metrics/BlockLength
data/lib/karafka.rb CHANGED
@@ -1,6 +1,7 @@
  # frozen_string_literal: true
 
  %w[
+   delegate
    English
    waterdrop
    kafka
@@ -9,13 +10,13 @@
    forwardable
    fileutils
    multi_json
-   require_all
    dry-configurable
    dry-validation
+   dry/events/publisher
    dry/inflector
    dry/monitor/notifications
-   active_support/callbacks
-   karafka/loader
+   dry/core/constants
+   zeitwerk
  ].each(&method(:require))
 
  # Karafka library
@@ -63,17 +64,9 @@ module Karafka
    end
  end
 
- %w[
-   callbacks
-   callbacks/*
-   setup/dsl
-   setup/config
-   status
-   schemas/config
-   schemas/consumer_group_topic
-   schemas/consumer_group
- ].each { |path| require_all File.join(Karafka.core_root, path + '.rb') }
+ Zeitwerk::Loader
+   .for_gem
+   .tap(&:setup)
+   .tap(&:eager_load)
 
- Karafka::Loader.load!(Karafka.core_root)
  Kafka::Consumer.prepend(Karafka::Patches::RubyKafka)
- Dry::Configurable::Config.prepend(Karafka::Patches::DryConfigurable)
data/lib/karafka/app.rb CHANGED
@@ -4,28 +4,36 @@ module Karafka
    # App class
    class App
      extend Setup::Dsl
-     extend Callbacks::Dsl
 
      class << self
        # Sets up all the internal components and bootstrap whole app
        # We need to know details about consumers in order to setup components,
        # that's why we don't setup them after std setup is done
-       # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
-       #   doesn't match with ConfigurationSchema
+       # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
+       #   doesn't match with the config contract
        def boot!
+         initialize!
          Setup::Config.validate!
         Setup::Config.setup_components
-         Callbacks.after_init(Karafka::App.config)
+         initialized!
        end
 
        # @return [Karafka::Routing::Builder] consumers builder instance
        def consumer_groups
-         Routing::Builder.instance
+         config.internal.routing_builder
+       end
+
+       # Triggers reload of all cached Karafka app components, so we can use in-process
+       # in-development hot code reloading without Karafka process restart
+       def reload
+         Karafka::Persistence::Consumers.clear
+         Karafka::Persistence::Topics.clear
+         config.internal.routing_builder.reload
        end
 
        Status.instance_methods(false).each do |delegated|
          define_method(delegated) do
-           Status.instance.send(delegated)
+           App.config.internal.status.send(delegated)
          end
        end
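The new `App.reload` powers in-development hot code reloading together with the `Karafka::CodeReloader` added in this release (see the file list above). A hedged wiring sketch for a Rails app follows; the reloaders list is an assumption about your setup, not something this diff prescribes:

```ruby
# Development-only sketch: reload application code between fetch loops.
# Rails.application.reloaders is an assumption - CodeReloader accepts any
# objects that know how to execute a reload.
if Karafka.env.development?
  Karafka.monitor.subscribe(
    Karafka::CodeReloader.new(*Rails.application.reloaders)
  )
end
```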
data/lib/karafka/attributes_map.rb CHANGED
@@ -11,9 +11,9 @@ module Karafka
    module AttributesMap
      class << self
        # What settings should go where in ruby-kafka
+       # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
        # @note All other settings will be passed to Kafka.new method invocation.
        #   All elements in this hash are just edge cases
-       # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
        def api_adapter
          {
            consumer: %i[
@@ -22,7 +22,7 @@ module Karafka
            ],
            subscribe: %i[start_from_beginning max_bytes_per_partition],
            consumption: %i[min_bytes max_bytes max_wait_time],
-           pause: %i[pause_timeout],
+           pause: %i[pause_timeout pause_max_timeout pause_exponential_backoff],
            # All the options that are under kafka config namespace, but are not used
            # directly with kafka api, but from the Karafka user perspective, they are
            # still related to kafka. They should not be proxied anywhere
@@ -35,10 +35,9 @@ module Karafka
        (api_adapter[:subscribe] + %i[
          backend
          name
-         parser
+         deserializer
          responder
          batch_consuming
-         persistent
        ]).uniq
      end
 
@@ -53,14 +52,8 @@ module Karafka
        ignored_settings = api_adapter[:subscribe]
        defined_settings = api_adapter.values.flatten
        karafka_settings = %i[batch_fetching]
-       # This is a drity and bad hack of dry-configurable to get keys before setting values
-       dynamically_proxied = Karafka::Setup::Config
-                             ._settings
-                             .settings
-                             .find { |s| s.name == :kafka }
-                             .value
-                             .names
-                             .to_a
+
+       dynamically_proxied = Karafka::Setup::Config.config.kafka.to_h.keys
 
        (defined_settings + dynamically_proxied).uniq + karafka_settings - ignored_settings
      end
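The extended `pause:` section means the new backoff settings get proxied to ruby-kafka's pause handling. A sketch of enabling them follows; the values are illustrative, and the settings are assumed to live under the `kafka` namespace, since everything this map routes is kafka-scoped:

```ruby
# Illustrative values: pause a failing partition for 2s, doubling up to
# a 32s cap once exponential backoff is enabled.
Karafka::App.setup do |config|
  config.kafka.pause_timeout = 2
  config.kafka.pause_max_timeout = 32
  config.kafka.pause_exponential_backoff = true
end
```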
data/lib/karafka/base_consumer.rb CHANGED
@@ -4,41 +4,33 @@
  module Karafka
    # Base consumer from which all Karafka consumers should inherit
    class BaseConsumer
-     extend ActiveSupport::DescendantsTracker
      extend Forwardable
 
      # Allows us to mark messages as consumed for non-automatic mode without having
      # to use consumer client directly. We do this that way, because most of the people should not
      # mess with the client instance directly (just in case)
-     def_delegator :client, :mark_as_consumed
-
-     private :mark_as_consumed
-
-     class << self
-       attr_reader :topic
-
-       # Assigns a topic to a consumer and builds up proper consumer functionalities
-       # so that it can cooperate with the topic settings
-       # @param topic [Karafka::Routing::Topic]
-       # @return [Karafka::Routing::Topic] assigned topic
-       def topic=(topic)
-         @topic = topic
-         Consumers::Includer.call(self)
-       end
+     %i[
+       mark_as_consumed
+       mark_as_consumed!
+       trigger_heartbeat
+       trigger_heartbeat!
+     ].each do |delegated_method_name|
+       def_delegator :client, delegated_method_name
+
+       private delegated_method_name
      end
 
      # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
-     def topic
-       self.class.topic
-     end
-
-     # Creates lazy loaded params batch object
-     # @note Until first params usage, it won't parse data at all
-     # @param messages [Array<Kafka::FetchedMessage>, Array<Hash>] messages with raw
-     #   content (from Kafka) or messages inside a hash (from backend, etc)
-     # @return [Karafka::Params::ParamsBatch] lazy loaded params batch
-     def params_batch=(messages)
-       @params_batch = Karafka::Params::ParamsBatch.new(messages, topic.parser)
+     attr_reader :topic
+     # @return [Karafka::Params:ParamsBatch] current params batch
+     attr_accessor :params_batch
+
+     # Assigns a topic to a consumer and builds up proper consumer functionalities
+     # so that it can cooperate with the topic settings
+     # @param topic [Karafka::Routing::Topic]
+     def initialize(topic)
+       @topic = topic
+       Consumers::Includer.call(self)
      end
 
      # Executes the default consumer flow.
@@ -48,9 +40,6 @@ module Karafka
 
      private
 
-     # We make it private as it should be accessible only from the inside of a consumer
-     attr_reader :params_batch
-
      # @return [Karafka::Connection::Client] messages consuming client that can be used to
      #   commit manually offset or pause / stop consumer based on the business logic
      def client
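With consumers now instantiated per topic and `params_batch` exposed as an accessor, a typical 1.3 consumer using the delegated client helpers might look like this sketch; the topic, class, and model names are made up:

```ruby
# Hypothetical consumer relying on the (private) delegated client methods.
class EventsConsumer < ApplicationConsumer
  def consume
    params_batch.each do |params|
      # In 1.3 the deserialized message content lives under #payload
      Event.create!(params.payload)
    end

    # Delegated to the client: manually commit the offset of the last message
    mark_as_consumed(params_batch.last)
  end
end
```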
data/lib/karafka/base_responder.rb CHANGED
@@ -39,7 +39,7 @@ module Karafka
    #
    # @example Multiple times used topic
    #   class Responder < BaseResponder
-   #     topic :required_topic, multiple_usage: true
+   #     topic :required_topic
    #
    #     def respond(data)
    #       data.each do |subset|
@@ -48,6 +48,17 @@ module Karafka
    #       end
    #     end
    #
+   # @example Specify serializer for a topic
+   #   class Responder < BaseResponder
+   #     topic :xml_topic, serializer: MyXMLSerializer
+   #
+   #     def respond(data)
+   #       data.each do |subset|
+   #         respond_to :xml_topic, subset
+   #       end
+   #     end
+   #   end
+   #
    # @example Accept multiple arguments to a respond method
    #   class Responder < BaseResponder
    #     topic :users_actions
@@ -59,31 +70,35 @@ module Karafka
    #     end
    #   end
    class BaseResponder
-     # Definitions of all topics that we want to be able to use in this responder should go here
-     class_attribute :topics
-
-     # Schema that we can use to control and/or require some additional details upon options
-     # that are being passed to the producer. This can be in particular useful if we want to make
-     # sure that for example partition_key is always present.
-     class_attribute :options_schema
+     # Responder usage contract
+     CONTRACT = Karafka::Contracts::ResponderUsage.new.freeze
 
-     attr_reader :messages_buffer
+     private_constant :CONTRACT
 
      class << self
+       # Definitions of all topics that we want to be able to use in this responder should go here
+       attr_accessor :topics
+       # Contract that we can use to control and/or require some additional details upon options
+       # that are being passed to the producer. This can be in particular useful if we want to make
+       # sure that for example partition_key is always present.
+       attr_accessor :options_contract
+
        # Registers a topic as on to which we will be able to respond
        # @param topic_name [Symbol, String] name of topic to which we want to respond
        # @param options [Hash] hash with optional configuration details
        def topic(topic_name, options = {})
+         options[:serializer] ||= Karafka::App.config.serializer
+         options[:registered] = true
          self.topics ||= {}
-         topic_obj = Responders::Topic.new(topic_name, options.merge(registered: true))
+         topic_obj = Responders::Topic.new(topic_name, options)
          self.topics[topic_obj.name] = topic_obj
        end
 
        # A simple alias for easier standalone responder usage.
-       # Instead of building it with new.call it allows (in case of usin JSON parser)
+       # Instead of building it with new.call it allows (in case of using JSON serializer)
        # to just run it directly from the class level
        # @param data Anything that we want to respond with
-       # @example Send user data with a responder (uses default Karafka::Parsers::Json parser)
+       # @example Send user data with a responder
        #   UsersCreatedResponder.call(@created_user)
        def call(*data)
          # Just in case there were no topics defined for a responder, we initialize with
@@ -93,12 +108,11 @@ module Karafka
        end
      end
 
+     attr_reader :messages_buffer
+
      # Creates a responder object
-     # @param parser_class [Class] parser class that we can use to generate appropriate string
-     #   or nothing if we want to default to Karafka::Parsers::Json
      # @return [Karafka::BaseResponder] base responder descendant responder
-     def initialize(parser_class = Karafka::App.config.parser)
-       @parser_class = parser_class
+     def initialize
        @messages_buffer = {}
      end
 
@@ -107,7 +121,7 @@
      # @note We know that validators should be executed also before sending data to topics, however
      #   the implementation gets way more complicated then, that's why we check after everything
      #   was sent using responder
-     # @example Send user data with a responder (uses default Karafka::Parsers::Json parser)
+     # @example Send user data with a responder
      #   UsersCreatedResponder.new.call(@created_user)
      # @example Send user data with a responder using non default Parser
      #   UsersCreatedResponder.new(MyParser).call(@created_user)
@@ -134,25 +148,26 @@ module Karafka
          topic.to_h.merge!(usage_count: usage.count)
        end
 
-       result = Karafka::Schemas::ResponderUsage.call(
+       result = CONTRACT.call(
          registered_topics: registered_topics,
          used_topics: used_topics
        )
 
        return if result.success?
 
-       raise Karafka::Errors::InvalidResponderUsage, result.errors
+       raise Karafka::Errors::InvalidResponderUsageError, result.errors.to_h
      end
 
      # Checks if we met all the options requirements before sending them to the producer.
      def validate_options!
-       return true unless self.class.options_schema
+       return true unless self.class.options_contract
 
        messages_buffer.each_value do |messages_set|
          messages_set.each do |message_data|
-           result = self.class.options_schema.call(message_data.last)
+           result = self.class.options_contract.call(message_data.last)
            next if result.success?
-           raise Karafka::Errors::InvalidResponderMessageOptions, result.errors
+
+           raise Karafka::Errors::InvalidResponderMessageOptionsError, result.errors.to_h
          end
        end
      end
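Because `options_contract` is now a plain class-level accessor holding a dry-validation 1.x contract, enforcing for example a `partition_key` could look like this sketch; the contract and responder names are hypothetical:

```ruby
# Hypothetical contract for delivery options, mirroring the partition_key
# use-case mentioned in the accessor's comment.
class DeliveryOptionsContract < Dry::Validation::Contract
  params do
    required(:partition_key).filled(:string)
  end
end

class UsersCreatedResponder < ApplicationResponder
  self.options_contract = DeliveryOptionsContract.new

  topic :users_created

  def respond(user)
    respond_to :users_created, user, partition_key: user.id.to_s
  end
end
```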
@@ -174,6 +189,7 @@
 
      # Method that needs to be implemented in a subclass. It should handle responding
      #   on registered topics
+     # @param _data [Object] anything that we want to use to send to Kafka
      # @raise [NotImplementedError] This method needs to be implemented in a subclass
      def respond(*_data)
        raise NotImplementedError, 'Implement this in a subclass'
@@ -183,7 +199,7 @@
      #   as many times as we need. Especially when we have 1:n flow
      # @param topic [Symbol, String] topic to which we want to respond
      # @param data [String, Object] string or object that we want to send
-     # @param options [Hash] options for waterdrop (e.g. partition_key)
+     # @param options [Hash] options for waterdrop (e.g. partition_key).
      # @note Respond to does not accept multiple data arguments.
      def respond_to(topic, data, options = {})
        # We normalize the format to string, as WaterDrop and Ruby-Kafka support only
@@ -192,7 +208,7 @@
 
        messages_buffer[topic] ||= []
        messages_buffer[topic] << [
-         @parser_class.generate(data),
+         self.class.topics[topic].serializer.call(data),
          options.merge(topic: topic)
        ]
      end
@@ -200,9 +216,11 @@
      # @param options [Hash] options for waterdrop
      # @return [Class] WaterDrop producer (sync or async based on the settings)
      def producer(options)
-       self.class.topics[
-         options[:topic]
-       ].async? ? WaterDrop::AsyncProducer : WaterDrop::SyncProducer
+       if self.class.topics[options[:topic]].async?
+         WaterDrop::AsyncProducer
+       else
+         WaterDrop::SyncProducer
+       end
      end
    end
  end
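The refactored `#producer` picks `WaterDrop::AsyncProducer` whenever the registered topic is flagged as async; a closing sketch, where the topic name and the `async: true` flag are illustrative:

```ruby
# Hypothetical responder whose deliveries go through WaterDrop::AsyncProducer
# because its registered topic is marked async.
class EventsResponder < ApplicationResponder
  topic :events, async: true

  def respond(event)
    respond_to :events, event
  end
end
```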