karafka 1.3.0 → 1.4.14

Files changed (58)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.diffend.yml +3 -0
  4. data/.github/workflows/ci.yml +76 -0
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +112 -15
  7. data/CODE_OF_CONDUCT.md +1 -1
  8. data/Gemfile +2 -0
  9. data/Gemfile.lock +87 -98
  10. data/README.md +28 -31
  11. data/certs/mensfeld.pem +24 -23
  12. data/config/errors.yml +2 -0
  13. data/docker-compose.yml +17 -0
  14. data/karafka.gemspec +22 -14
  15. data/lib/karafka/assignment_strategies/round_robin.rb +13 -0
  16. data/lib/karafka/attributes_map.rb +3 -8
  17. data/lib/karafka/cli/base.rb +4 -4
  18. data/lib/karafka/cli/flow.rb +9 -6
  19. data/lib/karafka/cli/info.rb +1 -1
  20. data/lib/karafka/cli/install.rb +5 -2
  21. data/lib/karafka/cli/missingno.rb +19 -0
  22. data/lib/karafka/cli/server.rb +8 -8
  23. data/lib/karafka/cli.rb +9 -1
  24. data/lib/karafka/connection/api_adapter.rb +27 -24
  25. data/lib/karafka/connection/batch_delegator.rb +5 -1
  26. data/lib/karafka/connection/builder.rb +9 -2
  27. data/lib/karafka/connection/client.rb +9 -6
  28. data/lib/karafka/connection/listener.rb +2 -2
  29. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  30. data/lib/karafka/consumers/includer.rb +5 -4
  31. data/lib/karafka/contracts/consumer_group.rb +10 -5
  32. data/lib/karafka/contracts/server_cli_options.rb +2 -0
  33. data/lib/karafka/contracts.rb +1 -1
  34. data/lib/karafka/helpers/class_matcher.rb +2 -2
  35. data/lib/karafka/instrumentation/logger.rb +6 -9
  36. data/lib/karafka/instrumentation/stdout_listener.rb +6 -4
  37. data/lib/karafka/params/batch_metadata.rb +26 -0
  38. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  39. data/lib/karafka/params/builders/params.rb +17 -15
  40. data/lib/karafka/params/builders/params_batch.rb +2 -2
  41. data/lib/karafka/params/metadata.rb +14 -29
  42. data/lib/karafka/params/params.rb +27 -41
  43. data/lib/karafka/params/params_batch.rb +15 -16
  44. data/lib/karafka/routing/builder.rb +1 -0
  45. data/lib/karafka/routing/consumer_group.rb +5 -3
  46. data/lib/karafka/serialization/json/deserializer.rb +2 -2
  47. data/lib/karafka/server.rb +4 -1
  48. data/lib/karafka/setup/config.rb +60 -52
  49. data/lib/karafka/templates/karafka.rb.erb +1 -1
  50. data/lib/karafka/version.rb +1 -1
  51. data/lib/karafka.rb +3 -1
  52. data.tar.gz.sig +0 -0
  53. metadata +75 -93
  54. metadata.gz.sig +0 -0
  55. data/.github/FUNDING.yml +0 -3
  56. data/.travis.yml +0 -36
  57. data/lib/karafka/consumers/metadata.rb +0 -10
  58. data/lib/karafka/params/builders/metadata.rb +0 -33
data/certs/mensfeld.pem CHANGED
@@ -1,25 +1,26 @@
  -----BEGIN CERTIFICATE-----
- MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
- ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMTkwNzMwMTQ1NDU0WhcNMjAwNzI5MTQ1
- NDU0WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
- CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC9fCwtaHZG2SyyNXiH8r0QbJQx/xxl
- dkvwWz9QGJO+O8rEx20FB1Ab+MVkfOscwIv5jWpmk1U9whzDPl1uFtIbgu+sk+Zb
- uQlZyK/DPN6c+/BbBL+RryTBRyvkPLoCVwm7uxc/JZ1n4AI6eF4cCZ2ieZ9QgQbU
- MQs2QPqs9hT50Ez/40GnOdadVfiDDGz+NME2C4ms0BriXwZ1tcRTfJIHe2xjIbbb
- y5qRGfsLKcgMzvLQR24olixyX1MR0s4+Wveq3QL/gBhL4veUcv+UABJA8IJR0kyB
- seHHutusiwZ1v3SjjjW1xLLrc2ARV0mgCb0WaK2T4iA3oFTGLh6Ydz8LNl31KQFv
- 94nRd8IhmJxrhQ6dQ/WT9IXoa5S9lfT5lPJeINemH4/6QPABzf9W2IZlCdI9wCdB
- TBaw57MKneGAYZiKjw6OALSy2ltQUCl3RqFl3VP7n8uFy1U987Q5VIIQ3O1UUsQD
- Oe/h+r7GUU4RSPKgPlrwvW9bD/UQ+zF51v8CAwEAAaN3MHUwCQYDVR0TBAIwADAL
- BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFJNIBHdfEUD7TqHqIer2YhWaWhwcMB0GA1Ud
- EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
- c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKA4eqko6BTNhlysip6rfBkVTGri
- ZXsL+kRb2hLvsQJS/kLyM21oMlu+LN0aPj3qEFR8mE/YeDD8rLAfruBRTltPNbR7
- xA5eE1gkxY5LfExUtK3b2wPqfmo7mZgfcsMwfYg/tUXw1WpBCnrhAJodpGH6SXmp
- A40qFUZst0vjiOoO+aTblIHPmMJXoZ3K42dTlNKlEiDKUWMRKSgpjjYGEYalFNWI
- hHfCz2r8L2t+dYdMZg1JGbEkq4ADGsAA8ioZIpJd7V4hI17u5TCdi7X5wh/0gN0E
- CgP+nLox3D+l2q0QuQEkayr+auFYkzTCkF+BmEk1D0Ru4mcf3F4CJvEmW4Pzbjqt
- i1tsCWPtJ4E/UUKnKaWKqGbjrjHJ0MuShYzHkodox5IOiCXIQg+1+YSzfXUV6WEK
- KJG/fhg1JV5vVDdVy6x+tv5SQ5ctU0feCsVfESi3rE3zRd+nvzE9HcZ5aXeL1UtJ
- nT5Xrioegu2w1jPyVEgyZgTZC5rvD0nNS5sFNQ==
+ MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
+ YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
+ MB4XDTIyMDgxOTE3MjEzN1oXDTIzMDgxOTE3MjEzN1owPzEQMA4GA1UEAwwHY29u
+ dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
+ bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAODzeO3L6lxdATzMHKNW
+ jFA/GGunoPuylO/BMzy8RiQHh7VIvysAKs0tHhTx3g2D0STDpF+hcQcPELFikiT2
+ F+1wOHj/SsrK7VKqfA8+gq04hKc5sQoX2Egf9k3V0YJ3eZ6R/koHkQ8A0TVt0w6F
+ ZQckoV4MqnEAx0g/FZN3mnHTlJ3VFLSBqJEIe+S6FZMl92mSv+hTrlUG8VaYxSfN
+ lTCvnKk284F6QZq5XIENLRmcDd/3aPBLnLwNnyMyhB+6gK8cUO+CFlDO5tjo/aBA
+ rUnl++wGG0JooF1ed0v+evOn9KoMBG6rHewcf79qJbVOscbD8qSAmo+sCXtcFryr
+ KRMTB8gNbowJkFRJDEe8tfRy11u1fYzFg/qNO82FJd62rKAw2wN0C29yCeQOPRb1
+ Cw9Y4ZwK9VFNEcV9L+3pHTHn2XfuZHtDaG198VweiF6raFO4yiEYccodH/USP0L5
+ cbcCFtmu/4HDSxL1ByQXO84A0ybJuk3/+aPUSXe9C9U8fwIDAQABo3cwdTAJBgNV
+ HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUSlcEakb7gfn/5E2WY6z73BF/
+ iZkwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
+ bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEA1aS+E7RXJ1w9g9mJ
+ G0NzFxe64OEuENosNlvYQCbRKGCXAU1qqelYkBQHseRgRKxLICrnypRo9IEobyHa
+ vDnJ4r7Tsb34dleqQW2zY/obG+cia3Ym2JsegXWF7dDOzCXJ4FN8MFoT2jHlqLLw
+ yrap0YO5zx0GSQ0Dwy8h2n2v2vanMEeCx7iNm3ERgR5WuN5sjzWoz2A/JLEEcK0C
+ EnAGKCWAd1fuG8IemDjT1edsd5FyYR4bIX0m+99oDuFZyPiiIbalmyYiSBBp59Yb
+ Q0P8zeBi4OfwCZNcxqz0KONmw9JLNv6DgyEAH5xe/4JzhMEgvIRiPj0pHfA7oqQF
+ KUNqvD1KlxbEC+bZfE5IZhnqYLdld/Ksqd22FI1RBhiS1Ejfsj99LVIm9cBuZEY2
+ Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
+ MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
  -----END CERTIFICATE-----
data/config/errors.yml CHANGED
@@ -37,3 +37,5 @@ en:
    Unknown consumer group
  does_not_exist:
    Given file does not exist or cannot be read
+ does_not_respond_to_call: >
+   needs to respond to a #call method
data/docker-compose.yml ADDED
@@ -0,0 +1,17 @@
+ version: '2'
+ services:
+   zookeeper:
+     image: wurstmeister/zookeeper
+     ports:
+       - "2181:2181"
+   kafka:
+     image: wurstmeister/kafka:1.0.1
+     ports:
+       - "9092:9092"
+     environment:
+       KAFKA_ADVERTISED_HOST_NAME: localhost
+       KAFKA_ADVERTISED_PORT: 9092
+       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+       KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
+     volumes:
+       - /var/run/docker.sock:/var/run/docker.sock
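This compose file takes over the Kafka bootstrap role from the removed .travis.yml: with Docker installed, docker-compose up starts a single-node Kafka broker on localhost:9092 (plus Zookeeper on 2181) with topic auto-creation enabled, which is what local development and the specs rely on.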
data/karafka.gemspec CHANGED
@@ -11,26 +11,24 @@ Gem::Specification.new do |spec|
  spec.version = ::Karafka::VERSION
  spec.platform = Gem::Platform::RUBY
  spec.authors = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
- spec.email = %w[maciej@coditsu.io pavlo.vavruk@gmail.com adam99g@gmail.com]
- spec.homepage = 'https://github.com/karafka/karafka'
+ spec.email = %w[maciej@mensfeld.pl pavlo.vavruk@gmail.com adam99g@gmail.com]
+ spec.homepage = 'https://karafka.io'
  spec.summary = 'Ruby based framework for working with Apache Kafka'
  spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
  spec.license = 'MIT'

- spec.add_dependency 'dry-configurable', '~> 0.8'
- spec.add_dependency 'dry-inflector', '~> 0.1'
- spec.add_dependency 'dry-monitor', '~> 0.3'
- spec.add_dependency 'dry-validation', '~> 1.2'
+ spec.add_dependency 'concurrent-ruby'
+ spec.add_dependency 'dry-configurable', '~> 0.16'
+ spec.add_dependency 'dry-inflector', '~> 0.2'
+ spec.add_dependency 'dry-monitor', '~> 0.5'
+ spec.add_dependency 'dry-validation', '~> 1.7'
  spec.add_dependency 'envlogic', '~> 1.1'
- spec.add_dependency 'irb', '~> 1.0'
- spec.add_dependency 'multi_json', '>= 1.12'
- spec.add_dependency 'rake', '>= 11.3'
- spec.add_dependency 'ruby-kafka', '>= 0.7.8'
- spec.add_dependency 'thor', '~> 0.20'
- spec.add_dependency 'waterdrop', '~> 1.3.0'
- spec.add_dependency 'zeitwerk', '~> 2.1'
+ spec.add_dependency 'ruby-kafka', '>= 1.3.0'
+ spec.add_dependency 'thor', '>= 1.1'
+ spec.add_dependency 'waterdrop', '~> 1.4'
+ spec.add_dependency 'zeitwerk', '~> 2.6'

- spec.required_ruby_version = '>= 2.4.0'
+ spec.required_ruby_version = '>= 2.7'

  if $PROGRAM_NAME.end_with?('gem')
    spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
@@ -40,5 +38,15 @@ Gem::Specification.new do |spec|
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.require_paths = %w[lib]
+ spec.post_install_message = <<~MSG
+   WARN: Karafka 1.4 will reach the end of life soon.
+   We highly recommend updating to Karafka 2.0.
+   Visit this page for more details: https://karafka.io/docs/Versions-Lifecycle-and-EOL
+ MSG
+
+ spec.metadata = {
+   'source_code_uri' => 'https://github.com/karafka/karafka',
+   'rubygems_mfa_required' => 'true'
+ }
end
# rubocop:enable Metrics/BlockLength
data/lib/karafka/assignment_strategies/round_robin.rb ADDED
@@ -0,0 +1,13 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Strategies for Kafka partitions assignments
+   module AssignmentStrategies
+     # Standard RoundRobin strategy
+     class RoundRobin < SimpleDelegator
+       def initialize
+         super(Kafka::RoundRobinAssignmentStrategy.new)
+       end
+     end
+   end
+ end
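The strategy object above simply delegates to ruby-kafka's built-in round robin assignor and becomes the default value of the new kafka-level assignment_strategy setting validated further down. A minimal configuration sketch (the broker address is a placeholder; the setting path follows the attributes_map and contract changes in this diff):

    class KarafkaApp < Karafka::App
      setup do |config|
        config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
        # The shipped default; any object responding to #call passes validation
        config.kafka.assignment_strategy = Karafka::AssignmentStrategies::RoundRobin.new
      end
    end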
data/lib/karafka/attributes_map.rb CHANGED
@@ -19,6 +19,7 @@ module Karafka
  consumer: %i[
    session_timeout offset_commit_interval offset_commit_threshold
    offset_retention_time heartbeat_interval fetcher_max_queue_size
+   assignment_strategy
  ],
  subscribe: %i[start_from_beginning max_bytes_per_partition],
  consumption: %i[min_bytes max_bytes max_wait_time],
@@ -52,14 +53,8 @@
  ignored_settings = api_adapter[:subscribe]
  defined_settings = api_adapter.values.flatten
  karafka_settings = %i[batch_fetching]
- # This is a dirty and bad hack of dry-configurable to get keys before setting values
- dynamically_proxied = Karafka::Setup::Config
-   ._settings
-   .settings
-   .find { |s| s.name == :kafka }
-   .value
-   .names
-   .to_a
+
+ dynamically_proxied = Karafka::Setup::Config.config.kafka.to_h.keys

  (defined_settings + dynamically_proxied).uniq + karafka_settings - ignored_settings
end
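The eight-line dig through dry-configurable internals could be dropped because dry-configurable 0.16 (see the gemspec bump) exposes configured values directly. A sketch of the new lookup in isolation:

    # After Karafka::App.setup has run, every kafka-level setting name,
    # defaults included, is introspectable via #to_h
    Karafka::Setup::Config.config.kafka.to_h.keys
    # => [:seed_brokers, :session_timeout, ...] (the exact list depends on the defaults)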
data/lib/karafka/cli/base.rb CHANGED
@@ -43,16 +43,16 @@
  end

  # Allows to set description of a given cli command
- # @param desc [String] Description of a given cli command
- def desc(desc)
-   @desc ||= desc
+ # @param args [Array] All the arguments that Thor desc method accepts
+ def desc(*args)
+   @desc ||= args
  end

  # This method will bind a given Cli command into Karafka Cli
  # This method is a wrapper to way Thor defines its commands
  # @param cli_class [Karafka::Cli] Karafka cli_class
  def bind_to(cli_class)
-   cli_class.desc name, @desc
+   cli_class.desc name, *@desc

    (@options || []).each { |option| cli_class.option(*option) }
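Taking a splat here lets command classes forward anything Thor's desc accepts, such as the hide: true flag that the new Missingno command below uses to stay out of the help listing.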
data/lib/karafka/cli/flow.rb CHANGED
@@ -11,19 +11,22 @@
  def call
    topics.each do |topic|
      any_topics = !topic.responder&.topics.nil?
+     log_messages = []

      if any_topics
-       puts "#{topic.name} =>"
+       log_messages << "#{topic.name} =>"

        topic.responder.topics.each_value do |responder_topic|
          features = []
          features << (responder_topic.required? ? 'always' : 'conditionally')

-         print responder_topic.name, "(#{features.join(', ')})"
+         log_messages << format(responder_topic.name, "(#{features.join(', ')})")
        end
      else
-       puts "#{topic.name} => (nothing)"
+       log_messages << "#{topic.name} => (nothing)"
      end
+
+     Karafka.logger.info(log_messages.join("\n"))
    end
  end

@@ -34,11 +37,11 @@
    Karafka::App.consumer_groups.map(&:topics).flatten.sort_by(&:name)
  end

- # Prints a given value with label in a nice way
+ # Formats a given value with label in a nice way
  # @param label [String] label describing value
  # @param value [String] value that should be printed
- def print(label, value)
-   printf "%-25s %s\n", " - #{label}:", value
+ def format(label, value)
+   " - #{label}: #{value}"
  end
end
data/lib/karafka/cli/info.rb CHANGED
@@ -24,7 +24,7 @@
      "Kafka seed brokers: #{config.kafka.seed_brokers}"
    ]

-   puts(info.join("\n"))
+   Karafka.logger.info(info.join("\n"))
  end
end
end
data/lib/karafka/cli/install.rb CHANGED
@@ -13,7 +13,9 @@
  INSTALL_DIRS = %w[
    app/consumers
    app/responders
+   app/workers
    config
+   lib
    log
    tmp/pids
  ].freeze
@@ -28,11 +30,12 @@
  # @param args [Array] all the things that Thor CLI accepts
  def initialize(*args)
    super
-   @rails = Bundler::LockfileParser.new(
+   dependencies = Bundler::LockfileParser.new(
      Bundler.read_file(
        Bundler.default_lockfile
      )
-   ).dependencies.key?('rails')
+   ).dependencies
+   @rails = dependencies.key?('railties') || dependencies.key?('rails')
  end

  # Install all required things for Karafka application in current directory
data/lib/karafka/cli/missingno.rb ADDED
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   class Cli < Thor
+     # Command that gets invoked when no method is provided when running the CLI
+     # It allows us to exit with exit code 1 instead of default 0 to indicate that something
+     # was missing
+     # @see https://github.com/karafka/karafka/issues/619
+     class Missingno < Base
+       desc 'Hidden command that gets invoked when no command is provided', hide: true
+
+       # Prints an error about the lack of command (nothing selected)
+       def call
+         Karafka.logger.error('No command provided')
+         exit 1
+       end
+     end
+   end
+ end
data/lib/karafka/cli/server.rb CHANGED
@@ -31,14 +31,6 @@
  # part of the topics
  Karafka::Server.consumer_groups = cli.options[:consumer_groups]

- # Remove pidfile on stop, just before the server instance is going to be GCed
- # We want to delay the moment in which the pidfile is removed as much as we can,
- # so instead of removing it after the server stops running, we rely on the gc moment
- # when this object gets removed (it is a bit later), so it is closer to the actual
- # system process end. We do that, so monitoring and deployment tools that rely on a pid
- # won't alarm or start new system process up until the current one is finished
- ObjectSpace.define_finalizer(self, proc { send(:clean) })
-
  Karafka::Server.run
end

@@ -60,6 +52,14 @@
    cli.options[:pid],
    'w'
  ) { |file| file.write(::Process.pid) }
+
+ # Remove pidfile on stop, just before the server instance is going to be GCed
+ # We want to delay the moment in which the pidfile is removed as much as we can,
+ # so instead of removing it after the server stops running, we rely on the gc moment
+ # when this object gets removed (it is a bit later), so it is closer to the actual
+ # system process end. We do that, so monitoring and deployment tools that rely on a pid
+ # won't alarm or start new system process up until the current one is finished
+ ObjectSpace.define_finalizer(self, proc { send(:clean) })
end

# Removes a pidfile (if exist)
data/lib/karafka/cli.rb CHANGED
@@ -10,6 +10,8 @@
class Cli < Thor
  package_name 'Karafka'

+ default_task :missingno
+
  class << self
    # Loads all Cli commands into Thor framework
    # This method should be executed before we run Karafka::Cli.start, otherwise we won't
@@ -20,6 +22,12 @@
      end
    end

+   # When there is a CLI crash, exit
+   # @return [true]
+   def exit_on_failure?
+     true
+   end
+
    private

    # @return [Array<Class>] Array with Cli action classes that can be used as commands

@@ -47,7 +55,7 @@ end
if ENV['KARAFKA_CONSOLE']
  # Reloads Karafka irb console session
  def reload!
-   puts "Reloading...\n"
+   Karafka.logger.info "Reloading...\n"
    Kernel.exec Karafka::Cli::Console.command
  end
end
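Together these pieces change what a bare karafka invocation does: default_task :missingno routes it to Missingno#call, which logs 'No command provided' and exits with status 1 instead of Thor's default success exit, while exit_on_failure? makes Thor report its own command errors with a non-zero status as well.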
data/lib/karafka/connection/api_adapter.rb CHANGED
@@ -14,11 +14,12 @@
module ApiAdapter
  class << self
    # Builds all the configuration settings for Kafka.new method
+   # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
    # @return [Array<Hash>] Array with all the client arguments including hash with all
    #   the settings required by Kafka.new method
    # @note We return array, so we can inject any arguments we want, in case of changes in the
    #   raw driver
-   def client
+   def client(consumer_group)
      # This one is a default that takes all the settings except special
      # cases defined in the map
      settings = {
@@ -26,14 +27,17 @@
        client_id: ::Karafka::App.config.client_id
      }

-     kafka_configs.each do |setting_name, setting_value|
+     kafka_configs.each_key do |setting_name|
        # All options for config adapter should be ignored as we're just interested
        # in what is left, as we want to pass all the options that are "typical"
        # and not listed in the api_adapter special cases mapping. All the values
        # from the api_adapter mapping go somewhere else, not to the client directly
        next if AttributesMap.api_adapter.values.flatten.include?(setting_name)

-       settings[setting_name] = setting_value
+       # Settings for each consumer group are either defined per consumer group or are
+       # inherited from the global/general settings level, thus we don't have to fetch them
+       # from the kafka settings as they are already on a consumer group level
+       settings[setting_name] = consumer_group.public_send(setting_name)
      end

      settings_hash = sanitize(settings)
@@ -44,30 +48,28 @@

    # Builds all the configuration settings for kafka#consumer method
    # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
-   # @return [Array<Hash>] array with all the consumer arguments including hash with all
+   # @return [Hash] all the consumer keyword arguments including hash with all
    #   the settings required by Kafka#consumer
    def consumer(consumer_group)
      settings = { group_id: consumer_group.id }
      settings = fetch_for(:consumer, consumer_group, settings)
-     [sanitize(settings)]
+     sanitize(settings)
    end

    # Builds all the configuration settings for kafka consumer consume_each_batch and
    # consume_each_message methods
    # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
-   # @return [Array<Hash>] Array with all the arguments required by consuming method
-   #   including hash with all the settings required by
+   # @return [Hash] hash with all the arguments required by consuming method
+   #   including all the settings required by
    #   Kafka::Consumer#consume_each_message and Kafka::Consumer#consume_each_batch method
    def consumption(consumer_group)
-     [
-       sanitize(
-         fetch_for(
-           :consumption,
-           consumer_group,
-           automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
-         )
+     sanitize(
+       fetch_for(
+         :consumption,
+         consumer_group,
+         automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
        )
-     ]
+     )
    end

    # Builds all the configuration settings for kafka consumer#subscribe method
@@ -82,17 +84,16 @@
    # @param topic [String] topic that we want to pause
    # @param partition [Integer] number partition that we want to pause
    # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
-   # @return [Array] array with all the details required to pause kafka consumer
+   # @return [Hash] hash with all the details required to pause kafka consumer
    def pause(topic, partition, consumer_group)
-     [
-       Karafka::App.config.topic_mapper.outgoing(topic),
-       partition,
-       {
+     {
+       args: [Karafka::App.config.topic_mapper.outgoing(topic), partition],
+       kwargs: {
          timeout: consumer_group.pause_timeout,
          max_timeout: consumer_group.pause_max_timeout,
          exponential_backoff: consumer_group.pause_exponential_backoff
        }
-     ]
+     }
    end

    # Remaps topic details taking the topic mapper feature into consideration.
@@ -105,11 +106,13 @@
      # Majority of users don't use custom topic mappers. No need to change anything when it
      # is a default mapper that does not change anything. Only some cloud providers require
      # topics to be remapped
-     return [params] if Karafka::App.config.topic_mapper.is_a?(Karafka::Routing::TopicMapper)
+     return [params.metadata] if Karafka::App.config.topic_mapper.is_a?(
+       Karafka::Routing::TopicMapper
+     )

      # @note We don't use tap as it is around 13% slower than non-dup version
-     dupped = params.dup
-     dupped['topic'] = Karafka::App.config.topic_mapper.outgoing(params.topic)
+     dupped = params.metadata.dup
+     dupped['topic'] = Karafka::App.config.topic_mapper.outgoing(params.metadata.topic)
      [dupped]
    end
data/lib/karafka/connection/batch_delegator.rb CHANGED
@@ -23,7 +23,11 @@
    ) do
      # Due to how ruby-kafka is built, we have the metadata that is stored on the batch
      # level only available for batch consuming
-     consumer.metadata = Params::Builders::Metadata.from_kafka_batch(kafka_batch, topic)
+     consumer.batch_metadata = Params::Builders::BatchMetadata.from_kafka_batch(
+       kafka_batch,
+       topic
+     )
+
      kafka_messages = kafka_batch.messages

      # Depending on a case (persisted or not) we might use new consumer instance per
data/lib/karafka/connection/builder.rb CHANGED
@@ -6,9 +6,16 @@
module Builder
  class << self
    # Builds a Kafka::Client instance that we use to work with Kafka cluster
+   # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group for which we want
+   #   to have a new Kafka client
    # @return [::Kafka::Client] returns a Kafka client
-   def call
-     Kafka.new(*ApiAdapter.client)
+   def call(consumer_group)
+     settings = ApiAdapter.client(consumer_group)
+
+     Kafka.new(
+       settings[0],
+       **settings[1]
+     )
    end
  end
end
data/lib/karafka/connection/client.rb CHANGED
@@ -33,9 +33,9 @@
    settings = ApiAdapter.consumption(consumer_group)

    if consumer_group.batch_fetching
-     kafka_consumer.each_batch(*settings) { |batch| yield(batch, :batch) }
+     kafka_consumer.each_batch(**settings) { |batch| yield(batch, :batch) }
    else
-     kafka_consumer.each_message(*settings) { |message| yield(message, :message) }
+     kafka_consumer.each_message(**settings) { |message| yield(message, :message) }
    end
  # @note We catch only the processing errors as any other are considered critical (exceptions)
  # and should require a client restart with a backoff
@@ -64,7 +64,8 @@
  # @param topic [String] topic that we want to pause
  # @param partition [Integer] number partition that we want to pause
  def pause(topic, partition)
-   kafka_consumer.pause(*ApiAdapter.pause(topic, partition, consumer_group))
+   args, kwargs = ApiAdapter.pause(topic, partition, consumer_group).values_at(:args, :kwargs)
+   kafka_consumer.pause(*args, **kwargs)
  end

  # Marks given message as consumed
@@ -97,11 +98,13 @@
  def kafka_consumer
    # @note We don't cache the connection internally because we cache kafka_consumer that uses
    #   kafka client object instance
-   @kafka_consumer ||= Builder.call.consumer(
-     *ApiAdapter.consumer(consumer_group)
+   @kafka_consumer ||= Builder.call(consumer_group).consumer(
+     **ApiAdapter.consumer(consumer_group)
    ).tap do |consumer|
      consumer_group.topics.each do |topic|
-       consumer.subscribe(*ApiAdapter.subscribe(topic))
+       settings = ApiAdapter.subscribe(topic)
+
+       consumer.subscribe(settings[0], **settings[1])
      end
    end
  rescue Kafka::ConnectionError
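The splat-to-keyword changes here and in Builder/ApiAdapter (each_batch(**settings), pause(*args, **kwargs), subscribe(settings[0], **settings[1])) exist to satisfy the keyword-argument separation of Ruby 2.7+, matching the gemspec's new required_ruby_version. The pause flow in isolation, with illustrative values:

    details = {
      args: ['events', 0], # mapped topic name and partition
      kwargs: { timeout: 1, max_timeout: 10, exponential_backoff: true }
    }

    args, kwargs = details.values_at(:args, :kwargs)
    kafka_consumer.pause(*args, **kwargs) # the hash no longer rides along implicitly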
data/lib/karafka/connection/listener.rb CHANGED
@@ -47,10 +47,10 @@
      end
    end
  # This is on purpose - see the notes for this method
- # rubocop:disable RescueException
+ # rubocop:disable Lint/RescueException
  rescue Exception => e
    Karafka.monitor.instrument('connection.listener.fetch_loop.error', caller: self, error: e)
- # rubocop:enable RescueException
+ # rubocop:enable Lint/RescueException
    # We can stop client without a problem, as it will reinitialize itself when running the
    # `fetch_loop` again
    @client.stop
data/lib/karafka/consumers/batch_metadata.rb ADDED
@@ -0,0 +1,10 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Consumers
+     # Brings the batch metadata into consumers that support batch_fetching
+     module BatchMetadata
+       attr_accessor :batch_metadata
+     end
+   end
+ end
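In a batch-fetching consumer this surfaces as a batch_metadata reader that replaces the old metadata one. A usage sketch; the consumer class and helper are illustrative, and the exact fields available on the object built by Params::Builders::BatchMetadata are an assumption here:

    class EventsConsumer < ApplicationConsumer
      def consume
        # Batch-level details are populated by BatchDelegator before #consume runs
        Karafka.logger.info("Consuming a batch from #{batch_metadata.topic}")

        params_batch.each { |params| process(params) } # process is a hypothetical app method
      end
    end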
data/lib/karafka/consumers/includer.rb CHANGED
@@ -16,7 +16,7 @@

    bind_backend(consumer, topic)
    bind_params(consumer, topic)
-   bind_metadata(consumer, topic)
+   bind_batch_metadata(consumer, topic)
    bind_responders(consumer, topic)
  end

@@ -40,13 +40,14 @@
    consumer.extend(SingleParams)
  end

- # Adds an option to work with metadata for consumer instances that have batch fetching
+ # Adds an option to work with batch metadata for consumer instances that have
+ # batch fetching enabled
  # @param consumer [Karafka::BaseConsumer] consumer instance
  # @param topic [Karafka::Routing::Topic] topic of a consumer class
- def bind_metadata(consumer, topic)
+ def bind_batch_metadata(consumer, topic)
    return unless topic.batch_fetching

-   consumer.extend(Metadata)
+   consumer.extend(BatchMetadata)
  end

  # Adds responders support for topics and consumers with responders defined for them
data/lib/karafka/contracts/consumer_group.rb CHANGED
@@ -32,6 +32,7 @@
  required(:offset_retention_time).maybe(:integer)
  required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
  required(:fetcher_max_queue_size).filled(:int?, gt?: 0)
+ required(:assignment_strategy).value(:any)
  required(:connect_timeout).filled { (int? | float?) & gt?(0) }
  required(:reconnect_timeout).filled { (int? | float?) & gteq?(0) }
  required(:socket_timeout).filled { (int? | float?) & gt?(0) }
@@ -70,13 +71,13 @@

  # Uri rule to check if uri is in a Karafka acceptable format
  rule(:seed_brokers) do
-   if value&.is_a?(Array) && !value.all?(&method(:kafka_uri?))
+   if value.is_a?(Array) && !value.all?(&method(:kafka_uri?))
      key.failure(:invalid_broker_schema)
    end
  end

  rule(:topics) do
-   if value&.is_a?(Array)
+   if value.is_a?(Array)
      names = value.map { |topic| topic[:name] }

      key.failure(:topics_names_not_unique) if names.size != names.uniq.size
@@ -84,7 +85,7 @@
  end

  rule(:topics) do
-   if value&.is_a?(Array)
+   if value.is_a?(Array)
      value.each_with_index do |topic, index|
        TOPIC_CONTRACT.call(topic).errors.each do |error|
          key([:topics, index, error.path[0]]).failure(error.text)
@@ -93,6 +94,10 @@
    end
  end

+ rule(:assignment_strategy) do
+   key.failure(:does_not_respond_to_call) unless value.respond_to?(:call)
+ end
+
  rule(:ssl_client_cert, :ssl_client_cert_key) do
    if values[:ssl_client_cert] && !values[:ssl_client_cert_key]
      key(:ssl_client_cert_key).failure(:ssl_client_cert_with_ssl_client_cert_key)
@@ -178,9 +183,9 @@
  # @param value [String] potential RSA key value
  # @return [Boolean] is the given string a valid RSA key
  def valid_private_key?(value)
-   OpenSSL::PKey::RSA.new(value)
+   OpenSSL::PKey.read(value)
    true
- rescue OpenSSL::PKey::RSAError
+ rescue OpenSSL::PKey::PKeyError
    false
  end
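Two consequences of this hunk are worth spelling out. First, the assignment_strategy rule only demands #call, so even a lambda validates; the keyword signature below is an assumption about ruby-kafka's strategy protocol, not something this contract checks. Second, OpenSSL::PKey.read plus the broader OpenSSL::PKey::PKeyError rescue means ssl key values are no longer assumed to be RSA, so an EC private key now passes valid_private_key? as well.

    # Inside the Karafka::App.setup block; accepted because it responds to #call
    # (signature and return shape assumed, per ruby-kafka's pluggable strategy API)
    config.kafka.assignment_strategy = lambda do |cluster:, members:, partitions:|
      partitions.group_by { |p| members.keys[p.partition_id % members.keys.size] }
    end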