karafka 2.0.0.rc1 → 2.0.0.rc2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 506ffb9aef3309eae2ee26e3283b7bc83859b26f4fe41995dca9d8f5e7bf0533
-  data.tar.gz: 14b39f0597676207bf9f2bf10b06c51ba539c3aa01959dfb2378c2e23941d240
+  metadata.gz: f4103f2661316cc4e24aab8bf9715472c04f6911b3e56bb3ecdab775a0e32139
+  data.tar.gz: 0174d1cd163ac4fd046e4d432c0a4e2a07e2d57494ada9846ec61b6c73c2e434
 SHA512:
-  metadata.gz: 4b2ad5ef4eff629abfc0088be0c400bd0f9420d24c05c1414541a4c167e7e6bf2b9735bf0e596358d8e3f3c2392bf2c2b4ba345cdb8f0226b54877ce111fd749
-  data.tar.gz: 93adbc64906ff4a03e67dee646a5fe696d825357753bb804c690036800c947edaa186b67e5da8db115a0cc4a9efa46e6b1e061a608e496255bbfdc6ddeb60c14
+  metadata.gz: b3214a7e89730e932dbfbd3e5463ab0ce592d06589b5940f0987ded07f738d1f13557ae1d8251d8971beae7334c91bc655a8f64dd83049520d7ca889b2519e69
+  data.tar.gz: 803ae22e05ad14025dc4bda185571c586727227154e201d979052223b8270b157d7bd8a9588c0aa074c9d5bf0ef31dba6a1a696c7c392171c585945343d2fd10
checksums.yaml.gz.sig CHANGED
Binary file
data/CHANGELOG.md CHANGED
@@ -1,5 +1,12 @@
 # Karafka framework changelog
 
+## 2.0.0.rc2 (2022-07-19)
+- Fix `example_consumer.rb.erb` `#shutdown` and `#revoked` signatures to the correct ones.
+- Improve the install user experience (print status and created files).
+- Change default `max_wait_time` from 10s to 5s.
+- Remove direct dependency on `dry-configurable` in favour of a home-brew implementation.
+- Remove direct dependency on `dry-validation` in favour of a home-brew implementation.
+
 ## 2.0.0-rc1 (2022-07-08)
 - Extract consumption partitioner out of listener inline code.
 - Introduce virtual partitioner concept for parallel processing of data from a single topic partition.
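The `max_wait_time` default drop from 10s to 5s only matters for applications that never set it explicitly. A minimal, hypothetical boot-file sketch for pinning the previous value (the broker address, client id and app class name here are assumptions, not part of this diff):

```ruby
# karafka.rb -- illustrative only
class KarafkaApp < Karafka::App
  setup do |config|
    config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' } # assumed broker address
    config.client_id = 'example_app'
    # rc2 lowers the default from 10_000 ms to 5_000 ms; set it back explicitly
    # if you depended on the longer wait.
    config.max_wait_time = 10_000
  end
end
```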
data/Gemfile.lock CHANGED
@@ -1,22 +1,20 @@
 PATH
   remote: .
   specs:
-    karafka (2.0.0.rc1)
-      dry-configurable (~> 0.13)
+    karafka (2.0.0.rc2)
       dry-monitor (~> 0.5)
-      dry-validation (~> 1.7)
       rdkafka (>= 0.10)
       thor (>= 0.20)
-      waterdrop (>= 2.3.1, < 3.0.0)
+      waterdrop (>= 2.3.3, < 3.0.0)
       zeitwerk (~> 2.3)
 
 GEM
   remote: https://rubygems.org/
   specs:
-    activejob (7.0.3)
-      activesupport (= 7.0.3)
+    activejob (7.0.3.1)
+      activesupport (= 7.0.3.1)
       globalid (>= 0.3.6)
-    activesupport (7.0.3)
+    activesupport (7.0.3.1)
       concurrent-ruby (~> 1.0, >= 1.0.2)
       i18n (>= 1.6, < 2)
       minitest (>= 5.1)
@@ -28,51 +26,25 @@ GEM
     dry-configurable (0.15.0)
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.6)
-    dry-container (0.9.0)
-      concurrent-ruby (~> 1.0)
-      dry-configurable (~> 0.13, >= 0.13.0)
-    dry-core (0.7.1)
+    dry-core (0.8.0)
       concurrent-ruby (~> 1.0)
     dry-events (0.3.0)
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.5, >= 0.5)
-    dry-inflector (0.2.1)
-    dry-initializer (3.1.1)
-    dry-logic (1.2.0)
-      concurrent-ruby (~> 1.0)
-      dry-core (~> 0.5, >= 0.5)
-    dry-monitor (0.5.0)
+    dry-monitor (0.6.1)
       dry-configurable (~> 0.13, >= 0.13.0)
       dry-core (~> 0.5, >= 0.5)
       dry-events (~> 0.2)
-    dry-schema (1.9.3)
-      concurrent-ruby (~> 1.0)
-      dry-configurable (~> 0.13, >= 0.13.0)
-      dry-core (~> 0.5, >= 0.5)
-      dry-initializer (~> 3.0)
-      dry-logic (~> 1.0)
-      dry-types (~> 1.5)
-    dry-types (1.5.1)
-      concurrent-ruby (~> 1.0)
-      dry-container (~> 0.3)
-      dry-core (~> 0.5, >= 0.5)
-      dry-inflector (~> 0.1, >= 0.1.2)
-      dry-logic (~> 1.0, >= 1.0.2)
-    dry-validation (1.8.1)
-      concurrent-ruby (~> 1.0)
-      dry-container (~> 0.7, >= 0.7.1)
-      dry-core (~> 0.5, >= 0.5)
-      dry-initializer (~> 3.0)
-      dry-schema (~> 1.8, >= 1.8.0)
+      zeitwerk (~> 2.5)
     factory_bot (6.2.1)
       activesupport (>= 5.0.0)
     ffi (1.15.5)
     globalid (1.0.0)
       activesupport (>= 5.0)
-    i18n (1.10.0)
+    i18n (1.12.0)
       concurrent-ruby (~> 1.0)
     mini_portile2 (2.8.0)
-    minitest (5.15.0)
+    minitest (5.16.2)
     rake (13.0.6)
     rdkafka (0.12.0)
       ffi (~> 1.15)
@@ -100,11 +72,9 @@ GEM
     thor (1.2.1)
     tzinfo (2.0.4)
       concurrent-ruby (~> 1.0)
-    waterdrop (2.3.1)
+    waterdrop (2.3.3)
      concurrent-ruby (>= 1.1)
-      dry-configurable (~> 0.13)
       dry-monitor (~> 0.5)
-      dry-validation (~> 1.7)
       rdkafka (>= 0.10)
       zeitwerk (~> 2.3)
     zeitwerk (2.6.0)
@@ -121,4 +91,4 @@ DEPENDENCIES
   simplecov
 
 BUNDLED WITH
-   2.3.11
+   2.3.15
data/README.md CHANGED
@@ -28,8 +28,6 @@ class EventsConsumer < ApplicationConsumer
 end
 ```
 
-Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming, processing and producing data, without having to focus on things that are not your business domain.
-
 Karafka **uses** threads to handle many messages at the same time in the same process. It does not require Rails but will integrate tightly with any Ruby on Rails applications to make event processing dead simple.
 
 ## Getting started
data/config/errors.yml CHANGED
@@ -1,9 +1,52 @@
 en:
-  dry_validation:
-    errors:
+  validations:
+    config:
+      missing: needs to be present
+      client_id_format: 'needs to be a string with a Kafka accepted format'
+      license.entity_format: needs to be a string
+      license.token_format: needs to be either false or a string
+      license.expires_on_format: needs to be a valid date
+      concurrency_format: needs to be an integer bigger than 0
+      consumer_mapper_format: needs to be present
+      consumer_persistence_format: needs to be either true or false
+      pause_timeout_format: needs to be an integer bigger than 0
+      pause_max_timeout_format: needs to be an integer bigger than 0
+      pause_with_exponential_backoff_format: needs to be either true or false
+      shutdown_timeout_format: needs to be an integer bigger than 0
+      max_wait_time_format: needs to be an integer bigger than 0
+      kafka_format: needs to be a filled hash
+      internal.status_format: needs to be present
+      internal.process_format: needs to be present
+      internal.routing.builder_format: needs to be present
+      internal.routing.subscription_groups_builder_format: needs to be present
+      key_must_be_a_symbol: All keys under the kafka settings scope need to be symbols
       max_timeout_vs_pause_max_timeout: pause_timeout must be less or equal to pause_max_timeout
       shutdown_timeout_vs_max_wait_time: shutdown_timeout must be more than max_wait_time
-      topics_names_not_unique: all topic names within a single consumer group must be unique
-      required_usage_count: Given topic must be used at least once
+
+    server_cli_options:
+      missing: needs to be present
       consumer_groups_inclusion: Unknown consumer group
-      kafka_key_must_be_a_symbol: All keys under the kafka settings scope need to be symbols
+
+    consumer_group_topic:
+      missing: needs to be present
+      name_format: 'needs to be a string with a Kafka accepted format'
+      deserializer_format: needs to be present
+      manual_offset_management_format: needs to be either true or false
+      consumer_format: needs to be present
+      id_format: 'needs to be a string with a Kafka accepted format'
+      initial_offset_format: needs to be either earliest or latest
+
+    consumer_group:
+      missing: needs to be present
+      topics_names_not_unique: all topic names within a single consumer group must be unique
+      id_format: 'needs to be a string with a Kafka accepted format'
+      topics_format: needs to be a non-empty array
+
+    job_options:
+      missing: needs to be present
+      dispatch_method_format: needs to be either :produce_async or :produce_sync
+      partitioner_format: 'needs to respond to #call'
+
+    test:
+      missing: needs to be present
+      id_format: needs to be a String
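The contracts further down in this release read their messages straight from this file. A small standalone sketch of that lookup, mirroring the `fetch` chain used throughout the diff (the relative path is an assumption for the sake of the example):

```ruby
require 'yaml'

# mirrors the lookup performed in the reworked contracts below
messages = YAML.safe_load(
  File.read(File.join('config', 'errors.yml'))
).fetch('en').fetch('validations').fetch('config')

messages.fetch('concurrency_format') # => "needs to be an integer bigger than 0"
```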
data/karafka.gemspec CHANGED
@@ -12,16 +12,14 @@ Gem::Specification.new do |spec|
   spec.authors = ['Maciej Mensfeld']
   spec.email = %w[maciej@mensfeld.pl]
   spec.homepage = 'https://karafka.io'
-  spec.summary = 'Ruby based framework for working with Apache Kafka'
+  spec.summary = 'Ruby framework for working with Apache Kafka'
   spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
   spec.licenses = ['LGPL-3.0', 'Commercial']
 
-  spec.add_dependency 'dry-configurable', '~> 0.13'
   spec.add_dependency 'dry-monitor', '~> 0.5'
-  spec.add_dependency 'dry-validation', '~> 1.7'
   spec.add_dependency 'rdkafka', '>= 0.10'
   spec.add_dependency 'thor', '>= 0.20'
-  spec.add_dependency 'waterdrop', '>= 2.3.1', '< 3.0.0'
+  spec.add_dependency 'waterdrop', '>= 2.3.3', '< 3.0.0'
   spec.add_dependency 'zeitwerk', '~> 2.3'
 
   spec.required_ruby_version = '>= 2.6.0'
@@ -7,9 +7,15 @@ module Karafka
     # we want to keep ActiveJob related Karafka components outside of the core Karafka code and
     # all in the same place
     class JobOptionsContract < Contracts::Base
-      params do
-        optional(:dispatch_method).value(included_in?: %i[produce_async produce_sync])
+      configure do |config|
+        config.error_messages = YAML.safe_load(
+          File.read(
+            File.join(Karafka.gem_root, 'config', 'errors.yml')
+          )
+        ).fetch('en').fetch('validations').fetch('job_options')
       end
+
+      optional(:dispatch_method) { |val| %i[produce_async produce_sync].include?(val) }
     end
   end
 end
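For reference, the option validated here is the one set per job class via `karafka_options`. A hypothetical job using the validated `dispatch_method` key, assuming the Karafka ActiveJob integration is loaded (the job class and queue name are made up):

```ruby
# app/jobs/example_job.rb -- illustrative usage of the option validated above
class ExampleJob < ActiveJob::Base
  queue_as :default

  # must be :produce_async or :produce_sync to pass JobOptionsContract
  karafka_options(dispatch_method: :produce_async)

  def perform(*args)
    # job logic goes here
  end
end
```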
@@ -7,6 +7,8 @@ module Karafka
   class Cli < Thor
     # Install Karafka Cli action
     class Install < Base
+      include Helpers::Colorize
+
       desc 'Install all required things for Karafka application in current directory'
 
       # Directories created by default
@@ -42,14 +44,25 @@ module Karafka
           FileUtils.mkdir_p Karafka.root.join(dir)
         end
 
+        puts
+        puts 'Installing Karafka framework...'
+        puts 'Ruby on Rails detected...' if rails?
+        puts
+
         INSTALL_FILES_MAP.each do |source, target|
-          target = Karafka.root.join(target)
+          pathed_target = Karafka.root.join(target)
 
           template = File.read(Karafka.core_root.join("templates/#{source}"))
           render = ::ERB.new(template, trim_mode: '-').result(binding)
 
-          File.open(target, 'w') { |file| file.write(render) }
+          File.open(pathed_target, 'w') { |file| file.write(render) }
+
+          puts "#{green('Created')} #{target}"
         end
+
+        puts
+        puts("Installation #{green('completed')}. Have fun!")
+        puts
       end
 
       # @return [Boolean] true if we have Rails loaded
@@ -5,6 +5,8 @@ module Karafka
   class Cli < Thor
     # Server Karafka Cli action
     class Server < Base
+      include Helpers::Colorize
+
       desc 'Start the Karafka server (short-cut alias: "s")'
       option aliases: 's'
       option :consumer_groups, type: :array, default: nil, aliases: :g
@@ -31,11 +33,11 @@
 
         if Karafka.pro?
           Karafka.logger.info(
-            "\033[0;32mThank you for investing in the Karafka Pro subscription!\033[0m\n"
+            green('Thank you for investing in the Karafka Pro subscription!')
           )
         else
           Karafka.logger.info(
-            "\033[0;31mYou like Karafka? Please consider getting a Pro subscription!\033[0m\n"
+            red('You like Karafka? Please consider getting a Pro version!')
           )
         end
       end
@@ -3,20 +3,14 @@
 module Karafka
   module Contracts
     # Base contract for all Karafka contracts
-    class Base < Dry::Validation::Contract
-      config.messages.load_paths << File.join(Karafka.gem_root, 'config', 'errors.yml')
-
+    class Base < ::WaterDrop::Contractable::Contract
       # @param data [Hash] data for validation
       # @return [Boolean] true if all good
       # @raise [Errors::InvalidConfigurationError] invalid configuration error
       # @note We use contracts only in the config validation context, so no need to add support
       #   for multiple error classes. It will be added when it will be needed.
       def validate!(data)
-        result = call(data)
-
-        return true if result.success?
-
-        raise Errors::InvalidConfigurationError, result.errors.to_h
+        super(data, Errors::InvalidConfigurationError)
      end
     end
   end
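A quick sketch of how this base API is exercised: `validate!` either returns `true` or raises, as documented above. The concrete contract and the data hashes below are chosen only for illustration:

```ruby
# illustrative only -- any contract inheriting from Karafka::Contracts::Base works the same way
contract = Karafka::ActiveJob::JobOptionsContract.new

contract.validate!(dispatch_method: :produce_async) # => true
contract.validate!(dispatch_method: :nope)          # raises Karafka::Errors::InvalidConfigurationError
```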
@@ -9,71 +9,90 @@ module Karafka
     # validated per each route (topic + consumer_group) because they can be overwritten,
     # so we validate all of that once all the routes are defined and ready.
     class Config < Base
-      params do
-        # License validity happens in the licenser. Here we do only the simple consistency checks
-        required(:license).schema do
-          required(:token) { bool? | str? }
-          required(:entity) { str? }
-          required(:expires_on) { date? }
+      configure do |config|
+        config.error_messages = YAML.safe_load(
+          File.read(
+            File.join(Karafka.gem_root, 'config', 'errors.yml')
+          )
+        ).fetch('en').fetch('validations').fetch('config')
+      end
+
+      # License validity happens in the licenser. Here we do only the simple consistency checks
+      nested(:license) do
+        required(:token) { |val| [true, false].include?(val) || val.is_a?(String) }
+        required(:entity) { |val| val.is_a?(String) }
+        required(:expires_on) { |val| val.is_a?(Date) }
+      end
+
+      required(:client_id) { |val| val.is_a?(String) && Contracts::TOPIC_REGEXP.match?(val) }
+      required(:concurrency) { |val| val.is_a?(Integer) && val.positive? }
+      required(:consumer_mapper) { |val| !val.nil? }
+      required(:consumer_persistence) { |val| [true, false].include?(val) }
+      required(:pause_timeout) { |val| val.is_a?(Integer) && val.positive? }
+      required(:pause_max_timeout) { |val| val.is_a?(Integer) && val.positive? }
+      required(:pause_with_exponential_backoff) { |val| [true, false].include?(val) }
+      required(:shutdown_timeout) { |val| val.is_a?(Integer) && val.positive? }
+      required(:max_wait_time) { |val| val.is_a?(Integer) && val.positive? }
+      required(:kafka) { |val| val.is_a?(Hash) && !val.empty? }
+
+      # We validate internals just to be sure, that they are present and working
+      nested(:internal) do
+        required(:status) { |val| !val.nil? }
+        required(:process) { |val| !val.nil? }
+
+        nested(:routing) do
+          required(:builder) { |val| !val.nil? }
+          required(:subscription_groups_builder) { |val| !val.nil? }
+        end
+
+        nested(:processing) do
+          required(:jobs_builder) { |val| !val.nil? }
+          required(:scheduler) { |val| !val.nil? }
+          required(:coordinator_class) { |val| !val.nil? }
+          required(:partitioner_class) { |val| !val.nil? }
         end
 
-        required(:client_id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
-        required(:concurrency) { int? & gt?(0) }
-        required(:consumer_mapper).filled
-        required(:consumer_persistence).filled(:bool?)
-        required(:pause_timeout) { int? & gt?(0) }
-        required(:pause_max_timeout) { int? & gt?(0) }
-        required(:pause_with_exponential_backoff).filled(:bool?)
-        required(:shutdown_timeout) { int? & gt?(0) }
-        required(:max_wait_time) { int? & gt?(0) }
-        required(:kafka).filled(:hash)
-
-        # We validate internals just to be sure, that they are present and working
-        required(:internal).schema do
-          required(:status)
-          required(:process)
-
-          required(:routing).schema do
-            required(:builder)
-            required(:subscription_groups_builder)
-          end
-
-          required(:processing).schema do
-            required(:jobs_builder)
-            required(:scheduler)
-            required(:coordinator_class)
-            required(:partitioner_class)
-          end
-
-          required(:active_job).schema do
-            required(:dispatcher)
-            required(:job_options_contract)
-            required(:consumer_class)
-          end
+        nested(:active_job) do
+          required(:dispatcher) { |val| !val.nil? }
+          required(:job_options_contract) { |val| !val.nil? }
+          required(:consumer_class) { |val| !val.nil? }
         end
       end
 
-      # rdkafka requires all the keys to be strings, so we ensure that
-      rule(:kafka) do
-        next unless value.is_a?(Hash)
+      virtual do |data, errors|
+        next unless errors.empty?
+
+        detected_errors = []
 
-        value.each_key do |key|
+        data.fetch(:kafka).each_key do |key|
           next if key.is_a?(Symbol)
 
-          key(:"kafka.#{key}").failure(:kafka_key_must_be_a_symbol)
+          detected_errors << [[:kafka, key], :key_must_be_a_symbol]
         end
+
+        detected_errors
       end
 
-      rule(:pause_timeout, :pause_max_timeout) do
-        if values[:pause_timeout].to_i > values[:pause_max_timeout].to_i
-          key(:pause_timeout).failure(:max_timeout_vs_pause_max_timeout)
-        end
+      virtual do |data, errors|
+        next unless errors.empty?
+
+        pause_timeout = data.fetch(:pause_timeout)
+        pause_max_timeout = data.fetch(:pause_max_timeout)
+
+        next if pause_timeout <= pause_max_timeout
+
+        [[%i[pause_timeout], :max_timeout_vs_pause_max_timeout]]
       end
 
-      rule(:shutdown_timeout, :max_wait_time) do
-        if values[:max_wait_time].to_i >= values[:shutdown_timeout].to_i
-          key(:shutdown_timeout).failure(:shutdown_timeout_vs_max_wait_time)
-        end
+      virtual do |data, errors|
+        next unless errors.empty?
+
+        shutdown_timeout = data.fetch(:shutdown_timeout)
+        max_wait_time = data.fetch(:max_wait_time)
+
+        next if max_wait_time < shutdown_timeout
+
+        [[%i[shutdown_timeout], :shutdown_timeout_vs_max_wait_time]]
       end
     end
   end
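As an illustration of the rules above (a hypothetical boot file, not part of the diff): a non-positive `concurrency` fails `val.is_a?(Integer) && val.positive?`, so configuration is rejected with `Karafka::Errors::InvalidConfigurationError`, assuming validation runs during `setup` as in the stock flow:

```ruby
# illustrative only
class KarafkaApp < Karafka::App
  setup do |config|
    config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' } # assumed broker address
    config.concurrency = 0 # rejected by the Config contract above
  end
end
# => raises Karafka::Errors::InvalidConfigurationError
```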
@@ -4,32 +4,39 @@ module Karafka
   module Contracts
     # Contract for single full route (consumer group + topics) validation.
     class ConsumerGroup < Base
-      # Internal contract for sub-validating topics schema
-      TOPIC_CONTRACT = ConsumerGroupTopic.new.freeze
+      configure do |config|
+        config.error_messages = YAML.safe_load(
+          File.read(
+            File.join(Karafka.gem_root, 'config', 'errors.yml')
+          )
+        ).fetch('en').fetch('validations').fetch('consumer_group')
+      end
 
-      private_constant :TOPIC_CONTRACT
+      required(:id) { |id| id.is_a?(String) && Contracts::TOPIC_REGEXP.match?(id) }
+      required(:topics) { |topics| topics.is_a?(Array) && !topics.empty? }
 
-      params do
-        required(:id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
-        required(:topics).value(:array, :filled?)
-      end
+      virtual do |data, errors|
+        next unless errors.empty?
 
-      rule(:topics) do
-        if value.is_a?(Array)
-          names = value.map { |topic| topic[:name] }
+        names = data.fetch(:topics).map { |topic| topic[:name] }
 
-          key.failure(:topics_names_not_unique) if names.size != names.uniq.size
-        end
+        next if names.size == names.uniq.size
+
+        [[%i[topics], :names_not_unique]]
       end
 
-      rule(:topics) do
-        if value.is_a?(Array)
-          value.each_with_index do |topic, index|
-            TOPIC_CONTRACT.call(topic).errors.each do |error|
-              key([:topics, index, error.path[0]]).failure(error.text)
-            end
+      virtual do |data, errors|
+        next unless errors.empty?
+
+        fetched_errors = []
+
+        data.fetch(:topics).each do |topic|
+          ConsumerGroupTopic.new.call(topic).errors.each do |key, value|
+            fetched_errors << [[topic, key].flatten, value]
           end
         end
+
+        fetched_errors
       end
     end
   end
@@ -4,24 +4,38 @@ module Karafka
   module Contracts
     # Consumer group topic validation rules.
     class ConsumerGroupTopic < Base
-      params do
-        required(:consumer).filled
-        required(:deserializer).filled
-        required(:id).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
-        required(:kafka).filled
-        required(:max_messages) { int? & gteq?(1) }
-        required(:initial_offset).filled(included_in?: %w[earliest latest])
-        required(:max_wait_time).filled { int? & gteq?(10) }
-        required(:manual_offset_management).filled(:bool?)
-        required(:name).filled(:str?, format?: Karafka::Contracts::TOPIC_REGEXP)
+      configure do |config|
+        config.error_messages = YAML.safe_load(
+          File.read(
+            File.join(Karafka.gem_root, 'config', 'errors.yml')
+          )
+        ).fetch('en').fetch('validations').fetch('consumer_group_topic')
       end
 
-      rule(:kafka) do
-        # This will trigger rdkafka validations that we catch and re-map the info and use dry
-        # compatible format
-        Rdkafka::Config.new(value).send(:native_config)
-      rescue Rdkafka::Config::ConfigError => e
-        key(:kafka).failure(e.message)
+      required(:consumer) { |consumer_group| !consumer_group.nil? }
+      required(:deserializer) { |deserializer| !deserializer.nil? }
+      required(:id) { |id| id.is_a?(String) && Contracts::TOPIC_REGEXP.match?(id) }
+      required(:kafka) { |kafka| kafka.is_a?(Hash) && !kafka.empty? }
+      required(:max_messages) { |mm| mm.is_a?(Integer) && mm >= 1 }
+      required(:initial_offset) { |io| %w[earliest latest].include?(io) }
+      required(:max_wait_time) { |mwt| mwt.is_a?(Integer) && mwt >= 10 }
+      required(:manual_offset_management) { |mmm| [true, false].include?(mmm) }
+      required(:name) { |name| name.is_a?(String) && Contracts::TOPIC_REGEXP.match?(name) }
+
+      virtual do |data, errors|
+        next unless errors.empty?
+
+        value = data.fetch(:kafka)
+
+        begin
+          # This will trigger rdkafka validations that we catch and re-map the info and use dry
+          # compatible format
+          Rdkafka::Config.new(value).send(:native_config)
+
+          nil
+        rescue Rdkafka::Config::ConfigError => e
+          [[%w[kafka], e.message]]
+        end
       end
     end
   end
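To illustrate the rdkafka pass-through above (the option name below is deliberately made up): an unrecognised kafka property makes the private `native_config` call raise `ConfigError`, which the virtual rule converts into a `[['kafka'], message]` error tuple.

```ruby
# illustrative only
begin
  Rdkafka::Config.new('definitely.not.a.real.option': true).send(:native_config)
rescue Rdkafka::Config::ConfigError => e
  puts e.message # rdkafka's own explanation of what is wrong
end
```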
@@ -4,17 +4,28 @@ module Karafka
   module Contracts
     # Contract for validating correctness of the server cli command options.
     class ServerCliOptions < Base
-      params do
-        optional(:consumer_groups).value(:array, :filled?)
+      configure do |config|
+        config.error_messages = YAML.safe_load(
+          File.read(
+            File.join(Karafka.gem_root, 'config', 'errors.yml')
+          )
+        ).fetch('en').fetch('validations').fetch('server_cli_options')
       end
 
-      rule(:consumer_groups) do
+      optional(:consumer_groups) { |cg| cg.is_a?(Array) && !cg.empty? }
+
+      virtual do |data, errors|
+        next unless errors.empty?
+        next unless data.key?(:consumer_groups)
+
+        value = data.fetch(:consumer_groups)
+
         # If there were no consumer_groups declared in the server cli, it means that we will
         # run all of them and no need to validate them here at all
-        if !value.nil? &&
-           !(value - Karafka::App.config.internal.routing.builder.map(&:name)).empty?
-          key(:consumer_groups).failure(:consumer_groups_inclusion)
-        end
+        next if value.nil?
+        next if (value - Karafka::App.config.internal.routing.builder.map(&:name)).empty?
+
+        [[%i[consumer_groups], :consumer_groups_inclusion]]
       end
     end
   end
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Helpers
+    # Simple wrapper for adding colors to strings
+    module Colorize
+      # @param string [String] string we want to have in green
+      # @return [String] green string
+      def green(string)
+        "\033[0;32m#{string}\033[0m"
+      end
+
+      # @param string [String] string we want to have in red
+      # @return [String] red string
+      def red(string)
+        "\033[0;31m#{string}\033[0m"
+      end
+    end
+  end
+end
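A tiny usage sketch of the new helper; the gem itself mixes it into the CLI commands shown above, while the class name here is hypothetical:

```ruby
# illustrative only
class Status
  include Karafka::Helpers::Colorize

  def report(ok)
    # wraps the text in ANSI green or red escape codes, exactly as the helper does
    puts(ok ? green('all good') : red('something went wrong'))
  end
end

Status.new.report(true)
```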
@@ -14,13 +14,17 @@ module Karafka
     module ActiveJob
       # Contract for validating the options that can be altered with `#karafka_options` per job
       # class that works with Pro features.
-      class JobOptionsContract < ::Karafka::ActiveJob::JobOptionsContract
-        # Dry types
-        Types = include Dry.Types()
-
-        params do
-          optional(:partitioner).value(Types.Interface(:call))
+      class JobOptionsContract < Contracts::Base
+        configure do |config|
+          config.error_messages = YAML.safe_load(
+            File.read(
+              File.join(Karafka.gem_root, 'config', 'errors.yml')
+            )
+          ).fetch('en').fetch('validations').fetch('job_options')
         end
+
+        optional(:dispatch_method) { |val| %i[produce_async produce_sync].include?(val) }
+        optional(:partitioner) { |val| val.respond_to?(:call) }
       end
     end
   end
@@ -12,7 +12,7 @@ module Karafka
     # enough and will still keep the code simple
     # @see Karafka::Setup::Configurators::Base for more details about configurators api
     class Config
-      extend Dry::Configurable
+      extend ::WaterDrop::Configurable
 
       # Defaults for kafka settings, that will be overwritten only if not present already
       KAFKA_DEFAULTS = {
@@ -62,7 +62,7 @@ module Karafka
       # options max_messages [Integer] how many messages do we want to fetch from Kafka in one go
       setting :max_messages, default: 1_000
       # option [Integer] number of milliseconds we can wait while fetching data
-      setting :max_wait_time, default: 10_000
+      setting :max_wait_time, default: 5_000
       # option shutdown_timeout [Integer] the number of milliseconds after which Karafka no
       # longer waits for the consumers to stop gracefully but instead we force terminate
       # everything.
@@ -123,6 +123,10 @@
         end
       end
 
+      # This will load all the defaults that can be later overwritten.
+      # Thanks to that we have an initial state out of the box.
+      configure
+
       class << self
         # Configuring method
         # @param block [Proc] block we want to execute with the config instance
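Because the defaults are now loaded eagerly via that `configure` call, they should be readable even before any user-level `setup`. A small sketch, with values taken from the defaults in this diff:

```ruby
# illustrative only -- assumes the eager defaults described above
Karafka::App.config.max_wait_time # => 5_000 (the new default)
Karafka::App.config.max_messages  # => 1_000
```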
@@ -7,10 +7,10 @@ class ExampleConsumer < ApplicationConsumer
   end
 
   # Run anything upon partition being revoked
-  # def on_revoked
+  # def revoked
   # end
 
   # Define here any teardown things you want when Karafka server stops
-  # def on_shutdown
+  # def shutdown
   # end
 end
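The renamed hooks in a concrete consumer, as a sketch; the topic handling logic is made up:

```ruby
# illustrative only
class EventsConsumer < ApplicationConsumer
  def consume
    messages.each { |message| puts message.payload }
  end

  # now `revoked`, not `on_revoked`
  def revoked
    # release partition-specific resources here
  end

  # now `shutdown`, not `on_shutdown`
  def shutdown
    # close connections, flush buffers, etc.
  end
end
```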
@@ -3,5 +3,5 @@
 # Main module namespace
 module Karafka
   # Current Karafka version
-  VERSION = '2.0.0.rc1'
+  VERSION = '2.0.0.rc2'
 end
data/lib/karafka.rb CHANGED
@@ -12,8 +12,6 @@
   openssl
   base64
   date
-  dry-configurable
-  dry-validation
   dry/events/publisher
   dry/monitor/notifications
   zeitwerk
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 2.0.0.rc1
+  version: 2.0.0.rc2
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -34,22 +34,8 @@ cert_chain:
   R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
   pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
   -----END CERTIFICATE-----
-date: 2022-07-08 00:00:00.000000000 Z
+date: 2022-07-19 00:00:00.000000000 Z
 dependencies:
-- !ruby/object:Gem::Dependency
-  name: dry-configurable
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '0.13'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '0.13'
 - !ruby/object:Gem::Dependency
   name: dry-monitor
   requirement: !ruby/object:Gem::Requirement
@@ -64,20 +50,6 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
        version: '0.5'
-- !ruby/object:Gem::Dependency
-  name: dry-validation
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.7'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.7'
 - !ruby/object:Gem::Dependency
   name: rdkafka
   requirement: !ruby/object:Gem::Requirement
@@ -112,7 +84,7 @@ dependencies:
     requirements:
     - - ">="
      - !ruby/object:Gem::Version
-        version: 2.3.1
+        version: 2.3.3
    - - "<"
      - !ruby/object:Gem::Version
        version: 3.0.0
@@ -122,7 +94,7 @@ dependencies:
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-        version: 2.3.1
+        version: 2.3.3
    - - "<"
      - !ruby/object:Gem::Version
        version: 3.0.0
@@ -212,6 +184,7 @@ files:
 - lib/karafka/env.rb
 - lib/karafka/errors.rb
 - lib/karafka/helpers/async.rb
+- lib/karafka/helpers/colorize.rb
 - lib/karafka/helpers/multi_delegator.rb
 - lib/karafka/instrumentation.rb
 - lib/karafka/instrumentation/callbacks/error.rb
@@ -308,5 +281,5 @@ requirements: []
 rubygems_version: 3.3.7
 signing_key:
 specification_version: 4
-summary: Ruby based framework for working with Apache Kafka
+summary: Ruby framework for working with Apache Kafka
 test_files: []
metadata.gz.sig CHANGED
Binary file