karafka 1.4.0.rc2 → 1.4.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 62798b3cc6f5f12fd89e13e4f1163aaf6f759d9cebf7862b591e7d3c6603bfc3
- data.tar.gz: f4b416b014de9d6b4da282735014d5e16ec4195db01b98798764efc7a9c8ce8d
+ metadata.gz: 315219748ef2c007721a9a193232b34add1792386d55764c9fd7d5054f70086e
+ data.tar.gz: 29d91273e9eba6dc86c834ce8dd3d727243c8d0d5d877072700a11753032bbfd
  SHA512:
- metadata.gz: 47848c70c13d6456c863e9c0811e08084f9ad1710ac299e2a954eed08e5b283eb990333c4443bfcaf70929d1fc85d597ff820b5321016fe19ef4d78951dd2853
- data.tar.gz: 961cfbc6c5be4acef1c897c685f591c1549c92ea95f438013ce405f073b1e85745b4413a726017bed8b328a76d100bdd4fc1f9b74eeffd6bca0561e2d4d7632c
+ metadata.gz: cb999b3707fbe26fc631f68897d5924f7f5378b7bcb4a1854d4fab0670ab0d91fffce8692937c36f494963fed8be8ad99d841ddaea917aa65bb33c0bc87a7cd9
+ data.tar.gz: adcc4898e6bc94873bbb33da5b7ffb61b95738e77113e7da105a4174ed81399d1dbb33f3f1a20a56cc8870c6cdab9f21dbe15b420c8d8488cca038e2a47d32f9
checksums.yaml.gz.sig CHANGED
Binary file
data.tar.gz.sig CHANGED
Binary file
data/.github/workflows/ci.yml CHANGED
@@ -1,6 +1,7 @@
  name: ci

  on:
+ pull_request:
  push:
  schedule:
  - cron: '0 1 * * *'
@@ -8,15 +9,16 @@ on:
  jobs:
  specs:
  runs-on: ubuntu-latest
+ needs: diffend
  strategy:
  fail-fast: false
  matrix:
  ruby:
+ - '3.0'
  - '2.7'
  - '2.6'
- - '2.5'
  include:
- - ruby: '2.7'
+ - ruby: '3.0'
  coverage: 'true'
  steps:
  - uses: actions/checkout@v2
@@ -40,6 +42,26 @@ jobs:
  env:
  GITHUB_COVERAGE: ${{matrix.coverage}}
  run: bundle exec rspec
+
+ diffend:
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ - name: Set up Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: 3.0
+ - name: Install latest bundler
+ run: gem install bundler --no-document
+ - name: Install Diffend plugin
+ run: bundle plugin install diffend
+ - name: Bundle Secure
+ run: bundle secure
+
  coditsu:
  runs-on: ubuntu-latest
  strategy:
data/.ruby-version CHANGED
@@ -1 +1 @@
- 2.7.1
+ 3.0.1
data/CHANGELOG.md CHANGED
@@ -1,9 +1,23 @@
  # Karafka framework changelog

- ## 1.4.0.rc2 (2020-08-25)
+ ## 1.4.4 (2021-04-19)
+ - Remove Ruby 2.5 support and update minimum Ruby requirement to 2.6
+ - Remove rake dependency
+
+ ## 1.4.3 (2021-03-24)
+ - Fixes for Ruby 3.0 compatibility
+
+ ## 1.4.2 (2021-02-16)
+ - Rescue Errno::EROFS in ensure_dir_exists (unasuke)
+
+ ## 1.4.1 (2020-12-04)
+ - Return non-zero exit code when printing usage
+ - Add support for :assignment_strategy for consumers
+
+ ## 1.4.0 (2020-09-05)
  - Rename `Karafka::Params::Metadata` to `Karafka::Params::BatchMetadata`
  ` Rename consumer `#metadata` to `#batch_metadata`
- - Separate metadata (including Karafka native metadata) from the root of params (backwards compatibility preseverd thanks to rabotyaga)
+ - Separate metadata (including Karafka native metadata) from the root of params (backwards compatibility preserved thanks to rabotyaga)
  - Remove metadata hash dependency
  - Remove params dependency on a hash in favour of PORO
  - Remove batch metadata dependency on a hash
data/Gemfile.lock CHANGED
@@ -1,14 +1,13 @@
  PATH
  remote: .
  specs:
- karafka (1.4.0.rc2)
+ karafka (1.4.4)
  dry-configurable (~> 0.8)
  dry-inflector (~> 0.1)
  dry-monitor (~> 0.3)
  dry-validation (~> 1.2)
  envlogic (~> 1.1)
  irb (~> 1.0)
- rake (>= 11.3)
  ruby-kafka (>= 1.0.0)
  thor (>= 0.20)
  waterdrop (~> 1.4.0)
@@ -17,116 +16,112 @@ PATH
  GEM
  remote: https://rubygems.org/
  specs:
- activesupport (6.0.3.2)
+ activesupport (6.1.3.1)
  concurrent-ruby (~> 1.0, >= 1.0.2)
- i18n (>= 0.7, < 2)
- minitest (~> 5.1)
- tzinfo (~> 1.1)
- zeitwerk (~> 2.2, >= 2.2.2)
+ i18n (>= 1.6, < 2)
+ minitest (>= 5.1)
+ tzinfo (~> 2.0)
+ zeitwerk (~> 2.3)
  byebug (11.1.3)
- concurrent-ruby (1.1.7)
- delivery_boy (1.0.1)
- king_konf (~> 0.3)
+ concurrent-ruby (1.1.8)
+ delivery_boy (1.1.0)
+ king_konf (~> 1.0)
  ruby-kafka (~> 1.0)
  diff-lcs (1.4.4)
- digest-crc (0.6.1)
- rake (~> 13.0)
- docile (1.3.2)
- dry-configurable (0.11.6)
+ digest-crc (0.6.3)
+ rake (>= 12.0.0, < 14.0.0)
+ docile (1.3.5)
+ dry-configurable (0.12.1)
  concurrent-ruby (~> 1.0)
- dry-core (~> 0.4, >= 0.4.7)
- dry-equalizer (~> 0.2)
+ dry-core (~> 0.5, >= 0.5.0)
  dry-container (0.7.2)
  concurrent-ruby (~> 1.0)
  dry-configurable (~> 0.1, >= 0.1.3)
- dry-core (0.4.9)
+ dry-core (0.5.0)
  concurrent-ruby (~> 1.0)
  dry-equalizer (0.3.0)
- dry-events (0.2.0)
+ dry-events (0.3.0)
  concurrent-ruby (~> 1.0)
- dry-core (~> 0.4)
- dry-equalizer (~> 0.2)
+ dry-core (~> 0.5, >= 0.5)
  dry-inflector (0.2.0)
- dry-initializer (3.0.3)
- dry-logic (1.0.7)
+ dry-initializer (3.0.4)
+ dry-logic (1.1.1)
  concurrent-ruby (~> 1.0)
- dry-core (~> 0.2)
- dry-equalizer (~> 0.2)
- dry-monitor (0.3.2)
+ dry-core (~> 0.5, >= 0.5)
+ dry-monitor (0.4.0)
  dry-configurable (~> 0.5)
- dry-core (~> 0.4)
- dry-equalizer (~> 0.2)
+ dry-core (~> 0.5, >= 0.5)
  dry-events (~> 0.2)
- dry-schema (1.5.3)
+ dry-schema (1.6.2)
  concurrent-ruby (~> 1.0)
  dry-configurable (~> 0.8, >= 0.8.3)
- dry-core (~> 0.4)
- dry-equalizer (~> 0.2)
+ dry-core (~> 0.5, >= 0.5)
  dry-initializer (~> 3.0)
  dry-logic (~> 1.0)
- dry-types (~> 1.4)
- dry-types (1.4.0)
+ dry-types (~> 1.5)
+ dry-types (1.5.1)
  concurrent-ruby (~> 1.0)
  dry-container (~> 0.3)
- dry-core (~> 0.4, >= 0.4.4)
- dry-equalizer (~> 0.3)
+ dry-core (~> 0.5, >= 0.5)
  dry-inflector (~> 0.1, >= 0.1.2)
  dry-logic (~> 1.0, >= 1.0.2)
- dry-validation (1.5.4)
+ dry-validation (1.6.0)
  concurrent-ruby (~> 1.0)
  dry-container (~> 0.7, >= 0.7.1)
  dry-core (~> 0.4)
  dry-equalizer (~> 0.2)
  dry-initializer (~> 3.0)
- dry-schema (~> 1.5)
+ dry-schema (~> 1.5, >= 1.5.2)
  envlogic (1.1.2)
  dry-inflector (~> 0.1)
  factory_bot (6.1.0)
  activesupport (>= 5.0.0)
- i18n (1.8.5)
+ i18n (1.8.10)
  concurrent-ruby (~> 1.0)
- io-console (0.5.6)
- irb (1.2.4)
- reline (>= 0.0.1)
- king_konf (0.3.7)
- minitest (5.14.1)
- rake (13.0.1)
- reline (0.1.4)
+ io-console (0.5.9)
+ irb (1.3.5)
+ reline (>= 0.1.5)
+ king_konf (1.0.0)
+ minitest (5.14.4)
+ rake (13.0.3)
+ reline (0.2.5)
  io-console (~> 0.5)
- rspec (3.9.0)
- rspec-core (~> 3.9.0)
- rspec-expectations (~> 3.9.0)
- rspec-mocks (~> 3.9.0)
- rspec-core (3.9.2)
- rspec-support (~> 3.9.3)
- rspec-expectations (3.9.2)
+ rspec (3.10.0)
+ rspec-core (~> 3.10.0)
+ rspec-expectations (~> 3.10.0)
+ rspec-mocks (~> 3.10.0)
+ rspec-core (3.10.1)
+ rspec-support (~> 3.10.0)
+ rspec-expectations (3.10.1)
  diff-lcs (>= 1.2.0, < 2.0)
- rspec-support (~> 3.9.0)
- rspec-mocks (3.9.1)
+ rspec-support (~> 3.10.0)
+ rspec-mocks (3.10.2)
  diff-lcs (>= 1.2.0, < 2.0)
- rspec-support (~> 3.9.0)
- rspec-support (3.9.3)
- ruby-kafka (1.2.0)
+ rspec-support (~> 3.10.0)
+ rspec-support (3.10.2)
+ ruby-kafka (1.3.0)
  digest-crc
- simplecov (0.19.0)
+ simplecov (0.21.2)
  docile (~> 1.1)
  simplecov-html (~> 0.11)
- simplecov-html (0.12.2)
- thor (1.0.1)
- thread_safe (0.3.6)
- tzinfo (1.2.7)
- thread_safe (~> 0.1)
- waterdrop (1.4.0)
+ simplecov_json_formatter (~> 0.1)
+ simplecov-html (0.12.3)
+ simplecov_json_formatter (0.1.2)
+ thor (1.1.0)
+ tzinfo (2.0.4)
+ concurrent-ruby (~> 1.0)
+ waterdrop (1.4.2)
  delivery_boy (>= 0.2, < 2.x)
  dry-configurable (~> 0.8)
  dry-monitor (~> 0.3)
  dry-validation (~> 1.2)
  ruby-kafka (>= 0.7.8)
  zeitwerk (~> 2.1)
- zeitwerk (2.4.0)
+ zeitwerk (2.4.2)

  PLATFORMS
  ruby
+ x86_64-linux

  DEPENDENCIES
  byebug
@@ -136,4 +131,4 @@ DEPENDENCIES
  simplecov

  BUNDLED WITH
- 2.1.4
+ 2.2.15
data/README.md CHANGED
@@ -1,10 +1,12 @@
  ![karafka logo](https://raw.githubusercontent.com/karafka/misc/master/logo/karafka_logotype_transparent2.png)

- [![Build Status](https://travis-ci.org/karafka/karafka.svg?branch=master)](https://travis-ci.org/karafka/karafka)
+ [![Build Status](https://github.com/karafka/karafka/actions/workflows/ci.yml/badge.svg)](https://github.com/karafka/karafka/actions/workflows/ci.yml)
+ [![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
+ [![Join the chat at https://gitter.im/karafka/karafka](https://badges.gitter.im/karafka/karafka.svg)](https://gitter.im/karafka/karafka)

- **Note**: Documentation presented here refers to **not yet** released Karafka `1.4.x`.
-
- If you are looking for the documentation for Karafka `1.3.x`, it can be found [here](https://github.com/karafka/wiki/tree/1.3).
+ **Note**: We're finishing the new Karafka `2.0` but for now, please use `1.4`. All the documentation presented here refers to `1.4`
+ ..
+ Documentation presented here refers to Karafka `1.4`.

  ## About Karafka

@@ -72,7 +74,7 @@ Karafka framework and Karafka team are __not__ related to Kafka streaming servic
  ## References

  * [Karafka framework](https://github.com/karafka/karafka)
- * [Karafka Travis CI](https://travis-ci.org/karafka/karafka)
+ * [Karafka GitHub Actions](https://github.com/karafka/karafka/actions)
  * [Karafka Coditsu](https://app.coditsu.io/karafka/repositories/karafka)

  ## Note on contributions
data/config/errors.yml CHANGED
@@ -37,3 +37,5 @@ en:
  Unknown consumer group
  does_not_exist:
  Given file does not exist or cannot be read
+ does_not_respond_to_call: >
+ needs to respond to a #call method
data/karafka.gemspec CHANGED
@@ -23,13 +23,12 @@ Gem::Specification.new do |spec|
  spec.add_dependency 'dry-validation', '~> 1.2'
  spec.add_dependency 'envlogic', '~> 1.1'
  spec.add_dependency 'irb', '~> 1.0'
- spec.add_dependency 'rake', '>= 11.3'
  spec.add_dependency 'ruby-kafka', '>= 1.0.0'
  spec.add_dependency 'thor', '>= 0.20'
  spec.add_dependency 'waterdrop', '~> 1.4.0'
  spec.add_dependency 'zeitwerk', '~> 2.1'

- spec.required_ruby_version = '>= 2.5.0'
+ spec.required_ruby_version = '>= 2.6.0'

  if $PROGRAM_NAME.end_with?('gem')
  spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
data/lib/karafka/assignment_strategies/round_robin.rb ADDED
@@ -0,0 +1,13 @@
+ # frozen_string_literal: true
+
+ module Karafka
+ # Strategies for Kafka partitions assignments
+ module AssignmentStrategies
+ # Standard RoundRobin strategy
+ class RoundRobin < SimpleDelegator
+ def initialize
+ super(Kafka::RoundRobinAssignmentStrategy.new)
+ end
+ end
+ end
+ end
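The new `Karafka::AssignmentStrategies::RoundRobin` class simply delegates to ruby-kafka's built-in round-robin assignor, and, per the config setting and contract rule further down, a replacement only has to respond to `#call`. A minimal sketch of a custom strategy under those assumptions; the class name and assignment logic are illustrative, and the keyword interface (`cluster:`, `members:`, `partitions:`) is assumed to match ruby-kafka 1.3's strategy signature:

```ruby
# Illustrative custom strategy: Karafka's contract only checks that the
# configured object responds to #call; ruby-kafka invokes it with the
# cluster metadata, the group members and the partitions needing owners.
class AlphabeticalAssignmentStrategy
  # @return [Hash] member id => array of partitions assigned to that member
  def call(cluster:, members:, partitions:)
    assignment = Hash.new { |hash, member_id| hash[member_id] = [] }
    member_ids = members.keys.sort

    # Naive example: deal partitions out to members in alphabetical order
    partitions.each_with_index do |partition, index|
      assignment[member_ids[index % member_ids.size]] << partition
    end

    assignment
  end
end
```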
@@ -19,6 +19,7 @@ module Karafka
  consumer: %i[
  session_timeout offset_commit_interval offset_commit_threshold
  offset_retention_time heartbeat_interval fetcher_max_queue_size
+ assignment_strategy
  ],
  subscribe: %i[start_from_beginning max_bytes_per_partition],
  consumption: %i[min_bytes max_bytes max_wait_time],
data/lib/karafka/cli.rb CHANGED
@@ -10,6 +10,8 @@ module Karafka
  class Cli < Thor
  package_name 'Karafka'

+ default_task :missingno
+
  class << self
  # Loads all Cli commands into Thor framework
  # This method should be executed before we run Karafka::Cli.start, otherwise we won't
@@ -20,6 +22,12 @@ module Karafka
  end
  end

+ # When there is a CLI crash, exit
+ # @return [true]
+ def exit_on_failure?
+ true
+ end
+
  private

  # @return [Array<Class>] Array with Cli action classes that can be used as commands
@@ -43,16 +43,16 @@ module Karafka
  end

  # Allows to set description of a given cli command
- # @param desc [String] Description of a given cli command
- def desc(desc)
- @desc ||= desc
+ # @param args [Array] All the arguments that Thor desc method accepts
+ def desc(*args)
+ @desc ||= args
  end

  # This method will bind a given Cli command into Karafka Cli
  # This method is a wrapper to way Thor defines its commands
  # @param cli_class [Karafka::Cli] Karafka cli_class
  def bind_to(cli_class)
- cli_class.desc name, @desc
+ cli_class.desc name, *@desc

  (@options || []).each { |option| cli_class.option(*option) }

@@ -34,7 +34,7 @@ module Karafka
  Bundler.read_file(
  Bundler.default_lockfile
  )
- ).dependencies.key?('rails')
+ ).dependencies.key?('railties')
  end

  # Install all required things for Karafka application in current directory
data/lib/karafka/cli/missingno.rb ADDED
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ module Karafka
+ class Cli < Thor
+ # Command that gets invoked when no method is provided when running the CLI
+ # It allows us to exit with exit code 1 instead of default 0 to indicate that something
+ # was missing
+ # @see https://github.com/karafka/karafka/issues/619
+ class Missingno < Base
+ desc 'Hidden command that gets invoked when no command is provided', hide: true
+
+ # Prints an error about the lack of command (nothing selected)
+ def call
+ Karafka.logger.error('No command provided')
+ exit 1
+ end
+ end
+ end
+ end
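Together with `default_task :missingno` registered in `cli.rb` above, Thor now routes an argument-less `karafka` invocation to this hidden command, so the process logs an error and exits with status 1 instead of quietly succeeding. A condensed, stand-alone sketch of the same Thor pattern; the class and messages here are illustrative, not Karafka's:

```ruby
require 'thor'

# Stand-alone illustration of the default_task + hidden fallback pattern
class ExampleCli < Thor
  # Thor 1.x asks CLIs to declare this; returning true makes failures exit non-zero
  def self.exit_on_failure?
    true
  end

  desc 'missingno', 'Hidden fallback used when no command is given', hide: true
  # Reports the missing command and signals failure to the calling shell
  def missingno
    warn 'No command provided'
    exit 1
  end

  default_task :missingno
end

ExampleCli.start(ARGV)
```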
@@ -31,14 +31,6 @@ module Karafka
  # part of the topics
  Karafka::Server.consumer_groups = cli.options[:consumer_groups]

- # Remove pidfile on stop, just before the server instance is going to be GCed
- # We want to delay the moment in which the pidfile is removed as much as we can,
- # so instead of removing it after the server stops running, we rely on the gc moment
- # when this object gets removed (it is a bit later), so it is closer to the actual
- # system process end. We do that, so monitoring and deployment tools that rely on a pid
- # won't alarm or start new system process up until the current one is finished
- ObjectSpace.define_finalizer(self, proc { send(:clean) })
-
  Karafka::Server.run
  end

@@ -60,6 +52,14 @@ module Karafka
  cli.options[:pid],
  'w'
  ) { |file| file.write(::Process.pid) }
+
+ # Remove pidfile on stop, just before the server instance is going to be GCed
+ # We want to delay the moment in which the pidfile is removed as much as we can,
+ # so instead of removing it after the server stops running, we rely on the gc moment
+ # when this object gets removed (it is a bit later), so it is closer to the actual
+ # system process end. We do that, so monitoring and deployment tools that rely on a pid
+ # won't alarm or start new system process up until the current one is finished
+ ObjectSpace.define_finalizer(self, proc { send(:clean) })
  end

  # Removes a pidfile (if exist)
@@ -48,30 +48,28 @@ module Karafka

  # Builds all the configuration settings for kafka#consumer method
  # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
- # @return [Array<Hash>] array with all the consumer arguments including hash with all
+ # @return [Hash] all the consumer keyword arguments including hash with all
  # the settings required by Kafka#consumer
  def consumer(consumer_group)
  settings = { group_id: consumer_group.id }
  settings = fetch_for(:consumer, consumer_group, settings)
- [sanitize(settings)]
+ sanitize(settings)
  end

  # Builds all the configuration settings for kafka consumer consume_each_batch and
  # consume_each_message methods
  # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
- # @return [Array<Hash>] Array with all the arguments required by consuming method
- # including hash with all the settings required by
+ # @return [Hash] hash with all the arguments required by consuming method
+ # including all the settings required by
  # Kafka::Consumer#consume_each_message and Kafka::Consumer#consume_each_batch method
  def consumption(consumer_group)
- [
- sanitize(
- fetch_for(
- :consumption,
- consumer_group,
- automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
- )
+ sanitize(
+ fetch_for(
+ :consumption,
+ consumer_group,
+ automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
  )
- ]
+ )
  end

  # Builds all the configuration settings for kafka consumer#subscribe method
@@ -10,7 +10,12 @@ module Karafka
  # to have a new Kafka client
  # @return [::Kafka::Client] returns a Kafka client
  def call(consumer_group)
- Kafka.new(*ApiAdapter.client(consumer_group))
+ settings = ApiAdapter.client(consumer_group)
+
+ Kafka.new(
+ settings[0],
+ **settings[1]
+ )
  end
  end
  end
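The `ApiAdapter` changes above stop wrapping settings in one-element arrays, and call sites like this one now pass the options hash with an explicit `**`. That is the Ruby 3.0 keyword-argument separation at work: a splatted trailing hash is no longer implicitly converted into keyword arguments. A small self-contained illustration; the method below is hypothetical, not ruby-kafka's API:

```ruby
# Hypothetical method with one positional argument plus keyword options
def connect(seed_brokers, client_id: 'karafka')
  [seed_brokers, client_id]
end

settings = [%w[kafka://127.0.0.1:9092], { client_id: 'example_app' }]

# Ruby 2.x: connect(*settings) treated the trailing hash as keyword arguments
# (with a deprecation warning on 2.7). Ruby 3.x: the hash stays positional,
# so that call raises ArgumentError (given 2, expected 1).
connect(settings[0], **settings[1])
# => [["kafka://127.0.0.1:9092"], "example_app"]
```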
@@ -33,9 +33,9 @@ module Karafka
  settings = ApiAdapter.consumption(consumer_group)

  if consumer_group.batch_fetching
- kafka_consumer.each_batch(*settings) { |batch| yield(batch, :batch) }
+ kafka_consumer.each_batch(**settings) { |batch| yield(batch, :batch) }
  else
- kafka_consumer.each_message(*settings) { |message| yield(message, :message) }
+ kafka_consumer.each_message(**settings) { |message| yield(message, :message) }
  end
  # @note We catch only the processing errors as any other are considered critical (exceptions)
  # and should require a client restart with a backoff
@@ -98,10 +98,12 @@ module Karafka
  # @note We don't cache the connection internally because we cache kafka_consumer that uses
  # kafka client object instance
  @kafka_consumer ||= Builder.call(consumer_group).consumer(
- *ApiAdapter.consumer(consumer_group)
+ **ApiAdapter.consumer(consumer_group)
  ).tap do |consumer|
  consumer_group.topics.each do |topic|
- consumer.subscribe(*ApiAdapter.subscribe(topic))
+ settings = ApiAdapter.subscribe(topic)
+
+ consumer.subscribe(settings[0], **settings[1])
  end
  end
  rescue Kafka::ConnectionError
@@ -5,6 +5,6 @@ module Karafka
  module Contracts
  # Regexp for validating format of groups and topics
  # @note It is not nested inside of the contracts, as it is used by couple of them
- TOPIC_REGEXP = /\A(\w|\-|\.)+\z/.freeze
+ TOPIC_REGEXP = /\A(\w|-|\.)+\z/.freeze
  end
  end
@@ -32,6 +32,7 @@ module Karafka
  required(:offset_retention_time).maybe(:integer)
  required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
  required(:fetcher_max_queue_size).filled(:int?, gt?: 0)
+ required(:assignment_strategy).value(:any)
  required(:connect_timeout).filled { (int? | float?) & gt?(0) }
  required(:reconnect_timeout).filled { (int? | float?) & gteq?(0) }
  required(:socket_timeout).filled { (int? | float?) & gt?(0) }
@@ -70,13 +71,13 @@

  # Uri rule to check if uri is in a Karafka acceptable format
  rule(:seed_brokers) do
- if value&.is_a?(Array) && !value.all?(&method(:kafka_uri?))
+ if value.is_a?(Array) && !value.all?(&method(:kafka_uri?))
  key.failure(:invalid_broker_schema)
  end
  end

  rule(:topics) do
- if value&.is_a?(Array)
+ if value.is_a?(Array)
  names = value.map { |topic| topic[:name] }

  key.failure(:topics_names_not_unique) if names.size != names.uniq.size
@@ -84,7 +85,7 @@
  end

  rule(:topics) do
- if value&.is_a?(Array)
+ if value.is_a?(Array)
  value.each_with_index do |topic, index|
  TOPIC_CONTRACT.call(topic).errors.each do |error|
  key([:topics, index, error.path[0]]).failure(error.text)
@@ -93,6 +94,10 @@
  end
  end

+ rule(:assignment_strategy) do
+ key.failure(:does_not_respond_to_call) unless value.respond_to?(:call)
+ end
+
  rule(:ssl_client_cert, :ssl_client_cert_key) do
  if values[:ssl_client_cert] && !values[:ssl_client_cert_key]
  key(:ssl_client_cert_key).failure(:ssl_client_cert_with_ssl_client_cert_key)
@@ -8,7 +8,7 @@ module Karafka
  class ClassMatcher
  # Regexp used to remove any non classy like characters that might be in the consumer
  # class name (if defined dynamically, etc)
- CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}.freeze
+ CONSTANT_REGEXP = %r{[?!=+\-*/\^|&\[\]<>%~\#:\s()]}.freeze

  private_constant :CONSTANT_REGEXP

@@ -29,17 +29,17 @@ module Karafka

  # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
  # to which we will be writing logs
- # We use this approach to log stuff to file and to the STDOUT at the same time
+ # We use this approach to log stuff to file and to the $stdout at the same time
  def target
  Karafka::Helpers::MultiDelegator
  .delegate(:write, :close)
- .to(STDOUT, file)
+ .to($stdout, file)
  end

  # Makes sure the log directory exists as long as we can write to it
  def ensure_dir_exists
  FileUtils.mkdir_p(File.dirname(log_path))
- rescue Errno::EACCES
+ rescue Errno::EACCES, Errno::EROFS
  nil
  end

@@ -16,6 +16,7 @@ module Karafka
  private_constant :CONTRACT

  def initialize
+ super
  @draws = Concurrent::Array.new
  end

@@ -8,9 +8,11 @@ module Karafka
  class ConsumerGroup
  extend Helpers::ConfigRetriever

- attr_reader :topics
- attr_reader :id
- attr_reader :name
+ attr_reader(
+ :topics,
+ :id,
+ :name
+ )

  # @param name [String, Symbol] raw name of this consumer group. Raw means, that it does not
  # yet have an application client_id namespace, this will be added here by default.
@@ -58,7 +58,7 @@ module Karafka
  def stop_supervised
  Karafka::App.stop!

- # Temporary patch until https://github.com/dry-rb/dry-configurable/issues/93 is fixed
+ # See https://github.com/dry-rb/dry-configurable/issues/93
  timeout = Thread.new { Karafka::App.config.shutdown_timeout }.join.value

  # We check from time to time (for the timeout period) if all the threads finished
@@ -89,6 +89,9 @@ module Karafka
  # are stored for further processing. Note, that each item in the queue represents a
  # response from a single broker
  setting :fetcher_max_queue_size, 10
+ # option assignment_strategy [Object] a strategy determining the assignment of
+ # partitions to the consumers.
+ setting :assignment_strategy, Karafka::AssignmentStrategies::RoundRobin.new
  # option max_bytes_per_partition [Integer] the maximum amount of data fetched
  # from a single partition at a time.
  setting :max_bytes_per_partition, 1_048_576
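For reference, overriding the new default from an application could look roughly like the sketch below, assuming the setting lives in the `kafka` namespace next to the options shown in this hunk; the app class and broker address are placeholders, and any object responding to `#call` passes the contract rule added earlier:

```ruby
# Sketch of a 1.4-style setup block overriding the assignment strategy
class ExampleApp < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    # Default added in this release; swap in any #call-able strategy instead
    config.kafka.assignment_strategy = Karafka::AssignmentStrategies::RoundRobin.new
  end
end
```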
@@ -3,5 +3,5 @@
  # Main module namespace
  module Karafka
  # Current Karafka version
- VERSION = '1.4.0.rc2'
+ VERSION = '1.4.4'
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka
  version: !ruby/object:Gem::Version
- version: 1.4.0.rc2
+ version: 1.4.4
  platform: ruby
  authors:
  - Maciej Mensfeld
@@ -36,7 +36,7 @@ cert_chain:
  2DND//YJUikn1zwbz1kT70XmHd97B4Eytpln7K+M1u2g1pHVEPW4owD/ammXNpUy
  nt70FcDD4yxJQ+0YNiHd0N8IcVBM1TMIVctMNQ==
  -----END CERTIFICATE-----
- date: 2020-08-30 00:00:00.000000000 Z
+ date: 2021-04-19 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: dry-configurable
@@ -122,20 +122,6 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: '1.0'
- - !ruby/object:Gem::Dependency
- name: rake
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '11.3'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ">="
- - !ruby/object:Gem::Version
- version: '11.3'
  - !ruby/object:Gem::Dependency
  name: ruby-kafka
  requirement: !ruby/object:Gem::Requirement
@@ -227,6 +213,7 @@ files:
  - karafka.gemspec
  - lib/karafka.rb
  - lib/karafka/app.rb
+ - lib/karafka/assignment_strategies/round_robin.rb
  - lib/karafka/attributes_map.rb
  - lib/karafka/backends/inline.rb
  - lib/karafka/base_consumer.rb
@@ -237,6 +224,7 @@ files:
  - lib/karafka/cli/flow.rb
  - lib/karafka/cli/info.rb
  - lib/karafka/cli/install.rb
+ - lib/karafka/cli/missingno.rb
  - lib/karafka/cli/server.rb
  - lib/karafka/code_reloader.rb
  - lib/karafka/connection/api_adapter.rb
@@ -311,14 +299,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 2.5.0
+ version: 2.6.0
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">"
+ - - ">="
  - !ruby/object:Gem::Version
- version: 1.3.1
+ version: '0'
  requirements: []
- rubygems_version: 3.1.4
+ rubygems_version: 3.2.15
  signing_key:
  specification_version: 4
  summary: Ruby based framework for working with Apache Kafka
metadata.gz.sig CHANGED
Binary file