karafka 1.3.7 → 1.4.2

Files changed (42)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +1 -3
  3. data.tar.gz.sig +0 -0
  4. data/.github/workflows/ci.yml +23 -1
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +25 -0
  7. data/Gemfile.lock +57 -61
  8. data/README.md +2 -4
  9. data/config/errors.yml +2 -0
  10. data/karafka.gemspec +1 -2
  11. data/lib/karafka.rb +1 -1
  12. data/lib/karafka/assignment_strategies/round_robin.rb +13 -0
  13. data/lib/karafka/attributes_map.rb +1 -0
  14. data/lib/karafka/cli.rb +8 -0
  15. data/lib/karafka/cli/base.rb +4 -4
  16. data/lib/karafka/cli/missingno.rb +19 -0
  17. data/lib/karafka/connection/api_adapter.rb +5 -3
  18. data/lib/karafka/connection/batch_delegator.rb +5 -1
  19. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  20. data/lib/karafka/consumers/includer.rb +5 -4
  21. data/lib/karafka/consumers/single_params.rb +1 -1
  22. data/lib/karafka/contracts.rb +1 -1
  23. data/lib/karafka/contracts/consumer_group.rb +8 -3
  24. data/lib/karafka/helpers/class_matcher.rb +1 -1
  25. data/lib/karafka/instrumentation/logger.rb +3 -3
  26. data/lib/karafka/instrumentation/stdout_listener.rb +4 -2
  27. data/lib/karafka/params/batch_metadata.rb +26 -0
  28. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  29. data/lib/karafka/params/builders/params.rb +17 -15
  30. data/lib/karafka/params/builders/params_batch.rb +2 -2
  31. data/lib/karafka/params/metadata.rb +14 -29
  32. data/lib/karafka/params/params.rb +26 -50
  33. data/lib/karafka/params/params_batch.rb +15 -16
  34. data/lib/karafka/routing/builder.rb +1 -0
  35. data/lib/karafka/routing/consumer_group.rb +5 -3
  36. data/lib/karafka/serialization/json/deserializer.rb +2 -2
  37. data/lib/karafka/setup/config.rb +3 -0
  38. data/lib/karafka/version.rb +1 -1
  39. metadata +10 -21
  40. metadata.gz.sig +0 -0
  41. data/lib/karafka/consumers/metadata.rb +0 -10
  42. data/lib/karafka/params/builders/metadata.rb +0 -33
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: a4a06a7ae9c8de9ada4f029b4aa8c0e5946d127eed891c0676dc6cfa92d61aaa
-   data.tar.gz: 532c931751da662fe1b0aa7b4c5361effd710d7112a39fd094c93a3846bd04e9
+   metadata.gz: b67c595b8ce4b5ff1589517547f94c8e1ba4caf51361074949b7bae1c5368bf6
+   data.tar.gz: 17c507ddf4d2b4cd62eea3df3a1dec0cef5a257054f7d0321e1c71dcba0d400e
  SHA512:
-   metadata.gz: 56e43f67ef345d2a9f528be97107fa84d1a5d13f343bcc43ae3af4bc7d1af85cf01f438dd0c050ea799c5c6c55c8b680d9b9f2c4c10d37a846cefa55ffcdebf3
-   data.tar.gz: ba50c3a5b047b1afabd578708fed5ea5cd7cdf875333e31e129752ec83992c6cc8eb0d4e601438846229b273da709e8b7e1a3fd777ad0e0919dde2c7f2349293
+   metadata.gz: 818d4d75303eda22f8e923e7ade1c445f0c2e99aebdb7e56511b5fdf38ea39c047f61c0983bc8d38641676be2c2fcc901df9d2bc0d812c3454664dd73ab27b2e
+   data.tar.gz: 5c10aced279e74af09d42dd3367a1593abf11267cb09099d0d92a3b4e540f19fa3c06bd9e0f61f7025c3fc0774cde9f4902fb929ca9e24aa308f9d72e6a61def
checksums.yaml.gz.sig CHANGED
Binary file (signature bytes not shown)
data.tar.gz.sig CHANGED
Binary file
@@ -8,15 +8,17 @@ on:
  jobs:
    specs:
      runs-on: ubuntu-latest
+     needs: diffend
      strategy:
        fail-fast: false
        matrix:
          ruby:
+           - '3.0'
            - '2.7'
            - '2.6'
            - '2.5'
        include:
-         - ruby: '2.7'
+         - ruby: '3.0'
            coverage: 'true'
      steps:
        - uses: actions/checkout@v2
@@ -40,6 +42,26 @@ jobs:
        env:
          GITHUB_COVERAGE: ${{matrix.coverage}}
        run: bundle exec rspec
+
+   diffend:
+     runs-on: ubuntu-latest
+     strategy:
+       fail-fast: false
+     steps:
+       - uses: actions/checkout@v2
+         with:
+           fetch-depth: 0
+       - name: Set up Ruby
+         uses: ruby/setup-ruby@v1
+         with:
+           ruby-version: 3.0
+       - name: Install latest bundler
+         run: gem install bundler --no-document
+       - name: Install Diffend plugin
+         run: bundle plugin install diffend
+       - name: Bundle Secure
+         run: bundle secure
+
    coditsu:
      runs-on: ubuntu-latest
      strategy:
data/.ruby-version CHANGED
@@ -1 +1 @@
- 2.7.1
+ 3.0.0
data/CHANGELOG.md CHANGED
@@ -1,5 +1,30 @@
  # Karafka framework changelog

+ ## 1.4.2 (2021-02-16)
+ - Rescue Errno::EROFS in ensure_dir_exists (unasuke)
+
+ ## 1.4.1 (2020-12-04)
+ - Return non-zero exit code when printing usage
+ - Add support for :assignment_strategy for consumers
+
+ ## 1.4.0 (2020-09-05)
+ - Rename `Karafka::Params::Metadata` to `Karafka::Params::BatchMetadata`
+ - Rename consumer `#metadata` to `#batch_metadata`
+ - Separate metadata (including Karafka native metadata) from the root of params (backwards compatibility preserved thanks to rabotyaga)
+ - Remove metadata hash dependency
+ - Remove params dependency on a hash in favour of a PORO
+ - Remove batch metadata dependency on a hash
+ - Remove MultiJson in favour of JSON in the default deserializer
+ - Allow accessing all the metadata without accessing the payload
+ - Freeze params and underlying elements except for the mutable payload
+ - Provide access to the raw payload after deserialization
+ - Fix a bug where non-deserializable (error) params would be marked as deserialized after the first unsuccessful deserialization attempt
+ - Fix a bug where Karafka would mutate internal ruby-kafka state
+ - Fix a bug where the topic name in metadata would not be mapped using topic mappers
+ - Simplify the params and params batch API; the payload is not deserialized until `#payload` is used
+ - Remove the `#[]` API from params to prevent accessing raw data in any way other than via `#raw_payload`
+ - Make the params batch operations consistent, as params payload is deserialized only when accessed explicitly
+
  ## 1.3.7 (2020-08-11)
  - #599 - Allow metadata access without deserialization attempt (rabotyaga)
  - Sync with ruby-kafka `1.2.0` api
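To make the 1.4.0 params rework above concrete, here is a small before/after sketch (hypothetical consumer class, not part of the gem):

# Sketch of the 1.3 -> 1.4 params API migration inside a consumer.
class OrdersConsumer < Karafka::BaseConsumer
  def consume
    params_batch.each do |params|
      # 1.3.x: params acted like a Hash and params['payload'] triggered deserialization

      # 1.4.x: metadata is separated and the payload is explicit and lazy
      params.metadata.topic # metadata access, no deserialization involved
      params.raw_payload    # raw data as received from Kafka
      params.payload        # deserializer runs here, on first access only
    end
  end
end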
data/Gemfile.lock CHANGED
@@ -1,46 +1,44 @@
  PATH
    remote: .
    specs:
-     karafka (1.3.7)
+     karafka (1.4.2)
        dry-configurable (~> 0.8)
        dry-inflector (~> 0.1)
        dry-monitor (~> 0.3)
        dry-validation (~> 1.2)
        envlogic (~> 1.1)
        irb (~> 1.0)
-       multi_json (>= 1.12)
        rake (>= 11.3)
        ruby-kafka (>= 1.0.0)
        thor (>= 0.20)
-       waterdrop (~> 1.3.0)
+       waterdrop (~> 1.4.0)
        zeitwerk (~> 2.1)

  GEM
    remote: https://rubygems.org/
    specs:
-     activesupport (6.0.3.2)
+     activesupport (6.1.3)
        concurrent-ruby (~> 1.0, >= 1.0.2)
-       i18n (>= 0.7, < 2)
-       minitest (~> 5.1)
-       tzinfo (~> 1.1)
-       zeitwerk (~> 2.2, >= 2.2.2)
+       i18n (>= 1.6, < 2)
+       minitest (>= 5.1)
+       tzinfo (~> 2.0)
+       zeitwerk (~> 2.3)
      byebug (11.1.3)
-     concurrent-ruby (1.1.7)
-     delivery_boy (1.0.1)
-       king_konf (~> 0.3)
+     concurrent-ruby (1.1.8)
+     delivery_boy (1.1.0)
+       king_konf (~> 1.0)
        ruby-kafka (~> 1.0)
      diff-lcs (1.4.4)
-     digest-crc (0.6.1)
-       rake (~> 13.0)
-     docile (1.3.2)
-     dry-configurable (0.11.6)
+     digest-crc (0.6.3)
+       rake (>= 12.0.0, < 14.0.0)
+     docile (1.3.5)
+     dry-configurable (0.12.1)
        concurrent-ruby (~> 1.0)
-       dry-core (~> 0.4, >= 0.4.7)
-       dry-equalizer (~> 0.2)
+       dry-core (~> 0.5, >= 0.5.0)
      dry-container (0.7.2)
        concurrent-ruby (~> 1.0)
        dry-configurable (~> 0.1, >= 0.1.3)
-     dry-core (0.4.9)
+     dry-core (0.5.0)
        concurrent-ruby (~> 1.0)
      dry-equalizer (0.3.0)
      dry-events (0.2.0)
@@ -48,87 +46,85 @@ GEM
        dry-core (~> 0.4)
        dry-equalizer (~> 0.2)
      dry-inflector (0.2.0)
-     dry-initializer (3.0.3)
-     dry-logic (1.0.6)
+     dry-initializer (3.0.4)
+     dry-logic (1.1.0)
        concurrent-ruby (~> 1.0)
-       dry-core (~> 0.2)
-       dry-equalizer (~> 0.2)
+       dry-core (~> 0.5, >= 0.5)
      dry-monitor (0.3.2)
        dry-configurable (~> 0.5)
        dry-core (~> 0.4)
        dry-equalizer (~> 0.2)
        dry-events (~> 0.2)
-     dry-schema (1.5.2)
+     dry-schema (1.6.1)
        concurrent-ruby (~> 1.0)
        dry-configurable (~> 0.8, >= 0.8.3)
-       dry-core (~> 0.4)
-       dry-equalizer (~> 0.2)
+       dry-core (~> 0.5, >= 0.5)
        dry-initializer (~> 3.0)
        dry-logic (~> 1.0)
-       dry-types (~> 1.4)
-     dry-types (1.4.0)
+       dry-types (~> 1.5)
+     dry-types (1.5.1)
        concurrent-ruby (~> 1.0)
        dry-container (~> 0.3)
-       dry-core (~> 0.4, >= 0.4.4)
-       dry-equalizer (~> 0.3)
+       dry-core (~> 0.5, >= 0.5)
        dry-inflector (~> 0.1, >= 0.1.2)
        dry-logic (~> 1.0, >= 1.0.2)
-     dry-validation (1.5.3)
+     dry-validation (1.6.0)
        concurrent-ruby (~> 1.0)
        dry-container (~> 0.7, >= 0.7.1)
        dry-core (~> 0.4)
        dry-equalizer (~> 0.2)
        dry-initializer (~> 3.0)
-       dry-schema (~> 1.5)
+       dry-schema (~> 1.5, >= 1.5.2)
      envlogic (1.1.2)
        dry-inflector (~> 0.1)
      factory_bot (6.1.0)
        activesupport (>= 5.0.0)
-     i18n (1.8.5)
+     i18n (1.8.9)
        concurrent-ruby (~> 1.0)
-     io-console (0.5.6)
-     irb (1.2.4)
-       reline (>= 0.0.1)
-     king_konf (0.3.7)
-     minitest (5.14.1)
-     multi_json (1.15.0)
-     rake (13.0.1)
-     reline (0.1.4)
+     io-console (0.5.8)
+     irb (1.3.4)
+       reline (>= 0.1.5)
+     king_konf (1.0.0)
+     minitest (5.14.3)
+     rake (13.0.3)
+     reline (0.2.4)
        io-console (~> 0.5)
-     rspec (3.9.0)
-       rspec-core (~> 3.9.0)
-       rspec-expectations (~> 3.9.0)
-       rspec-mocks (~> 3.9.0)
-     rspec-core (3.9.2)
-       rspec-support (~> 3.9.3)
-     rspec-expectations (3.9.2)
+     rspec (3.10.0)
+       rspec-core (~> 3.10.0)
+       rspec-expectations (~> 3.10.0)
+       rspec-mocks (~> 3.10.0)
+     rspec-core (3.10.1)
+       rspec-support (~> 3.10.0)
+     rspec-expectations (3.10.1)
        diff-lcs (>= 1.2.0, < 2.0)
-       rspec-support (~> 3.9.0)
-     rspec-mocks (3.9.1)
+       rspec-support (~> 3.10.0)
+     rspec-mocks (3.10.2)
        diff-lcs (>= 1.2.0, < 2.0)
-       rspec-support (~> 3.9.0)
-     rspec-support (3.9.3)
-     ruby-kafka (1.2.0)
+       rspec-support (~> 3.10.0)
+     rspec-support (3.10.2)
+     ruby-kafka (1.3.0)
        digest-crc
-     simplecov (0.18.5)
+     simplecov (0.21.2)
        docile (~> 1.1)
        simplecov-html (~> 0.11)
-     simplecov-html (0.12.2)
-     thor (1.0.1)
-     thread_safe (0.3.6)
-     tzinfo (1.2.7)
-       thread_safe (~> 0.1)
-     waterdrop (1.3.4)
+       simplecov_json_formatter (~> 0.1)
+     simplecov-html (0.12.3)
+     simplecov_json_formatter (0.1.2)
+     thor (1.1.0)
+     tzinfo (2.0.4)
+       concurrent-ruby (~> 1.0)
+     waterdrop (1.4.0)
        delivery_boy (>= 0.2, < 2.x)
        dry-configurable (~> 0.8)
        dry-monitor (~> 0.3)
        dry-validation (~> 1.2)
        ruby-kafka (>= 0.7.8)
        zeitwerk (~> 2.1)
-     zeitwerk (2.4.0)
+     zeitwerk (2.4.2)

  PLATFORMS
    ruby
+   x86_64-darwin-19

  DEPENDENCIES
    byebug
@@ -138,4 +134,4 @@ DEPENDENCIES
    simplecov

  BUNDLED WITH
-    2.1.4
+    2.2.11
data/README.md CHANGED
@@ -2,11 +2,9 @@

  [![Build Status](https://travis-ci.org/karafka/karafka.svg?branch=master)](https://travis-ci.org/karafka/karafka)

- **Note**: Documentation presented here refers to Karafka `1.3.x`.
+ **Note**: Documentation presented here refers to Karafka `1.4`.

- If you're upgrading from `1.2.0`, please refer to our [Upgrade Notes article](https://mensfeld.pl/2019/09/karafka-framework-1-3-0-release-notes-ruby-kafka/).
-
- If you are looking for the documentation for Karafka `1.2.x`, it can be found [here](https://github.com/karafka/wiki/tree/1.2).
+ If you are looking for the documentation for Karafka `1.3.x`, it can be found [here](https://github.com/karafka/wiki/tree/1.3).

  ## About Karafka

data/config/errors.yml CHANGED
@@ -37,3 +37,5 @@ en:
      Unknown consumer group
    does_not_exist:
      Given file does not exist or cannot be read
+   does_not_respond_to_call: >
+     needs to respond to a #call method
data/karafka.gemspec CHANGED
@@ -23,11 +23,10 @@ Gem::Specification.new do |spec|
    spec.add_dependency 'dry-validation', '~> 1.2'
    spec.add_dependency 'envlogic', '~> 1.1'
    spec.add_dependency 'irb', '~> 1.0'
-   spec.add_dependency 'multi_json', '>= 1.12'
    spec.add_dependency 'rake', '>= 11.3'
    spec.add_dependency 'ruby-kafka', '>= 1.0.0'
    spec.add_dependency 'thor', '>= 0.20'
-   spec.add_dependency 'waterdrop', '~> 1.3.0'
+   spec.add_dependency 'waterdrop', '~> 1.4.0'
    spec.add_dependency 'zeitwerk', '~> 2.1'

    spec.required_ruby_version = '>= 2.5.0'
data/lib/karafka.rb CHANGED
@@ -6,10 +6,10 @@
    waterdrop
    kafka
    envlogic
+   json
    thor
    forwardable
    fileutils
-   multi_json
    dry-configurable
    dry-validation
    dry/events/publisher
data/lib/karafka/assignment_strategies/round_robin.rb ADDED
@@ -0,0 +1,13 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Strategies for Kafka partitions assignments
+   module AssignmentStrategies
+     # Standard RoundRobin strategy
+     class RoundRobin < SimpleDelegator
+       def initialize
+         super(Kafka::RoundRobinAssignmentStrategy.new)
+       end
+     end
+   end
+ end
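As a sketch of how the new strategy plugs in (the `config.kafka` scope is an assumption based on the setting's placement next to `fetcher_max_queue_size` in setup/config.rb further down):

# Sketch: any object responding to #call satisfies the new
# :assignment_strategy contract rule added in contracts/consumer_group.rb.
Karafka::App.setup do |config|
  # The default wraps ruby-kafka's round robin strategy (see the file above);
  # a custom callable could be assigned here instead.
  config.kafka.assignment_strategy = Karafka::AssignmentStrategies::RoundRobin.new
end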
data/lib/karafka/attributes_map.rb CHANGED
@@ -19,6 +19,7 @@ module Karafka
    consumer: %i[
      session_timeout offset_commit_interval offset_commit_threshold
      offset_retention_time heartbeat_interval fetcher_max_queue_size
+     assignment_strategy
    ],
    subscribe: %i[start_from_beginning max_bytes_per_partition],
    consumption: %i[min_bytes max_bytes max_wait_time],
data/lib/karafka/cli.rb CHANGED
@@ -10,6 +10,8 @@ module Karafka
  class Cli < Thor
    package_name 'Karafka'

+   default_task :missingno
+
    class << self
      # Loads all Cli commands into Thor framework
      # This method should be executed before we run Karafka::Cli.start, otherwise we won't
@@ -20,6 +22,12 @@ module Karafka
        end
      end

+     # When there is a CLI crash, exit
+     # @return [true]
+     def exit_on_failure?
+       true
+     end
+
      private

      # @return [Array<Class>] Array with Cli action classes that can be used as commands
data/lib/karafka/cli/base.rb CHANGED
@@ -43,16 +43,16 @@ module Karafka
    end

    # Allows to set description of a given cli command
-   # @param desc [String] Description of a given cli command
-   def desc(desc)
-     @desc ||= desc
+   # @param args [Array] All the arguments that Thor desc method accepts
+   def desc(*args)
+     @desc ||= args
    end

    # This method will bind a given Cli command into Karafka Cli
    # This method is a wrapper around the way Thor defines its commands
    # @param cli_class [Karafka::Cli] Karafka cli_class
    def bind_to(cli_class)
-     cli_class.desc name, @desc
+     cli_class.desc name, *@desc

      (@options || []).each { |option| cli_class.option(*option) }

data/lib/karafka/cli/missingno.rb ADDED
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   class Cli < Thor
+     # Command that gets invoked when no method is provided when running the CLI
+     # It allows us to exit with exit code 1 instead of the default 0 to indicate that
+     # something was missing
+     # @see https://github.com/karafka/karafka/issues/619
+     class Missingno < Base
+       desc 'Hidden command that gets invoked when no command is provided', hide: true
+
+       # Prints an error about the lack of command (nothing selected)
+       def call
+         Karafka.logger.error('No command provided')
+         exit 1
+       end
+     end
+   end
+ end
data/lib/karafka/connection/api_adapter.rb CHANGED
@@ -109,11 +109,13 @@ module Karafka
    # Majority of users don't use custom topic mappers. No need to change anything when it
    # is a default mapper that does not change anything. Only some cloud providers require
    # topics to be remapped
-   return [params] if Karafka::App.config.topic_mapper.is_a?(Karafka::Routing::TopicMapper)
+   return [params.metadata] if Karafka::App.config.topic_mapper.is_a?(
+     Karafka::Routing::TopicMapper
+   )

    # @note We don't use tap as it is around 13% slower than non-dup version
-   dupped = params.dup
-   dupped['topic'] = Karafka::App.config.topic_mapper.outgoing(params.topic)
+   dupped = params.metadata.dup
+   dupped['topic'] = Karafka::App.config.topic_mapper.outgoing(params.metadata.topic)
    [dupped]
  end

data/lib/karafka/connection/batch_delegator.rb CHANGED
@@ -23,7 +23,11 @@ module Karafka
    ) do
      # Due to how ruby-kafka is built, we have the metadata that is stored on the batch
      # level only available for batch consuming
-     consumer.metadata = Params::Builders::Metadata.from_kafka_batch(kafka_batch, topic)
+     consumer.batch_metadata = Params::Builders::BatchMetadata.from_kafka_batch(
+       kafka_batch,
+       topic
+     )
+
      kafka_messages = kafka_batch.messages

      # Depending on a case (persisted or not) we might use new consumer instance per
data/lib/karafka/consumers/batch_metadata.rb ADDED
@@ -0,0 +1,10 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Consumers
+     # Brings the batch metadata into consumers that support batch_fetching
+     module BatchMetadata
+       attr_accessor :batch_metadata
+     end
+   end
+ end
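A minimal consumer sketch (hypothetical EventsConsumer; assumes a route with batch_fetching enabled) using the renamed accessor:

class EventsConsumer < Karafka::BaseConsumer
  def consume
    # #batch_metadata replaces the pre-1.4 #metadata accessor
    meta = batch_metadata
    Karafka.logger.info(
      "Batch of #{meta.batch_size} messages from " \
      "#{meta.topic}##{meta.partition}, lag: #{meta.offset_lag}"
    )
  end
end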
data/lib/karafka/consumers/includer.rb CHANGED
@@ -16,7 +16,7 @@ module Karafka

    bind_backend(consumer, topic)
    bind_params(consumer, topic)
-   bind_metadata(consumer, topic)
+   bind_batch_metadata(consumer, topic)
    bind_responders(consumer, topic)
  end

@@ -40,13 +40,14 @@ module Karafka
    consumer.extend(SingleParams)
  end

- # Adds an option to work with metadata for consumer instances that have batch fetching
+ # Adds an option to work with batch metadata for consumer instances that have
+ # batch fetching enabled
  # @param consumer [Karafka::BaseConsumer] consumer instance
  # @param topic [Karafka::Routing::Topic] topic of a consumer class
- def bind_metadata(consumer, topic)
+ def bind_batch_metadata(consumer, topic)
    return unless topic.batch_fetching

-   consumer.extend(Metadata)
+   consumer.extend(BatchMetadata)
  end

  # Adds responders support for topics and consumers with responders defined for them
data/lib/karafka/consumers/single_params.rb CHANGED
@@ -8,7 +8,7 @@ module Karafka

    # @return [Karafka::Params::Params] params instance for non batch consumption consumers
    def params
-     params_batch.to_a.first
+     params_batch.first
    end
  end
end
data/lib/karafka/contracts.rb CHANGED
@@ -5,6 +5,6 @@ module Karafka
  module Contracts
    # Regexp for validating format of groups and topics
    # @note It is not nested inside of the contracts, as it is used by couple of them
-   TOPIC_REGEXP = /\A(\w|\-|\.)+\z/.freeze
+   TOPIC_REGEXP = /\A(\w|-|\.)+\z/.freeze
  end
end
data/lib/karafka/contracts/consumer_group.rb CHANGED
@@ -32,6 +32,7 @@ module Karafka
    required(:offset_retention_time).maybe(:integer)
    required(:heartbeat_interval).filled { (int? | float?) & gteq?(0) }
    required(:fetcher_max_queue_size).filled(:int?, gt?: 0)
+   required(:assignment_strategy).value(:any)
    required(:connect_timeout).filled { (int? | float?) & gt?(0) }
    required(:reconnect_timeout).filled { (int? | float?) & gteq?(0) }
    required(:socket_timeout).filled { (int? | float?) & gt?(0) }
@@ -70,13 +71,13 @@ module Karafka

    # Uri rule to check if uri is in a Karafka acceptable format
    rule(:seed_brokers) do
-     if value&.is_a?(Array) && !value.all?(&method(:kafka_uri?))
+     if value.is_a?(Array) && !value.all?(&method(:kafka_uri?))
        key.failure(:invalid_broker_schema)
      end
    end

    rule(:topics) do
-     if value&.is_a?(Array)
+     if value.is_a?(Array)
        names = value.map { |topic| topic[:name] }

        key.failure(:topics_names_not_unique) if names.size != names.uniq.size
@@ -84,7 +85,7 @@ module Karafka
    end

    rule(:topics) do
-     if value&.is_a?(Array)
+     if value.is_a?(Array)
        value.each_with_index do |topic, index|
          TOPIC_CONTRACT.call(topic).errors.each do |error|
            key([:topics, index, error.path[0]]).failure(error.text)
@@ -93,6 +94,10 @@ module Karafka
      end
    end

+   rule(:assignment_strategy) do
+     key.failure(:does_not_respond_to_call) unless value.respond_to?(:call)
+   end
+
    rule(:ssl_client_cert, :ssl_client_cert_key) do
      if values[:ssl_client_cert] && !values[:ssl_client_cert_key]
        key(:ssl_client_cert_key).failure(:ssl_client_cert_with_ssl_client_cert_key)
data/lib/karafka/helpers/class_matcher.rb CHANGED
@@ -8,7 +8,7 @@ module Karafka
  class ClassMatcher
    # Regexp used to remove any non classy like characters that might be in the consumer
    # class name (if defined dynamically, etc)
-   CONSTANT_REGEXP = %r{[?!=+\-\*/\^\|&\[\]<>%~\#\:\s\(\)]}.freeze
+   CONSTANT_REGEXP = %r{[?!=+\-*/\^|&\[\]<>%~\#:\s()]}.freeze

    private_constant :CONSTANT_REGEXP

data/lib/karafka/instrumentation/logger.rb CHANGED
@@ -29,17 +29,17 @@ module Karafka

    # @return [Karafka::Helpers::MultiDelegator] multi delegator instance
    #   to which we will be writing logs
-   # We use this approach to log stuff to file and to the STDOUT at the same time
+   # We use this approach to log stuff to file and to the $stdout at the same time
    def target
      Karafka::Helpers::MultiDelegator
        .delegate(:write, :close)
-       .to(STDOUT, file)
+       .to($stdout, file)
    end

    # Makes sure the log directory exists as long as we can write to it
    def ensure_dir_exists
      FileUtils.mkdir_p(File.dirname(log_path))
-   rescue Errno::EACCES
+   rescue Errno::EACCES, Errno::EROFS
      nil
    end

data/lib/karafka/instrumentation/stdout_listener.rb CHANGED
@@ -43,7 +43,7 @@ module Karafka
    # so it returns a topic as a string, not a routing topic
    debug(
      <<~MSG.chomp.tr("\n", ' ')
-       Params deserialization for #{event[:caller].topic} topic
+       Params deserialization for #{event[:caller].metadata.topic} topic
        successful in #{event[:time]} ms
      MSG
    )
@@ -52,7 +52,9 @@ module Karafka
  # Logs unsuccessful deserialization attempts of incoming data
  # @param event [Dry::Events::Event] event details including payload
  def on_params_params_deserialize_error(event)
-   error "Params deserialization error for #{event[:caller].topic} topic: #{event[:error]}"
+   topic = event[:caller].metadata.topic
+   error = event[:error]
+   error "Params deserialization error for #{topic} topic: #{error}"
  end

  # Logs errors that occurred in a listener fetch loop
data/lib/karafka/params/batch_metadata.rb ADDED
@@ -0,0 +1,26 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Params
+     # Simple batch metadata object that stores all non-message information received from Kafka
+     # cluster while fetching the data
+     # @note This metadata object refers to per batch metadata, not `#params.metadata`
+     BatchMetadata = Struct.new(
+       :batch_size,
+       :first_offset,
+       :highwater_mark_offset,
+       :unknown_last_offset,
+       :last_offset,
+       :offset_lag,
+       :deserializer,
+       :partition,
+       :topic,
+       keyword_init: true
+     ) do
+       # @return [Boolean] is the last offset known or unknown
+       def unknown_last_offset?
+         unknown_last_offset
+       end
+     end
+   end
+ end
data/lib/karafka/params/builders/batch_metadata.rb ADDED
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Params
+     module Builders
+       # Builder for creating batch metadata object based on the batch information
+       module BatchMetadata
+         class << self
+           # Creates metadata based on the kafka batch data
+           # @param kafka_batch [Kafka::FetchedBatch] kafka batch details
+           # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
+           # @return [Karafka::Params::BatchMetadata] batch metadata object
+           def from_kafka_batch(kafka_batch, topic)
+             Karafka::Params::BatchMetadata.new(
+               batch_size: kafka_batch.messages.count,
+               first_offset: kafka_batch.first_offset,
+               highwater_mark_offset: kafka_batch.highwater_mark_offset,
+               unknown_last_offset: kafka_batch.unknown_last_offset?,
+               last_offset: kafka_batch.last_offset,
+               offset_lag: kafka_batch.offset_lag,
+               deserializer: topic.deserializer,
+               partition: kafka_batch.partition,
+               topic: topic.name
+             ).freeze
+           end
+         end
+       end
+     end
+   end
+ end
data/lib/karafka/params/builders/params.rb CHANGED
@@ -12,22 +12,24 @@ module Karafka
  class << self
    # @param kafka_message [Kafka::FetchedMessage] message fetched from Kafka
    # @param topic [Karafka::Routing::Topic] topic for which this message was fetched
-   # @return [Karafka::Params::Params] params object
+   # @return [Karafka::Params::Params] params object with payload and message metadata
    def from_kafka_message(kafka_message, topic)
-     Karafka::Params::Params
-       .new
-       .merge!(
-         'create_time' => kafka_message.create_time,
-         'headers' => kafka_message.headers || {},
-         'is_control_record' => kafka_message.is_control_record,
-         'key' => kafka_message.key,
-         'offset' => kafka_message.offset,
-         'deserializer' => topic.deserializer,
-         'partition' => kafka_message.partition,
-         'receive_time' => Time.now,
-         'topic' => kafka_message.topic,
-         'payload' => kafka_message.value
-       )
+     metadata = Karafka::Params::Metadata.new(
+       create_time: kafka_message.create_time,
+       headers: kafka_message.headers || {},
+       is_control_record: kafka_message.is_control_record,
+       key: kafka_message.key,
+       offset: kafka_message.offset,
+       deserializer: topic.deserializer,
+       partition: kafka_message.partition,
+       receive_time: Time.now,
+       topic: topic.name
+     ).freeze
+
+     Karafka::Params::Params.new(
+       kafka_message.value,
+       metadata
+     )
    end
  end
end
data/lib/karafka/params/builders/params_batch.rb CHANGED
@@ -12,11 +12,11 @@ module Karafka
  # @param topic [Karafka::Routing::Topic] topic for which we've received messages
  # @return [Karafka::Params::ParamsBatch<Karafka::Params::Params>] batch with params
  def from_kafka_messages(kafka_messages, topic)
-   params_array = kafka_messages.map! do |message|
+   params_array = kafka_messages.map do |message|
      Karafka::Params::Builders::Params.from_kafka_message(message, topic)
    end

-   Karafka::Params::ParamsBatch.new(params_array)
+   Karafka::Params::ParamsBatch.new(params_array).freeze
  end
end
data/lib/karafka/params/metadata.rb CHANGED
@@ -2,34 +2,19 @@

  module Karafka
    module Params
-     # Simple metadata object that stores all non-message information received from Kafka cluster
-     # while fetching the data
-     class Metadata < Hash
-       # Attributes that should be accessible as methods as well (not only hash)
-       METHOD_ATTRIBUTES = %w[
-         batch_size
-         first_offset
-         highwater_mark_offset
-         last_offset
-         offset_lag
-         deserializer
-         partition
-         topic
-       ].freeze
-
-       private_constant :METHOD_ATTRIBUTES
-
-       METHOD_ATTRIBUTES.each do |attr|
-         # Defines a method call accessor to a particular hash field.
-         define_method(attr) do
-           self[attr]
-         end
-       end
-
-       # @return [Boolean] is the last offset known or unknown
-       def unknown_last_offset?
-         self['unknown_last_offset']
-       end
-     end
+     # Single message / params metadata details that can be accessed without the need for the
+     # payload deserialization
+     Metadata = Struct.new(
+       :create_time,
+       :headers,
+       :is_control_record,
+       :key,
+       :offset,
+       :deserializer,
+       :partition,
+       :receive_time,
+       :topic,
+       keyword_init: true
+     )
    end
  end
data/lib/karafka/params/params.rb CHANGED
@@ -6,68 +6,44 @@ module Karafka
    # It provides lazy loading not only until the first usage, but also allows us to skip
    # using deserializer until we execute our logic. That way we can operate with
    # heavy-deserialization data without slowing down the whole application.
-   class Params < Hash
-     # Params attributes that should be available via a method call invocation for Kafka
-     # client compatibility.
-     # Kafka passes internally Kafka::FetchedMessage object and the ruby-kafka consumer
-     # uses those fields via method calls, so in order to be able to pass there our params
-     # objects, have to have same api.
-     METHOD_ATTRIBUTES = %w[
-       create_time
-       headers
-       is_control_record
-       key
-       offset
-       deserializer
-       deserialized
-       partition
-       receive_time
-       topic
-       payload
-     ].freeze
+   class Params
+     extend Forwardable

-     private_constant :METHOD_ATTRIBUTES
+     attr_reader :raw_payload, :metadata

-     METHOD_ATTRIBUTES.each do |attr|
-       # Defines a method call accessor to a particular hash field.
-       # @note Won't work for complex key names that contain spaces, etc
-       # @param key [Symbol] name of a field that we want to retrieve with a method call
-       # @example
-       #   key_attr_reader :example
-       #   params.example #=> 'my example payload'
-       define_method(attr) do
-         self[attr]
-       end
-     end
+     def_delegators :metadata, *Metadata.members

-     # Overrides `Hash#[]` to allow lazy deserialization of payload. This allows us to fetch
-     # metadata without actually triggering deserialization of the payload until it is needed
-     # @param key [String, Symbol] hash key
-     # @return [Object] content of a given params key
-     def [](key)
-       # Payload will be deserialized only when we request for it.
-       deserialize! if key == 'payload'
-       super
+     # @param raw_payload [Object] incoming payload before deserialization
+     # @param metadata [Karafka::Params::Metadata] message metadata object
+     def initialize(raw_payload, metadata)
+       @raw_payload = raw_payload
+       @metadata = metadata
+       @deserialized = false
+       @payload = nil
      end

-     # @return [Karafka::Params::Params] This method will trigger deserializer execution. If we
-     #   decide to retrieve data, deserializer will be executed to get data. Output of that will
-     #   be merged to the current object. This object will be also marked as already deserialized,
-     #   so we won't deserialize it again.
-     def deserialize!
-       return self if self['deserialized']
+     # @return [Object] lazy-deserialized data (deserialized upon first request)
+     def payload
+       return @payload if deserialized?
+
+       @payload = deserialize
+       # We mark deserialization as successful only after it finishes, so in case of an error
+       # this flag won't be falsely set to true
+       @deserialized = true
+       @payload
+     end

-       self['deserialized'] = true
-       self['payload'] = deserialize
-       self
+     # @return [Boolean] whether the params payload has already been deserialized
+     def deserialized?
+       @deserialized
      end

      private

-     # @return [Object] deserialized data
+     # @return [Object] result of attempting to de-serialize the data
      def deserialize
        Karafka.monitor.instrument('params.params.deserialize', caller: self) do
-         self['deserializer'].call(self)
+         metadata.deserializer.call(self)
        end
      rescue ::StandardError => e
        Karafka.monitor.instrument('params.params.deserialize.error', caller: self, error: e)
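A usage sketch of the lazy behaviour this class implements (hypothetical payload and topic values, not part of the diff):

params = Karafka::Params::Params.new(
  '{"id":1}',
  Karafka::Params::Metadata.new(
    topic: 'events',
    partition: 0,
    offset: 42,
    deserializer: Karafka::Serialization::Json::Deserializer.new
  ).freeze
)

params.topic         #=> 'events' (delegated to metadata, no deserialization)
params.deserialized? #=> false
params.payload       #=> {"id"=>1} (deserializer runs here, exactly once)
params.raw_payload   #=> '{"id":1}' (still accessible after deserialization)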
data/lib/karafka/params/params_batch.rb CHANGED
@@ -15,47 +15,46 @@ module Karafka
    @params_array = params_array
  end

- # @yieldparam [Karafka::Params::Params] each deserialized and loaded params instance
- # @note Invocation of this method will cause loading and deserializing each param after
- #   another. If you want to get access without deserializing, please access params_array
- #   directly
+ # @yieldparam [Karafka::Params::Params] each params instance
+ # @note Invocation of this method will not cause loading and deserializing each param after
+ #   another.
  def each
-   @params_array.each { |param| yield(param.deserialize!) }
+   @params_array.each { |param| yield(param) }
  end

  # @return [Array<Karafka::Params::Params>] returns all the params in a loaded state, so they
  #   can be used for batch insert, etc. Without invoking all, up until first use, they won't
  #   be deserialized
  def deserialize!
-   each(&:itself)
+   each(&:payload)
  end

  # @return [Array<Object>] array with deserialized payloads. This method can be useful when
  #   we don't care about metadata and just want to extract all the data payloads from the
  #   batch
  def payloads
-   deserialize!.map(&:payload)
+   map(&:payload)
  end

- # @return [Karafka::Params::Params] first element after the deserialization process
+ # @return [Karafka::Params::Params] first element
  def first
-   @params_array.first.deserialize!
+   @params_array.first
  end

- # @return [Karafka::Params::Params] last element after the deserialization process
+ # @return [Karafka::Params::Params] last element
  def last
-   @params_array.last.deserialize!
- end
-
- # @return [Array<Karafka::Params::Params>] pure array with params (not deserialized)
- def to_a
-   @params_array
+   @params_array.last
  end

  # @return [Integer] number of messages in the batch
  def size
    @params_array.size
  end
+
+ # @return [Array<Karafka::Params::Params>] pure array with params
+ def to_a
+   @params_array
+ end
end
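A consumer-side sketch of the now-consistent semantics (hypothetical consumer and #store helper): iteration is cheap, and each payload is deserialized only when explicitly read:

class LogsConsumer < Karafka::BaseConsumer
  def consume
    params_batch.each do |params|
      # Metadata-only checks do not touch the deserializer
      next if params.key == 'heartbeat'

      # Deserialization happens here, per message, on demand
      store(params.payload)
    end

    # Or, when only the data matters:
    # params_batch.payloads #=> array of all deserialized payloads
  end
end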
data/lib/karafka/routing/builder.rb CHANGED
@@ -16,6 +16,7 @@ module Karafka
    private_constant :CONTRACT

    def initialize
+     super
      @draws = Concurrent::Array.new
    end

data/lib/karafka/routing/consumer_group.rb CHANGED
@@ -8,9 +8,11 @@ module Karafka
  class ConsumerGroup
    extend Helpers::ConfigRetriever

-   attr_reader :topics
-   attr_reader :id
-   attr_reader :name
+   attr_reader(
+     :topics,
+     :id,
+     :name
+   )

    # @param name [String, Symbol] raw name of this consumer group. Raw means, that it does not
    #   yet have an application client_id namespace, this will be added here by default.
data/lib/karafka/serialization/json/deserializer.rb CHANGED
@@ -17,8 +17,8 @@ module Karafka
  #   }
  # Deserializer.call(params) #=> { 'a' => 1 }
  def call(params)
-   ::MultiJson.load(params['payload'])
- rescue ::MultiJson::ParseError => e
+   params.raw_payload.nil? ? nil : ::JSON.parse(params.raw_payload)
+ rescue ::JSON::ParserError => e
    raise ::Karafka::Errors::DeserializationError, e
  end
end
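Because deserializers now receive the whole params object rather than a hash, a custom one reads #raw_payload. A hedged sketch with a hypothetical MessagePack-based deserializer (assumes the msgpack gem is available):

require 'msgpack'

class MsgPackDeserializer
  # @param params [Karafka::Params::Params] full params object, not a hash
  def call(params)
    return nil if params.raw_payload.nil?

    MessagePack.unpack(params.raw_payload)
  rescue MessagePack::MalformedFormatError => e
    raise ::Karafka::Errors::DeserializationError, e
  end
end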
data/lib/karafka/setup/config.rb CHANGED
@@ -89,6 +89,9 @@ module Karafka
  # are stored for further processing. Note, that each item in the queue represents a
  # response from a single broker
  setting :fetcher_max_queue_size, 10
+ # option assignment_strategy [Object] a strategy determining the assignment of
+ #   partitions to the consumers
+ setting :assignment_strategy, Karafka::AssignmentStrategies::RoundRobin.new
  # option max_bytes_per_partition [Integer] the maximum amount of data fetched
  # from a single partition at a time.
  setting :max_bytes_per_partition, 1_048_576
data/lib/karafka/version.rb CHANGED
@@ -3,5 +3,5 @@
  # Main module namespace
  module Karafka
    # Current Karafka version
-   VERSION = '1.3.7'
+   VERSION = '1.4.2'
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka
  version: !ruby/object:Gem::Version
-   version: 1.3.7
+   version: 1.4.2
  platform: ruby
  authors:
  - Maciej Mensfeld
@@ -36,7 +36,7 @@ cert_chain:
    2DND//YJUikn1zwbz1kT70XmHd97B4Eytpln7K+M1u2g1pHVEPW4owD/ammXNpUy
    nt70FcDD4yxJQ+0YNiHd0N8IcVBM1TMIVctMNQ==
    -----END CERTIFICATE-----
- date: 2020-08-11 00:00:00.000000000 Z
+ date: 2021-02-26 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: dry-configurable
@@ -122,20 +122,6 @@ dependencies:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.0'
- - !ruby/object:Gem::Dependency
-   name: multi_json
-   requirement: !ruby/object:Gem::Requirement
-     requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         version: '1.12'
-   type: :runtime
-   prerelease: false
-   version_requirements: !ruby/object:Gem::Requirement
-     requirements:
-     - - ">="
-       - !ruby/object:Gem::Version
-         version: '1.12'
  - !ruby/object:Gem::Dependency
    name: rake
    requirement: !ruby/object:Gem::Requirement
@@ -184,14 +170,14 @@ dependencies:
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
-       version: 1.3.0
+       version: 1.4.0
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: 1.3.0
+         version: 1.4.0
  - !ruby/object:Gem::Dependency
    name: zeitwerk
    requirement: !ruby/object:Gem::Requirement
@@ -241,6 +227,7 @@ files:
  - karafka.gemspec
  - lib/karafka.rb
  - lib/karafka/app.rb
+ - lib/karafka/assignment_strategies/round_robin.rb
  - lib/karafka/attributes_map.rb
  - lib/karafka/backends/inline.rb
  - lib/karafka/base_consumer.rb
@@ -251,6 +238,7 @@ files:
  - lib/karafka/cli/flow.rb
  - lib/karafka/cli/info.rb
  - lib/karafka/cli/install.rb
+ - lib/karafka/cli/missingno.rb
  - lib/karafka/cli/server.rb
  - lib/karafka/code_reloader.rb
  - lib/karafka/connection/api_adapter.rb
@@ -259,9 +247,9 @@ files:
  - lib/karafka/connection/client.rb
  - lib/karafka/connection/listener.rb
  - lib/karafka/connection/message_delegator.rb
+ - lib/karafka/consumers/batch_metadata.rb
  - lib/karafka/consumers/callbacks.rb
  - lib/karafka/consumers/includer.rb
- - lib/karafka/consumers/metadata.rb
  - lib/karafka/consumers/responders.rb
  - lib/karafka/consumers/single_params.rb
  - lib/karafka/contracts.rb
@@ -280,7 +268,8 @@ files:
  - lib/karafka/instrumentation/monitor.rb
  - lib/karafka/instrumentation/proctitle_listener.rb
  - lib/karafka/instrumentation/stdout_listener.rb
- - lib/karafka/params/builders/metadata.rb
+ - lib/karafka/params/batch_metadata.rb
+ - lib/karafka/params/builders/batch_metadata.rb
  - lib/karafka/params/builders/params.rb
  - lib/karafka/params/builders/params_batch.rb
  - lib/karafka/params/metadata.rb
@@ -331,7 +320,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
    - !ruby/object:Gem::Version
      version: '0'
  requirements: []
- rubygems_version: 3.1.2
+ rubygems_version: 3.2.3
  signing_key:
  specification_version: 4
  summary: Ruby based framework for working with Apache Kafka
metadata.gz.sig CHANGED
Binary file
data/lib/karafka/consumers/metadata.rb DELETED
@@ -1,10 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Consumers
-     # Brings the metadata into consumers that support batch_fetching
-     module Metadata
-       attr_accessor :metadata
-     end
-   end
- end
data/lib/karafka/params/builders/metadata.rb DELETED
@@ -1,33 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Params
-     module Builders
-       # Builder for creating metadata object based on the message or batch informations
-       # @note We have 2 ways of creating metadata based on the way ruby-kafka operates
-       module Metadata
-         class << self
-           # Creates metadata based on the kafka batch data
-           # @param kafka_batch [Kafka::FetchedBatch] kafka batch details
-           # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
-           # @return [Karafka::Params::Metadata] metadata object
-           def from_kafka_batch(kafka_batch, topic)
-             Karafka::Params::Metadata
-               .new
-               .merge!(
-                 'batch_size' => kafka_batch.messages.count,
-                 'first_offset' => kafka_batch.first_offset,
-                 'highwater_mark_offset' => kafka_batch.highwater_mark_offset,
-                 'last_offset' => kafka_batch.last_offset,
-                 'offset_lag' => kafka_batch.offset_lag,
-                 'deserializer' => topic.deserializer,
-                 'partition' => kafka_batch.partition,
-                 'topic' => kafka_batch.topic,
-                 'unknown_last_offset' => kafka_batch.unknown_last_offset?
-               )
-           end
-         end
-       end
-     end
-   end
- end