karafka 1.4.5 → 1.4.9
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +15 -1
- data/Gemfile.lock +25 -26
- data/README.md +23 -24
- data/certs/mensfeld.pem +21 -21
- data/karafka.gemspec +1 -1
- data/lib/karafka/cli/install.rb +3 -2
- data/lib/karafka/connection/api_adapter.rb +5 -6
- data/lib/karafka/connection/client.rb +2 -1
- data/lib/karafka/setup/config.rb +57 -54
- data/lib/karafka/version.rb +1 -1
- data.tar.gz.sig +0 -0
- metadata +26 -27
- metadata.gz.sig +0 -0
- data/.github/FUNDING.yml +0 -3
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 54b5bde8c7a61dbf95021a53f40a5789eb29a5b297e6639a616eac5a0883391d
+  data.tar.gz: faae48e60cba546b503a222b4c02e5165e7b6934bfab4414d0116e30b4f79089
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ca53d510bd6a0e2c6efb7cec8a4b298c5668ef738943b1fa97acace0b7f9a4a89e9e2f1e45ba0ae3e3202613b4259ff8e1408a7ebf727566f11863bf4f7c637b
+  data.tar.gz: a5f735ca604ef83fb324c1c011ad4f3f9cdfa57009c3eb88a852b1a9c41bffdad7f87fd936c1b5b95076b38535fcaa38dd3aa9afdac355098f071578fef81dad
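These are the SHA-256 and SHA-512 digests of the two archives packed inside the released `.gem` file. A minimal sketch for recomputing them yourself, assuming you have fetched and unpacked the gem first (`gem fetch karafka -v 1.4.9 && tar -xf karafka-1.4.9.gem`):

```ruby
require 'digest'

# A .gem file is a tar archive containing metadata.gz, data.tar.gz and
# checksums.yaml.gz; compare these digests against checksums.yaml above
%w[metadata.gz data.tar.gz].each do |name|
  puts "#{name}: #{Digest::SHA256.file(name).hexdigest}"
end
```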
checksums.yaml.gz.sig
CHANGED

Binary file
data/.ruby-version
CHANGED

@@ -1 +1 @@
-3.0.
+3.0.2
data/CHANGELOG.md
CHANGED

@@ -1,5 +1,19 @@
 # Karafka framework changelog
 
+## 1.4.9 (2021-09-29)
+- fix `dry-configurable` deprecation warnings for default value as positional argument
+
+## 1.4.8 (2021-09-08)
+- Allow 'rails' in Gemfile to enable rails-aware generator (rewritten)
+
+## 1.4.7 (2021-09-04)
+- Update ruby-kafka to `1.4.0`
+- Support for `resolve_seed_brokers` option (with Azdaroth)
+- Set minimum `ruby-kafka` requirement to `1.3.0`
+
+## 1.4.6 (2021-08-05)
+- #700 Fix Ruby 3 compatibility issues in Connection::Client#pause (MmKolodziej)
+
 ## 1.4.5 (2021-06-16)
 - Fixup logger checks for non-writeable logfile (ojab)
 - #689 - Update the stdout initialization message for framework initialization

@@ -20,7 +34,7 @@
 
 ## 1.4.0 (2020-09-05)
 - Rename `Karafka::Params::Metadata` to `Karafka::Params::BatchMetadata`
-
+- Rename consumer `#metadata` to `#batch_metadata`
 - Separate metadata (including Karafka native metadata) from the root of params (backwards compatibility preserved thanks to rabotyaga)
 - Remove metadata hash dependency
 - Remove params dependency on a hash in favour of PORO
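The 1.4.9 entry refers to `dry-configurable` 0.13, which deprecated passing a setting's default as a positional argument. A minimal sketch of the before/after style, using the `:backend` setting that appears in the `config.rb` diff further below (the surrounding class is illustrative):

```ruby
require 'dry/configurable'

class ExampleConfig
  extend Dry::Configurable

  # Deprecated since dry-configurable 0.13 (default passed positionally):
  #   setting :backend, :inline
  # Current style (default passed as a keyword argument):
  setting :backend, default: :inline
end

ExampleConfig.config.backend # => :inline
```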
data/Gemfile.lock
CHANGED

@@ -1,14 +1,14 @@
 PATH
   remote: .
   specs:
-    karafka (1.4.
+    karafka (1.4.9)
       dry-configurable (~> 0.8)
       dry-inflector (~> 0.1)
       dry-monitor (~> 0.3)
       dry-validation (~> 1.2)
       envlogic (~> 1.1)
       irb (~> 1.0)
-      ruby-kafka (>= 1.
+      ruby-kafka (>= 1.3.0)
       thor (>= 0.20)
       waterdrop (~> 1.4.0)
       zeitwerk (~> 2.1)

@@ -16,7 +16,7 @@ PATH
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (6.1.
+    activesupport (6.1.4.1)
       concurrent-ruby (~> 1.0, >= 1.0.2)
       i18n (>= 1.6, < 2)
       minitest (>= 5.1)

@@ -28,33 +28,32 @@ GEM
       king_konf (~> 1.0)
       ruby-kafka (~> 1.0)
     diff-lcs (1.4.4)
-    digest-crc (0.6.
+    digest-crc (0.6.4)
       rake (>= 12.0.0, < 14.0.0)
     docile (1.4.0)
-    dry-configurable (0.
+    dry-configurable (0.13.0)
       concurrent-ruby (~> 1.0)
-      dry-core (~> 0.
-    dry-container (0.
+      dry-core (~> 0.6)
+    dry-container (0.9.0)
       concurrent-ruby (~> 1.0)
-      dry-configurable (~> 0.
-    dry-core (0.
+      dry-configurable (~> 0.13, >= 0.13.0)
+    dry-core (0.7.1)
       concurrent-ruby (~> 1.0)
-    dry-equalizer (0.3.0)
     dry-events (0.3.0)
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.5, >= 0.5)
-    dry-inflector (0.2.
+    dry-inflector (0.2.1)
     dry-initializer (3.0.4)
     dry-logic (1.2.0)
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.5, >= 0.5)
-    dry-monitor (0.
-      dry-configurable (~> 0.
+    dry-monitor (0.5.0)
+      dry-configurable (~> 0.13, >= 0.13.0)
       dry-core (~> 0.5, >= 0.5)
       dry-events (~> 0.2)
-    dry-schema (1.
+    dry-schema (1.8.0)
       concurrent-ruby (~> 1.0)
-      dry-configurable (~> 0.
+      dry-configurable (~> 0.13, >= 0.13.0)
       dry-core (~> 0.5, >= 0.5)
       dry-initializer (~> 3.0)
       dry-logic (~> 1.0)

@@ -65,13 +64,12 @@ GEM
       dry-core (~> 0.5, >= 0.5)
       dry-inflector (~> 0.1, >= 0.1.2)
       dry-logic (~> 1.0, >= 1.0.2)
-    dry-validation (1.
+    dry-validation (1.7.0)
       concurrent-ruby (~> 1.0)
       dry-container (~> 0.7, >= 0.7.1)
-      dry-core (~> 0.
-      dry-equalizer (~> 0.2)
+      dry-core (~> 0.5, >= 0.5)
       dry-initializer (~> 3.0)
-      dry-schema (~> 1.
+      dry-schema (~> 1.8, >= 1.8.0)
     envlogic (1.1.3)
       dry-inflector (~> 0.1)
     factory_bot (6.2.0)

@@ -79,12 +77,12 @@ GEM
     i18n (1.8.10)
       concurrent-ruby (~> 1.0)
     io-console (0.5.9)
-    irb (1.3.
-      reline (>= 0.
+    irb (1.3.7)
+      reline (>= 0.2.7)
     king_konf (1.0.0)
     minitest (5.14.4)
-    rake (13.0.
-    reline (0.2.
+    rake (13.0.6)
+    reline (0.2.7)
       io-console (~> 0.5)
     rspec (3.10.0)
       rspec-core (~> 3.10.0)

@@ -99,7 +97,7 @@ GEM
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.10.0)
     rspec-support (3.10.2)
-    ruby-kafka (1.
+    ruby-kafka (1.4.0)
       digest-crc
     simplecov (0.21.2)
       docile (~> 1.1)

@@ -110,7 +108,7 @@ GEM
     thor (1.1.0)
     tzinfo (2.0.4)
       concurrent-ruby (~> 1.0)
-    waterdrop (1.4.
+    waterdrop (1.4.3)
       delivery_boy (>= 0.2, < 2.x)
       dry-configurable (~> 0.8)
       dry-monitor (~> 0.3)

@@ -120,6 +118,7 @@ GEM
     zeitwerk (2.4.2)
 
 PLATFORMS
+  ruby
   x86_64-darwin
   x86_64-darwin-19
   x86_64-linux

@@ -132,4 +131,4 @@ DEPENDENCIES
   simplecov
 
 BUNDLED WITH
-   2.2.
+   2.2.28
data/README.md
CHANGED

@@ -2,7 +2,7 @@
 
 [![Build Status](https://github.com/karafka/karafka/actions/workflows/ci.yml/badge.svg)](https://github.com/karafka/karafka/actions/workflows/ci.yml)
 [![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
-[![Join the chat at https://
+[![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
 
 **Note**: We're finishing the new Karafka `2.0` but for now, please use `1.4`. All the documentation presented here refers to `1.4`
 

@@ -10,6 +10,24 @@
 
 Framework used to simplify Apache Kafka based Ruby applications development.
 
+```ruby
+# Define what topics you want to consume with which consumers
+Karafka::App.consumer_groups.draw do
+  topic 'system_events' do
+    consumer EventsConsumer
+  end
+end
+
+# And create your consumers, within which your messages will be processed
+class EventsConsumer < ApplicationConsumer
+  # Example that utilizes ActiveRecord#insert_all and Karafka batch processing
+  def consume
+    # Store all of the incoming Kafka events locally in an efficient way
+    Event.insert_all params_batch.payloads
+  end
+end
+```
+
 Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming and processing this data, without having to focus on things that are not your business domain.
 
 Karafka not only handles incoming messages but also provides tools for building complex data-flow applications that receive and send messages.

@@ -35,7 +53,7 @@ Karafka based applications can be easily deployed to any type of infrastructure,
 
 ## Support
 
-Karafka has
+Karafka has [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup, and deployment along with other useful details on how to run Karafka.
 
 If you have any questions about using Karafka, feel free to join our [Gitter](https://gitter.im/karafka/karafka) chat channel.
 

@@ -65,10 +83,6 @@ and follow the instructions from the [example app Wiki](https://github.com/karaf
 
 If you need more details and know how on how to start Karafka with a clean installation, read the [Getting started page](https://github.com/karafka/karafka/wiki/Getting-started) section of our Wiki.
 
-## Notice
-
-Karafka framework and Karafka team are __not__ related to Kafka streaming service called CloudKarafka in any matter. We don't recommend nor discourage usage of their platform.
-
 ## References
 
 * [Karafka framework](https://github.com/karafka/karafka)

@@ -77,23 +91,8 @@ Karafka framework and Karafka team are __not__ related to Kafka streaming servic
 
 ## Note on contributions
 
-First, thank you for considering contributing to Karafka! It's people like you that make the open source community such a great community!
-
-Each pull request must pass all the RSpec specs and meet our quality requirements.
-
-To check if everything is as it should be, we use [Coditsu](https://coditsu.io) that combines multiple linters and code analyzers for both code and documentation. Once you're done with your changes, submit a pull request.
-
-Coditsu will automatically check your work against our quality standards. You can find your commit check results on the [builds page](https://app.coditsu.io/karafka/commit_builds) of Karafka organization.
-
-[![coditsu](https://coditsu.io/assets/quality_bar.svg)](https://app.coditsu.io/karafka/commit_builds)
-
-## Contributors
-
-This project exists thanks to all the people who contribute.
-<a href="https://github.com/karafka/karafka/graphs/contributors"><img src="https://opencollective.com/karafka/contributors.svg?width=890" /></a>
-
-## Sponsors
+First, thank you for considering contributing to the Karafka ecosystem! It's people like you that make the open source community such a great community!
 
-
+Each pull request must pass all the RSpec specs, integration tests and meet our quality requirements.
 
-
+Fork it, update and wait for the Github Actions results.
data/certs/mensfeld.pem
CHANGED

@@ -1,25 +1,25 @@
 -----BEGIN CERTIFICATE-----
 MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
-
-
-
-
-
-
-
-
-
-
-
-
+ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjEwODExMTQxNTEzWhcNMjIwODExMTQx
+NTEzWjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
+CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDV2jKH4Ti87GM6nyT6D+ESzTI0MZDj
+ak2/TEwnxvijMJyCCPKT/qIkbW4/f0VHM4rhPr1nW73sb5SZBVFCLlJcOSKOBdUY
+TMY+SIXN2EtUaZuhAOe8LxtxjHTgRHvHcqUQMBENXTISNzCo32LnUxweu66ia4Pd
+1mNRhzOqNv9YiBZvtBf7IMQ+sYdOCjboq2dlsWmJiwiDpY9lQBTnWORnT3mQxU5x
+vPSwnLB854cHdCS8fQo4DjeJBRZHhEbcE5sqhEMB3RZA3EtFVEXOxlNxVTS3tncI
+qyNXiWDaxcipaens4ObSY1C2HTV7OWb7OMqSCIybeYTSfkaSdqmcl4S6zxXkjH1J
+tnjayAVzD+QVXGijsPLE2PFnJAh9iDET2cMsjabO1f6l1OQNyAtqpcyQcgfnyW0z
+g7tGxTYD+6wJHffM9d9txOUw6djkF6bDxyqB8lo4Z3IObCx18AZjI9XPS9QG7w6q
+LCWuMG2lkCcRgASqaVk9fEf9yMc2xxz5o3kCAwEAAaN3MHUwCQYDVR0TBAIwADAL
+BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFBqUFCKCOe5IuueUVqOB991jyCLLMB0GA1Ud
 EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
-
-
-
-
-
-
-
-
-
+c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBADD0/UuTTFgW+CGk2U0RDw2RBOca
+W2LTF/G7AOzuzD0Tc4voc7WXyrgKwJREv8rgBimLnNlgmFJLmtUCh2U/MgxvcilH
+yshYcbseNvjkrtYnLRlWZR4SSB6Zei5AlyGVQLPkvdsBpNegcG6w075YEwzX/38a
+8V9B/Yri2OGELBz8ykl7BsXUgNoUPA/4pHF6YRLz+VirOaUIQ4JfY7xGj6fSOWWz
+/rQ/d77r6o1mfJYM/3BRVg73a3b7DmRnE5qjwmSaSQ7u802pJnLesmArch0xGCT/
+fMmRli1Qb+6qOTl9mzD6UDMAyFR4t6MStLm0mIEqM0nBO5nUdUWbC7l9qXEf8XBE
+2DP28p3EqSuS+lKbAWKcqv7t0iRhhmaod+Yn9mcrLN1sa3q3KSQ9BCyxezCD4Mk2
+R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
+pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
 -----END CERTIFICATE-----
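The gem-signing certificate was rotated here; judging by the validity fields visible in the new body, the window appears to run from 2021-08-11 to 2022-08-11. A sketch to inspect the replacement certificate with Ruby's OpenSSL bindings:

```ruby
require 'openssl'

# Read the certificate shipped with the gem and print its key attributes
cert = OpenSSL::X509::Certificate.new(File.read('data/certs/mensfeld.pem'))
puts cert.subject    # e.g. /CN=maciej/DC=mensfeld/DC=pl
puts cert.not_before # expected 2021-08-11
puts cert.not_after  # expected 2022-08-11
```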
data/karafka.gemspec
CHANGED

@@ -23,7 +23,7 @@ Gem::Specification.new do |spec|
   spec.add_dependency 'dry-validation', '~> 1.2'
   spec.add_dependency 'envlogic', '~> 1.1'
   spec.add_dependency 'irb', '~> 1.0'
-  spec.add_dependency 'ruby-kafka', '>= 1.
+  spec.add_dependency 'ruby-kafka', '>= 1.3.0'
   spec.add_dependency 'thor', '>= 0.20'
   spec.add_dependency 'waterdrop', '~> 1.4.0'
   spec.add_dependency 'zeitwerk', '~> 2.1'
data/lib/karafka/cli/install.rb
CHANGED

@@ -30,11 +30,12 @@ module Karafka
       # @param args [Array] all the things that Thor CLI accepts
       def initialize(*args)
         super
-
+        dependencies = Bundler::LockfileParser.new(
           Bundler.read_file(
             Bundler.default_lockfile
           )
-        ).dependencies
+        ).dependencies
+        @rails = dependencies.key?('railties') || dependencies.key?('rails')
       end
 
       # Install all required things for Karafka application in current directory
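The rewritten generator above decides whether to produce Rails-aware boilerplate by parsing the application's Gemfile.lock. A standalone sketch of the same detection, assuming it runs from a directory that has a lockfile:

```ruby
require 'bundler'

# Parse the project's Gemfile.lock; #dependencies returns a Hash keyed by gem name
dependencies = Bundler::LockfileParser.new(
  Bundler.read_file(Bundler.default_lockfile)
).dependencies

# 1.4.8 treats either 'railties' or 'rails' in the lockfile as a Rails app signal
rails = dependencies.key?('railties') || dependencies.key?('rails')
puts "Rails-aware install: #{rails}"
```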
data/lib/karafka/connection/api_adapter.rb
CHANGED

@@ -84,17 +84,16 @@ module Karafka
       # @param topic [String] topic that we want to pause
       # @param partition [Integer] number partition that we want to pause
       # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
-      # @return [
+      # @return [Hash] hash with all the details required to pause kafka consumer
       def pause(topic, partition, consumer_group)
-
-        Karafka::App.config.topic_mapper.outgoing(topic),
-
-        {
+        {
+          args: [Karafka::App.config.topic_mapper.outgoing(topic), partition],
+          kwargs: {
             timeout: consumer_group.pause_timeout,
             max_timeout: consumer_group.pause_max_timeout,
             exponential_backoff: consumer_group.pause_exponential_backoff
           }
-
+        }
       end
 
       # Remaps topic details taking the topic mapper feature into consideration.
data/lib/karafka/connection/client.rb
CHANGED

@@ -64,7 +64,8 @@ module Karafka
       # @param topic [String] topic that we want to pause
       # @param partition [Integer] number partition that we want to pause
       def pause(topic, partition)
-
+        args, kwargs = ApiAdapter.pause(topic, partition, consumer_group).values_at(:args, :kwargs)
+        kafka_consumer.pause(*args, **kwargs)
       end
 
       # Marks given message as consumed
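Together these two changes are the #700 Ruby 3 fix from the changelog: `ApiAdapter.pause` now returns explicit `args` and `kwargs`, and the client splats each separately, because Ruby 3 no longer converts a trailing Hash into keyword arguments. A minimal sketch of the failure mode, using an illustrative method rather than Karafka's real one:

```ruby
def pause(topic, partition, timeout:, max_timeout:, exponential_backoff:)
  puts "pausing #{topic}/#{partition} for #{timeout}s"
end

# Ruby 2.x allowed packing everything into one array; the trailing Hash
# was implicitly promoted to keyword arguments on the call below:
packed = ['events', 0, { timeout: 10, max_timeout: nil, exponential_backoff: false }]
# pause(*packed) # raises ArgumentError on Ruby 3

# The 1.4.6+ approach keeps positional and keyword arguments separate:
args = ['events', 0]
kwargs = { timeout: 10, max_timeout: nil, exponential_backoff: false }
pause(*args, **kwargs)
```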
data/lib/karafka/setup/config.rb
CHANGED

@@ -24,86 +24,89 @@ module Karafka
       # default Kafka groups namespaces and identify that app in kafka
       setting :client_id
       # What backend do we want to use to process messages
-      setting :backend, :inline
+      setting :backend, default: :inline
       # option logger [Instance] logger that we want to use
-      setting :logger, ::Karafka::Instrumentation::Logger.new
+      setting :logger, default: ::Karafka::Instrumentation::Logger.new
       # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
-      setting :monitor, ::Karafka::Instrumentation::Monitor.new
+      setting :monitor, default: ::Karafka::Instrumentation::Monitor.new
       # Mapper used to remap consumer groups ids, so in case users migrate from other tools
       # or they need to maintain their own internal consumer group naming conventions, they
       # can easily do it, replacing the default client_id + consumer name pattern concept
-      setting :consumer_mapper, Routing::ConsumerMapper.new
+      setting :consumer_mapper, default: Routing::ConsumerMapper.new
       # Mapper used to remap names of topics, so we can have a clean internal topic naming
       # despite using any Kafka provider that uses namespacing, etc
       # It needs to implement two methods:
       # - #incoming - for remapping from the incoming message to our internal format
       # - #outgoing - for remapping from internal topic name into outgoing message
-      setting :topic_mapper, Routing::TopicMapper.new
+      setting :topic_mapper, default: Routing::TopicMapper.new
       # Default serializer for converting whatever we want to send to kafka to json
-      setting :serializer, Karafka::Serialization::Json::Serializer.new
+      setting :serializer, default: Karafka::Serialization::Json::Serializer.new
       # Default deserializer for converting incoming data into ruby objects
-      setting :deserializer, Karafka::Serialization::Json::Deserializer.new
+      setting :deserializer, default: Karafka::Serialization::Json::Deserializer.new
       # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
       # @note Fetching does not equal consuming, see batch_consuming description for details
-      setting :batch_fetching, true
+      setting :batch_fetching, default: true
       # If batch_consuming is true, we will have access to #params_batch instead of #params.
       # #params_batch will contain params received from Kafka (may be more than 1) so we can
       # process them in batches
-      setting :batch_consuming, false
+      setting :batch_consuming, default: false
       # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
       # longer wait for the consumers to stop gracefully but instead we force terminate
       # everything.
-      setting :shutdown_timeout, 60
+      setting :shutdown_timeout, default: 60
 
       # option kafka [Hash] - optional - kafka configuration options
       setting :kafka do
         # Array with at least one host
-        setting :seed_brokers, %w[kafka://127.0.0.1:9092]
+        setting :seed_brokers, default: %w[kafka://127.0.0.1:9092]
         # option session_timeout [Integer] the number of seconds after which, if a client
         # hasn't contacted the Kafka cluster, it will be kicked out of the group.
-        setting :session_timeout, 30
+        setting :session_timeout, default: 30
         # Time that a given partition will be paused from fetching messages, when message
         # consumption fails. It allows us to process other partitions, while the error is being
         # resolved and also "slows" things down, so it prevents from "eating" up all messages and
         # consuming them with failed code. Use `nil` if you want to pause forever and never retry.
-        setting :pause_timeout, 10
+        setting :pause_timeout, default: 10
         # option pause_max_timeout [Integer, nil] the maximum number of seconds to pause for,
         # or `nil` if no maximum should be enforced.
-        setting :pause_max_timeout, nil
+        setting :pause_max_timeout, default: nil
         # option pause_exponential_backoff [Boolean] whether to enable exponential backoff
-        setting :pause_exponential_backoff, false
+        setting :pause_exponential_backoff, default: false
         # option offset_commit_interval [Integer] the interval between offset commits,
         # in seconds.
-        setting :offset_commit_interval, 10
+        setting :offset_commit_interval, default: 10
         # option offset_commit_threshold [Integer] the number of messages that can be
         # processed before their offsets are committed. If zero, offset commits are
         # not triggered by message consumption.
-        setting :offset_commit_threshold, 0
+        setting :offset_commit_threshold, default: 0
         # option heartbeat_interval [Integer] the interval between heartbeats; must be less
         # than the session window.
-        setting :heartbeat_interval, 10
+        setting :heartbeat_interval, default: 10
         # option offset_retention_time [Integer] The length of the retention window, known as
         # offset retention time
-        setting :offset_retention_time, nil
+        setting :offset_retention_time, default: nil
         # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
         # are stored for further processing. Note, that each item in the queue represents a
         # response from a single broker
-        setting :fetcher_max_queue_size, 10
+        setting :fetcher_max_queue_size, default: 10
         # option assignment_strategy [Object] a strategy determining the assignment of
         # partitions to the consumers.
-        setting :assignment_strategy, Karafka::AssignmentStrategies::RoundRobin.new
+        setting :assignment_strategy, default: Karafka::AssignmentStrategies::RoundRobin.new
         # option max_bytes_per_partition [Integer] the maximum amount of data fetched
         # from a single partition at a time.
-        setting :max_bytes_per_partition, 1_048_576
+        setting :max_bytes_per_partition, default: 1_048_576
         # whether to consume messages starting at the beginning or to just consume new messages
-        setting :start_from_beginning, true
+        setting :start_from_beginning, default: true
+        # option resolve_seed_brokers [Boolean] whether to resolve each hostname of the seed
+        # brokers
+        setting :resolve_seed_brokers, default: false
         # option min_bytes [Integer] the minimum number of bytes to read before
         # returning messages from the server; if `max_wait_time` is reached, this
         # is ignored.
-        setting :min_bytes, 1
+        setting :min_bytes, default: 1
         # option max_bytes [Integer] the maximum number of bytes to read before returning messages
         # from each broker.
-        setting :max_bytes, 10_485_760
+        setting :max_bytes, default: 10_485_760
         # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
         # wait before returning data from a single message fetch. By setting this high you also
         # increase the fetching throughput - and by setting it low you set a bound on latency.

@@ -111,65 +114,65 @@ module Karafka
         # time specified. The default value is one second. If you want to have at most five
         # seconds of latency, set `max_wait_time` to 5. You should make sure
         # max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
-        setting :max_wait_time, 1
+        setting :max_wait_time, default: 1
         # option automatically_mark_as_consumed [Boolean] should we automatically mark received
         # messages as consumed (processed) after non-error consumption
-        setting :automatically_mark_as_consumed, true
+        setting :automatically_mark_as_consumed, default: true
         # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
         # Kafka cluster that went down (in seconds)
-        setting :reconnect_timeout, 5
+        setting :reconnect_timeout, default: 5
         # option connect_timeout [Integer] Sets the number of seconds to wait while connecting to
         # a broker for the first time. When ruby-kafka initializes, it needs to connect to at
         # least one host.
-        setting :connect_timeout, 10
+        setting :connect_timeout, default: 10
         # option socket_timeout [Integer] Sets the number of seconds to wait when reading from or
         # writing to a socket connection to a broker. After this timeout expires the connection
         # will be killed. Note that some Kafka operations are by definition long-running, such as
         # waiting for new messages to arrive in a partition, so don't set this value too low
-        setting :socket_timeout, 30
+        setting :socket_timeout, default: 30
         # option partitioner [Object, nil] the partitioner that should be used by the client
-        setting :partitioner, nil
+        setting :partitioner, default: nil
 
         # SSL authentication related settings
         # option ca_cert [String, nil] SSL CA certificate
-        setting :ssl_ca_cert, nil
+        setting :ssl_ca_cert, default: nil
         # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
-        setting :ssl_ca_cert_file_path, nil
+        setting :ssl_ca_cert_file_path, default: nil
         # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
         # certificate store
-        setting :ssl_ca_certs_from_system, false
+        setting :ssl_ca_certs_from_system, default: false
         # option ssl_verify_hostname [Boolean] Verify the hostname for client certs
-        setting :ssl_verify_hostname, true
+        setting :ssl_verify_hostname, default: true
         # option ssl_client_cert [String, nil] SSL client certificate
-        setting :ssl_client_cert, nil
+        setting :ssl_client_cert, default: nil
         # option ssl_client_cert_key [String, nil] SSL client certificate password
-        setting :ssl_client_cert_key, nil
+        setting :ssl_client_cert_key, default: nil
         # option sasl_gssapi_principal [String, nil] sasl principal
-        setting :sasl_gssapi_principal, nil
+        setting :sasl_gssapi_principal, default: nil
         # option sasl_gssapi_keytab [String, nil] sasl keytab
-        setting :sasl_gssapi_keytab, nil
+        setting :sasl_gssapi_keytab, default: nil
         # option sasl_plain_authzid [String] The authorization identity to use
-        setting :sasl_plain_authzid, ''
+        setting :sasl_plain_authzid, default: ''
         # option sasl_plain_username [String, nil] The username used to authenticate
-        setting :sasl_plain_username, nil
+        setting :sasl_plain_username, default: nil
         # option sasl_plain_password [String, nil] The password used to authenticate
-        setting :sasl_plain_password, nil
+        setting :sasl_plain_password, default: nil
         # option sasl_scram_username [String, nil] The username used to authenticate
-        setting :sasl_scram_username, nil
+        setting :sasl_scram_username, default: nil
         # option sasl_scram_password [String, nil] The password used to authenticate
-        setting :sasl_scram_password, nil
+        setting :sasl_scram_password, default: nil
         # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
-        setting :sasl_scram_mechanism, nil
+        setting :sasl_scram_mechanism, default: nil
         # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
-        setting :sasl_over_ssl, true
+        setting :sasl_over_ssl, default: true
         # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
-        setting :ssl_client_cert_chain, nil
+        setting :ssl_client_cert_chain, default: nil
         # option ssl_client_cert_key_password [String, nil] the password required to read
         # the ssl_client_cert_key
-        setting :ssl_client_cert_key_password, nil
+        setting :ssl_client_cert_key_password, default: nil
         # @param sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
         # implements method token.
-        setting :sasl_oauth_token_provider, nil
+        setting :sasl_oauth_token_provider, default: nil
       end
 
       # Namespace for internal settings that should not be modified

@@ -177,18 +180,18 @@ module Karafka
       # non global state
       setting :internal do
         # option routing_builder [Karafka::Routing::Builder] builder instance
-        setting :routing_builder, Routing::Builder.new
+        setting :routing_builder, default: Routing::Builder.new
         # option status [Karafka::Status] app status
-        setting :status, Status.new
+        setting :status, default: Status.new
         # option process [Karafka::Process] process status
         # @note In the future, we need to have a single process representation for all the karafka
         # instances
-        setting :process, Process.new
+        setting :process, default: Process.new
         # option fetcher [Karafka::Fetcher] fetcher instance
-        setting :fetcher, Fetcher.new
+        setting :fetcher, default: Fetcher.new
         # option configurators [Array<Object>] all configurators that we want to run after
         # the setup
-        setting :configurators, [Configurators::WaterDrop.new]
+        setting :configurators, default: [Configurators::WaterDrop.new]
       end
 
       class << self
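Only how defaults are declared changes here; user-facing configuration still happens in the `setup` block. A sketch of a karafka.rb excerpt that enables the `resolve_seed_brokers` option added in 1.4.7 (class name and broker address are illustrative):

```ruby
# karafka.rb (excerpt)
class KarafkaApp < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    config.kafka.seed_brokers = %w[kafka://kafka.internal:9092]
    # Resolve seed broker hostnames up front (1.4.7+); useful when one
    # DNS name points at several brokers
    config.kafka.resolve_seed_brokers = true
  end
end
```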
data/lib/karafka/version.rb
CHANGED
data.tar.gz.sig
CHANGED

Binary file
metadata
CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 1.4.
+  version: 1.4.9
 platform: ruby
 authors:
 - Maciej Mensfeld

@@ -13,30 +13,30 @@ cert_chain:
 - |
   -----BEGIN CERTIFICATE-----
   MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
-
-
-
-
-
-
-
-
-
-
-
-
+  ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjEwODExMTQxNTEzWhcNMjIwODExMTQx
+  NTEzWjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
+  CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDV2jKH4Ti87GM6nyT6D+ESzTI0MZDj
+  ak2/TEwnxvijMJyCCPKT/qIkbW4/f0VHM4rhPr1nW73sb5SZBVFCLlJcOSKOBdUY
+  TMY+SIXN2EtUaZuhAOe8LxtxjHTgRHvHcqUQMBENXTISNzCo32LnUxweu66ia4Pd
+  1mNRhzOqNv9YiBZvtBf7IMQ+sYdOCjboq2dlsWmJiwiDpY9lQBTnWORnT3mQxU5x
+  vPSwnLB854cHdCS8fQo4DjeJBRZHhEbcE5sqhEMB3RZA3EtFVEXOxlNxVTS3tncI
+  qyNXiWDaxcipaens4ObSY1C2HTV7OWb7OMqSCIybeYTSfkaSdqmcl4S6zxXkjH1J
+  tnjayAVzD+QVXGijsPLE2PFnJAh9iDET2cMsjabO1f6l1OQNyAtqpcyQcgfnyW0z
+  g7tGxTYD+6wJHffM9d9txOUw6djkF6bDxyqB8lo4Z3IObCx18AZjI9XPS9QG7w6q
+  LCWuMG2lkCcRgASqaVk9fEf9yMc2xxz5o3kCAwEAAaN3MHUwCQYDVR0TBAIwADAL
+  BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFBqUFCKCOe5IuueUVqOB991jyCLLMB0GA1Ud
   EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
-
-
-
-
-
-
-
-
-
+  c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBADD0/UuTTFgW+CGk2U0RDw2RBOca
+  W2LTF/G7AOzuzD0Tc4voc7WXyrgKwJREv8rgBimLnNlgmFJLmtUCh2U/MgxvcilH
+  yshYcbseNvjkrtYnLRlWZR4SSB6Zei5AlyGVQLPkvdsBpNegcG6w075YEwzX/38a
+  8V9B/Yri2OGELBz8ykl7BsXUgNoUPA/4pHF6YRLz+VirOaUIQ4JfY7xGj6fSOWWz
+  /rQ/d77r6o1mfJYM/3BRVg73a3b7DmRnE5qjwmSaSQ7u802pJnLesmArch0xGCT/
+  fMmRli1Qb+6qOTl9mzD6UDMAyFR4t6MStLm0mIEqM0nBO5nUdUWbC7l9qXEf8XBE
+  2DP28p3EqSuS+lKbAWKcqv7t0iRhhmaod+Yn9mcrLN1sa3q3KSQ9BCyxezCD4Mk2
+  R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
+  pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
   -----END CERTIFICATE-----
-date: 2021-
+date: 2021-09-29 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: dry-configurable

@@ -128,14 +128,14 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 1.
+        version: 1.3.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 1.
+        version: 1.3.0
 - !ruby/object:Gem::Dependency
   name: thor
   requirement: !ruby/object:Gem::Requirement

@@ -191,7 +191,6 @@ files:
 - ".coditsu/ci.yml"
 - ".console_irbrc"
 - ".diffend.yml"
-- ".github/FUNDING.yml"
 - ".github/ISSUE_TEMPLATE/bug_report.md"
 - ".github/ISSUE_TEMPLATE/feature_request.md"
 - ".github/workflows/ci.yml"

@@ -306,7 +305,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.2.
+rubygems_version: 3.2.28
 signing_key:
 specification_version: 4
 summary: Ruby based framework for working with Apache Kafka
metadata.gz.sig
CHANGED

Binary file
data/.github/FUNDING.yml
DELETED