karafka 1.4.7 → 1.4.11
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/CHANGELOG.md +14 -1
- data/Gemfile.lock +30 -34
- data/README.md +23 -24
- data/karafka.gemspec +10 -9
- data/lib/karafka/cli/install.rb +3 -2
- data/lib/karafka/setup/config.rb +55 -55
- data/lib/karafka/version.rb +1 -1
- data.tar.gz.sig +0 -0
- metadata +22 -22
- metadata.gz.sig +0 -0
- data/.github/FUNDING.yml +0 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: efef4e0f8b4509636c994eed65dc9ad88723ed3b7890126771673d9c9c0e5b21
+  data.tar.gz: 803222eaf3fd015c53207ca9a33505e577e93d62f26047bf5d6136b9b2a94fb2
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 271dda36edd258e50a8ea34c0e9848610e28af284e0b90efe1567cfe6421db75970a9717373d4d5ceb17ba39e4f5b2db6bc80b9927a7184353d1ee4ef0738ad1
+  data.tar.gz: 2e212be7913dc15f07cc5933bc3893c8de0205cee40a95dbd7e55cb4f6377ce3a7283f602e114fea57ba8c3e0fb416aafd96af24c5fcd0ee12549d298eadd983
checksums.yaml.gz.sig
CHANGED
Binary file
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,18 @@
 # Karafka framework changelog
 
+## 1.4.11 (2021-12-04)
+- Source code metadata url added to the gemspec
+- Gem bump
+
+## 1.4.10 (2021-10-30)
+- update gems requirements in the gemspec (nijikon)
+
+## 1.4.9 (2021-09-29)
+- fix `dry-configurable` deprecation warnings for default value as positional argument
+
+## 1.4.8 (2021-09-08)
+- Allow 'rails' in Gemfile to enable rails-aware generator (rewritten)
+
 ## 1.4.7 (2021-09-04)
 - Update ruby-kafka to `1.4.0`
 - Support for `resolve_seed_brokers` option (with Azdaroth)
@@ -28,7 +41,7 @@
 
 ## 1.4.0 (2020-09-05)
 - Rename `Karafka::Params::Metadata` to `Karafka::Params::BatchMetadata`
-
+- Rename consumer `#metadata` to `#batch_metadata`
 - Separate metadata (including Karafka native metadata) from the root of params (backwards compatibility preserved thanks to rabotyaga)
 - Remove metadata hash dependency
 - Remove params dependency on a hash in favour of PORO
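Note on the 1.4.9 entry above: dry-configurable 0.13 deprecated passing a setting's default as a positional argument. A minimal sketch of that migration (the full set of in-place changes is visible in the data/lib/karafka/setup/config.rb diff below; `ExampleConfig` is a hypothetical class used only for illustration):

```ruby
require 'dry/configurable'

class ExampleConfig
  extend Dry::Configurable

  # dry-configurable < 0.13 accepted the default positionally, which now
  # emits a deprecation warning:
  #   setting :shutdown_timeout, 60
  #
  # dry-configurable >= 0.13 expects the keyword form instead:
  setting :shutdown_timeout, default: 60
end

ExampleConfig.config.shutdown_timeout # => 60
```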
data/Gemfile.lock
CHANGED
@@ -1,17 +1,17 @@
 PATH
   remote: .
   specs:
-    karafka (1.4.
-      dry-configurable (~> 0.
-      dry-inflector (~> 0.
-      dry-monitor (~> 0.
-      dry-validation (~> 1.
+    karafka (1.4.11)
+      dry-configurable (~> 0.13)
+      dry-inflector (~> 0.2)
+      dry-monitor (~> 0.5)
+      dry-validation (~> 1.7)
       envlogic (~> 1.1)
-      irb (~> 1.
+      irb (~> 1.3)
       ruby-kafka (>= 1.3.0)
-      thor (>=
-      waterdrop (~> 1.4
-      zeitwerk (~> 2.
+      thor (>= 1.1)
+      waterdrop (~> 1.4)
+      zeitwerk (~> 2.4)
 
 GEM
   remote: https://rubygems.org/
@@ -31,15 +31,14 @@ GEM
     digest-crc (0.6.4)
       rake (>= 12.0.0, < 14.0.0)
     docile (1.4.0)
-    dry-configurable (0.
+    dry-configurable (0.13.0)
       concurrent-ruby (~> 1.0)
-      dry-core (~> 0.
-    dry-container (0.
+      dry-core (~> 0.6)
+    dry-container (0.9.0)
       concurrent-ruby (~> 1.0)
-      dry-configurable (~> 0.
+      dry-configurable (~> 0.13, >= 0.13.0)
     dry-core (0.7.1)
       concurrent-ruby (~> 1.0)
-    dry-equalizer (0.3.0)
     dry-events (0.3.0)
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.5, >= 0.5)
@@ -48,13 +47,13 @@ GEM
     dry-logic (1.2.0)
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.5, >= 0.5)
-    dry-monitor (0.
-      dry-configurable (~> 0.
+    dry-monitor (0.5.0)
+      dry-configurable (~> 0.13, >= 0.13.0)
       dry-core (~> 0.5, >= 0.5)
       dry-events (~> 0.2)
-    dry-schema (1.
+    dry-schema (1.8.0)
       concurrent-ruby (~> 1.0)
-      dry-configurable (~> 0.
+      dry-configurable (~> 0.13, >= 0.13.0)
       dry-core (~> 0.5, >= 0.5)
       dry-initializer (~> 3.0)
       dry-logic (~> 1.0)
@@ -65,18 +64,17 @@ GEM
       dry-core (~> 0.5, >= 0.5)
       dry-inflector (~> 0.1, >= 0.1.2)
       dry-logic (~> 1.0, >= 1.0.2)
-    dry-validation (1.
+    dry-validation (1.7.0)
       concurrent-ruby (~> 1.0)
       dry-container (~> 0.7, >= 0.7.1)
-      dry-core (~> 0.
-      dry-equalizer (~> 0.2)
+      dry-core (~> 0.5, >= 0.5)
       dry-initializer (~> 3.0)
-      dry-schema (~> 1.
+      dry-schema (~> 1.8, >= 1.8.0)
     envlogic (1.1.3)
       dry-inflector (~> 0.1)
     factory_bot (6.2.0)
       activesupport (>= 5.0.0)
-    i18n (1.8.
+    i18n (1.8.11)
       concurrent-ruby (~> 1.0)
     io-console (0.5.9)
     irb (1.3.7)
@@ -98,7 +96,7 @@ GEM
     rspec-mocks (3.10.2)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.10.0)
-    rspec-support (3.10.
+    rspec-support (3.10.3)
     ruby-kafka (1.4.0)
       digest-crc
     simplecov (0.21.2)
@@ -110,19 +108,17 @@ GEM
     thor (1.1.0)
     tzinfo (2.0.4)
       concurrent-ruby (~> 1.0)
-    waterdrop (1.4.
+    waterdrop (1.4.4)
       delivery_boy (>= 0.2, < 2.x)
-      dry-configurable (~> 0.
-      dry-monitor (~> 0.
-      dry-validation (~> 1.
-      ruby-kafka (>=
-      zeitwerk (~> 2.
-    zeitwerk (2.
+      dry-configurable (~> 0.13)
+      dry-monitor (~> 0.5)
+      dry-validation (~> 1.7)
+      ruby-kafka (>= 1.3.0)
+      zeitwerk (~> 2.4)
+    zeitwerk (2.5.1)
 
 PLATFORMS
-  ruby
   x86_64-darwin
-  x86_64-darwin-19
   x86_64-linux
 
 DEPENDENCIES
@@ -133,4 +129,4 @@ DEPENDENCIES
   simplecov
 
 BUNDLED WITH
-   2.2.
+   2.2.31
data/README.md
CHANGED
@@ -2,7 +2,7 @@
 
 [![Build Status](https://github.com/karafka/karafka/actions/workflows/ci.yml/badge.svg)](https://github.com/karafka/karafka/actions/workflows/ci.yml)
 [![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
-[![Join the chat at https://
+[![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
 
 **Note**: We're finishing the new Karafka `2.0` but for now, please use `1.4`. All the documentation presented here refers to `1.4`
 
@@ -10,6 +10,24 @@
 
 Framework used to simplify Apache Kafka based Ruby applications development.
 
+```ruby
+# Define what topics you want to consume with which consumers
+Karafka::App.consumer_groups.draw do
+  topic 'system_events' do
+    consumer EventsConsumer
+  end
+end
+
+# And create your consumers, within which your messages will be processed
+class EventsConsumer < ApplicationConsumer
+  # Example that utilizes ActiveRecord#insert_all and Karafka batch processing
+  def consume
+    # Store all of the incoming Kafka events locally in an efficient way
+    Event.insert_all params_batch.payloads
+  end
+end
+```
+
 Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming and processing this data, without having to focus on things that are not your business domain.
 
 Karafka not only handles incoming messages but also provides tools for building complex data-flow applications that receive and send messages.
@@ -35,7 +53,7 @@ Karafka based applications can be easily deployed to any type of infrastructure,
 
 ## Support
 
-Karafka has
+Karafka has [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup, and deployment along with other useful details on how to run Karafka.
 
 If you have any questions about using Karafka, feel free to join our [Gitter](https://gitter.im/karafka/karafka) chat channel.
 
@@ -65,10 +83,6 @@ and follow the instructions from the [example app Wiki](https://github.com/karaf
 
 If you need more details and know how on how to start Karafka with a clean installation, read the [Getting started page](https://github.com/karafka/karafka/wiki/Getting-started) section of our Wiki.
 
-## Notice
-
-Karafka framework and Karafka team are __not__ related to Kafka streaming service called CloudKarafka in any matter. We don't recommend nor discourage usage of their platform.
-
 ## References
 
 * [Karafka framework](https://github.com/karafka/karafka)
@@ -77,23 +91,8 @@ Karafka framework and Karafka team are __not__ related to Kafka streaming servic
 
 ## Note on contributions
 
-First, thank you for considering contributing to Karafka! It's people like you that make the open source community such a great community!
-
-Each pull request must pass all the RSpec specs and meet our quality requirements.
-
-To check if everything is as it should be, we use [Coditsu](https://coditsu.io) that combines multiple linters and code analyzers for both code and documentation. Once you're done with your changes, submit a pull request.
-
-Coditsu will automatically check your work against our quality standards. You can find your commit check results on the [builds page](https://app.coditsu.io/karafka/commit_builds) of Karafka organization.
-
-[![coditsu](https://coditsu.io/assets/quality_bar.svg)](https://app.coditsu.io/karafka/commit_builds)
-
-## Contributors
-
-This project exists thanks to all the people who contribute.
-<a href="https://github.com/karafka/karafka/graphs/contributors"><img src="https://opencollective.com/karafka/contributors.svg?width=890" /></a>
-
-## Sponsors
+First, thank you for considering contributing to the Karafka ecosystem! It's people like you that make the open source community such a great community!
 
-
+Each pull request must pass all the RSpec specs, integration tests and meet our quality requirements.
 
-
+Fork it, update and wait for the Github Actions results.
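A usage note on the README example added above: `#params_batch` delivers batches only when batch consuming is enabled, and it defaults to `false` (see the `batch_consuming` setting in the data/lib/karafka/setup/config.rb diff below). A minimal sketch of the relevant karafka.rb setup, assuming the standard Karafka 1.4 app class and a hypothetical `my_app` client id:

```ruby
# karafka.rb (sketch) - enable batch consuming so EventsConsumer#consume
# above receives many messages per call via #params_batch
class KarafkaApp < Karafka::App
  setup do |config|
    config.client_id = 'my_app' # hypothetical id
    config.kafka.seed_brokers = %w[kafka://127.0.0.1:9092]
    config.batch_fetching = true   # fetch from Kafka in batches (the default)
    config.batch_consuming = true  # expose #params_batch instead of #params
  end
end
```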
data/karafka.gemspec
CHANGED
@@ -12,21 +12,21 @@ Gem::Specification.new do |spec|
   spec.platform    = Gem::Platform::RUBY
   spec.authors     = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
   spec.email       = %w[maciej@mensfeld.pl pavlo.vavruk@gmail.com adam99g@gmail.com]
-  spec.homepage    = 'https://
+  spec.homepage    = 'https://karafka.io'
   spec.summary     = 'Ruby based framework for working with Apache Kafka'
   spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
   spec.license     = 'MIT'
 
-  spec.add_dependency 'dry-configurable', '~> 0.
-  spec.add_dependency 'dry-inflector', '~> 0.
-  spec.add_dependency 'dry-monitor', '~> 0.
-  spec.add_dependency 'dry-validation', '~> 1.
+  spec.add_dependency 'dry-configurable', '~> 0.13'
+  spec.add_dependency 'dry-inflector', '~> 0.2'
+  spec.add_dependency 'dry-monitor', '~> 0.5'
+  spec.add_dependency 'dry-validation', '~> 1.7'
   spec.add_dependency 'envlogic', '~> 1.1'
-  spec.add_dependency 'irb', '~> 1.
+  spec.add_dependency 'irb', '~> 1.3'
   spec.add_dependency 'ruby-kafka', '>= 1.3.0'
-  spec.add_dependency 'thor', '>=
-  spec.add_dependency 'waterdrop', '~> 1.4
-  spec.add_dependency 'zeitwerk', '~> 2.
+  spec.add_dependency 'thor', '>= 1.1'
+  spec.add_dependency 'waterdrop', '~> 1.4'
+  spec.add_dependency 'zeitwerk', '~> 2.4'
 
   spec.required_ruby_version = '>= 2.6.0'
 
@@ -38,5 +38,6 @@ Gem::Specification.new do |spec|
   spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
   spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
   spec.require_paths = %w[lib]
+  spec.metadata      = { 'source_code_uri' => 'https://github.com/karafka/karafka' }
 end
 # rubocop:enable Metrics/BlockLength
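The new `spec.metadata` entry is how RubyGems associates the gem with its source repository. One way to inspect it locally, a sketch assuming the gem is installed:

```ruby
require 'rubygems'

# Look up the installed gem's specification and read its metadata hash
spec = Gem::Specification.find_by_name('karafka')
spec.metadata['source_code_uri'] # => "https://github.com/karafka/karafka"
```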
data/lib/karafka/cli/install.rb
CHANGED
@@ -30,11 +30,12 @@ module Karafka
       # @param args [Array] all the things that Thor CLI accepts
       def initialize(*args)
         super
-
+        dependencies = Bundler::LockfileParser.new(
           Bundler.read_file(
             Bundler.default_lockfile
           )
-        ).dependencies
+        ).dependencies
+        @rails = dependencies.key?('railties') || dependencies.key?('rails')
       end
 
       # Install all required things for Karafka application in current directory
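The rewritten initializer above now flags an app as Rails-aware when either `railties` or `rails` appears in the resolved lockfile dependencies. A standalone sketch of the same detection, runnable from any bundled project directory:

```ruby
require 'bundler'

# Parse the project's Gemfile.lock, as the installer above does, and check
# the resolved dependencies for either gem name
dependencies = Bundler::LockfileParser.new(
  Bundler.read_file(Bundler.default_lockfile)
).dependencies

rails_aware = dependencies.key?('railties') || dependencies.key?('rails')
puts "rails-aware install: #{rails_aware}"
```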
data/lib/karafka/setup/config.rb
CHANGED
@@ -24,89 +24,89 @@ module Karafka
       # default Kafka groups namespaces and identify that app in kafka
       setting :client_id
       # What backend do we want to use to process messages
-      setting :backend, :inline
+      setting :backend, default: :inline
       # option logger [Instance] logger that we want to use
-      setting :logger, ::Karafka::Instrumentation::Logger.new
+      setting :logger, default: ::Karafka::Instrumentation::Logger.new
       # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
-      setting :monitor, ::Karafka::Instrumentation::Monitor.new
+      setting :monitor, default: ::Karafka::Instrumentation::Monitor.new
       # Mapper used to remap consumer groups ids, so in case users migrate from other tools
       # or they need to maintain their own internal consumer group naming conventions, they
       # can easily do it, replacing the default client_id + consumer name pattern concept
-      setting :consumer_mapper, Routing::ConsumerMapper.new
+      setting :consumer_mapper, default: Routing::ConsumerMapper.new
       # Mapper used to remap names of topics, so we can have a clean internal topic naming
       # despite using any Kafka provider that uses namespacing, etc
       # It needs to implement two methods:
       #   - #incoming - for remapping from the incoming message to our internal format
       #   - #outgoing - for remapping from internal topic name into outgoing message
-      setting :topic_mapper, Routing::TopicMapper.new
+      setting :topic_mapper, default: Routing::TopicMapper.new
       # Default serializer for converting whatever we want to send to kafka to json
-      setting :serializer, Karafka::Serialization::Json::Serializer.new
+      setting :serializer, default: Karafka::Serialization::Json::Serializer.new
       # Default deserializer for converting incoming data into ruby objects
-      setting :deserializer, Karafka::Serialization::Json::Deserializer.new
+      setting :deserializer, default: Karafka::Serialization::Json::Deserializer.new
       # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
       # @note Fetching does not equal consuming, see batch_consuming description for details
-      setting :batch_fetching, true
+      setting :batch_fetching, default: true
       # If batch_consuming is true, we will have access to #params_batch instead of #params.
       # #params_batch will contain params received from Kafka (may be more than 1) so we can
       # process them in batches
-      setting :batch_consuming, false
+      setting :batch_consuming, default: false
       # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
       # longer wait for the consumers to stop gracefully but instead we force terminate
       # everything.
-      setting :shutdown_timeout, 60
+      setting :shutdown_timeout, default: 60
 
       # option kafka [Hash] - optional - kafka configuration options
       setting :kafka do
         # Array with at least one host
-        setting :seed_brokers, %w[kafka://127.0.0.1:9092]
+        setting :seed_brokers, default: %w[kafka://127.0.0.1:9092]
         # option session_timeout [Integer] the number of seconds after which, if a client
         # hasn't contacted the Kafka cluster, it will be kicked out of the group.
-        setting :session_timeout, 30
+        setting :session_timeout, default: 30
         # Time that a given partition will be paused from fetching messages, when message
         # consumption fails. It allows us to process other partitions, while the error is being
         # resolved and also "slows" things down, so it prevents from "eating" up all messages and
         # consuming them with failed code. Use `nil` if you want to pause forever and never retry.
-        setting :pause_timeout, 10
+        setting :pause_timeout, default: 10
         # option pause_max_timeout [Integer, nil] the maximum number of seconds to pause for,
         # or `nil` if no maximum should be enforced.
-        setting :pause_max_timeout, nil
+        setting :pause_max_timeout, default: nil
        # option pause_exponential_backoff [Boolean] whether to enable exponential backoff
-        setting :pause_exponential_backoff, false
+        setting :pause_exponential_backoff, default: false
         # option offset_commit_interval [Integer] the interval between offset commits,
         # in seconds.
-        setting :offset_commit_interval, 10
+        setting :offset_commit_interval, default: 10
         # option offset_commit_threshold [Integer] the number of messages that can be
         # processed before their offsets are committed. If zero, offset commits are
         # not triggered by message consumption.
-        setting :offset_commit_threshold, 0
+        setting :offset_commit_threshold, default: 0
         # option heartbeat_interval [Integer] the interval between heartbeats; must be less
         # than the session window.
-        setting :heartbeat_interval, 10
+        setting :heartbeat_interval, default: 10
         # option offset_retention_time [Integer] The length of the retention window, known as
         # offset retention time
-        setting :offset_retention_time, nil
+        setting :offset_retention_time, default: nil
         # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
         # are stored for further processing. Note, that each item in the queue represents a
         # response from a single broker
-        setting :fetcher_max_queue_size, 10
+        setting :fetcher_max_queue_size, default: 10
         # option assignment_strategy [Object] a strategy determining the assignment of
         # partitions to the consumers.
-        setting :assignment_strategy, Karafka::AssignmentStrategies::RoundRobin.new
+        setting :assignment_strategy, default: Karafka::AssignmentStrategies::RoundRobin.new
         # option max_bytes_per_partition [Integer] the maximum amount of data fetched
         # from a single partition at a time.
-        setting :max_bytes_per_partition, 1_048_576
+        setting :max_bytes_per_partition, default: 1_048_576
         # whether to consume messages starting at the beginning or to just consume new messages
-        setting :start_from_beginning, true
+        setting :start_from_beginning, default: true
         # option resolve_seed_brokers [Boolean] whether to resolve each hostname of the seed
         # brokers
-        setting :resolve_seed_brokers, false
+        setting :resolve_seed_brokers, default: false
         # option min_bytes [Integer] the minimum number of bytes to read before
         # returning messages from the server; if `max_wait_time` is reached, this
         # is ignored.
-        setting :min_bytes, 1
+        setting :min_bytes, default: 1
         # option max_bytes [Integer] the maximum number of bytes to read before returning messages
         # from each broker.
-        setting :max_bytes, 10_485_760
+        setting :max_bytes, default: 10_485_760
         # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
         # wait before returning data from a single message fetch. By setting this high you also
         # increase the fetching throughput - and by setting it low you set a bound on latency.
@@ -114,65 +114,65 @@ module Karafka
         # time specified. The default value is one second. If you want to have at most five
         # seconds of latency, set `max_wait_time` to 5. You should make sure
         # max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
-        setting :max_wait_time, 1
+        setting :max_wait_time, default: 1
         # option automatically_mark_as_consumed [Boolean] should we automatically mark received
         # messages as consumed (processed) after non-error consumption
-        setting :automatically_mark_as_consumed, true
+        setting :automatically_mark_as_consumed, default: true
         # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
         # Kafka cluster that went down (in seconds)
-        setting :reconnect_timeout, 5
+        setting :reconnect_timeout, default: 5
         # option connect_timeout [Integer] Sets the number of seconds to wait while connecting to
         # a broker for the first time. When ruby-kafka initializes, it needs to connect to at
         # least one host.
-        setting :connect_timeout, 10
+        setting :connect_timeout, default: 10
         # option socket_timeout [Integer] Sets the number of seconds to wait when reading from or
         # writing to a socket connection to a broker. After this timeout expires the connection
         # will be killed. Note that some Kafka operations are by definition long-running, such as
         # waiting for new messages to arrive in a partition, so don't set this value too low
-        setting :socket_timeout, 30
+        setting :socket_timeout, default: 30
         # option partitioner [Object, nil] the partitioner that should be used by the client
-        setting :partitioner, nil
+        setting :partitioner, default: nil
 
         # SSL authentication related settings
         # option ca_cert [String, nil] SSL CA certificate
-        setting :ssl_ca_cert, nil
+        setting :ssl_ca_cert, default: nil
         # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
-        setting :ssl_ca_cert_file_path, nil
+        setting :ssl_ca_cert_file_path, default: nil
         # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
         # certificate store
-        setting :ssl_ca_certs_from_system, false
+        setting :ssl_ca_certs_from_system, default: false
         # option ssl_verify_hostname [Boolean] Verify the hostname for client certs
-        setting :ssl_verify_hostname, true
+        setting :ssl_verify_hostname, default: true
         # option ssl_client_cert [String, nil] SSL client certificate
-        setting :ssl_client_cert, nil
+        setting :ssl_client_cert, default: nil
         # option ssl_client_cert_key [String, nil] SSL client certificate password
-        setting :ssl_client_cert_key, nil
+        setting :ssl_client_cert_key, default: nil
         # option sasl_gssapi_principal [String, nil] sasl principal
-        setting :sasl_gssapi_principal, nil
+        setting :sasl_gssapi_principal, default: nil
         # option sasl_gssapi_keytab [String, nil] sasl keytab
-        setting :sasl_gssapi_keytab, nil
+        setting :sasl_gssapi_keytab, default: nil
         # option sasl_plain_authzid [String] The authorization identity to use
-        setting :sasl_plain_authzid, ''
+        setting :sasl_plain_authzid, default: ''
         # option sasl_plain_username [String, nil] The username used to authenticate
-        setting :sasl_plain_username, nil
+        setting :sasl_plain_username, default: nil
         # option sasl_plain_password [String, nil] The password used to authenticate
-        setting :sasl_plain_password, nil
+        setting :sasl_plain_password, default: nil
         # option sasl_scram_username [String, nil] The username used to authenticate
-        setting :sasl_scram_username, nil
+        setting :sasl_scram_username, default: nil
         # option sasl_scram_password [String, nil] The password used to authenticate
-        setting :sasl_scram_password, nil
+        setting :sasl_scram_password, default: nil
         # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
-        setting :sasl_scram_mechanism, nil
+        setting :sasl_scram_mechanism, default: nil
         # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
-        setting :sasl_over_ssl, true
+        setting :sasl_over_ssl, default: true
         # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
-        setting :ssl_client_cert_chain, nil
+        setting :ssl_client_cert_chain, default: nil
         # option ssl_client_cert_key_password [String, nil] the password required to read
         # the ssl_client_cert_key
-        setting :ssl_client_cert_key_password, nil
+        setting :ssl_client_cert_key_password, default: nil
         # @param sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
         # implements method token.
-        setting :sasl_oauth_token_provider, nil
+        setting :sasl_oauth_token_provider, default: nil
       end
 
       # Namespace for internal settings that should not be modified
@@ -180,18 +180,18 @@ module Karafka
       # non global state
       setting :internal do
         # option routing_builder [Karafka::Routing::Builder] builder instance
-        setting :routing_builder, Routing::Builder.new
+        setting :routing_builder, default: Routing::Builder.new
         # option status [Karafka::Status] app status
-        setting :status, Status.new
+        setting :status, default: Status.new
         # option process [Karafka::Process] process status
         # @note In the future, we need to have a single process representation for all the karafka
         # instances
-        setting :process, Process.new
+        setting :process, default: Process.new
         # option fetcher [Karafka::Fetcher] fetcher instance
-        setting :fetcher, Fetcher.new
+        setting :fetcher, default: Fetcher.new
         # option configurators [Array<Object>] all configurators that we want to run after
         # the setup
-        setting :configurators, [Configurators::WaterDrop.new]
+        setting :configurators, default: [Configurators::WaterDrop.new]
       end
 
       class << self
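Since only the `setting` declarations changed to the keyword form, reading configuration is untouched by this migration. A minimal sketch, assuming the standard `Karafka::App.config` accessor:

```ruby
# Values resolve to the same defaults as before the keyword migration
Karafka::App.config.shutdown_timeout   # => 60
Karafka::App.config.batch_consuming    # => false
Karafka::App.config.kafka.seed_brokers # => ["kafka://127.0.0.1:9092"]
```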
data/lib/karafka/version.rb
CHANGED
-  VERSION = '1.4.7'
+  VERSION = '1.4.11'
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 1.4.
+  version: 1.4.11
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -36,7 +36,7 @@ cert_chain:
   R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
   pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
   -----END CERTIFICATE-----
-date: 2021-
+date: 2021-12-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: dry-configurable
@@ -44,56 +44,56 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.13'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.13'
 - !ruby/object:Gem::Dependency
   name: dry-inflector
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.2'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
    - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.2'
 - !ruby/object:Gem::Dependency
   name: dry-monitor
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.5'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.5'
 - !ruby/object:Gem::Dependency
   name: dry-validation
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.7'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.7'
 - !ruby/object:Gem::Dependency
   name: envlogic
   requirement: !ruby/object:Gem::Requirement
@@ -114,14 +114,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.3'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.3'
 - !ruby/object:Gem::Dependency
   name: ruby-kafka
   requirement: !ruby/object:Gem::Requirement
@@ -142,42 +142,42 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '
+        version: '1.1'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '
+        version: '1.1'
 - !ruby/object:Gem::Dependency
   name: waterdrop
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 1.4
+        version: '1.4'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 1.4
+        version: '1.4'
 - !ruby/object:Gem::Dependency
   name: zeitwerk
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '2.
+        version: '2.4'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '2.
+        version: '2.4'
 description: Framework used to simplify Apache Kafka based Ruby applications development
 email:
 - maciej@mensfeld.pl
@@ -191,7 +191,6 @@ files:
 - ".coditsu/ci.yml"
 - ".console_irbrc"
 - ".diffend.yml"
-- ".github/FUNDING.yml"
 - ".github/ISSUE_TEMPLATE/bug_report.md"
 - ".github/ISSUE_TEMPLATE/feature_request.md"
 - ".github/workflows/ci.yml"
@@ -287,10 +286,11 @@ files:
 - lib/karafka/templates/karafka.rb.erb
 - lib/karafka/version.rb
 - log/.gitkeep
-homepage: https://
+homepage: https://karafka.io
 licenses:
 - MIT
-metadata:
+metadata:
+  source_code_uri: https://github.com/karafka/karafka
 post_install_message:
 rdoc_options: []
 require_paths:
@@ -306,7 +306,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.2.
+rubygems_version: 3.2.31
 signing_key:
 specification_version: 4
 summary: Ruby based framework for working with Apache Kafka
metadata.gz.sig
CHANGED
Binary file
data/.github/FUNDING.yml
DELETED