karafka 1.4.3 → 1.4.14

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 0fae4fb99cb42515602d9f27a6d7a5bd7485e4e0b40868a6856766ebe7b50083
- data.tar.gz: b82635f328ba711f836da733dd0ec5ed415446d3209918a20f7b6f42ff1e1ac2
+ metadata.gz: 7dd06a7ace623ae63695899e2cff1293482390ccbaeabcf7b1cc4b4aa6ec6a9e
+ data.tar.gz: 60e7c986a94c9552c1adc754b6bdb02f5e5cb5012881a15333fa8eff854485a8
  SHA512:
- metadata.gz: f90e7c6757cd517649c0021f759db091c050899c8a7846d022fa58e117db61cb5a3f6ee640b452f43017e0b504b0ba6a3b075886b266438daf1098d0fa2bc48d
- data.tar.gz: 7ab841ff7269992e90a17a88159baa9be67ab35866f8d174a434bd6efbecceb1521bbf142b6c9ebb37cabbea8a125e49b60fde3844b2a0e6ce3b62ef941b6a75
+ metadata.gz: cc34ba15cd7f8f138202fd0a9b53c3f63fcde13dd353e8da810a2b1f5e153c87335b0f4d26e6dccca4c484ddc12ef59d8331315130772bd5f05ef39a34f1a7c7
+ data.tar.gz: 9ef7cfce8c382091072e1c1df4cfece7bd5220cc3b8c74f82f4b1166bda756c354b4632df66ef95c912d3f13c2264914d96359fb72d4663592f98cd4b313f269
checksums.yaml.gz.sig CHANGED
Binary file
@@ -1,5 +1,7 @@
  name: ci

+ concurrency: ci-${{ github.ref }}
+
  on:
  pull_request:
  push:
@@ -14,12 +16,11 @@ jobs:
  fail-fast: false
  matrix:
  ruby:
+ - '3.1'
  - '3.0'
  - '2.7'
- - '2.6'
- - '2.5'
  include:
- - ruby: '3.0'
+ - ruby: '3.1'
  coverage: 'true'
  steps:
  - uses: actions/checkout@v2
@@ -55,7 +56,7 @@ jobs:
  - name: Set up Ruby
  uses: ruby/setup-ruby@v1
  with:
- ruby-version: 3.0
+ ruby-version: 3.1
  - name: Install latest bundler
  run: gem install bundler --no-document
  - name: Install Diffend plugin
data/.ruby-version CHANGED
@@ -1 +1 @@
- 3.0.0
+ 3.1.2
data/CHANGELOG.md CHANGED
@@ -1,5 +1,46 @@
  # Karafka framework changelog

+ ## 1.4.14 (2022-10-14)
+ - Fix `concurrent-ruby` missing as a dependency (Azdaroth)
+ - Warn about upcoming end of 1.4 support.
+
+ ## 1.4.13 (2022-02-19)
+ - Drop support for ruby 2.6
+ - Add mfa requirement
+
+ ## 1.4.12 (2022-01-13)
+ - Ruby 3.1 support
+ - `irb` dependency removal (vbyno)
+
+ ## 1.4.11 (2021-12-04)
+ - Source code metadata url added to the gemspec
+ - Gem bump
+
+ ## 1.4.10 (2021-10-30)
+ - update gems requirements in the gemspec (nijikon)
+
+ ## 1.4.9 (2021-09-29)
+ - fix `dry-configurable` deprecation warnings for default value as positional argument
+
+ ## 1.4.8 (2021-09-08)
+ - Allow 'rails' in Gemfile to enable rails-aware generator (rewritten)
+
+ ## 1.4.7 (2021-09-04)
+ - Update ruby-kafka to `1.4.0`
+ - Support for `resolve_seed_brokers` option (with Azdaroth)
+ - Set minimum `ruby-kafka` requirement to `1.3.0`
+
+ ## 1.4.6 (2021-08-05)
+ - #700 Fix Ruby 3 compatibility issues in Connection::Client#pause (MmKolodziej)
+
+ ## 1.4.5 (2021-06-16)
+ - Fixup logger checks for non-writeable logfile (ojab)
+ - #689 - Update the stdout initialization message for framework initialization
+
+ ## 1.4.4 (2021-04-19)
+ - Remove Ruby 2.5 support and update minimum Ruby requirement to 2.6
+ - Remove rake dependency
+
  ## 1.4.3 (2021-03-24)
  - Fixes for Ruby 3.0 compatibility

@@ -12,7 +53,7 @@

  ## 1.4.0 (2020-09-05)
  - Rename `Karafka::Params::Metadata` to `Karafka::Params::BatchMetadata`
- ` Rename consumer `#metadata` to `#batch_metadata`
+ - Rename consumer `#metadata` to `#batch_metadata`
  - Separate metadata (including Karafka native metadata) from the root of params (backwards compatibility preserved thanks to rabotyaga)
  - Remove metadata hash dependency
  - Remove params dependency on a hash in favour of PORO
data/Gemfile.lock CHANGED
@@ -1,66 +1,61 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- karafka (1.4.3)
5
- dry-configurable (~> 0.8)
6
- dry-inflector (~> 0.1)
7
- dry-monitor (~> 0.3)
8
- dry-validation (~> 1.2)
4
+ karafka (1.4.14)
5
+ concurrent-ruby
6
+ dry-configurable (~> 0.16)
7
+ dry-inflector (~> 0.2)
8
+ dry-monitor (~> 0.5)
9
+ dry-validation (~> 1.7)
9
10
  envlogic (~> 1.1)
10
- irb (~> 1.0)
11
- rake (>= 11.3)
12
- ruby-kafka (>= 1.0.0)
13
- thor (>= 0.20)
14
- waterdrop (~> 1.4.0)
15
- zeitwerk (~> 2.1)
11
+ ruby-kafka (>= 1.3.0)
12
+ thor (>= 1.1)
13
+ waterdrop (~> 1.4)
14
+ zeitwerk (~> 2.6)
16
15
 
17
16
  GEM
18
17
  remote: https://rubygems.org/
19
18
  specs:
20
- activesupport (6.1.3)
19
+ activesupport (7.0.3)
21
20
  concurrent-ruby (~> 1.0, >= 1.0.2)
22
21
  i18n (>= 1.6, < 2)
23
22
  minitest (>= 5.1)
24
23
  tzinfo (~> 2.0)
25
- zeitwerk (~> 2.3)
26
24
  byebug (11.1.3)
27
- concurrent-ruby (1.1.8)
25
+ concurrent-ruby (1.1.10)
28
26
  delivery_boy (1.1.0)
29
27
  king_konf (~> 1.0)
30
28
  ruby-kafka (~> 1.0)
31
- diff-lcs (1.4.4)
32
- digest-crc (0.6.3)
29
+ diff-lcs (1.5.0)
30
+ digest-crc (0.6.4)
33
31
  rake (>= 12.0.0, < 14.0.0)
34
- docile (1.3.5)
35
- dry-configurable (0.12.1)
36
- concurrent-ruby (~> 1.0)
37
- dry-core (~> 0.5, >= 0.5.0)
38
- dry-container (0.7.2)
32
+ docile (1.4.0)
33
+ dry-configurable (0.16.1)
34
+ dry-core (~> 0.6)
35
+ zeitwerk (~> 2.6)
36
+ dry-container (0.11.0)
39
37
  concurrent-ruby (~> 1.0)
40
- dry-configurable (~> 0.1, >= 0.1.3)
41
- dry-core (0.5.0)
38
+ dry-core (0.8.1)
42
39
  concurrent-ruby (~> 1.0)
43
- dry-equalizer (0.3.0)
44
- dry-events (0.2.0)
40
+ dry-events (0.3.0)
45
41
  concurrent-ruby (~> 1.0)
46
- dry-core (~> 0.4)
47
- dry-equalizer (~> 0.2)
48
- dry-inflector (0.2.0)
49
- dry-initializer (3.0.4)
50
- dry-logic (1.1.0)
42
+ dry-core (~> 0.5, >= 0.5)
43
+ dry-inflector (0.3.0)
44
+ dry-initializer (3.1.1)
45
+ dry-logic (1.2.0)
51
46
  concurrent-ruby (~> 1.0)
52
47
  dry-core (~> 0.5, >= 0.5)
53
- dry-monitor (0.3.2)
54
- dry-configurable (~> 0.5)
55
- dry-core (~> 0.4)
56
- dry-equalizer (~> 0.2)
48
+ dry-monitor (0.6.3)
49
+ dry-configurable (~> 0.13, >= 0.13.0)
50
+ dry-core (~> 0.5, >= 0.5)
57
51
  dry-events (~> 0.2)
58
- dry-schema (1.6.1)
52
+ zeitwerk (~> 2.5)
53
+ dry-schema (1.10.6)
59
54
  concurrent-ruby (~> 1.0)
60
- dry-configurable (~> 0.8, >= 0.8.3)
55
+ dry-configurable (~> 0.13, >= 0.13.0)
61
56
  dry-core (~> 0.5, >= 0.5)
62
57
  dry-initializer (~> 3.0)
63
- dry-logic (~> 1.0)
58
+ dry-logic (~> 1.2)
64
59
  dry-types (~> 1.5)
65
60
  dry-types (1.5.1)
66
61
  concurrent-ruby (~> 1.0)
@@ -68,61 +63,56 @@ GEM
68
63
  dry-core (~> 0.5, >= 0.5)
69
64
  dry-inflector (~> 0.1, >= 0.1.2)
70
65
  dry-logic (~> 1.0, >= 1.0.2)
71
- dry-validation (1.6.0)
66
+ dry-validation (1.8.1)
72
67
  concurrent-ruby (~> 1.0)
73
68
  dry-container (~> 0.7, >= 0.7.1)
74
- dry-core (~> 0.4)
75
- dry-equalizer (~> 0.2)
69
+ dry-core (~> 0.5, >= 0.5)
76
70
  dry-initializer (~> 3.0)
77
- dry-schema (~> 1.5, >= 1.5.2)
78
- envlogic (1.1.2)
71
+ dry-schema (~> 1.8, >= 1.8.0)
72
+ envlogic (1.1.5)
79
73
  dry-inflector (~> 0.1)
80
- factory_bot (6.1.0)
74
+ factory_bot (6.2.1)
81
75
  activesupport (>= 5.0.0)
82
- i18n (1.8.9)
76
+ i18n (1.10.0)
83
77
  concurrent-ruby (~> 1.0)
84
- io-console (0.5.9)
85
- irb (1.3.4)
86
- reline (>= 0.1.5)
87
- king_konf (1.0.0)
88
- minitest (5.14.4)
89
- rake (13.0.3)
90
- reline (0.2.4)
91
- io-console (~> 0.5)
92
- rspec (3.10.0)
93
- rspec-core (~> 3.10.0)
94
- rspec-expectations (~> 3.10.0)
95
- rspec-mocks (~> 3.10.0)
96
- rspec-core (3.10.1)
97
- rspec-support (~> 3.10.0)
98
- rspec-expectations (3.10.1)
78
+ king_konf (1.0.1)
79
+ minitest (5.15.0)
80
+ rake (13.0.6)
81
+ rspec (3.11.0)
82
+ rspec-core (~> 3.11.0)
83
+ rspec-expectations (~> 3.11.0)
84
+ rspec-mocks (~> 3.11.0)
85
+ rspec-core (3.11.0)
86
+ rspec-support (~> 3.11.0)
87
+ rspec-expectations (3.11.0)
99
88
  diff-lcs (>= 1.2.0, < 2.0)
100
- rspec-support (~> 3.10.0)
101
- rspec-mocks (3.10.2)
89
+ rspec-support (~> 3.11.0)
90
+ rspec-mocks (3.11.1)
102
91
  diff-lcs (>= 1.2.0, < 2.0)
103
- rspec-support (~> 3.10.0)
104
- rspec-support (3.10.2)
105
- ruby-kafka (1.3.0)
92
+ rspec-support (~> 3.11.0)
93
+ rspec-support (3.11.0)
94
+ ruby-kafka (1.5.0)
106
95
  digest-crc
107
96
  simplecov (0.21.2)
108
97
  docile (~> 1.1)
109
98
  simplecov-html (~> 0.11)
110
99
  simplecov_json_formatter (~> 0.1)
111
100
  simplecov-html (0.12.3)
112
- simplecov_json_formatter (0.1.2)
113
- thor (1.1.0)
101
+ simplecov_json_formatter (0.1.4)
102
+ thor (1.2.1)
114
103
  tzinfo (2.0.4)
115
104
  concurrent-ruby (~> 1.0)
116
- waterdrop (1.4.1)
105
+ waterdrop (1.4.4)
117
106
  delivery_boy (>= 0.2, < 2.x)
118
- dry-configurable (~> 0.8)
119
- dry-monitor (~> 0.3)
120
- dry-validation (~> 1.2)
121
- ruby-kafka (>= 0.7.8)
122
- zeitwerk (~> 2.1)
123
- zeitwerk (2.4.2)
107
+ dry-configurable (~> 0.13)
108
+ dry-monitor (~> 0.5)
109
+ dry-validation (~> 1.7)
110
+ ruby-kafka (>= 1.3.0)
111
+ zeitwerk (~> 2.4)
112
+ zeitwerk (2.6.1)
124
113
 
125
114
  PLATFORMS
115
+ x86_64-darwin-18
126
116
  x86_64-linux
127
117
 
128
118
  DEPENDENCIES
@@ -133,4 +123,4 @@ DEPENDENCIES
133
123
  simplecov
134
124
 
135
125
  BUNDLED WITH
136
- 2.2.15
126
+ 2.3.11
data/README.md CHANGED
@@ -2,16 +2,32 @@

  [![Build Status](https://github.com/karafka/karafka/actions/workflows/ci.yml/badge.svg)](https://github.com/karafka/karafka/actions/workflows/ci.yml)
  [![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
- [![Join the chat at https://gitter.im/karafka/karafka](https://badges.gitter.im/karafka/karafka.svg)](https://gitter.im/karafka/karafka)
+ [![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)

  **Note**: We're finishing the new Karafka `2.0` but for now, please use `1.4`. All the documentation presented here refers to `1.4`
- ..
- Documentation presented here refers to Karafka `1.4`.

  ## About Karafka

  Framework used to simplify Apache Kafka based Ruby applications development.

+ ```ruby
+ # Define what topics you want to consume with which consumers
+ Karafka::App.consumer_groups.draw do
+ topic 'system_events' do
+ consumer EventsConsumer
+ end
+ end
+
+ # And create your consumers, within which your messages will be processed
+ class EventsConsumer < ApplicationConsumer
+ # Example that utilizes ActiveRecord#insert_all and Karafka batch processing
+ def consume
+ # Store all of the incoming Kafka events locally in an efficient way
+ Event.insert_all params_batch.payloads
+ end
+ end
+ ```
+
  Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming and processing this data, without having to focus on things that are not your business domain.

  Karafka not only handles incoming messages but also provides tools for building complex data-flow applications that receive and send messages.
@@ -37,9 +53,9 @@ Karafka based applications can be easily deployed to any type of infrastructure,

  ## Support

- Karafka has a [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup, and deployment along with other useful details on how to run Karafka.
+ Karafka has [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup, and deployment along with other useful details on how to run Karafka.

- If you have any questions about using Karafka, feel free to join our [Gitter](https://gitter.im/karafka/karafka) chat channel.
+ If you have any questions about using Karafka, feel free to join our [Slack](https://slack.karafka.io) chat channel.

  ## Getting started

@@ -67,10 +83,6 @@ and follow the instructions from the [example app Wiki](https://github.com/karaf

  If you need more details and know how on how to start Karafka with a clean installation, read the [Getting started page](https://github.com/karafka/karafka/wiki/Getting-started) section of our Wiki.

- ## Notice
-
- Karafka framework and Karafka team are __not__ related to Kafka streaming service called CloudKarafka in any matter. We don't recommend nor discourage usage of their platform.
-
  ## References

  * [Karafka framework](https://github.com/karafka/karafka)
@@ -79,23 +91,8 @@ Karafka framework and Karafka team are __not__ related to Kafka streaming servic

  ## Note on contributions

- First, thank you for considering contributing to Karafka! It's people like you that make the open source community such a great community!
-
- Each pull request must pass all the RSpec specs and meet our quality requirements.
-
- To check if everything is as it should be, we use [Coditsu](https://coditsu.io) that combines multiple linters and code analyzers for both code and documentation. Once you're done with your changes, submit a pull request.
-
- Coditsu will automatically check your work against our quality standards. You can find your commit check results on the [builds page](https://app.coditsu.io/karafka/commit_builds) of Karafka organization.
-
- [![coditsu](https://coditsu.io/assets/quality_bar.svg)](https://app.coditsu.io/karafka/commit_builds)
-
- ## Contributors
-
- This project exists thanks to all the people who contribute.
- <a href="https://github.com/karafka/karafka/graphs/contributors"><img src="https://opencollective.com/karafka/contributors.svg?width=890" /></a>
-
- ## Sponsors
+ First, thank you for considering contributing to the Karafka ecosystem! It's people like you that make the open source community such a great community!

- We are looking for sustainable sponsorship. If your company is relying on Karafka framework or simply want to see Karafka evolve faster to meet your requirements, please consider backing the project.
+ Each pull request must pass all the RSpec specs, integration tests and meet our quality requirements.

- Please contact [Maciej Mensfeld](mailto:maciej@mensfeld.pl) directly for more details.
+ Fork it, update and wait for the Github Actions results.
data/certs/mensfeld.pem CHANGED
@@ -1,25 +1,26 @@
1
1
  -----BEGIN CERTIFICATE-----
2
- MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
3
- ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjAwODExMDkxNTM3WhcNMjEwODExMDkx
4
- NTM3WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
5
- CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDCpXsCgmINb6lHBXXBdyrgsBPSxC4/
6
- 2H+weJ6L9CruTiv2+2/ZkQGtnLcDgrD14rdLIHK7t0o3EKYlDT5GhD/XUVhI15JE
7
- N7IqnPUgexe1fbZArwQ51afxz2AmPQN2BkB2oeQHXxnSWUGMhvcEZpfbxCCJH26w
8
- hS0Ccsma8yxA6hSlGVhFVDuCr7c2L1di6cK2CtIDpfDaWqnVNJEwBYHIxrCoWK5g
9
- sIGekVt/admS9gRhIMaIBg+Mshth5/DEyWO2QjteTodItlxfTctrfmiAl8X8T5JP
10
- VXeLp5SSOJ5JXE80nShMJp3RFnGw5fqjX/ffjtISYh78/By4xF3a25HdWH9+qO2Z
11
- tx0wSGc9/4gqNM0APQnjN/4YXrGZ4IeSjtE+OrrX07l0TiyikzSLFOkZCAp8oBJi
12
- Fhlosz8xQDJf7mhNxOaZziqASzp/hJTU/tuDKl5+ql2icnMv5iV/i6SlmvU29QNg
13
- LCV71pUv0pWzN+OZbHZKWepGhEQ3cG9MwvkCAwEAAaN3MHUwCQYDVR0TBAIwADAL
14
- BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFImGed2AXS070ohfRidiCEhXEUN+MB0GA1Ud
15
- EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
16
- c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKiHpwoENVrMi94V1zD4o8/6G3AU
17
- gWz4udkPYHTZLUy3dLznc/sNjdkJFWT3E6NKYq7c60EpJ0m0vAEg5+F5pmNOsvD3
18
- 2pXLj9kisEeYhR516HwXAvtngboUcb75skqvBCU++4Pu7BRAPjO1/ihLSBexbwSS
19
- fF+J5OWNuyHHCQp+kGPLtXJe2yUYyvSWDj3I2//Vk0VhNOIlaCS1+5/P3ZJThOtm
20
- zJUBI7h3HgovwRpcnmk2mXTmU4Zx/bCzX8EA6VY0khEvnmiq7S6eBF0H9qH8KyQ6
21
- EkVLpvmUDFcf/uNaBQdazEMB5jYtwoA8gQlANETNGPi51KlkukhKgaIEDMkBDJOx
22
- 65N7DzmkcyY0/GwjIVIxmRhcrCt1YeCUElmfFx0iida1/YRm6sB2AXqScc1+ECRi
23
- 2DND//YJUikn1zwbz1kT70XmHd97B4Eytpln7K+M1u2g1pHVEPW4owD/ammXNpUy
24
- nt70FcDD4yxJQ+0YNiHd0N8IcVBM1TMIVctMNQ==
2
+ MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
3
+ YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
4
+ MB4XDTIyMDgxOTE3MjEzN1oXDTIzMDgxOTE3MjEzN1owPzEQMA4GA1UEAwwHY29u
5
+ dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
6
+ bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAODzeO3L6lxdATzMHKNW
7
+ jFA/GGunoPuylO/BMzy8RiQHh7VIvysAKs0tHhTx3g2D0STDpF+hcQcPELFikiT2
8
+ F+1wOHj/SsrK7VKqfA8+gq04hKc5sQoX2Egf9k3V0YJ3eZ6R/koHkQ8A0TVt0w6F
9
+ ZQckoV4MqnEAx0g/FZN3mnHTlJ3VFLSBqJEIe+S6FZMl92mSv+hTrlUG8VaYxSfN
10
+ lTCvnKk284F6QZq5XIENLRmcDd/3aPBLnLwNnyMyhB+6gK8cUO+CFlDO5tjo/aBA
11
+ rUnl++wGG0JooF1ed0v+evOn9KoMBG6rHewcf79qJbVOscbD8qSAmo+sCXtcFryr
12
+ KRMTB8gNbowJkFRJDEe8tfRy11u1fYzFg/qNO82FJd62rKAw2wN0C29yCeQOPRb1
13
+ Cw9Y4ZwK9VFNEcV9L+3pHTHn2XfuZHtDaG198VweiF6raFO4yiEYccodH/USP0L5
14
+ cbcCFtmu/4HDSxL1ByQXO84A0ybJuk3/+aPUSXe9C9U8fwIDAQABo3cwdTAJBgNV
15
+ HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUSlcEakb7gfn/5E2WY6z73BF/
16
+ iZkwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
17
+ bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEA1aS+E7RXJ1w9g9mJ
18
+ G0NzFxe64OEuENosNlvYQCbRKGCXAU1qqelYkBQHseRgRKxLICrnypRo9IEobyHa
19
+ vDnJ4r7Tsb34dleqQW2zY/obG+cia3Ym2JsegXWF7dDOzCXJ4FN8MFoT2jHlqLLw
20
+ yrap0YO5zx0GSQ0Dwy8h2n2v2vanMEeCx7iNm3ERgR5WuN5sjzWoz2A/JLEEcK0C
21
+ EnAGKCWAd1fuG8IemDjT1edsd5FyYR4bIX0m+99oDuFZyPiiIbalmyYiSBBp59Yb
22
+ Q0P8zeBi4OfwCZNcxqz0KONmw9JLNv6DgyEAH5xe/4JzhMEgvIRiPj0pHfA7oqQF
23
+ KUNqvD1KlxbEC+bZfE5IZhnqYLdld/Ksqd22FI1RBhiS1Ejfsj99LVIm9cBuZEY2
24
+ Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
25
+ MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
25
26
  -----END CERTIFICATE-----
data/karafka.gemspec CHANGED
@@ -12,24 +12,23 @@ Gem::Specification.new do |spec|
  spec.platform = Gem::Platform::RUBY
  spec.authors = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
  spec.email = %w[maciej@mensfeld.pl pavlo.vavruk@gmail.com adam99g@gmail.com]
- spec.homepage = 'https://github.com/karafka/karafka'
+ spec.homepage = 'https://karafka.io'
  spec.summary = 'Ruby based framework for working with Apache Kafka'
  spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
  spec.license = 'MIT'

- spec.add_dependency 'dry-configurable', '~> 0.8'
- spec.add_dependency 'dry-inflector', '~> 0.1'
- spec.add_dependency 'dry-monitor', '~> 0.3'
- spec.add_dependency 'dry-validation', '~> 1.2'
+ spec.add_dependency 'concurrent-ruby'
+ spec.add_dependency 'dry-configurable', '~> 0.16'
+ spec.add_dependency 'dry-inflector', '~> 0.2'
+ spec.add_dependency 'dry-monitor', '~> 0.5'
+ spec.add_dependency 'dry-validation', '~> 1.7'
  spec.add_dependency 'envlogic', '~> 1.1'
- spec.add_dependency 'irb', '~> 1.0'
- spec.add_dependency 'rake', '>= 11.3'
- spec.add_dependency 'ruby-kafka', '>= 1.0.0'
- spec.add_dependency 'thor', '>= 0.20'
- spec.add_dependency 'waterdrop', '~> 1.4.0'
- spec.add_dependency 'zeitwerk', '~> 2.1'
+ spec.add_dependency 'ruby-kafka', '>= 1.3.0'
+ spec.add_dependency 'thor', '>= 1.1'
+ spec.add_dependency 'waterdrop', '~> 1.4'
+ spec.add_dependency 'zeitwerk', '~> 2.6'

- spec.required_ruby_version = '>= 2.5.0'
+ spec.required_ruby_version = '>= 2.7'

  if $PROGRAM_NAME.end_with?('gem')
  spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
@@ -39,5 +38,15 @@ Gem::Specification.new do |spec|
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.require_paths = %w[lib]
+ spec.post_install_message = <<~MSG
+ WARN: Karafka 1.4 will reach the end of life soon.
+ We highly recommend updating to Karafka 2.0.
+ Visit this page for more details: https://karafka.io/docs/Versions-Lifecycle-and-EOL
+ MSG
+
+ spec.metadata = {
+ 'source_code_uri' => 'https://github.com/karafka/karafka',
+ 'rubygems_mfa_required' => 'true'
+ }
  end
  # rubocop:enable Metrics/BlockLength
@@ -30,11 +30,12 @@ module Karafka
  # @param args [Array] all the things that Thor CLI accepts
  def initialize(*args)
  super
- @rails = Bundler::LockfileParser.new(
+ dependencies = Bundler::LockfileParser.new(
  Bundler.read_file(
  Bundler.default_lockfile
  )
- ).dependencies.key?('railties')
+ ).dependencies
+ @rails = dependencies.key?('railties') || dependencies.key?('rails')
  end

  # Install all required things for Karafka application in current directory
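The hunk above is the 1.4.8 changelog item about allowing a plain `rails` entry in the Gemfile: the installer no longer requires `railties` to appear in the lockfile before it generates Rails-aware boilerplate. A minimal stand-alone sketch of the same detection, runnable from any bundled project (the `rails_project?` helper name is ours, not part of Karafka):

```ruby
# frozen_string_literal: true

require 'bundler'

# Returns true when the current bundle declares either `railties` or `rails`,
# mirroring the detection performed by the Karafka installer hunk above.
def rails_project?
  dependencies = Bundler::LockfileParser.new(
    Bundler.read_file(Bundler.default_lockfile)
  ).dependencies

  dependencies.key?('railties') || dependencies.key?('rails')
end

puts(rails_project? ? 'Generating Rails-aware boilerplate' : 'Plain Ruby setup')
```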
@@ -84,17 +84,16 @@ module Karafka
  # @param topic [String] topic that we want to pause
  # @param partition [Integer] number partition that we want to pause
  # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
- # @return [Array] array with all the details required to pause kafka consumer
+ # @return [Hash] hash with all the details required to pause kafka consumer
  def pause(topic, partition, consumer_group)
- [
- Karafka::App.config.topic_mapper.outgoing(topic),
- partition,
- {
+ {
+ args: [Karafka::App.config.topic_mapper.outgoing(topic), partition],
+ kwargs: {
  timeout: consumer_group.pause_timeout,
  max_timeout: consumer_group.pause_max_timeout,
  exponential_backoff: consumer_group.pause_exponential_backoff
  }
- ]
+ }
  end

  # Remaps topic details taking the topic mapper feature into consideration.
@@ -64,7 +64,8 @@ module Karafka
  # @param topic [String] topic that we want to pause
  # @param partition [Integer] number partition that we want to pause
  def pause(topic, partition)
- kafka_consumer.pause(*ApiAdapter.pause(topic, partition, consumer_group))
+ args, kwargs = ApiAdapter.pause(topic, partition, consumer_group).values_at(:args, :kwargs)
+ kafka_consumer.pause(*args, **kwargs)
  end

  # Marks given message as consumed
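The two hunks above are the Ruby 3 compatibility fix referenced in the 1.4.6 changelog entry: on Ruby 3 a trailing options hash is no longer implicitly converted into keyword arguments, so `ApiAdapter.pause` now returns the positional and keyword parts separately and the client splats them with `*` and `**`. A stand-alone sketch of the failure mode and the fix; the `pause` method below only imitates the shape of ruby-kafka's consumer API and is not its actual implementation:

```ruby
# Illustrative receiver: positional topic/partition plus keyword options.
def pause(topic, partition, timeout:, max_timeout: nil, exponential_backoff: false)
  puts "pausing #{topic}/#{partition} for #{timeout}s"
end

# Ruby 2 style: keywords smuggled in as a trailing hash. On Ruby 3 the hash stays
# positional, so this raises ArgumentError (wrong number of arguments).
legacy = ['events', 0, { timeout: 10, max_timeout: nil, exponential_backoff: false }]
# pause(*legacy) # => ArgumentError on Ruby 3

# Ruby 3 style, matching the new ApiAdapter contract: explicit args/kwargs split.
adapted = { args: ['events', 0], kwargs: { timeout: 10 } }
args, kwargs = adapted.values_at(:args, :kwargs)
pause(*args, **kwargs) # => pausing events/0 for 10s
```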
@@ -20,7 +20,6 @@ module Karafka
  # @param _args Any arguments that we don't care about but that are needed in order to
  # make this logger compatible with the default Ruby one
  def initialize(*_args)
- ensure_dir_exists
  super(target)
  self.level = ENV_MAP[Karafka.env] || ENV_MAP['default']
  end
@@ -33,14 +32,7 @@ module Karafka
  def target
  Karafka::Helpers::MultiDelegator
  .delegate(:write, :close)
- .to($stdout, file)
- end
-
- # Makes sure the log directory exists as long as we can write to it
- def ensure_dir_exists
- FileUtils.mkdir_p(File.dirname(log_path))
- rescue Errno::EACCES, Errno::EROFS
- nil
+ .to(*[$stdout, file].compact)
  end

  # @return [Pathname] Path to a file to which we should log
@@ -51,7 +43,11 @@ module Karafka
  # @return [File] file to which we want to write our logs
  # @note File is being opened in append mode ('a')
  def file
+ FileUtils.mkdir_p(File.dirname(log_path))
+
  @file ||= File.open(log_path, 'a')
+ rescue Errno::EACCES, Errno::EROFS
+ nil
  end
  end
  end
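Together with the 1.4.5 changelog entry about non-writeable logfiles, the logger hunks above move directory creation into `#file` and let it return `nil` on a read-only filesystem; `.to(*[$stdout, file].compact)` then delegates writes to `$stdout` alone. A small sketch of that compaction idea, using a hypothetical `MultiWriter` in place of `Karafka::Helpers::MultiDelegator`:

```ruby
require 'fileutils'

# Hypothetical stand-in for Karafka::Helpers::MultiDelegator: writes to every
# target it was given. Passing a compacted array silently drops nil targets.
class MultiWriter
  def initialize(*targets)
    @targets = targets
  end

  def write(message)
    @targets.each { |target| target.write(message) }
  end
end

def log_file(path)
  FileUtils.mkdir_p(File.dirname(path))
  File.open(path, 'a')
rescue Errno::EACCES, Errno::EROFS
  nil # read-only filesystem: fall back to stdout-only logging
end

file = log_file('/tmp/karafka-logger-example/app.log')
writer = MultiWriter.new(*[$stdout, file].compact)
writer.write("works with or without a writable log file\n")
```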
@@ -103,10 +103,10 @@ module Karafka
  info "Responded from #{calling.class} using #{responder} with following data #{data}"
  end

- # Logs info that we're initializing Karafka app
+ # Logs info that we're initializing Karafka framework components
  # @param _event [Dry::Events::Event] event details including payload
  def on_app_initializing(_event)
- info "Initializing Karafka server #{::Process.pid}"
+ info "Initializing Karafka framework #{::Process.pid}"
  end

  # Logs info that we're running Karafka app
@@ -24,86 +24,89 @@ module Karafka
24
24
  # default Kafka groups namespaces and identify that app in kafka
25
25
  setting :client_id
26
26
  # What backend do we want to use to process messages
27
- setting :backend, :inline
27
+ setting :backend, default: :inline
28
28
  # option logger [Instance] logger that we want to use
29
- setting :logger, ::Karafka::Instrumentation::Logger.new
29
+ setting :logger, default: ::Karafka::Instrumentation::Logger.new
30
30
  # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
31
- setting :monitor, ::Karafka::Instrumentation::Monitor.new
31
+ setting :monitor, default: ::Karafka::Instrumentation::Monitor.new
32
32
  # Mapper used to remap consumer groups ids, so in case users migrate from other tools
33
33
  # or they need to maintain their own internal consumer group naming conventions, they
34
34
  # can easily do it, replacing the default client_id + consumer name pattern concept
35
- setting :consumer_mapper, Routing::ConsumerMapper.new
35
+ setting :consumer_mapper, default: Routing::ConsumerMapper.new
36
36
  # Mapper used to remap names of topics, so we can have a clean internal topic naming
37
37
  # despite using any Kafka provider that uses namespacing, etc
38
38
  # It needs to implement two methods:
39
39
  # - #incoming - for remapping from the incoming message to our internal format
40
40
  # - #outgoing - for remapping from internal topic name into outgoing message
41
- setting :topic_mapper, Routing::TopicMapper.new
41
+ setting :topic_mapper, default: Routing::TopicMapper.new
42
42
  # Default serializer for converting whatever we want to send to kafka to json
43
- setting :serializer, Karafka::Serialization::Json::Serializer.new
43
+ setting :serializer, default: Karafka::Serialization::Json::Serializer.new
44
44
  # Default deserializer for converting incoming data into ruby objects
45
- setting :deserializer, Karafka::Serialization::Json::Deserializer.new
45
+ setting :deserializer, default: Karafka::Serialization::Json::Deserializer.new
46
46
  # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
47
47
  # @note Fetching does not equal consuming, see batch_consuming description for details
48
- setting :batch_fetching, true
48
+ setting :batch_fetching, default: true
49
49
  # If batch_consuming is true, we will have access to #params_batch instead of #params.
50
50
  # #params_batch will contain params received from Kafka (may be more than 1) so we can
51
51
  # process them in batches
52
- setting :batch_consuming, false
52
+ setting :batch_consuming, default: false
53
53
  # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
54
54
  # longer wait for the consumers to stop gracefully but instead we force terminate
55
55
  # everything.
56
- setting :shutdown_timeout, 60
56
+ setting :shutdown_timeout, default: 60
57
57
 
58
58
  # option kafka [Hash] - optional - kafka configuration options
59
59
  setting :kafka do
60
60
  # Array with at least one host
61
- setting :seed_brokers, %w[kafka://127.0.0.1:9092]
61
+ setting :seed_brokers, default: %w[kafka://127.0.0.1:9092]
62
62
  # option session_timeout [Integer] the number of seconds after which, if a client
63
63
  # hasn't contacted the Kafka cluster, it will be kicked out of the group.
64
- setting :session_timeout, 30
64
+ setting :session_timeout, default: 30
65
65
  # Time that a given partition will be paused from fetching messages, when message
66
66
  # consumption fails. It allows us to process other partitions, while the error is being
67
67
  # resolved and also "slows" things down, so it prevents from "eating" up all messages and
68
68
  # consuming them with failed code. Use `nil` if you want to pause forever and never retry.
69
- setting :pause_timeout, 10
69
+ setting :pause_timeout, default: 10
70
70
  # option pause_max_timeout [Integer, nil] the maximum number of seconds to pause for,
71
71
  # or `nil` if no maximum should be enforced.
72
- setting :pause_max_timeout, nil
72
+ setting :pause_max_timeout, default: nil
73
73
  # option pause_exponential_backoff [Boolean] whether to enable exponential backoff
74
- setting :pause_exponential_backoff, false
74
+ setting :pause_exponential_backoff, default: false
75
75
  # option offset_commit_interval [Integer] the interval between offset commits,
76
76
  # in seconds.
77
- setting :offset_commit_interval, 10
77
+ setting :offset_commit_interval, default: 10
78
78
  # option offset_commit_threshold [Integer] the number of messages that can be
79
79
  # processed before their offsets are committed. If zero, offset commits are
80
80
  # not triggered by message consumption.
81
- setting :offset_commit_threshold, 0
81
+ setting :offset_commit_threshold, default: 0
82
82
  # option heartbeat_interval [Integer] the interval between heartbeats; must be less
83
83
  # than the session window.
84
- setting :heartbeat_interval, 10
84
+ setting :heartbeat_interval, default: 10
85
85
  # option offset_retention_time [Integer] The length of the retention window, known as
86
86
  # offset retention time
87
- setting :offset_retention_time, nil
87
+ setting :offset_retention_time, default: nil
88
88
  # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
89
89
  # are stored for further processing. Note, that each item in the queue represents a
90
90
  # response from a single broker
91
- setting :fetcher_max_queue_size, 10
91
+ setting :fetcher_max_queue_size, default: 10
92
92
  # option assignment_strategy [Object] a strategy determining the assignment of
93
93
  # partitions to the consumers.
94
- setting :assignment_strategy, Karafka::AssignmentStrategies::RoundRobin.new
94
+ setting :assignment_strategy, default: Karafka::AssignmentStrategies::RoundRobin.new
95
95
  # option max_bytes_per_partition [Integer] the maximum amount of data fetched
96
96
  # from a single partition at a time.
97
- setting :max_bytes_per_partition, 1_048_576
97
+ setting :max_bytes_per_partition, default: 1_048_576
98
98
  # whether to consume messages starting at the beginning or to just consume new messages
99
- setting :start_from_beginning, true
99
+ setting :start_from_beginning, default: true
100
+ # option resolve_seed_brokers [Boolean] whether to resolve each hostname of the seed
101
+ # brokers
102
+ setting :resolve_seed_brokers, default: false
100
103
  # option min_bytes [Integer] the minimum number of bytes to read before
101
104
  # returning messages from the server; if `max_wait_time` is reached, this
102
105
  # is ignored.
103
- setting :min_bytes, 1
106
+ setting :min_bytes, default: 1
104
107
  # option max_bytes [Integer] the maximum number of bytes to read before returning messages
105
108
  # from each broker.
106
- setting :max_bytes, 10_485_760
109
+ setting :max_bytes, default: 10_485_760
107
110
  # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
108
111
  # wait before returning data from a single message fetch. By setting this high you also
109
112
  # increase the fetching throughput - and by setting it low you set a bound on latency.
@@ -111,65 +114,65 @@ module Karafka
111
114
  # time specified. The default value is one second. If you want to have at most five
112
115
  # seconds of latency, set `max_wait_time` to 5. You should make sure
113
116
  # max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
114
- setting :max_wait_time, 1
117
+ setting :max_wait_time, default: 1
115
118
  # option automatically_mark_as_consumed [Boolean] should we automatically mark received
116
119
  # messages as consumed (processed) after non-error consumption
117
- setting :automatically_mark_as_consumed, true
120
+ setting :automatically_mark_as_consumed, default: true
118
121
  # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
119
122
  # Kafka cluster that went down (in seconds)
120
- setting :reconnect_timeout, 5
123
+ setting :reconnect_timeout, default: 5
121
124
  # option connect_timeout [Integer] Sets the number of seconds to wait while connecting to
122
125
  # a broker for the first time. When ruby-kafka initializes, it needs to connect to at
123
126
  # least one host.
124
- setting :connect_timeout, 10
127
+ setting :connect_timeout, default: 10
125
128
  # option socket_timeout [Integer] Sets the number of seconds to wait when reading from or
126
129
  # writing to a socket connection to a broker. After this timeout expires the connection
127
130
  # will be killed. Note that some Kafka operations are by definition long-running, such as
128
131
  # waiting for new messages to arrive in a partition, so don't set this value too low
129
- setting :socket_timeout, 30
132
+ setting :socket_timeout, default: 30
130
133
  # option partitioner [Object, nil] the partitioner that should be used by the client
131
- setting :partitioner, nil
134
+ setting :partitioner, default: nil
132
135
 
133
136
  # SSL authentication related settings
134
137
  # option ca_cert [String, nil] SSL CA certificate
135
- setting :ssl_ca_cert, nil
138
+ setting :ssl_ca_cert, default: nil
136
139
  # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
137
- setting :ssl_ca_cert_file_path, nil
140
+ setting :ssl_ca_cert_file_path, default: nil
138
141
  # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
139
142
  # certificate store
140
- setting :ssl_ca_certs_from_system, false
143
+ setting :ssl_ca_certs_from_system, default: false
141
144
  # option ssl_verify_hostname [Boolean] Verify the hostname for client certs
142
- setting :ssl_verify_hostname, true
145
+ setting :ssl_verify_hostname, default: true
143
146
  # option ssl_client_cert [String, nil] SSL client certificate
144
- setting :ssl_client_cert, nil
147
+ setting :ssl_client_cert, default: nil
145
148
  # option ssl_client_cert_key [String, nil] SSL client certificate password
146
- setting :ssl_client_cert_key, nil
149
+ setting :ssl_client_cert_key, default: nil
147
150
  # option sasl_gssapi_principal [String, nil] sasl principal
148
- setting :sasl_gssapi_principal, nil
151
+ setting :sasl_gssapi_principal, default: nil
149
152
  # option sasl_gssapi_keytab [String, nil] sasl keytab
150
- setting :sasl_gssapi_keytab, nil
153
+ setting :sasl_gssapi_keytab, default: nil
151
154
  # option sasl_plain_authzid [String] The authorization identity to use
152
- setting :sasl_plain_authzid, ''
155
+ setting :sasl_plain_authzid, default: ''
153
156
  # option sasl_plain_username [String, nil] The username used to authenticate
154
- setting :sasl_plain_username, nil
157
+ setting :sasl_plain_username, default: nil
155
158
  # option sasl_plain_password [String, nil] The password used to authenticate
156
- setting :sasl_plain_password, nil
159
+ setting :sasl_plain_password, default: nil
157
160
  # option sasl_scram_username [String, nil] The username used to authenticate
158
- setting :sasl_scram_username, nil
161
+ setting :sasl_scram_username, default: nil
159
162
  # option sasl_scram_password [String, nil] The password used to authenticate
160
- setting :sasl_scram_password, nil
163
+ setting :sasl_scram_password, default: nil
161
164
  # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
162
- setting :sasl_scram_mechanism, nil
165
+ setting :sasl_scram_mechanism, default: nil
163
166
  # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
164
- setting :sasl_over_ssl, true
167
+ setting :sasl_over_ssl, default: true
165
168
  # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
166
- setting :ssl_client_cert_chain, nil
169
+ setting :ssl_client_cert_chain, default: nil
167
170
  # option ssl_client_cert_key_password [String, nil] the password required to read
168
171
  # the ssl_client_cert_key
169
- setting :ssl_client_cert_key_password, nil
172
+ setting :ssl_client_cert_key_password, default: nil
170
173
  # @param sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
171
174
  # implements method token.
172
- setting :sasl_oauth_token_provider, nil
175
+ setting :sasl_oauth_token_provider, default: nil
173
176
  end
174
177
 
175
178
  # Namespace for internal settings that should not be modified
@@ -177,18 +180,18 @@ module Karafka
177
180
  # non global state
178
181
  setting :internal do
179
182
  # option routing_builder [Karafka::Routing::Builder] builder instance
180
- setting :routing_builder, Routing::Builder.new
183
+ setting :routing_builder, default: Routing::Builder.new
181
184
  # option status [Karafka::Status] app status
182
- setting :status, Status.new
185
+ setting :status, default: Status.new
183
186
  # option process [Karafka::Process] process status
184
187
  # @note In the future, we need to have a single process representation for all the karafka
185
188
  # instances
186
- setting :process, Process.new
189
+ setting :process, default: Process.new
187
190
  # option fetcher [Karafka::Fetcher] fetcher instance
188
- setting :fetcher, Fetcher.new
191
+ setting :fetcher, default: Fetcher.new
189
192
  # option configurators [Array<Object>] all configurators that we want to run after
190
193
  # the setup
191
- setting :configurators, [Configurators::WaterDrop.new]
194
+ setting :configurators, default: [Configurators::WaterDrop.new]
192
195
  end
193
196
 
194
197
  class << self
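The long run of `setting` hunks above is the dry-configurable migration noted in the 1.4.9 changelog entry: newer dry-configurable deprecates passing a setting's default as a positional argument, so every default now goes through the `default:` keyword. A minimal sketch of the two forms, assuming dry-configurable ~> 0.16 as pinned in the gemspec (the `AppConfig` class is ours, for illustration only):

```ruby
require 'dry-configurable'

class AppConfig
  extend Dry::Configurable

  # Old style (pre-0.13): positional default, deprecated and later removed.
  # setting :backend, :inline

  # New style used throughout the hunks above: keyword default.
  setting :backend, default: :inline
  setting :shutdown_timeout, default: 60
end

AppConfig.config.backend          # => :inline
AppConfig.config.shutdown_timeout # => 60
```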
@@ -54,7 +54,7 @@ class KarafkaApp < Karafka::App
  # listen to only what you really need for given environment.
  Karafka.monitor.subscribe(WaterDrop::Instrumentation::StdoutListener.new)
  Karafka.monitor.subscribe(Karafka::Instrumentation::StdoutListener.new)
- Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)
+ # Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)

  # Uncomment that in order to achieve code reload in development mode
  # Be aware, that this might have some side-effects. Please refer to the wiki
@@ -3,5 +3,5 @@
  # Main module namespace
  module Karafka
  # Current Karafka version
- VERSION = '1.4.3'
+ VERSION = '1.4.14'
  end
data/lib/karafka.rb CHANGED
@@ -10,6 +10,7 @@
  thor
  forwardable
  fileutils
+ concurrent
  dry-configurable
  dry-validation
  dry/events/publisher
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: karafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.4.3
4
+ version: 1.4.14
5
5
  platform: ruby
6
6
  authors:
7
7
  - Maciej Mensfeld
@@ -12,186 +12,173 @@ bindir: bin
12
12
  cert_chain:
13
13
  - |
14
14
  -----BEGIN CERTIFICATE-----
15
- MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
16
- ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjAwODExMDkxNTM3WhcNMjEwODExMDkx
17
- NTM3WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
18
- CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDCpXsCgmINb6lHBXXBdyrgsBPSxC4/
19
- 2H+weJ6L9CruTiv2+2/ZkQGtnLcDgrD14rdLIHK7t0o3EKYlDT5GhD/XUVhI15JE
20
- N7IqnPUgexe1fbZArwQ51afxz2AmPQN2BkB2oeQHXxnSWUGMhvcEZpfbxCCJH26w
21
- hS0Ccsma8yxA6hSlGVhFVDuCr7c2L1di6cK2CtIDpfDaWqnVNJEwBYHIxrCoWK5g
22
- sIGekVt/admS9gRhIMaIBg+Mshth5/DEyWO2QjteTodItlxfTctrfmiAl8X8T5JP
23
- VXeLp5SSOJ5JXE80nShMJp3RFnGw5fqjX/ffjtISYh78/By4xF3a25HdWH9+qO2Z
24
- tx0wSGc9/4gqNM0APQnjN/4YXrGZ4IeSjtE+OrrX07l0TiyikzSLFOkZCAp8oBJi
25
- Fhlosz8xQDJf7mhNxOaZziqASzp/hJTU/tuDKl5+ql2icnMv5iV/i6SlmvU29QNg
26
- LCV71pUv0pWzN+OZbHZKWepGhEQ3cG9MwvkCAwEAAaN3MHUwCQYDVR0TBAIwADAL
27
- BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFImGed2AXS070ohfRidiCEhXEUN+MB0GA1Ud
28
- EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
29
- c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKiHpwoENVrMi94V1zD4o8/6G3AU
30
- gWz4udkPYHTZLUy3dLznc/sNjdkJFWT3E6NKYq7c60EpJ0m0vAEg5+F5pmNOsvD3
31
- 2pXLj9kisEeYhR516HwXAvtngboUcb75skqvBCU++4Pu7BRAPjO1/ihLSBexbwSS
32
- fF+J5OWNuyHHCQp+kGPLtXJe2yUYyvSWDj3I2//Vk0VhNOIlaCS1+5/P3ZJThOtm
33
- zJUBI7h3HgovwRpcnmk2mXTmU4Zx/bCzX8EA6VY0khEvnmiq7S6eBF0H9qH8KyQ6
34
- EkVLpvmUDFcf/uNaBQdazEMB5jYtwoA8gQlANETNGPi51KlkukhKgaIEDMkBDJOx
35
- 65N7DzmkcyY0/GwjIVIxmRhcrCt1YeCUElmfFx0iida1/YRm6sB2AXqScc1+ECRi
36
- 2DND//YJUikn1zwbz1kT70XmHd97B4Eytpln7K+M1u2g1pHVEPW4owD/ammXNpUy
37
- nt70FcDD4yxJQ+0YNiHd0N8IcVBM1TMIVctMNQ==
15
+ MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
16
+ YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
17
+ MB4XDTIyMDgxOTE3MjEzN1oXDTIzMDgxOTE3MjEzN1owPzEQMA4GA1UEAwwHY29u
18
+ dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
19
+ bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAODzeO3L6lxdATzMHKNW
20
+ jFA/GGunoPuylO/BMzy8RiQHh7VIvysAKs0tHhTx3g2D0STDpF+hcQcPELFikiT2
21
+ F+1wOHj/SsrK7VKqfA8+gq04hKc5sQoX2Egf9k3V0YJ3eZ6R/koHkQ8A0TVt0w6F
22
+ ZQckoV4MqnEAx0g/FZN3mnHTlJ3VFLSBqJEIe+S6FZMl92mSv+hTrlUG8VaYxSfN
23
+ lTCvnKk284F6QZq5XIENLRmcDd/3aPBLnLwNnyMyhB+6gK8cUO+CFlDO5tjo/aBA
24
+ rUnl++wGG0JooF1ed0v+evOn9KoMBG6rHewcf79qJbVOscbD8qSAmo+sCXtcFryr
25
+ KRMTB8gNbowJkFRJDEe8tfRy11u1fYzFg/qNO82FJd62rKAw2wN0C29yCeQOPRb1
26
+ Cw9Y4ZwK9VFNEcV9L+3pHTHn2XfuZHtDaG198VweiF6raFO4yiEYccodH/USP0L5
27
+ cbcCFtmu/4HDSxL1ByQXO84A0ybJuk3/+aPUSXe9C9U8fwIDAQABo3cwdTAJBgNV
28
+ HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUSlcEakb7gfn/5E2WY6z73BF/
29
+ iZkwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
30
+ bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEA1aS+E7RXJ1w9g9mJ
31
+ G0NzFxe64OEuENosNlvYQCbRKGCXAU1qqelYkBQHseRgRKxLICrnypRo9IEobyHa
32
+ vDnJ4r7Tsb34dleqQW2zY/obG+cia3Ym2JsegXWF7dDOzCXJ4FN8MFoT2jHlqLLw
33
+ yrap0YO5zx0GSQ0Dwy8h2n2v2vanMEeCx7iNm3ERgR5WuN5sjzWoz2A/JLEEcK0C
34
+ EnAGKCWAd1fuG8IemDjT1edsd5FyYR4bIX0m+99oDuFZyPiiIbalmyYiSBBp59Yb
35
+ Q0P8zeBi4OfwCZNcxqz0KONmw9JLNv6DgyEAH5xe/4JzhMEgvIRiPj0pHfA7oqQF
36
+ KUNqvD1KlxbEC+bZfE5IZhnqYLdld/Ksqd22FI1RBhiS1Ejfsj99LVIm9cBuZEY2
37
+ Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
38
+ MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
38
39
  -----END CERTIFICATE-----
39
- date: 2021-03-24 00:00:00.000000000 Z
40
+ date: 2022-10-14 00:00:00.000000000 Z
40
41
  dependencies:
41
42
  - !ruby/object:Gem::Dependency
42
- name: dry-configurable
43
+ name: concurrent-ruby
43
44
  requirement: !ruby/object:Gem::Requirement
44
45
  requirements:
45
- - - "~>"
46
+ - - ">="
46
47
  - !ruby/object:Gem::Version
47
- version: '0.8'
48
+ version: '0'
48
49
  type: :runtime
49
50
  prerelease: false
50
51
  version_requirements: !ruby/object:Gem::Requirement
51
52
  requirements:
52
- - - "~>"
53
+ - - ">="
53
54
  - !ruby/object:Gem::Version
54
- version: '0.8'
55
+ version: '0'
55
56
  - !ruby/object:Gem::Dependency
56
- name: dry-inflector
57
+ name: dry-configurable
57
58
  requirement: !ruby/object:Gem::Requirement
58
59
  requirements:
59
60
  - - "~>"
60
61
  - !ruby/object:Gem::Version
61
- version: '0.1'
62
+ version: '0.16'
62
63
  type: :runtime
63
64
  prerelease: false
64
65
  version_requirements: !ruby/object:Gem::Requirement
65
66
  requirements:
66
67
  - - "~>"
67
68
  - !ruby/object:Gem::Version
68
- version: '0.1'
69
+ version: '0.16'
69
70
  - !ruby/object:Gem::Dependency
70
- name: dry-monitor
71
+ name: dry-inflector
71
72
  requirement: !ruby/object:Gem::Requirement
72
73
  requirements:
73
74
  - - "~>"
74
75
  - !ruby/object:Gem::Version
75
- version: '0.3'
76
+ version: '0.2'
76
77
  type: :runtime
77
78
  prerelease: false
78
79
  version_requirements: !ruby/object:Gem::Requirement
79
80
  requirements:
80
81
  - - "~>"
81
82
  - !ruby/object:Gem::Version
82
- version: '0.3'
83
+ version: '0.2'
83
84
  - !ruby/object:Gem::Dependency
84
- name: dry-validation
85
+ name: dry-monitor
85
86
  requirement: !ruby/object:Gem::Requirement
86
87
  requirements:
87
88
  - - "~>"
88
89
  - !ruby/object:Gem::Version
89
- version: '1.2'
90
+ version: '0.5'
90
91
  type: :runtime
91
92
  prerelease: false
92
93
  version_requirements: !ruby/object:Gem::Requirement
93
94
  requirements:
94
95
  - - "~>"
95
96
  - !ruby/object:Gem::Version
96
- version: '1.2'
97
+ version: '0.5'
97
98
  - !ruby/object:Gem::Dependency
98
- name: envlogic
99
+ name: dry-validation
99
100
  requirement: !ruby/object:Gem::Requirement
100
101
  requirements:
101
102
  - - "~>"
102
103
  - !ruby/object:Gem::Version
103
- version: '1.1'
104
+ version: '1.7'
104
105
  type: :runtime
105
106
  prerelease: false
106
107
  version_requirements: !ruby/object:Gem::Requirement
107
108
  requirements:
108
109
  - - "~>"
109
110
  - !ruby/object:Gem::Version
110
- version: '1.1'
111
+ version: '1.7'
111
112
  - !ruby/object:Gem::Dependency
112
- name: irb
113
+ name: envlogic
113
114
  requirement: !ruby/object:Gem::Requirement
114
115
  requirements:
115
116
  - - "~>"
116
117
  - !ruby/object:Gem::Version
117
- version: '1.0'
118
+ version: '1.1'
118
119
  type: :runtime
119
120
  prerelease: false
120
121
  version_requirements: !ruby/object:Gem::Requirement
121
122
  requirements:
122
123
  - - "~>"
123
124
  - !ruby/object:Gem::Version
124
- version: '1.0'
125
- - !ruby/object:Gem::Dependency
126
- name: rake
127
- requirement: !ruby/object:Gem::Requirement
128
- requirements:
129
- - - ">="
130
- - !ruby/object:Gem::Version
131
- version: '11.3'
132
- type: :runtime
133
- prerelease: false
134
- version_requirements: !ruby/object:Gem::Requirement
135
- requirements:
136
- - - ">="
137
- - !ruby/object:Gem::Version
138
- version: '11.3'
125
+ version: '1.1'
139
126
  - !ruby/object:Gem::Dependency
140
127
  name: ruby-kafka
141
128
  requirement: !ruby/object:Gem::Requirement
142
129
  requirements:
143
130
  - - ">="
144
131
  - !ruby/object:Gem::Version
145
- version: 1.0.0
132
+ version: 1.3.0
146
133
  type: :runtime
147
134
  prerelease: false
148
135
  version_requirements: !ruby/object:Gem::Requirement
149
136
  requirements:
150
137
  - - ">="
151
138
  - !ruby/object:Gem::Version
152
- version: 1.0.0
139
+ version: 1.3.0
153
140
  - !ruby/object:Gem::Dependency
154
141
  name: thor
155
142
  requirement: !ruby/object:Gem::Requirement
156
143
  requirements:
157
144
  - - ">="
158
145
  - !ruby/object:Gem::Version
159
- version: '0.20'
146
+ version: '1.1'
160
147
  type: :runtime
161
148
  prerelease: false
162
149
  version_requirements: !ruby/object:Gem::Requirement
163
150
  requirements:
164
151
  - - ">="
165
152
  - !ruby/object:Gem::Version
166
- version: '0.20'
153
+ version: '1.1'
167
154
  - !ruby/object:Gem::Dependency
168
155
  name: waterdrop
169
156
  requirement: !ruby/object:Gem::Requirement
170
157
  requirements:
171
158
  - - "~>"
172
159
  - !ruby/object:Gem::Version
173
- version: 1.4.0
160
+ version: '1.4'
174
161
  type: :runtime
175
162
  prerelease: false
176
163
  version_requirements: !ruby/object:Gem::Requirement
177
164
  requirements:
178
165
  - - "~>"
179
166
  - !ruby/object:Gem::Version
180
- version: 1.4.0
167
+ version: '1.4'
181
168
  - !ruby/object:Gem::Dependency
182
169
  name: zeitwerk
183
170
  requirement: !ruby/object:Gem::Requirement
184
171
  requirements:
185
172
  - - "~>"
186
173
  - !ruby/object:Gem::Version
187
- version: '2.1'
174
+ version: '2.6'
188
175
  type: :runtime
189
176
  prerelease: false
190
177
  version_requirements: !ruby/object:Gem::Requirement
191
178
  requirements:
192
179
  - - "~>"
193
180
  - !ruby/object:Gem::Version
194
- version: '2.1'
181
+ version: '2.6'
195
182
  description: Framework used to simplify Apache Kafka based Ruby applications development
196
183
  email:
197
184
  - maciej@mensfeld.pl
@@ -205,7 +192,6 @@ files:
205
192
  - ".coditsu/ci.yml"
206
193
  - ".console_irbrc"
207
194
  - ".diffend.yml"
208
- - ".github/FUNDING.yml"
209
195
  - ".github/ISSUE_TEMPLATE/bug_report.md"
210
196
  - ".github/ISSUE_TEMPLATE/feature_request.md"
211
197
  - ".github/workflows/ci.yml"
@@ -301,11 +287,16 @@ files:
301
287
  - lib/karafka/templates/karafka.rb.erb
302
288
  - lib/karafka/version.rb
303
289
  - log/.gitkeep
304
- homepage: https://github.com/karafka/karafka
290
+ homepage: https://karafka.io
305
291
  licenses:
306
292
  - MIT
307
- metadata: {}
308
- post_install_message:
293
+ metadata:
294
+ source_code_uri: https://github.com/karafka/karafka
295
+ rubygems_mfa_required: 'true'
296
+ post_install_message: |
297
+ WARN: Karafka 1.4 will reach the end of life soon.
298
+ We highly recommend updating to Karafka 2.0.
299
+ Visit this page for more details: https://karafka.io/docs/Versions-Lifecycle-and-EOL
309
300
  rdoc_options: []
310
301
  require_paths:
311
302
  - lib
@@ -313,14 +304,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
313
304
  requirements:
314
305
  - - ">="
315
306
  - !ruby/object:Gem::Version
316
- version: 2.5.0
307
+ version: '2.7'
317
308
  required_rubygems_version: !ruby/object:Gem::Requirement
318
309
  requirements:
319
310
  - - ">="
320
311
  - !ruby/object:Gem::Version
321
312
  version: '0'
322
313
  requirements: []
323
- rubygems_version: 3.2.3
314
+ rubygems_version: 3.3.7
324
315
  signing_key:
325
316
  specification_version: 4
326
317
  summary: Ruby based framework for working with Apache Kafka
metadata.gz.sig CHANGED
Binary file
data/.github/FUNDING.yml DELETED
@@ -1,3 +0,0 @@
- # These are supported funding model platforms
-
- open_collective: karafka