karafka 2.2.14 → 2.3.0.alpha2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +38 -12
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +24 -0
  6. data/Gemfile.lock +16 -16
  7. data/README.md +0 -2
  8. data/SECURITY.md +23 -0
  9. data/bin/integrations +1 -1
  10. data/config/locales/errors.yml +7 -1
  11. data/config/locales/pro_errors.yml +22 -0
  12. data/docker-compose.yml +1 -1
  13. data/karafka.gemspec +2 -2
  14. data/lib/karafka/admin/acl.rb +287 -0
  15. data/lib/karafka/admin.rb +9 -13
  16. data/lib/karafka/app.rb +5 -3
  17. data/lib/karafka/base_consumer.rb +9 -1
  18. data/lib/karafka/cli/base.rb +1 -1
  19. data/lib/karafka/connection/client.rb +83 -76
  20. data/lib/karafka/connection/conductor.rb +28 -0
  21. data/lib/karafka/connection/listener.rb +159 -42
  22. data/lib/karafka/connection/listeners_batch.rb +5 -11
  23. data/lib/karafka/connection/manager.rb +72 -0
  24. data/lib/karafka/connection/messages_buffer.rb +12 -0
  25. data/lib/karafka/connection/proxy.rb +17 -0
  26. data/lib/karafka/connection/status.rb +75 -0
  27. data/lib/karafka/contracts/config.rb +14 -10
  28. data/lib/karafka/contracts/consumer_group.rb +9 -1
  29. data/lib/karafka/contracts/topic.rb +3 -1
  30. data/lib/karafka/errors.rb +17 -0
  31. data/lib/karafka/instrumentation/logger_listener.rb +3 -0
  32. data/lib/karafka/instrumentation/notifications.rb +13 -5
  33. data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +31 -28
  34. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +20 -1
  35. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +15 -12
  36. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +39 -36
  37. data/lib/karafka/pro/base_consumer.rb +47 -0
  38. data/lib/karafka/pro/connection/manager.rb +269 -0
  39. data/lib/karafka/pro/connection/multiplexing/listener.rb +40 -0
  40. data/lib/karafka/pro/iterator/tpl_builder.rb +1 -1
  41. data/lib/karafka/pro/iterator.rb +1 -6
  42. data/lib/karafka/pro/loader.rb +14 -0
  43. data/lib/karafka/pro/processing/coordinator.rb +2 -1
  44. data/lib/karafka/pro/processing/executor.rb +37 -0
  45. data/lib/karafka/pro/processing/expansions_selector.rb +32 -0
  46. data/lib/karafka/pro/processing/jobs/periodic.rb +41 -0
  47. data/lib/karafka/pro/processing/jobs/periodic_non_blocking.rb +32 -0
  48. data/lib/karafka/pro/processing/jobs_builder.rb +14 -3
  49. data/lib/karafka/pro/processing/offset_metadata/consumer.rb +44 -0
  50. data/lib/karafka/pro/processing/offset_metadata/fetcher.rb +131 -0
  51. data/lib/karafka/pro/processing/offset_metadata/listener.rb +46 -0
  52. data/lib/karafka/pro/processing/schedulers/base.rb +39 -23
  53. data/lib/karafka/pro/processing/schedulers/default.rb +12 -14
  54. data/lib/karafka/pro/processing/strategies/default.rb +154 -1
  55. data/lib/karafka/pro/processing/strategies/dlq/default.rb +39 -0
  56. data/lib/karafka/pro/processing/strategies/vp/default.rb +65 -25
  57. data/lib/karafka/pro/processing/virtual_offset_manager.rb +41 -11
  58. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +2 -0
  59. data/lib/karafka/pro/routing/features/multiplexing/config.rb +38 -0
  60. data/lib/karafka/pro/routing/features/multiplexing/contracts/topic.rb +114 -0
  61. data/lib/karafka/pro/routing/features/multiplexing/patches/contracts/consumer_group.rb +42 -0
  62. data/lib/karafka/pro/routing/features/multiplexing/proxy.rb +38 -0
  63. data/lib/karafka/pro/routing/features/multiplexing/subscription_group.rb +42 -0
  64. data/lib/karafka/pro/routing/features/multiplexing/subscription_groups_builder.rb +40 -0
  65. data/lib/karafka/pro/routing/features/multiplexing.rb +59 -0
  66. data/lib/karafka/pro/routing/features/non_blocking_job/topic.rb +32 -0
  67. data/lib/karafka/pro/routing/features/non_blocking_job.rb +37 -0
  68. data/lib/karafka/pro/routing/features/offset_metadata/config.rb +33 -0
  69. data/lib/karafka/pro/routing/features/offset_metadata/contracts/topic.rb +42 -0
  70. data/lib/karafka/pro/routing/features/offset_metadata/topic.rb +65 -0
  71. data/lib/karafka/pro/routing/features/offset_metadata.rb +40 -0
  72. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +4 -0
  73. data/lib/karafka/pro/routing/features/patterns/detector.rb +18 -10
  74. data/lib/karafka/pro/routing/features/periodic_job/config.rb +37 -0
  75. data/lib/karafka/pro/routing/features/periodic_job/contracts/topic.rb +44 -0
  76. data/lib/karafka/pro/routing/features/periodic_job/topic.rb +94 -0
  77. data/lib/karafka/pro/routing/features/periodic_job.rb +27 -0
  78. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +1 -0
  79. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +1 -0
  80. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +7 -2
  81. data/lib/karafka/process.rb +5 -3
  82. data/lib/karafka/processing/coordinator.rb +5 -1
  83. data/lib/karafka/processing/executor.rb +16 -10
  84. data/lib/karafka/processing/executors_buffer.rb +19 -4
  85. data/lib/karafka/processing/schedulers/default.rb +3 -2
  86. data/lib/karafka/processing/strategies/default.rb +6 -0
  87. data/lib/karafka/processing/strategies/dlq.rb +36 -0
  88. data/lib/karafka/routing/builder.rb +12 -2
  89. data/lib/karafka/routing/consumer_group.rb +5 -5
  90. data/lib/karafka/routing/features/base.rb +44 -8
  91. data/lib/karafka/routing/features/dead_letter_queue/config.rb +6 -1
  92. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +1 -0
  93. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +9 -2
  94. data/lib/karafka/routing/subscription_group.rb +2 -2
  95. data/lib/karafka/routing/subscription_groups_builder.rb +11 -2
  96. data/lib/karafka/routing/topic.rb +8 -10
  97. data/lib/karafka/runner.rb +13 -3
  98. data/lib/karafka/server.rb +5 -9
  99. data/lib/karafka/setup/config.rb +17 -0
  100. data/lib/karafka/status.rb +23 -14
  101. data/lib/karafka/templates/karafka.rb.erb +7 -0
  102. data/lib/karafka/time_trackers/partition_usage.rb +56 -0
  103. data/lib/karafka/version.rb +1 -1
  104. data.tar.gz.sig +0 -0
  105. metadata +42 -10
  106. metadata.gz.sig +0 -0
  107. data/lib/karafka/connection/consumer_group_coordinator.rb +0 -48
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 69d8242fa695121f63b2e582d7a0b97f090d58f82047513c450f3b21107703b3
4
- data.tar.gz: a9fb3db88cc6fbb3a25db24e95b8010a0b01f7ab09fc2f54d201e311581db9a5
3
+ metadata.gz: 07bebe70b6697a90d04154dcbfe5837f5bcfaf934073f91bbfc9e8939d9d1a6c
4
+ data.tar.gz: 13e41b276eee5142b55eb6908c8b9292bd8f802f470fb4e9bdc1f812dbd50189
5
5
  SHA512:
6
- metadata.gz: 83e22a8317f10328c11f3f4ac4c90109ecebb7f1ca0b089da2875c4e0700b58338adfb6b7c70e30df6fedecb26e2aaa4a11df347cc0bd898781adf709ad7a87c
7
- data.tar.gz: 15eb23000600be7d2f2c49316ae8d3355ddef4ab2d9f75585a5b63ea0f8b27a87f473d8fe5fcf926a5815f319413541f6822fa15660fc7962bb5baf31771f00a
6
+ metadata.gz: fef2cbded4409d951cf7e752f25b7db016052cc5f2a77c54ccf7e8778fbf44edba74cd1b04cf82caf341cf05beda552af5c4cbd994a510c54ad1d3c4e561fd17
7
+ data.tar.gz: 00bf893d7c6f29e559530585c40ba721ab3a9f3ce5906cd7c5b6948f79725d6d99423444d9ec7e6f8593f9780aec3375e400986ed8478dfab2cbbabb5807a85d
checksums.yaml.gz.sig CHANGED
Binary file
@@ -27,7 +27,7 @@ jobs:
27
27
  - name: Set up Ruby
28
28
  uses: ruby/setup-ruby@v1
29
29
  with:
30
- ruby-version: 3.2
30
+ ruby-version: 3.3
31
31
  bundler-cache: true
32
32
 
33
33
  - name: Install Diffend plugin
@@ -73,7 +73,7 @@ jobs:
73
73
  fail-fast: false
74
74
  matrix:
75
75
  ruby:
76
- - '3.3.0-preview2'
76
+ - '3.3'
77
77
  - '3.2'
78
78
  # We run it against the oldest and the newest of a given major to make sure, that there
79
79
  # are no syntax-sugars that we would use that were introduced down the road
@@ -82,9 +82,8 @@ jobs:
82
82
  - '3.0'
83
83
  - '3.0.0'
84
84
  - '2.7'
85
- - '2.7.0'
86
85
  include:
87
- - ruby: '3.2'
86
+ - ruby: '3.3'
88
87
  coverage: 'true'
89
88
  steps:
90
89
  - uses: actions/checkout@v4
@@ -100,6 +99,7 @@ jobs:
100
99
  with:
101
100
  ruby-version: ${{matrix.ruby}}
102
101
  bundler-cache: true
102
+ bundler: 'latest'
103
103
 
104
104
  - name: Wait for Kafka
105
105
  run: |
@@ -118,7 +118,7 @@ jobs:
118
118
  fail-fast: false
119
119
  matrix:
120
120
  ruby:
121
- - '3.3.0-preview2'
121
+ - '3.3'
122
122
  - '3.2'
123
123
  - '3.1'
124
124
  - '3.0'
@@ -143,17 +143,30 @@ jobs:
143
143
  #
144
144
  # We also want to check that librdkafka is compiling as expected on all versions of Ruby
145
145
  ruby-version: ${{matrix.ruby}}
146
+ bundler: 'latest'
146
147
 
147
148
  - name: Install latest Bundler
148
149
  run: |
149
- gem install bundler --no-document
150
- gem update --system --no-document
150
+ if [[ "$(ruby -v | awk '{print $2}')" == 2.7.8* ]]; then
151
+ gem install bundler -v 2.4.22 --no-document
152
+ bundle config set version 2.4.22
153
+ gem update --system 3.4.22 --no-document
154
+ else
155
+ gem install bundler --no-document
156
+ gem update --system --no-document
157
+ fi
158
+
151
159
  bundle config set without 'tools benchmarks docs'
152
160
 
153
161
  - name: Bundle install
154
162
  run: |
155
163
  bundle config set without development
156
- bundle install
164
+
165
+ if [[ "$(ruby -v | awk '{print $2}')" == 2.7.8* ]]; then
166
+ BUNDLER_VERSION=2.4.22 bundle install --jobs 4 --retry 3
167
+ else
168
+ bundle install --jobs 4 --retry 3
169
+ fi
157
170
 
158
171
  - name: Wait for Kafka
159
172
  run: |
@@ -170,7 +183,7 @@ jobs:
170
183
  fail-fast: false
171
184
  matrix:
172
185
  ruby:
173
- - '3.3.0-preview2'
186
+ - '3.3'
174
187
  - '3.2'
175
188
  - '3.1'
176
189
  - '3.0'
@@ -188,17 +201,30 @@ jobs:
188
201
  uses: ruby/setup-ruby@v1
189
202
  with:
190
203
  ruby-version: ${{matrix.ruby}}
204
+ bundler: 'latest'
191
205
 
192
206
  - name: Install latest Bundler
193
207
  run: |
194
- gem install bundler --no-document
195
- gem update --system --no-document
208
+ if [[ "$(ruby -v | awk '{print $2}')" == 2.7.8* ]]; then
209
+ gem install bundler -v 2.4.22 --no-document
210
+ bundle config set version 2.4.22
211
+ gem update --system 3.4.22 --no-document
212
+ else
213
+ gem install bundler --no-document
214
+ gem update --system --no-document
215
+ fi
216
+
196
217
  bundle config set without 'tools benchmarks docs'
197
218
 
198
219
  - name: Bundle install
199
220
  run: |
200
221
  bundle config set without development
201
- bundle install
222
+
223
+ if [[ "$(ruby -v | awk '{print $2}')" == 2.7.8* ]]; then
224
+ BUNDLER_VERSION=2.4.22 bundle install --jobs 4 --retry 3
225
+ else
226
+ bundle install --jobs 4 --retry 3
227
+ fi
202
228
 
203
229
  - name: Wait for Kafka
204
230
  run: |
data/.ruby-version CHANGED
@@ -1 +1 @@
1
- 3.2.2
1
+ 3.3.0
data/CHANGELOG.md CHANGED
@@ -1,5 +1,29 @@
1
1
  # Karafka framework changelog
2
2
 
3
+ ## 2.3.0 (Unreleased)
4
+ - **[Feature]** Introduce Exactly-Once Semantics within the consumer `#transaction` block (Pro)
5
+ - **[Feature]** Provide ability to multiplex subscription groups (Pro)
6
+ - **[Feature]** Provide `Karafka::Admin::Acl` for Kafka ACL management via the Admin APIs.
7
+ - **[Feature]** Periodic Jobs (Pro)
8
+ - **[Feature]** Offset Metadata storage (Pro)
9
+ - **[Feature]** Provide low-level listeners management API for dynamic resources scaling (Pro)
10
+ - [Enhancement] Improve shutdown process by allowing for parallel connections shutdown.
11
+ - [Enhancement] Introduce `non_blocking` routing API that aliases LRJ to indicate a different use-case for LRJ flow approach.
12
+ - [Enhancement] Allow resetting the offset when seeking backwards by using the `reset_offset` keyword attribute set to `true`.
13
+ - [Enhancement] Alias producer operations in consumer to skip `#producer` reference.
14
+ - [Enhancement] Provide an `:independent` configuration to DLQ, allowing the pause count tracking to be reset on each marking as consumed when retrying.
15
+ - [Enhancement] Remove no longer needed shutdown patches for `librdkafka` improving multi-sg shutdown times for `cooperative-sticky`.
16
+ - [Enhancement] Allow for parallel closing of connections from independent consumer groups.
17
+ - [Change] Make `Kubernetes::LivenessListener` not start until Karafka app starts running.
18
+ - [Change] Remove the legacy "inside of topics" way of defining subscription group names
19
+ - [Change] Update supported instrumentation to report on `#tick`.
20
+ - [Refactor] Replace `define_method` with `class_eval` in some locations.
21
+ - [Fix] Fix a case where internal Idle job scheduling would go via the consumption flow.
22
+ - [Fix] Make the Iterator `#stop_partition` work with karafka-rdkafka `0.14.6`.
23
+ - [Fix] Ensure Pro components are not loaded during OSS specs execution (not affecting usage).
24
+ - [Fix] Fix invalid action label for consumers in DataDog logger instrumentation.
25
+ - [Ignore] option --include-consumer-groups not working as intended after removal of "thor"
26
+
3
27
  ## 2.2.14 (2023-12-07)
4
28
  - **[Feature]** Provide `Karafka::Admin#delete_consumer_group` and `Karafka::Admin#seek_consumer_group`.
5
29
  - **[Feature]** Provide `Karafka::App.assignments` that will return real-time assignments tracking.
data/Gemfile.lock CHANGED
@@ -1,18 +1,18 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- karafka (2.2.14)
5
- karafka-core (>= 2.2.7, < 2.3.0)
6
- waterdrop (>= 2.6.11, < 3.0.0)
4
+ karafka (2.3.0.alpha2)
5
+ karafka-core (>= 2.3.0.alpha1, < 2.4.0)
6
+ waterdrop (>= 2.6.12, < 3.0.0)
7
7
  zeitwerk (~> 2.3)
8
8
 
9
9
  GEM
10
10
  remote: https://rubygems.org/
11
11
  specs:
12
- activejob (7.1.2)
13
- activesupport (= 7.1.2)
12
+ activejob (7.1.3)
13
+ activesupport (= 7.1.3)
14
14
  globalid (>= 0.3.6)
15
- activesupport (7.1.2)
15
+ activesupport (7.1.3)
16
16
  base64
17
17
  bigdecimal
18
18
  concurrent-ruby (~> 1.0, >= 1.0.2)
@@ -23,26 +23,25 @@ GEM
23
23
  mutex_m
24
24
  tzinfo (~> 2.0)
25
25
  base64 (0.2.0)
26
- bigdecimal (3.1.4)
26
+ bigdecimal (3.1.5)
27
27
  byebug (11.1.3)
28
- concurrent-ruby (1.2.2)
28
+ concurrent-ruby (1.2.3)
29
29
  connection_pool (2.4.1)
30
30
  diff-lcs (1.5.0)
31
31
  docile (1.4.0)
32
32
  drb (2.2.0)
33
33
  ruby2_keywords
34
34
  erubi (1.12.0)
35
- factory_bot (6.4.2)
35
+ factory_bot (6.4.5)
36
36
  activesupport (>= 5.0.0)
37
37
  ffi (1.16.3)
38
38
  globalid (1.2.1)
39
39
  activesupport (>= 6.1)
40
40
  i18n (1.14.1)
41
41
  concurrent-ruby (~> 1.0)
42
- karafka-core (2.2.7)
43
- concurrent-ruby (>= 1.1)
44
- karafka-rdkafka (>= 0.13.9, < 0.15.0)
45
- karafka-rdkafka (0.14.1)
42
+ karafka-core (2.3.0.alpha1)
43
+ karafka-rdkafka (>= 0.14.7, < 0.15.0)
44
+ karafka-rdkafka (0.14.7)
46
45
  ffi (~> 1.15)
47
46
  mini_portile2 (~> 2.6)
48
47
  rake (> 12)
@@ -57,7 +56,7 @@ GEM
57
56
  mutex_m (0.2.0)
58
57
  rack (3.0.8)
59
58
  rake (13.1.0)
60
- roda (3.74.0)
59
+ roda (3.75.0)
61
60
  rack
62
61
  rspec (3.12.0)
63
62
  rspec-core (~> 3.12.0)
@@ -82,12 +81,13 @@ GEM
82
81
  tilt (2.3.0)
83
82
  tzinfo (2.0.6)
84
83
  concurrent-ruby (~> 1.0)
85
- waterdrop (2.6.11)
84
+ waterdrop (2.6.12)
86
85
  karafka-core (>= 2.2.3, < 3.0.0)
87
86
  zeitwerk (~> 2.3)
88
87
  zeitwerk (2.6.12)
89
88
 
90
89
  PLATFORMS
90
+ ruby
91
91
  x86_64-linux
92
92
 
93
93
  DEPENDENCIES
@@ -100,4 +100,4 @@ DEPENDENCIES
100
100
  simplecov
101
101
 
102
102
  BUNDLED WITH
103
- 2.4.19
103
+ 2.5.3
data/README.md CHANGED
@@ -4,8 +4,6 @@
4
4
  [![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
5
5
  [![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
6
6
 
7
- **Note**: Upgrade instructions for migration from Karafka `1.4` to Karafka `2.0` can be found [here](https://karafka.io/docs/Upgrades-2.0/).
8
-
9
7
  ## About Karafka
10
8
 
11
9
  Karafka is a Ruby and Rails multi-threaded efficient Kafka processing framework that:
data/SECURITY.md ADDED
@@ -0,0 +1,23 @@
1
+ # Security Policy
2
+
3
+ ## Supported Versions
4
+
5
+ Please refer to the Karafka [EOL documentation](https://karafka.io/docs/Versions-Lifecycle-and-EOL/) page for detailed information on which versions are actively supported with security updates.
6
+
7
+ ## Reporting a Vulnerability
8
+
9
+ If you have identified a potential security vulnerability in our projects, we encourage you to report it immediately. We take all reports of security issues seriously and will work diligently to address them.
10
+
11
+ To report a vulnerability, please send an email directly to contact@karafka.io.
12
+
13
+ We understand the importance of addressing security vulnerabilities promptly. You can expect a reply from us within 2 working days of your report. This initial response will confirm receipt of your report.
14
+
15
+ After acknowledging your report, we will:
16
+
17
+ - Evaluate the reported vulnerability in the context of our project.
18
+ - Provide you with regular updates on our progress.
19
+ - Upon completing our assessment, we will inform you of the outcome. This includes whether the vulnerability will be accepted or declined for further action.
20
+
21
+ Your report will be kept confidential and not disclosed to third parties without your consent, except as required by law.
22
+
23
+ We appreciate your assistance in keeping our projects and their users safe by responsibly reporting vulnerabilities. Together, we can maintain a high standard of security for our community.
data/bin/integrations CHANGED
@@ -28,7 +28,7 @@ ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../
28
28
  CONCURRENCY = ENV.key?('CI') ? 5 : Etc.nprocessors * 3
29
29
 
30
30
  # How may bytes do we want to keep from the stdout in the buffer for when we need to print it
31
- MAX_BUFFER_OUTPUT = 51_200
31
+ MAX_BUFFER_OUTPUT = 102_400
32
32
 
33
33
  # Abstraction around a single test scenario execution process
34
34
  class Scenario
@@ -30,12 +30,17 @@ en:
30
30
  internal.tick_interval_format: needs to be an integer bigger or equal to 1000
31
31
  internal.routing.builder_format: needs to be present
32
32
  internal.routing.subscription_groups_builder_format: needs to be present
33
+ internal.connection.manager_format: needs to be present
34
+ internal.connection.conductor_format: needs to be present
33
35
  internal.connection.proxy.query_watermark_offsets.timeout_format: needs to be an integer bigger than 0
34
36
  internal.connection.proxy.query_watermark_offsets.max_attempts_format: needs to be an integer bigger than 0
35
37
  internal.connection.proxy.query_watermark_offsets.wait_time_format: needs to be an integer bigger than 0
36
38
  internal.connection.proxy.offsets_for_times.timeout_format: needs to be an integer bigger than 0
37
39
  internal.connection.proxy.offsets_for_times.max_attempts_format: needs to be an integer bigger than 0
38
40
  internal.connection.proxy.offsets_for_times.wait_time_format: needs to be an integer bigger than 0
41
+ internal.connection.proxy.committed.timeout_format: needs to be an integer bigger than 0
42
+ internal.connection.proxy.committed.max_attempts_format: needs to be an integer bigger than 0
43
+ internal.connection.proxy.committed.wait_time_format: needs to be an integer bigger than 0
39
44
  key_must_be_a_symbol: All keys under the kafka settings scope need to be symbols
40
45
  max_timeout_vs_pause_max_timeout: pause_timeout must be less or equal to pause_max_timeout
41
46
  shutdown_timeout_vs_max_wait_time: shutdown_timeout must be more than max_wait_time
@@ -61,7 +66,7 @@ en:
61
66
  consumer_format: needs to be present
62
67
  id_format: 'needs to be a string with a Kafka accepted format'
63
68
  initial_offset_format: needs to be either earliest or latest
64
- subscription_group_name_format: must be a non-empty string
69
+ subscription_group_details.name_format: must be a non-empty string
65
70
  manual_offset_management.active_format: needs to be either true or false
66
71
  manual_offset_management_must_be_enabled: cannot be disabled for ActiveJob topics
67
72
  inline_insights.active_format: needs to be either true or false
@@ -69,6 +74,7 @@ en:
69
74
  dead_letter_queue.max_retries_format: needs to be equal or bigger than 0
70
75
  dead_letter_queue.topic_format: 'needs to be a string with a Kafka accepted format'
71
76
  dead_letter_queue.active_format: needs to be either true or false
77
+ dead_letter_queue.independent_format: needs to be either true or false
72
78
  active_format: needs to be either true or false
73
79
  declaratives.partitions_format: needs to be more or equal to 1
74
80
  declaratives.active_format: needs to be true
@@ -3,6 +3,7 @@ en:
3
3
  topic:
4
4
  virtual_partitions.partitioner_respond_to_call: needs to be defined and needs to respond to `#call`
5
5
  virtual_partitions.max_partitions_format: needs to be equal or more than 1
6
+ virtual_partitions.offset_metadata_strategy_format: needs to be either :exact or :current
6
7
 
7
8
  long_running_job.active_format: needs to be either true or false
8
9
 
@@ -31,9 +32,30 @@ en:
31
32
  patterns.active_format: 'needs to be boolean'
32
33
  patterns.type_format: 'needs to be :matcher, :discovered or :regular'
33
34
 
35
+ periodic_job.active_missing: needs to be present
36
+ periodic_job.active_format: 'needs to be boolean'
37
+ periodic_job.interval_missing: 'needs to be present'
38
+ periodic_job.interval_format: 'needs to be an integer equal or more than 100'
39
+ periodic_job.during_pause_format: 'needs to be boolean'
40
+ periodic_job.during_retry_format: 'needs to be boolean'
41
+ periodic_job.materialized_format: 'needs to be boolean'
42
+ periodic_job.materialized_missing: 'needs to be present'
43
+
34
44
  inline_insights.active_format: 'needs to be boolean'
35
45
  inline_insights.required_format: 'needs to be boolean'
36
46
 
47
+ offset_metadata.active_format: 'needs to be boolean'
48
+ offset_metadata.cache_format: 'needs to be boolean'
49
+ offset_metadata.deserializer_missing: needs to be present
50
+ offset_metadata.deserializer_format: 'needs to respond to #call'
51
+
52
+ subscription_group_details.multiplexing_min_format: 'needs to be an integer equal or more than 1'
53
+ subscription_group_details.multiplexing_max_format: 'needs to be an integer equal or more than 1'
54
+ subscription_group_details_multiplexing_min_max_mismatch: 'min needs to be equal or less than max'
55
+ subscription_group_details_multiplexing_boot_mismatch: 'boot needs to be between min and max'
56
+ subscription_group_details.multiplexing_boot_format: 'needs to be an integer equal or more than 1'
57
+ subscription_group_details.multiplexing_boot_not_dynamic: 'needs to be equal to max when not in dynamic mode'
58
+
37
59
  consumer_group:
38
60
  patterns_format: must be an array with hashes
39
61
  patterns_missing: needs to be present
data/docker-compose.yml CHANGED
@@ -3,7 +3,7 @@ version: '2'
3
3
  services:
4
4
  kafka:
5
5
  container_name: kafka
6
- image: confluentinc/cp-kafka:7.5.2
6
+ image: confluentinc/cp-kafka:7.5.3
7
7
 
8
8
  ports:
9
9
  - 9092:9092
data/karafka.gemspec CHANGED
@@ -21,8 +21,8 @@ Gem::Specification.new do |spec|
21
21
  without having to focus on things that are not your business domain.
22
22
  DESC
23
23
 
24
- spec.add_dependency 'karafka-core', '>= 2.2.7', '< 2.3.0'
25
- spec.add_dependency 'waterdrop', '>= 2.6.11', '< 3.0.0'
24
+ spec.add_dependency 'karafka-core', '>= 2.3.0.alpha1', '< 2.4.0'
25
+ spec.add_dependency 'waterdrop', '>= 2.6.12', '< 3.0.0'
26
26
  spec.add_dependency 'zeitwerk', '~> 2.3'
27
27
 
28
28
  if $PROGRAM_NAME.end_with?('gem')