karafka 2.4.0.rc1 → 2.4.0
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +0 -3
- data/.ruby-version +1 -1
- data/CHANGELOG.md +3 -1
- data/Gemfile +1 -1
- data/Gemfile.lock +14 -14
- data/README.md +2 -2
- data/bin/integrations +2 -1
- data/karafka.gemspec +2 -2
- data/lib/karafka/constraints.rb +2 -2
- data/lib/karafka/instrumentation/callbacks/error.rb +14 -1
- data/lib/karafka/instrumentation/callbacks/oauthbearer_token_refresh.rb +12 -1
- data/lib/karafka/instrumentation/callbacks/rebalance.rb +14 -1
- data/lib/karafka/instrumentation/callbacks/statistics.rb +7 -3
- data/lib/karafka/instrumentation/logger_listener.rb +5 -2
- data/lib/karafka/pro/processing/piping/consumer.rb +14 -8
- data/lib/karafka/setup/attributes_map.rb +2 -0
- data/lib/karafka/templates/karafka.rb.erb +2 -2
- data/lib/karafka/version.rb +1 -1
- data.tar.gz.sig +0 -0
- metadata +7 -7
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8111bc347c891837ee1f7cbabd4d98ee1c55f2bb7366e72aa077628cc4052c14
+  data.tar.gz: 9a616eb14fbbddc433e56691540613a728cab6163632981de1c9f21abee8f0b4
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 35e52c0f5ffeace79cc58e6ef85918f6ff32c80233419dec83a5b99485cd6f5ac2b163f3d0a73e96b4ac424e27a81a3f8f848d3bed5e461b69eb28399f102d3c
+  data.tar.gz: 544848837b53a5e0e64b9c842f8dadc63a486b784660e37a6f44a67947682c59cd112e6ac509e19de99b7b084402b38d7e00d7fff52a89c9e07172f593b3032e
checksums.yaml.gz.sig
CHANGED
Binary file
data/.github/workflows/ci.yml
CHANGED
@@ -75,10 +75,7 @@ jobs:
         ruby:
           - '3.3'
           - '3.2'
-          # We run it against the oldest and the newest of a given major to make sure, that there
-          # are no syntax-sugars that we would use that were introduced down the road
           - '3.1'
-          - '3.1.0'
           - '3.0'
         include:
           - ruby: '3.3'
data/.ruby-version
CHANGED
@@ -1 +1 @@
-3.3.
+3.3.1
data/CHANGELOG.md
CHANGED
@@ -1,6 +1,6 @@
 # Karafka framework changelog
 
-## 2.4.0 (
+## 2.4.0 (2024-04-26)
 
 This release contains **BREAKING** changes. Make sure to read and apply upgrade notes.
 
@@ -17,6 +17,7 @@ This release contains **BREAKING** changes. Make sure to read and apply upgrade
 - **[Feature]** Introduce ability to use custom message key deserializers.
 - **[Feature]** Introduce ability to use custom message headers deserializers.
 - **[Feature]** Provide `Karafka::Admin::Configs` API for cluster and topics configuration management.
+- [Enhancement] Protect critical `rdkafka` thread executable code sections.
 - [Enhancement] Assign names to internal threads for better debuggability when on `TTIN`.
 - [Enhancement] Provide `log_polling` setting to the `Karafka::Instrumentation::LoggerListener` to silence polling in any non-debug mode.
 - [Enhancement] Provide `metadata#message` to be able to retrieve message from metadata.
@@ -42,6 +43,7 @@ This release contains **BREAKING** changes. Make sure to read and apply upgrade
 - [Enhancement] No longer raise `Karafka::UnsupportedCaseError` for not recognized error types to support dynamic errors reporting.
 - [Change] Do not create new proxy object to Rdkafka with certain low-level operations and re-use existing.
 - [Change] Update `karafka.erb` template with a placeholder for waterdrop and karafka error instrumentation.
+- [Change] Replace `statistics.emitted.error` error type with `callbacks.statistics.error` to align naming conventions.
 - [Fix] Pro Swarm liveness listener can report incorrect failure when dynamic multiplexing scales down.
 - [Fix] K8s liveness listener can report incorrect failure when dynamic multiplexing scales down.
 - [Fix] Fix a case where connection conductor would not be released during manager state changes.
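The custom key deserializer feature listed in the changelog can be sketched as follows. This is only an illustrative sketch, assuming the 2.4 routing `deserializers` API accepts `payload:`, `key:` and `headers:` callables that receive message metadata; `IntegerKeyDeserializer` and `OrdersConsumer` are hypothetical names, not part of this diff.

```ruby
# Hypothetical key deserializer: turns the raw key bytes into an Integer.
class IntegerKeyDeserializer
  # @param metadata [Karafka::Messages::Metadata] metadata carrying the raw key
  def call(metadata)
    metadata.raw_key.to_i
  end
end

class KarafkaApp < Karafka::App
  routes.draw do
    topic :orders do
      consumer OrdersConsumer
      # Assumed 2.4 routing API for custom deserializers
      deserializers(
        key: IntegerKeyDeserializer.new
      )
    end
  end
end
```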
data/Gemfile
CHANGED
@@ -12,7 +12,7 @@ gemspec
 group :integrations do
   gem 'activejob', require: false
   gem 'karafka-testing', '>= 2.4.0.rc1', require: false
-  gem 'karafka-web', '>= 0.9.0.
+  gem 'karafka-web', '>= 0.9.0.rc3', require: false
   gem 'rspec', require: false
 end
 
data/Gemfile.lock
CHANGED
@@ -1,9 +1,9 @@
 PATH
   remote: .
   specs:
-    karafka (2.4.0
-      karafka-core (>= 2.4.0
-      waterdrop (>= 2.7.0
+    karafka (2.4.0)
+      karafka-core (>= 2.4.0, < 2.5.0)
+      waterdrop (>= 2.7.0, < 3.0.0)
       zeitwerk (~> 2.3)
 
 GEM
@@ -38,19 +38,19 @@ GEM
       activesupport (>= 6.1)
     i18n (1.14.4)
       concurrent-ruby (~> 1.0)
-    karafka-core (2.4.0
-      karafka-rdkafka (>= 0.15.0
-    karafka-rdkafka (0.15.0
+    karafka-core (2.4.0)
+      karafka-rdkafka (>= 0.15.0, < 0.16.0)
+    karafka-rdkafka (0.15.0)
       ffi (~> 1.15)
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-testing (2.4.0.
+    karafka-testing (2.4.0.rc2)
       karafka (>= 2.4.0.beta1, < 2.5.0)
       waterdrop (>= 2.7.0.rc1)
-    karafka-web (0.9.0.
+    karafka-web (0.9.0.rc3)
       erubi (~> 1.4)
-      karafka (>= 2.4.0.
-      karafka-core (>= 2.4.0.
+      karafka (>= 2.4.0.rc1, < 2.5.0)
+      karafka-core (>= 2.4.0.rc2, < 2.5.0)
       roda (~> 3.68, >= 3.69)
       tilt (~> 2.0)
     mini_portile2 (2.8.6)
@@ -82,8 +82,8 @@ GEM
     tilt (2.3.0)
     tzinfo (2.0.6)
       concurrent-ruby (~> 1.0)
-    waterdrop (2.7.0
-      karafka-core (>= 2.4.0
+    waterdrop (2.7.0)
+      karafka-core (>= 2.4.0, < 3.0.0)
       zeitwerk (~> 2.3)
     zeitwerk (2.6.13)
 
@@ -97,9 +97,9 @@ DEPENDENCIES
   factory_bot
   karafka!
   karafka-testing (>= 2.4.0.rc1)
-  karafka-web (>= 0.9.0.
+  karafka-web (>= 0.9.0.rc3)
   rspec
   simplecov
 
 BUNDLED WITH
-   2.5.
+   2.5.9
data/README.md
CHANGED
@@ -57,8 +57,8 @@ We also maintain many [integration specs](https://github.com/karafka/karafka/tre
 1. Add and install Karafka:
 
 ```bash
-# Make sure to install Karafka 2.
-bundle add karafka --version ">= 2.
+# Make sure to install Karafka 2.4
+bundle add karafka --version ">= 2.4.0"
 
 bundle exec karafka install
 ```
data/bin/integrations
CHANGED
@@ -44,7 +44,8 @@ class Scenario
     'shutdown/on_hanging_jobs_and_a_shutdown_spec.rb' => [2].freeze,
     'shutdown/on_hanging_on_shutdown_job_and_a_shutdown_spec.rb' => [2].freeze,
     'shutdown/on_hanging_listener_and_shutdown_spec.rb' => [2].freeze,
-    'swarm/forceful_shutdown_of_hanging_spec.rb' => [2].freeze
+    'swarm/forceful_shutdown_of_hanging_spec.rb' => [2].freeze,
+    'instrumentation/post_errors_instrumentation_error_spec.rb' => [1].freeze
   }.freeze
 
   private_constant :MAX_RUN_TIME, :EXIT_CODES
data/karafka.gemspec
CHANGED
@@ -21,8 +21,8 @@ Gem::Specification.new do |spec|
     without having to focus on things that are not your business domain.
   DESC
 
-  spec.add_dependency 'karafka-core', '>= 2.4.0
-  spec.add_dependency 'waterdrop', '>= 2.7.0
+  spec.add_dependency 'karafka-core', '>= 2.4.0', '< 2.5.0'
+  spec.add_dependency 'waterdrop', '>= 2.7.0', '< 3.0.0'
   spec.add_dependency 'zeitwerk', '~> 2.3'
 
   spec.required_ruby_version = '>= 3.0.0'
data/lib/karafka/constraints.rb
CHANGED
@@ -15,8 +15,8 @@ module Karafka
       # Skip verification if web is not used at all
       return unless require_version('karafka/web')
 
-      # All good if version higher than 0.9.0.
-      return if version(Karafka::Web::VERSION) >= version('0.9.0.
+      # All good if version higher than 0.9.0.rc3 because we expect 0.9.0.rc3 or higher
+      return if version(Karafka::Web::VERSION) >= version('0.9.0.rc3')
 
       # If older web-ui used, we cannot allow it
       raise(
data/lib/karafka/instrumentation/callbacks/error.rb
CHANGED
@@ -6,6 +6,10 @@ module Karafka
     module Callbacks
       # Callback that kicks in when consumer error occurs and is published in a background thread
       class Error
+        include Helpers::ConfigImporter.new(
+          monitor: %i[monitor]
+        )
+
         # @param subscription_group_id [String] id of the current subscription group instance
         # @param consumer_group_id [String] id of the current consumer group
         # @param client_name [String] rdkafka client name
@@ -24,7 +28,7 @@ module Karafka
           # Same as with statistics (mor explanation there)
           return unless @client_name == client_name
 
-
+          monitor.instrument(
             'error.occurred',
             caller: self,
             subscription_group_id: @subscription_group_id,
@@ -32,6 +36,15 @@ module Karafka
             type: 'librdkafka.error',
             error: error
           )
+        rescue StandardError => e
+          monitor.instrument(
+            'error.occurred',
+            caller: self,
+            subscription_group_id: @subscription_group_id,
+            consumer_group_id: @consumer_group_id,
+            type: 'callbacks.error.error',
+            error: e
+          )
         end
       end
     end
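With the `rescue` added above, a failure raised inside an `error.occurred` subscriber is re-published as a `callbacks.error.error` event instead of crashing the rdkafka background thread. A minimal sketch of a subscriber that distinguishes this new type from regular `librdkafka.error` events (the subscription call is standard Karafka monitoring; the logger usage is just an illustration):

```ruby
Karafka.monitor.subscribe('error.occurred') do |event|
  case event[:type]
  when 'librdkafka.error'
    # Errors reported by librdkafka itself
    Karafka.logger.warn("librdkafka error: #{event[:error]}")
  when 'callbacks.error.error'
    # A previous error.occurred subscriber raised; reported here instead of
    # killing the callback thread
    Karafka.logger.error("error callback failed: #{event[:error]}")
  end
end
```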
data/lib/karafka/instrumentation/callbacks/oauthbearer_token_refresh.rb
CHANGED
@@ -5,6 +5,10 @@ module Karafka
     module Callbacks
       # Callback that is triggered when oauth token needs to be refreshed.
       class OauthbearerTokenRefresh
+        include Helpers::ConfigImporter.new(
+          monitor: %i[monitor]
+        )
+
         # @param bearer [Rdkafka::Consumer, Rdkafka::Admin] given rdkafka instance. It is needed as
         #   we need to have a reference to call `#oauthbearer_set_token` or
         #   `#oauthbearer_set_token_failure` upon the event.
@@ -17,11 +21,18 @@ module Karafka
         def call(_rd_config, bearer_name)
           return unless @bearer.name == bearer_name
 
-
+          monitor.instrument(
             'oauthbearer.token_refresh',
             bearer: @bearer,
             caller: self
           )
+        rescue StandardError => e
+          monitor.instrument(
+            'error.occurred',
+            caller: self,
+            type: 'callbacks.oauthbearer_token_refresh.error',
+            error: e
+          )
         end
       end
     end
data/lib/karafka/instrumentation/callbacks/rebalance.rb
CHANGED
@@ -6,6 +6,10 @@ module Karafka
       # Callback that connects to the librdkafka rebalance callback and converts those events into
       # our internal events
       class Rebalance
+        include Helpers::ConfigImporter.new(
+          monitor: %i[monitor]
+        )
+
         # @param subscription_group [Karafka::Routes::SubscriptionGroup] subscription group for
         #   which we want to manage rebalances
         def initialize(subscription_group)
@@ -49,7 +53,7 @@ module Karafka
         # @param name [String] name of the event
         # @param tpl [Rdkafka::Consumer::TopicPartitionList]
         def instrument(name, tpl)
-
+          monitor.instrument(
             "rebalance.#{name}",
             caller: self,
             # We keep the id references here for backwards compatibility as some of the monitors
@@ -60,6 +64,15 @@ module Karafka
             consumer_group: @subscription_group.consumer_group,
             tpl: tpl
           )
+        rescue StandardError => e
+          monitor.instrument(
+            'error.occurred',
+            caller: self,
+            subscription_group_id: @subscription_group.id,
+            consumer_group_id: @subscription_group.consumer_group.id,
+            type: "callbacks.rebalance.#{name}.error",
+            error: e
+          )
         end
       end
     end
data/lib/karafka/instrumentation/callbacks/statistics.rb
CHANGED
@@ -7,6 +7,10 @@ module Karafka
       # @see `WaterDrop::Instrumentation::Callbacks::Statistics` for details on why we decorate
       #   those statistics
       class Statistics
+        include Helpers::ConfigImporter.new(
+          monitor: %i[monitor]
+        )
+
         # @param subscription_group_id [String] id of the current subscription group
         # @param consumer_group_id [String] id of the current consumer group
         # @param client_name [String] rdkafka client name
@@ -26,7 +30,7 @@ module Karafka
          #   all the time.
           return unless @client_name == statistics['name']
 
-
+          monitor.instrument(
             'statistics.emitted',
             subscription_group_id: @subscription_group_id,
             consumer_group_id: @consumer_group_id,
@@ -36,12 +40,12 @@ module Karafka
         # as otherwise, in case of statistics which run in the main librdkafka thread, any crash
         # will hang the whole process.
         rescue StandardError => e
-
+          monitor.instrument(
             'error.occurred',
             caller: self,
             subscription_group_id: @subscription_group_id,
             consumer_group_id: @consumer_group_id,
-            type: 'statistics.
+            type: 'callbacks.statistics.error',
             error: e
           )
         end
data/lib/karafka/instrumentation/logger_listener.rb
CHANGED
@@ -313,8 +313,11 @@ module Karafka
       # Those can occur when emitted statistics are consumed by the end user and the processing
       # of statistics fails. The statistics are emitted from librdkafka main loop thread and
       # any errors there crash the whole thread
-      when 'statistics.
-        error "statistics
+      when 'callbacks.statistics.error'
+        error "callbacks.statistics processing failed due to an error: #{error}"
+        error details
+      when 'callbacks.error.error'
+        error "callbacks.error processing failed due to an error: #{error}"
         error details
       # Those will only occur when retries in the client fail and when they did not stop after
       # back-offs
data/lib/karafka/pro/processing/piping/consumer.rb
CHANGED
@@ -36,9 +36,9 @@ module Karafka
         # @note It will NOT deserialize the payload so it is fast
         #
         # @note We assume that there can be different number of partitions in the target topic,
-        #   this is why we use `key` based on the original topic
-        #
-        #
+        #   this is why we use `key` based on the original topic key and not the partition id.
+        #   This will not utilize partitions beyond the number of partitions of original topic,
+        #   but will accommodate for topics with less partitions.
         def pipe_async(topic:, message:)
           produce_async(
             build_pipe_message(topic: topic, message: message)
@@ -94,20 +94,26 @@ module Karafka
         #
         # @note If you need to alter this, please define the `#enhance_pipe_message` method
         def build_pipe_message(topic:, message:)
-          original_partition = message.partition.to_s
-
           pipe_message = {
             topic: topic,
-            key: original_partition,
             payload: message.raw_payload,
-            headers: message.
+            headers: message.raw_headers.merge(
               'original_topic' => message.topic,
-              'original_partition' =>
+              'original_partition' => message.partition.to_s,
               'original_offset' => message.offset.to_s,
               'original_consumer_group' => self.topic.consumer_group.id
             )
           }
 
+          # Use a key only if key was provided
+          if message.raw_key
+            pipe_message[:key] = message.raw_key
+          # Otherwise pipe creating a key that will assign it based on the original partition
+          # number
+          else
+            pipe_message[:key] = message.partition.to_s
+          end
+
           # Optional method user can define in consumer to enhance the dlq message hash with
           # some extra details if needed or to replace payload, etc
           if respond_to?(:enhance_pipe_message, true)
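The `build_pipe_message` change above switches the pipe key from the source partition id to the original message key whenever one exists. Extracted into a standalone sketch (names follow the diff; this is not the full Pro implementation), the selection logic boils down to:

```ruby
# Minimal sketch of the key selection used when piping a message:
# prefer the original raw key so related messages stay co-partitioned,
# fall back to the source partition id when no key was provided.
def pipe_key_for(message)
  message.raw_key || message.partition.to_s
end
```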
data/lib/karafka/templates/karafka.rb.erb
CHANGED
@@ -39,8 +39,8 @@ class KarafkaApp < Karafka::App
       # listen to only what you really need for given environment.
       Karafka.monitor.subscribe(
         Karafka::Instrumentation::LoggerListener.new(
-          # Karafka, when the logger is set to info
-          # internal messages
+          # Karafka, when the logger is set to info, produces logs each time it polls data from an
+          # internal messages queue. This can be extensive, so you can turn it off by setting below
           # to false.
           log_polling: true
         )
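The template comment above documents the new `log_polling` flag. Silencing the per-poll log lines in an app generated from this template would look like the following (a sketch based on the template; subscribe the listener wherever your `karafka.rb` configures instrumentation):

```ruby
Karafka.monitor.subscribe(
  Karafka::Instrumentation::LoggerListener.new(
    # Disable the polling log lines that otherwise appear on every poll
    # when the logger runs on the info level
    log_polling: false
  )
)
```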
data/lib/karafka/version.rb
CHANGED
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 2.4.0.rc1
+  version: 2.4.0
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2024-04-
+date: 2024-04-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: karafka-core
@@ -43,7 +43,7 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 2.4.0
+        version: 2.4.0
     - - "<"
       - !ruby/object:Gem::Version
         version: 2.5.0
@@ -53,7 +53,7 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 2.4.0
+        version: 2.4.0
     - - "<"
       - !ruby/object:Gem::Version
         version: 2.5.0
@@ -63,7 +63,7 @@ dependencies:
     requirements:
    - - ">="
       - !ruby/object:Gem::Version
-        version: 2.7.0
+        version: 2.7.0
     - - "<"
       - !ruby/object:Gem::Version
         version: 3.0.0
@@ -73,7 +73,7 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 2.7.0
+        version: 2.7.0
     - - "<"
       - !ruby/object:Gem::Version
         version: 3.0.0
@@ -535,7 +535,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.9
 signing_key:
 specification_version: 4
 summary: Karafka is Ruby and Rails efficient Kafka processing framework.
metadata.gz.sig
CHANGED
Binary file