karafka 2.4.0 → 2.4.1
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/CHANGELOG.md +9 -0
- data/Gemfile +2 -2
- data/Gemfile.lock +17 -15
- data/karafka.gemspec +1 -0
- data/lib/karafka/active_job/job_extensions.rb +3 -0
- data/lib/karafka/admin.rb +44 -0
- data/lib/karafka/cli/topics/align.rb +12 -2
- data/lib/karafka/cli/topics/plan.rb +12 -1
- data/lib/karafka/pro/active_job/dispatcher.rb +37 -9
- data/lib/karafka/routing/topic.rb +10 -1
- data/lib/karafka/setup/attributes_map.rb +7 -6
- data/lib/karafka/version.rb +1 -1
- data.tar.gz.sig +0 -0
- metadata +16 -2
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7f3d5fa334ede552c2b0097acf16a942318ef648bcdbdcb0e84ffefcd5e65772
+  data.tar.gz: d800e8e25a62df01000dee9e3ce5904f3ddf632f66ac49e0c71dde3a6702209b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ac438f23ca449023ff9537a5aa007879f2420dfdfe1cd81c1715322cfea53af8112cdf77663547155a40ba4e154cbc8e9c81df23dff9a6f843bb0601395b7271
+  data.tar.gz: 3e267b623cee9b649182ac08d34a68780e696e9ba24fda448caecb762ebf726f436b1b5b3415240f8409b4c7b66fc62c39c319c58128e429580f3468db1e152e
checksums.yaml.gz.sig
CHANGED
Binary file
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,14 @@
 # Karafka framework changelog
 
+## 2.4.1 (2024-05-10)
+- [Enhancement] Allow for usage of producer variants and alternative producers with ActiveJob Jobs (Pro).
+- [Enhancement] Support `:earliest` and `:latest` in `Karafka::Admin#seek_consumer_group`.
+- [Enhancement] Align configuration attributes mapper with exact librdkafka version used and not master.
+- [Maintenance] Use `base64` from RubyGems as it will no longer be part of standard library in Ruby 3.4.
+- [Fix] Support migrating via aliases and plan with aliases usage.
+- [Fix] Active with default set to `false` cannot be overwritten
+- [Fix] Fix inheritance of ActiveJob adapter `karafka_options` partitioner and dispatch method.
+
 ## 2.4.0 (2024-04-26)
 
 This release contains **BREAKING** changes. Make sure to read and apply upgrade notes.
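As a rough usage sketch of the first changelog entry (assuming a Karafka Pro application with the ActiveJob adapter; the job class and the `PAYMENTS_PRODUCER` constant are made-up names, not part of Karafka):

```ruby
# Hypothetical job in a Karafka Pro app. Since 2.4.1, the `producer:` option
# may be a callable that receives the job and returns the producer (or
# producer variant) to dispatch with; PAYMENTS_PRODUCER is assumed to be
# configured elsewhere in the application.
class PaymentsJob < ActiveJob::Base
  queue_as :payments

  karafka_options(
    dispatch_method: :produce_async,
    producer: ->(_job) { PAYMENTS_PRODUCER }
  )

  def perform(payment_id)
    # domain logic goes here
  end
end
```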
data/Gemfile
CHANGED
@@ -11,8 +11,8 @@ gemspec
 # Since some of those are only needed for some specs, they should never be required automatically
 group :integrations do
   gem 'activejob', require: false
-  gem 'karafka-testing', '>= 2.4.0
-  gem 'karafka-web', '>= 0.9.0
+  gem 'karafka-testing', '>= 2.4.0', require: false
+  gem 'karafka-web', '>= 0.9.0', require: false
   gem 'rspec', require: false
 end
 
data/Gemfile.lock
CHANGED
@@ -1,7 +1,8 @@
 PATH
   remote: .
   specs:
-    karafka (2.4.0)
+    karafka (2.4.1)
+      base64 (~> 0.2)
       karafka-core (>= 2.4.0, < 2.5.0)
       waterdrop (>= 2.7.0, < 3.0.0)
       zeitwerk (~> 2.3)
@@ -23,7 +24,7 @@ GEM
       mutex_m
       tzinfo (~> 2.0)
     base64 (0.2.0)
-    bigdecimal (3.1.
+    bigdecimal (3.1.8)
     byebug (11.1.3)
     concurrent-ruby (1.2.3)
     connection_pool (2.4.1)
@@ -36,27 +37,27 @@ GEM
     ffi (1.16.3)
     globalid (1.2.1)
       activesupport (>= 6.1)
-    i18n (1.14.
+    i18n (1.14.5)
       concurrent-ruby (~> 1.0)
     karafka-core (2.4.0)
       karafka-rdkafka (>= 0.15.0, < 0.16.0)
-    karafka-rdkafka (0.15.
+    karafka-rdkafka (0.15.1)
       ffi (~> 1.15)
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-testing (2.4.
-      karafka (>= 2.4.0
-      waterdrop (>= 2.7.0
-    karafka-web (0.9.
+    karafka-testing (2.4.3)
+      karafka (>= 2.4.0, < 2.5.0)
+      waterdrop (>= 2.7.0)
+    karafka-web (0.9.1)
       erubi (~> 1.4)
-      karafka (>= 2.4.0
-      karafka-core (>= 2.4.0
+      karafka (>= 2.4.0, < 2.5.0)
+      karafka-core (>= 2.4.0, < 2.5.0)
       roda (~> 3.68, >= 3.69)
       tilt (~> 2.0)
     mini_portile2 (2.8.6)
     minitest (5.22.3)
     mutex_m (0.2.0)
-    rack (3.0.
+    rack (3.0.11)
     rake (13.2.1)
     roda (3.79.0)
       rack
@@ -69,7 +70,7 @@ GEM
     rspec-expectations (3.13.0)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
-    rspec-mocks (3.13.
+    rspec-mocks (3.13.1)
      diff-lcs (>= 1.2.0, < 2.0)
      rspec-support (~> 3.13.0)
     rspec-support (3.13.1)
@@ -82,8 +83,9 @@ GEM
     tilt (2.3.0)
     tzinfo (2.0.6)
       concurrent-ruby (~> 1.0)
-    waterdrop (2.7.
+    waterdrop (2.7.2)
       karafka-core (>= 2.4.0, < 3.0.0)
+      karafka-rdkafka (>= 0.15.1)
       zeitwerk (~> 2.3)
     zeitwerk (2.6.13)
 
@@ -96,8 +98,8 @@ DEPENDENCIES
   byebug
   factory_bot
   karafka!
-  karafka-testing (>= 2.4.0
-  karafka-web (>= 0.9.0
+  karafka-testing (>= 2.4.0)
+  karafka-web (>= 0.9.0)
   rspec
   simplecov
 
data/karafka.gemspec
CHANGED
@@ -21,6 +21,7 @@ Gem::Specification.new do |spec|
     without having to focus on things that are not your business domain.
   DESC
 
+  spec.add_dependency 'base64', '~> 0.2'
   spec.add_dependency 'karafka-core', '>= 2.4.0', '< 2.5.0'
   spec.add_dependency 'waterdrop', '>= 2.7.0', '< 3.0.0'
   spec.add_dependency 'zeitwerk', '~> 2.3'
data/lib/karafka/active_job/job_extensions.rb
CHANGED
@@ -23,6 +23,9 @@ module Karafka
       # them
       App.config.internal.active_job.job_options_contract.validate!(new_options)
 
+      # We need to modify this hash because otherwise we would modify parent hash.
+      self._karafka_options = _karafka_options.dup
+
       new_options.each do |name, value|
         _karafka_options[name] = value
       end
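The `dup` added above prevents a subclass from mutating the options hash it inherited from its parent job class. A minimal, standalone illustration of the same copy-on-write pattern (plain Ruby, not Karafka's actual class hierarchy):

```ruby
class Base
  @options = {}

  class << self
    attr_accessor :options

    def inherited(subclass)
      super
      subclass.options = options # child starts with the parent's (shared) hash
    end

    def configure(new_options)
      self.options = options.dup # copy before writing, as in the Karafka fix
      options.merge!(new_options)
    end
  end
end

class Child < Base; end

Child.configure(dispatch_method: :produce_sync)

p Base.options  # => {} (without the dup it would also contain the child's option)
p Child.options # => {:dispatch_method=>:produce_sync}
```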
data/lib/karafka/admin.rb
CHANGED
@@ -10,6 +10,12 @@ module Karafka
   # Cluster on which operations are performed can be changed via `admin.kafka` config, however
   # there is no multi-cluster runtime support.
   module Admin
+    # More or less number of seconds of 1 hundred years
+    # Used for time referencing that does not have to be accurate but needs to be big
+    HUNDRED_YEARS = 100 * 365.25 * 24 * 60 * 60
+
+    private_constant :HUNDRED_YEARS
+
     class << self
       # Allows us to read messages from the topic
       #
@@ -156,6 +162,18 @@ module Karafka
       #
       # @example Move offset to 5 seconds ago on partition 2
       #   Karafka::Admin.seek_consumer_group('group-id', { 'topic' => { 2 => 5.seconds.ago } })
+      #
+      # @example Move to the earliest offset on all the partitions of a topic
+      #   Karafka::Admin.seek_consumer_group('group-id', { 'topic' => 'earliest' })
+      #
+      # @example Move to the latest (high-watermark) offset on all the partitions of a topic
+      #   Karafka::Admin.seek_consumer_group('group-id', { 'topic' => 'latest' })
+      #
+      # @example Move offset of a single partition to earliest
+      #   Karafka::Admin.seek_consumer_group('group-id', { 'topic' => { 1 => 'earliest' } })
+      #
+      # @example Move offset of a single partition to latest
+      #   Karafka::Admin.seek_consumer_group('group-id', { 'topic' => { 1 => 'latest' } })
       def seek_consumer_group(consumer_group_id, topics_with_partitions_and_offsets)
         tpl_base = {}
 
@@ -174,6 +192,32 @@ module Karafka
           end
         end
 
+        tpl_base.each_value do |partitions|
+          partitions.transform_values! do |position|
+            # Support both symbol and string based references
+            casted_position = position.is_a?(Symbol) ? position.to_s : position
+
+            # This remap allows us to transform some special cases in a reference that can be
+            # understood by Kafka
+            case casted_position
+            # Earliest is not always 0. When compacting/deleting it can be much later, that's why
+            # we fetch the oldest possible offset
+            when 'earliest'
+              Time.now - HUNDRED_YEARS
+            # Latest will always be the high-watermark offset and we can get it just by getting
+            # a future position
+            when 'latest'
+              Time.now + HUNDRED_YEARS
+            # Same as `'latest'`
+            when false
+              Time.now - HUNDRED_YEARS
+            # Regular offset case
+            else
+              position
+            end
+          end
+        end
+
         tpl = Rdkafka::Consumer::TopicPartitionList.new
         # In case of time based location, we need to to a pre-resolution, that's why we keep it
         # separately
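A standalone sketch of the remapping introduced above: symbolic positions become timestamps far in the past or future, which the existing time-based seek resolution then turns into the earliest or latest (high-watermark) offsets. The constant mirrors the diff; the helper name is ours:

```ruby
# Rough, self-contained version of the position remap from the diff above.
HUNDRED_YEARS = 100 * 365.25 * 24 * 60 * 60

def remap_position(position)
  casted = position.is_a?(Symbol) ? position.to_s : position

  case casted
  when 'earliest' then Time.now - HUNDRED_YEARS # oldest available offset
  when 'latest'   then Time.now + HUNDRED_YEARS # high-watermark offset
  when false      then Time.now - HUNDRED_YEARS # remapped the same way in the diff
  else position                                 # explicit offsets / times pass through
  end
end

p remap_position(:earliest) # => Time roughly 100 years in the past
p remap_position('latest')  # => Time roughly 100 years in the future
p remap_position(42)        # => 42
```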
data/lib/karafka/cli/topics/align.rb
CHANGED
@@ -89,9 +89,19 @@ module Karafka
           desired_configs.transform_keys!(&:to_s)
 
           topic_with_configs.configs.each do |config|
-
+            names = config.synonyms.map(&:name) << config.name
 
-
+            # We move forward only if given topic config is for altering
+            next if (desired_configs.keys & names).empty?
+
+            desired_config = nil
+
+            # We then find last defined value in our configs for a given attribute
+            # Since attributes can have synonyms, we select last one, which will represent the
+            # last defined value in case someone defined same multiple times
+            desired_configs.each do |name, value|
+              desired_config = value if names.include?(name)
+            end
 
             # Do not migrate if existing and desired values are the same
             next if desired_config == config.value
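A standalone sketch of the synonym-aware lookup above: the desired value may be declared under any of a config's names, and the last matching declaration wins. The config names here are only illustrative:

```ruby
# Given a config's canonical name plus its synonyms, pick the last desired
# value declared under any of those names (mirrors the loop in the diff).
def desired_value(canonical_name, synonyms, desired_configs)
  names = synonyms + [canonical_name]
  picked = nil

  desired_configs.each do |name, value|
    picked = value if names.include?(name)
  end

  picked
end

desired = { 'log.retention.ms' => '100000', 'retention.ms' => '200000' }

p desired_value('retention.ms', ['log.retention.ms'], desired)
# => "200000" (the later matching declaration wins)
```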
data/lib/karafka/cli/topics/plan.rb
CHANGED
@@ -143,14 +143,25 @@ module Karafka
             }
 
             scoped = @topics_to_alter[declarative][declarative_name]
+            # declarative name can be a synonym. In such cases we remap it during the discovery
+            # below
+            final_name = declarative_name
 
             topic_c.configs.each do |config|
-
+              names = config.synonyms.map(&:name) << config.name
 
+              next unless names.include?(declarative_name)
+
+              # Always use a non-synonym name if differs
+              final_name = config.name
               scoped[:action] = :change
               scoped[:from] = config.value
             end
 
+            # Aligns the name in case synonym was used
+            target = @topics_to_alter[declarative].delete(declarative_name)
+            @topics_to_alter[declarative][final_name] = target
+
             # Remove change definitions that would migrate to the same value as present
             @topics_to_alter[declarative].delete_if do |_name, details|
               details[:from] == details[:to]
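A standalone sketch of the re-keying done above: when a change was declared under a synonym, the plan should report it under the canonical name discovered from the cluster. The hash layout is simplified compared to Karafka's internal structures:

```ruby
changes = {
  'log.retention.ms' => { action: :change, from: '100000', to: '200000' }
}

declared_name  = 'log.retention.ms'
canonical_name = 'retention.ms' # resolved via the config's synonyms, as in the diff

if canonical_name != declared_name
  changes[canonical_name] = changes.delete(declared_name)
end

p changes
# => {"retention.ms"=>{:action=>:change, :from=>"100000", :to=>"200000"}}
```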
data/lib/karafka/pro/active_job/dispatcher.rb
CHANGED
@@ -28,14 +28,23 @@ module Karafka
         # each job.
         partitioner: nil,
         # Allows for usage of `:key` or `:partition_key`
-        partition_key_type: :key
+        partition_key_type: :key,
+        # Allows for setting a callable producer since at the moment of defining the class,
+        # variants may not be available
+        #
+        # We do not initialize it with `-> { ::Karafka.producer }` so we do not have to call it
+        # each time for the defaults to preserve CPU cycles.
+        #
+        # We also do **not** cache the execution of this producer lambda because we want to
+        # support job args based producer selection
+        producer: nil
       }.freeze
 
       private_constant :DEFAULTS
 
       # @param job [ActiveJob::Base] job
       def dispatch(job)
-
+        producer(job).public_send(
           fetch_option(job, :dispatch_method, DEFAULTS),
           dispatch_details(job).merge!(
             topic: job.queue_name,
@@ -47,27 +56,46 @@ module Karafka
       # Bulk dispatches multiple jobs using the Rails 7.1+ API
       # @param jobs [Array<ActiveJob::Base>] jobs we want to dispatch
       def dispatch_many(jobs)
-
+        # First level is type of dispatch and second is the producer we want to use to dispatch
+        dispatches = Hash.new do |hash, key|
+          hash[key] = Hash.new do |hash2, key2|
+            hash2[key2] = []
+          end
+        end
 
         jobs.each do |job|
           d_method = fetch_option(job, :dispatch_many_method, DEFAULTS)
+          producer = producer(job)
 
-          dispatches[d_method] << dispatch_details(job).merge!(
+          dispatches[d_method][producer] << dispatch_details(job).merge!(
             topic: job.queue_name,
             payload: ::ActiveSupport::JSON.encode(serialize_job(job))
           )
         end
 
-        dispatches.each do |
-
-
-
-
+        dispatches.each do |d_method, producers|
+          producers.each do |producer, messages|
+            producer.public_send(
+              d_method,
+              messages
+            )
+          end
         end
       end
 
       private
 
+      # Selects the producer based on options. If callable `:producer` is defined, it will use
+      # it. If not, will just use the `Karafka.producer`.
+      #
+      # @param job [ActiveJob::Base] job instance
+      # @return [WaterDrop::Producer, WaterDrop::Producer::Variant] producer or a variant
+      def producer(job)
+        dynamic_producer = fetch_option(job, :producer, DEFAULTS)
+
+        dynamic_producer ? dynamic_producer.call(job) : ::Karafka.producer
+      end
+
       # @param job [ActiveJob::Base] job instance
       # @return [Hash] hash with dispatch details to which we merge topic and payload
       def dispatch_details(job)
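A standalone sketch of the grouping used in `dispatch_many` above: jobs are bucketed by dispatch method and then by the producer that should send them, so each producer receives one batch per method. The job data here is made up:

```ruby
# Two-level default hash: { dispatch_method => { producer => [messages] } }
dispatches = Hash.new do |hash, method|
  hash[method] = Hash.new { |inner, producer| inner[producer] = [] }
end

jobs = [
  { method: :produce_many_async, producer: :default,       payload: 'job-1' },
  { method: :produce_many_async, producer: :transactional, payload: 'job-2' },
  { method: :produce_many_sync,  producer: :default,       payload: 'job-3' }
]

jobs.each do |job|
  dispatches[job[:method]][job[:producer]] << { payload: job[:payload] }
end

dispatches.each do |method, producers|
  producers.each do |producer, messages|
    # In Karafka this is `producer.public_send(method, messages)`
    puts "#{producer} -> #{method}: #{messages.size} message(s)"
  end
end
```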
@@ -81,7 +81,16 @@ module Karafka
|
|
81
81
|
# Allows to disable topic by invoking this method and setting it to `false`.
|
82
82
|
# @param active [Boolean] should this topic be consumed or not
|
83
83
|
def active(active)
|
84
|
-
|
84
|
+
# Do not allow for active overrides. Basically if this is set on the topic level, defaults
|
85
|
+
# will not overwrite it and this is desired. Otherwise because of the fact that this is
|
86
|
+
# not a full feature config but just a flag, default value would always overwrite the
|
87
|
+
# per-topic config since defaults application happens after the topic config block
|
88
|
+
unless @active_assigned
|
89
|
+
@active = active
|
90
|
+
@active_assigned = true
|
91
|
+
end
|
92
|
+
|
93
|
+
@active
|
85
94
|
end
|
86
95
|
|
87
96
|
# @return [Class] consumer class that we should use
|
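A standalone sketch of the write-once guard above: the first explicit call (for example `active false` inside a topic block) sticks, so the later application of defaults can no longer flip it back. The class here is a stand-in, not Karafka's topic:

```ruby
class TopicFlag
  def initialize
    @active = true
  end

  # Mirrors the guarded setter from the diff: only the first assignment counts.
  def active(value)
    unless @active_assigned
      @active = value
      @active_assigned = true
    end

    @active
  end
end

topic = TopicFlag.new
topic.active(false)  # set inside the per-topic configuration block
topic.active(true)   # defaults applied afterwards no longer overwrite it
p topic.active(true) # => false
```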
data/lib/karafka/setup/attributes_map.rb
CHANGED
@@ -52,9 +52,7 @@ module Karafka
         fetch.wait.max.ms
         group.id
         group.instance.id
-        group.protocol
         group.protocol.type
-        group.remote.assignor
         heartbeat.interval.ms
         interceptors
         internal.termination.signal
@@ -153,6 +151,7 @@ module Karafka
       # List of rdkafka producer accepted attributes
       PRODUCER = %i[
         acks
+        allow.auto.create.topics
         api.version.fallback.ms
         api.version.request
         api.version.request.timeout.ms
@@ -289,7 +288,12 @@ module Karafka
       ].freeze
 
       # Location of the file with rdkafka settings list
-      SOURCE =
+      SOURCE = <<~SOURCE.delete("\n").gsub(/\s+/, '/')
+        https://raw.githubusercontent.com
+        confluentinc/librdkafka
+        v#{Rdkafka::LIBRDKAFKA_VERSION}
+        CONFIGURATION.md
+      SOURCE
 
       private_constant :SOURCE
 
@@ -337,9 +341,6 @@ module Karafka
           end
         end
 
-        # This can be removed when 0.13 librdkafka is released
-        attributes[:producer].delete_if { |val| val == 'allow.auto.create.topics' }
-
         attributes.transform_values!(&:sort)
         attributes.each_value { |vals| vals.map!(&:to_sym) }
         attributes
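A standalone sketch of what the `SOURCE` change accomplishes: the attribute list is now resolved against the exact librdkafka version shipped with `karafka-rdkafka` rather than the moving `master` branch. The version string below is hard-coded for illustration; Karafka reads it from `Rdkafka::LIBRDKAFKA_VERSION`:

```ruby
librdkafka_version = '2.3.0' # stand-in for Rdkafka::LIBRDKAFKA_VERSION

source = [
  'https://raw.githubusercontent.com',
  'confluentinc/librdkafka',
  "v#{librdkafka_version}",
  'CONFIGURATION.md'
].join('/')

puts source
# => https://raw.githubusercontent.com/confluentinc/librdkafka/v2.3.0/CONFIGURATION.md
```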
data/lib/karafka/version.rb
CHANGED
-  VERSION = '2.4.0'
+  VERSION = '2.4.1'
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 2.4.0
+  version: 2.4.1
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -35,8 +35,22 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2024-
+date: 2024-05-10 00:00:00.000000000 Z
 dependencies:
+- !ruby/object:Gem::Dependency
+  name: base64
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.2'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.2'
 - !ruby/object:Gem::Dependency
   name: karafka-core
   requirement: !ruby/object:Gem::Requirement
metadata.gz.sig
CHANGED
Binary file