karafka 2.0.19 → 2.0.21

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: be91c3848b757c6af4c25f905df2b081629532bd29dbcea23ed2ef0af2e4e4a2
4
- data.tar.gz: 6823d4335e4b395546642101d6754b97958c86810cbcd12819559acff74bd90d
3
+ metadata.gz: 9de6cb2d19b24258eb60548b383e664dc035f1f48ffc7077ca5f238b3a85aa3e
4
+ data.tar.gz: 706d4e81a74b1aad9d1f9b13a67d168a7ef3c45088b89ec710d29abb3a767f11
5
5
  SHA512:
6
- metadata.gz: fce0259ee987e37c01ea037f81ea91b4eb770ea8eabcb9f93c66aa1a1960c903030648b5441945ef28f43a88660d18240e6db61f6885a169d70eb46174543616
7
- data.tar.gz: f93985c98daba5965f8f0597da4744d1dae0603f24a6a44f4b337462f66c8b0f08d0c22dd734205a0aebe7c3dbdb73237ff568f7f1ff842b38c05a7f4b5ce463
6
+ metadata.gz: a22be6ff489f9f7cce39e0e4d6d374a70680124842afeeb962e33eaa8c6cd3db8cbcd6f3251543653c98777d025b6d9b0385f4580d92fd4b807e835a68296a43
7
+ data.tar.gz: 466d9723427771594701071e5a27a3a1a53b6b4efce9ffa6b241f5261c9a887136b2a8a4a90b35e216b2af01539336bde5377f4edc1ab99837087dd751b830a8
checksums.yaml.gz.sig CHANGED
Binary file
@@ -0,0 +1 @@
1
+ custom: ['https://karafka.io/#become-pro']
data/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
1
1
  # Karafka framework changelog
2
2
 
3
+ ## 2.0.21 (2022-11-25)
4
+ - [Improvement] Make revocation jobs for LRJ topics non-blocking to prevent blocking polling when someone uses non-revocation aware LRJ jobs and revocation happens.
5
+
6
+ ## 2.0.20 (2022-11-24)
7
+ - [Improvement] Support `group.instance.id` assignment (static group membership) for a case where a single consumer group has multiple subscription groups (#1173).
8
+
3
9
  ## 2.0.19 (2022-11-20)
4
10
  - **[Feature]** Provide ability to skip failing messages without dispatching them to an alternative topic (DLQ).
5
11
  - [Improvement] Improve the integration with Ruby on Rails by preventing double-require of components.
data/Gemfile.lock CHANGED
@@ -1,8 +1,8 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- karafka (2.0.19)
5
- karafka-core (>= 2.0.2, < 3.0.0)
4
+ karafka (2.0.21)
5
+ karafka-core (>= 2.0.4, < 3.0.0)
6
6
  rdkafka (>= 0.12)
7
7
  thor (>= 0.20)
8
8
  waterdrop (>= 2.4.1, < 3.0.0)
@@ -30,7 +30,7 @@ GEM
30
30
  activesupport (>= 5.0)
31
31
  i18n (1.12.0)
32
32
  concurrent-ruby (~> 1.0)
33
- karafka-core (2.0.3)
33
+ karafka-core (2.0.4)
34
34
  concurrent-ruby (>= 1.1)
35
35
  mini_portile2 (2.8.0)
36
36
  minitest (5.16.3)
@@ -79,4 +79,4 @@ DEPENDENCIES
79
79
  simplecov
80
80
 
81
81
  BUNDLED WITH
82
- 2.3.24
82
+ 2.3.26
data/karafka.gemspec CHANGED
@@ -12,11 +12,16 @@ Gem::Specification.new do |spec|
12
12
  spec.authors = ['Maciej Mensfeld']
13
13
  spec.email = %w[contact@karafka.io]
14
14
  spec.homepage = 'https://karafka.io'
15
- spec.summary = 'Efficient Kafka processing framework for Ruby and Rails'
16
- spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
17
15
  spec.licenses = ['LGPL-3.0', 'Commercial']
16
+ spec.summary = 'Karafka is Ruby and Rails efficient Kafka processing framework.'
17
+ spec.description = <<-DESC
18
+ Karafka is Ruby and Rails efficient Kafka processing framework.
18
19
 
19
- spec.add_dependency 'karafka-core', '>= 2.0.2', '< 3.0.0'
20
+ Karafka allows you to capture everything that happens in your systems in large scale,
21
+ without having to focus on things that are not your business domain.
22
+ DESC
23
+
24
+ spec.add_dependency 'karafka-core', '>= 2.0.4', '< 3.0.0'
20
25
  spec.add_dependency 'rdkafka', '>= 0.12'
21
26
  spec.add_dependency 'thor', '>= 0.20'
22
27
  spec.add_dependency 'waterdrop', '>= 2.4.1', '< 3.0.0'
@@ -16,11 +16,6 @@ module Karafka
16
16
  class ConsumerGroupCoordinator
17
17
  # @param group_size [Integer] number of separate subscription groups in a consumer group
18
18
  def initialize(group_size)
19
- # We need two locks here:
20
- # - first one is to decrement the number of listeners doing work
21
- # - second to ensure only one client is being closed the same time and that others can
22
- # wait actively (not locked)
23
- @work_mutex = Mutex.new
24
19
  @shutdown_lock = Mutex.new
25
20
  @group_size = group_size
26
21
  @finished = Set.new
@@ -18,8 +18,7 @@ module Karafka
18
18
  # Pro jobs
19
19
  module Jobs
20
20
  # The main job type in a non-blocking variant.
21
- # This variant works "like" the regular consumption but pauses the partition for as long
22
- # as it is needed until a job is done.
21
+ # This variant works "like" the regular consumption but does not block the queue.
23
22
  #
24
23
  # It can be useful when having long lasting jobs that would exceed `max.poll.interval`
25
24
  # if would block.
@@ -0,0 +1,37 @@
1
+ # frozen_string_literal: true
2
+
3
+ # This Karafka component is a Pro component under a commercial license.
4
+ # This Karafka component is NOT licensed under LGPL.
5
+ #
6
+ # All of the commercial components are present in the lib/karafka/pro directory of this
7
+ # repository and their usage requires commercial license agreement.
8
+ #
9
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
10
+ #
11
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
12
+ # your code to Maciej Mensfeld.
13
+
14
+ module Karafka
15
+ module Pro
16
+ # Pro components related to processing part of Karafka
17
+ module Processing
18
+ # Pro jobs
19
+ module Jobs
20
+ # The revoked job type in a non-blocking variant.
21
+ # This variant works "like" the regular revoked but does not block the queue.
22
+ #
23
+ # It can be useful when having long lasting jobs that would exceed `max.poll.interval`
24
+ # in scenarios where there are more jobs than threads, without this being async we
25
+ # would potentially stop polling
26
+ class RevokedNonBlocking < ::Karafka::Processing::Jobs::Revoked
27
+ # Makes this job non-blocking from the start
28
+ # @param args [Array] any arguments accepted by `::Karafka::Processing::Jobs::Revoked`
29
+ def initialize(*args)
30
+ super
31
+ @non_blocking = true
32
+ end
33
+ end
34
+ end
35
+ end
36
+ end
37
+ end
@@ -28,6 +28,18 @@ module Karafka
28
28
  super
29
29
  end
30
30
  end
31
+
32
+ # @param executor [Karafka::Processing::Executor]
33
+ # @return [Karafka::Processing::Jobs::Revoked] revocation job for non LRJ
34
+ # @return [Karafka::Processing::Jobs::RevokedNonBlocking] revocation job that is
35
+ # non-blocking, so when revocation job is scheduled for LRJ it also will not block
36
+ def revoked(executor)
37
+ if executor.topic.long_running_job?
38
+ Jobs::RevokedNonBlocking.new(executor)
39
+ else
40
+ super
41
+ end
42
+ end
31
43
  end
32
44
  end
33
45
  end
@@ -40,6 +40,9 @@ module Karafka
40
40
  consumer_group.topics.each do |topic|
41
41
  Contracts::Topic.new.validate!(topic.to_h)
42
42
  end
43
+
44
+ # Initialize subscription groups after all the routing is done
45
+ consumer_group.subscription_groups
43
46
  end
44
47
  end
45
48
 
@@ -14,7 +14,7 @@ module Karafka
14
14
  # It allows us to store the "current" subscription group defined in the routing
15
15
  # This subscription group id is then injected into topics, so we can compute the subscription
16
16
  # groups
17
- attr_accessor :current_subscription_group_name
17
+ attr_accessor :current_subscription_group_id
18
18
 
19
19
  # @param name [String, Symbol] raw name of this consumer group. Raw means, that it does not
20
20
  # yet have an application client_id namespace, this will be added here by default.
@@ -24,6 +24,9 @@ module Karafka
24
24
  @name = name.to_s
25
25
  @id = Karafka::App.config.consumer_mapper.call(name)
26
26
  @topics = Topics.new([])
27
+ # Initialize the subscription group so there's always a value for it, since even if not
28
+ # defined directly, a subscription group will be created
29
+ @current_subscription_group_id = SecureRandom.uuid
27
30
  end
28
31
 
29
32
  # @return [Boolean] true if this consumer group should be active in our current process
@@ -41,7 +44,7 @@ module Karafka
41
44
  built_topic = @topics.last
42
45
  # We overwrite it conditionally in case it was not set by the user inline in the topic
43
46
  # block definition
44
- built_topic.subscription_group ||= current_subscription_group_name
47
+ built_topic.subscription_group ||= current_subscription_group_id
45
48
  built_topic
46
49
  end
47
50
 
@@ -52,19 +55,24 @@ module Karafka
52
55
  def subscription_group=(name, &block)
53
56
  # We cast it here, so the routing supports symbol based but that's anyhow later on
54
57
  # validated as a string
55
- self.current_subscription_group_name = name
58
+ @current_subscription_group_id = name
56
59
 
57
60
  Proxy.new(self, &block)
58
61
 
59
62
  # We need to reset the current subscription group after it is used, so it won't leak
60
63
  # outside to other topics that would be defined without a defined subscription group
61
- self.current_subscription_group_name = nil
64
+ @current_subscription_group_id = SecureRandom.uuid
62
65
  end
63
66
 
64
67
  # @return [Array<Routing::SubscriptionGroup>] all the subscription groups build based on
65
68
  # the consumer group topics
66
69
  def subscription_groups
67
- App.config.internal.routing.subscription_groups_builder.call(topics)
70
+ @subscription_groups ||= App
71
+ .config
72
+ .internal
73
+ .routing
74
+ .subscription_groups_builder
75
+ .call(topics)
68
76
  end
69
77
 
70
78
  # Hashed version of consumer group that can be used for validation purposes
@@ -8,13 +8,18 @@ module Karafka
8
8
  # @note One subscription group will always belong to one consumer group, but one consumer
9
9
  # group can have multiple subscription groups.
10
10
  class SubscriptionGroup
11
- attr_reader :id, :topics
11
+ attr_reader :id, :topics, :kafka
12
12
 
13
+ # @param position [Integer] position of this subscription group in all the subscriptions
14
+ # groups array. We need to have this value for sake of static group memberships, where
15
+ # we need a "in-between" restarts unique identifier
13
16
  # @param topics [Karafka::Routing::Topics] all the topics that share the same key settings
14
17
  # @return [SubscriptionGroup] built subscription group
15
- def initialize(topics)
16
- @id = SecureRandom.uuid
18
+ def initialize(position, topics)
19
+ @id = "#{topics.first.subscription_group}_#{position}"
20
+ @position = position
17
21
  @topics = topics
22
+ @kafka = build_kafka
18
23
  freeze
19
24
  end
20
25
 
@@ -33,12 +38,22 @@ module Karafka
33
38
  @topics.first.max_wait_time
34
39
  end
35
40
 
41
+ private
42
+
36
43
  # @return [Hash] kafka settings are a bit special. They are exactly the same for all of the
37
44
  # topics but they lack the group.id (unless explicitly) provided. To make it compatible
38
45
  # with our routing engine, we inject it before it will go to the consumer
39
- def kafka
46
+ def build_kafka
40
47
  kafka = Setup::AttributesMap.consumer(@topics.first.kafka.dup)
41
48
 
49
+ # If we use static group memberships, there can be a case, where same instance id would
50
+ # be set on many subscription groups as the group instance id from Karafka perspective is
51
+ set per config. Each instance, even if subscribed to different topics, needs to
52
+ have it fully unique. To make sure of that, we just add an extra postfix at the end that
53
+ # increments.
54
+ group_instance_id = kafka.fetch(:'group.instance.id', false)
55
+
56
+ kafka[:'group.instance.id'] = "#{group_instance_id}_#{@position}" if group_instance_id
42
57
  kafka[:'client.id'] ||= Karafka::App.config.client_id
43
58
  kafka[:'group.id'] ||= @topics.first.consumer_group.id
44
59
  kafka[:'auto.offset.reset'] ||= @topics.first.initial_offset
@@ -24,6 +24,10 @@ module Karafka
24
24
 
25
25
  private_constant :DISTRIBUTION_KEYS
26
26
 
27
+ def initialize
28
+ @position = -1
29
+ end
30
+
27
31
  # @param topics [Karafka::Routing::Topics] all the topics based on which we want to build
28
32
  # subscription groups
29
33
  # @return [Array<SubscriptionGroup>] all subscription groups we need in separate threads
@@ -34,7 +38,7 @@ module Karafka
34
38
  .values
35
39
  .map { |value| value.map(&:last) }
36
40
  .map { |topics_array| Routing::Topics.new(topics_array) }
37
- .map { |grouped_topics| SubscriptionGroup.new(grouped_topics) }
41
+ .map { |grouped_topics| SubscriptionGroup.new(@position += 1, grouped_topics) }
38
42
  end
39
43
 
40
44
  private
@@ -3,5 +3,5 @@
3
3
  # Main module namespace
4
4
  module Karafka
5
5
  # Current Karafka version
6
- VERSION = '2.0.19'
6
+ VERSION = '2.0.21'
7
7
  end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: karafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.0.19
4
+ version: 2.0.21
5
5
  platform: ruby
6
6
  authors:
7
7
  - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
35
35
  Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
36
36
  MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
37
37
  -----END CERTIFICATE-----
38
- date: 2022-11-20 00:00:00.000000000 Z
38
+ date: 2022-11-25 00:00:00.000000000 Z
39
39
  dependencies:
40
40
  - !ruby/object:Gem::Dependency
41
41
  name: karafka-core
@@ -43,7 +43,7 @@ dependencies:
43
43
  requirements:
44
44
  - - ">="
45
45
  - !ruby/object:Gem::Version
46
- version: 2.0.2
46
+ version: 2.0.4
47
47
  - - "<"
48
48
  - !ruby/object:Gem::Version
49
49
  version: 3.0.0
@@ -53,7 +53,7 @@ dependencies:
53
53
  requirements:
54
54
  - - ">="
55
55
  - !ruby/object:Gem::Version
56
- version: 2.0.2
56
+ version: 2.0.4
57
57
  - - "<"
58
58
  - !ruby/object:Gem::Version
59
59
  version: 3.0.0
@@ -119,7 +119,11 @@ dependencies:
119
119
  - - "~>"
120
120
  - !ruby/object:Gem::Version
121
121
  version: '2.3'
122
- description: Framework used to simplify Apache Kafka based Ruby applications development
122
+ description: |2
123
+ Karafka is Ruby and Rails efficient Kafka processing framework.
124
+
125
+ Karafka allows you to capture everything that happens in your systems in large scale,
126
+ without having to focus on things that are not your business domain.
123
127
  email:
124
128
  - contact@karafka.io
125
129
  executables:
@@ -130,6 +134,7 @@ files:
130
134
  - ".coditsu/ci.yml"
131
135
  - ".console_irbrc"
132
136
  - ".diffend.yml"
137
+ - ".github/FUNDING.yml"
133
138
  - ".github/ISSUE_TEMPLATE/bug_report.md"
134
139
  - ".github/ISSUE_TEMPLATE/feature_request.md"
135
140
  - ".github/workflows/ci.yml"
@@ -226,6 +231,7 @@ files:
226
231
  - lib/karafka/pro/performance_tracker.rb
227
232
  - lib/karafka/pro/processing/coordinator.rb
228
233
  - lib/karafka/pro/processing/jobs/consume_non_blocking.rb
234
+ - lib/karafka/pro/processing/jobs/revoked_non_blocking.rb
229
235
  - lib/karafka/pro/processing/jobs_builder.rb
230
236
  - lib/karafka/pro/processing/partitioner.rb
231
237
  - lib/karafka/pro/processing/scheduler.rb
@@ -352,5 +358,5 @@ requirements: []
352
358
  rubygems_version: 3.3.7
353
359
  signing_key:
354
360
  specification_version: 4
355
- summary: Efficient Kafka processing framework for Ruby and Rails
361
+ summary: Karafka is Ruby and Rails efficient Kafka processing framework.
356
362
  test_files: []
metadata.gz.sig CHANGED
Binary file