karafka 2.0.9 → 2.0.11

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: ee9c609249fea5e996d9506bd3e04435dbcc189addcd6383a092b080e776f525
4
- data.tar.gz: 6ed354b21361966f1988bb90b29cea14d842fb02299f12bd30ab4ec57eaf8cc1
3
+ metadata.gz: 00fdec9b351cb2da897d141a0dbb97816d7b0e53d7548b86d1ff48c5a8d5af3a
4
+ data.tar.gz: 514964e409f4904932ee9e7cc1aa27094820d2cbe875c75ad49c82ce41b8afbf
5
5
  SHA512:
6
- metadata.gz: a20a1bf2d2b86fcd63bf2e036d535c1d7aa4d06943cc00a414851e8f0a0054054eff621b67bfd355456ac93f1561931aec598b43c0ca535ceaa57c9d94957378
7
- data.tar.gz: '05996101b929a143926508a0afb69e6e6b09de04fd088dd06c83cba122efc9cfecdd7586892c80ab5409964737dd4a74b0ecea0f8a30ff1dfbf032f6bb289288'
6
+ metadata.gz: ba9281c9bfdb3a7a2a4c4121c26d1debb4bd39af07a190172a15f34705ee65bd9a1c8ce2e3773ccb3fe749dc83025bf96e0dde31664f1c6541e5923cad87b7a5
7
+ data.tar.gz: 95f150e80a77f68c16f0ecce849403a7afdcaf50f67bdaa1471074173f8b27b5a6b66d8a5cda8c3dc39106d8153ba843a35c9123883dc803b7bb05ec2c1f47c3
checksums.yaml.gz.sig CHANGED
Binary file
@@ -52,9 +52,14 @@ jobs:
52
52
  fail-fast: false
53
53
  matrix:
54
54
  ruby:
55
+ # We run it against the oldest and the newest of a given major to make sure that there
56
+ # are no syntax-sugars that we would use that were introduced down the road
55
57
  - '3.1'
58
+ - '3.1.0'
56
59
  - '3.0'
60
+ - '3.0.0'
57
61
  - '2.7'
62
+ - '2.7.0'
58
63
  include:
59
64
  - ruby: '3.1'
60
65
  coverage: 'true'
data/CHANGELOG.md CHANGED
@@ -1,5 +1,12 @@
1
1
  # Karafka framework changelog
2
2
 
3
+ ## 2.0.11 (2022-09-29)
4
+ - Report early on errors related to network and on max poll interval being exceeded to indicate critical problems that will be retried but may mean some underlying problems in the system.
5
+ - Fix support of Ruby 2.7.0 to 2.7.2 (#1045)
6
+
7
+ ## 2.0.10 (2022-09-23)
8
+ - Improve error recovery by delegating the recovery to the existing `librdkafka` instance.
9
+
3
10
  ## 2.0.9 (2022-09-22)
4
11
  - Fix Singleton not visible when used in PORO (#1034)
5
12
  - Divide pristine specs into pristine and poro. Pristine will still have helpers loaded, poro will have nothing.
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- karafka (2.0.9)
4
+ karafka (2.0.11)
5
5
  karafka-core (>= 2.0.2, < 3.0.0)
6
6
  rdkafka (>= 0.12)
7
7
  thor (>= 0.20)
@@ -61,14 +61,13 @@ GEM
61
61
  thor (1.2.1)
62
62
  tzinfo (2.0.5)
63
63
  concurrent-ruby (~> 1.0)
64
- waterdrop (2.4.1)
64
+ waterdrop (2.4.2)
65
65
  karafka-core (>= 2.0.2, < 3.0.0)
66
66
  rdkafka (>= 0.10)
67
67
  zeitwerk (~> 2.3)
68
68
  zeitwerk (2.6.0)
69
69
 
70
70
  PLATFORMS
71
- arm64-darwin
72
71
  x86_64-linux
73
72
 
74
73
  DEPENDENCIES
data/bin/integrations CHANGED
@@ -81,7 +81,7 @@ class Scenario
81
81
 
82
82
  # @return [Boolean] any spec that is not a regular one should not run in parallel with others
83
83
  def linear?
84
- !type == :regular
84
+ type != :regular
85
85
  end
86
86
 
87
87
  # @return [Boolean] did this scenario finish or is it still running
@@ -15,7 +15,7 @@ module Karafka
15
15
  attr_reader :name
16
16
 
17
17
  # How many times should we retry polling in case of a failure
18
- MAX_POLL_RETRIES = 10
18
+ MAX_POLL_RETRIES = 20
19
19
 
20
20
  private_constant :MAX_POLL_RETRIES
21
21
 
@@ -188,6 +188,7 @@ module Karafka
188
188
  tpl = topic_partition_list(topic, partition) || @paused_tpls[topic][partition]
189
189
 
190
190
  return unless tpl
191
+
191
192
  # If we did not have it, it means we never paused this partition, thus no resume should
192
193
  # happen in the first place
193
194
  return unless @paused_tpls[topic].delete(partition)
@@ -317,7 +318,7 @@ module Karafka
317
318
  Rdkafka::Consumer::TopicPartitionList.new({ topic => [rdkafka_partition] })
318
319
  end
319
320
 
320
- # Performs a single poll operation.
321
+ # Performs a single poll operation and handles retries and errors
321
322
  #
322
323
  # @param timeout [Integer] timeout for a single poll
323
324
  # @return [Rdkafka::Consumer::Message, nil] fetched message or nil if nothing polled
@@ -330,44 +331,45 @@ module Karafka
330
331
 
331
332
  @kafka.poll(timeout)
332
333
  rescue ::Rdkafka::RdkafkaError => e
333
- # We return nil, so we do not restart until running the whole loop
334
- # This allows us to run revocation jobs and other things and we will pick up new work
335
- # next time after dispatching all the things that are needed
336
- #
337
- # If we would retry here, the client reset would become transparent and we would not have
338
- # a chance to take any actions
339
- early_return = false
334
+ early_report = false
340
335
 
336
+ # There are retryable issues on which we want to report fast as they are a source of
337
+ # problems and can mean some bigger system instabilities
338
+ # Those are mainly network issues and exceeding the max poll interval
339
+ # We want to report early on max poll interval exceeding because it may mean that the
340
+ # underlying processing is taking too much time and it is not LRJ
341
341
  case e.code
342
342
  when :max_poll_exceeded # -147
343
- reset
344
- early_return = true
345
- when :transport # -195
346
- reset
347
- early_return = true
348
- when :not_coordinator # 16
349
- reset
350
- early_return = true
343
+ early_report = true
351
344
  when :network_exception # 13
352
- early_return = true
353
- when :rebalance_in_progress # -27
354
- early_return = true
355
- when :coordinator_load_in_progress # 14
356
- early_return = true
357
- when :unknown_topic_or_part
358
- # This is expected and temporary until rdkafka catches up with metadata
359
- early_return = true
345
+ early_report = true
346
+ when :transport # -195
347
+ early_report = true
348
+ end
349
+
350
+ retryable = time_poll.attempts <= MAX_POLL_RETRIES && time_poll.retryable?
351
+
352
+ if early_report || !retryable
353
+ Karafka.monitor.instrument(
354
+ 'error.occurred',
355
+ caller: self,
356
+ error: e,
357
+ type: 'connection.client.poll.error'
358
+ )
360
359
  end
361
360
 
362
- raise if time_poll.attempts > MAX_POLL_RETRIES
363
- raise unless time_poll.retryable?
361
+ raise unless retryable
362
+
363
+ # Most of the errors can be safely ignored as librdkafka will recover from them
364
+ # @see https://github.com/edenhill/librdkafka/issues/1987#issuecomment-422008750
365
+ # @see https://github.com/edenhill/librdkafka/wiki/Error-handling
364
366
 
365
367
  time_poll.checkpoint
366
368
  time_poll.backoff
367
369
 
368
- # On unknown errors we do our best to retry and handle them before raising unless we
369
- # decide to early return
370
- early_return ? nil : retry
370
+ # poll may not only return message but also can run callbacks and if they changed,
371
+ # despite the errors we need to delegate to the other app parts
372
+ @rebalance_manager.changed? ? nil : retry
371
373
  end
372
374
 
373
375
  # Builds a new rdkafka consumer instance based on the subscription group configuration
@@ -153,6 +153,11 @@ module Karafka
153
153
  when 'librdkafka.error'
154
154
  error "librdkafka internal error occurred: #{error}"
155
155
  error details
156
+ # Those will only occur when retries in the client fail and when they did not stop after
157
+ # backoffs
158
+ when 'connection.client.poll.error'
159
+ error "Data polling error occurred: #{error}"
160
+ error details
156
161
  else
157
162
  # This should never happen. Please contact the maintainers
158
163
  raise Errors::UnsupportedCaseError, event
@@ -92,7 +92,7 @@ module Karafka
92
92
  # If this is not a long-running job there is nothing for us to do here
93
93
  return unless topic.long_running_job?
94
94
 
95
- seek(coordinator.seek_offset)
95
+ seek(coordinator.seek_offset) unless revoked?
96
96
 
97
97
  resume
98
98
  else
@@ -21,7 +21,7 @@ module Karafka
21
21
  # We add it to make a multi-system development easier for people that don't use
22
22
  # kafka and don't understand the concept of consumer groups.
23
23
  def initialize(name)
24
- @name = name
24
+ @name = name.to_s
25
25
  @id = Karafka::App.config.consumer_mapper.call(name)
26
26
  @topics = Topics.new([])
27
27
  end
@@ -52,7 +52,7 @@ module Karafka
52
52
  def subscription_group=(name, &block)
53
53
  # We cast it here, so the routing supports symbol based but that's anyhow later on
54
54
  # validated as a string
55
- self.current_subscription_group_name = name.to_s
55
+ self.current_subscription_group_name = name
56
56
 
57
57
  Proxy.new(self, &block)
58
58
 
@@ -14,17 +14,22 @@ module Karafka
14
14
  instance_eval(&block)
15
15
  end
16
16
 
17
- # Translates the no "=" DSL of routing into elements assignments on target
18
- # @param method_name [Symbol] name of the missing method
19
- def method_missing(method_name, ...)
20
- return super unless respond_to_missing?(method_name)
17
+ # Ruby 2.7.0 to 2.7.2 do not have arg forwarding, so we fall back to the old way
18
+ arg_forwarding = RUBY_VERSION < '3.0' ? '*args, &block' : '...'
19
+
20
+ class_eval <<~RUBY, __FILE__, __LINE__ + 1
21
+ # Translates the no "=" DSL of routing into elements assignments on target
22
+ # @param method_name [Symbol] name of the missing method
23
+ def method_missing(method_name, #{arg_forwarding})
24
+ return super unless respond_to_missing?(method_name)
21
25
 
22
- if @target.respond_to?(:"#{method_name}=")
23
- @target.public_send(:"#{method_name}=", ...)
24
- else
25
- @target.public_send(method_name, ...)
26
+ if @target.respond_to?(:"\#{method_name}=")
27
+ @target.public_send(:"\#{method_name}=", #{arg_forwarding})
28
+ else
29
+ @target.public_send(method_name, #{arg_forwarding})
30
+ end
26
31
  end
27
- end
32
+ RUBY
28
33
 
29
34
  # Tells whether or not a given element exists on the target
30
35
  # @param method_name [Symbol] name of the missing method
@@ -10,6 +10,7 @@ module Karafka
10
10
  # @param message [Karafka::Messages::Message] Message object that we want to deserialize
11
11
  # @return [Hash] hash with deserialized JSON data
12
12
  def call(message)
13
+ # nil payload can be present for example for tombstone messages
13
14
  message.raw_payload.nil? ? nil : ::JSON.parse(message.raw_payload)
14
15
  end
15
16
  end
@@ -3,5 +3,5 @@
3
3
  # Main module namespace
4
4
  module Karafka
5
5
  # Current Karafka version
6
- VERSION = '2.0.9'
6
+ VERSION = '2.0.11'
7
7
  end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: karafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.0.9
4
+ version: 2.0.11
5
5
  platform: ruby
6
6
  authors:
7
7
  - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
35
35
  Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
36
36
  MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
37
37
  -----END CERTIFICATE-----
38
- date: 2022-09-22 00:00:00.000000000 Z
38
+ date: 2022-09-29 00:00:00.000000000 Z
39
39
  dependencies:
40
40
  - !ruby/object:Gem::Dependency
41
41
  name: karafka-core
metadata.gz.sig CHANGED
Binary file