karafka 2.3.0.alpha2 → 2.3.0.rc1

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 07bebe70b6697a90d04154dcbfe5837f5bcfaf934073f91bbfc9e8939d9d1a6c
4
- data.tar.gz: 13e41b276eee5142b55eb6908c8b9292bd8f802f470fb4e9bdc1f812dbd50189
3
+ metadata.gz: b07678937b0dc9c4154c90ccea2e9bbbc25aa2669461d937e859b77e134ddc48
4
+ data.tar.gz: a2e3eee8a9351a789dc33a1f78a48899a007da29b81205b4383e75a9fa0cc13c
5
5
  SHA512:
6
- metadata.gz: fef2cbded4409d951cf7e752f25b7db016052cc5f2a77c54ccf7e8778fbf44edba74cd1b04cf82caf341cf05beda552af5c4cbd994a510c54ad1d3c4e561fd17
7
- data.tar.gz: 00bf893d7c6f29e559530585c40ba721ab3a9f3ce5906cd7c5b6948f79725d6d99423444d9ec7e6f8593f9780aec3375e400986ed8478dfab2cbbabb5807a85d
6
+ metadata.gz: d2331d328849f4ffb65aa5deb921bf8d12d732f06b30acc5b17ef1fb844d5da23166dbcd27a6b53a6d57d76c6a7d508e9c5479e064a73aa6e0c2eaed69597c8a
7
+ data.tar.gz: da8afa92e4b5fc552319b15322b3b3e883c2ec5c2b46ec887d098d34f7a0fbd7a7408558a89d781699eb521309c8fe69d2df7e6a1f46de19259451fb3050e391
checksums.yaml.gz.sig CHANGED
Binary file
data/CHANGELOG.md CHANGED
@@ -14,6 +14,7 @@
14
14
  - [Enhancement] Provide an `:independent` configuration to DLQ allowing to reset pause count track on each marking as consumed when retrying.
15
15
  - [Enhancement] Remove no longer needed shutdown patches for `librdkafka` improving multi-sg shutdown times for `cooperative-sticky`.
16
16
  - [Enhancement] Allow for parallel closing of connections from independent consumer groups.
17
+ - [Enhancement] Provide recovery flow for cases where DLQ dispatch would fail.
17
18
  - [Change] Make `Kubernetes::LivenessListener` not start until Karafka app starts running.
18
19
  - [Change] Remove the legacy "inside of topics" way of defining subscription groups names
19
20
  - [Change] Update supported instrumentation to report on `#tick`.
data/Gemfile.lock CHANGED
@@ -1,8 +1,8 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- karafka (2.3.0.alpha2)
5
- karafka-core (>= 2.3.0.alpha1, < 2.4.0)
4
+ karafka (2.3.0.rc1)
5
+ karafka-core (>= 2.3.0.rc1, < 2.4.0)
6
6
  waterdrop (>= 2.6.12, < 3.0.0)
7
7
  zeitwerk (~> 2.3)
8
8
 
@@ -23,7 +23,7 @@ GEM
23
23
  mutex_m
24
24
  tzinfo (~> 2.0)
25
25
  base64 (0.2.0)
26
- bigdecimal (3.1.5)
26
+ bigdecimal (3.1.6)
27
27
  byebug (11.1.3)
28
28
  concurrent-ruby (1.2.3)
29
29
  connection_pool (2.4.1)
@@ -39,7 +39,7 @@ GEM
39
39
  activesupport (>= 6.1)
40
40
  i18n (1.14.1)
41
41
  concurrent-ruby (~> 1.0)
42
- karafka-core (2.3.0.alpha1)
42
+ karafka-core (2.3.0.rc1)
43
43
  karafka-rdkafka (>= 0.14.7, < 0.15.0)
44
44
  karafka-rdkafka (0.14.7)
45
45
  ffi (~> 1.15)
@@ -52,11 +52,11 @@ GEM
52
52
  roda (~> 3.68, >= 3.69)
53
53
  tilt (~> 2.0)
54
54
  mini_portile2 (2.8.5)
55
- minitest (5.20.0)
55
+ minitest (5.21.2)
56
56
  mutex_m (0.2.0)
57
57
  rack (3.0.8)
58
58
  rake (13.1.0)
59
- roda (3.75.0)
59
+ roda (3.76.0)
60
60
  rack
61
61
  rspec (3.12.0)
62
62
  rspec-core (~> 3.12.0)
@@ -100,4 +100,4 @@ DEPENDENCIES
100
100
  simplecov
101
101
 
102
102
  BUNDLED WITH
103
- 2.5.3
103
+ 2.5.4
@@ -75,6 +75,7 @@ en:
75
75
  dead_letter_queue.topic_format: 'needs to be a string with a Kafka accepted format'
76
76
  dead_letter_queue.active_format: needs to be either true or false
77
77
  dead_letter_queue.independent_format: needs to be either true or false
78
+ dead_letter_queue.transactional_format: needs to be either true or false
78
79
  active_format: needs to be either true or false
79
80
  declaratives.partitions_format: needs to be more or equal to 1
80
81
  declaratives.active_format: needs to be true
data/karafka.gemspec CHANGED
@@ -21,7 +21,7 @@ Gem::Specification.new do |spec|
21
21
  without having to focus on things that are not your business domain.
22
22
  DESC
23
23
 
24
- spec.add_dependency 'karafka-core', '>= 2.3.0.alpha1', '< 2.4.0'
24
+ spec.add_dependency 'karafka-core', '>= 2.3.0.rc1', '< 2.4.0'
25
25
  spec.add_dependency 'waterdrop', '>= 2.6.12', '< 3.0.0'
26
26
  spec.add_dependency 'zeitwerk', '~> 2.3'
27
27
 
@@ -79,10 +79,25 @@ module Karafka
79
79
  end
80
80
 
81
81
  # @private
82
+ #
82
83
  # @note This should not be used by the end users as it is part of the lifecycle of things but
83
84
  # not as part of the public api.
85
+ #
86
+ # @note We handle and report errors here because of flows that could fail. For example a DLQ
87
+ # flow could fail if it was not able to dispatch the DLQ message. Other "non-user" based
88
+ # flows do not interact with external systems and their errors are expected to bubble up
84
89
  def on_after_consume
85
90
  handle_after_consume
91
+ rescue StandardError => e
92
+ Karafka.monitor.instrument(
93
+ 'error.occurred',
94
+ error: e,
95
+ caller: self,
96
+ seek_offset: coordinator.seek_offset,
97
+ type: 'consumer.after_consume.error'
98
+ )
99
+
100
+ retry_after_pause
86
101
  end
87
102
 
88
103
  # Can be used to run code prior to scheduling of idle execution
@@ -248,7 +248,10 @@ module Karafka
248
248
  error "Consumer on shutdown failed due to an error: #{error}"
249
249
  error details
250
250
  when 'consumer.tick.error'
251
- error "Consumer tick failed due to an error: #{error}"
251
+ error "Consumer on tick failed due to an error: #{error}"
252
+ error details
253
+ when 'consumer.after_consume.error'
254
+ error "Consumer on after_consume failed due to an error: #{error}"
252
255
  error details
253
256
  when 'worker.process.error'
254
257
  fatal "Worker processing failed due to an error: #{error}"
@@ -107,8 +107,23 @@ module Karafka
107
107
  # allow to mark offsets inside of the transaction. If the transaction is initialized
108
108
  # only from the consumer, the offset will be stored in a regular fashion.
109
109
  #
110
+ # @param active_producer [WaterDrop::Producer] alternative producer instance we may want
111
+ # to use. It is useful when we have connection pool or any other selective engine for
112
+ # managing multiple producers. If not provided, default producer taken from `#producer`
113
+ # will be used.
114
+ #
110
115
  # @param block [Proc] code that we want to run in a transaction
111
- def transaction(&block)
116
+ #
117
+ # @note Please note, that if you provide the producer, it will reassign the producer of
118
+ # the consumer for the transaction time. This means, that in case you would even
119
+ # accidentally refer to `Consumer#producer` from other threads, it will contain the
120
+ # reassigned producer and not the initially used/assigned producer. It is done that
121
+ # way, so the message producing aliases operate from within transactions and since the
122
+ # producer in transaction is locked, it will prevent other threads from using it.
123
+ def transaction(active_producer = producer, &block)
124
+ default_producer = producer
125
+ self.producer = active_producer
126
+
112
127
  transaction_started = false
113
128
 
114
129
  # Prevent from nested transactions. It would not make any sense
@@ -138,6 +153,8 @@ module Karafka
138
153
  marking.pop ? mark_as_consumed(*marking) : mark_as_consumed!(*marking)
139
154
  end
140
155
  ensure
156
+ self.producer = default_producer
157
+
141
158
  if transaction_started
142
159
  @_transaction_marked.clear
143
160
  @_in_transaction = false
@@ -84,9 +84,7 @@ module Karafka
84
84
  else
85
85
  # We reset the pause to indicate we will now consider it as "ok".
86
86
  coordinator.pause_tracker.reset
87
- skippable_message, = find_skippable_message
88
- dispatch_to_dlq(skippable_message) if dispatch_to_dlq?
89
- mark_as_consumed(skippable_message)
87
+ dispatch_if_needed_and_mark_as_consumed
90
88
  pause(coordinator.seek_offset, nil, false)
91
89
  end
92
90
  end
@@ -133,6 +131,25 @@ module Karafka
133
131
  )
134
132
  end
135
133
 
134
+ # Dispatches the message to the DLQ (when needed and when applicable based on settings)
135
+ # and marks this message as consumed for non MOM flows.
136
+ #
137
+ # If producer is transactional and config allows, uses transaction to do that
138
+ def dispatch_if_needed_and_mark_as_consumed
139
+ skippable_message, = find_skippable_message
140
+
141
+ dispatch = lambda do
142
+ dispatch_to_dlq(skippable_message) if dispatch_to_dlq?
143
+ mark_as_consumed(skippable_message)
144
+ end
145
+
146
+ if dispatch_in_a_transaction?
147
+ transaction { dispatch.call }
148
+ else
149
+ dispatch.call
150
+ end
151
+ end
152
+
136
153
  # @param skippable_message [Array<Karafka::Messages::Message>]
137
154
  # @return [Hash] dispatch DLQ message
138
155
  def build_dlq_message(skippable_message)
@@ -168,6 +185,13 @@ module Karafka
168
185
  def dispatch_to_dlq?
169
186
  topic.dead_letter_queue.topic
170
187
  end
188
+
189
+ # @return [Boolean] should we use a transaction to move the data to the DLQ.
190
+ # We can do it only when producer is transactional and configuration for DLQ
191
+ # transactional dispatches is not set to false.
192
+ def dispatch_in_a_transaction?
193
+ producer.transactional? && topic.dead_letter_queue.transactional?
194
+ end
171
195
  end
172
196
  end
173
197
  end
@@ -46,9 +46,9 @@ module Karafka
46
46
  retry_after_pause
47
47
  else
48
48
  coordinator.pause_tracker.reset
49
- skippable_message, = find_skippable_message
50
- dispatch_to_dlq(skippable_message) if dispatch_to_dlq?
51
- mark_as_consumed(skippable_message)
49
+
50
+ dispatch_if_needed_and_mark_as_consumed
51
+
52
52
  pause(coordinator.seek_offset, nil, false)
53
53
  end
54
54
  end
@@ -60,9 +60,8 @@ module Karafka
60
60
 
61
61
  return resume if revoked?
62
62
 
63
- skippable_message, = find_skippable_message
64
- dispatch_to_dlq(skippable_message) if dispatch_to_dlq?
65
- mark_as_consumed(skippable_message)
63
+ dispatch_if_needed_and_mark_as_consumed
64
+
66
65
  pause(coordinator.seek_offset, nil, false)
67
66
  end
68
67
  end
@@ -49,9 +49,8 @@ module Karafka
49
49
 
50
50
  return resume if revoked?
51
51
 
52
- skippable_message, = find_skippable_message
53
- dispatch_to_dlq(skippable_message) if dispatch_to_dlq?
54
- mark_as_consumed(skippable_message)
52
+ dispatch_if_needed_and_mark_as_consumed
53
+
55
54
  pause(coordinator.seek_offset, nil, false)
56
55
  end
57
56
  end
@@ -13,10 +13,13 @@ module Karafka
13
13
  :topic,
14
14
  # Should retries be handled collectively on a batch or independently per message
15
15
  :independent,
16
+ # Move to DLQ and mark as consumed in transactional mode (if applicable)
17
+ :transactional,
16
18
  keyword_init: true
17
19
  ) do
18
20
  alias_method :active?, :active
19
21
  alias_method :independent?, :independent
22
+ alias_method :transactional?, :transactional
20
23
  end
21
24
  end
22
25
  end
@@ -20,6 +20,7 @@ module Karafka
20
20
  required(:active) { |val| [true, false].include?(val) }
21
21
  required(:independent) { |val| [true, false].include?(val) }
22
22
  required(:max_retries) { |val| val.is_a?(Integer) && val >= 0 }
23
+ required(:transactional) { |val| [true, false].include?(val) }
23
24
  end
24
25
 
25
26
  # Validate topic name only if dlq is active
@@ -16,17 +16,21 @@ module Karafka
16
16
  # if we do not want to move it anywhere and just skip
17
17
  # @param independent [Boolean] needs to be true in order for each marking as consumed
18
18
  # in a retry flow to reset the errors counter
19
+ # @param transactional [Boolean] if applicable, should transaction be used to move
20
+ # given message to the dead-letter topic and mark it as consumed.
19
21
  # @return [Config] defined config
20
22
  def dead_letter_queue(
21
23
  max_retries: DEFAULT_MAX_RETRIES,
22
24
  topic: nil,
23
- independent: false
25
+ independent: false,
26
+ transactional: true
24
27
  )
25
28
  @dead_letter_queue ||= Config.new(
26
29
  active: !topic.nil?,
27
30
  max_retries: max_retries,
28
31
  topic: topic,
29
- independent: independent
32
+ independent: independent,
33
+ transactional: transactional
30
34
  )
31
35
  end
32
36
 
@@ -3,5 +3,5 @@
3
3
  # Main module namespace
4
4
  module Karafka
5
5
  # Current Karafka version
6
- VERSION = '2.3.0.alpha2'
6
+ VERSION = '2.3.0.rc1'
7
7
  end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: karafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.3.0.alpha2
4
+ version: 2.3.0.rc1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
35
35
  AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
36
36
  msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
37
37
  -----END CERTIFICATE-----
38
- date: 2024-01-17 00:00:00.000000000 Z
38
+ date: 2024-01-21 00:00:00.000000000 Z
39
39
  dependencies:
40
40
  - !ruby/object:Gem::Dependency
41
41
  name: karafka-core
@@ -43,7 +43,7 @@ dependencies:
43
43
  requirements:
44
44
  - - ">="
45
45
  - !ruby/object:Gem::Version
46
- version: 2.3.0.alpha1
46
+ version: 2.3.0.rc1
47
47
  - - "<"
48
48
  - !ruby/object:Gem::Version
49
49
  version: 2.4.0
@@ -53,7 +53,7 @@ dependencies:
53
53
  requirements:
54
54
  - - ">="
55
55
  - !ruby/object:Gem::Version
56
- version: 2.3.0.alpha1
56
+ version: 2.3.0.rc1
57
57
  - - "<"
58
58
  - !ruby/object:Gem::Version
59
59
  version: 2.4.0
metadata.gz.sig CHANGED
Binary file