waterdrop 2.6.9 → 2.6.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: a4811e6ec1f3cbea82e4604e95b9cd3deddce9337e9fd935f786bd97d008e914
-   data.tar.gz: baf54ded0ca2e0ec90e659c199138a36932cd5e2f8d6afffbfdb6b770ebbd29c
+   metadata.gz: 3798d61ff3464dd6aaeb21f5457230a3b079748f260c59138142144efc62dc40
+   data.tar.gz: c9699bc202a67fbb8e456a4e0d316f56e8635e0986a09d0429a25999553c4a76
  SHA512:
-   metadata.gz: 4b2b33cfd43d78ae534c14780accb6193a6ec6ff930168c7c7e2813eb9e2599702c2db923b05793a7366f236e9d51250b07894c7cb11b8f1eca81cb79bafa3b7
-   data.tar.gz: bde20c38e6ebbac05db6085381928516c050512adffd9a4a36a14c80edfed9788b8375edc1e058f38d809cf7862d742f4116fe94efca6fddc5bb0242d4ba216f
+   metadata.gz: c6a6d03d205314775b71ca0c4e922ef52159965e30b9280727712f25368f6af78c821d6d468b2362bd4094fbd2d4747195dc1e3a7cd76a0e4582f81a66829b43
+   data.tar.gz: 9541ae05afde65cf1124661fd57e23afb4265601748c6b4a60f6fc6f8cabe463cf59b6ac683e7d047e454cc671182808f800add0f68b780bbb1b1325abcab892
checksums.yaml.gz.sig CHANGED
Binary file
data/CHANGELOG.md CHANGED
@@ -1,5 +1,14 @@
  # WaterDrop changelog
 
+ ## 2.6.11 (2023-10-25)
+ - [Enhancement] Return delivery handles and delivery reports for both dummy and buffered clients, with proper topics, partitions and offsets assigned and offsets auto-incremented per partition.
+ - [Fix] Fix a case where the buffered test client would not accumulate messages on failed transactions.
+
+ ## 2.6.10 (2023-10-24)
+ - [Improvement] Introduce a `message.purged` event to indicate that a message that was not delivered to Kafka was purged. Most of the time this refers to messages that were part of a transaction and had not yet been dispatched to Kafka. It always means the given message was not delivered, but for transactions this is expected. For non-transactional producers it usually means `#purge` usage or exceeding `message.timeout.ms`, after which `librdkafka` removes the message from its internal queue. Non-transactional producers do **not** use this event and pipe purges to `error.occurred` instead.
+ - [Fix] Fix a case where `message.acknowledged` would not have the `caller` key.
+ - [Fix] Fix a bug where critical errors (like `IRB::Abort`) would not abort the ongoing transaction.
+
  ## 2.6.9 (2023-10-23)
  - [Improvement] Introduce a `transaction.finished` event to indicate that a transaction has finished, whether it was aborted or committed.
  - [Improvement] Use the `transaction.committed` event to indicate that a transaction has been committed.
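The new `message.purged` event from 2.6.10 can be observed through the regular WaterDrop monitor. A minimal sketch, assuming a transactional producer (the broker address and `transactional.id` below are illustrative placeholders):

```ruby
require 'waterdrop'

producer = WaterDrop::Producer.new do |config|
  config.kafka = {
    'bootstrap.servers': 'localhost:9092', # placeholder broker
    'transactional.id': 'example-tx-id'    # placeholder transactional id
  }
end

# Payload keys mirror what the Delivery callback publishes further down in this
# diff: caller, error, producer_id, offset, partition, topic, delivery_report.
producer.monitor.subscribe('message.purged') do |event|
  puts "Purged #{event[:topic]}/#{event[:partition]}: #{event[:error]}"
end
```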
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
    remote: .
    specs:
-     waterdrop (2.6.9)
+     waterdrop (2.6.11)
        karafka-core (>= 2.2.3, < 3.0.0)
        zeitwerk (~> 2.3)
 
@@ -6,21 +6,12 @@ module WaterDrop
      class Buffered < Clients::Dummy
        attr_accessor :messages
 
-       # Sync fake response for the message delivery to Kafka, since we do not dispatch anything
-       class SyncResponse
-         # @param _args Handler wait arguments (irrelevant as waiting is fake here)
-         def wait(*_args)
-           false
-         end
-       end
-
        # @param args [Object] anything accepted by `Clients::Dummy`
        def initialize(*args)
          super
          @messages = []
          @topics = Hash.new { |k, v| k[v] = [] }
 
-         @transaction_mutex = Mutex.new
          @transaction_active = false
          @transaction_messages = []
          @transaction_topics = Hash.new { |k, v| k[v] = [] }
@@ -29,6 +20,7 @@ module WaterDrop
 
        # "Produces" message to Kafka: it acknowledges it locally, adds it to the internal buffer
        # @param message [Hash] `WaterDrop::Producer#produce_sync` message hash
+       # @return [Dummy::Handle] fake delivery handle that can be materialized into a report
        def produce(message)
          if @transaction_active
            @transaction_topics[message.fetch(:topic)] << message
@@ -39,29 +31,20 @@ module WaterDrop
            @messages << message
          end
 
-         SyncResponse.new
+         super(**message.to_h)
        end
 
-       # Yields the code pretending it is in a transaction
-       # Supports our aborting transaction flow
-       # Moves messages the appropriate buffers only if transaction is successful
-       def transaction
+       # Starts the transaction on a given level
+       def begin_transaction
          @transaction_level += 1
-
-         return yield if @transaction_mutex.owned?
-
-         @transaction_mutex.lock
          @transaction_active = true
+       end
 
-         result = nil
-         commit = false
-
-         catch(:abort) do
-           result = yield
-           commit = true
-         end
+       # Finishes given level of transaction
+       def commit_transaction
+         @transaction_level -= 1
 
-         commit || raise(WaterDrop::Errors::AbortTransaction)
+         return unless @transaction_level.zero?
 
          # Transfer transactional data on success
          @transaction_topics.each do |topic, messages|
@@ -70,20 +53,20 @@ module WaterDrop
 
          @messages += @transaction_messages
 
-         result
-       rescue StandardError => e
-         return if e.is_a?(WaterDrop::Errors::AbortTransaction)
+         @transaction_topics.clear
+         @transaction_messages.clear
+         @transaction_active = false
+       end
 
-         raise
-       ensure
+       # Aborts the transaction
+       def abort_transaction
          @transaction_level -= 1
 
-         if @transaction_level.zero? && @transaction_mutex.owned?
-           @transaction_topics.clear
-           @transaction_messages.clear
-           @transaction_active = false
-           @transaction_mutex.unlock
-         end
+         return unless @transaction_level.zero?
+
+         @transaction_topics.clear
+         @transaction_messages.clear
+         @transaction_active = false
        end
 
        # Returns messages produced to a given topic
@@ -95,6 +78,10 @@ module WaterDrop
        # Clears internal buffer
        # Used in between specs so messages do not leak out
        def reset
+         @transaction_level = 0
+         @transaction_active = false
+         @transaction_topics.clear
+         @transaction_messages.clear
          @messages.clear
          @topics.each_value(&:clear)
        end
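With the mutex and block-based `#transaction` removed, the buffered test client now only tracks transaction state through `begin_transaction` / `commit_transaction` / `abort_transaction`, which the producer drives for it. A rough sketch of the resulting behaviour when the client is exercised directly (the `producer` argument and topic name are placeholders):

```ruby
client = WaterDrop::Clients::Buffered.new(producer)

# Messages produced inside an aborted transaction never reach the buffer
client.begin_transaction
client.produce(topic: 'events', payload: '{}')
client.abort_transaction
client.messages #=> []

# Committing the outermost transaction moves them into the buffer
client.begin_transaction
client.produce(topic: 'events', payload: '{}')
client.commit_transaction
client.messages.size #=> 1
```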
@@ -5,19 +5,53 @@ module WaterDrop
    # A dummy client that is supposed to be used instead of Rdkafka::Producer in case we don't
    # want to dispatch anything to Kafka.
    #
-   # It does not store anything and just ignores messages.
+   # It does not store anything and just ignores messages. It does however return proper delivery
+   # handle that can be materialized into a report.
    class Dummy
+     # `::Rdkafka::Producer::DeliveryHandle` object API compatible dummy object
+     class Handle < ::Rdkafka::Producer::DeliveryHandle
+       # @param topic [String] topic where we want to dispatch message
+       # @param partition [Integer] target partition
+       # @param offset [Integer] offset assigned by our fake "Kafka"
+       def initialize(topic, partition, offset)
+         @topic = topic
+         @partition = partition
+         @offset = offset
+       end
+
+       # Does not wait, just creates the result
+       #
+       # @param _args [Array] anything the wait handle would accept
+       # @return [::Rdkafka::Producer::DeliveryReport]
+       def wait(*_args)
+         create_result
+       end
+
+       # Creates a delivery report with details where the message went
+       #
+       # @return [::Rdkafka::Producer::DeliveryReport]
+       def create_result
+         ::Rdkafka::Producer::DeliveryReport.new(
+           @partition,
+           @offset,
+           @topic
+         )
+       end
+     end
+
      # @param _producer [WaterDrop::Producer]
      # @return [Dummy] dummy instance
      def initialize(_producer)
-       @counter = -1
+       @counters = Hash.new { |h, k| h[k] = -1 }
      end
 
-     # Dummy method for returning the delivery report
-     # @param _args [Object] anything that the delivery handle accepts
-     # @return [::Rdkafka::Producer::DeliveryReport]
-     def wait(*_args)
-       ::Rdkafka::Producer::DeliveryReport.new(0, @counter += 1)
+     # "Produces" the message
+     # @param topic [String, Symbol] topic where we want to dispatch message
+     # @param partition [Integer] target partition
+     # @param _args [Hash] remaining details that are ignored in the dummy mode
+     # @return [Handle] delivery handle
+     def produce(topic:, partition: 0, **_args)
+       Handle.new(topic.to_s, partition, @counters["#{topic}#{partition}"] += 1)
      end
 
      # @param _args [Object] anything really, this dummy is suppose to support anything
@@ -25,26 +59,6 @@ module WaterDrop
        true
      end
 
-     # Yields the code pretending it is in a transaction
-     # Supports our aborting transaction flow
-     def transaction
-       result = nil
-       commit = false
-
-       catch(:abort) do
-         result = yield
-         commit = true
-       end
-
-       commit || raise(WaterDrop::Errors::AbortTransaction)
-
-       result
-     rescue StandardError => e
-       return if e.is_a?(WaterDrop::Errors::AbortTransaction)
-
-       raise
-     end
-
      # @param _args [Object] anything really, this dummy is suppose to support anything
      # @return [self] returns self for chaining cases
      def method_missing(*_args)
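Instead of a single `#wait` stub with one global counter, the dummy client now responds to `#produce` with an rdkafka-compatible handle and keeps a separate offset counter per topic and partition. Roughly (the `producer` argument is a placeholder):

```ruby
client = WaterDrop::Clients::Dummy.new(producer)

handle = client.produce(topic: 'events', partition: 0, payload: 'ignored')
report = handle.wait # no blocking, the report is built immediately

report.topic_name #=> "events"
report.partition  #=> 0
report.offset     #=> 0

# Offsets auto-increment independently for each topic/partition pair
client.produce(topic: 'events', partition: 0).wait.offset #=> 1
client.produce(topic: 'events', partition: 1).wait.offset #=> 0
```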
@@ -19,6 +19,7 @@ module WaterDrop
          # callbacks manager to make it work
          client.delivery_callback = Instrumentation::Callbacks::Delivery.new(
            producer.id,
+           producer.transactional?,
            producer.config.monitor
          )
 
@@ -6,19 +6,41 @@ module WaterDrop
      # Creates a callable that we want to run upon each message delivery or failure
      #
      # @note We don't have to provide client_name here as this callback is per client instance
+     #
+     # @note We do not consider `message.purge` as an error for transactional producers, because
+     #   this is a standard behaviour for not yet dispatched messages on aborted transactions.
+     #   We do however still want to instrument it for traceability.
      class Delivery
+       # Error emitted when a message was not yet dispatched and was purged from the queue
+       RD_KAFKA_RESP_PURGE_QUEUE = -152
+
+       # Error emitted when a message was purged while it was dispatched
+       RD_KAFKA_RESP_PURGE_INFLIGHT = -151
+
+       # Errors related to queue purging that is expected in transactions
+       PURGE_ERRORS = [RD_KAFKA_RESP_PURGE_INFLIGHT, RD_KAFKA_RESP_PURGE_QUEUE].freeze
+
+       private_constant :RD_KAFKA_RESP_PURGE_QUEUE, :RD_KAFKA_RESP_PURGE_INFLIGHT, :PURGE_ERRORS
+
        # @param producer_id [String] id of the current producer
+       # @param transactional [Boolean] is this handle for a transactional or regular producer
        # @param monitor [WaterDrop::Instrumentation::Monitor] monitor we are using
-       def initialize(producer_id, monitor)
+       def initialize(producer_id, transactional, monitor)
          @producer_id = producer_id
+         @transactional = transactional
          @monitor = monitor
        end
 
        # Emits delivery details to the monitor
        # @param delivery_report [Rdkafka::Producer::DeliveryReport] delivery report
        def call(delivery_report)
-         if delivery_report.error.to_i.zero?
+         error_code = delivery_report.error.to_i
+
+         if error_code.zero?
            instrument_acknowledged(delivery_report)
+
+         elsif @transactional && PURGE_ERRORS.include?(error_code)
+           instrument_purged(delivery_report)
          else
            instrument_error(delivery_report)
          end
@@ -27,24 +49,24 @@ module WaterDrop
        private
 
        # @param delivery_report [Rdkafka::Producer::DeliveryReport] delivery report
-       def instrument_error(delivery_report)
+       def instrument_acknowledged(delivery_report)
          @monitor.instrument(
-           'error.occurred',
+           'message.acknowledged',
            caller: self,
-           error: ::Rdkafka::RdkafkaError.new(delivery_report.error),
            producer_id: @producer_id,
            offset: delivery_report.offset,
            partition: delivery_report.partition,
            topic: delivery_report.topic_name,
-           delivery_report: delivery_report,
-           type: 'librdkafka.dispatch_error'
+           delivery_report: delivery_report
          )
        end
 
        # @param delivery_report [Rdkafka::Producer::DeliveryReport] delivery report
-       def instrument_acknowledged(delivery_report)
+       def instrument_purged(delivery_report)
          @monitor.instrument(
-           'message.acknowledged',
+           'message.purged',
+           caller: self,
+           error: build_error(delivery_report),
            producer_id: @producer_id,
            offset: delivery_report.offset,
            partition: delivery_report.partition,
@@ -52,6 +74,28 @@ module WaterDrop
            delivery_report: delivery_report
          )
        end
+
+       # @param delivery_report [Rdkafka::Producer::DeliveryReport] delivery report
+       def instrument_error(delivery_report)
+         @monitor.instrument(
+           'error.occurred',
+           caller: self,
+           error: build_error(delivery_report),
+           producer_id: @producer_id,
+           offset: delivery_report.offset,
+           partition: delivery_report.partition,
+           topic: delivery_report.topic_name,
+           delivery_report: delivery_report,
+           type: 'librdkafka.dispatch_error'
+         )
+       end
+
+       # Builds appropriate rdkafka error
+       # @param delivery_report [Rdkafka::Producer::DeliveryReport] delivery report
+       # @return [::Rdkafka::RdkafkaError]
+       def build_error(delivery_report)
+         ::Rdkafka::RdkafkaError.new(delivery_report.error)
+       end
      end
    end
  end
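Because the `Delivery` callback now needs to know whether it serves a transactional producer, the builder hunk earlier in this diff passes `producer.transactional?` as the new second constructor argument. A sketch of the resulting routing; the `producer` and `delivery_report` objects below are purely illustrative:

```ruby
# Mirrors the constructor call from the producer builder hunk earlier in this diff
callback = WaterDrop::Instrumentation::Callbacks::Delivery.new(
  producer.id,             # producer, its config and monitor are assumed to exist
  producer.transactional?,
  producer.config.monitor
)

# Each report librdkafka hands back is routed by its error code:
#   0                    -> message.acknowledged
#   -152 / -151 (purges) -> message.purged (transactional producers only)
#   anything else        -> error.occurred
callback.call(delivery_report) # delivery_report: an Rdkafka::Producer::DeliveryReport
```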
@@ -12,6 +12,7 @@ module WaterDrop
        message.produced_async
        message.produced_sync
        message.acknowledged
+       message.purged
        message.buffered
 
        messages.produced_async
@@ -3,5 +3,5 @@
  # WaterDrop library
  module WaterDrop
    # Current WaterDrop version
-   VERSION = '2.6.9'
+   VERSION = '2.6.11'
  end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: waterdrop
  version: !ruby/object:Gem::Version
-   version: 2.6.9
+   version: 2.6.11
  platform: ruby
  authors:
  - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
    AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
    msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
    -----END CERTIFICATE-----
- date: 2023-10-23 00:00:00.000000000 Z
+ date: 2023-10-25 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: karafka-core
metadata.gz.sig CHANGED
Binary file