waterdrop 2.6.10 → 2.6.11

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: f8108bc92f548809ab7220d8d36a03ceb72e283dd651d2d2fc0629bb81a36fc8
- data.tar.gz: a9bbb98af4e8b1425082fd5816e1441c91a263acabbeb987b75d7d598fe72d8c
+ metadata.gz: 3798d61ff3464dd6aaeb21f5457230a3b079748f260c59138142144efc62dc40
+ data.tar.gz: c9699bc202a67fbb8e456a4e0d316f56e8635e0986a09d0429a25999553c4a76
  SHA512:
- metadata.gz: 3e11745fc2986fcdc1ac8c5b4a8f4eeeb71c812b961eb296585e71d12d3601e9fcf124ad02d219db049f774a6417f6a293ce9cf5ce282ff5b70a05067fd310ec
- data.tar.gz: 9488e3cf3753077bf790dbbd815889214dc0e66a4de871e460dd25a287ee3df309a14eed28969bf41cc77c76b4b3eaa64cd2c44a8f8f1556d78828cdf0a08659
+ metadata.gz: c6a6d03d205314775b71ca0c4e922ef52159965e30b9280727712f25368f6af78c821d6d468b2362bd4094fbd2d4747195dc1e3a7cd76a0e4582f81a66829b43
+ data.tar.gz: 9541ae05afde65cf1124661fd57e23afb4265601748c6b4a60f6fc6f8cabe463cf59b6ac683e7d047e454cc671182808f800add0f68b780bbb1b1325abcab892
checksums.yaml.gz.sig CHANGED
Binary file
data/CHANGELOG.md CHANGED
@@ -1,5 +1,9 @@
  # WaterDrop changelog
 
+ ## 2.6.11 (2023-10-25)
+ - [Enhancement] Return delivery handles and delivery reports for both dummy and buffered clients, with proper topic, partition and offset assignment and per-partition auto-incremented offsets.
+ - [Fix] Fix a case where the buffered test client would not accumulate messages on failed transactions.
+
  ## 2.6.10 (2023-10-24)
  - [Improvement] Introduce `message.purged` event to indicate that a message that was not delivered to Kafka was purged. This most of the time refers to messages that were part of a transaction and were not yet dispatched to Kafka. It always means that a given message was not delivered, but in the case of transactions this is expected. For non-transactional producers it usually means `#purge` usage or exceeding `message.timeout.ms`, so `librdkafka` removes the message from its internal queue. Non-transactional producers do **not** use this event and pipe purges to `error.occurred`.
  - [Fix] Fix a case where `message.acknowledged` would not have the `caller` key.
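The 2.6.11 enhancement above means that, in tests, `produce` calls against the dummy and buffered clients now return a delivery handle that can be waited on for a report with real-looking coordinates. Below is a minimal sketch derived from the code in this diff, not from the gem's docs: it instantiates the test client directly and passes `nil` for the producer (which these clients ignore); the topic name and payload are placeholders.

```ruby
require 'waterdrop'

# Buffered test client from this release; `nil` stands in for the producer,
# which Clients::Dummy#initialize ignores anyway.
client = WaterDrop::Clients::Buffered.new(nil)

# Keyword arguments collapse into the message hash accepted by Buffered#produce
handle = client.produce(topic: 'events', partition: 0, payload: '{}')
report = handle.wait # materializes a ::Rdkafka::Producer::DeliveryReport without blocking

report.partition # => 0
report.offset    # => 0  (offsets auto-increment per topic-partition)

client.produce(topic: 'events', partition: 0, payload: '{}').wait.offset # => 1
```

In a real test suite you would normally let WaterDrop wire the buffered client in for you rather than build it by hand; the direct calls here only illustrate the new return values.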
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- waterdrop (2.6.10)
+ waterdrop (2.6.11)
  karafka-core (>= 2.2.3, < 3.0.0)
  zeitwerk (~> 2.3)
 
data/lib/waterdrop/clients/buffered.rb CHANGED
@@ -6,21 +6,12 @@ module WaterDrop
  class Buffered < Clients::Dummy
  attr_accessor :messages
 
- # Sync fake response for the message delivery to Kafka, since we do not dispatch anything
- class SyncResponse
- # @param _args Handler wait arguments (irrelevant as waiting is fake here)
- def wait(*_args)
- false
- end
- end
-
  # @param args [Object] anything accepted by `Clients::Dummy`
  def initialize(*args)
  super
  @messages = []
  @topics = Hash.new { |k, v| k[v] = [] }
 
- @transaction_mutex = Mutex.new
  @transaction_active = false
  @transaction_messages = []
  @transaction_topics = Hash.new { |k, v| k[v] = [] }
@@ -29,6 +20,7 @@ module WaterDrop
 
  # "Produces" message to Kafka: it acknowledges it locally, adds it to the internal buffer
  # @param message [Hash] `WaterDrop::Producer#produce_sync` message hash
+ # @return [Dummy::Handle] fake delivery handle that can be materialized into a report
  def produce(message)
  if @transaction_active
  @transaction_topics[message.fetch(:topic)] << message
@@ -39,29 +31,20 @@ module WaterDrop
  @messages << message
  end
 
- SyncResponse.new
+ super(**message.to_h)
  end
 
- # Yields the code pretending it is in a transaction
- # Supports our aborting transaction flow
- # Moves messages to the appropriate buffers only if the transaction is successful
- def transaction
+ # Starts the transaction on a given level
+ def begin_transaction
  @transaction_level += 1
-
- return yield if @transaction_mutex.owned?
-
- @transaction_mutex.lock
  @transaction_active = true
+ end
 
- result = nil
- commit = false
-
- catch(:abort) do
- result = yield
- commit = true
- end
+ # Finishes the given level of transaction
+ def commit_transaction
+ @transaction_level -= 1
 
- commit || raise(WaterDrop::Errors::AbortTransaction)
+ return unless @transaction_level.zero?
 
  # Transfer transactional data on success
  @transaction_topics.each do |topic, messages|
@@ -70,20 +53,20 @@ module WaterDrop
 
  @messages += @transaction_messages
 
- result
- rescue StandardError => e
- return if e.is_a?(WaterDrop::Errors::AbortTransaction)
+ @transaction_topics.clear
+ @transaction_messages.clear
+ @transaction_active = false
+ end
 
- raise
- ensure
+ # Aborts the transaction
+ def abort_transaction
  @transaction_level -= 1
 
- if @transaction_level.zero? && @transaction_mutex.owned?
- @transaction_topics.clear
- @transaction_messages.clear
- @transaction_active = false
- @transaction_mutex.unlock
- end
+ return unless @transaction_level.zero?
+
+ @transaction_topics.clear
+ @transaction_messages.clear
+ @transaction_active = false
  end
 
  # Returns messages produced to a given topic
@@ -95,6 +78,10 @@ module WaterDrop
  # Clears internal buffer
  # Used in between specs so messages do not leak out
  def reset
+ @transaction_level = 0
+ @transaction_active = false
+ @transaction_topics.clear
+ @transaction_messages.clear
  @messages.clear
  @topics.each_value(&:clear)
  end
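The block-based `transaction` method is gone from the buffered client and is replaced by the three primitives above, which the producer's transaction flow is expected to call. A minimal sketch of their level-based semantics, derived from the diffed code (direct calls on a hand-built client; `nil` and the topic/payload values are placeholders):

```ruby
client = WaterDrop::Clients::Buffered.new(nil)

# Committed transactions move messages into the main buffer
client.begin_transaction
client.produce(topic: 'events', payload: 'a')
client.commit_transaction
client.messages.size # => 1

# Aborted transactions discard their transactional buffers
client.begin_transaction
client.produce(topic: 'events', payload: 'b')
client.abort_transaction
client.messages.size # => 1

# Nested levels only flush once the outermost commit brings the level back to zero
client.begin_transaction
client.begin_transaction
client.produce(topic: 'events', payload: 'c')
client.commit_transaction # inner level: nothing transferred yet
client.messages.size      # => 1
client.commit_transaction # outermost level: transfer happens here
client.messages.size      # => 2
```

Note that `#reset` now also zeroes the transaction level and clears the transactional buffers, so transaction state cannot leak from one spec into the next.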
data/lib/waterdrop/clients/dummy.rb CHANGED
@@ -5,19 +5,53 @@ module WaterDrop
  # A dummy client that is supposed to be used instead of Rdkafka::Producer in case we don't
  # want to dispatch anything to Kafka.
  #
- # It does not store anything and just ignores messages.
+ # It does not store anything and just ignores messages. It does however return a proper delivery
+ # handle that can be materialized into a report.
  class Dummy
+ # `::Rdkafka::Producer::DeliveryHandle` object API compatible dummy object
+ class Handle < ::Rdkafka::Producer::DeliveryHandle
+ # @param topic [String] topic where we want to dispatch the message
+ # @param partition [Integer] target partition
+ # @param offset [Integer] offset assigned by our fake "Kafka"
+ def initialize(topic, partition, offset)
+ @topic = topic
+ @partition = partition
+ @offset = offset
+ end
+
+ # Does not wait, just creates the result
+ #
+ # @param _args [Array] anything the wait handle would accept
+ # @return [::Rdkafka::Producer::DeliveryReport]
+ def wait(*_args)
+ create_result
+ end
+
+ # Creates a delivery report with details where the message went
+ #
+ # @return [::Rdkafka::Producer::DeliveryReport]
+ def create_result
+ ::Rdkafka::Producer::DeliveryReport.new(
+ @partition,
+ @offset,
+ @topic
+ )
+ end
+ end
+
  # @param _producer [WaterDrop::Producer]
  # @return [Dummy] dummy instance
  def initialize(_producer)
- @counter = -1
+ @counters = Hash.new { |h, k| h[k] = -1 }
  end
 
- # Dummy method for returning the delivery report
- # @param _args [Object] anything that the delivery handle accepts
- # @return [::Rdkafka::Producer::DeliveryReport]
- def wait(*_args)
- ::Rdkafka::Producer::DeliveryReport.new(0, @counter += 1)
+ # "Produces" the message
+ # @param topic [String, Symbol] topic where we want to dispatch the message
+ # @param partition [Integer] target partition
+ # @param _args [Hash] remaining details that are ignored in the dummy mode
+ # @return [Handle] delivery handle
+ def produce(topic:, partition: 0, **_args)
+ Handle.new(topic.to_s, partition, @counters["#{topic}#{partition}"] += 1)
  end
 
  # @param _args [Object] anything really, this dummy is supposed to support anything
@@ -25,26 +59,6 @@ module WaterDrop
  true
  end
 
- # Yields the code pretending it is in a transaction
- # Supports our aborting transaction flow
- def transaction
- result = nil
- commit = false
-
- catch(:abort) do
- result = yield
- commit = true
- end
-
- commit || raise(WaterDrop::Errors::AbortTransaction)
-
- result
- rescue StandardError => e
- return if e.is_a?(WaterDrop::Errors::AbortTransaction)
-
- raise
- end
-
  # @param _args [Object] anything really, this dummy is supposed to support anything
  # @return [self] returns self for chaining cases
  def method_missing(*_args)
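The rewritten dummy client replaces the single global counter with one counter per topic-partition and returns a `Handle` instead of a bare report. A minimal sketch of the resulting behaviour, based only on the code above (client built by hand with a `nil` producer; topic names are placeholders):

```ruby
client = WaterDrop::Clients::Dummy.new(nil)

client.produce(topic: 'events', partition: 0).wait.offset # => 0
client.produce(topic: 'events', partition: 0).wait.offset # => 1
client.produce(topic: 'events', partition: 1).wait.offset # => 0 (independent counter)
client.produce(topic: 'logs').wait.offset                 # => 0 (partition defaults to 0)

handle = client.produce(topic: 'events', partition: 0)
report = handle.create_result # same report as #wait, which never actually blocks
report.partition              # => 0
report.offset                 # => 2
```

Because `Handle` subclasses `::Rdkafka::Producer::DeliveryHandle`, test code that waits on handles returned by the real producer keeps working unchanged against the dummy and buffered clients.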
data/lib/waterdrop/version.rb CHANGED
@@ -3,5 +3,5 @@
  # WaterDrop library
  module WaterDrop
  # Current WaterDrop version
- VERSION = '2.6.10'
+ VERSION = '2.6.11'
  end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: waterdrop
  version: !ruby/object:Gem::Version
- version: 2.6.10
+ version: 2.6.11
  platform: ruby
  authors:
  - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
  AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
  msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
  -----END CERTIFICATE-----
- date: 2023-10-24 00:00:00.000000000 Z
+ date: 2023-10-25 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: karafka-core
metadata.gz.sig CHANGED
Binary file