waterdrop 2.6.10 → 2.6.12
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +21 -6
- data/.ruby-version +1 -1
- data/CHANGELOG.md +10 -0
- data/Gemfile.lock +27 -14
- data/config/locales/errors.yml +6 -0
- data/docker-compose.yml +3 -1
- data/lib/waterdrop/clients/buffered.rb +31 -39
- data/lib/waterdrop/clients/dummy.rb +41 -27
- data/lib/waterdrop/contracts/transactional_offset.rb +21 -0
- data/lib/waterdrop/errors.rb +9 -0
- data/lib/waterdrop/helpers/counter.rb +27 -0
- data/lib/waterdrop/instrumentation/callbacks/delivery.rb +6 -2
- data/lib/waterdrop/instrumentation/logger_listener.rb +14 -0
- data/lib/waterdrop/instrumentation/notifications.rb +1 -0
- data/lib/waterdrop/producer/buffer.rb +1 -1
- data/lib/waterdrop/producer/transactions.rb +52 -3
- data/lib/waterdrop/producer.rb +8 -5
- data/lib/waterdrop/version.rb +1 -1
- data/lib/waterdrop.rb +0 -1
- data.tar.gz.sig +0 -0
- metadata +5 -3
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b69e4151ceb0d03f8ac34e05793f96e4907791c5dbdc49f5beeffa7eb02d5bcf
+  data.tar.gz: fef1a647d06d7f7ab3f26045f1ce22e7b2b40476c31b7526198291a55d73c828
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 271f3de27ae484cb95b6dbc0718bbe59119262035f130511d7ef84071b598dd3e06695f233ad9335c7e318e5da754b3ec2852b1655c23d28ee2d88d67aff901c
+  data.tar.gz: eabff4ab9ea4c832054583d52dd83d003d27333394d8630133b08d3f46d9c11278c6bd76dd4408750f4d0c41654eb2d70d94f83fbedf2250dfa0b2538a6e8e30
checksums.yaml.gz.sig
CHANGED
Binary file
data/.github/workflows/ci.yml
CHANGED
@@ -1,6 +1,8 @@
 name: ci
 
-concurrency:
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
 
 on:
   pull_request:
@@ -16,13 +18,13 @@ jobs:
       fail-fast: false
       matrix:
         ruby:
-          - '3.3
+          - '3.3'
           - '3.2'
           - '3.1'
           - '3.0'
           - '2.7'
         include:
-          - ruby: '3.
+          - ruby: '3.3'
             coverage: 'true'
     steps:
       - uses: actions/checkout@v4
@@ -35,6 +37,7 @@ jobs:
         with:
          ruby-version: ${{matrix.ruby}}
          bundler-cache: true
+          bundler: 'latest'
 
      - name: Run Kafka with docker-compose
        run: |
@@ -46,13 +49,25 @@ jobs:
 
      - name: Install latest bundler
        run: |
-
+          if [[ "$(ruby -v | awk '{print $2}')" == 2.7.8* ]]; then
+            gem install bundler -v 2.4.22 --no-document
+            gem update --system 3.4.22 --no-document
+          else
+            gem install bundler --no-document
+            gem update --system --no-document
+          fi
+
          bundle config set without 'tools benchmarks docs'
 
      - name: Bundle install
        run: |
          bundle config set without development
-
+
+          if [[ "$(ruby -v | awk '{print $2}')" == 2.7.8* ]]; then
+            BUNDLER_VERSION=2.4.22 bundle install --jobs 4 --retry 3
+          else
+            bundle install --jobs 4 --retry 3
+          fi
 
      - name: Run all tests
        env:
@@ -70,7 +85,7 @@ jobs:
      - name: Set up Ruby
        uses: ruby/setup-ruby@v1
        with:
-          ruby-version: 3.
+          ruby-version: 3.3
      - name: Install latest bundler
        run: gem install bundler --no-document
      - name: Install Diffend plugin
data/.ruby-version
CHANGED
@@ -1 +1 @@
-3.
+3.3.0
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,15 @@
 # WaterDrop changelog
 
+## 2.6.12 (2024-01-03)
+- [Enhancement] Provide ability to label message dispatches for increased observability.
+- [Enhancement] Provide ability to commit offset during the transaction with a consumer provided.
+- [Change] Change transactional message purged error type from `message.error` to `librdkafka.dispatch_error` to align with the non-transactional error type.
+- [Change] Remove usage of concurrent ruby.
+
+## 2.6.11 (2023-10-25)
+- [Enhancement] Return delivery handles and delivery report for both dummy and buffered clients with proper topics, partitions and offsets assign and auto-increment offsets per partition.
+- [Fix] Fix a case where buffered test client would not accumulate messages on failed transactions
+
 ## 2.6.10 (2023-10-24)
 - [Improvement] Introduce `message.purged` event to indicate that a message that was not delivered to Kafka was purged. This most of the time refers to messages that were part of a transaction and were not yet dispatched to Kafka. It always means, that given message was not delivered but in case of transactions it is expected. In case of non-transactional it usually means `#purge` usage or exceeding `message.timeout.ms` so `librdkafka` removes this message from its internal queue. Non-transactional producers do **not** use this and pipe purges to `error.occurred`.
 - [Fix] Fix a case where `message.acknowledged` would not have `caller` key.
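Editor's illustration: the two 2.6.12 enhancements above surface roughly as follows for a transactional producer working together with a Karafka/rdkafka consumer. The `label:` message key and the `raw_payload` call are assumptions made for this sketch, not something the diff shows directly.

  producer.transaction do
    # Assumed: a label passed with the message later shows up as delivery_report.label
    # in the instrumentation payloads extended further below
    producer.produce_async(
      topic: 'events.enriched',
      payload: message.raw_payload, # assumed Karafka message API
      label: "enrich-#{message.offset}"
    )

    # Confirmed by this diff: commits message.offset + 1 for the consumer group
    # within the same transaction (see producer/transactions.rb below)
    producer.transaction_mark_as_consumed(consumer, message)
  end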
data/Gemfile.lock
CHANGED
@@ -1,37 +1,48 @@
 PATH
   remote: .
   specs:
-    waterdrop (2.6.
+    waterdrop (2.6.12)
       karafka-core (>= 2.2.3, < 3.0.0)
       zeitwerk (~> 2.3)
 
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (7.
+    activesupport (7.1.2)
+      base64
+      bigdecimal
       concurrent-ruby (~> 1.0, >= 1.0.2)
+      connection_pool (>= 2.2.5)
+      drb
       i18n (>= 1.6, < 2)
       minitest (>= 5.1)
+      mutex_m
       tzinfo (~> 2.0)
+    base64 (0.2.0)
+    bigdecimal (3.1.5)
     byebug (11.1.3)
     concurrent-ruby (1.2.2)
+    connection_pool (2.4.1)
     diff-lcs (1.5.0)
     docile (1.4.0)
-
+    drb (2.2.0)
+      ruby2_keywords
+    factory_bot (6.4.5)
       activesupport (>= 5.0.0)
-    ffi (1.
+    ffi (1.16.3)
     i18n (1.14.1)
       concurrent-ruby (~> 1.0)
-    karafka-core (2.2.
+    karafka-core (2.2.7)
       concurrent-ruby (>= 1.1)
-      karafka-rdkafka (>= 0.13.
-    karafka-rdkafka (0.
+      karafka-rdkafka (>= 0.13.9, < 0.15.0)
+    karafka-rdkafka (0.14.6)
       ffi (~> 1.15)
       mini_portile2 (~> 2.6)
       rake (> 12)
-    mini_portile2 (2.8.
-    minitest (5.
-
+    mini_portile2 (2.8.5)
+    minitest (5.20.0)
+    mutex_m (0.2.0)
+    rake (13.1.0)
     rspec (3.12.0)
       rspec-core (~> 3.12.0)
       rspec-expectations (~> 3.12.0)
@@ -41,10 +52,11 @@ GEM
     rspec-expectations (3.12.3)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.12.0)
-    rspec-mocks (3.12.
+    rspec-mocks (3.12.6)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.12.0)
-    rspec-support (3.12.
+    rspec-support (3.12.1)
+    ruby2_keywords (0.0.5)
     simplecov (0.22.0)
       docile (~> 1.1)
       simplecov-html (~> 0.11)
@@ -53,9 +65,10 @@ GEM
     simplecov_json_formatter (0.1.4)
     tzinfo (2.0.6)
       concurrent-ruby (~> 1.0)
-    zeitwerk (2.6.
+    zeitwerk (2.6.12)
 
 PLATFORMS
+  ruby
   x86_64-linux
 
 DEPENDENCIES
@@ -66,4 +79,4 @@ DEPENDENCIES
   waterdrop!
 
 BUNDLED WITH
-   2.
+   2.5.3
data/config/locales/errors.yml
CHANGED
@@ -27,6 +27,12 @@ en:
       headers_invalid_key_type: all headers keys need to be of type String
       headers_invalid_value_type: all headers values need to be of type String
 
+    transactional_offset:
+      consumer_format: 'must respond to #consumer_group_metadata_pointer method'
+      message_format: 'must respond to #topic, #partition and #offset'
+      missing: must be present
+      offset_metadata_format: must be string or nil
+
     test:
       missing: must be present
       nested.id_format: 'is invalid'
data/docker-compose.yml
CHANGED
@@ -3,7 +3,7 @@ version: '2'
 services:
   kafka:
     container_name: kafka
-    image: confluentinc/cp-kafka:7.5.
+    image: confluentinc/cp-kafka:7.5.3
 
     ports:
       - 9092:9092
@@ -23,3 +23,5 @@ services:
       KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
       KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
       KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+      KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
+      KAFKA_AUTHORIZER_CLASS_NAME: org.apache.kafka.metadata.authorizer.StandardAuthorizer
data/lib/waterdrop/clients/buffered.rb
CHANGED
@@ -6,21 +6,12 @@ module WaterDrop
     class Buffered < Clients::Dummy
       attr_accessor :messages
 
-      # Sync fake response for the message delivery to Kafka, since we do not dispatch anything
-      class SyncResponse
-        # @param _args Handler wait arguments (irrelevant as waiting is fake here)
-        def wait(*_args)
-          false
-        end
-      end
-
       # @param args [Object] anything accepted by `Clients::Dummy`
       def initialize(*args)
         super
         @messages = []
         @topics = Hash.new { |k, v| k[v] = [] }
 
-        @transaction_mutex = Mutex.new
         @transaction_active = false
         @transaction_messages = []
         @transaction_topics = Hash.new { |k, v| k[v] = [] }
@@ -29,6 +20,7 @@ module WaterDrop
 
       # "Produces" message to Kafka: it acknowledges it locally, adds it to the internal buffer
       # @param message [Hash] `WaterDrop::Producer#produce_sync` message hash
+      # @return [Dummy::Handle] fake delivery handle that can be materialized into a report
       def produce(message)
         if @transaction_active
           @transaction_topics[message.fetch(:topic)] << message
@@ -39,29 +31,18 @@ module WaterDrop
           @messages << message
         end
 
-
+        super(**message.to_h)
       end
 
-      #
-
-      # Moves messages the appropriate buffers only if transaction is successful
-      def transaction
+      # Starts the transaction on a given level
+      def begin_transaction
         @transaction_level += 1
-
-        return yield if @transaction_mutex.owned?
-
-        @transaction_mutex.lock
         @transaction_active = true
+      end
 
-
-
-
-        catch(:abort) do
-          result = yield
-          commit = true
-        end
-
-        commit || raise(WaterDrop::Errors::AbortTransaction)
+      # Finishes given level of transaction
+      def commit_transaction
+        @transaction_level -= 1
 
         # Transfer transactional data on success
         @transaction_topics.each do |topic, messages|
@@ -70,20 +51,27 @@ module WaterDrop
 
         @messages += @transaction_messages
 
-
-
-
+        @transaction_topics.clear
+        @transaction_messages.clear
+        @transaction_active = false
+      end
 
-
-
-
+      # Fakes storing the offset in a transactional fashion
+      #
+      # @param _consumer [#consumer_group_metadata_pointer] any consumer from which we can obtain
+      #   the librdkafka consumer group metadata pointer
+      # @param _tpl [Rdkafka::Consumer::TopicPartitionList] consumer tpl for offset storage
+      # @param _timeout [Integer] ms timeout
+      def send_offsets_to_transaction(_consumer, _tpl, _timeout)
+        nil
+      end
 
-
-
-
-
-
-
+      # Aborts the transaction
+      def abort_transaction
+        @transaction_level -= 1
+        @transaction_topics.clear
+        @transaction_messages.clear
+        @transaction_active = false
       end
 
       # Returns messages produced to a given topic
@@ -95,6 +83,10 @@ module WaterDrop
       # Clears internal buffer
       # Used in between specs so messages do not leak out
       def reset
+        @transaction_level = 0
+        @transaction_active = false
+        @transaction_topics.clear
+        @transaction_messages.clear
         @messages.clear
         @topics.each_value(&:clear)
       end
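Editor's sketch of how this buffered client is typically wired up in specs; the `client_class` and `kafka` settings shown here are assumptions about the producer configuration rather than part of this diff.

  producer = WaterDrop::Producer.new do |config|
    # Assumed setting names; any transactional kafka config works for the sketch
    config.kafka = { 'bootstrap.servers': 'localhost:9092', 'transactional.id': 'test' }
    config.client_class = WaterDrop::Clients::Buffered
  end

  producer.produce_sync(topic: 'events', payload: 'a')

  producer.transaction do
    producer.produce_sync(topic: 'events', payload: 'b')
  end

  producer.client.messages.size # => 2, the transactional message is kept after commit
  producer.client.reset         # clears buffers and transaction state between specs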
data/lib/waterdrop/clients/dummy.rb
CHANGED
@@ -5,19 +5,53 @@ module WaterDrop
     # A dummy client that is supposed to be used instead of Rdkafka::Producer in case we don't
     # want to dispatch anything to Kafka.
     #
-    # It does not store anything and just ignores messages.
+    # It does not store anything and just ignores messages. It does however return proper delivery
+    # handle that can be materialized into a report.
     class Dummy
+      # `::Rdkafka::Producer::DeliveryHandle` object API compatible dummy object
+      class Handle < ::Rdkafka::Producer::DeliveryHandle
+        # @param topic [String] topic where we want to dispatch message
+        # @param partition [Integer] target partition
+        # @param offset [Integer] offset assigned by our fake "Kafka"
+        def initialize(topic, partition, offset)
+          @topic = topic
+          @partition = partition
+          @offset = offset
+        end
+
+        # Does not wait, just creates the result
+        #
+        # @param _args [Array] anything the wait handle would accept
+        # @return [::Rdkafka::Producer::DeliveryReport]
+        def wait(*_args)
+          create_result
+        end
+
+        # Creates a delivery report with details where the message went
+        #
+        # @return [::Rdkafka::Producer::DeliveryReport]
+        def create_result
+          ::Rdkafka::Producer::DeliveryReport.new(
+            @partition,
+            @offset,
+            @topic
+          )
+        end
+      end
+
       # @param _producer [WaterDrop::Producer]
       # @return [Dummy] dummy instance
       def initialize(_producer)
-        @
+        @counters = Hash.new { |h, k| h[k] = -1 }
       end
 
-      #
-      # @param
-      # @
-
-
+      # "Produces" the message
+      # @param topic [String, Symbol] topic where we want to dispatch message
+      # @param partition [Integer] target partition
+      # @param _args [Hash] remaining details that are ignored in the dummy mode
+      # @return [Handle] delivery handle
+      def produce(topic:, partition: 0, **_args)
+        Handle.new(topic.to_s, partition, @counters["#{topic}#{partition}"] += 1)
       end
 
       # @param _args [Object] anything really, this dummy is suppose to support anything
@@ -25,26 +59,6 @@ module WaterDrop
         true
       end
 
-      # Yields the code pretending it is in a transaction
-      # Supports our aborting transaction flow
-      def transaction
-        result = nil
-        commit = false
-
-        catch(:abort) do
-          result = yield
-          commit = true
-        end
-
-        commit || raise(WaterDrop::Errors::AbortTransaction)
-
-        result
-      rescue StandardError => e
-        return if e.is_a?(WaterDrop::Errors::AbortTransaction)
-
-        raise
-      end
-
       # @param _args [Object] anything really, this dummy is suppose to support anything
       # @return [self] returns self for chaining cases
       def method_missing(*_args)
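Editor's sketch of the new handle/report behaviour, using the client standalone purely to show the per topic+partition auto-incremented offsets (counters start at -1, so the first dispatch gets offset 0).

  require 'waterdrop'

  client = WaterDrop::Clients::Dummy.new(nil)

  h1 = client.produce(topic: 'events', partition: 0, payload: 'a')
  h2 = client.produce(topic: 'events', partition: 0, payload: 'b')

  h1.wait.offset     # => 0
  h2.wait.offset     # => 1
  h2.wait.topic_name # => 'events'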
data/lib/waterdrop/contracts/transactional_offset.rb
ADDED
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module WaterDrop
+  module Contracts
+    # Contract to ensure that arguments provided to the transactional offset commit are valid
+    # and match our expectations
+    class TransactionalOffset < ::Karafka::Core::Contractable::Contract
+      configure do |config|
+        config.error_messages = YAML.safe_load(
+          File.read(
+            File.join(WaterDrop.gem_root, 'config', 'locales', 'errors.yml')
+          )
+        ).fetch('en').fetch('validations').fetch('transactional_offset')
+      end
+
+      required(:consumer) { |val| val.respond_to?(:consumer_group_metadata_pointer) }
+      required(:message) { |val| val.respond_to?(:topic) && val.respond_to?(:partition) }
+      required(:offset_metadata) { |val| val.is_a?(String) || val.nil? }
+    end
+  end
+end
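In the gem this contract is only used internally (via `CONTRACT.validate!(..., Errors::TransactionalOffsetInvalidError)` in the transactions module shown later). As an editor's sketch, direct use would look roughly like this, with `consumer` and `message` as placeholders:

  contract = WaterDrop::Contracts::TransactionalOffset.new

  contract.call(
    consumer: consumer,   # must respond to #consumer_group_metadata_pointer
    message: message,     # must respond to #topic and #partition
    offset_metadata: nil  # String or nil
  ).success? # => true when the input satisfies the rules above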
data/lib/waterdrop/errors.rb
CHANGED
@@ -25,6 +25,9 @@ module WaterDrop
     # Raised when we want to send a message that is invalid (impossible topic, etc)
     MessageInvalidError = Class.new(BaseError)
 
+    # Raised when we want to commit transactional offset and the input is invalid
+    TransactionalOffsetInvalidError = Class.new(BaseError)
+
     # Raised when we've got an unexpected status. This should never happen. If it does, please
     # contact us as it is an error.
     StatusInvalidError = Class.new(BaseError)
@@ -32,7 +35,13 @@ module WaterDrop
     # Raised when there is an inline error during single message produce operations
     ProduceError = Class.new(BaseError)
 
+    # Raised when we attempt to perform operation that is only allowed inside of a transaction and
+    # there is no transaction around us
+    TransactionRequiredError = Class.new(BaseError)
+
     # Raise it within a transaction to abort it
+    # It does not have an `Error` postfix because technically it is not an error as it is used for
+    # graceful transaction aborting
     AbortTransaction = Class.new(BaseError)
 
     # Raised when during messages producing something bad happened inline
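For context, `AbortTransaction` is the one meant to be raised from user code; a short editor's sketch (the `stop_condition` predicate is hypothetical):

  producer.transaction do
    producer.produce_async(topic: 'events', payload: 'will not be committed')

    # Raising AbortTransaction rolls the transaction back gracefully; WaterDrop
    # rescues it instead of propagating it
    raise WaterDrop::Errors::AbortTransaction if stop_condition
  end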
data/lib/waterdrop/helpers/counter.rb
ADDED
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module WaterDrop
+  # Extra internal helper objects
+  module Helpers
+    # Atomic counter that we can safely increment and decrement without race conditions
+    class Counter
+      # @return [Integer] current value
+      attr_reader :value
+
+      def initialize
+        @value = 0
+        @mutex = Mutex.new
+      end
+
+      # Increments the value by 1
+      def increment
+        @mutex.synchronize { @value += 1 }
+      end
+
+      # Decrements the value by 1
+      def decrement
+        @mutex.synchronize { @value -= 1 }
+      end
+    end
+  end
+end
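This counter replaces the concurrent-ruby atomic previously used for tracking in-flight operations (see the producer diff below); usage is plain:

  counter = WaterDrop::Helpers::Counter.new
  counter.increment
  counter.increment
  counter.decrement
  counter.value # => 1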
data/lib/waterdrop/instrumentation/callbacks/delivery.rb
CHANGED
@@ -57,7 +57,8 @@ module WaterDrop
             offset: delivery_report.offset,
             partition: delivery_report.partition,
             topic: delivery_report.topic_name,
-            delivery_report: delivery_report
+            delivery_report: delivery_report,
+            label: delivery_report.label
           )
         end
 
@@ -71,7 +72,9 @@ module WaterDrop
             offset: delivery_report.offset,
             partition: delivery_report.partition,
             topic: delivery_report.topic_name,
-            delivery_report: delivery_report
+            delivery_report: delivery_report,
+            label: delivery_report.label,
+            type: 'librdkafka.dispatch_error'
           )
         end
 
@@ -86,6 +89,7 @@ module WaterDrop
             partition: delivery_report.partition,
             topic: delivery_report.topic_name,
             delivery_report: delivery_report,
+            label: delivery_report.label,
             type: 'librdkafka.dispatch_error'
           )
         end
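Editor's sketch of how the label surfaces to user code once it is part of these payloads; event names follow the existing WaterDrop notifications.

  producer.monitor.subscribe('message.acknowledged') do |event|
    puts "#{event[:topic]}/#{event[:partition]}@#{event[:offset]} label=#{event[:label]}"
  end

  producer.monitor.subscribe('error.occurred') do |event|
    # 'librdkafka.dispatch_error' now also covers purged transactional messages
    next unless event[:type] == 'librdkafka.dispatch_error'

    puts "dispatch failed, label=#{event[:label]}"
  end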
data/lib/waterdrop/instrumentation/logger_listener.rb
CHANGED
@@ -145,6 +145,20 @@ module WaterDrop
         info(event, 'Committing transaction')
       end
 
+      # @param event [Dry::Events::Event] event that happened with the details
+      def on_transaction_marked_as_consumed(event)
+        message = event[:message]
+        topic = message.topic
+        partition = message.partition
+        offset = message.offset
+        loc = "#{topic}/#{partition}"
+
+        info(
+          event,
+          "Marking message with offset #{offset} for topic #{loc} as consumed in a transaction"
+        )
+      end
+
       # @param event [Dry::Events::Event] event that happened with the details
       def on_transaction_finished(event)
         info(event, 'Processing transaction')
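To see this log line the listener has to be subscribed to the producer monitor; a minimal sketch:

  require 'logger'

  producer.monitor.subscribe(
    WaterDrop::Instrumentation::LoggerListener.new(Logger.new($stdout))
  )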
data/lib/waterdrop/producer/transactions.rb
CHANGED
@@ -4,6 +4,11 @@ module WaterDrop
   class Producer
     # Transactions related producer functionalities
     module Transactions
+      # Contract to validate that input for transactional offset storage is correct
+      CONTRACT = Contracts::TransactionalOffset.new
+
+      private_constant :CONTRACT
+
       # Creates a transaction.
       #
       # Karafka transactions work in a similar manner to SQL db transactions though there are some
@@ -91,6 +96,49 @@ module WaterDrop
         @transactional = config.kafka.to_h.key?(:'transactional.id')
       end
 
+      # Marks given message as consumed inside of a transaction.
+      #
+      # @param consumer [#consumer_group_metadata_pointer] any consumer from which we can obtain
+      #   the librdkafka consumer group metadata pointer
+      # @param message [Karafka::Messages::Message] karafka message
+      # @param offset_metadata [String] offset metadata or nil if none
+      def transaction_mark_as_consumed(consumer, message, offset_metadata = nil)
+        raise Errors::TransactionRequiredError unless @transaction_mutex.owned?
+
+        CONTRACT.validate!(
+          {
+            consumer: consumer,
+            message: message,
+            offset_metadata: offset_metadata
+          },
+          Errors::TransactionalOffsetInvalidError
+        )
+
+        details = { message: message, offset_metadata: offset_metadata }
+
+        transactional_instrument(:marked_as_consumed, details) do
+          tpl = Rdkafka::Consumer::TopicPartitionList.new
+          partition = Rdkafka::Consumer::Partition.new(
+            message.partition,
+            # +1 because this is next offset from which we will start processing from
+            message.offset + 1,
+            0,
+            offset_metadata
+          )
+
+          tpl.add_topic_and_partitions_with_offsets(message.topic, [partition])
+
+          with_transactional_error_handling(:store_offset) do
+            client.send_offsets_to_transaction(
+              consumer,
+              tpl,
+              # This setting is at the moment in seconds and we require ms
+              @config.max_wait_timeout * 1_000
+            )
+          end
+        end
+      end
+
       private
 
       # Runs provided code with a transaction wrapper if transactions are enabled.
@@ -105,9 +153,10 @@ module WaterDrop
       # Instruments the transactional operation with producer id
       #
       # @param key [Symbol] transaction operation key
+      # @param details [Hash] additional instrumentation details
       # @param block [Proc] block to run inside the instrumentation or nothing if not given
-      def transactional_instrument(key, &block)
-        @monitor.instrument("transaction.#{key}", producer_id: id, &block)
+      def transactional_instrument(key, details = EMPTY_HASH, &block)
+        @monitor.instrument("transaction.#{key}", details.merge(producer_id: id), &block)
       end
 
       # Error handling for transactional operations is a bit special. There are three types of
@@ -157,7 +206,7 @@ module WaterDrop
         # Always attempt to abort but if aborting fails with an abortable error, do not attempt
         # to abort from abort as this could create an infinite loop
         with_transactional_error_handling(:abort, allow_abortable: false) do
-          transactional_instrument(:aborted) {
+          transactional_instrument(:aborted) { client.abort_transaction }
         end
 
         raise
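Worth noting from the guards at the top of `transaction_mark_as_consumed`: it refuses to run outside of a `#transaction` block and validates its arguments via the contract, so misuse fails fast (editor's sketch):

  producer.transaction_mark_as_consumed(consumer, message)
  # => WaterDrop::Errors::TransactionRequiredError (no surrounding #transaction block)

  producer.transaction do
    producer.transaction_mark_as_consumed(consumer, message, 123)
    # => WaterDrop::Errors::TransactionalOffsetInvalidError (metadata must be String or nil)
  end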
data/lib/waterdrop/producer.rb
CHANGED
@@ -16,7 +16,10 @@ module WaterDrop
       Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
     ].freeze
 
-
+    # Empty has to save on memory allocations
+    EMPTY_HASH = {}.freeze
+
+    private_constant :SUPPORTED_FLOW_ERRORS, :EMPTY_HASH
 
     def_delegators :config, :middleware
 
@@ -24,7 +27,7 @@ module WaterDrop
     attr_reader :id
     # @return [Status] producer status object
     attr_reader :status
-    # @return [
+    # @return [Array] internal messages buffer
     attr_reader :messages
     # @return [Object] monitor we want to use
     attr_reader :monitor
@@ -35,14 +38,14 @@ module WaterDrop
     # @param block [Proc] configuration block
     # @return [Producer] producer instance
     def initialize(&block)
-      @operations_in_progress =
+      @operations_in_progress = Helpers::Counter.new
       @buffer_mutex = Mutex.new
       @connecting_mutex = Mutex.new
       @operating_mutex = Mutex.new
       @transaction_mutex = Mutex.new
 
       @status = Status.new
-      @messages =
+      @messages = []
 
       return unless block
 
@@ -127,7 +130,7 @@ module WaterDrop
     def purge
       @monitor.instrument('buffer.purged', producer_id: id) do
         @buffer_mutex.synchronize do
-          @messages =
+          @messages = []
         end
 
         @client.purge
data/lib/waterdrop/version.rb
CHANGED
data/lib/waterdrop.rb
CHANGED
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: waterdrop
 version: !ruby/object:Gem::Version
-  version: 2.6.
+  version: 2.6.12
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date:
+date: 2024-01-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: karafka-core
@@ -102,7 +102,9 @@ files:
 - lib/waterdrop/contracts.rb
 - lib/waterdrop/contracts/config.rb
 - lib/waterdrop/contracts/message.rb
+- lib/waterdrop/contracts/transactional_offset.rb
 - lib/waterdrop/errors.rb
+- lib/waterdrop/helpers/counter.rb
 - lib/waterdrop/instrumentation/callbacks/delivery.rb
 - lib/waterdrop/instrumentation/callbacks/error.rb
 - lib/waterdrop/instrumentation/callbacks/statistics.rb
@@ -149,7 +151,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.5.3
 signing_key:
 specification_version: 4
 summary: Kafka messaging made easy!
metadata.gz.sig
CHANGED
Binary file