waterdrop 1.4.3 → 2.0.0.rc1
This diff represents the content of publicly available package versions released to one of the supported registries; it is provided for informational purposes only and reflects the changes between those versions as they appear in the public registries.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/workflows/ci.yml +3 -25
- data/.gitignore +2 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +8 -9
- data/Gemfile +9 -0
- data/Gemfile.lock +78 -57
- data/LICENSE +165 -0
- data/README.md +200 -57
- data/certs/mensfeld.pem +21 -21
- data/config/errors.yml +3 -16
- data/lib/water_drop/config.rb +42 -143
- data/lib/water_drop/contracts/config.rb +8 -121
- data/lib/water_drop/contracts/message.rb +41 -0
- data/lib/water_drop/contracts.rb +0 -2
- data/lib/water_drop/errors.rb +30 -5
- data/lib/water_drop/instrumentation/monitor.rb +16 -22
- data/lib/water_drop/instrumentation/stdout_listener.rb +113 -32
- data/lib/water_drop/producer/async.rb +51 -0
- data/lib/water_drop/producer/buffer.rb +113 -0
- data/lib/water_drop/producer/builder.rb +63 -0
- data/lib/water_drop/producer/dummy_client.rb +32 -0
- data/lib/water_drop/producer/statistics_decorator.rb +71 -0
- data/lib/water_drop/producer/status.rb +52 -0
- data/lib/water_drop/producer/sync.rb +65 -0
- data/lib/water_drop/producer.rb +142 -0
- data/lib/water_drop/version.rb +1 -1
- data/lib/water_drop.rb +4 -24
- data/waterdrop.gemspec +5 -5
- data.tar.gz.sig +0 -0
- metadata +47 -48
- metadata.gz.sig +0 -0
- data/MIT-LICENCE +0 -18
- data/lib/water_drop/async_producer.rb +0 -26
- data/lib/water_drop/base_producer.rb +0 -57
- data/lib/water_drop/config_applier.rb +0 -52
- data/lib/water_drop/contracts/message_options.rb +0 -19
- data/lib/water_drop/sync_producer.rb +0 -24
data/lib/water_drop/producer/dummy_client.rb ADDED

@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module WaterDrop
+  class Producer
+    # A dummy client that is supposed to be used instead of Rdkafka::Producer in case we don't
+    # want to dispatch anything to Kafka
+    class DummyClient
+      # @return [DummyClient] dummy instance
+      def initialize
+        @counter = -1
+      end
+
+      # Dummy method for returning the delivery report
+      # @param _args [Object] anything that the delivery handle accepts
+      # @return [::Rdkafka::Producer::DeliveryReport]
+      def wait(*_args)
+        ::Rdkafka::Producer::DeliveryReport.new(0, @counter += 1)
+      end
+
+      # @param _args [Object] anything really, this dummy is suppose to support anything
+      def respond_to_missing?(*_args)
+        true
+      end
+
+      # @param _args [Object] anything really, this dummy is suppose to support anything
+      # @return [self] returns self for chaining cases
+      def method_missing(*_args)
+        self || super
+      end
+    end
+  end
+end
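For orientation, a minimal sketch of how this dummy behaves (illustrative, not part of the diff): any producer-style call is swallowed via method_missing and returns the client itself, so the usual produce-then-wait chain keeps working and yields delivery reports with offsets counting up from zero.

    client = WaterDrop::Producer::DummyClient.new

    # method_missing returns the client itself, so chained calls still work
    handle = client.produce(topic: 'events', payload: '{}')

    # each wait fabricates a delivery report on partition 0 with a growing offset
    handle.wait.offset  # => 0
    client.wait.offset  # => 1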
data/lib/water_drop/producer/statistics_decorator.rb ADDED

@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module WaterDrop
+  class Producer
+    # Many of the librdkafka statistics are absolute values instead of a gauge.
+    # This means, that for example number of messages sent is an absolute growing value
+    # instead of being a value of messages sent from the last statistics report.
+    # This decorator calculates the diff against previously emited stats, so we get also
+    # the diff together with the original values
+    class StatisticsDecorator
+      def initialize
+        @previous = {}.freeze
+      end
+
+      # @param emited_stats [Hash] original emited statistics
+      # @return [Hash] emited statistics extended with the diff data
+      # @note We modify the emited statistics, instead of creating new. Since we don't expose
+      #   any API to get raw data, users can just assume that the result of this decoration is the
+      #   proper raw stats that they can use
+      def call(emited_stats)
+        diff(
+          @previous,
+          emited_stats
+        )
+
+        @previous = emited_stats
+
+        emited_stats.freeze
+      end
+
+      private
+
+      # Calculates the diff of the provided values and modifies in place the emited statistics
+      #
+      # @param previous [Object] previous value from the given scope in which
+      #   we are
+      # @param current [Object] current scope from emitted statistics
+      # @return [Object] the diff if the values were numerics or the current scope
+      def diff(previous, current)
+        if current.is_a?(Hash)
+          # @note We cannot use #each_key as we modify the content of the current scope
+          #   in place (in case it's a hash)
+          current.keys.each do |key|
+            append(
+              current,
+              key,
+              diff((previous || {})[key], (current || {})[key])
+            )
+          end
+        end
+
+        if current.is_a?(Numeric) && previous.is_a?(Numeric)
+          current - previous
+        else
+          current
+        end
+      end
+
+      # Appends the result of the diff to a given key as long as the result is numeric
+      #
+      # @param current [Hash] current scope
+      # @param key [Symbol] key based on which we were diffing
+      # @param result [Object] diff result
+      def append(current, key, result)
+        return unless result.is_a?(Numeric)
+
+        current["#{key}_d"] = result
+      end
+    end
+  end
+end
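To see what the decoration produces (an illustrative sketch with made-up stat keys, not part of the diff): numeric fields are diffed in place against the previously emitted hash and the delta is stored under a `_d` suffixed key, recursively for nested hashes.

    decorator = WaterDrop::Producer::StatisticsDecorator.new

    decorator.call('txmsgs' => 10, 'brokers' => { 'b1' => { 'rtt' => 5 } })
    stats = decorator.call('txmsgs' => 25, 'brokers' => { 'b1' => { 'rtt' => 8 } })

    stats['txmsgs']                  # => 25 (absolute value, untouched)
    stats['txmsgs_d']                # => 15 (delta since the previous emission)
    stats['brokers']['b1']['rtt_d']  # => 3  (nested hashes are diffed recursively)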
data/lib/water_drop/producer/status.rb ADDED

@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module WaterDrop
+  class Producer
+    # Producer lifecycle status object representation
+    class Status
+      # States in which the producer can be
+      LIFECYCLE = %i[
+        initial
+        configured
+        connected
+        closing
+        closed
+      ].freeze
+
+      private_constant :LIFECYCLE
+
+      # Creates a new instance of status with the initial state
+      # @return [Status]
+      def initialize
+        @current = LIFECYCLE.first
+      end
+
+      # @return [Boolean] true if producer is in a active state. Active means, that we can start
+      #   sending messages. Actives states are connected (connection established) or configured,
+      #   which means, that producer is configured, but connection with Kafka is
+      #   not yet established.
+      def active?
+        connected? || configured?
+      end
+
+      # @return [String] current status as a string
+      def to_s
+        @current.to_s
+      end
+
+      LIFECYCLE.each do |state|
+        module_eval <<-RUBY, __FILE__, __LINE__ + 1
+          # @return [Boolean] true if current status is as we want, otherwise false
+          def #{state}?
+            @current == :#{state}
+          end
+
+          # Sets a given state as current
+          def #{state}!
+            @current = :#{state}
+          end
+        RUBY
+      end
+    end
+  end
+end
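The module_eval loop above generates one predicate and one bang setter per lifecycle state, so the status object can be driven and queried like this (illustrative, not part of the diff):

    status = WaterDrop::Producer::Status.new

    status.initial?     # => true
    status.active?      # => false

    status.configured!  # generated setter, switches @current to :configured
    status.configured?  # => true
    status.active?      # => true (configured or connected)
    status.to_s         # => "configured"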
data/lib/water_drop/producer/sync.rb ADDED

@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module WaterDrop
+  class Producer
+    # Component for synchronous producer operations
+    module Sync
+      # Produces a message to Kafka and waits for it to be delivered
+      #
+      # @param message [Hash] hash that complies with the {Contracts::Message} contract
+      #
+      # @return [Rdkafka::Producer::DeliveryReport] delivery report
+      #
+      # @raise [Rdkafka::RdkafkaError] When adding the message to rdkafka's queue failed
+      # @raise [Rdkafka::Producer::WaitTimeoutError] When the timeout has been reached and the
+      #   handle is still pending
+      # @raise [Errors::MessageInvalidError] When provided message details are invalid and the
+      #   message could not be sent to Kafka
+      def produce_sync(message)
+        ensure_active!
+        validate_message!(message)
+
+        @monitor.instrument(
+          'message.produced_sync',
+          producer: self,
+          message: message
+        ) do
+          client
+            .produce(**message)
+            .wait(
+              max_wait_timeout: @config.max_wait_timeout,
+              wait_timeout: @config.wait_timeout
+            )
+        end
+      end
+
+      # Produces many messages to Kafka and waits for them to be delivered
+      #
+      # @param messages [Array<Hash>] array with messages that comply with the
+      #   {Contracts::Message} contract
+      #
+      # @return [Array<Rdkafka::Producer::DeliveryReport>] delivery reports
+      #
+      # @raise [Rdkafka::RdkafkaError] When adding the messages to rdkafka's queue failed
+      # @raise [Rdkafka::Producer::WaitTimeoutError] When the timeout has been reached and the
+      #   some handles are still pending
+      # @raise [Errors::MessageInvalidError] When any of the provided messages details are invalid
+      #   and the message could not be sent to Kafka
+      def produce_many_sync(messages)
+        ensure_active!
+        messages.each { |message| validate_message!(message) }
+
+        @monitor.instrument('messages.produced_sync', producer: self, messages: messages) do
+          messages
+            .map { |message| client.produce(**message) }
+            .map! do |handler|
+              handler.wait(
+                max_wait_timeout: @config.max_wait_timeout,
+                wait_timeout: @config.wait_timeout
+              )
+            end
+        end
+      end
+    end
+  end
+end
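Call shape of the two sync methods (a sketch, assuming `producer` is an already configured WaterDrop::Producer and that the message hash satisfies the Contracts::Message contract defined outside this excerpt):

    report = producer.produce_sync(topic: 'events', payload: 'data')
    report.offset  # delivery was awaited, so the report carries partition/offset

    reports = producer.produce_many_sync(
      [
        { topic: 'events', payload: 'one' },
        { topic: 'events', payload: 'two' }
      ]
    )
    reports.size   # => 2, one delivery report per message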
data/lib/water_drop/producer.rb ADDED

@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+module WaterDrop
+  # Main WaterDrop messages producer
+  class Producer
+    include Sync
+    include Async
+    include Buffer
+
+    # @return [String] uuid of the current producer
+    attr_reader :id
+    # @return [Status] producer status object
+    attr_reader :status
+    # @return [Concurrent::Array] internal messages buffer
+    attr_reader :messages
+    # @return [Object] monitor we want to use
+    attr_reader :monitor
+    # @return [Object] dry-configurable config object
+    attr_reader :config
+
+    # Creates a not-yet-configured instance of the producer
+    # @param block [Proc] configuration block
+    # @return [Producer] producer instance
+    def initialize(&block)
+      @buffer_mutex = Mutex.new
+      @connecting_mutex = Mutex.new
+      @closing_mutex = Mutex.new
+
+      @status = Status.new
+      @messages = Concurrent::Array.new
+
+      return unless block
+
+      setup(&block)
+    end
+
+    # Sets up the whole configuration and initializes all that is needed
+    # @param block [Block] configuration block
+    def setup(&block)
+      raise Errors::ProducerAlreadyConfiguredError, id unless @status.initial?
+
+      @config = Config
+                .new
+                .setup(&block)
+                .config
+
+      @id = @config.id
+      @monitor = @config.monitor
+      @contract = Contracts::Message.new(max_payload_size: @config.max_payload_size)
+      @status.configured!
+    end
+
+    # @return [Rdkafka::Producer] raw rdkafka producer
+    # @note Client is lazy initialized, keeping in mind also the fact of a potential fork that
+    #   can happen any time.
+    # @note It is not recommended to fork a producer that is already in use so in case of
+    #   bootstrapping a cluster, it's much better to fork configured but not used producers
+    def client
+      return @client if @client && @pid == Process.pid
+
+      # Don't allow to obtain a client reference for a producer that was not configured
+      raise Errors::ProducerNotConfiguredError, id if @status.initial?
+
+      @connecting_mutex.synchronize do
+        return @client if @client && @pid == Process.pid
+
+        # We should raise an error when trying to use a producer from a fork, that is already
+        # connected to Kafka. We allow forking producers only before they are used
+        raise Errors::ProducerUsedInParentProcess, Process.pid if @status.connected?
+
+        # We undefine all the finalizers, in case it was a fork, so the finalizers from the parent
+        # process don't leak
+        ObjectSpace.undefine_finalizer(self)
+        # Finalizer tracking is needed for handling shutdowns gracefully.
+        # I don't expect everyone to remember about closing all the producers all the time, thus
+        # this approach is better. Although it is still worth keeping in mind, that this will
+        # block GC from removing a no longer used producer unless closed properly
+        ObjectSpace.define_finalizer(self, proc { close })
+
+        @pid = Process.pid
+        @client = Builder.new.call(self, @config)
+        @status.connected!
+      end
+
+      @client
+    end
+
+    # Flushes the buffers in a sync way and closes the producer
+    def close
+      @closing_mutex.synchronize do
+        return unless @status.active?
+
+        @monitor.instrument(
+          'producer.closed',
+          producer: self
+        ) do
+          @status.closing!
+
+          # No need for auto-gc if everything got closed by us
+          # This should be used only in case a producer was not closed properly and forgotten
+          ObjectSpace.undefine_finalizer(self)
+
+          # Flush has it's own buffer mutex but even if it is blocked, flushing can still happen
+          # as we close the client after the flushing (even if blocked by the mutex)
+          flush(false)
+
+          # We should not close the client in several threads the same time
+          # It is safe to run it several times but not exactly the same moment
+          client.close
+
+          @status.closed!
+        end
+      end
+    end
+
+    # Ensures that we don't run any operations when the producer is not configured or when it
+    #   was already closed
+    def ensure_active!
+      return if @status.active?
+
+      raise Errors::ProducerNotConfiguredError, id if @status.initial?
+      raise Errors::ProducerClosedError, id if @status.closing? || @status.closed?
+
+      # This should never happen
+      raise Errors::StatusInvalidError, [id, @status.to_s]
+    end
+
+    # Ensures that the message we want to send out to Kafka is actually valid and that it can be
+    #   sent there
+    # @param message [Hash] message we want to send
+    # @raise [Karafka::Errors::MessageInvalidError]
+    def validate_message!(message)
+      result = @contract.call(message)
+      return if result.success?
+
+      raise Errors::MessageInvalidError, [
+        result.errors.to_h,
+        message
+      ]
+    end
+  end
+end
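Putting the lifecycle together, a minimal end-to-end sketch (assuming the `deliver` and `kafka` settings exposed by lib/water_drop/config.rb, which is not part of this excerpt):

    producer = WaterDrop::Producer.new do |config|
      config.deliver = true
      config.kafka = { 'bootstrap.servers': 'localhost:9092' }
    end

    producer.produce_sync(topic: 'events', payload: 'data')

    # close flushes the buffer and releases the rdkafka client; the finalizer
    # registered in #client is only a safety net for producers left unclosed
    producer.close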
data/lib/water_drop/version.rb CHANGED

data/lib/water_drop.rb CHANGED

@@ -3,39 +3,19 @@
 # External components
 # delegate should be removed because we don't need it, we just add it because of ruby-kafka
 %w[
-
-  json
-  delivery_boy
-  singleton
+  concurrent/array
   dry-configurable
   dry/monitor/notifications
   dry-validation
+  rdkafka
+  json
   zeitwerk
+  securerandom
 ].each { |lib| require lib }

 # WaterDrop library
 module WaterDrop
   class << self
-    attr_accessor :logger
-
-    # Sets up the whole configuration
-    # @param [Block] block configuration block
-    def setup(&block)
-      Config.setup(&block)
-      DeliveryBoy.logger = self.logger = config.logger
-      ConfigApplier.call(DeliveryBoy.config, Config.config.to_h)
-    end
-
-    # @return [WaterDrop::Config] config instance
-    def config
-      Config.config
-    end
-
-    # @return [::WaterDrop::Monitor] monitor that we want to use
-    def monitor
-      config.monitor
-    end
-
     # @return [String] root path of this gem
     def gem_root
       Pathname.new(File.expand_path('..', __dir__))
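The removal above is the core API shift of 2.0: the global, DeliveryBoy-backed WaterDrop.setup / WaterDrop.config singleton is gone and configuration now lives on producer instances. Roughly, as a migration sketch:

    # waterdrop 1.x (removed above): one global, DeliveryBoy-backed configuration
    WaterDrop.setup do |config|
      config.deliver = true
    end

    # waterdrop 2.x: each producer instance owns its configuration
    producer = WaterDrop::Producer.new do |config|
      config.deliver = true
    end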
data/waterdrop.gemspec CHANGED

@@ -14,16 +14,16 @@ Gem::Specification.new do |spec|
   spec.homepage = 'https://github.com/karafka/waterdrop'
   spec.summary = 'Kafka messaging made easy!'
   spec.description = spec.summary
-  spec.license = '
+  spec.license = 'LGPL-3.0'

-  spec.add_dependency '
+  spec.add_dependency 'concurrent-ruby', '>= 1.1'
   spec.add_dependency 'dry-configurable', '~> 0.8'
   spec.add_dependency 'dry-monitor', '~> 0.3'
-  spec.add_dependency 'dry-validation', '~> 1.
-  spec.add_dependency '
+  spec.add_dependency 'dry-validation', '~> 1.3'
+  spec.add_dependency 'rdkafka', '>= 0.6.0'
   spec.add_dependency 'zeitwerk', '~> 2.1'

-  spec.required_ruby_version = '>= 2.
+  spec.required_ruby_version = '>= 2.5.0'

   if $PROGRAM_NAME.end_with?('gem')
     spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
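Since 2.0.0.rc1 is a prerelease, Bundler will not pick it up from an unpinned requirement; a Gemfile entry has to name it explicitly:

    gem 'waterdrop', '2.0.0.rc1'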
data.tar.gz.sig CHANGED

Binary file
metadata CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: waterdrop
 version: !ruby/object:Gem::Version
-  version:
+  version: 2.0.0.rc1
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -11,51 +11,45 @@ cert_chain:
 - |
   -----BEGIN CERTIFICATE-----
   MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
-
-
-
-
-
-
-
-
-
-
-
-
+  ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjAwODExMDkxNTM3WhcNMjEwODExMDkx
+  NTM3WjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
+  CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDCpXsCgmINb6lHBXXBdyrgsBPSxC4/
+  2H+weJ6L9CruTiv2+2/ZkQGtnLcDgrD14rdLIHK7t0o3EKYlDT5GhD/XUVhI15JE
+  N7IqnPUgexe1fbZArwQ51afxz2AmPQN2BkB2oeQHXxnSWUGMhvcEZpfbxCCJH26w
+  hS0Ccsma8yxA6hSlGVhFVDuCr7c2L1di6cK2CtIDpfDaWqnVNJEwBYHIxrCoWK5g
+  sIGekVt/admS9gRhIMaIBg+Mshth5/DEyWO2QjteTodItlxfTctrfmiAl8X8T5JP
+  VXeLp5SSOJ5JXE80nShMJp3RFnGw5fqjX/ffjtISYh78/By4xF3a25HdWH9+qO2Z
+  tx0wSGc9/4gqNM0APQnjN/4YXrGZ4IeSjtE+OrrX07l0TiyikzSLFOkZCAp8oBJi
+  Fhlosz8xQDJf7mhNxOaZziqASzp/hJTU/tuDKl5+ql2icnMv5iV/i6SlmvU29QNg
+  LCV71pUv0pWzN+OZbHZKWepGhEQ3cG9MwvkCAwEAAaN3MHUwCQYDVR0TBAIwADAL
+  BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFImGed2AXS070ohfRidiCEhXEUN+MB0GA1Ud
   EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
-
-
-
-
-
-
-
-
-
+  c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBAKiHpwoENVrMi94V1zD4o8/6G3AU
+  gWz4udkPYHTZLUy3dLznc/sNjdkJFWT3E6NKYq7c60EpJ0m0vAEg5+F5pmNOsvD3
+  2pXLj9kisEeYhR516HwXAvtngboUcb75skqvBCU++4Pu7BRAPjO1/ihLSBexbwSS
+  fF+J5OWNuyHHCQp+kGPLtXJe2yUYyvSWDj3I2//Vk0VhNOIlaCS1+5/P3ZJThOtm
+  zJUBI7h3HgovwRpcnmk2mXTmU4Zx/bCzX8EA6VY0khEvnmiq7S6eBF0H9qH8KyQ6
+  EkVLpvmUDFcf/uNaBQdazEMB5jYtwoA8gQlANETNGPi51KlkukhKgaIEDMkBDJOx
+  65N7DzmkcyY0/GwjIVIxmRhcrCt1YeCUElmfFx0iida1/YRm6sB2AXqScc1+ECRi
+  2DND//YJUikn1zwbz1kT70XmHd97B4Eytpln7K+M1u2g1pHVEPW4owD/ammXNpUy
+  nt70FcDD4yxJQ+0YNiHd0N8IcVBM1TMIVctMNQ==
   -----END CERTIFICATE-----
-date:
+date: 2020-09-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name:
+  name: concurrent-ruby
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '
-    - - "<"
-      - !ruby/object:Gem::Version
-        version: 2.x
+        version: '1.1'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '
-    - - "<"
-      - !ruby/object:Gem::Version
-        version: 2.x
+        version: '1.1'
 - !ruby/object:Gem::Dependency
   name: dry-configurable
   requirement: !ruby/object:Gem::Requirement
@@ -90,28 +84,28 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.3'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.3'
 - !ruby/object:Gem::Dependency
-  name:
+  name: rdkafka
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.6.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.6.0
 - !ruby/object:Gem::Dependency
   name: zeitwerk
   requirement: !ruby/object:Gem::Requirement
@@ -135,6 +129,7 @@ extra_rdoc_files: []
 files:
 - ".coditsu/ci.yml"
 - ".diffend.yml"
+- ".github/FUNDING.yml"
 - ".github/workflows/ci.yml"
 - ".gitignore"
 - ".rspec"
@@ -143,31 +138,35 @@ files:
 - CHANGELOG.md
 - Gemfile
 - Gemfile.lock
--
+- LICENSE
 - README.md
 - certs/mensfeld.pem
 - config/errors.yml
 - docker-compose.yml
 - lib/water_drop.rb
-- lib/water_drop/async_producer.rb
-- lib/water_drop/base_producer.rb
 - lib/water_drop/config.rb
-- lib/water_drop/config_applier.rb
 - lib/water_drop/contracts.rb
 - lib/water_drop/contracts/config.rb
-- lib/water_drop/contracts/
+- lib/water_drop/contracts/message.rb
 - lib/water_drop/errors.rb
 - lib/water_drop/instrumentation.rb
 - lib/water_drop/instrumentation/monitor.rb
 - lib/water_drop/instrumentation/stdout_listener.rb
-- lib/water_drop/
+- lib/water_drop/producer.rb
+- lib/water_drop/producer/async.rb
+- lib/water_drop/producer/buffer.rb
+- lib/water_drop/producer/builder.rb
+- lib/water_drop/producer/dummy_client.rb
+- lib/water_drop/producer/statistics_decorator.rb
+- lib/water_drop/producer/status.rb
+- lib/water_drop/producer/sync.rb
 - lib/water_drop/version.rb
 - lib/waterdrop.rb
 - log/.gitkeep
 - waterdrop.gemspec
 homepage: https://github.com/karafka/waterdrop
 licenses:
--
+- LGPL-3.0
 metadata: {}
 post_install_message:
 rdoc_options: []
@@ -177,14 +176,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 2.
+      version: 2.5.0
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - "
+  - - ">"
     - !ruby/object:Gem::Version
-      version:
+      version: 1.3.1
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.1.4
 signing_key:
 specification_version: 4
 summary: Kafka messaging made easy!
metadata.gz.sig CHANGED

Binary file
data/MIT-LICENCE DELETED

@@ -1,18 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/lib/water_drop/async_producer.rb DELETED

@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-# WaterDrop library
-module WaterDrop
-  # Async producer for messages
-  class AsyncProducer < BaseProducer
-    # Performs message delivery using deliver_async method
-    # @param message [String] message that we want to send to Kafka
-    # @param options [Hash] options (including topic) for producer
-    # @raise [WaterDrop::Errors::InvalidMessageOptions] raised when message options are
-    #   somehow invalid and we cannot perform delivery because of that
-    def self.call(message, options)
-      attempts_count ||= 0
-      attempts_count += 1
-
-      validate!(options)
-      return unless WaterDrop.config.deliver
-
-      d_method = WaterDrop.config.raise_on_buffer_overflow ? :deliver_async! : :deliver_async
-
-      DeliveryBoy.send(d_method, message, **options)
-    rescue Kafka::Error => e
-      graceful_attempt?(attempts_count, message, options, e) ? retry : raise(e)
-    end
-  end
-end