waterdrop 2.1.0 → 2.2.0
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +0 -2
- data/CHANGELOG.md +9 -0
- data/Gemfile +0 -2
- data/Gemfile.lock +21 -24
- data/README.md +35 -7
- data/config/errors.yml +1 -0
- data/lib/{water_drop → waterdrop}/config.rb +3 -13
- data/lib/waterdrop/contracts/base.rb +23 -0
- data/lib/{water_drop → waterdrop}/contracts/config.rb +12 -8
- data/lib/{water_drop → waterdrop}/contracts/message.rb +2 -4
- data/lib/{water_drop → waterdrop}/contracts.rb +0 -0
- data/lib/{water_drop → waterdrop}/errors.rb +0 -0
- data/lib/{water_drop → waterdrop}/instrumentation/callbacks/delivery.rb +0 -0
- data/lib/{water_drop → waterdrop}/instrumentation/callbacks/error.rb +0 -0
- data/lib/{water_drop → waterdrop}/instrumentation/callbacks/statistics.rb +0 -0
- data/lib/{water_drop → waterdrop}/instrumentation/callbacks/statistics_decorator.rb +0 -0
- data/lib/{water_drop → waterdrop}/instrumentation/callbacks_manager.rb +0 -0
- data/lib/{water_drop → waterdrop}/instrumentation/monitor.rb +0 -0
- data/lib/{water_drop → waterdrop}/instrumentation/stdout_listener.rb +0 -0
- data/lib/waterdrop/instrumentation/vendors/datadog/dashboard.json +1 -0
- data/lib/waterdrop/instrumentation/vendors/datadog/listener.rb +197 -0
- data/lib/{water_drop → waterdrop}/instrumentation.rb +0 -0
- data/lib/{water_drop → waterdrop}/patches/rdkafka/bindings.rb +0 -0
- data/lib/{water_drop → waterdrop}/patches/rdkafka/producer.rb +0 -0
- data/lib/{water_drop → waterdrop}/producer/async.rb +0 -0
- data/lib/{water_drop → waterdrop}/producer/buffer.rb +4 -2
- data/lib/{water_drop → waterdrop}/producer/builder.rb +0 -0
- data/lib/{water_drop → waterdrop}/producer/dummy_client.rb +0 -0
- data/lib/{water_drop → waterdrop}/producer/status.rb +0 -0
- data/lib/{water_drop → waterdrop}/producer/sync.rb +0 -0
- data/lib/{water_drop → waterdrop}/producer.rb +1 -7
- data/lib/{water_drop → waterdrop}/version.rb +1 -1
- data/lib/waterdrop.rb +35 -2
- data/waterdrop.gemspec +7 -3
- data.tar.gz.sig +0 -0
- metadata +31 -28
- metadata.gz.sig +0 -0
- data/lib/water_drop.rb +0 -36
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: db36e5203312e482d0d98a83d3bfb4fd5e274c342e5b3c63829d6bb5f616b52e
+  data.tar.gz: 6e49baf2f5ebd33aded769899fc0eb7f3c130ac97d9a6156fd491d000fd04306
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 426d9f910ca5e24a7e86db1ceec672f895186ace830554708e8741d21bb83f929c9278ed5018bff6468b354523ac7cc790cba0b91b8c13dd2bb28e58f8a52dc6
+  data.tar.gz: b3871df6868ce287daf0765f522031cfdd566b96d0e5f8bf3ca1cd9724a7a68d5b3e4e1a33e0bdcc9b65063150d48cfc213e19348d19a6268a309b1c69dcab50
checksums.yaml.gz.sig
CHANGED
Binary file
data/.github/workflows/ci.yml
CHANGED
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,14 @@
 # WaterDrop changelog

+## 2.2.0 (2022-02-18)
+- Add Datadog listener for metrics + errors publishing
+- Add Datadog example dashboard template
+- Update Readme to show Dd instrumentation usage
+- Align the directory namespace convention with gem name (waterdrop => WaterDrop)
+- Introduce a common base for validation contracts
+- Drop CI support for ruby 2.6
+- Require all `kafka` settings to have symbol keys (compatibility with Karafka 2.0 and rdkafka)
+
 ## 2.1.0 (2022-01-03)
 - Ruby 3.1 support
 - Change the error notification key from `error.emitted` to `error.occurred`.
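Of these, the symbol-keys requirement is the entry most likely to need action when upgrading from 2.1.0: string keys in the `kafka` settings hash are now rejected during setup. A minimal sketch of what that means for a caller (the broker address is illustrative):

```ruby
require 'waterdrop'

producer = WaterDrop::Producer.new do |config|
  config.deliver = true

  # 2.1.0 also accepted string keys, e.g. { 'bootstrap.servers' => 'localhost:9092' };
  # since 2.2.0 that raises WaterDrop::Errors::ConfigurationInvalidError, so use symbols
  config.kafka = {
    'bootstrap.servers': 'localhost:9092'
  }
end
```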
data/Gemfile
CHANGED
data/Gemfile.lock
CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    waterdrop (2.
+    waterdrop (2.2.0)
       concurrent-ruby (>= 1.1)
       dry-configurable (~> 0.13)
       dry-monitor (~> 0.5)
@@ -12,17 +12,16 @@ PATH
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (
+    activesupport (7.0.2)
       concurrent-ruby (~> 1.0, >= 1.0.2)
       i18n (>= 1.6, < 2)
       minitest (>= 5.1)
       tzinfo (~> 2.0)
-      zeitwerk (~> 2.3)
     byebug (11.1.3)
     concurrent-ruby (1.1.9)
-    diff-lcs (1.
+    diff-lcs (1.5.0)
     docile (1.4.0)
-    dry-configurable (0.
+    dry-configurable (0.14.0)
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.6)
     dry-container (0.9.0)
@@ -34,7 +33,7 @@ GEM
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.5, >= 0.5)
     dry-inflector (0.2.1)
-    dry-initializer (3.
+    dry-initializer (3.1.1)
     dry-logic (1.2.0)
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.5, >= 0.5)
@@ -63,29 +62,29 @@ GEM
       dry-schema (~> 1.8, >= 1.8.0)
     factory_bot (6.2.0)
       activesupport (>= 5.0.0)
-    ffi (1.15.
-    i18n (1.
+    ffi (1.15.5)
+    i18n (1.9.1)
       concurrent-ruby (~> 1.0)
     mini_portile2 (2.7.1)
-    minitest (5.
+    minitest (5.15.0)
     rake (13.0.6)
     rdkafka (0.11.1)
       ffi (~> 1.15)
       mini_portile2 (~> 2.6)
       rake (> 12)
-    rspec (3.
-      rspec-core (~> 3.
-      rspec-expectations (~> 3.
-      rspec-mocks (~> 3.
-    rspec-core (3.
-      rspec-support (~> 3.
-    rspec-expectations (3.
+    rspec (3.11.0)
+      rspec-core (~> 3.11.0)
+      rspec-expectations (~> 3.11.0)
+      rspec-mocks (~> 3.11.0)
+    rspec-core (3.11.0)
+      rspec-support (~> 3.11.0)
+    rspec-expectations (3.11.0)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.
-    rspec-mocks (3.
+      rspec-support (~> 3.11.0)
+    rspec-mocks (3.11.0)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.
-    rspec-support (3.
+      rspec-support (~> 3.11.0)
+    rspec-support (3.11.0)
     simplecov (0.21.2)
       docile (~> 1.1)
       simplecov-html (~> 0.11)
@@ -94,19 +93,17 @@ GEM
     simplecov_json_formatter (0.1.3)
     tzinfo (2.0.4)
       concurrent-ruby (~> 1.0)
-    zeitwerk (2.5.
+    zeitwerk (2.5.4)

 PLATFORMS
-  x86_64-darwin
   x86_64-linux

 DEPENDENCIES
   byebug
   factory_bot
-  rdkafka
   rspec
   simplecov
   waterdrop!

 BUNDLED WITH
-   2.3.
+   2.3.7
data/README.md
CHANGED
@@ -2,7 +2,7 @@

 **Note**: Documentation presented here refers to WaterDrop `2.0.0`.

-WaterDrop `2.0` does **not** work with Karafka `1.*` and aims to either work as a standalone producer outside of Karafka `1.*` ecosystem or as a part of
+WaterDrop `2.0` does **not** work with Karafka `1.*` and aims to either work as a standalone producer outside of Karafka `1.*` ecosystem or as a part of soon to be released Karafka `2.0.*`.

 Please refer to [this](https://github.com/karafka/waterdrop/tree/1.4) branch and its documentation for details about WaterDrop `1.*` usage.

@@ -36,16 +36,13 @@ It:
 - [Instrumentation](#instrumentation)
   * [Usage statistics](#usage-statistics)
   * [Error notifications](#error-notifications)
+  * [Datadog and StatsD integration](#datadog-and-statsd-integration)
   * [Forking and potential memory problems](#forking-and-potential-memory-problems)
 - [Note on contributions](#note-on-contributions)

 ## Installation

-
-gem install waterdrop
-```
-
-or add this to your Gemfile:
+Add this to your Gemfile:

 ```ruby
 gem 'waterdrop'
@@ -201,7 +198,7 @@ producer.setup do |config|
   config.kafka = {
     'bootstrap.servers': 'localhost:9092',
     # Accumulate messages for at most 10 seconds
-    'queue.buffering.max.ms'
+    'queue.buffering.max.ms': 10_000
   }
 end

@@ -288,6 +285,37 @@ producer.close

 Note: The metrics returned may not be completely consistent between brokers, toppars and totals, due to the internal asynchronous nature of librdkafka. E.g., the top level tx total may be less than the sum of the broker tx values which it represents.

+### Datadog and StatsD integration
+
+WaterDrop comes with (optional) full Datadog and StatsD integration that you can use. To use it:
+
+```ruby
+# require datadog/statsd and the listener as it is not loaded by default
+require 'datadog/statsd'
+require 'waterdrop/instrumentation/vendors/datadog/listener'
+
+# initialize your producer with statistics.interval.ms enabled so the metrics are published
+producer = WaterDrop::Producer.new do |config|
+  config.deliver = true
+  config.kafka = {
+    'bootstrap.servers': 'localhost:9092',
+    'statistics.interval.ms': 1_000
+  }
+end
+
+# initialize the listener with statsd client
+listener = ::WaterDrop::Instrumentation::Vendors::Datadog::Listener.new do |config|
+  config.client = Datadog::Statsd.new('localhost', 8125)
+  # Publish host as a tag alongside the rest of tags
+  config.default_tags = ["host:#{Socket.gethostname}"]
+end
+
+# Subscribe with your listener to your producer and you should be ready to go!
+producer.monitor.subscribe(listener)
+```
+
+You can also find a ready to import DataDog dashboard configuration file that you can use to monitor all of your producer.
+
 ### Error notifications

 WaterDrop allows you to listen to all errors that occur while producing messages and in its internal background threads. Things like reconnecting to Kafka upon network errors and others unrelated to publishing messages are all available under `error.occurred` notification key. You can subscribe to this event to ensure your setup is healthy and without any problems that would otherwise go unnoticed as long as messages are delivered.
data/config/errors.yml
CHANGED
data/lib/{water_drop → waterdrop}/config.rb
CHANGED
@@ -9,7 +9,7 @@ module WaterDrop

   # Defaults for kafka settings, that will be overwritten only if not present already
   KAFKA_DEFAULTS = {
-    'client.id'
+    'client.id': 'waterdrop'
   }.freeze

   private_constant :KAFKA_DEFAULTS
@@ -63,7 +63,8 @@ module WaterDrop
       yield(config)

       merge_kafka_defaults!(config)
-
+
+      Contracts::Config.new.validate!(config.to_h, Errors::ConfigurationInvalidError)

       ::Rdkafka::Config.logger = config.logger
     end
@@ -82,16 +83,5 @@ module WaterDrop
         config.kafka[key] = value
       end
     end
-
-    # Validates the configuration and if anything is wrong, will raise an exception
-    # @param config_hash [Hash] config hash with setup details
-    # @raise [WaterDrop::Errors::ConfigurationInvalidError] raised when something is wrong with
-    #   the configuration
-    def validate!(config_hash)
-      result = Contracts::Config.new.call(config_hash)
-      return true if result.success?
-
-      raise Errors::ConfigurationInvalidError, result.errors.to_h
-    end
   end
 end
data/lib/waterdrop/contracts/base.rb
ADDED
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module WaterDrop
+  module Contracts
+    # Base for all the contracts in WaterDrop
+    class Base < Dry::Validation::Contract
+      config.messages.load_paths << File.join(WaterDrop.gem_root, 'config', 'errors.yml')
+
+      # @param data [Hash] data for validation
+      # @param error_class [Class] error class that should be used when validation fails
+      # @return [Boolean] true
+      # @raise [StandardError] any error provided in the error_class that inherits from the
+      #   standard error
+      def validate!(data, error_class)
+        result = call(data)
+
+        return true if result.success?
+
+        raise error_class, result.errors.to_h
+      end
+    end
+  end
+end
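Both shipped contracts now inherit this `validate!`. A small sketch of the shared API it gives them (the message hashes and the error class pairing below are just an example of how the producer uses it internally):

```ruby
# Contracts::Message is built with the max payload size it should enforce
contract = WaterDrop::Contracts::Message.new(max_payload_size: 1_024)

# Returns true when the data is valid...
contract.validate!(
  { topic: 'events', payload: 'hello' },
  WaterDrop::Errors::MessageInvalidError
)

# ...and raises the given error class with the errors hash when it is not
contract.validate!({ topic: '', payload: 'hello' }, WaterDrop::Errors::MessageInvalidError)
```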
data/lib/{water_drop → waterdrop}/contracts/config.rb
CHANGED
@@ -3,12 +3,7 @@
 module WaterDrop
   module Contracts
     # Contract with validation rules for WaterDrop configuration details
-    class Config <
-      # Ensure valid format of each seed broker so that rdkafka doesn't fail silently
-      SEED_BROKER_FORMAT_REGEXP = %r{\A([^:/,]+:[0-9]+)(,[^:/,]+:[0-9]+)*\z}.freeze
-
-      private_constant :SEED_BROKER_FORMAT_REGEXP
-
+    class Config < Base
       params do
         required(:id).filled(:str?)
         required(:logger).filled
@@ -16,9 +11,18 @@ module WaterDrop
         required(:max_payload_size).filled(:int?, gteq?: 1)
         required(:max_wait_timeout).filled(:number?, gteq?: 0)
         required(:wait_timeout).filled(:number?, gt?: 0)
+        required(:kafka).filled(:hash?)
+      end
+
+      # rdkafka allows both symbols and strings as keys for config but then casts them to strings
+      # This can be confusing, so we expect all keys to be symbolized
+      rule(:kafka) do
+        next unless value.is_a?(Hash)
+
+        value.each_key do |key|
+          next if key.is_a?(Symbol)

-
-        required(:'bootstrap.servers').filled(:str?, format?: SEED_BROKER_FORMAT_REGEXP)
+          key(:"kafka.#{key}").failure(:kafka_key_must_be_a_symbol)
         end
       end
     end
data/lib/{water_drop → waterdrop}/contracts/message.rb
CHANGED
@@ -4,17 +4,15 @@ module WaterDrop
   module Contracts
     # Contract with validation rules for validating that all the message options that
    # we provide to producer ale valid and usable
-    class Message <
+    class Message < Base
       # Regex to check that topic has a valid format
-      TOPIC_REGEXP = /\A(\w|-|\.)+\z
+      TOPIC_REGEXP = /\A(\w|-|\.)+\z/

       # Checks, that the given value is a string
       STRING_ASSERTION = ->(value) { value.is_a?(String) }.to_proc

       private_constant :TOPIC_REGEXP, :STRING_ASSERTION

-      config.messages.load_paths << File.join(WaterDrop.gem_root, 'config', 'errors.yml')
-
       option :max_payload_size

       params do
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
data/lib/waterdrop/instrumentation/vendors/datadog/dashboard.json
ADDED
@@ -0,0 +1 @@
+{"title":"WaterDrop producer example dashboard","description":"This dashboard include example setup for monitoring activity of your WaterDrop producer","widgets":[{"id":243951318,"definition":{"title":"Messages produced","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"produced sync","formula":"query1"},{"alias":"produced async","formula":"query2"},{"alias":"flushed sync","formula":"query3"},{"alias":"flushed async","formula":"query4"},{"alias":"acknowledged","formula":"query5"}],"response_format":"timeseries","queries":[{"query":"sum:waterdrop.produced_sync{*}.as_count()","data_source":"metrics","name":"query1"},{"query":"sum:waterdrop.produced_async{*}.as_count()","data_source":"metrics","name":"query2"},{"query":"sum:waterdrop.flushed_sync{*}.as_count()","data_source":"metrics","name":"query3"},{"query":"sum:waterdrop.flushed_async{*}.as_count()","data_source":"metrics","name":"query4"},{"query":"sum:waterdrop.acknowledged{*}.as_count()","data_source":"metrics","name":"query5"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":1979626566852990,"definition":{"title":"Messages buffer size","title_size":"16","title_align":"left","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"max","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.buffer.size.max{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}]}},{"id":243951221,"definition":{"title":"Kafka broker API calls","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"API calls","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"sum:waterdrop.calls{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":243951952,"definition":{"title":"Producer queue size","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"Queue size average","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"max:waterdrop.queue.size.avg{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Queue size max","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"max:waterdrop.queue.size.max{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":243951263,"definition":{"title":"Producer queue latency","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"Average latency","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.queue.latency.avg{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Latency p95","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.queue.latency.p95{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Latency p99","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.queue.latency.p99{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":243951276,"definition":{"title":"Producer network latency","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"Average latency","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.request_size.avg{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Latency p95","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.network.latency.p95{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Latency p99","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.network.latency.p99{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":243954928,"definition":{"title":"Producer errors","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"formula":"query1"}],"response_format":"timeseries","queries":[{"query":"sum:waterdrop.error_occurred{*}.as_count()","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}}],"template_variables":[],"layout_type":"ordered","is_read_only":false,"notify_list":[],"reflow_type":"auto","id":"rnr-kgh-dna"}
data/lib/waterdrop/instrumentation/vendors/datadog/listener.rb
ADDED
@@ -0,0 +1,197 @@
+# frozen_string_literal: true
+
+module WaterDrop
+  module Instrumentation
+    # Namespace for vendor specific instrumentation
+    module Vendors
+      # Datadog specific instrumentation
+      module Datadog
+        # Listener that can be used to subscribe to WaterDrop producer to receive stats via StatsD
+        # and/or Datadog
+        #
+        # @note You need to setup the `dogstatsd-ruby` client and assign it
+        class Listener
+          include Dry::Configurable
+
+          # Value object for storing a single rdkafka metric publishing details
+          RdKafkaMetric = Struct.new(:type, :scope, :name, :key_location)
+
+          # Namespace under which the DD metrics should be published
+          setting :namespace, default: 'waterdrop', reader: true
+
+          # Datadog client that we should use to publish the metrics
+          setting :client, reader: true
+
+          # Default tags we want to publish (for example hostname)
+          # Format as followed (example for hostname): `["host:#{Socket.gethostname}"]`
+          setting :default_tags, default: [], reader: true
+
+          # All the rdkafka metrics we want to publish
+          #
+          # By default we publish quite a lot so this can be tuned
+          # Note, that the once with `_d` come from WaterDrop, not rdkafka or Kafka
+          setting :rd_kafka_metrics, reader: true, default: [
+            # Client metrics
+            RdKafkaMetric.new(:count, :root, 'calls', 'tx_d'),
+            RdKafkaMetric.new(:histogram, :root, 'queue.size', 'msg_cnt_d'),
+
+            # Broker metrics
+            RdKafkaMetric.new(:count, :brokers, 'deliver.attempts', 'txretries_d'),
+            RdKafkaMetric.new(:count, :brokers, 'deliver.errors', 'txerrs_d'),
+            RdKafkaMetric.new(:count, :brokers, 'receive.errors', 'rxerrs_d'),
+            RdKafkaMetric.new(:gauge, :brokers, 'queue.latency.avg', %w[outbuf_latency avg]),
+            RdKafkaMetric.new(:gauge, :brokers, 'queue.latency.p95', %w[outbuf_latency p95]),
+            RdKafkaMetric.new(:gauge, :brokers, 'queue.latency.p99', %w[outbuf_latency p99]),
+            RdKafkaMetric.new(:gauge, :brokers, 'network.latency.avg', %w[rtt avg]),
+            RdKafkaMetric.new(:gauge, :brokers, 'network.latency.p95', %w[rtt p95]),
+            RdKafkaMetric.new(:gauge, :brokers, 'network.latency.p99', %w[rtt p99])
+          ].freeze
+
+          # @param block [Proc] configuration block
+          def initialize(&block)
+            setup(&block) if block
+          end
+
+          # @param block [Proc] configuration block
+          # @note We define this alias to be consistent with `WaterDrop#setup`
+          def setup(&block)
+            configure(&block)
+          end
+
+          # Hooks up to WaterDrop instrumentation for emitted statistics
+          #
+          # @param event [Dry::Events::Event]
+          def on_statistics_emitted(event)
+            statistics = event[:statistics]
+
+            rd_kafka_metrics.each do |metric|
+              report_metric(metric, statistics)
+            end
+          end
+
+          # Increases the errors count by 1
+          #
+          # @param _event [Dry::Events::Event]
+          def on_error_occurred(_event)
+            client.count(
+              namespaced_metric('error_occurred'),
+              1,
+              tags: default_tags
+            )
+          end
+
+          # Increases acknowledged messages counter
+          # @param _event [Dry::Events::Event]
+          def on_message_acknowledged(_event)
+            client.increment(
+              namespaced_metric('acknowledged'),
+              tags: default_tags
+            )
+          end
+
+          %i[
+            produced_sync
+            produced_async
+          ].each do |event_scope|
+            class_eval <<~METHODS, __FILE__, __LINE__ + 1
+              # @param event [Dry::Events::Event]
+              def on_message_#{event_scope}(event)
+                report_message(event[:message][:topic], :#{event_scope})
+              end
+
+              # @param event [Dry::Events::Event]
+              def on_messages_#{event_scope}(event)
+                event[:messages].each do |message|
+                  report_message(message[:topic], :#{event_scope})
+                end
+              end
+            METHODS
+          end
+
+          # Reports the buffer usage when anything is added to the buffer
+          %i[
+            message_buffered
+            messages_buffered
+          ].each do |event_scope|
+            class_eval <<~METHODS, __FILE__, __LINE__ + 1
+              # @param event [Dry::Events::Event]
+              def on_#{event_scope}(event)
+                client.histogram(
+                  namespaced_metric('buffer.size'),
+                  event[:buffer].size,
+                  tags: default_tags
+                )
+              end
+            METHODS
+          end
+
+          # Events that support many messages only
+          # Reports data flushing operation (production from the buffer)
+          %i[
+            flushed_sync
+            flushed_async
+          ].each do |event_scope|
+            class_eval <<~METHODS, __FILE__, __LINE__ + 1
+              # @param event [Dry::Events::Event]
+              def on_buffer_#{event_scope}(event)
+                event[:messages].each do |message|
+                  report_message(message[:topic], :#{event_scope})
+                end
+              end
+            METHODS
+          end
+
+          private
+
+          # Report that a message has been produced to a topic.
+          # @param topic [String] Kafka topic
+          # @param method_name [Symbol] method from which this message operation comes
+          def report_message(topic, method_name)
+            client.increment(
+              namespaced_metric(method_name),
+              tags: default_tags + ["topic:#{topic}"]
+            )
+          end
+
+          # Wraps metric name in listener's namespace
+          # @param metric_name [String] RdKafkaMetric name
+          # @return [String]
+          def namespaced_metric(metric_name)
+            "#{namespace}.#{metric_name}"
+          end
+
+          # Reports a given metric statistics to Datadog
+          # @param metric [RdKafkaMetric] metric value object
+          # @param statistics [Hash] hash with all the statistics emitted
+          def report_metric(metric, statistics)
+            case metric.scope
+            when :root
+              client.public_send(
+                metric.type,
+                namespaced_metric(metric.name),
+                statistics.fetch(*metric.key_location),
+                tags: default_tags
+              )
+            when :brokers
+              statistics.fetch('brokers').each_value do |broker_statistics|
+                # Skip bootstrap nodes
+                # Bootstrap nodes have nodeid -1, other nodes have positive
+                # node ids
+                next if broker_statistics['nodeid'] == -1
+
+                client.public_send(
+                  metric.type,
+                  namespaced_metric(metric.name),
+                  broker_statistics.dig(*metric.key_location),
+                  tags: default_tags + ["broker:#{broker_statistics['nodename']}"]
+                )
+              end
+            else
+              raise ArgumentError, metric.scope
+            end
+          end
+        end
+      end
+    end
+  end
+end
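Because the listener is configured through `Dry::Configurable`, the metric set, namespace and tags can be tuned per instance rather than by patching the class. A minimal sketch of narrowing the published metrics (assumes a producer set up as in the README example; the namespace value is illustrative):

```ruby
require 'datadog/statsd'
require 'waterdrop/instrumentation/vendors/datadog/listener'

listener = WaterDrop::Instrumentation::Vendors::Datadog::Listener.new do |config|
  config.client = Datadog::Statsd.new('localhost', 8125)
  config.namespace = 'my_app.waterdrop'
  # Keep only the client-level rdkafka metrics and drop the per-broker ones
  config.rd_kafka_metrics = config.rd_kafka_metrics.select { |metric| metric.scope == :root }
end

producer.monitor.subscribe(listener)
```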
File without changes
File without changes
File without changes
File without changes
data/lib/{water_drop → waterdrop}/producer/buffer.rb
CHANGED
@@ -24,7 +24,8 @@ module WaterDrop
       @monitor.instrument(
         'message.buffered',
         producer_id: id,
-        message: message
+        message: message,
+        buffer: @messages
       ) { @messages << message }
     end

@@ -41,7 +42,8 @@ module WaterDrop
       @monitor.instrument(
         'messages.buffered',
         producer_id: id,
-        messages: messages
+        messages: messages,
+        buffer: @messages
       ) do
         messages.each { |message| @messages << message }
         messages
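With the buffer handed over in the event payload, any subscriber can observe buffer growth the same way the Datadog listener's `buffer.size` histogram does. A small sketch of a custom subscriber (assumes a configured producer; the output target is illustrative):

```ruby
producer.monitor.subscribe('message.buffered') do |event|
  # Since 2.2.0 the event carries the internal buffer alongside the message
  puts "Buffered to #{event[:message][:topic]}, buffer size: #{event[:buffer].size}"
end

producer.buffer(topic: 'events', payload: 'hello')
```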
File without changes
File without changes
File without changes
File without changes
data/lib/{water_drop → waterdrop}/producer.rb
CHANGED
@@ -150,13 +150,7 @@ module WaterDrop
     # @param message [Hash] message we want to send
     # @raise [Karafka::Errors::MessageInvalidError]
     def validate_message!(message)
-
-      return if result.success?
-
-      raise Errors::MessageInvalidError, [
-        result.errors.to_h,
-        message
-      ]
+      @contract.validate!(message, Errors::MessageInvalidError)
     end
   end
 end
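From the caller's side an invalid message still raises `WaterDrop::Errors::MessageInvalidError`; the raising simply moved into the shared contract, which attaches the dry-validation errors hash. A minimal sketch (assumes a configured producer; the nil payload is deliberately invalid):

```ruby
begin
  producer.produce_sync(topic: 'events', payload: nil)
rescue WaterDrop::Errors::MessageInvalidError => e
  # The message carries the validation errors hash, e.g. pointing at the payload
  puts e.message
end
```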
data/lib/waterdrop.rb
CHANGED
@@ -1,4 +1,37 @@
 # frozen_string_literal: true

-#
-
+# External components
+# delegate should be removed because we don't need it, we just add it because of ruby-kafka
+%w[
+  concurrent/array
+  dry-configurable
+  dry/monitor/notifications
+  dry-validation
+  rdkafka
+  json
+  zeitwerk
+  securerandom
+].each { |lib| require lib }
+
+# WaterDrop library
+module WaterDrop
+  class << self
+    # @return [String] root path of this gem
+    def gem_root
+      Pathname.new(File.expand_path('..', __dir__))
+    end
+  end
+end
+
+loader = Zeitwerk::Loader.for_gem
+loader.inflector.inflect('waterdrop' => 'WaterDrop')
+# Do not load vendors instrumentation components. Those need to be required manually if needed
+loader.ignore("#{__dir__}/waterdrop/instrumentation/vendors/**/*.rb")
+loader.setup
+loader.eager_load
+
+# Rdkafka uses a single global callback for things. We bypass that by injecting a manager for
+# each callback type. Callback manager allows us to register more than one callback
+# @note Those managers are also used by Karafka for consumer related statistics
+Rdkafka::Config.statistics_callback = WaterDrop::Instrumentation.statistics_callbacks
+Rdkafka::Config.error_callback = WaterDrop::Instrumentation.error_callbacks
data/waterdrop.gemspec
CHANGED
@@ -3,7 +3,7 @@
 lib = File.expand_path('lib', __dir__)
 $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

-require '
+require 'waterdrop/version'

 Gem::Specification.new do |spec|
   spec.name = 'waterdrop'
@@ -23,7 +23,7 @@ Gem::Specification.new do |spec|
   spec.add_dependency 'rdkafka', '>= 0.10'
   spec.add_dependency 'zeitwerk', '~> 2.3'

-  spec.required_ruby_version = '>= 2.
+  spec.required_ruby_version = '>= 2.7'

   if $PROGRAM_NAME.end_with?('gem')
     spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
@@ -33,5 +33,9 @@ Gem::Specification.new do |spec|
   spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
   spec.require_paths = %w[lib]
-
+
+  spec.metadata = {
+    'source_code_uri' => 'https://github.com/karafka/waterdrop',
+    'rubygems_mfa_required' => 'true'
+  }
 end
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: waterdrop
 version: !ruby/object:Gem::Version
-  version: 2.
+  version: 2.2.0
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -34,7 +34,7 @@ cert_chain:
   R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
   pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
   -----END CERTIFICATE-----
-date: 2022-
+date: 2022-02-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: concurrent-ruby
@@ -142,31 +142,33 @@ files:
 - certs/mensfeld.pem
 - config/errors.yml
 - docker-compose.yml
-- lib/water_drop.rb
-- lib/water_drop/config.rb
-- lib/water_drop/contracts.rb
-- lib/water_drop/contracts/config.rb
-- lib/water_drop/contracts/message.rb
-- lib/water_drop/errors.rb
-- lib/water_drop/instrumentation.rb
-- lib/water_drop/instrumentation/callbacks/delivery.rb
-- lib/water_drop/instrumentation/callbacks/error.rb
-- lib/water_drop/instrumentation/callbacks/statistics.rb
-- lib/water_drop/instrumentation/callbacks/statistics_decorator.rb
-- lib/water_drop/instrumentation/callbacks_manager.rb
-- lib/water_drop/instrumentation/monitor.rb
-- lib/water_drop/instrumentation/stdout_listener.rb
-- lib/water_drop/patches/rdkafka/bindings.rb
-- lib/water_drop/patches/rdkafka/producer.rb
-- lib/water_drop/producer.rb
-- lib/water_drop/producer/async.rb
-- lib/water_drop/producer/buffer.rb
-- lib/water_drop/producer/builder.rb
-- lib/water_drop/producer/dummy_client.rb
-- lib/water_drop/producer/status.rb
-- lib/water_drop/producer/sync.rb
-- lib/water_drop/version.rb
 - lib/waterdrop.rb
+- lib/waterdrop/config.rb
+- lib/waterdrop/contracts.rb
+- lib/waterdrop/contracts/base.rb
+- lib/waterdrop/contracts/config.rb
+- lib/waterdrop/contracts/message.rb
+- lib/waterdrop/errors.rb
+- lib/waterdrop/instrumentation.rb
+- lib/waterdrop/instrumentation/callbacks/delivery.rb
+- lib/waterdrop/instrumentation/callbacks/error.rb
+- lib/waterdrop/instrumentation/callbacks/statistics.rb
+- lib/waterdrop/instrumentation/callbacks/statistics_decorator.rb
+- lib/waterdrop/instrumentation/callbacks_manager.rb
+- lib/waterdrop/instrumentation/monitor.rb
+- lib/waterdrop/instrumentation/stdout_listener.rb
+- lib/waterdrop/instrumentation/vendors/datadog/dashboard.json
+- lib/waterdrop/instrumentation/vendors/datadog/listener.rb
+- lib/waterdrop/patches/rdkafka/bindings.rb
+- lib/waterdrop/patches/rdkafka/producer.rb
+- lib/waterdrop/producer.rb
+- lib/waterdrop/producer/async.rb
+- lib/waterdrop/producer/buffer.rb
+- lib/waterdrop/producer/builder.rb
+- lib/waterdrop/producer/dummy_client.rb
+- lib/waterdrop/producer/status.rb
+- lib/waterdrop/producer/sync.rb
+- lib/waterdrop/version.rb
 - log/.gitkeep
 - waterdrop.gemspec
 homepage: https://karafka.io
@@ -174,6 +176,7 @@ licenses:
 - MIT
 metadata:
   source_code_uri: https://github.com/karafka/waterdrop
+  rubygems_mfa_required: 'true'
 post_install_message:
 rdoc_options: []
 require_paths:
@@ -182,14 +185,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 2.
+      version: '2.7'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.3.
+rubygems_version: 3.3.4
 signing_key:
 specification_version: 4
 summary: Kafka messaging made easy!
metadata.gz.sig
CHANGED
Binary file
data/lib/water_drop.rb
DELETED
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-# External components
-# delegate should be removed because we don't need it, we just add it because of ruby-kafka
-%w[
-  concurrent/array
-  dry-configurable
-  dry/monitor/notifications
-  dry-validation
-  rdkafka
-  json
-  zeitwerk
-  securerandom
-].each { |lib| require lib }
-
-# WaterDrop library
-module WaterDrop
-  class << self
-    # @return [String] root path of this gem
-    def gem_root
-      Pathname.new(File.expand_path('..', __dir__))
-    end
-  end
-end
-
-Zeitwerk::Loader
-  .for_gem
-  .tap { |loader| loader.ignore("#{__dir__}/waterdrop.rb") }
-  .tap(&:setup)
-  .tap(&:eager_load)
-
-# Rdkafka uses a single global callback for things. We bypass that by injecting a manager for
-# each callback type. Callback manager allows us to register more than one callback
-# @note Those managers are also used by Karafka for consumer related statistics
-Rdkafka::Config.statistics_callback = WaterDrop::Instrumentation.statistics_callbacks
-Rdkafka::Config.error_callback = WaterDrop::Instrumentation.error_callbacks