waterdrop 2.0.5 → 2.2.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (41) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +5 -4
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +25 -0
  6. data/Gemfile +0 -2
  7. data/Gemfile.lock +23 -26
  8. data/README.md +40 -12
  9. data/config/errors.yml +1 -0
  10. data/lib/{water_drop → waterdrop}/config.rb +5 -14
  11. data/lib/waterdrop/contracts/base.rb +23 -0
  12. data/lib/{water_drop → waterdrop}/contracts/config.rb +12 -8
  13. data/lib/{water_drop → waterdrop}/contracts/message.rb +2 -4
  14. data/lib/{water_drop → waterdrop}/contracts.rb +0 -0
  15. data/lib/{water_drop → waterdrop}/errors.rb +0 -0
  16. data/lib/{water_drop → waterdrop}/instrumentation/callbacks/delivery.rb +0 -0
  17. data/lib/{water_drop → waterdrop}/instrumentation/callbacks/error.rb +4 -3
  18. data/lib/{water_drop → waterdrop}/instrumentation/callbacks/statistics.rb +0 -0
  19. data/lib/{water_drop → waterdrop}/instrumentation/callbacks/statistics_decorator.rb +0 -0
  20. data/lib/{water_drop → waterdrop}/instrumentation/callbacks_manager.rb +5 -1
  21. data/lib/{water_drop → waterdrop}/instrumentation/monitor.rb +1 -3
  22. data/lib/{water_drop → waterdrop}/instrumentation/stdout_listener.rb +15 -24
  23. data/lib/waterdrop/instrumentation/vendors/datadog/dashboard.json +1 -0
  24. data/lib/waterdrop/instrumentation/vendors/datadog/listener.rb +197 -0
  25. data/lib/{water_drop → waterdrop}/instrumentation.rb +0 -1
  26. data/lib/{water_drop → waterdrop}/patches/rdkafka/bindings.rb +0 -0
  27. data/lib/{water_drop → waterdrop}/patches/rdkafka/producer.rb +0 -0
  28. data/lib/{water_drop → waterdrop}/producer/async.rb +2 -2
  29. data/lib/{water_drop → waterdrop}/producer/buffer.rb +15 -8
  30. data/lib/{water_drop → waterdrop}/producer/builder.rb +0 -0
  31. data/lib/{water_drop → waterdrop}/producer/dummy_client.rb +0 -0
  32. data/lib/{water_drop → waterdrop}/producer/status.rb +0 -0
  33. data/lib/{water_drop → waterdrop}/producer/sync.rb +2 -2
  34. data/lib/{water_drop → waterdrop}/producer.rb +2 -8
  35. data/lib/{water_drop → waterdrop}/version.rb +1 -1
  36. data/lib/waterdrop.rb +35 -2
  37. data/waterdrop.gemspec +8 -3
  38. data.tar.gz.sig +0 -0
  39. metadata +34 -30
  40. metadata.gz.sig +0 -0
  41. data/lib/water_drop.rb +0 -36
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 310a3d7e1a4d0e5825b3a01f59b29c22a9f180c639951763bdf936a23c1119fd
4
- data.tar.gz: f6c0c498266ba067201e7983d5bdea7a0aee7810a403be1cd4f4b3d62ab60633
3
+ metadata.gz: db36e5203312e482d0d98a83d3bfb4fd5e274c342e5b3c63829d6bb5f616b52e
4
+ data.tar.gz: 6e49baf2f5ebd33aded769899fc0eb7f3c130ac97d9a6156fd491d000fd04306
5
5
  SHA512:
6
- metadata.gz: 4e486cfa6aa673e008eeaccb8cf920fbb30fce1d23277021d3c6a02e36ee14b8a280e9114b9be778bdb68ba4b07eb2d64371362c454c607edf3c4b57a26a0066
7
- data.tar.gz: 50301b9c5a5e67434f46247b5d1a83e4af2577e0f3b8f251a2795bc48aaba8c59135025e606b8143ca57560a2eac6666c530bd5d1b6059ce2e61d008e1eb9385
6
+ metadata.gz: 426d9f910ca5e24a7e86db1ceec672f895186ace830554708e8741d21bb83f929c9278ed5018bff6468b354523ac7cc790cba0b91b8c13dd2bb28e58f8a52dc6
7
+ data.tar.gz: b3871df6868ce287daf0765f522031cfdd566b96d0e5f8bf3ca1cd9724a7a68d5b3e4e1a33e0bdcc9b65063150d48cfc213e19348d19a6268a309b1c69dcab50
checksums.yaml.gz.sig CHANGED
Binary file
@@ -1,5 +1,7 @@
1
1
  name: ci
2
2
 
3
+ concurrency: ci-${{ github.ref }}
4
+
3
5
  on:
4
6
  pull_request:
5
7
  push:
@@ -14,12 +16,11 @@ jobs:
14
16
  fail-fast: false
15
17
  matrix:
16
18
  ruby:
19
+ - '3.1'
17
20
  - '3.0'
18
21
  - '2.7'
19
- - '2.6'
20
- - 'jruby-9.3.1.0'
21
22
  include:
22
- - ruby: '3.0'
23
+ - ruby: '3.1'
23
24
  coverage: 'true'
24
25
  steps:
25
26
  - uses: actions/checkout@v2
@@ -59,7 +60,7 @@ jobs:
59
60
  - name: Set up Ruby
60
61
  uses: ruby/setup-ruby@v1
61
62
  with:
62
- ruby-version: 3.0
63
+ ruby-version: 3.1
63
64
  - name: Install latest bundler
64
65
  run: gem install bundler --no-document
65
66
  - name: Install Diffend plugin
data/.ruby-version CHANGED
@@ -1 +1 @@
1
- 3.0.2
1
+ 3.1.0
data/CHANGELOG.md CHANGED
@@ -1,5 +1,30 @@
1
1
  # WaterDrop changelog
2
2
 
3
+ ## 2.2.0 (2022-02-18)
4
+ - Add Datadog listener for metrics + errors publishing
5
+ - Add Datadog example dashboard template
6
+ - Update Readme to show Dd instrumentation usage
7
+ - Align the directory namespace convention with gem name (waterdrop => WaterDrop)
8
+ - Introduce a common base for validation contracts
9
+ - Drop CI support for ruby 2.6
10
+ - Require all `kafka` settings to have symbol keys (compatibility with Karafka 2.0 and rdkafka)
11
+
12
+ ## 2.1.0 (2022-01-03)
13
+ - Ruby 3.1 support
14
+ - Change the error notification key from `error.emitted` to `error.occurred`.
15
+ - Normalize error tracking and make all the places publish errors into the same notification endpoint (`error.occurred`).
16
+ - Start semantic versioning WaterDrop.
17
+
18
+ ## 2.0.7 (2021-12-03)
19
+ - Source code metadata url added to the gemspec
20
+ - Replace `:producer` with `:producer_id` in events and update `StdoutListener` accordingly. This change aligns all the events in terms of not publishing the whole producer object in the events.
21
+ - Add `error.emitted` into the `StdoutListener`.
22
+ - Enable `StdoutLogger` in specs for additional integration coverage.
23
+
24
+ ## 2.0.6 (2021-12-01)
25
+ - #218 - Fixes a case, where dispatch of callbacks the same moment a new producer was created could cause a concurrency issue in the manager.
26
+ - Fix some unstable specs.
27
+
3
28
  ## 2.0.5 (2021-11-28)
4
29
 
5
30
  ### Bug fixes
data/Gemfile CHANGED
@@ -6,8 +6,6 @@ plugin 'diffend'
6
6
 
7
7
  gemspec
8
8
 
9
- gem 'rdkafka'
10
-
11
9
  group :development do
12
10
  gem 'byebug'
13
11
  end
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- waterdrop (2.0.5)
4
+ waterdrop (2.2.0)
5
5
  concurrent-ruby (>= 1.1)
6
6
  dry-configurable (~> 0.13)
7
7
  dry-monitor (~> 0.5)
@@ -12,17 +12,16 @@ PATH
12
12
  GEM
13
13
  remote: https://rubygems.org/
14
14
  specs:
15
- activesupport (6.1.4.1)
15
+ activesupport (7.0.2)
16
16
  concurrent-ruby (~> 1.0, >= 1.0.2)
17
17
  i18n (>= 1.6, < 2)
18
18
  minitest (>= 5.1)
19
19
  tzinfo (~> 2.0)
20
- zeitwerk (~> 2.3)
21
20
  byebug (11.1.3)
22
21
  concurrent-ruby (1.1.9)
23
- diff-lcs (1.4.4)
22
+ diff-lcs (1.5.0)
24
23
  docile (1.4.0)
25
- dry-configurable (0.13.0)
24
+ dry-configurable (0.14.0)
26
25
  concurrent-ruby (~> 1.0)
27
26
  dry-core (~> 0.6)
28
27
  dry-container (0.9.0)
@@ -34,7 +33,7 @@ GEM
34
33
  concurrent-ruby (~> 1.0)
35
34
  dry-core (~> 0.5, >= 0.5)
36
35
  dry-inflector (0.2.1)
37
- dry-initializer (3.0.4)
36
+ dry-initializer (3.1.1)
38
37
  dry-logic (1.2.0)
39
38
  concurrent-ruby (~> 1.0)
40
39
  dry-core (~> 0.5, >= 0.5)
@@ -63,29 +62,29 @@ GEM
63
62
  dry-schema (~> 1.8, >= 1.8.0)
64
63
  factory_bot (6.2.0)
65
64
  activesupport (>= 5.0.0)
66
- ffi (1.15.4)
67
- i18n (1.8.11)
65
+ ffi (1.15.5)
66
+ i18n (1.9.1)
68
67
  concurrent-ruby (~> 1.0)
69
68
  mini_portile2 (2.7.1)
70
- minitest (5.14.4)
69
+ minitest (5.15.0)
71
70
  rake (13.0.6)
72
- rdkafka (0.11.0)
71
+ rdkafka (0.11.1)
73
72
  ffi (~> 1.15)
74
- mini_portile2 (~> 2.7)
73
+ mini_portile2 (~> 2.6)
75
74
  rake (> 12)
76
- rspec (3.10.0)
77
- rspec-core (~> 3.10.0)
78
- rspec-expectations (~> 3.10.0)
79
- rspec-mocks (~> 3.10.0)
80
- rspec-core (3.10.1)
81
- rspec-support (~> 3.10.0)
82
- rspec-expectations (3.10.1)
75
+ rspec (3.11.0)
76
+ rspec-core (~> 3.11.0)
77
+ rspec-expectations (~> 3.11.0)
78
+ rspec-mocks (~> 3.11.0)
79
+ rspec-core (3.11.0)
80
+ rspec-support (~> 3.11.0)
81
+ rspec-expectations (3.11.0)
83
82
  diff-lcs (>= 1.2.0, < 2.0)
84
- rspec-support (~> 3.10.0)
85
- rspec-mocks (3.10.2)
83
+ rspec-support (~> 3.11.0)
84
+ rspec-mocks (3.11.0)
86
85
  diff-lcs (>= 1.2.0, < 2.0)
87
- rspec-support (~> 3.10.0)
88
- rspec-support (3.10.3)
86
+ rspec-support (~> 3.11.0)
87
+ rspec-support (3.11.0)
89
88
  simplecov (0.21.2)
90
89
  docile (~> 1.1)
91
90
  simplecov-html (~> 0.11)
@@ -94,19 +93,17 @@ GEM
94
93
  simplecov_json_formatter (0.1.3)
95
94
  tzinfo (2.0.4)
96
95
  concurrent-ruby (~> 1.0)
97
- zeitwerk (2.5.1)
96
+ zeitwerk (2.5.4)
98
97
 
99
98
  PLATFORMS
100
- x86_64-darwin
101
99
  x86_64-linux
102
100
 
103
101
  DEPENDENCIES
104
102
  byebug
105
103
  factory_bot
106
- rdkafka
107
104
  rspec
108
105
  simplecov
109
106
  waterdrop!
110
107
 
111
108
  BUNDLED WITH
112
- 2.2.31
109
+ 2.3.7
data/README.md CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  **Note**: Documentation presented here refers to WaterDrop `2.0.0`.
4
4
 
5
- WaterDrop `2.0` does **not** work with Karafka `1.*` and aims to either work as a standalone producer outside of Karafka `1.*` ecosystem or as a part of not yet released Karafka `2.0.*`.
5
+ WaterDrop `2.0` does **not** work with Karafka `1.*` and aims to either work as a standalone producer outside of Karafka `1.*` ecosystem or as a part of soon to be released Karafka `2.0.*`.
6
6
 
7
7
  Please refer to [this](https://github.com/karafka/waterdrop/tree/1.4) branch and its documentation for details about WaterDrop `1.*` usage.
8
8
 
@@ -36,16 +36,13 @@ It:
36
36
  - [Instrumentation](#instrumentation)
37
37
  * [Usage statistics](#usage-statistics)
38
38
  * [Error notifications](#error-notifications)
39
+ * [Datadog and StatsD integration](#datadog-and-statsd-integration)
39
40
  * [Forking and potential memory problems](#forking-and-potential-memory-problems)
40
41
  - [Note on contributions](#note-on-contributions)
41
42
 
42
43
  ## Installation
43
44
 
44
- ```ruby
45
- gem install waterdrop
46
- ```
47
-
48
- or add this to your Gemfile:
45
+ Add this to your Gemfile:
49
46
 
50
47
  ```ruby
51
48
  gem 'waterdrop'
@@ -201,7 +198,7 @@ producer.setup do |config|
201
198
  config.kafka = {
202
199
  'bootstrap.servers': 'localhost:9092',
203
200
  # Accumulate messages for at most 10 seconds
204
- 'queue.buffering.max.ms' => 10_000
201
+ 'queue.buffering.max.ms': 10_000
205
202
  }
206
203
  end
207
204
 
@@ -288,9 +285,40 @@ producer.close
288
285
 
289
286
  Note: The metrics returned may not be completely consistent between brokers, toppars and totals, due to the internal asynchronous nature of librdkafka. E.g., the top level tx total may be less than the sum of the broker tx values which it represents.
290
287
 
288
+ ### Datadog and StatsD integration
289
+
290
+ WaterDrop comes with (optional) full Datadog and StatsD integration that you can use. To use it:
291
+
292
+ ```ruby
293
+ # require datadog/statsd and the listener as it is not loaded by default
294
+ require 'datadog/statsd'
295
+ require 'waterdrop/instrumentation/vendors/datadog/listener'
296
+
297
+ # initialize your producer with statistics.interval.ms enabled so the metrics are published
298
+ producer = WaterDrop::Producer.new do |config|
299
+ config.deliver = true
300
+ config.kafka = {
301
+ 'bootstrap.servers': 'localhost:9092',
302
+ 'statistics.interval.ms': 1_000
303
+ }
304
+ end
305
+
306
+ # initialize the listener with statsd client
307
+ listener = ::WaterDrop::Instrumentation::Vendors::Datadog::Listener.new do |config|
308
+ config.client = Datadog::Statsd.new('localhost', 8125)
309
+ # Publish host as a tag alongside the rest of tags
310
+ config.default_tags = ["host:#{Socket.gethostname}"]
311
+ end
312
+
313
+ # Subscribe with your listener to your producer and you should be ready to go!
314
+ producer.monitor.subscribe(listener)
315
+ ```
316
+
317
+ You can also find a ready to import DataDog dashboard configuration file that you can use to monitor all of your producers.
318
+
291
319
  ### Error notifications
292
320
 
293
- Aside from errors related to publishing messages like `buffer.flushed_async.error`, WaterDrop allows you to listen to errors that occur in its internal background threads. Things like reconnecting to Kafka upon network errors and others unrelated to publishing messages are all available under `error.emitted` notification key. You can subscribe to this event to ensure your setup is healthy and without any problems that would otherwise go unnoticed as long as messages are delivered.
321
+ WaterDrop allows you to listen to all errors that occur while producing messages and in its internal background threads. Things like reconnecting to Kafka upon network errors and others unrelated to publishing messages are all available under `error.occurred` notification key. You can subscribe to this event to ensure your setup is healthy and without any problems that would otherwise go unnoticed as long as messages are delivered.
294
322
 
295
323
  ```ruby
296
324
  producer = WaterDrop::Producer.new do |config|
@@ -298,10 +326,10 @@ producer = WaterDrop::Producer.new do |config|
298
326
  config.kafka = { 'bootstrap.servers': 'localhost:9090' }
299
327
  end
300
328
 
301
- producer.monitor.subscribe('error.emitted') do |event|
329
+ producer.monitor.subscribe('error.occurred') do |event|
302
330
  error = event[:error]
303
331
 
304
- p "Internal error occurred: #{error}"
332
+ p "WaterDrop error occurred: #{error}"
305
333
  end
306
334
 
307
335
  # Run this code without Kafka cluster
@@ -313,9 +341,9 @@ end
313
341
 
314
342
  # After you stop your Kafka cluster, you will see a lot of those:
315
343
  #
316
- # Internal error occurred: Local: Broker transport failure (transport)
344
+ # WaterDrop error occurred: Local: Broker transport failure (transport)
317
345
  #
318
- # Internal error occurred: Local: Broker transport failure (transport)
346
+ # WaterDrop error occurred: Local: Broker transport failure (transport)
319
347
  ```
320
348
 
321
349
  ### Forking and potential memory problems
data/config/errors.yml CHANGED
@@ -4,3 +4,4 @@ en:
4
4
  invalid_key_type: all keys need to be of type String
5
5
  invalid_value_type: all values need to be of type String
6
6
  max_payload_size: is more than `max_payload_size` config value
7
+ kafka_key_must_be_a_symbol: All keys under the kafka settings scope need to be symbols
@@ -9,7 +9,7 @@ module WaterDrop
9
9
 
10
10
  # Defaults for kafka settings, that will be overwritten only if not present already
11
11
  KAFKA_DEFAULTS = {
12
- 'client.id' => 'waterdrop'
12
+ 'client.id': 'waterdrop'
13
13
  }.freeze
14
14
 
15
15
  private_constant :KAFKA_DEFAULTS
@@ -17,7 +17,8 @@ module WaterDrop
17
17
  # WaterDrop options
18
18
  #
19
19
  # option [String] id of the producer. This can be helpful when building producer specific
20
- # instrumentation or loggers. It is not the kafka producer id
20
+ # instrumentation or loggers. It is not the kafka client id. It is an id that should be
21
+ # unique for each of the producers
21
22
  setting(
22
23
  :id,
23
24
  default: false,
@@ -62,7 +63,8 @@ module WaterDrop
62
63
  yield(config)
63
64
 
64
65
  merge_kafka_defaults!(config)
65
- validate!(config.to_h)
66
+
67
+ Contracts::Config.new.validate!(config.to_h, Errors::ConfigurationInvalidError)
66
68
 
67
69
  ::Rdkafka::Config.logger = config.logger
68
70
  end
@@ -81,16 +83,5 @@ module WaterDrop
81
83
  config.kafka[key] = value
82
84
  end
83
85
  end
84
-
85
- # Validates the configuration and if anything is wrong, will raise an exception
86
- # @param config_hash [Hash] config hash with setup details
87
- # @raise [WaterDrop::Errors::ConfigurationInvalidError] raised when something is wrong with
88
- # the configuration
89
- def validate!(config_hash)
90
- result = Contracts::Config.new.call(config_hash)
91
- return true if result.success?
92
-
93
- raise Errors::ConfigurationInvalidError, result.errors.to_h
94
- end
95
86
  end
96
87
  end
@@ -0,0 +1,23 @@
1
+ # frozen_string_literal: true
2
+
3
+ module WaterDrop
4
+ module Contracts
5
+ # Base for all the contracts in WaterDrop
6
+ class Base < Dry::Validation::Contract
7
+ config.messages.load_paths << File.join(WaterDrop.gem_root, 'config', 'errors.yml')
8
+
9
+ # @param data [Hash] data for validation
10
+ # @param error_class [Class] error class that should be used when validation fails
11
+ # @return [Boolean] true
12
+ # @raise [StandardError] any error provided in the error_class that inherits from the
13
+ # standard error
14
+ def validate!(data, error_class)
15
+ result = call(data)
16
+
17
+ return true if result.success?
18
+
19
+ raise error_class, result.errors.to_h
20
+ end
21
+ end
22
+ end
23
+ end
@@ -3,12 +3,7 @@
3
3
  module WaterDrop
4
4
  module Contracts
5
5
  # Contract with validation rules for WaterDrop configuration details
6
- class Config < Dry::Validation::Contract
7
- # Ensure valid format of each seed broker so that rdkafka doesn't fail silently
8
- SEED_BROKER_FORMAT_REGEXP = %r{\A([^:/,]+:[0-9]+)(,[^:/,]+:[0-9]+)*\z}.freeze
9
-
10
- private_constant :SEED_BROKER_FORMAT_REGEXP
11
-
6
+ class Config < Base
12
7
  params do
13
8
  required(:id).filled(:str?)
14
9
  required(:logger).filled
@@ -16,9 +11,18 @@ module WaterDrop
16
11
  required(:max_payload_size).filled(:int?, gteq?: 1)
17
12
  required(:max_wait_timeout).filled(:number?, gteq?: 0)
18
13
  required(:wait_timeout).filled(:number?, gt?: 0)
14
+ required(:kafka).filled(:hash?)
15
+ end
16
+
17
+ # rdkafka allows both symbols and strings as keys for config but then casts them to strings
18
+ # This can be confusing, so we expect all keys to be symbolized
19
+ rule(:kafka) do
20
+ next unless value.is_a?(Hash)
21
+
22
+ value.each_key do |key|
23
+ next if key.is_a?(Symbol)
19
24
 
20
- required(:kafka).schema do
21
- required(:'bootstrap.servers').filled(:str?, format?: SEED_BROKER_FORMAT_REGEXP)
25
+ key(:"kafka.#{key}").failure(:kafka_key_must_be_a_symbol)
22
26
  end
23
27
  end
24
28
  end
@@ -4,17 +4,15 @@ module WaterDrop
4
4
  module Contracts
5
5
  # Contract with validation rules for validating that all the message options that
6
6
  # we provide to producer are valid and usable
7
- class Message < Dry::Validation::Contract
7
+ class Message < Base
8
8
  # Regex to check that topic has a valid format
9
- TOPIC_REGEXP = /\A(\w|-|\.)+\z/.freeze
9
+ TOPIC_REGEXP = /\A(\w|-|\.)+\z/
10
10
 
11
11
  # Checks, that the given value is a string
12
12
  STRING_ASSERTION = ->(value) { value.is_a?(String) }.to_proc
13
13
 
14
14
  private_constant :TOPIC_REGEXP, :STRING_ASSERTION
15
15
 
16
- config.messages.load_paths << File.join(WaterDrop.gem_root, 'config', 'errors.yml')
17
-
18
16
  option :max_payload_size
19
17
 
20
18
  params do
File without changes
File without changes
@@ -17,16 +17,17 @@ module WaterDrop
17
17
  # Runs the instrumentation monitor with error
18
18
  # @param client_name [String] rdkafka client name
19
19
  # @param error [Rdkafka::Error] error that occurred
20
- # @note If will only instrument on errors of the client of our producer
20
+ # @note It will only instrument on errors of the client of our producer
21
21
  def call(client_name, error)
22
22
  # Emit only errors related to our client
23
23
  # Same as with statistics (more explanation there)
24
24
  return unless @client_name == client_name
25
25
 
26
26
  @monitor.instrument(
27
- 'error.emitted',
27
+ 'error.occurred',
28
+ error: error,
28
29
  producer_id: @producer_id,
29
- error: error
30
+ type: 'librdkafka.error'
30
31
  )
31
32
  end
32
33
  end
@@ -13,8 +13,12 @@ module WaterDrop
13
13
  # Invokes all the callbacks registered one after another
14
14
  #
15
15
  # @param args [Object] any args that should go to the callbacks
16
+ # @note We do not use `#each_value` here on purpose. With it being used, we cannot dispatch
17
+ # callbacks and add new at the same time. Since we don't know when and in what thread
18
+ # things are going to be added to the manager, we need to extract values into an array and
19
+ # run it. That way we can add new things the same time.
16
20
  def call(*args)
17
- @callbacks.each_value { |a| a.call(*args) }
21
+ @callbacks.values.each { |callback| callback.call(*args) }
18
22
  end
19
23
 
20
24
  # Adds a callback to the manager
@@ -24,13 +24,11 @@ module WaterDrop
24
24
  messages.buffered
25
25
 
26
26
  buffer.flushed_async
27
- buffer.flushed_async.error
28
27
  buffer.flushed_sync
29
- buffer.flushed_sync.error
30
28
 
31
29
  statistics.emitted
32
30
 
33
- error.emitted
31
+ error.occurred
34
32
  ].freeze
35
33
 
36
34
  private_constant :EVENTS
@@ -51,7 +51,7 @@ module WaterDrop
51
51
  message = event[:message]
52
52
 
53
53
  info(event, "Buffering of a message to '#{message[:topic]}' topic")
54
- debug(event, [message, event[:producer].messages.size])
54
+ debug(event, [message])
55
55
  end
56
56
 
57
57
  # @param event [Dry::Events::Event] event that happened with the details
@@ -59,7 +59,7 @@ module WaterDrop
59
59
  messages = event[:messages]
60
60
 
61
61
  info(event, "Buffering of #{messages.size} messages")
62
- debug(event, [messages, event[:producer].messages.size])
62
+ debug(event, [messages, messages.size])
63
63
  end
64
64
 
65
65
  # @param event [Dry::Events::Event] event that happened with the details
@@ -70,15 +70,6 @@ module WaterDrop
70
70
  debug(event, messages)
71
71
  end
72
72
 
73
- # @param event [Dry::Events::Event] event that happened with the details
74
- def on_buffer_flushed_async_error(event)
75
- messages = event[:messages]
76
- error = event[:error]
77
-
78
- error(event, "Async flushing of #{messages.size} failed due to: #{error}")
79
- debug(event, messages)
80
- end
81
-
82
73
  # @param event [Dry::Events::Event] event that happened with the details
83
74
  def on_buffer_flushed_sync(event)
84
75
  messages = event[:messages]
@@ -87,19 +78,19 @@ module WaterDrop
87
78
  debug(event, messages)
88
79
  end
89
80
 
90
- # @param event [Dry::Events::Event] event that happened with the details
91
- def on_buffer_flushed_sync_error(event)
92
- messages = event[:dispatched]
93
- error = event[:error]
94
-
95
- error(event, "Sync flushing of #{messages.size} failed due to: #{error}")
96
- debug(event, messages)
97
- end
98
-
99
81
  # @param event [Dry::Events::Event] event that happened with the details
100
82
  def on_producer_closed(event)
101
83
  info event, 'Closing producer'
102
- debug event, event[:producer].messages.size
84
+ debug event, ''
85
+ end
86
+
87
+ # @param event [Dry::Events::Event] event that happened with the error details
88
+ def on_error_occurred(event)
89
+ error = event[:error]
90
+ type = event[:type]
91
+
92
+ error(event, "Error occurred: #{error} - #{type}")
93
+ debug(event, '')
103
94
  end
104
95
 
105
96
  private
@@ -107,19 +98,19 @@ module WaterDrop
107
98
  # @param event [Dry::Events::Event] event that happened with the details
108
99
  # @param log_message [String] message we want to publish
109
100
  def debug(event, log_message)
110
- @logger.debug("[#{event[:producer].id}] #{log_message}")
101
+ @logger.debug("[#{event[:producer_id]}] #{log_message}")
111
102
  end
112
103
 
113
104
  # @param event [Dry::Events::Event] event that happened with the details
114
105
  # @param log_message [String] message we want to publish
115
106
  def info(event, log_message)
116
- @logger.info("[#{event[:producer].id}] #{log_message} took #{event[:time]} ms")
107
+ @logger.info("[#{event[:producer_id]}] #{log_message} took #{event[:time]} ms")
117
108
  end
118
109
 
119
110
  # @param event [Dry::Events::Event] event that happened with the details
120
111
  # @param log_message [String] message we want to publish
121
112
  def error(event, log_message)
122
- @logger.error("[#{event[:producer].id}] #{log_message}")
113
+ @logger.error("[#{event[:producer_id]}] #{log_message}")
123
114
  end
124
115
  end
125
116
  end
@@ -0,0 +1 @@
1
+ {"title":"WaterDrop producer example dashboard","description":"This dashboard include example setup for monitoring activity of your WaterDrop producer","widgets":[{"id":243951318,"definition":{"title":"Messages produced","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"produced sync","formula":"query1"},{"alias":"produced async","formula":"query2"},{"alias":"flushed sync","formula":"query3"},{"alias":"flushed async","formula":"query4"},{"alias":"acknowledged","formula":"query5"}],"response_format":"timeseries","queries":[{"query":"sum:waterdrop.produced_sync{*}.as_count()","data_source":"metrics","name":"query1"},{"query":"sum:waterdrop.produced_async{*}.as_count()","data_source":"metrics","name":"query2"},{"query":"sum:waterdrop.flushed_sync{*}.as_count()","data_source":"metrics","name":"query3"},{"query":"sum:waterdrop.flushed_async{*}.as_count()","data_source":"metrics","name":"query4"},{"query":"sum:waterdrop.acknowledged{*}.as_count()","data_source":"metrics","name":"query5"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":1979626566852990,"definition":{"title":"Messages buffer size","title_size":"16","title_align":"left","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"max","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.buffer.size.max{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}]}},{"id":243951221,"definition":{"title":"Kafka broker API 
calls","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"API calls","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"sum:waterdrop.calls{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":243951952,"definition":{"title":"Producer queue size","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"Queue size average","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"max:waterdrop.queue.size.avg{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Queue size max","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"max:waterdrop.queue.size.max{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":243951263,"definition":{"title":"Producer queue latency","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"Average latency","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.queue.latency.avg{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Latency 
p95","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.queue.latency.p95{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Latency p99","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.queue.latency.p99{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":243951276,"definition":{"title":"Producer network latency","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"alias":"Average latency","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.request_size.avg{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Latency p95","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.network.latency.p95{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"},{"formulas":[{"alias":"Latency p99","formula":"query1"}],"response_format":"timeseries","queries":[{"query":"avg:waterdrop.network.latency.p99{*}","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}},{"id":243954928,"definition":{"title":"Producer 
errors","show_legend":true,"legend_layout":"auto","legend_columns":["avg","min","max","value","sum"],"type":"timeseries","requests":[{"formulas":[{"formula":"query1"}],"response_format":"timeseries","queries":[{"query":"sum:waterdrop.error_occurred{*}.as_count()","data_source":"metrics","name":"query1"}],"style":{"palette":"dog_classic","line_type":"solid","line_width":"normal"},"display_type":"line"}],"yaxis":{"include_zero":true,"scale":"linear","label":"","min":"auto","max":"auto"}}}],"template_variables":[],"layout_type":"ordered","is_read_only":false,"notify_list":[],"reflow_type":"auto","id":"rnr-kgh-dna"}
@@ -0,0 +1,197 @@
1
# frozen_string_literal: true

module WaterDrop
  module Instrumentation
    # Namespace for vendor specific instrumentation
    module Vendors
      # Datadog specific instrumentation
      module Datadog
        # Listener that can be used to subscribe to WaterDrop producer to receive stats via StatsD
        # and/or Datadog
        #
        # @note You need to setup the `dogstatsd-ruby` client and assign it via the `client` setting
        class Listener
          include Dry::Configurable

          # Value object for storing a single rdkafka metric publishing details
          #
          # type         - StatsD client method used to publish (:count, :gauge or :histogram)
          # scope        - where in the rdkafka statistics the value lives (:root or :brokers)
          # name         - metric name under which we publish (wrapped in the namespace)
          # key_location - a single key (String) or a nested keys path (Array) in the stats hash
          RdKafkaMetric = Struct.new(:type, :scope, :name, :key_location)

          # Namespace under which the DD metrics should be published
          setting :namespace, default: 'waterdrop', reader: true

          # Datadog client that we should use to publish the metrics
          setting :client, reader: true

          # Default tags we want to publish (for example hostname)
          # Format as follows (example for hostname): `["host:#{Socket.gethostname}"]`
          setting :default_tags, default: [], reader: true

          # All the rdkafka metrics we want to publish
          #
          # By default we publish quite a lot so this can be tuned
          # Note, that the ones with `_d` come from WaterDrop, not rdkafka or Kafka
          setting :rd_kafka_metrics, reader: true, default: [
            # Client metrics
            RdKafkaMetric.new(:count, :root, 'calls', 'tx_d'),
            RdKafkaMetric.new(:histogram, :root, 'queue.size', 'msg_cnt_d'),

            # Broker metrics
            RdKafkaMetric.new(:count, :brokers, 'deliver.attempts', 'txretries_d'),
            RdKafkaMetric.new(:count, :brokers, 'deliver.errors', 'txerrs_d'),
            RdKafkaMetric.new(:count, :brokers, 'receive.errors', 'rxerrs_d'),
            RdKafkaMetric.new(:gauge, :brokers, 'queue.latency.avg', %w[outbuf_latency avg]),
            RdKafkaMetric.new(:gauge, :brokers, 'queue.latency.p95', %w[outbuf_latency p95]),
            RdKafkaMetric.new(:gauge, :brokers, 'queue.latency.p99', %w[outbuf_latency p99]),
            RdKafkaMetric.new(:gauge, :brokers, 'network.latency.avg', %w[rtt avg]),
            RdKafkaMetric.new(:gauge, :brokers, 'network.latency.p95', %w[rtt p95]),
            RdKafkaMetric.new(:gauge, :brokers, 'network.latency.p99', %w[rtt p99])
          ].freeze

          # @param block [Proc] configuration block
          def initialize(&block)
            setup(&block) if block
          end

          # @param block [Proc] configuration block
          # @note We define this alias to be consistent with `WaterDrop#setup`
          def setup(&block)
            configure(&block)
          end

          # Hooks up to WaterDrop instrumentation for emitted statistics
          #
          # @param event [Dry::Events::Event]
          def on_statistics_emitted(event)
            statistics = event[:statistics]

            rd_kafka_metrics.each do |metric|
              report_metric(metric, statistics)
            end
          end

          # Increases the errors count by 1
          #
          # @param _event [Dry::Events::Event]
          def on_error_occurred(_event)
            client.count(
              namespaced_metric('error_occurred'),
              1,
              tags: default_tags
            )
          end

          # Increases acknowledged messages counter
          # @param _event [Dry::Events::Event]
          def on_message_acknowledged(_event)
            client.increment(
              namespaced_metric('acknowledged'),
              tags: default_tags
            )
          end

          # Defines per-topic counters for single and batch message production
          %i[
            produced_sync
            produced_async
          ].each do |event_scope|
            class_eval <<~METHODS, __FILE__, __LINE__ + 1
              # @param event [Dry::Events::Event]
              def on_message_#{event_scope}(event)
                report_message(event[:message][:topic], :#{event_scope})
              end

              # @param event [Dry::Events::Event]
              def on_messages_#{event_scope}(event)
                event[:messages].each do |message|
                  report_message(message[:topic], :#{event_scope})
                end
              end
            METHODS
          end

          # Reports the buffer usage when anything is added to the buffer
          %i[
            message_buffered
            messages_buffered
          ].each do |event_scope|
            class_eval <<~METHODS, __FILE__, __LINE__ + 1
              # @param event [Dry::Events::Event]
              def on_#{event_scope}(event)
                client.histogram(
                  namespaced_metric('buffer.size'),
                  event[:buffer].size,
                  tags: default_tags
                )
              end
            METHODS
          end

          # Events that support many messages only
          # Reports data flushing operation (production from the buffer)
          %i[
            flushed_sync
            flushed_async
          ].each do |event_scope|
            class_eval <<~METHODS, __FILE__, __LINE__ + 1
              # @param event [Dry::Events::Event]
              def on_buffer_#{event_scope}(event)
                event[:messages].each do |message|
                  report_message(message[:topic], :#{event_scope})
                end
              end
            METHODS
          end

          private

          # Report that a message has been produced to a topic.
          # @param topic [String] Kafka topic
          # @param method_name [Symbol] method from which this message operation comes
          def report_message(topic, method_name)
            client.increment(
              namespaced_metric(method_name),
              tags: default_tags + ["topic:#{topic}"]
            )
          end

          # Wraps metric name in listener's namespace
          # @param metric_name [String] RdKafkaMetric name
          # @return [String]
          def namespaced_metric(metric_name)
            "#{namespace}.#{metric_name}"
          end

          # Reports a given metric statistics to Datadog
          # @param metric [RdKafkaMetric] metric value object
          # @param statistics [Hash] hash with all the statistics emitted
          # @raise [ArgumentError] when the metric scope is not supported
          def report_metric(metric, statistics)
            case metric.scope
            when :root
              # Support both a plain key and a nested keys path. The previous
              # `statistics.fetch(*metric.key_location)` splat would have treated
              # the second element of an Array location as `fetch`'s fallback
              # default instead of a nested key
              value =
                if metric.key_location.is_a?(Array)
                  statistics.dig(*metric.key_location)
                else
                  statistics.fetch(metric.key_location)
                end

              client.public_send(
                metric.type,
                namespaced_metric(metric.name),
                value,
                tags: default_tags
              )
            when :brokers
              statistics.fetch('brokers').each_value do |broker_statistics|
                # Skip bootstrap nodes
                # Bootstrap nodes have nodeid -1, other nodes have positive
                # node ids
                next if broker_statistics['nodeid'] == -1

                client.public_send(
                  metric.type,
                  namespaced_metric(metric.name),
                  broker_statistics.dig(*metric.key_location),
                  tags: default_tags + ["broker:#{broker_statistics['nodename']}"]
                )
              end
            else
              raise ArgumentError, metric.scope
            end
          end
        end
      end
    end
  end
end
@@ -2,7 +2,6 @@
2
2
 
3
3
  module WaterDrop
4
4
  # Namespace for all the things related with WaterDrop instrumentation process
5
- # @note We do not
6
5
  module Instrumentation
7
6
  class << self
8
7
  # Builds a manager for statistics callbacks
@@ -19,7 +19,7 @@ module WaterDrop
19
19
 
20
20
  @monitor.instrument(
21
21
  'message.produced_async',
22
- producer: self,
22
+ producer_id: id,
23
23
  message: message
24
24
  ) { client.produce(**message) }
25
25
  end
@@ -40,7 +40,7 @@ module WaterDrop
40
40
 
41
41
  @monitor.instrument(
42
42
  'messages.produced_async',
43
- producer: self,
43
+ producer_id: id,
44
44
  messages: messages
45
45
  ) do
46
46
  messages.map { |message| client.produce(**message) }
@@ -23,8 +23,9 @@ module WaterDrop
23
23
 
24
24
  @monitor.instrument(
25
25
  'message.buffered',
26
- producer: self,
27
- message: message
26
+ producer_id: id,
27
+ message: message,
28
+ buffer: @messages
28
29
  ) { @messages << message }
29
30
  end
30
31
 
@@ -40,8 +41,9 @@ module WaterDrop
40
41
 
41
42
  @monitor.instrument(
42
43
  'messages.buffered',
43
- producer: self,
44
- messages: messages
44
+ producer_id: id,
45
+ messages: messages,
46
+ buffer: @messages
45
47
  ) do
46
48
  messages.each { |message| @messages << message }
47
49
  messages
@@ -56,7 +58,7 @@ module WaterDrop
56
58
 
57
59
  @monitor.instrument(
58
60
  'buffer.flushed_async',
59
- producer: self,
61
+ producer_id: id,
60
62
  messages: @messages
61
63
  ) { flush(false) }
62
64
  end
@@ -69,7 +71,7 @@ module WaterDrop
69
71
 
70
72
  @monitor.instrument(
71
73
  'buffer.flushed_sync',
72
- producer: self,
74
+ producer_id: id,
73
75
  messages: @messages
74
76
  ) { flush(true) }
75
77
  end
@@ -103,8 +105,13 @@ module WaterDrop
103
105
  )
104
106
  end
105
107
  rescue *RESCUED_ERRORS => e
106
- key = sync ? 'buffer.flushed_sync.error' : 'buffer.flush_async.error'
107
- @monitor.instrument(key, producer: self, error: e, dispatched: dispatched)
108
+ @monitor.instrument(
109
+ 'error.occurred',
110
+ error: e,
111
+ producer_id: id,
112
+ dispatched: dispatched,
113
+ type: sync ? 'buffer.flushed_sync.error' : 'buffer.flush_async.error'
114
+ )
108
115
 
109
116
  raise Errors::FlushFailureError.new(dispatched)
110
117
  end
File without changes
File without changes
@@ -21,7 +21,7 @@ module WaterDrop
21
21
 
22
22
  @monitor.instrument(
23
23
  'message.produced_sync',
24
- producer: self,
24
+ producer_id: id,
25
25
  message: message
26
26
  ) do
27
27
  client
@@ -49,7 +49,7 @@ module WaterDrop
49
49
  ensure_active!
50
50
  messages.each { |message| validate_message!(message) }
51
51
 
52
- @monitor.instrument('messages.produced_sync', producer: self, messages: messages) do
52
+ @monitor.instrument('messages.produced_sync', producer_id: id, messages: messages) do
53
53
  messages
54
54
  .map { |message| client.produce(**message) }
55
55
  .map! do |handler|
@@ -106,7 +106,7 @@ module WaterDrop
106
106
 
107
107
  @monitor.instrument(
108
108
  'producer.closed',
109
- producer: self
109
+ producer_id: id
110
110
  ) do
111
111
  @status.closing!
112
112
 
@@ -150,13 +150,7 @@ module WaterDrop
150
150
  # @param message [Hash] message we want to send
151
151
  # @raise [Karafka::Errors::MessageInvalidError]
152
152
  def validate_message!(message)
153
- result = @contract.call(message)
154
- return if result.success?
155
-
156
- raise Errors::MessageInvalidError, [
157
- result.errors.to_h,
158
- message
159
- ]
153
+ @contract.validate!(message, Errors::MessageInvalidError)
160
154
  end
161
155
  end
162
156
  end
@@ -3,5 +3,5 @@
3
3
  # WaterDrop library
4
4
  module WaterDrop
5
5
  # Current WaterDrop version
6
- VERSION = '2.0.5'
6
+ VERSION = '2.2.0'
7
7
  end
data/lib/waterdrop.rb CHANGED
@@ -1,4 +1,37 @@
1
1
# frozen_string_literal: true

# External components
# delegate should be removed because we don't need it, we just add it because of ruby-kafka
# pathname is required explicitly because `WaterDrop.gem_root` builds a Pathname and we
# should not rely on it being required transitively by other dependencies
%w[
  concurrent/array
  dry-configurable
  dry/monitor/notifications
  dry-validation
  rdkafka
  json
  pathname
  zeitwerk
  securerandom
].each { |lib| require lib }

# WaterDrop library
module WaterDrop
  class << self
    # @return [Pathname] root path of this gem
    def gem_root
      Pathname.new(File.expand_path('..', __dir__))
    end
  end
end

loader = Zeitwerk::Loader.for_gem
loader.inflector.inflect('waterdrop' => 'WaterDrop')
# Do not load vendors instrumentation components. Those need to be required manually if needed
loader.ignore("#{__dir__}/waterdrop/instrumentation/vendors/**/*.rb")
loader.setup
loader.eager_load

# Rdkafka uses a single global callback for things. We bypass that by injecting a manager for
# each callback type. Callback manager allows us to register more than one callback
# @note Those managers are also used by Karafka for consumer related statistics
Rdkafka::Config.statistics_callback = WaterDrop::Instrumentation.statistics_callbacks
Rdkafka::Config.error_callback = WaterDrop::Instrumentation.error_callbacks
data/waterdrop.gemspec CHANGED
@@ -3,7 +3,7 @@
3
3
  lib = File.expand_path('lib', __dir__)
4
4
  $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
5
5
 
6
- require 'water_drop/version'
6
+ require 'waterdrop/version'
7
7
 
8
8
  Gem::Specification.new do |spec|
9
9
  spec.name = 'waterdrop'
@@ -11,7 +11,7 @@ Gem::Specification.new do |spec|
11
11
  spec.platform = Gem::Platform::RUBY
12
12
  spec.authors = ['Maciej Mensfeld']
13
13
  spec.email = %w[maciej@mensfeld.pl]
14
- spec.homepage = 'https://github.com/karafka/waterdrop'
14
+ spec.homepage = 'https://karafka.io'
15
15
  spec.summary = 'Kafka messaging made easy!'
16
16
  spec.description = spec.summary
17
17
  spec.license = 'MIT'
@@ -23,7 +23,7 @@ Gem::Specification.new do |spec|
23
23
  spec.add_dependency 'rdkafka', '>= 0.10'
24
24
  spec.add_dependency 'zeitwerk', '~> 2.3'
25
25
 
26
- spec.required_ruby_version = '>= 2.6.0'
26
+ spec.required_ruby_version = '>= 2.7'
27
27
 
28
28
  if $PROGRAM_NAME.end_with?('gem')
29
29
  spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
@@ -33,4 +33,9 @@ Gem::Specification.new do |spec|
33
33
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
34
34
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
35
35
  spec.require_paths = %w[lib]
36
+
37
+ spec.metadata = {
38
+ 'source_code_uri' => 'https://github.com/karafka/waterdrop',
39
+ 'rubygems_mfa_required' => 'true'
40
+ }
36
41
  end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: waterdrop
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.0.5
4
+ version: 2.2.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Maciej Mensfeld
@@ -34,7 +34,7 @@ cert_chain:
34
34
  R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
35
35
  pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
36
36
  -----END CERTIFICATE-----
37
- date: 2021-11-28 00:00:00.000000000 Z
37
+ date: 2022-02-18 00:00:00.000000000 Z
38
38
  dependencies:
39
39
  - !ruby/object:Gem::Dependency
40
40
  name: concurrent-ruby
@@ -142,37 +142,41 @@ files:
142
142
  - certs/mensfeld.pem
143
143
  - config/errors.yml
144
144
  - docker-compose.yml
145
- - lib/water_drop.rb
146
- - lib/water_drop/config.rb
147
- - lib/water_drop/contracts.rb
148
- - lib/water_drop/contracts/config.rb
149
- - lib/water_drop/contracts/message.rb
150
- - lib/water_drop/errors.rb
151
- - lib/water_drop/instrumentation.rb
152
- - lib/water_drop/instrumentation/callbacks/delivery.rb
153
- - lib/water_drop/instrumentation/callbacks/error.rb
154
- - lib/water_drop/instrumentation/callbacks/statistics.rb
155
- - lib/water_drop/instrumentation/callbacks/statistics_decorator.rb
156
- - lib/water_drop/instrumentation/callbacks_manager.rb
157
- - lib/water_drop/instrumentation/monitor.rb
158
- - lib/water_drop/instrumentation/stdout_listener.rb
159
- - lib/water_drop/patches/rdkafka/bindings.rb
160
- - lib/water_drop/patches/rdkafka/producer.rb
161
- - lib/water_drop/producer.rb
162
- - lib/water_drop/producer/async.rb
163
- - lib/water_drop/producer/buffer.rb
164
- - lib/water_drop/producer/builder.rb
165
- - lib/water_drop/producer/dummy_client.rb
166
- - lib/water_drop/producer/status.rb
167
- - lib/water_drop/producer/sync.rb
168
- - lib/water_drop/version.rb
169
145
  - lib/waterdrop.rb
146
+ - lib/waterdrop/config.rb
147
+ - lib/waterdrop/contracts.rb
148
+ - lib/waterdrop/contracts/base.rb
149
+ - lib/waterdrop/contracts/config.rb
150
+ - lib/waterdrop/contracts/message.rb
151
+ - lib/waterdrop/errors.rb
152
+ - lib/waterdrop/instrumentation.rb
153
+ - lib/waterdrop/instrumentation/callbacks/delivery.rb
154
+ - lib/waterdrop/instrumentation/callbacks/error.rb
155
+ - lib/waterdrop/instrumentation/callbacks/statistics.rb
156
+ - lib/waterdrop/instrumentation/callbacks/statistics_decorator.rb
157
+ - lib/waterdrop/instrumentation/callbacks_manager.rb
158
+ - lib/waterdrop/instrumentation/monitor.rb
159
+ - lib/waterdrop/instrumentation/stdout_listener.rb
160
+ - lib/waterdrop/instrumentation/vendors/datadog/dashboard.json
161
+ - lib/waterdrop/instrumentation/vendors/datadog/listener.rb
162
+ - lib/waterdrop/patches/rdkafka/bindings.rb
163
+ - lib/waterdrop/patches/rdkafka/producer.rb
164
+ - lib/waterdrop/producer.rb
165
+ - lib/waterdrop/producer/async.rb
166
+ - lib/waterdrop/producer/buffer.rb
167
+ - lib/waterdrop/producer/builder.rb
168
+ - lib/waterdrop/producer/dummy_client.rb
169
+ - lib/waterdrop/producer/status.rb
170
+ - lib/waterdrop/producer/sync.rb
171
+ - lib/waterdrop/version.rb
170
172
  - log/.gitkeep
171
173
  - waterdrop.gemspec
172
- homepage: https://github.com/karafka/waterdrop
174
+ homepage: https://karafka.io
173
175
  licenses:
174
176
  - MIT
175
- metadata: {}
177
+ metadata:
178
+ source_code_uri: https://github.com/karafka/waterdrop
179
+ rubygems_mfa_required: 'true'
176
180
  post_install_message:
177
181
  rdoc_options: []
178
182
  require_paths:
@@ -181,14 +185,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
181
185
  requirements:
182
186
  - - ">="
183
187
  - !ruby/object:Gem::Version
184
- version: 2.6.0
188
+ version: '2.7'
185
189
  required_rubygems_version: !ruby/object:Gem::Requirement
186
190
  requirements:
187
191
  - - ">="
188
192
  - !ruby/object:Gem::Version
189
193
  version: '0'
190
194
  requirements: []
191
- rubygems_version: 3.2.25
195
+ rubygems_version: 3.3.4
192
196
  signing_key:
193
197
  specification_version: 4
194
198
  summary: Kafka messaging made easy!
metadata.gz.sig CHANGED
Binary file
data/lib/water_drop.rb DELETED
@@ -1,36 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- # External components
4
- # delegate should be removed because we don't need it, we just add it because of ruby-kafka
5
- %w[
6
- concurrent/array
7
- dry-configurable
8
- dry/monitor/notifications
9
- dry-validation
10
- rdkafka
11
- json
12
- zeitwerk
13
- securerandom
14
- ].each { |lib| require lib }
15
-
16
- # WaterDrop library
17
- module WaterDrop
18
- class << self
19
- # @return [String] root path of this gem
20
- def gem_root
21
- Pathname.new(File.expand_path('..', __dir__))
22
- end
23
- end
24
- end
25
-
26
- Zeitwerk::Loader
27
- .for_gem
28
- .tap { |loader| loader.ignore("#{__dir__}/waterdrop.rb") }
29
- .tap(&:setup)
30
- .tap(&:eager_load)
31
-
32
- # Rdkafka uses a single global callback for things. We bypass that by injecting a manager for
33
- # each callback type. Callback manager allows us to register more than one callback
34
- # @note Those managers are also used by Karafka for consumer related statistics
35
- Rdkafka::Config.statistics_callback = WaterDrop::Instrumentation.statistics_callbacks
36
- Rdkafka::Config.error_callback = WaterDrop::Instrumentation.error_callbacks