waterdrop 1.4.0 → 2.0.0
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data.tar.gz.sig +0 -0
- data/.diffend.yml +3 -0
- data/.github/workflows/ci.yml +53 -0
- data/.gitignore +2 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +9 -0
- data/Gemfile +9 -0
- data/Gemfile.lock +51 -33
- data/LICENSE +165 -0
- data/README.md +192 -53
- data/config/errors.yml +3 -16
- data/docker-compose.yml +17 -0
- data/lib/water_drop.rb +4 -24
- data/lib/water_drop/config.rb +41 -142
- data/lib/water_drop/contracts.rb +0 -2
- data/lib/water_drop/contracts/config.rb +8 -121
- data/lib/water_drop/contracts/message.rb +41 -0
- data/lib/water_drop/errors.rb +31 -5
- data/lib/water_drop/instrumentation.rb +7 -0
- data/lib/water_drop/instrumentation/monitor.rb +16 -23
- data/lib/water_drop/instrumentation/stdout_listener.rb +113 -32
- data/lib/water_drop/producer.rb +142 -0
- data/lib/water_drop/producer/async.rb +51 -0
- data/lib/water_drop/producer/buffer.rb +113 -0
- data/lib/water_drop/producer/builder.rb +63 -0
- data/lib/water_drop/producer/dummy_client.rb +32 -0
- data/lib/water_drop/producer/statistics_decorator.rb +71 -0
- data/lib/water_drop/producer/status.rb +52 -0
- data/lib/water_drop/producer/sync.rb +65 -0
- data/lib/water_drop/version.rb +1 -1
- data/waterdrop.gemspec +4 -4
- metadata +25 -24
- metadata.gz.sig +0 -0
- data/.travis.yml +0 -35
- data/MIT-LICENCE +0 -18
- data/lib/water_drop/async_producer.rb +0 -26
- data/lib/water_drop/base_producer.rb +0 -57
- data/lib/water_drop/config_applier.rb +0 -52
- data/lib/water_drop/contracts/message_options.rb +0 -19
- data/lib/water_drop/sync_producer.rb +0 -24
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 616222bb9061a061dbe807b3c490c04fc368b0f457dc70e27b27c5cc049b1271
+  data.tar.gz: f6330179e3db7aea799360ca51a09fa56d9d7bbc757925bf482a901c90e4e76d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e09358bf7a02acca118f62f869f3f53bf417356e9f28f32187c578af5debfd6d602463b13082cfc79a3a12042e2e2efcedb000fd45009802bc186249dee4a6bb
+  data.tar.gz: 81fe2d98e27704662201408c47746f8a800a955143f260afcbc2e4f3c3d44f34b5b27fbfdedb7bfe642e1fd3df966265f2e69585cc59cc3c4941005e17238079
checksums.yaml.gz.sig CHANGED
Binary file

data.tar.gz.sig CHANGED
Binary file
data/.diffend.yml ADDED

data/.github/workflows/ci.yml ADDED

@@ -0,0 +1,53 @@
+name: ci
+
+on:
+  push:
+  schedule:
+    - cron: '0 1 * * *'
+
+jobs:
+  specs:
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        ruby:
+          - '2.7'
+          - '2.6'
+          - '2.5'
+          - 'jruby'
+        include:
+          - ruby: '2.7'
+            coverage: 'true'
+    steps:
+      - uses: actions/checkout@v2
+      - name: Install package dependencies
+        run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
+      - name: Set up Ruby
+        uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: ${{matrix.ruby}}
+      - name: Install latest bundler
+        run: |
+          gem install bundler --no-document
+          bundle config set without 'tools benchmarks docs'
+      - name: Bundle install
+        run: |
+          bundle config set without development
+          bundle install --jobs 4 --retry 3
+      - name: Run Kafka with docker-compose
+        run: docker-compose up -d
+      - name: Run all tests
+        env:
+          GITHUB_COVERAGE: ${{matrix.coverage}}
+        run: bundle exec rspec
+  coditsu:
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - name: Run Coditsu
+        run: \curl -sSL https://api.coditsu.io/run/ci | bash
data/.gitignore CHANGED

data/.ruby-version CHANGED

@@ -1 +1 @@
-2.7.
+2.7.2
data/CHANGELOG.md CHANGED

@@ -1,5 +1,14 @@
 # WaterDrop changelog
 
+## 2.0.0 (2020-12-13)
+- Redesign of the whole API (see `README.md` for the use-cases and the current API)
+- Replace `ruby-kafka` with `rdkafka`
+- Switch license from `MIT` to `LGPL-3.0`
+- #113 - Add some basic validations of the kafka scope of the config (Azdaroth)
+- Global state removed
+- Redesigned metrics that use `rdkafka` internal data + custom diffing
+- Restore JRuby support
+
 ## 1.4.0 (2020-08-25)
 - Release to match Karafka 1.4 versioning.
 
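The "Redesign of the whole API" and "Global state removed" entries above correspond to the producer-instance API documented in the README diff further down. As a rough before/after sketch (the 1.x call shape is recalled from the 1.4 docs, so treat it as an approximation):

```ruby
# WaterDrop 1.x (approximate): module-level, globally configured producers
WaterDrop::SyncProducer.call('my message', topic: 'my-topic')

# WaterDrop 2.0: explicit, per-instance producers with their own config
producer = WaterDrop::Producer.new do |config|
  config.kafka = { 'bootstrap.servers': 'localhost:9092' }
end

producer.produce_sync(topic: 'my-topic', payload: 'my message')
producer.close
```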
data/Gemfile CHANGED

data/Gemfile.lock CHANGED

@@ -1,24 +1,26 @@
 PATH
   remote: .
   specs:
-    waterdrop (
-
+    waterdrop (2.0.0)
+      concurrent-ruby (>= 1.1)
       dry-configurable (~> 0.8)
       dry-monitor (~> 0.3)
-      dry-validation (~> 1.
-
+      dry-validation (~> 1.3)
+      rdkafka (>= 0.6.0)
       zeitwerk (~> 2.1)
 
 GEM
   remote: https://rubygems.org/
   specs:
+    activesupport (6.1.0)
+      concurrent-ruby (~> 1.0, >= 1.0.2)
+      i18n (>= 1.6, < 2)
+      minitest (>= 5.1)
+      tzinfo (~> 2.0)
+      zeitwerk (~> 2.3)
+    byebug (11.1.3)
     concurrent-ruby (1.1.7)
-    delivery_boy (1.0.1)
-      king_konf (~> 0.3)
-      ruby-kafka (~> 1.0)
     diff-lcs (1.4.4)
-    digest-crc (0.6.1)
-      rake (~> 13.0)
     docile (1.3.2)
     dry-configurable (0.11.6)
       concurrent-ruby (~> 1.0)
@@ -27,7 +29,7 @@ GEM
     dry-container (0.7.2)
       concurrent-ruby (~> 1.0)
       dry-configurable (~> 0.1, >= 0.1.3)
-    dry-core (0.
+    dry-core (0.5.0)
       concurrent-ruby (~> 1.0)
     dry-equalizer (0.3.0)
     dry-events (0.2.0)
@@ -35,8 +37,8 @@ GEM
       dry-core (~> 0.4)
       dry-equalizer (~> 0.2)
     dry-inflector (0.2.0)
-    dry-initializer (3.0.
-    dry-logic (1.0.
+    dry-initializer (3.0.4)
+    dry-logic (1.0.8)
       concurrent-ruby (~> 1.0)
       dry-core (~> 0.2)
       dry-equalizer (~> 0.2)
@@ -45,7 +47,7 @@ GEM
       dry-core (~> 0.4)
       dry-equalizer (~> 0.2)
       dry-events (~> 0.2)
-    dry-schema (1.5.
+    dry-schema (1.5.6)
       concurrent-ruby (~> 1.0)
       dry-configurable (~> 0.8, >= 0.8.3)
       dry-core (~> 0.4)
@@ -60,43 +62,59 @@ GEM
       dry-equalizer (~> 0.3)
       dry-inflector (~> 0.1, >= 0.1.2)
       dry-logic (~> 1.0, >= 1.0.2)
-    dry-validation (1.
+    dry-validation (1.6.0)
       concurrent-ruby (~> 1.0)
       dry-container (~> 0.7, >= 0.7.1)
       dry-core (~> 0.4)
       dry-equalizer (~> 0.2)
       dry-initializer (~> 3.0)
-      dry-schema (~> 1.5)
-
+      dry-schema (~> 1.5, >= 1.5.2)
+    factory_bot (6.1.0)
+      activesupport (>= 5.0.0)
+    ffi (1.13.1)
+    i18n (1.8.5)
+      concurrent-ruby (~> 1.0)
+    mini_portile2 (2.5.0)
+    minitest (5.14.2)
     rake (13.0.1)
-
-
-
-
-    rspec
-    rspec-
-
+    rdkafka (0.8.1)
+      ffi (~> 1.9)
+      mini_portile2 (~> 2.1)
+      rake (>= 12.3)
+    rspec (3.10.0)
+      rspec-core (~> 3.10.0)
+      rspec-expectations (~> 3.10.0)
+      rspec-mocks (~> 3.10.0)
+    rspec-core (3.10.0)
+      rspec-support (~> 3.10.0)
+    rspec-expectations (3.10.0)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.
-    rspec-mocks (3.
+      rspec-support (~> 3.10.0)
+    rspec-mocks (3.10.0)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.
-    rspec-support (3.
-
-      digest-crc
-    simplecov (0.19.0)
+      rspec-support (~> 3.10.0)
+    rspec-support (3.10.0)
+    simplecov (0.20.0)
       docile (~> 1.1)
       simplecov-html (~> 0.11)
-
-
+      simplecov_json_formatter (~> 0.1)
+    simplecov-html (0.12.3)
+    simplecov_json_formatter (0.1.2)
+    tzinfo (2.0.3)
+      concurrent-ruby (~> 1.0)
+    zeitwerk (2.4.2)
 
 PLATFORMS
   ruby
+  x86_64-linux
 
 DEPENDENCIES
+  byebug
+  factory_bot
+  rdkafka
   rspec
   simplecov
   waterdrop!
 
 BUNDLED WITH
-   2.
+   2.2.0
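The dependency swap visible in the lockfile (`delivery_boy`/`ruby-kafka` out, `rdkafka` in) is what lets the `kafka` config hash use raw librdkafka option names. A minimal sketch of how such a hash maps onto the rdkafka gem directly (illustrative only; this is not code taken from WaterDrop itself):

```ruby
require 'rdkafka'

# The same option names WaterDrop 2.0 accepts in config.kafka are librdkafka
# settings understood by Rdkafka::Config.
kafka_settings = { 'bootstrap.servers': 'localhost:9092', 'request.required.acks': 1 }

rdkafka_producer = Rdkafka::Config.new(kafka_settings).producer
handle = rdkafka_producer.produce(topic: 'events', payload: 'data')
handle.wait # block until the delivery report arrives
rdkafka_producer.close
```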
data/LICENSE ADDED

@@ -0,0 +1,165 @@
+                   GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions.
+
+  As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+  "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+  An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+  A "Combined Work" is a work produced by combining or linking an
+Application with the Library.  The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+  The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+  The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+  1. Exception to Section 3 of the GNU GPL.
+
+  You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+  2. Conveying Modified Versions.
+
+  If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+   a) under this License, provided that you make a good faith effort to
+   ensure that, in the event an Application does not supply the
+   function or data, the facility still operates, and performs
+   whatever part of its purpose remains meaningful, or
+
+   b) under the GNU GPL, with none of the additional permissions of
+   this License applicable to that copy.
+
+  3. Object Code Incorporating Material from Library Header Files.
+
+  The object code form of an Application may incorporate material from
+a header file that is part of the Library.  You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+   a) Give prominent notice with each copy of the object code that the
+   Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the object code with a copy of the GNU GPL and this license
+   document.
+
+  4. Combined Works.
+
+  You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+   a) Give prominent notice with each copy of the Combined Work that
+   the Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the Combined Work with a copy of the GNU GPL and this license
+   document.
+
+   c) For a Combined Work that displays copyright notices during
+   execution, include the copyright notice for the Library among
+   these notices, as well as a reference directing the user to the
+   copies of the GNU GPL and this license document.
+
+   d) Do one of the following:
+
+       0) Convey the Minimal Corresponding Source under the terms of this
+       License, and the Corresponding Application Code in a form
+       suitable for, and under terms that permit, the user to
+       recombine or relink the Application with a modified version of
+       the Linked Version to produce a modified Combined Work, in the
+       manner specified by section 6 of the GNU GPL for conveying
+       Corresponding Source.
+
+       1) Use a suitable shared library mechanism for linking with the
+       Library.  A suitable mechanism is one that (a) uses at run time
+       a copy of the Library already present on the user's computer
+       system, and (b) will operate properly with a modified version
+       of the Library that is interface-compatible with the Linked
+       Version.
+
+   e) Provide Installation Information, but only if you would otherwise
+   be required to provide such information under section 6 of the
+   GNU GPL, and only to the extent that such information is
+   necessary to install and execute a modified version of the
+   Combined Work produced by recombining or relinking the
+   Application with a modified version of the Linked Version. (If
+   you use option 4d0, the Installation Information must accompany
+   the Minimal Corresponding Source and Corresponding Application
+   Code. If you use option 4d1, you must provide the Installation
+   Information in the manner specified by section 6 of the GNU GPL
+   for conveying Corresponding Source.)
+
+  5. Combined Libraries.
+
+  You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+   a) Accompany the combined library with a copy of the same work based
+   on the Library, uncombined with any other library facilities,
+   conveyed under the terms of this License.
+
+   b) Give prominent notice with the combined library that part of it
+   is a work based on the Library, and explaining where to find the
+   accompanying uncombined form of the same work.
+
+  6. Revised Versions of the GNU Lesser General Public License.
+
+  The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+  Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+  If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
data/README.md CHANGED

@@ -1,17 +1,25 @@
 # WaterDrop
 
-
+**Note**: Documentation presented here refers to WaterDrop `2.0.0.pre1`.
+
+WaterDrop `2.0` does **not** work with Karafka `1.*` and aims to either work as a standalone producer outside of Karafka `1.*` ecosystem or as a part of not yet released Karafka `2.0.*`.
+
+Please refer to [this](https://github.com/karafka/waterdrop/tree/1.4) branch and it's documentation for details about WaterDrop `1.*` usage.
+
+[![Build Status](https://github.com/karafka/waterdrop/workflows/ci/badge.svg)](https://github.com/karafka/waterdrop/actions?query=workflow%3Aci)
 [![Join the chat at https://gitter.im/karafka/karafka](https://badges.gitter.im/karafka/karafka.svg)](https://gitter.im/karafka/karafka?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 
 Gem used to send messages to Kafka in an easy way with an extra validation layer. It is a part of the [Karafka](https://github.com/karafka/karafka) ecosystem.
 
-
+It:
 
-
-
--
-- Supports
--
+- Is thread safe
+- Supports sync producing
+- Supports async producing
+- Supports buffering
+- Supports producing messages to multiple clusters
+- Supports multiple delivery policies
+- Works with Kafka 1.0+ and Ruby 2.5+
 
 ## Installation
 
@@ -36,82 +44,213 @@ bundle install
 WaterDrop is a complex tool, that contains multiple configuration options. To keep everything organized, all the configuration options were divided into two groups:
 
 - WaterDrop options - options directly related to Karafka framework and it's components
--
+- Kafka driver options - options related to `Kafka`
 
-To apply all those configuration options, you need to use the ```#setup``` method:
+To apply all those configuration options, you need to create a producer instance and use the ```#setup``` method:
 
 ```ruby
-WaterDrop.
+producer = WaterDrop::Producer.new
+
+producer.setup do |config|
   config.deliver = true
-  config.kafka
+  config.kafka = {
+    'bootstrap.servers': 'localhost:9092',
+    'request.required.acks': 1
+  }
+end
+```
+
+or you can do the same while initializing the producer:
+
+```ruby
+producer = WaterDrop::Producer.new do |config|
+  config.deliver = true
+  config.kafka = {
+    'bootstrap.servers': 'localhost:9092',
+    'request.required.acks': 1
+  }
 end
 ```
 
 ### WaterDrop configuration options
 
-| Option
-
-|
-| logger
-| deliver
+| Option | Description |
+|--------------------|-----------------------------------------------------------------|
+| `id` | id of the producer for instrumentation and logging |
+| `logger` | Logger that we want to use |
+| `deliver` | Should we send messages to Kafka or just fake the delivery |
+| `max_wait_timeout` | Waits that long for the delivery report or raises an error |
+| `wait_timeout` | Waits that long before re-check of delivery report availability |
 
-###
+### Kafka configuration options
 
-
+You can create producers with different `kafka` settings. Documentation of the available configuration options is available on https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md.
+
+## Usage
 
-
+Please refer to the [documentation](https://www.rubydoc.info/github/karafka/waterdrop) in case you're interested in the more advanced API.
 
-
-|--------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------|
-| raise_on_buffer_overflow | Should we raise an exception, when messages can't be sent in an async way due to the message buffer overflow or should we just drop them |
-| delivery_interval | The number of seconds between background message deliveries. Disable timer-based background deliveries by setting this to 0. |
-| delivery_threshold | The number of buffered messages that will trigger a background message delivery. Disable buffer size based background deliveries by setting this to 0.|
-| required_acks | The number of Kafka replicas that must acknowledge messages before they're considered as successfully written. |
-| ack_timeout | A timeout executed by a broker when the client is sending messages to it. |
-| max_retries | The number of retries when attempting to deliver messages. |
-| retry_backoff | The number of seconds to wait after a failed attempt to send messages to a Kafka broker before retrying. |
-| max_buffer_bytesize | The maximum number of bytes allowed in the buffer before new messages are rejected. |
-| max_buffer_size | The maximum number of messages allowed in the buffer before new messages are rejected. |
-| max_queue_size | The maximum number of messages allowed in the queue before new messages are rejected. |
-| sasl_plain_username | The username used to authenticate. |
-| sasl_plain_password | The password used to authenticate. |
+### Basic usage
 
-
+To send Kafka messages, just create a producer and use it:
 
 ```ruby
-WaterDrop.
-
-
+producer = WaterDrop::Producer.new
+
+producer.setup do |config|
+  config.kafka = { 'bootstrap.servers': 'localhost:9092' }
 end
+
+producer.produce_sync(topic: 'my-topic', payload: 'my message')
+
+# or for async
+producer.produce_async(topic: 'my-topic', payload: 'my message')
+
+# or in batches
+producer.produce_many_sync(
+  [
+    { topic: 'my-topic', payload: 'my message'},
+    { topic: 'my-topic', payload: 'my message'}
+  ]
+)
+
+# both sync and async
+producer.produce_many_async(
+  [
+    { topic: 'my-topic', payload: 'my message'},
+    { topic: 'my-topic', payload: 'my message'}
+  ]
+)
+
+# Don't forget to close the producer once you're done to flush the internal buffers, etc
+producer.close
 ```
 
-
+Each message that you want to publish, will have its value checked.
+
+Here are all the things you can provide in the message hash:
+
+| Option | Required | Value type | Description |
+|-------------|----------|---------------|-------------------------------------------------------|
+| `topic` | true | String | The Kafka topic that should be written to |
+| `payload` | true | String | Data you want to send to Kafka |
+| `key` | false | String | The key that should be set in the Kafka message |
+| `partition` | false | Integer | A specific partition number that should be written to |
+| `timestamp` | false | Time, Integer | The timestamp that should be set on the message |
+| `headers` | false | Hash | Headers for the message |
+
+Keep in mind, that message you want to send should be either binary or stringified (to_s, to_json, etc).
 
-
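For illustration only (this example is not part of the gem's README above), a message hash exercising every option from that table could look like the following, with the payload stringified via `to_json` as the note advises:

```ruby
# Hypothetical message using all documented options
producer.produce_sync(
  topic: 'user-events',                          # required, String
  payload: { id: 1, action: 'signup' }.to_json,  # required, String (stringified here)
  key: 'user-1',                                 # optional String
  partition: 0,                                  # optional Integer
  timestamp: Time.now,                           # optional Time or Integer
  headers: { 'source' => 'web' }                 # optional Hash
)
```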
+### Buffering
+
+WaterDrop producers support buffering of messages, which means that you can easily implement periodic flushing for long running processes as well as buffer several messages to be flushed the same moment:
 
 ```ruby
-WaterDrop::
-
-
+producer = WaterDrop::Producer.new
+
+producer.setup do |config|
+  config.kafka = { 'bootstrap.servers': 'localhost:9092' }
+end
+
+time = Time.now - 10
+
+while time < Time.now
+  time += 1
+  producer.buffer(topic: 'times', payload: Time.now.to_s)
+end
+
+puts "The messages buffer size #{producer.messages.size}"
+producer.flush_sync
+puts "The messages buffer size #{producer.message.size}"
+
+producer.close
 ```
 
-
+## Instrumentation
 
-
-|-------------------- |----------|------------|---------------------------------------------------------------------|
-| ```topic``` | true | String | The Kafka topic that should be written to |
-| ```key``` | false | String | The key that should be set in the Kafka message |
-| ```partition``` | false | Integer | A specific partition number that should be written to |
-| ```partition_key``` | false | String | A string that can be used to deterministically select the partition |
-| ```create_time``` | false | Time | The timestamp that should be set on the message |
-| ```headers``` | false | Hash | Headers for the message |
+Each of the producers after the `#setup` is done, has a custom monitor to which you can subscribe.
 
-
+```ruby
+producer = WaterDrop::Producer.new
+
+producer.setup do |config|
+  config.kafka = { 'bootstrap.servers': 'localhost:9092' }
+end
+
+producer.monitor.subscribe('message.produced_async') do |event|
+  puts "A message was produced to '#{event[:message][:topic]}' topic!"
+end
+
+producer.produce_async(topic: 'events', payload: 'data')
+
+producer.close
+```
+
+See the `WaterDrop::Instrumentation::Monitor::EVENTS` for the list of all the supported events.
+
+### Usage statistics
+
+WaterDrop may be configured to emit internal metrics at a fixed interval by setting the `kafka` `statistics.interval.ms` configuration property to a value > `0`. Once that is done, emitted statistics are available after subscribing to the `statistics.emitted` publisher event.
+
+The statistics include all of the metrics from `librdkafka` (full list [here](https://github.com/edenhill/librdkafka/blob/master/STATISTICS.md)) as well as the diff of those against the previously emitted values.
+
+For several attributes like `txmsgs`, `librdkafka` publishes only the totals. In order to make it easier to track the progress (for example number of messages sent between statistics emitted events), WaterDrop diffs all the numeric values against previously available numbers. All of those metrics are available under the same key as the metric but with additional `_d` postfix:
+
+
+```ruby
+producer = WaterDrop::Producer.new do |config|
+  config.kafka = {
+    'bootstrap.servers': 'localhost:9092',
+    'statistics.interval.ms': 2_000 # emit statistics every 2 seconds
+  }
+end
+
+producer.monitor.subscribe('statistics.emitted') do |event|
+  sum = event[:statistics]['txmsgs']
+  diff = event[:statistics]['txmsgs_d']
+
+  p "Sent messages: #{sum}"
+  p "Messages sent from last statistics report: #{diff}"
+end
+
+sleep(2)
+
+# Sent messages: 0
+# Messages sent from last statistics report: 0
+
+20.times { producer.produce_async(topic: 'events', payload: 'data') }
+
+# Sent messages: 20
+# Messages sent from last statistics report: 20
+
+sleep(2)
+
+20.times { producer.produce_async(topic: 'events', payload: 'data') }
+
+# Sent messages: 40
+# Messages sent from last statistics report: 20
+
+sleep(2)
+
+# Sent messages: 40
+# Messages sent from last statistics report: 0
+
+producer.close
+```
+
+Note: The metrics returned may not be completely consistent between brokers, toppars and totals, due to the internal asynchronous nature of librdkafka. E.g., the top level tx total may be less than the sum of the broker tx values which it represents.
+
+### Forking and potential memory problems
+
+If you work with forked processes, make sure you **don't** use the producer before the fork. You can easily configure the producer and then fork and use it.
+
+To tackle this [obstacle](https://github.com/appsignal/rdkafka-ruby/issues/15) related to rdkafka, WaterDrop adds finalizer to each of the producers to close the rdkafka client before the Ruby process is shutdown. Due to the [nature of the finalizers](https://www.mikeperham.com/2010/02/24/the-trouble-with-ruby-finalizers/), this implementation prevents producers from being GCed (except upon VM shutdown) and can cause memory leaks if you don't use persistent/long-lived producers in a long-running process or if you don't use the `#close` method of a producer when it is no longer needed. Creating a producer instance for each message is anyhow a rather bad idea, so we recommend not to.
 
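A minimal sketch of the configure-then-fork pattern described in the forking note above (illustrative, not taken from the gem's README):

```ruby
# The producer is configured, but not used, before forking
producer = WaterDrop::Producer.new do |config|
  config.kafka = { 'bootstrap.servers': 'localhost:9092' }
end

2.times do
  fork do
    # First use happens only after the fork, as the section above requires
    producer.produce_sync(topic: 'events', payload: 'from a forked worker')
    # Close in every process that used the producer
    producer.close
  end
end

Process.waitall
```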
 ## References
 
+* [WaterDrop code documentation](https://www.rubydoc.info/github/karafka/waterdrop)
 * [Karafka framework](https://github.com/karafka/karafka)
-* [WaterDrop
+* [WaterDrop Actions CI](https://github.com/karafka/waterdrop/actions?query=workflow%3Ac)
 * [WaterDrop Coditsu](https://app.coditsu.io/karafka/repositories/waterdrop)
 
 ## Note on contributions