kafka 0.5.0
- checksums.yaml +7 -0
- data/.gitignore +14 -0
- data/.rubocop.yml +210 -0
- data/.travis.yml +45 -0
- data/CHANGELOG.md +3 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Gemfile +5 -0
- data/LICENSE.txt +21 -0
- data/README.md +182 -0
- data/Rakefile +69 -0
- data/examples/consumer.rb +55 -0
- data/examples/producer.rb +46 -0
- data/ext/Rakefile +69 -0
- data/kafka.gemspec +39 -0
- data/lib/kafka/admin.rb +141 -0
- data/lib/kafka/config.rb +145 -0
- data/lib/kafka/consumer.rb +87 -0
- data/lib/kafka/error.rb +44 -0
- data/lib/kafka/ffi/admin/admin_options.rb +121 -0
- data/lib/kafka/ffi/admin/config_entry.rb +97 -0
- data/lib/kafka/ffi/admin/config_resource.rb +101 -0
- data/lib/kafka/ffi/admin/delete_topic.rb +19 -0
- data/lib/kafka/ffi/admin/new_partitions.rb +77 -0
- data/lib/kafka/ffi/admin/new_topic.rb +91 -0
- data/lib/kafka/ffi/admin/result.rb +66 -0
- data/lib/kafka/ffi/admin/topic_result.rb +32 -0
- data/lib/kafka/ffi/admin.rb +16 -0
- data/lib/kafka/ffi/broker_metadata.rb +32 -0
- data/lib/kafka/ffi/client.rb +640 -0
- data/lib/kafka/ffi/config.rb +382 -0
- data/lib/kafka/ffi/consumer.rb +342 -0
- data/lib/kafka/ffi/error.rb +25 -0
- data/lib/kafka/ffi/event.rb +215 -0
- data/lib/kafka/ffi/group_info.rb +75 -0
- data/lib/kafka/ffi/group_list.rb +27 -0
- data/lib/kafka/ffi/group_member_info.rb +52 -0
- data/lib/kafka/ffi/message/header.rb +205 -0
- data/lib/kafka/ffi/message.rb +205 -0
- data/lib/kafka/ffi/metadata.rb +58 -0
- data/lib/kafka/ffi/opaque.rb +81 -0
- data/lib/kafka/ffi/opaque_pointer.rb +73 -0
- data/lib/kafka/ffi/partition_metadata.rb +61 -0
- data/lib/kafka/ffi/producer.rb +144 -0
- data/lib/kafka/ffi/queue.rb +65 -0
- data/lib/kafka/ffi/topic.rb +32 -0
- data/lib/kafka/ffi/topic_config.rb +126 -0
- data/lib/kafka/ffi/topic_metadata.rb +42 -0
- data/lib/kafka/ffi/topic_partition.rb +43 -0
- data/lib/kafka/ffi/topic_partition_list.rb +167 -0
- data/lib/kafka/ffi.rb +624 -0
- data/lib/kafka/poller.rb +28 -0
- data/lib/kafka/producer/delivery_report.rb +120 -0
- data/lib/kafka/producer.rb +127 -0
- data/lib/kafka/version.rb +8 -0
- data/lib/kafka.rb +11 -0
- metadata +159 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA256:
  metadata.gz: b1ff8fcc9cb3c43bd818733a5af934490298e0fc28a54668bb4a47d24abe8c69
  data.tar.gz: 9ca67cd457ceadaffaea6bdb2b1c61d1f4481aa3e2a0ce3874bfb251c3ae9e1f
SHA512:
  metadata.gz: 0ecdfd53f2621b6f876595a159847683a8360d2cfa675b0161db5baf688415f2d65c195848d0835c8dbd2aad06ed3377d54dde463bad3e83968893778ef143c6
  data.tar.gz: c56a0ceafce7c89761aad6da5bdd45ca0da82a765d7072ea472abd6c8fc585dceebaf8f37f2783ea8cccb18205f58f0ef883f16472a8693860a59bcb1697b49a
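These digests cover the two archives packed inside the `.gem` file (`metadata.gz` and `data.tar.gz`). A minimal sketch of how they can be recomputed locally, assuming the gem has been unpacked with `tar -xf kafka-0.5.0.gem` so both archives sit in the current directory:

```ruby
require "digest"

# metadata.gz and data.tar.gz are the members of the .gem tar archive whose
# digests are recorded in checksums.yaml above.
%w[metadata.gz data.tar.gz].each do |name|
  puts "SHA256 #{name}: #{Digest::SHA256.file(name).hexdigest}"
  puts "SHA512 #{name}: #{Digest::SHA512.file(name).hexdigest}"
end
```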
data/.gitignore
ADDED
data/.rubocop.yml
ADDED
@@ -0,0 +1,210 @@
AllCops:
  TargetRubyVersion: 2.6
  Exclude:
    - "bin/*"
    - "ext/tmp/*"
    - "ext/ports/*"

require:
  - rubocop-performance

#################
#   Overrides   #
#################

Layout/ArgumentAlignment:
  EnforcedStyle: with_fixed_indentation

Layout/HashAlignment:
  EnforcedLastArgumentHashStyle: ignore_implicit

# Align ends correctly.
Layout/EndAlignment:
  EnforcedStyleAlignWith: variable

Layout/LineLength:
  Max: 200

Layout/SpaceInsideBlockBraces:
  SpaceBeforeBlockParameters: true

Layout/SpaceInsideHashLiteralBraces:
  EnforcedStyle: space

Style/StringLiterals:
  EnforcedStyle: double_quotes

Style/SymbolArray:
  EnforcedStyle: brackets

Style/CollectionMethods:
  # Mapping from undesired method to desired method
  # e.g. to use `detect` over `find`:
  #
  # CollectionMethods:
  #   PreferredMethods:
  #     find: detect
  PreferredMethods:
    reduce: 'inject'
    find: 'detect'
    each_with_index: 'each.with_index'

Style/StringLiteralsInInterpolation:
  EnforcedStyle: double_quotes

Style/SignalException:
  EnforcedStyle: only_raise

# Require that rescue blocks use the inferred StandardError rather than require
# the class to be specified.
Style/RescueStandardError:
  EnforcedStyle: "implicit"

Style/TrailingCommaInArrayLiteral:
  Enabled: true
  EnforcedStyleForMultiline: consistent_comma

Style/TrailingCommaInHashLiteral:
  Enabled: true
  EnforcedStyleForMultiline: consistent_comma

Style/TrailingCommaInArguments:
  EnforcedStyleForMultiline: comma

Layout/FirstHashElementIndentation:
  EnforcedStyle: consistent

Layout/FirstArrayElementIndentation:
  EnforcedStyle: consistent

Layout/MultilineMethodCallIndentation:
  EnforcedStyle: indented

Style/PercentLiteralDelimiters:
  PreferredDelimiters:
    "%w": "[]"
    "%W": "[]"
    "%i": "[]"

Style/EmptyElse:
  EnforcedStyle: empty

Style/EmptyMethod:
  EnforcedStyle: expanded

Metrics/PerceivedComplexity:
  Max: 11

# As a wrapper around librdkafka there is little we can do to properly limit
# this.
Metrics/ParameterLists:
  Max: 7

#################
# Disabled cops #
#################
Style/ClassAndModuleChildren:
  Enabled: false

Metrics/ClassLength:
  Enabled: false

Metrics/CyclomaticComplexity:
  Enabled: false

Style/Documentation:
  Enabled: false

Style/EachWithObject:
  Enabled: false

Style/FormatString:
  Enabled: false

Style/Lambda:
  Enabled: false

Metrics/MethodLength:
  Enabled: false

Style/NegatedIf:
  Enabled: false

Style/Semicolon:
  Enabled: false

Layout/SpaceInsideArrayLiteralBrackets:
  Enabled: false

Style/SingleLineBlockParams:
  Enabled: false

Style/WordArray:
  Enabled: false

Style/IfUnlessModifier:
  Enabled: false

Style/NumericLiterals:
  Enabled: false

Style/TrailingUnderscoreVariable:
  Enabled: false

Style/EmptyCaseCondition:
  Enabled: false

Style/WhileUntilModifier:
  Enabled: false

Lint/AssignmentInCondition:
  Enabled: false

# It was recommending parenthesizing blocks that were not ambiguous.
Lint/AmbiguousBlockAssociation:
  Enabled: false

Metrics/AbcSize:
  Enabled: false

Style/AsciiComments:
  Enabled: false

Performance/TimesMap:
  Enabled: false

Style/GuardClause:
  Enabled: false

Style/BracesAroundHashParameters:
  Enabled: false

Style/NumericPredicate:
  Enabled: false

Style/SafeNavigation:
  Enabled: false

# Fails for a pretty consistent pattern where we preemptively use a return in a
# conditional to allow future expansion of a method:
#
#   def foo
#     if condition?
#       render action: :bar
#       return
#     end
#   end
Style/RedundantReturn:
  Enabled: false

# The goal of this cop is to improve readability for formatting strings and
# could be useful for complex formats. As of 2019-12-28 all of the offending
# instances are simple single value formats and wouldn't be any more readable.
Style/FormatStringToken:
  Enabled: false

Metrics/BlockLength:
  Enabled: false

Metrics/ModuleLength:
  Enabled: false
data/.travis.yml
ADDED
@@ -0,0 +1,45 @@
dist: bionic

language: ruby
rvm:
  - 2.5
  - 2.6
  - 2.7
  - ruby-head

# Use a consistent Kafka version for each version of Ruby being tested.
env:
  - KAFKA=2.2

# Run tests against different versions of Kafka using whatever RVM considers
# the most recent stable version of Ruby to be.
jobs:
  allow_failures:
    - rvm: ruby-head
  include:
    - rvm: ruby
      env: KAFKA=2.1
    - rvm: ruby
      env: KAFKA=2.2
    - rvm: ruby
      env: KAFKA=2.3
    - rvm: ruby
      env: KAFKA=2.4

services:
  - docker

cache: bundler

before_install:
  - docker-compose -f spec/support/kafka-$KAFKA.yml up -d
  - gem install bundler -v 2.1.3

  # Install an up to date version of kafkacat via docker. The version available
  # in apt doesn't support nil keys or headers.
  - docker pull edenhill/kafkacat:1.5.0
  - echo 'docker run -i --rm --network=host edenhill/kafkacat:1.5.0 "$@"' > $HOME/bin/kafkacat
  - chmod +x $HOME/bin/kafkacat

script:
  - bundle exec rake
data/CHANGELOG.md
ADDED
data/CODE_OF_CONDUCT.md
ADDED
@@ -0,0 +1,74 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
  advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
  address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at gaffneyc@gmail.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [https://contributor-covenant.org/version/1/4][version]

[homepage]: https://contributor-covenant.org
[version]: https://contributor-covenant.org/version/1/4/
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2020 Chris Gaffney

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,182 @@
# Kafka

[![Build Status](https://travis-ci.com/deadmanssnitch/kafka.svg?branch=master)](https://travis-ci.com/deadmanssnitch/kafka)

The kafka gem provides a general producer and consumer for
[Apache Kafka](https://kafka.apache.org) using bindings to the official
[C client librdkafka](https://github.com/edenhill/librdkafka). The `Kafka::FFI`
module implements an object oriented mapping to most of the librdkafka API,
making it easier and safer to use than calling functions directly.

## ⚠️ Project Status: Beta ⚠️

This project is currently of BETA quality. Some APIs are still in flux and may
change. There are also probably a number of subtle (and not so subtle) bugs and
memory leaks. Since this relies heavily on binding to librdkafka through FFI,
there are probably code paths which will cause segfaults or memory corruption.

Working with `Kafka::FFI` directly has many sharp edges; the higher level
classes in the `Kafka` module are there to blunt them.

You (yes you!) can make a difference and help make this project better. Test
against your application and traffic, implement missing functions (see
`rake ffi:missing`), work with the API and make suggestions for improvements.
All help is wanted and appreciated.

## Installation

Add this line to your application's Gemfile:

```ruby
gem "kafka"
```

And then execute:

    $ bundle install

Or install it yourself as:

    $ gem install kafka

## Usage

For more examples see [the examples directory](examples/).

For a detailed introduction to librdkafka, which is useful when working with
`Kafka::FFI` directly, see
[the librdkafka documentation](https://github.com/edenhill/librdkafka/blob/master/INTRODUCTION.md).

### Sending Messages to a Topic

```ruby
require "kafka"

config = Kafka::Config.new("bootstrap.servers": "localhost:9092")
producer = Kafka::Producer.new(config)

# Asynchronously publish a JSON payload to the events topic.
event = { time: Time.now, status: "success" }
result = producer.produce("events", event.to_json)

# Wait for the delivery to confirm that publishing was successful.
# result.wait
# result.successful?
```

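`produce` returns before the broker has acknowledged the write. When delivery needs to be confirmed before moving on, the returned delivery report can be waited on, as the comments above suggest. A minimal sketch continuing the example; `error` is an assumed accessor on the report and may differ from the actual API:

```ruby
# Block until the broker either acknowledges the write or reports a failure.
result.wait

unless result.successful?
  # `error` is assumed here to expose the failure reason from the report.
  warn "delivery failed: #{result.error}"
end
```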
### Consuming Messages from a Topic

```ruby
require "kafka"

config = Kafka::Config.new({
  "bootstrap.servers": "localhost:9092",

  # Required for consumers to know what consumer group to join.
  "group.id": "web.production.eventer",
})

consumer = Kafka::Consumer.new(config)
consumer.subscribe("events")

@run = true
trap("INT")  { @run = false }
trap("TERM") { @run = false }

while @run
  consumer.poll do |message|
    puts message.payload
  end
end
```

### Configuration

Kafka has a lot of potential knobs to turn and dials to tweak. A
`Kafka::Config` uses the same configuration options as librdkafka (and most or
all of those from the Java client). The defaults are generally good and a fine
place to start.

[All Configuration Options](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md)

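As a sketch of a more heavily tuned producer configuration: the option names below come from the librdkafka CONFIGURATION.md linked above, while the broker addresses and values are illustrative only.

```ruby
config = Kafka::Config.new({
  "bootstrap.servers": "kafka-1:9092,kafka-2:9092",

  # Fail a delivery that cannot be acknowledged within 30 seconds.
  "message.timeout.ms": 30_000,

  # Compress batches, trading a little CPU for smaller network payloads.
  "compression.codec": "lz4",

  # Wait for all in-sync replicas to acknowledge each write.
  "acks": "all",
})
```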
### Bindings

`Kafka::FFI` provides bindings to the functions in
[librdkafka](https://github.com/edenhill/librdkafka/blob/master/src/rdkafka.h).
The function names are kept the same, and most take named parameters to help
with calling them. Be aware that you will need to handle some memory
management to call most of the functions exported in the bindings. See
[rdkafka.h](https://github.com/edenhill/librdkafka/blob/master/src/rdkafka.h)
for any questions about usage and semantics.

All classes in `Kafka::FFI` provide an object oriented mapping to the functions
exported on `Kafka::FFI.rd_kafka_*`. Most still require an understanding of
memory management, but they are easier and safer to use than calling into
librdkafka directly.

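To illustrate the naming convention, librdkafka's version functions map directly onto module functions. A minimal sketch, assuming `rd_kafka_version` and `rd_kafka_version_str` are among the attached bindings (check with `rake ffi:implemented`):

```ruby
require "kafka"

# rd_kafka_version_str returns the human readable librdkafka version and
# rd_kafka_version the hex-encoded integer form. Both names are assumed to be
# attached in this release; see `rake ffi:implemented` for the actual list.
puts Kafka::FFI.rd_kafka_version_str
puts format("0x%08x", Kafka::FFI.rd_kafka_version)
```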
## Why another Kafka gem?

There are already at least two good gems for Kafka:
[ruby-kafka](https://github.com/zendesk/ruby-kafka) and
[rdkafka](https://github.com/appsignal/rdkafka-ruby). In fact we've used both
of these gems on Dead Man's Snitch for quite a while and they've been great. We
really appreciate all of the work that has gone into them :heart:.

Unfortunately, keeping up with Kafka feature and protocol changes can be a full
time job. Development on ruby-kafka has stalled for that reason and many
consumer/producer libraries are migrating away from it.

As a heartbeat and cron job monitoring service, we depend on receiving and
processing reports from jobs reliably and quickly. Failing to receive a report
could mean waking someone up at 3AM or forcing them to take time away from
family or friends to deal with a false alarm. What started as a deep dive into
rdkafka, to understand how best to use it reliably, turned into a set of ideas
that probably wouldn't have been a good fit for rdkafka itself, so we decided
to start from scratch.

Our goal is to provide a stable and easy to maintain Kafka consumer / producer
for Ruby. With time as our biggest constraint it makes sense to leverage
librdkafka, as it has full time maintenance and support by the team behind
Kafka. FFI makes it fast and easy to expose new librdkafka APIs as they are
added. A stable test suite means being able to meaningfully spend the limited
amount of time we have available to invest. Embracing memory management and
building clean separations between layers should reduce the burden of
implementing new bindings, as the rules and responsibilities of each layer are
clear.

## Development

To get started with development make sure to have docker, docker-compose, and
[kafkacat](https://github.com/edenhill/kafkacat) installed as they make getting
up to speed easier.

Before running the tests, start a Kafka broker instance:

```console
rake kafka:up
```

Then run the tests with:

```console
rake
```

When you're done, shut down the Kafka instance by running:

```console
rake kafka:down
```

## Contributing

Bug reports and pull requests are welcome on GitHub at
https://github.com/deadmanssnitch/kafka. This project is intended to be a safe,
welcoming space for collaboration, and contributors are expected to adhere to
the [code of conduct](https://github.com/deadmanssnitch/kafka/blob/master/CODE_OF_CONDUCT.md).

## License

The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).

## Code of Conduct

Everyone interacting in the Kafka project's codebases and issue trackers is expected to follow the
[code of conduct](https://github.com/deadmanssnitch/kafka/blob/master/CODE_OF_CONDUCT.md).
data/Rakefile
ADDED
@@ -0,0 +1,69 @@
# frozen_string_literal: true

require "bundler/gem_tasks"
require "rspec/core/rake_task"

RSpec::Core::RakeTask.new(:spec)

desc "Build librdkafka into ext"
task :ext do
  if Dir.glob("ext/librdkafka.*").empty?
    sh "cd ext && rake"
  end
end

task default: [:ext, :spec]

namespace :ffi do
  desc "Lists the librdkafka functions that have not been implemented in Kafka::FFI"
  task :missing do
    require_relative "lib/kafka/version"

    require "uri"
    require "net/http"
    require "tempfile"

    header = Tempfile.new(["rdkafka", ".h"])

    # Fetch the header for the pinned version of librdkafka. rdkafka.h contains
    # all of the exported function prototypes.
    url = URI("https://raw.githubusercontent.com/edenhill/librdkafka/v#{::Kafka::LIBRDKAFKA_VERSION}/src/rdkafka.h")
    resp = Net::HTTP.get(url)
    header.write(resp)
    header.close

    all = `ctags -x --sort=yes --kinds-C=pf #{header.path} | awk '{ print $1 }'`
    all = all.split("\n")

    ffi_path = File.expand_path("lib/kafka/ffi.rb", __dir__)
    implemented = `grep -o -h -P '^\\s+attach_function\\s+:\\Krd_kafka_\\w+' #{ffi_path}`
    implemented = implemented.split("\n").sort

    missing = all - implemented
    puts missing
  ensure
    header.unlink
  end

  desc "Prints the list of implemented librdkafka functions"
  task :implemented do
    ffi_path = File.expand_path("lib/kafka/ffi.rb", __dir__)
    puts `grep -o -h -P '^\\s+attach_function\\s+:\\Krd_kafka_\\w+' #{ffi_path} | sort`
  end
end

namespace :kafka do
  desc "Start an instance of Kafka running in docker"
  task :up do
    # Find the docker-compose file for the most recent version of Kafka in
    # spec/support.
    compose = Dir["spec/support/kafka-*.yml"].max

    sh "docker-compose -p ruby_kafka_dev -f #{compose} up -d"
  end

  desc "Shutdown the development Kafka instance"
  task :down do
    sh "docker-compose -p ruby_kafka_dev down"
  end
end
data/examples/consumer.rb
ADDED
@@ -0,0 +1,55 @@
# frozen_string_literal: true

$LOAD_PATH.unshift File.expand_path("../lib", __dir__)

require "kafka"

config = Kafka::Config.new({
  "bootstrap.servers": "127.0.0.1:9092",
  "group.id": "ruby-kafka-test",

  # Disable automatic offset commits, requiring the application to call commit
  # on the consumer. Commits keep track of what messages have been processed to
  # reduce replays of messages.
  "enable.auto.commit": false,
})

# Initialize a topic with 8 partitions and 1 replica per partition. This is
# only for testing, a replication factor of 1 is not generally recommended for
# production.
admin = Kafka::Admin.new(config)
admin.create_topic("ruby_test_topic", 8, 1)
admin.close

@run = true
trap("INT")  { @run = false }
trap("TERM") { @run = false }

4.times.map do |i|
  # While librdkafka is thread safe, it doesn't make sense to have multiple
  # threads polling a single consumer as they will end up just taking turns
  # consuming the next available message.
  #
  # To get better coverage it is better to have a consumer per thread. Be aware
  # that having more threads than partitions will cause some to sit idle
  # waiting for a consumer to fail. At most there can be one consumer per
  # partition for a topic actively receiving messages.
  Thread.new do
    con = Kafka::Consumer.new(config)
    con.subscribe("ruby_test_topic")

    while @run
      con.poll(timeout: 500) do |msg|
        # Requires Ruby 2.5. Most efficient way to convert from a millisecond
        # timestamp to a Time.
        ts = Time.at(0, msg.timestamp, :millisecond).utc

        puts format("%2d: %4d %8d %s %s => %s", i, msg.partition, msg.offset, msg.key, ts, msg.payload)

        con.commit(msg, async: true)
      end
    end

    con.close
  end
end.map(&:join)