ruby-kafka 0.5.2 → 0.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.circleci/config.yml +9 -2
- data/CHANGELOG.md +4 -0
- data/README.md +30 -19
- data/docker-compose.yml +3 -3
- data/examples/simple-consumer.rb +1 -1
- data/lib/kafka.rb +8 -2
- data/lib/kafka/broker.rb +18 -0
- data/lib/kafka/client.rb +58 -5
- data/lib/kafka/cluster.rb +75 -2
- data/lib/kafka/consumer.rb +4 -2
- data/lib/kafka/consumer_group.rb +1 -1
- data/lib/kafka/producer.rb +2 -7
- data/lib/kafka/protocol.rb +34 -3
- data/lib/kafka/protocol/api_versions_response.rb +4 -0
- data/lib/kafka/protocol/create_partitions_request.rb +40 -0
- data/lib/kafka/protocol/create_partitions_response.rb +26 -0
- data/lib/kafka/protocol/delete_topics_request.rb +31 -0
- data/lib/kafka/protocol/delete_topics_response.rb +24 -0
- data/lib/kafka/protocol/describe_configs_request.rb +33 -0
- data/lib/kafka/protocol/describe_configs_response.rb +71 -0
- data/lib/kafka/protocol/encoder.rb +1 -1
- data/lib/kafka/version.rb +1 -1
- metadata +8 -6
- data/ci/consumer.rb +0 -18
- data/ci/init.rb +0 -17
- data/ci/producer.rb +0 -25
- data/performance/profile.rb +0 -39
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 48ff478bed517af54383f5751c8d2bff8e2d9af6
+  data.tar.gz: 5238160c645a2a491ec75292f2c1a62e99f0fac6
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: eae662a727f7da9a533ccb9c7fa2054dc56187f60b1fed56f2c6e4b78cc56aa3e0e42b8c03bfa228ee754fdf5a9edff431314a5b40d6c2d585e30ad5d0a5f3e3
+  data.tar.gz: 34f00949dc40559aa6d0154e93dfa03e0d9db17834a55c2cdf7b03fced2b8c8993cbee6301852135e91e8f086efe99f859088720f42c4b733277df8b1420126f
data/.circleci/config.yml
CHANGED
@@ -23,18 +23,21 @@ jobs:
           KAFKA_ADVERTISED_PORT: 9092
           KAFKA_PORT: 9092
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
       - image: wurstmeister/kafka:0.10.2.1
         environment:
           KAFKA_ADVERTISED_HOST_NAME: localhost
           KAFKA_ADVERTISED_PORT: 9093
           KAFKA_PORT: 9093
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
      - image: wurstmeister/kafka:0.10.2.1
         environment:
           KAFKA_ADVERTISED_HOST_NAME: localhost
           KAFKA_ADVERTISED_PORT: 9094
           KAFKA_PORT: 9094
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
     steps:
       - checkout
       - run: bundle install --path vendor/bundle
@@ -52,18 +55,19 @@ jobs:
           KAFKA_ADVERTISED_PORT: 9092
           KAFKA_PORT: 9092
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
       - image: wurstmeister/kafka:0.11.0.1
         environment:
-          KAFKA_ADVERTISED_HOST_NAME: localhost
-          KAFKA_ADVERTISED_PORT: 9093
           KAFKA_PORT: 9093
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
       - image: wurstmeister/kafka:0.11.0.1
         environment:
           KAFKA_ADVERTISED_HOST_NAME: localhost
           KAFKA_ADVERTISED_PORT: 9094
           KAFKA_PORT: 9094
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
     steps:
       - checkout
       - run: bundle install --path vendor/bundle
@@ -81,18 +85,21 @@ jobs:
           KAFKA_ADVERTISED_PORT: 9092
           KAFKA_PORT: 9092
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
       - image: wurstmeister/kafka:1.0.0
         environment:
           KAFKA_ADVERTISED_HOST_NAME: localhost
           KAFKA_ADVERTISED_PORT: 9093
           KAFKA_PORT: 9093
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
       - image: wurstmeister/kafka:1.0.0
         environment:
           KAFKA_ADVERTISED_HOST_NAME: localhost
           KAFKA_ADVERTISED_PORT: 9094
           KAFKA_PORT: 9094
           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+          KAFKA_DELETE_TOPIC_ENABLE: true
     steps:
       - checkout
      - run: bundle install --path vendor/bundle
data/CHANGELOG.md
CHANGED
@@ -4,6 +4,10 @@ Changes and additions to the library will be listed here.
 
 ## Unreleased
 
+- Add support for the topic deletion API (#528).
+- Add support for the partition creation API (#533).
+- Allow passing in the seed brokers in a positional argument (#538).
+
 ## v0.5.2
 
 - Instrument the start of message/batch processing (#496).
data/README.md
CHANGED
@@ -46,7 +46,7 @@ Although parts of this library work with Kafka 0.8 – specifically, the Produce
 6. [Support and Discussion](#support-and-discussion)
 7. [Roadmap](#roadmap)
 8. [Higher level libraries](#higher-level-libraries)
-  1. [Message processing frameworks](#message-processing-
+  1. [Message processing frameworks](#message-processing-frameworks)
   2. [Message publishing libraries](#message-publishing-libraries)
 
 ## Installation
@@ -74,13 +74,15 @@ Or install it yourself as:
     <th>Kafka 0.9</th>
     <th>Kafka 0.10</th>
     <th>Kafka 0.11</th>
+    <th>Kafka 1.0</th>
   </tr>
   <tr>
     <th>Producer API</th>
-    <td>Full support</td>
+    <td>Full support in v0.4.x</td>
     <td>Full support in v0.4.x</td>
     <td>Full support in v0.5.x</td>
     <td>Limited support</td>
+    <td>Limited support</td>
   </tr>
   <tr>
     <th>Consumer API</th>
@@ -88,6 +90,7 @@ Or install it yourself as:
     <td>Full support in v0.4.x</td>
     <td>Full support in v0.5.x</td>
     <td>Limited support</td>
+    <td>Limited support</td>
   </tr>
 </table>
 
@@ -97,6 +100,7 @@ This library is targeting Kafka 0.9 with the v0.4.x series and Kafka 0.10 with t
 - **Kafka 0.9:** Full support for the Producer and Consumer API in ruby-kafka v0.4.x.
 - **Kafka 0.10:** Full support for the Producer and Consumer API in ruby-kafka v0.5.x. Note that you _must_ run version 0.10.1 or higher of Kafka due to limitations in 0.10.0.
 - **Kafka 0.11:** Everything that works with Kafka 0.10 should still work, but so far no features specific to Kafka 0.11 have been added.
+- **Kafka 1.0:** Everything that works with Kafka 0.11 should still work, but so far no features specific to Kafka 1.0 have been added.
 
 This library requires Ruby 2.1 or higher.
 
@@ -111,13 +115,10 @@ A client must be initialized with at least one Kafka broker, from which the enti
 ```ruby
 require "kafka"
 
-kafka = Kafka.new(
-  # At least one of these nodes must be available:
-  seed_brokers: ["kafka1:9092", "kafka2:9092"],
-
-  # Set an optional client id in order to identify the client to Kafka:
-  client_id: "my-application",
-)
+# The first argument is a list of "seed brokers" that will be queried for the full
+# cluster topology. At least one of these *must* be available. `client_id` is
+# used to identify this client in logs and metrics. It's optional but recommended.
+kafka = Kafka.new(["kafka1:9092", "kafka2:9092"], client_id: "my-application")
 ```
 
 ### Producing Messages to Kafka
@@ -426,10 +427,7 @@ require "kafka"
 
 # Configure the Kafka client with the broker hosts and the Rails
 # logger.
-$kafka = Kafka.new(
-  seed_brokers: ["kafka1:9092", "kafka2:9092"],
-  logger: Rails.logger,
-)
+$kafka = Kafka.new(["kafka1:9092", "kafka2:9092"], logger: Rails.logger)
 
 # Set up an asynchronous producer that delivers its buffered messages
 # every ten seconds:
@@ -469,7 +467,7 @@ Consuming messages from a Kafka topic with ruby-kafka is simple:
 ```ruby
 require "kafka"
 
-kafka = Kafka.new(seed_brokers: ["kafka1:9092", "kafka2:9092"])
+kafka = Kafka.new(["kafka1:9092", "kafka2:9092"])
 
 kafka.each_message(topic: "greetings") do |message|
   puts message.offset, message.key, message.value
@@ -492,7 +490,7 @@ Using the API is simple:
 ```ruby
 require "kafka"
 
-kafka = Kafka.new(seed_brokers: ["kafka1:9092", "kafka2:9092"])
+kafka = Kafka.new(["kafka1:9092", "kafka2:9092"])
 
 # Consumers with the same group id will form a Consumer Group together.
 consumer = kafka.consumer(group_id: "my-consumer")
@@ -880,20 +878,30 @@ By enabling SSL encryption you can have some confidence that messages can be sen
 In this case you just need to pass a valid CA certificate as a string when configuring your `Kafka` client:
 
 ```ruby
-kafka = Kafka.new(
-  ssl_ca_cert: File.read('my_ca_cert.pem'),
-  # ...
-)
+kafka = Kafka.new(["kafka1:9092"], ssl_ca_cert: File.read('my_ca_cert.pem'))
 ```
 
 Without passing the CA certificate to the client it would be impossible to protect against [man-in-the-middle attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack).
 
+##### Using your system's CA cert store
+
+If you want to use the CA certs from your system's default certificate store, you
+can use:
+
+```ruby
+kafka = Kafka.new(["kafka1:9092"], ssl_ca_certs_from_system: true)
+```
+
+This configures the store to look up CA certificates from the system default certificate store on an as needed basis. The location of the store can usually be determined by:
+`OpenSSL::X509::DEFAULT_CERT_FILE`
+
 ##### Client Authentication
 
 In order to authenticate the client to the cluster, you need to pass in a certificate and key created for the client and trusted by the brokers.
 
 ```ruby
 kafka = Kafka.new(
+  ["kafka1:9092"],
   ssl_ca_cert: File.read('my_ca_cert.pem'),
   ssl_client_cert: File.read('my_client_cert.pem'),
   ssl_client_cert_key: File.read('my_client_cert_key.pem'),
@@ -916,6 +924,7 @@ In order to authenticate using GSSAPI, set your principal and optionally your ke
 
 ```ruby
 kafka = Kafka.new(
+  ["kafka1:9092"],
   sasl_gssapi_principal: 'kafka/kafka.example.com@EXAMPLE.COM',
   sasl_gssapi_keytab: '/etc/keytabs/kafka.keytab',
   # ...
@@ -927,6 +936,7 @@ In order to authenticate using PLAIN, you must set your username and password wh
 
 ```ruby
 kafka = Kafka.new(
+  ["kafka1:9092"],
   ssl_ca_cert: File.read('/etc/openssl/cert.pem'), # Optional but highly recommended
   sasl_plain_username: 'username',
   sasl_plain_password: 'password'
@@ -941,6 +951,7 @@ Since 0.11 kafka supports [SCRAM](https://kafka.apache.org/documentation.html#se
 
 ```ruby
 kafka = Kafka.new(
+  ["kafka1:9092"],
   sasl_scram_username: 'username',
   sasl_scram_password: 'password',
   sasl_scram_mechanism: 'sha256',
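The new system-store option composes with the SASL settings shown above. A minimal sketch, assuming brokers that speak SCRAM over SSL (hostname and credentials are placeholders):

```ruby
require "kafka"

kafka = Kafka.new(
  ["kafka1:9092"],
  # Trust the OS certificate bundle instead of shipping a CA cert with the app.
  ssl_ca_certs_from_system: true,
  sasl_scram_username: 'username',
  sasl_scram_password: 'password',
  sasl_scram_mechanism: 'sha256',
)
```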
data/docker-compose.yml
CHANGED
@@ -5,7 +5,7 @@ services:
     ports:
       - "2181:2181"
   kafka1:
-    image: wurstmeister/kafka:0.
+    image: wurstmeister/kafka:0.11.0.1
     ports:
       - "9092:9092"
     environment:
@@ -16,7 +16,7 @@ services:
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
   kafka2:
-    image: wurstmeister/kafka:0.
+    image: wurstmeister/kafka:0.11.0.1
     ports:
       - "9093:9092"
     environment:
@@ -27,7 +27,7 @@ services:
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
   kafka3:
-    image: wurstmeister/kafka:0.
+    image: wurstmeister/kafka:0.11.0.1
     ports:
       - "9094:9092"
     environment:
data/examples/simple-consumer.rb
CHANGED
@@ -14,7 +14,7 @@ require "kafka"
 # with e.g. `$stderr` if you want to see what's happening under the hood.
 logger = Logger.new(StringIO.new)
 
-brokers = ENV.fetch("KAFKA_BROKERS")
+brokers = ENV.fetch("KAFKA_BROKERS").split(",")
 
 # Make sure to create this topic in your Kafka cluster or configure the
 # cluster to auto-create topics.
data/lib/kafka.rb
CHANGED
@@ -243,8 +243,14 @@ module Kafka
   #
   # @see Client#initialize
   # @return [Client]
-  def self.new(**options)
-    Client.new(**options)
+  def self.new(seed_brokers = nil, **options)
+    # We allow `seed_brokers` to be passed in either as a positional _or_ as a
+    # keyword argument.
+    if seed_brokers.nil?
+      Client.new(**options)
+    else
+      Client.new(seed_brokers: seed_brokers, **options)
+    end
   end
 end
 
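The upshot of the new signature: both invocation styles build the same `Kafka::Client`. A minimal sketch (broker addresses are placeholders):

```ruby
require "kafka"

# New in this release (#538): seed brokers as a positional argument.
kafka = Kafka.new(["kafka1:9092", "kafka2:9092"], client_id: "my-application")

# Equivalent to the existing keyword form, which keeps working:
kafka = Kafka.new(seed_brokers: ["kafka1:9092", "kafka2:9092"], client_id: "my-application")
```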
data/lib/kafka/broker.rb
CHANGED
@@ -115,6 +115,24 @@ module Kafka
       send_request(request)
     end
 
+    def delete_topics(**options)
+      request = Protocol::DeleteTopicsRequest.new(**options)
+
+      send_request(request)
+    end
+
+    def describe_configs(**options)
+      request = Protocol::DescribeConfigsRequest.new(**options)
+
+      send_request(request)
+    end
+
+    def create_partitions(**options)
+      request = Protocol::CreatePartitionsRequest.new(**options)
+
+      send_request(request)
+    end
+
     def api_versions
       request = Protocol::ApiVersionsRequest.new
 
data/lib/kafka/client.rb
CHANGED
@@ -60,12 +60,12 @@ module Kafka
         ssl_ca_cert_file_path: nil, ssl_ca_cert: nil, ssl_client_cert: nil, ssl_client_cert_key: nil,
         sasl_gssapi_principal: nil, sasl_gssapi_keytab: nil,
         sasl_plain_authzid: '', sasl_plain_username: nil, sasl_plain_password: nil,
-        sasl_scram_username: nil, sasl_scram_password: nil, sasl_scram_mechanism: nil)
+        sasl_scram_username: nil, sasl_scram_password: nil, sasl_scram_mechanism: nil, ssl_ca_certs_from_system: false)
       @logger = logger || Logger.new(nil)
       @instrumenter = Instrumenter.new(client_id: client_id)
       @seed_brokers = normalize_seed_brokers(seed_brokers)
 
-      ssl_context = build_ssl_context(ssl_ca_cert_file_path, ssl_ca_cert, ssl_client_cert, ssl_client_cert_key)
+      ssl_context = build_ssl_context(ssl_ca_cert_file_path, ssl_ca_cert, ssl_client_cert, ssl_client_cert_key, ssl_ca_certs_from_system)
 
       sasl_authenticator = SaslAuthenticator.new(
         sasl_gssapi_principal: sasl_gssapi_principal,
@@ -463,6 +463,47 @@ module Kafka
       @cluster.create_topic(name, num_partitions: num_partitions, replication_factor: replication_factor, timeout: timeout)
     end
 
+    # Delete a topic in the cluster.
+    #
+    # @param name [String] the name of the topic.
+    # @param timeout [Integer] a duration of time to wait for the topic to be
+    #   completely marked deleted.
+    # @return [nil]
+    def delete_topic(name, timeout: 30)
+      @cluster.delete_topic(name, timeout: timeout)
+    end
+
+    # Describe the configuration of a topic.
+    #
+    # Retrieves the topic configuration from the Kafka brokers. Configuration names
+    # refer to [Kafka's topic-level configs](https://kafka.apache.org/documentation/#topicconfigs).
+    #
+    # @note This is an alpha level API and is subject to change.
+    #
+    # @example Describing the cleanup policy config of a topic
+    #   kafka = Kafka.new(["kafka1:9092"])
+    #   kafka.describe_topic("my-topic", ["cleanup.policy"])
+    #   #=> { "cleanup.policy" => "delete" }
+    #
+    # @param name [String] the name of the topic.
+    # @param configs [Array<String>] array of desired config names.
+    # @return [Hash<String, String>]
+    def describe_topic(name, configs = [])
+      @cluster.describe_topic(name, configs)
+    end
+
+    # Create partitions for a topic.
+    #
+    # @param name [String] the name of the topic.
+    # @param num_partitions [Integer] the number of desired partitions for
+    #   the topic
+    # @param timeout [Integer] a duration of time to wait for the new
+    #   partitions to be added.
+    # @return [nil]
+    def create_partitions_for(name, num_partitions: 1, timeout: 30)
+      @cluster.create_partitions_for(name, num_partitions: num_partitions, timeout: timeout)
+    end
+
     # Lists all topics in the cluster.
     #
     # @return [Array<String>] the list of topic names.
@@ -516,6 +557,15 @@ module Kafka
       }.to_h
     end
 
+    # Check whether current cluster supports a specific version or not
+    #
+    # @param api_key [Integer] API key.
+    # @param version [Integer] API version.
+    # @return [Boolean]
+    def supports_api?(api_key, version = nil)
+      @cluster.supports_api?(api_key, version)
+    end
+
     def apis
       @cluster.apis
     end
@@ -542,8 +592,8 @@ module Kafka
       )
     end
 
-    def build_ssl_context(ca_cert_file_path, ca_cert, client_cert, client_cert_key)
-      return nil unless ca_cert_file_path || ca_cert || client_cert || client_cert_key
+    def build_ssl_context(ca_cert_file_path, ca_cert, client_cert, client_cert_key, ssl_ca_certs_from_system)
+      return nil unless ca_cert_file_path || ca_cert || client_cert || client_cert_key || ssl_ca_certs_from_system
 
       ssl_context = OpenSSL::SSL::SSLContext.new
 
@@ -558,7 +608,7 @@ module Kafka
         raise ArgumentError, "Kafka client initialized with `ssl_client_cert_key`, but no `ssl_client_cert`. Please provide both."
       end
 
-      if ca_cert || ca_cert_file_path
+      if ca_cert || ca_cert_file_path || ssl_ca_certs_from_system
         store = OpenSSL::X509::Store.new
         Array(ca_cert).each do |cert|
           store.add_cert(OpenSSL::X509::Certificate.new(cert))
@@ -566,6 +616,9 @@ module Kafka
         if ca_cert_file_path
           store.add_file(ca_cert_file_path)
         end
+        if ssl_ca_certs_from_system
+          store.set_default_paths
+        end
         ssl_context.cert_store = store
       end
 
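Taken together, these methods give `Kafka::Client` a small admin surface. A minimal sketch of how they compose, assuming a reachable broker at `kafka1:9092` on a cluster with topic deletion enabled (as the CI config above now sets via `KAFKA_DELETE_TOPIC_ENABLE`):

```ruby
require "kafka"

kafka = Kafka.new(["kafka1:9092"], client_id: "admin-sketch")

# Only call the new APIs when the brokers actually advertise them.
if kafka.supports_api?(Kafka::Protocol::DELETE_TOPICS_API)
  kafka.create_topic("scratch", num_partitions: 1, replication_factor: 1)

  # Grow the topic to three partitions; partitions can be added but never removed.
  kafka.create_partitions_for("scratch", num_partitions: 3, timeout: 30)

  # Fetch selected topic-level configs as a Hash.
  kafka.describe_topic("scratch", ["cleanup.policy"]) #=> { "cleanup.policy" => "delete" }

  kafka.delete_topic("scratch", timeout: 30)
end
```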
data/lib/kafka/cluster.rb
CHANGED
@@ -53,6 +53,17 @@ module Kafka
       apis.find {|api| api.api_key == api_key }
     end
 
+    def supports_api?(api_key, version = nil)
+      info = api_info(api_key)
+      if info.nil?
+        return false
+      elsif version.nil?
+        return true
+      else
+        return info.version_supported?(version)
+      end
+    end
+
     def apis
       @apis ||=
         begin
@@ -180,6 +191,64 @@ module Kafka
       @logger.info "Topic `#{name}` was created"
     end
 
+    def delete_topic(name, timeout:)
+      options = {
+        topics: [name],
+        timeout: timeout,
+      }
+
+      broker = controller_broker
+
+      @logger.info "Deleting topic `#{name}` using controller broker #{broker}"
+
+      response = broker.delete_topics(**options)
+
+      response.errors.each do |topic, error_code|
+        Protocol.handle_error(error_code)
+      end
+
+      @logger.info "Topic `#{name}` was deleted"
+    end
+
+    def describe_topic(name, configs = [])
+      options = {
+        resources: [[Kafka::Protocol::RESOURCE_TYPE_TOPIC, name, configs]]
+      }
+      broker = controller_broker
+
+      @logger.info "Fetching topic `#{name}`'s configs using controller broker #{broker}"
+
+      response = broker.describe_configs(**options)
+
+      response.resources.each do |resource|
+        Protocol.handle_error(resource.error_code, resource.error_message)
+      end
+      topic_description = response.resources.first
+      topic_description.configs.each_with_object({}) do |config, hash|
+        hash[config.name] = config.value
+      end
+    end
+
+    def create_partitions_for(name, num_partitions:, timeout:)
+      options = {
+        topics: [[name, num_partitions, nil]],
+        timeout: timeout
+      }
+
+      broker = controller_broker
+
+      @logger.info "Creating #{num_partitions} partition(s) for topic `#{name}` using controller broker #{broker}"
+
+      response = broker.create_partitions(**options)
+
+      response.errors.each do |topic, error_code, error_message|
+        Protocol.handle_error(error_code, error_message)
+      end
+      mark_as_stale!
+
+      @logger.info "Topic `#{name}` was updated"
+    end
+
     def resolve_offsets(topic, partitions, offset)
       add_target_topics([topic])
       refresh_metadata_if_necessary!
@@ -229,13 +298,17 @@ module Kafka
 
     def topics
       refresh_metadata_if_necessary!
-      cluster_info.topics.map(&:topic_name)
+      cluster_info.topics.select do |topic|
+        topic.topic_error_code == 0
+      end.map(&:topic_name)
     end
 
     # Lists all topics in the cluster.
     def list_topics
       response = random_broker.fetch_metadata(topics: nil)
-      response.topics.map(&:topic_name)
+      response.topics.select do |topic|
+        topic.topic_error_code == 0
+      end.map(&:topic_name)
     end
 
     def disconnect
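Note that `Cluster#supports_api?` treats the version as optional: with no `version` it only checks that the API key appears in the brokers' advertised ranges; otherwise it defers to `version_supported?` on the matching API info. A minimal sketch through the client wrapper (broker address is a placeholder):

```ruby
require "kafka"

kafka = Kafka.new(["kafka1:9092"])

kafka.supports_api?(Kafka::Protocol::CREATE_PARTITIONS_API)    # advertised at all?
kafka.supports_api?(Kafka::Protocol::CREATE_PARTITIONS_API, 0) # version 0 specifically?
```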
data/lib/kafka/consumer.rb
CHANGED
@@ -20,7 +20,7 @@ module Kafka
   #
   #     require "kafka"
   #
-  #     kafka = Kafka.new(
+  #     kafka = Kafka.new(["kafka1:9092", "kafka2:9092"])
   #
   #     # Create a new Consumer instance in the group `my-group`:
   #     consumer = kafka.consumer(group_id: "my-group")
@@ -371,7 +371,9 @@ module Kafka
 
       while @running
         begin
-          yield
+          @instrumenter.instrument("loop.consumer") do
+            yield
+          end
         rescue HeartbeatError, OffsetCommitError
           join_group
         rescue RebalanceInProgress
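The new `loop.consumer` event makes each pass of the consumer loop observable. A minimal sketch of subscribing to it, assuming `active_support` is loaded so ruby-kafka publishes through `ActiveSupport::Notifications` (where its event names get a `.kafka` suffix):

```ruby
require "active_support/notifications"
require "kafka"

ActiveSupport::Notifications.subscribe("loop.consumer.kafka") do |*args|
  event = ActiveSupport::Notifications::Event.new(*args)
  puts "consumer loop iteration took #{event.duration.round(1)} ms"
end
```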
data/lib/kafka/consumer_group.rb
CHANGED
data/lib/kafka/producer.rb
CHANGED
@@ -14,7 +14,7 @@ module Kafka
   # do it for you, e.g.
   #
   #     # Will instantiate Kafka::Client
-  #     kafka = Kafka.new(
+  #     kafka = Kafka.new(["kafka1:9092", "kafka2:9092"])
   #
   #     # Will instantiate Kafka::Producer
   #     producer = kafka.producer
@@ -106,12 +106,7 @@ module Kafka
   #     # cluster to auto-create topics.
   #     topic = "random-messages"
   #
-  #     kafka = Kafka.new(
-  #       seed_brokers: brokers,
-  #       client_id: "simple-producer",
-  #       logger: logger,
-  #     )
-  #
+  #     kafka = Kafka.new(brokers, client_id: "simple-producer", logger: logger)
   #     producer = kafka.producer
   #
   #     begin
data/lib/kafka/protocol.rb
CHANGED
@@ -26,6 +26,9 @@ module Kafka
     SASL_HANDSHAKE_API = 17
     API_VERSIONS_API = 18
     CREATE_TOPICS_API = 19
+    DELETE_TOPICS_API = 20
+    DESCRIBE_CONFIGS_API = 32
+    CREATE_PARTITIONS_API = 37
 
     # A mapping from numeric API keys to symbolic API names.
     APIS = {
@@ -43,6 +46,9 @@ module Kafka
       SASL_HANDSHAKE_API => :sasl_handshake,
       API_VERSIONS_API => :api_versions,
       CREATE_TOPICS_API => :create_topics,
+      DELETE_TOPICS_API => :delete_topics,
+      DESCRIBE_CONFIGS_API => :describe_configs_api,
+      CREATE_PARTITIONS_API => :create_partitions
     }
 
     # A mapping from numeric error codes to exception classes.
@@ -87,19 +93,38 @@ module Kafka
       42 => InvalidRequest
     }
 
+    # A mapping from int to corresponding resource type in symbol.
+    # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/resource/ResourceType.java
+    RESOURCE_TYPE_UNKNOWN = 0
+    RESOURCE_TYPE_ANY = 1
+    RESOURCE_TYPE_TOPIC = 2
+    RESOURCE_TYPE_GROUP = 3
+    RESOURCE_TYPE_CLUSTER = 4
+    RESOURCE_TYPE_TRANSACTIONAL_ID = 5
+    RESOURCE_TYPE_DELEGATION_TOKEN = 6
+    RESOURCE_TYPES = {
+      RESOURCE_TYPE_UNKNOWN => :unknown,
+      RESOURCE_TYPE_ANY => :any,
+      RESOURCE_TYPE_TOPIC => :topic,
+      RESOURCE_TYPE_GROUP => :group,
+      RESOURCE_TYPE_CLUSTER => :cluster,
+      RESOURCE_TYPE_TRANSACTIONAL_ID => :transactional_id,
+      RESOURCE_TYPE_DELEGATION_TOKEN => :delegation_token,
+    }
+
     # Handles an error code by either doing nothing (if there was no error) or
     # by raising an appropriate exception.
     #
     # @param error_code Integer
     # @raise [ProtocolError]
     # @return [nil]
-    def self.handle_error(error_code)
+    def self.handle_error(error_code, error_message = nil)
      if error_code == 0
         # No errors, yay!
       elsif error = ERRORS[error_code]
-        raise error
+        raise error, error_message
       else
-        raise UnknownError, "Unknown error with code #{error_code}"
+        raise UnknownError, "Unknown error with code #{error_code} #{error_message}"
       end
     end
 
@@ -141,3 +166,9 @@ require "kafka/protocol/sasl_handshake_request"
 require "kafka/protocol/sasl_handshake_response"
 require "kafka/protocol/create_topics_request"
 require "kafka/protocol/create_topics_response"
+require "kafka/protocol/delete_topics_request"
+require "kafka/protocol/delete_topics_response"
+require "kafka/protocol/describe_configs_request"
+require "kafka/protocol/describe_configs_response"
+require "kafka/protocol/create_partitions_request"
+require "kafka/protocol/create_partitions_response"
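A minimal sketch of the extended `handle_error`, using the `42 => InvalidRequest` mapping visible above; the second argument is optional, and when present it becomes the exception message instead of being discarded:

```ruby
require "kafka"

begin
  Kafka::Protocol.handle_error(42, "Duplicate topic in request")
rescue Kafka::InvalidRequest => e
  puts e.message #=> "Duplicate topic in request"
end
```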
data/lib/kafka/protocol/create_partitions_request.rb
ADDED
@@ -0,0 +1,40 @@
+module Kafka
+  module Protocol
+
+    class CreatePartitionsRequest
+      def initialize(topics:, timeout:)
+        @topics, @timeout = topics, timeout
+      end
+
+      def api_key
+        CREATE_PARTITIONS_API
+      end
+
+      def api_version
+        0
+      end
+
+      def response_class
+        Protocol::CreatePartitionsResponse
+      end
+
+      def encode(encoder)
+        encoder.write_array(@topics) do |topic, count, assignments|
+          encoder.write_string(topic)
+          encoder.write_int32(count)
+          encoder.write_array(assignments) do |assignment|
+            encoder.write_array(assignment) do |broker|
+              encoder.write_int32(broker)
+            end
+          end
+        end
+        # Timeout is in ms.
+        encoder.write_int32(@timeout * 1000)
+        # validate_only. There isn't any use case for this in real life. So
+        # let's ignore it for now
+        encoder.write_boolean(false)
+      end
+    end
+
+  end
+end
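For reference, each element of `topics` is a `[topic, count, assignments]` triple: `count` is the desired total partition count, and `assignments` may be `nil` to let the controller place replicas, which is what `Cluster#create_partitions_for` above passes. A hypothetical construction:

```ruby
# When given, `assignments` is presumably one broker-id array per new
# partition, e.g. [[1, 2], [2, 3]]; nil delegates placement to the controller.
request = Kafka::Protocol::CreatePartitionsRequest.new(
  topics: [["my-topic", 5, nil]],
  timeout: 30 # seconds; the encoder converts this to milliseconds on the wire
)
```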
data/lib/kafka/protocol/create_partitions_response.rb
ADDED
@@ -0,0 +1,26 @@
+module Kafka
+  module Protocol
+
+    class CreatePartitionsResponse
+      attr_reader :errors
+
+      def initialize(throttle_time_ms:, errors:)
+        @throttle_time_ms = throttle_time_ms
+        @errors = errors
+      end
+
+      def self.decode(decoder)
+        throttle_time_ms = decoder.int32
+        errors = decoder.array do
+          topic = decoder.string
+          error_code = decoder.int16
+          error_message = decoder.string
+          [topic, error_code, error_message]
+        end
+
+        new(throttle_time_ms: throttle_time_ms, errors: errors)
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/delete_topics_request.rb
ADDED
@@ -0,0 +1,31 @@
+module Kafka
+  module Protocol
+
+    class DeleteTopicsRequest
+      def initialize(topics:, timeout:)
+        @topics, @timeout = topics, timeout
+      end
+
+      def api_key
+        DELETE_TOPICS_API
+      end
+
+      def api_version
+        0
+      end
+
+      def response_class
+        Protocol::DeleteTopicsResponse
+      end
+
+      def encode(encoder)
+        encoder.write_array(@topics) do |topic|
+          encoder.write_string(topic)
+        end
+        # Timeout is in ms.
+        encoder.write_int32(@timeout * 1000)
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/delete_topics_response.rb
ADDED
@@ -0,0 +1,24 @@
+module Kafka
+  module Protocol
+
+    class DeleteTopicsResponse
+      attr_reader :errors
+
+      def initialize(errors:)
+        @errors = errors
+      end
+
+      def self.decode(decoder)
+        errors = decoder.array do
+          topic = decoder.string
+          error_code = decoder.int16
+
+          [topic, error_code]
+        end
+
+        new(errors: errors)
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/describe_configs_request.rb
ADDED
@@ -0,0 +1,33 @@
+module Kafka
+  module Protocol
+
+    class DescribeConfigsRequest
+      def initialize(resources:)
+        @resources = resources
+      end
+
+      def api_key
+        DESCRIBE_CONFIGS_API
+      end
+
+      def api_version
+        0
+      end
+
+      def response_class
+        Protocol::DescribeConfigsResponse
+      end
+
+      def encode(encoder)
+        encoder.write_array(@resources) do |type, name, configs|
+          encoder.write_int8(type)
+          encoder.write_string(name)
+          encoder.write_array(configs) do |config|
+            encoder.write_string(config)
+          end
+        end
+      end
+    end
+
+  end
+end
data/lib/kafka/protocol/describe_configs_response.rb
ADDED
@@ -0,0 +1,71 @@
+module Kafka
+  module Protocol
+    class DescribeConfigsResponse
+      class ResourceDescription
+        attr_reader :name, :type, :error_code, :error_message, :configs
+
+        def initialize(name:, type:, error_code:, error_message:, configs:)
+          @name = name
+          @type = type
+          @error_code = error_code
+          @error_message = error_message
+          @configs = configs
+        end
+      end
+
+      class ConfigEntry
+        attr_reader :name, :value, :read_only, :is_default, :is_sensitive
+
+        def initialize(name:, value:, read_only:, is_default:, is_sensitive:)
+          @name = name
+          @value = value
+          @read_only = read_only
+          @is_default = is_default
+          @is_sensitive = is_sensitive
+        end
+      end
+
+      attr_reader :resources
+
+      def initialize(throttle_time_ms:, resources:)
+        @throttle_time_ms = throttle_time_ms
+        @resources = resources
+      end
+
+      def self.decode(decoder)
+        throttle_time_ms = decoder.int32
+        resources = decoder.array do
+          error_code = decoder.int16
+          error_message = decoder.string
+
+          resource_type = decoder.int8
+          if Kafka::Protocol::RESOURCE_TYPES[resource_type].nil?
+            raise Kafka::ProtocolError, "Resource type not supported: #{resource_type}"
+          end
+          resource_name = decoder.string
+
+          configs = decoder.array do
+            ConfigEntry.new(
+              name: decoder.string,
+              value: decoder.string,
+              read_only: decoder.boolean,
+              is_default: decoder.boolean,
+              is_sensitive: decoder.boolean,
+            )
+          end
+
+          ResourceDescription.new(
+            type: RESOURCE_TYPES[resource_type],
+            name: resource_name,
+            error_code: error_code,
+            error_message: error_message,
+            configs: configs
+          )
+        end
+
+        new(throttle_time_ms: throttle_time_ms, resources: resources)
+      end
+    end
+
+  end
+end
data/lib/kafka/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.5.
+  version: 0.5.3
 platform: ruby
 authors:
 - Daniel Schierbeck
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2018-
+date: 2018-02-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -299,9 +299,6 @@ files:
 - benchmarks/message_encoding.rb
 - bin/console
 - bin/setup
-- ci/consumer.rb
-- ci/init.rb
-- ci/producer.rb
 - docker-compose.yml
 - examples/consumer-group.rb
 - examples/firehose-consumer.rb
@@ -340,9 +337,15 @@ files:
 - lib/kafka/protocol/api_versions_request.rb
 - lib/kafka/protocol/api_versions_response.rb
 - lib/kafka/protocol/consumer_group_protocol.rb
+- lib/kafka/protocol/create_partitions_request.rb
+- lib/kafka/protocol/create_partitions_response.rb
 - lib/kafka/protocol/create_topics_request.rb
 - lib/kafka/protocol/create_topics_response.rb
 - lib/kafka/protocol/decoder.rb
+- lib/kafka/protocol/delete_topics_request.rb
+- lib/kafka/protocol/delete_topics_response.rb
+- lib/kafka/protocol/describe_configs_request.rb
+- lib/kafka/protocol/describe_configs_response.rb
 - lib/kafka/protocol/encoder.rb
 - lib/kafka/protocol/fetch_request.rb
 - lib/kafka/protocol/fetch_response.rb
@@ -383,7 +386,6 @@ files:
 - lib/kafka/statsd.rb
 - lib/kafka/version.rb
 - lib/ruby-kafka.rb
-- performance/profile.rb
 - ruby-kafka.gemspec
 homepage: https://github.com/zendesk/ruby-kafka
 licenses:
data/ci/consumer.rb
DELETED
@@ -1,18 +0,0 @@
-# Consumes messages from a Kafka topic.
-
-require_relative "init"
-
-consumer = $kafka.consumer(group_id: "greetings-group")
-consumer.subscribe("greetings")
-
-num_messages = 0
-
-trap("TERM") { consumer.stop }
-
-consumer.each_message do |message|
-  num_messages += 1
-
-  if num_messages % 1000 == 0
-    puts "Processed #{num_messages} messages"
-  end
-end
data/ci/init.rb
DELETED
@@ -1,17 +0,0 @@
-$LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
-
-require "kafka"
-
-logger = Logger.new(STDOUT)
-logger.level = Logger::INFO
-logger.formatter = ->(_, _, _, msg) { msg }
-
-STDOUT.sync = true
-
-$kafka = Kafka.new(
-  logger: logger,
-  seed_brokers: ENV.fetch("HEROKU_KAFKA_URL"),
-  ssl_ca_cert: ENV.fetch("HEROKU_KAFKA_TRUSTED_CERT"),
-  ssl_client_cert: ENV.fetch("HEROKU_KAFKA_CLIENT_CERT"),
-  ssl_client_cert_key: ENV.fetch("HEROKU_KAFKA_CLIENT_CERT_KEY"),
-)
data/ci/producer.rb
DELETED
@@ -1,25 +0,0 @@
-# Continuously produces messages to a Kafka topic.
-
-require_relative "init"
-
-producer = $kafka.async_producer(
-  delivery_interval: 1,
-  max_queue_size: 5_000,
-  max_buffer_size: 10_000,
-)
-
-num_messages = 0
-shutdown = false
-
-trap("TERM") { shutdown = true }
-
-until shutdown
-  begin
-    producer.produce("hello", key: "world", topic: "greetings")
-  rescue Kafka::BufferOverflow
-    puts "Buffer overflow, backing off..."
-    sleep 10
-  end
-end
-
-producer.shutdown
data/performance/profile.rb
DELETED
@@ -1,39 +0,0 @@
-$LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
-$LOAD_PATH.unshift(File.expand_path("../../spec", __FILE__))
-
-require "kafka"
-require "ruby-prof"
-require "dotenv"
-require "test_cluster"
-
-Dotenv.load
-
-# Number of times do iterate.
-N = 10_000
-
-KAFKA_CLUSTER = TestCluster.new
-KAFKA_CLUSTER.start
-
-logger = Logger.new(nil)
-
-kafka = Kafka.new(
-  seed_brokers: KAFKA_CLUSTER.kafka_hosts,
-  client_id: "test",
-  logger: logger,
-)
-
-producer = kafka.producer(
-  max_buffer_size: 100_000,
-)
-
-RubyProf.start
-
-N.times do
-  producer.produce("hello", topic: "greetings")
-end
-
-result = RubyProf.stop
-printer = RubyProf::FlatPrinter.new(result)
-printer.print(STDOUT)
-
-KAFKA_CLUSTER.stop