ruby-kafka-aws-iam 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.circleci/config.yml +393 -0
- data/.github/workflows/stale.yml +19 -0
- data/.gitignore +13 -0
- data/.readygo +1 -0
- data/.rspec +3 -0
- data/.rubocop.yml +44 -0
- data/.ruby-version +1 -0
- data/.yardopts +3 -0
- data/CHANGELOG.md +314 -0
- data/Gemfile +5 -0
- data/ISSUE_TEMPLATE.md +23 -0
- data/LICENSE.txt +176 -0
- data/Procfile +2 -0
- data/README.md +1356 -0
- data/Rakefile +8 -0
- data/benchmarks/message_encoding.rb +23 -0
- data/bin/console +8 -0
- data/bin/setup +5 -0
- data/docker-compose.yml +39 -0
- data/examples/consumer-group.rb +35 -0
- data/examples/firehose-consumer.rb +64 -0
- data/examples/firehose-producer.rb +54 -0
- data/examples/simple-consumer.rb +34 -0
- data/examples/simple-producer.rb +42 -0
- data/examples/ssl-producer.rb +44 -0
- data/lib/kafka/async_producer.rb +297 -0
- data/lib/kafka/broker.rb +217 -0
- data/lib/kafka/broker_info.rb +16 -0
- data/lib/kafka/broker_pool.rb +41 -0
- data/lib/kafka/broker_uri.rb +43 -0
- data/lib/kafka/client.rb +838 -0
- data/lib/kafka/cluster.rb +513 -0
- data/lib/kafka/compression.rb +45 -0
- data/lib/kafka/compressor.rb +86 -0
- data/lib/kafka/connection.rb +228 -0
- data/lib/kafka/connection_builder.rb +33 -0
- data/lib/kafka/consumer.rb +642 -0
- data/lib/kafka/consumer_group/assignor.rb +63 -0
- data/lib/kafka/consumer_group.rb +231 -0
- data/lib/kafka/crc32_hash.rb +15 -0
- data/lib/kafka/datadog.rb +420 -0
- data/lib/kafka/digest.rb +22 -0
- data/lib/kafka/fetch_operation.rb +115 -0
- data/lib/kafka/fetched_batch.rb +58 -0
- data/lib/kafka/fetched_batch_generator.rb +120 -0
- data/lib/kafka/fetched_message.rb +48 -0
- data/lib/kafka/fetched_offset_resolver.rb +48 -0
- data/lib/kafka/fetcher.rb +224 -0
- data/lib/kafka/gzip_codec.rb +34 -0
- data/lib/kafka/heartbeat.rb +25 -0
- data/lib/kafka/instrumenter.rb +38 -0
- data/lib/kafka/interceptors.rb +33 -0
- data/lib/kafka/lz4_codec.rb +27 -0
- data/lib/kafka/message_buffer.rb +87 -0
- data/lib/kafka/murmur2_hash.rb +17 -0
- data/lib/kafka/offset_manager.rb +259 -0
- data/lib/kafka/partitioner.rb +40 -0
- data/lib/kafka/pause.rb +92 -0
- data/lib/kafka/pending_message.rb +29 -0
- data/lib/kafka/pending_message_queue.rb +41 -0
- data/lib/kafka/produce_operation.rb +205 -0
- data/lib/kafka/producer.rb +528 -0
- data/lib/kafka/prometheus.rb +316 -0
- data/lib/kafka/protocol/add_offsets_to_txn_request.rb +29 -0
- data/lib/kafka/protocol/add_offsets_to_txn_response.rb +21 -0
- data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
- data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
- data/lib/kafka/protocol/alter_configs_request.rb +44 -0
- data/lib/kafka/protocol/alter_configs_response.rb +49 -0
- data/lib/kafka/protocol/api_versions_request.rb +21 -0
- data/lib/kafka/protocol/api_versions_response.rb +53 -0
- data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
- data/lib/kafka/protocol/create_partitions_request.rb +42 -0
- data/lib/kafka/protocol/create_partitions_response.rb +28 -0
- data/lib/kafka/protocol/create_topics_request.rb +45 -0
- data/lib/kafka/protocol/create_topics_response.rb +26 -0
- data/lib/kafka/protocol/decoder.rb +175 -0
- data/lib/kafka/protocol/delete_topics_request.rb +33 -0
- data/lib/kafka/protocol/delete_topics_response.rb +26 -0
- data/lib/kafka/protocol/describe_configs_request.rb +35 -0
- data/lib/kafka/protocol/describe_configs_response.rb +73 -0
- data/lib/kafka/protocol/describe_groups_request.rb +27 -0
- data/lib/kafka/protocol/describe_groups_response.rb +73 -0
- data/lib/kafka/protocol/encoder.rb +184 -0
- data/lib/kafka/protocol/end_txn_request.rb +29 -0
- data/lib/kafka/protocol/end_txn_response.rb +19 -0
- data/lib/kafka/protocol/fetch_request.rb +70 -0
- data/lib/kafka/protocol/fetch_response.rb +136 -0
- data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
- data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
- data/lib/kafka/protocol/heartbeat_request.rb +27 -0
- data/lib/kafka/protocol/heartbeat_response.rb +17 -0
- data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
- data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
- data/lib/kafka/protocol/join_group_request.rb +47 -0
- data/lib/kafka/protocol/join_group_response.rb +41 -0
- data/lib/kafka/protocol/leave_group_request.rb +25 -0
- data/lib/kafka/protocol/leave_group_response.rb +17 -0
- data/lib/kafka/protocol/list_groups_request.rb +23 -0
- data/lib/kafka/protocol/list_groups_response.rb +35 -0
- data/lib/kafka/protocol/list_offset_request.rb +53 -0
- data/lib/kafka/protocol/list_offset_response.rb +89 -0
- data/lib/kafka/protocol/member_assignment.rb +42 -0
- data/lib/kafka/protocol/message.rb +172 -0
- data/lib/kafka/protocol/message_set.rb +55 -0
- data/lib/kafka/protocol/metadata_request.rb +31 -0
- data/lib/kafka/protocol/metadata_response.rb +185 -0
- data/lib/kafka/protocol/offset_commit_request.rb +47 -0
- data/lib/kafka/protocol/offset_commit_response.rb +29 -0
- data/lib/kafka/protocol/offset_fetch_request.rb +38 -0
- data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
- data/lib/kafka/protocol/produce_request.rb +94 -0
- data/lib/kafka/protocol/produce_response.rb +63 -0
- data/lib/kafka/protocol/record.rb +88 -0
- data/lib/kafka/protocol/record_batch.rb +223 -0
- data/lib/kafka/protocol/request_message.rb +26 -0
- data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
- data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
- data/lib/kafka/protocol/sync_group_request.rb +33 -0
- data/lib/kafka/protocol/sync_group_response.rb +26 -0
- data/lib/kafka/protocol/txn_offset_commit_request.rb +46 -0
- data/lib/kafka/protocol/txn_offset_commit_response.rb +47 -0
- data/lib/kafka/protocol.rb +225 -0
- data/lib/kafka/round_robin_assignment_strategy.rb +52 -0
- data/lib/kafka/sasl/awsmskiam.rb +128 -0
- data/lib/kafka/sasl/gssapi.rb +76 -0
- data/lib/kafka/sasl/oauth.rb +64 -0
- data/lib/kafka/sasl/plain.rb +39 -0
- data/lib/kafka/sasl/scram.rb +180 -0
- data/lib/kafka/sasl_authenticator.rb +73 -0
- data/lib/kafka/snappy_codec.rb +29 -0
- data/lib/kafka/socket_with_timeout.rb +96 -0
- data/lib/kafka/ssl_context.rb +66 -0
- data/lib/kafka/ssl_socket_with_timeout.rb +192 -0
- data/lib/kafka/statsd.rb +296 -0
- data/lib/kafka/tagged_logger.rb +77 -0
- data/lib/kafka/transaction_manager.rb +306 -0
- data/lib/kafka/transaction_state_machine.rb +72 -0
- data/lib/kafka/version.rb +5 -0
- data/lib/kafka/zstd_codec.rb +27 -0
- data/lib/kafka.rb +373 -0
- data/lib/ruby-kafka.rb +5 -0
- data/ruby-kafka.gemspec +54 -0
- metadata +520 -0
data/lib/kafka/broker.rb
ADDED
@@ -0,0 +1,217 @@
|
|
1
|
+
# frozen_string_literal: true

require "logger"
require "kafka/connection"
require "kafka/protocol"

module Kafka
  # Represents a single Kafka broker and exposes one method per Kafka API
  # call. Each API method builds the matching protocol request object and
  # sends it over a lazily established connection.
  class Broker
    # @param connection_builder [#build_connection] used to open connections on demand.
    # @param host [String] the broker's hostname.
    # @param port [Integer] the broker's port.
    # @param node_id [Integer, nil] the broker's node id, if known.
    # @param logger [Logger]
    def initialize(connection_builder:, host:, port:, node_id: nil, logger:)
      @connection_builder = connection_builder
      @connection = nil
      @host = host
      @port = port
      @node_id = node_id
      @logger = TaggedLogger.new(logger)
    end

    # Whether this broker instance points at the given address.
    #
    # @return [Boolean]
    def address_match?(host, port)
      @host == host && @port == port
    end

    # @return [String] a human-readable description of the broker.
    def to_s
      "#{@host}:#{@port} (node_id=#{@node_id.inspect})"
    end

    # Closes the connection, if one is currently open.
    #
    # @return [nil]
    def disconnect
      connection.close if connected?
    end

    # @return [Boolean] true if a connection has been established.
    def connected?
      !@connection.nil?
    end

    # Fetches cluster metadata from the broker.
    #
    # @param (see Kafka::Protocol::MetadataRequest#initialize)
    # @return [Kafka::Protocol::MetadataResponse]
    def fetch_metadata(**options)
      send_request(Protocol::MetadataRequest.new(**options))
    end

    # Fetches messages from a specified topic and partition.
    #
    # @param (see Kafka::Protocol::FetchRequest#initialize)
    # @return [Kafka::Protocol::FetchResponse]
    def fetch_messages(**options)
      send_request(Protocol::FetchRequest.new(**options))
    end

    # Lists the offset of the specified topics and partitions.
    #
    # @param (see Kafka::Protocol::ListOffsetRequest#initialize)
    # @return [Kafka::Protocol::ListOffsetResponse]
    def list_offsets(**options)
      send_request(Protocol::ListOffsetRequest.new(**options))
    end

    # Produces a set of messages to the broker.
    #
    # @param (see Kafka::Protocol::ProduceRequest#initialize)
    # @return [Kafka::Protocol::ProduceResponse]
    def produce(**options)
      send_request(Protocol::ProduceRequest.new(**options))
    end

    # Fetches committed offsets for a consumer group.
    def fetch_offsets(**options)
      send_request(Protocol::OffsetFetchRequest.new(**options))
    end

    # Commits offsets on behalf of a consumer group.
    def commit_offsets(**options)
      send_request(Protocol::OffsetCommitRequest.new(**options))
    end

    # Joins a consumer group.
    def join_group(**options)
      send_request(Protocol::JoinGroupRequest.new(**options))
    end

    # Synchronizes group member assignments.
    def sync_group(**options)
      send_request(Protocol::SyncGroupRequest.new(**options))
    end

    # Leaves a consumer group.
    def leave_group(**options)
      send_request(Protocol::LeaveGroupRequest.new(**options))
    end

    # Looks up the coordinator for a group or transaction.
    def find_coordinator(**options)
      send_request(Protocol::FindCoordinatorRequest.new(**options))
    end

    # Sends a consumer group heartbeat.
    def heartbeat(**options)
      send_request(Protocol::HeartbeatRequest.new(**options))
    end

    # Creates one or more topics.
    def create_topics(**options)
      send_request(Protocol::CreateTopicsRequest.new(**options))
    end

    # Deletes one or more topics.
    def delete_topics(**options)
      send_request(Protocol::DeleteTopicsRequest.new(**options))
    end

    # Describes broker or topic configuration entries.
    def describe_configs(**options)
      send_request(Protocol::DescribeConfigsRequest.new(**options))
    end

    # Alters broker or topic configuration entries.
    def alter_configs(**options)
      send_request(Protocol::AlterConfigsRequest.new(**options))
    end

    # Adds partitions to existing topics.
    def create_partitions(**options)
      send_request(Protocol::CreatePartitionsRequest.new(**options))
    end

    # Lists the consumer groups known to this broker.
    def list_groups
      send_request(Protocol::ListGroupsRequest.new)
    end

    # Queries the API versions supported by this broker.
    def api_versions
      send_request(Protocol::ApiVersionsRequest.new)
    end

    # Describes one or more consumer groups.
    def describe_groups(**options)
      send_request(Protocol::DescribeGroupsRequest.new(**options))
    end

    # Initializes a producer id for transactional/idempotent producing.
    def init_producer_id(**options)
      send_request(Protocol::InitProducerIDRequest.new(**options))
    end

    # Adds partitions to an ongoing transaction.
    def add_partitions_to_txn(**options)
      send_request(Protocol::AddPartitionsToTxnRequest.new(**options))
    end

    # Commits or aborts an ongoing transaction.
    def end_txn(**options)
      send_request(Protocol::EndTxnRequest.new(**options))
    end

    # Adds consumer group offsets to an ongoing transaction.
    def add_offsets_to_txn(**options)
      send_request(Protocol::AddOffsetsToTxnRequest.new(**options))
    end

    # Commits offsets as part of a transaction.
    def txn_offset_commit(**options)
      send_request(Protocol::TxnOffsetCommitRequest.new(**options))
    end

    private

    # Sends +request+ over the connection. An idle connection is torn down
    # and the request retried on a fresh one; on a connection error the
    # connection is torn down and the error is propagated to the caller.
    def send_request(request)
      connection.send_request(request)
    rescue IdleConnection
      @logger.warn "Connection has been unused for too long, re-connecting..."
      @connection.close rescue nil
      @connection = nil
      retry
    rescue ConnectionError
      @connection.close rescue nil
      @connection = nil

      raise
    end

    # Lazily builds and memoizes the connection to this broker.
    def connection
      @connection ||= @connection_builder.build_connection(@host, @port)
    end
  end
end
|
@@ -0,0 +1,16 @@
|
|
1
|
+
# frozen_string_literal: true

module Kafka
  # Holds the address and node id of a single broker in a Kafka cluster,
  # as reported by cluster metadata. This is a plain value object; it does
  # not hold a connection.
  class BrokerInfo
    attr_reader :node_id, :host, :port

    # @param node_id [Integer] the broker's node id within the cluster.
    # @param host [String] the broker's hostname.
    # @param port [Integer] the broker's port.
    def initialize(node_id:, host:, port:)
      @node_id = node_id
      @host = host
      @port = port
    end

    # @return [String] a human-readable description of the broker.
    def to_s
      "#{host}:#{port} (node_id=#{node_id})"
    end
  end
end
|
@@ -0,0 +1,41 @@
|
|
1
|
+
# frozen_string_literal: true

require "kafka/broker"

module Kafka
  # Keeps track of Broker instances, keyed by node id, so that the
  # connection to a given broker can be reused across requests.
  class BrokerPool
    # @param connection_builder [#build_connection] used by brokers to open connections.
    # @param logger [Logger]
    def initialize(connection_builder:, logger:)
      @logger = TaggedLogger.new(logger)
      @connection_builder = connection_builder
      @brokers = {}
    end

    # Returns a broker for the given address, reusing a cached broker when
    # the node id is known and its address still matches.
    #
    # @param host [String]
    # @param port [Integer]
    # @param node_id [Integer, nil] brokers are only cached when a node id is given.
    # @return [Kafka::Broker]
    def connect(host, port, node_id: nil)
      if @brokers.key?(node_id)
        cached = @brokers.fetch(node_id)
        return cached if cached.address_match?(host, port)

        # The node has moved to a different address; drop the stale broker.
        cached.disconnect
        @brokers[node_id] = nil
      end

      broker = Broker.new(
        connection_builder: @connection_builder,
        host: host,
        port: port,
        node_id: node_id,
        logger: @logger,
      )

      @brokers[node_id] = broker unless node_id.nil?

      broker
    end

    # Disconnects every cached broker.
    def close
      @brokers.each do |id, broker|
        @logger.info "Disconnecting broker #{id}"
        broker.disconnect
      end
    end
  end
end
|
@@ -0,0 +1,43 @@
|
|
1
|
+
# frozen_string_literal: true

require "uri"

module Kafka
  # Parses and normalizes Kafka broker URI strings.
  module BrokerUri
    DEFAULT_PORT = 9092

    # Schemes accepted after normalization. `plaintext` and `ssl` are
    # aliases mapped to `kafka` and `kafka+ssl`, respectively.
    URI_SCHEMES = ["kafka", "kafka+ssl", "plaintext", "ssl"].freeze

    # Parses a Kafka broker URI string.
    #
    # Examples of valid strings:
    # * `kafka1.something`
    # * `kafka1.something:1234`
    # * `kafka://kafka1.something:1234`
    # * `kafka+ssl://kafka1.something:1234`
    # * `plaintext://kafka1.something:1234`
    #
    # @param str [String] a Kafka broker URI string.
    # @raise [Kafka::Error] if the scheme is not one of URI_SCHEMES.
    # @return [URI] with scheme normalized to `kafka` or `kafka+ssl` and
    #   the port defaulted to DEFAULT_PORT when absent.
    def self.parse(str)
      # Make sure there's a scheme part if it's missing.
      str = "kafka://" + str unless str.include?("://")

      uri = URI.parse(str)
      uri.port ||= DEFAULT_PORT

      # Map the plaintext/ssl aliases to their canonical schemes.
      case uri.scheme
      when 'plaintext'
        uri.scheme = 'kafka'
      when 'ssl'
        uri.scheme = 'kafka+ssl'
      end

      unless URI_SCHEMES.include?(uri.scheme)
        raise Kafka::Error, "invalid protocol `#{uri.scheme}` in `#{str}`"
      end

      uri
    end
  end
end
|