ruby-kafka 1.1.0.beta1 → 1.4.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/.circleci/config.yml +111 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +21 -0
- data/README.md +141 -0
- data/lib/kafka/async_producer.rb +57 -42
- data/lib/kafka/client.rb +41 -7
- data/lib/kafka/cluster.rb +30 -24
- data/lib/kafka/consumer.rb +8 -3
- data/lib/kafka/consumer_group/assignor.rb +63 -0
- data/lib/kafka/consumer_group.rb +26 -6
- data/lib/kafka/crc32_hash.rb +15 -0
- data/lib/kafka/datadog.rb +12 -3
- data/lib/kafka/digest.rb +22 -0
- data/lib/kafka/interceptors.rb +33 -0
- data/lib/kafka/murmur2_hash.rb +17 -0
- data/lib/kafka/offset_manager.rb +12 -1
- data/lib/kafka/partitioner.rb +8 -3
- data/lib/kafka/producer.rb +9 -4
- data/lib/kafka/protocol/add_offsets_to_txn_response.rb +2 -0
- data/lib/kafka/protocol/encoder.rb +1 -1
- data/lib/kafka/protocol/join_group_request.rb +2 -2
- data/lib/kafka/protocol/join_group_response.rb +9 -1
- data/lib/kafka/protocol/record_batch.rb +2 -2
- data/lib/kafka/protocol/sync_group_response.rb +5 -2
- data/lib/kafka/protocol/txn_offset_commit_response.rb +34 -5
- data/lib/kafka/round_robin_assignment_strategy.rb +37 -39
- data/lib/kafka/ssl_context.rb +6 -5
- data/lib/kafka/transaction_manager.rb +30 -10
- data/lib/kafka/version.rb +1 -1
- data/ruby-kafka.gemspec +2 -1
- metadata +25 -7
data/lib/kafka/ssl_context.rb
CHANGED
@@ -47,18 +47,19 @@ module Kafka
|
|
47
47
|
Array(ca_cert).each do |cert|
|
48
48
|
store.add_cert(OpenSSL::X509::Certificate.new(cert))
|
49
49
|
end
|
50
|
-
|
51
|
-
store.add_file(
|
50
|
+
Array(ca_cert_file_path).each do |cert_file_path|
|
51
|
+
store.add_file(cert_file_path)
|
52
52
|
end
|
53
53
|
if ca_certs_from_system
|
54
54
|
store.set_default_paths
|
55
55
|
end
|
56
56
|
ssl_context.cert_store = store
|
57
|
-
ssl_context.verify_mode = OpenSSL::SSL::VERIFY_PEER
|
58
|
-
# Verify certificate hostname if supported (ruby >= 2.4.0)
|
59
|
-
ssl_context.verify_hostname = verify_hostname if ssl_context.respond_to?(:verify_hostname=)
|
60
57
|
end
|
61
58
|
|
59
|
+
ssl_context.verify_mode = OpenSSL::SSL::VERIFY_PEER
|
60
|
+
# Verify certificate hostname if supported (ruby >= 2.4.0)
|
61
|
+
ssl_context.verify_hostname = verify_hostname if ssl_context.respond_to?(:verify_hostname=)
|
62
|
+
|
62
63
|
ssl_context
|
63
64
|
end
|
64
65
|
end
|
@@ -95,7 +95,7 @@ module Kafka
|
|
95
95
|
force_transactional!
|
96
96
|
|
97
97
|
if @transaction_state.uninitialized?
|
98
|
-
raise 'Transaction is uninitialized'
|
98
|
+
raise Kafka::InvalidTxnStateError, 'Transaction is uninitialized'
|
99
99
|
end
|
100
100
|
|
101
101
|
# Extract newly created partitions
|
@@ -138,8 +138,8 @@ module Kafka
|
|
138
138
|
|
139
139
|
def begin_transaction
|
140
140
|
force_transactional!
|
141
|
-
raise 'Transaction has already started' if @transaction_state.in_transaction?
|
142
|
-
raise 'Transaction is not ready' unless @transaction_state.ready?
|
141
|
+
raise Kafka::InvalidTxnStateError, 'Transaction has already started' if @transaction_state.in_transaction?
|
142
|
+
raise Kafka::InvalidTxnStateError, 'Transaction is not ready' unless @transaction_state.ready?
|
143
143
|
@transaction_state.transition_to!(TransactionStateMachine::IN_TRANSACTION)
|
144
144
|
|
145
145
|
@logger.info "Begin transaction #{@transactional_id}, Producer ID: #{@producer_id} (Epoch #{@producer_epoch})"
|
@@ -159,7 +159,7 @@ module Kafka
|
|
159
159
|
end
|
160
160
|
|
161
161
|
unless @transaction_state.in_transaction?
|
162
|
-
raise 'Transaction is not valid to commit'
|
162
|
+
raise Kafka::InvalidTxnStateError, 'Transaction is not valid to commit'
|
163
163
|
end
|
164
164
|
|
165
165
|
@transaction_state.transition_to!(TransactionStateMachine::COMMITTING_TRANSACTION)
|
@@ -192,7 +192,8 @@ module Kafka
|
|
192
192
|
end
|
193
193
|
|
194
194
|
unless @transaction_state.in_transaction?
|
195
|
-
|
195
|
+
@logger.warn('Aborting transaction that was never opened on brokers')
|
196
|
+
return
|
196
197
|
end
|
197
198
|
|
198
199
|
@transaction_state.transition_to!(TransactionStateMachine::ABORTING_TRANSACTION)
|
@@ -221,7 +222,7 @@ module Kafka
|
|
221
222
|
force_transactional!
|
222
223
|
|
223
224
|
unless @transaction_state.in_transaction?
|
224
|
-
raise 'Transaction is not valid to send offsets'
|
225
|
+
raise Kafka::InvalidTxnStateError, 'Transaction is not valid to send offsets'
|
225
226
|
end
|
226
227
|
|
227
228
|
add_response = transaction_coordinator.add_offsets_to_txn(
|
@@ -232,14 +233,23 @@ module Kafka
|
|
232
233
|
)
|
233
234
|
Protocol.handle_error(add_response.error_code)
|
234
235
|
|
235
|
-
send_response =
|
236
|
+
send_response = group_coordinator(group_id: group_id).txn_offset_commit(
|
236
237
|
transactional_id: @transactional_id,
|
237
238
|
group_id: group_id,
|
238
239
|
producer_id: @producer_id,
|
239
240
|
producer_epoch: @producer_epoch,
|
240
241
|
offsets: offsets
|
241
242
|
)
|
242
|
-
|
243
|
+
send_response.errors.each do |tp|
|
244
|
+
tp.partitions.each do |partition|
|
245
|
+
Protocol.handle_error(partition.error_code)
|
246
|
+
end
|
247
|
+
end
|
248
|
+
|
249
|
+
nil
|
250
|
+
rescue
|
251
|
+
@transaction_state.transition_to!(TransactionStateMachine::ERROR)
|
252
|
+
raise
|
243
253
|
end
|
244
254
|
|
245
255
|
def in_transaction?
|
@@ -250,6 +260,10 @@ module Kafka
|
|
250
260
|
@transaction_state.error?
|
251
261
|
end
|
252
262
|
|
263
|
+
def ready?
|
264
|
+
@transaction_state.ready?
|
265
|
+
end
|
266
|
+
|
253
267
|
def close
|
254
268
|
if in_transaction?
|
255
269
|
@logger.warn("Aborting pending transaction ...")
|
@@ -264,11 +278,11 @@ module Kafka
|
|
264
278
|
|
265
279
|
def force_transactional!
|
266
280
|
unless transactional?
|
267
|
-
raise 'Please turn on transactional mode to use transaction'
|
281
|
+
raise Kafka::InvalidTxnStateError, 'Please turn on transactional mode to use transaction'
|
268
282
|
end
|
269
283
|
|
270
284
|
if @transactional_id.nil? || @transactional_id.empty?
|
271
|
-
raise 'Please provide a transaction_id to use transactional mode'
|
285
|
+
raise Kafka::InvalidTxnStateError, 'Please provide a transaction_id to use transactional mode'
|
272
286
|
end
|
273
287
|
end
|
274
288
|
|
@@ -278,6 +292,12 @@ module Kafka
|
|
278
292
|
)
|
279
293
|
end
|
280
294
|
|
295
|
+
def group_coordinator(group_id:)
|
296
|
+
@cluster.get_group_coordinator(
|
297
|
+
group_id: group_id
|
298
|
+
)
|
299
|
+
end
|
300
|
+
|
281
301
|
def complete_transaction
|
282
302
|
@transaction_state.transition_to!(TransactionStateMachine::READY)
|
283
303
|
@transaction_partitions = {}
|
data/lib/kafka/version.rb
CHANGED
data/ruby-kafka.gemspec
CHANGED
@@ -18,7 +18,7 @@ Gem::Specification.new do |spec|
|
|
18
18
|
DESC
|
19
19
|
|
20
20
|
spec.homepage = "https://github.com/zendesk/ruby-kafka"
|
21
|
-
spec.license = "Apache License Version 2.0"
|
21
|
+
spec.license = "Apache-2.0"
|
22
22
|
|
23
23
|
spec.required_ruby_version = '>= 2.1.0'
|
24
24
|
|
@@ -33,6 +33,7 @@ Gem::Specification.new do |spec|
|
|
33
33
|
spec.add_development_dependency "rake", "~> 10.0"
|
34
34
|
spec.add_development_dependency "rspec"
|
35
35
|
spec.add_development_dependency "pry"
|
36
|
+
spec.add_development_dependency "digest-murmurhash"
|
36
37
|
spec.add_development_dependency "dotenv"
|
37
38
|
spec.add_development_dependency "docker-api"
|
38
39
|
spec.add_development_dependency "rspec-benchmark"
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: ruby-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 1.1.0.beta1
|
4
|
+
version: 1.4.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Daniel Schierbeck
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date:
|
11
|
+
date: 2021-08-25 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: digest-crc
|
@@ -80,6 +80,20 @@ dependencies:
|
|
80
80
|
- - ">="
|
81
81
|
- !ruby/object:Gem::Version
|
82
82
|
version: '0'
|
83
|
+
- !ruby/object:Gem::Dependency
|
84
|
+
name: digest-murmurhash
|
85
|
+
requirement: !ruby/object:Gem::Requirement
|
86
|
+
requirements:
|
87
|
+
- - ">="
|
88
|
+
- !ruby/object:Gem::Version
|
89
|
+
version: '0'
|
90
|
+
type: :development
|
91
|
+
prerelease: false
|
92
|
+
version_requirements: !ruby/object:Gem::Requirement
|
93
|
+
requirements:
|
94
|
+
- - ">="
|
95
|
+
- !ruby/object:Gem::Version
|
96
|
+
version: '0'
|
83
97
|
- !ruby/object:Gem::Dependency
|
84
98
|
name: dotenv
|
85
99
|
requirement: !ruby/object:Gem::Requirement
|
@@ -376,7 +390,10 @@ files:
|
|
376
390
|
- lib/kafka/connection_builder.rb
|
377
391
|
- lib/kafka/consumer.rb
|
378
392
|
- lib/kafka/consumer_group.rb
|
393
|
+
- lib/kafka/consumer_group/assignor.rb
|
394
|
+
- lib/kafka/crc32_hash.rb
|
379
395
|
- lib/kafka/datadog.rb
|
396
|
+
- lib/kafka/digest.rb
|
380
397
|
- lib/kafka/fetch_operation.rb
|
381
398
|
- lib/kafka/fetched_batch.rb
|
382
399
|
- lib/kafka/fetched_batch_generator.rb
|
@@ -386,8 +403,10 @@ files:
|
|
386
403
|
- lib/kafka/gzip_codec.rb
|
387
404
|
- lib/kafka/heartbeat.rb
|
388
405
|
- lib/kafka/instrumenter.rb
|
406
|
+
- lib/kafka/interceptors.rb
|
389
407
|
- lib/kafka/lz4_codec.rb
|
390
408
|
- lib/kafka/message_buffer.rb
|
409
|
+
- lib/kafka/murmur2_hash.rb
|
391
410
|
- lib/kafka/offset_manager.rb
|
392
411
|
- lib/kafka/partitioner.rb
|
393
412
|
- lib/kafka/pause.rb
|
@@ -476,7 +495,7 @@ files:
|
|
476
495
|
- ruby-kafka.gemspec
|
477
496
|
homepage: https://github.com/zendesk/ruby-kafka
|
478
497
|
licenses:
|
479
|
-
- Apache License Version 2.0
|
498
|
+
- Apache-2.0
|
480
499
|
metadata: {}
|
481
500
|
post_install_message:
|
482
501
|
rdoc_options: []
|
@@ -489,12 +508,11 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
489
508
|
version: 2.1.0
|
490
509
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
491
510
|
requirements:
|
492
|
-
- - ">"
|
511
|
+
- - ">="
|
493
512
|
- !ruby/object:Gem::Version
|
494
|
-
version: 1.3.1
|
513
|
+
version: '0'
|
495
514
|
requirements: []
|
496
|
-
|
497
|
-
rubygems_version: 2.7.6
|
515
|
+
rubygems_version: 3.1.2
|
498
516
|
signing_key:
|
499
517
|
specification_version: 4
|
500
518
|
summary: A client library for the Kafka distributed commit log.
|