ruby-kafka 0.3.3 → 0.3.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/examples/firehose-consumer.rb +1 -1
- data/examples/firehose-producer.rb +1 -1
- data/examples/simple-consumer.rb +1 -1
- data/examples/simple-producer.rb +1 -1
- data/examples/ssl-producer.rb +1 -1
- data/lib/kafka/client.rb +21 -3
- data/lib/kafka/pending_message_queue.rb +8 -16
- data/lib/kafka/produce_operation.rb +2 -4
- data/lib/kafka/producer.rb +29 -14
- data/lib/kafka/version.rb +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: faf979f6512df3c644b20c1fad027fdc2818d558
|
4
|
+
data.tar.gz: 729c7d34332b6c832fdcc1bd6ee98daaf8af09e6
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: c3ef737a7b4af9f94413759c4d7c1dec136f727f3bf7963cc3b7daaf26d42600046deca40cd28c2ba8685b7d54e4efbb841194fe272218b24a70abf76b359e51
|
7
|
+
data.tar.gz: 691a8b02e96485959653c84fc5fa83d586a569e2866a94a30a490de9031a37beea9c96d71e8e244a9bfde0a779f6066cea3cd3d53253142da6d2c95a702b38d2
|
data/CHANGELOG.md
CHANGED
@@ -9,7 +9,7 @@ KAFKA_CLIENT_CERT = ENV.fetch("KAFKA_CLIENT_CERT")
|
|
9
9
|
KAFKA_CLIENT_CERT_KEY = ENV.fetch("KAFKA_CLIENT_CERT_KEY")
|
10
10
|
KAFKA_SERVER_CERT = ENV.fetch("KAFKA_SERVER_CERT")
|
11
11
|
KAFKA_URL = ENV.fetch("KAFKA_URL")
|
12
|
-
KAFKA_BROKERS = KAFKA_URL
|
12
|
+
KAFKA_BROKERS = KAFKA_URL
|
13
13
|
KAFKA_TOPIC = "test-messages"
|
14
14
|
|
15
15
|
NUM_THREADS = 4
|
@@ -9,7 +9,7 @@ KAFKA_CLIENT_CERT = ENV.fetch("KAFKA_CLIENT_CERT")
|
|
9
9
|
KAFKA_CLIENT_CERT_KEY = ENV.fetch("KAFKA_CLIENT_CERT_KEY")
|
10
10
|
KAFKA_SERVER_CERT = ENV.fetch("KAFKA_SERVER_CERT")
|
11
11
|
KAFKA_URL = ENV.fetch("KAFKA_URL")
|
12
|
-
KAFKA_BROKERS = KAFKA_URL
|
12
|
+
KAFKA_BROKERS = KAFKA_URL
|
13
13
|
KAFKA_TOPIC = "test-messages"
|
14
14
|
|
15
15
|
NUM_THREADS = 20
|
data/examples/simple-consumer.rb
CHANGED
@@ -14,7 +14,7 @@ require "kafka"
|
|
14
14
|
# with e.g. `$stderr` if you want to see what's happening under the hood.
|
15
15
|
logger = Logger.new(StringIO.new)
|
16
16
|
|
17
|
-
brokers = ENV.fetch("KAFKA_BROKERS")
|
17
|
+
brokers = ENV.fetch("KAFKA_BROKERS")
|
18
18
|
|
19
19
|
# Make sure to create this topic in your Kafka cluster or configure the
|
20
20
|
# cluster to auto-create topics.
|
data/examples/simple-producer.rb
CHANGED
@@ -11,7 +11,7 @@ $LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
|
|
11
11
|
require "kafka"
|
12
12
|
|
13
13
|
logger = Logger.new($stderr)
|
14
|
-
brokers = ENV.fetch("KAFKA_BROKERS")
|
14
|
+
brokers = ENV.fetch("KAFKA_BROKERS")
|
15
15
|
|
16
16
|
# Make sure to create this topic in your Kafka cluster or configure the
|
17
17
|
# cluster to auto-create topics.
|
data/examples/ssl-producer.rb
CHANGED
@@ -5,7 +5,7 @@ $LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
|
|
5
5
|
require "kafka"
|
6
6
|
|
7
7
|
logger = Logger.new($stderr)
|
8
|
-
brokers = ENV.fetch("KAFKA_BROKERS")
|
8
|
+
brokers = ENV.fetch("KAFKA_BROKERS")
|
9
9
|
|
10
10
|
# Make sure to create this topic in your Kafka cluster or configure the
|
11
11
|
# cluster to auto-create topics.
|
data/lib/kafka/client.rb
CHANGED
@@ -15,8 +15,10 @@ module Kafka
|
|
15
15
|
|
16
16
|
# Initializes a new Kafka client.
|
17
17
|
#
|
18
|
-
# @param seed_brokers [Array&lt;String&gt;] the list of brokers used to initialize
|
19
|
-
# the client.
|
18
|
+
# @param seed_brokers [Array<String>, String] the list of brokers used to initialize
|
19
|
+
# the client. Either an Array of connections, or a comma separated string of connections.
|
20
|
+
# Connections can either be a string of "port:protocol" or a full URI with a scheme.
|
21
|
+
# If there's a scheme it's ignored and only host/port are used.
|
20
22
|
#
|
21
23
|
# @param client_id [String] the identifier for this application.
|
22
24
|
#
|
@@ -59,7 +61,7 @@ module Kafka
|
|
59
61
|
)
|
60
62
|
|
61
63
|
@cluster = Cluster.new(
|
62
|
-
seed_brokers: seed_brokers,
|
64
|
+
seed_brokers: normalize_seed_brokers(seed_brokers),
|
63
65
|
broker_pool: broker_pool,
|
64
66
|
logger: @logger,
|
65
67
|
)
|
@@ -307,5 +309,21 @@ module Kafka
|
|
307
309
|
|
308
310
|
ssl_context
|
309
311
|
end
|
312
|
+
|
313
|
+
def normalize_seed_brokers(seed_brokers)
|
314
|
+
if seed_brokers.is_a?(String)
|
315
|
+
seed_brokers = seed_brokers.split(",")
|
316
|
+
end
|
317
|
+
brokers = []
|
318
|
+
seed_brokers.each do |connection|
|
319
|
+
if connection =~ /:\/\//
|
320
|
+
u = URI.parse(connection)
|
321
|
+
brokers << "#{u.host}:#{u.port}"
|
322
|
+
else
|
323
|
+
brokers << connection
|
324
|
+
end
|
325
|
+
end
|
326
|
+
brokers
|
327
|
+
end
|
310
328
|
end
|
311
329
|
end
|
@@ -1,8 +1,5 @@
|
|
1
1
|
module Kafka
|
2
2
|
|
3
|
-
# A pending message queue holds messages that have not yet been assigned to
|
4
|
-
# a partition. It's designed to only remove messages once they've been
|
5
|
-
# successfully handled.
|
6
3
|
class PendingMessageQueue
|
7
4
|
attr_reader :size, :bytesize
|
8
5
|
|
@@ -26,22 +23,17 @@ module Kafka
|
|
26
23
|
@bytesize = 0
|
27
24
|
end
|
28
25
|
|
29
|
-
|
30
|
-
|
31
|
-
|
26
|
+
def replace(messages)
|
27
|
+
clear
|
28
|
+
messages.each {|message| write(message) }
|
29
|
+
end
|
30
|
+
|
31
|
+
# Yields each message in the queue.
|
32
32
|
#
|
33
33
|
# @yieldparam [PendingMessage] message
|
34
34
|
# @return [nil]
|
35
|
-
def dequeue
|
36
|
-
|
37
|
-
message = @messages.first
|
38
|
-
|
39
|
-
yield message
|
40
|
-
|
41
|
-
@size -= 1
|
42
|
-
@bytesize -= message.bytesize
|
43
|
-
@messages.shift
|
44
|
-
end
|
35
|
+
def each(&block)
|
36
|
+
@messages.each(&block)
|
45
37
|
end
|
46
38
|
end
|
47
39
|
end
|
@@ -65,9 +65,8 @@ module Kafka
|
|
65
65
|
rescue Kafka::Error => e
|
66
66
|
@logger.error "Could not connect to leader for partition #{topic}/#{partition}: #{e.message}"
|
67
67
|
|
68
|
-
@instrumenter.instrument("partition_error.producer", {
|
68
|
+
@instrumenter.instrument("topic_error.producer", {
|
69
69
|
topic: topic,
|
70
|
-
partition: partition,
|
71
70
|
exception: [e.class.to_s, e.message],
|
72
71
|
})
|
73
72
|
|
@@ -118,9 +117,8 @@ module Kafka
|
|
118
117
|
begin
|
119
118
|
Protocol.handle_error(partition_info.error_code)
|
120
119
|
rescue ProtocolError => e
|
121
|
-
@instrumenter.instrument("partition_error.producer", {
|
120
|
+
@instrumenter.instrument("topic_error.producer", {
|
122
121
|
topic: topic,
|
123
|
-
partition: partition,
|
124
122
|
exception: [e.class.to_s, e.message],
|
125
123
|
})
|
126
124
|
|
data/lib/kafka/producer.rb
CHANGED
@@ -327,25 +327,40 @@ module Kafka
|
|
327
327
|
end
|
328
328
|
|
329
329
|
def assign_partitions!
|
330
|
-
|
330
|
+
failed_messages = []
|
331
|
+
|
332
|
+
@pending_message_queue.each do |message|
|
331
333
|
partition = message.partition
|
332
334
|
|
333
|
-
|
334
|
-
|
335
|
-
|
335
|
+
begin
|
336
|
+
if partition.nil?
|
337
|
+
partition_count = @cluster.partitions_for(message.topic).count
|
338
|
+
partition = Partitioner.partition_for_key(partition_count, message)
|
339
|
+
end
|
340
|
+
|
341
|
+
@buffer.write(
|
342
|
+
value: message.value,
|
343
|
+
key: message.key,
|
344
|
+
topic: message.topic,
|
345
|
+
partition: partition,
|
346
|
+
create_time: message.create_time,
|
347
|
+
)
|
348
|
+
rescue Kafka::Error => e
|
349
|
+
@instrumenter.instrument("topic_error.producer", {
|
350
|
+
topic: message.topic,
|
351
|
+
exception: [e.class.to_s, e.message],
|
352
|
+
})
|
353
|
+
|
354
|
+
failed_messages << message
|
336
355
|
end
|
356
|
+
end
|
337
357
|
|
338
|
-
|
339
|
-
|
340
|
-
|
341
|
-
topic: message.topic,
|
342
|
-
partition: partition,
|
343
|
-
create_time: message.create_time,
|
344
|
-
)
|
358
|
+
if failed_messages.any?
|
359
|
+
@logger.error "Failed to assign partitions to #{failed_messages.count} messages"
|
360
|
+
@cluster.mark_as_stale!
|
345
361
|
end
|
346
|
-
|
347
|
-
@
|
348
|
-
@cluster.mark_as_stale!
|
362
|
+
|
363
|
+
@pending_message_queue.replace(failed_messages)
|
349
364
|
end
|
350
365
|
end
|
351
366
|
end
|
data/lib/kafka/version.rb
CHANGED