ruby-kafka 0.3.3 → 0.3.4

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: bf114586f8ec65d1db3ade1d082fc82de140f241
4
- data.tar.gz: a202f5dd06b0339a8e3895e48824acba6d266ba5
3
+ metadata.gz: faf979f6512df3c644b20c1fad027fdc2818d558
4
+ data.tar.gz: 729c7d34332b6c832fdcc1bd6ee98daaf8af09e6
5
5
  SHA512:
6
- metadata.gz: 4788eabf90baf70adb425860c98073058dd601247f1dd447aeb3d154025a5466c19130c7a8d00984ff0e9a5771f980a39a33ffec8bc4384695388999ceda2a13
7
- data.tar.gz: 8f5572d8e9dcf46737236ac6a6467fc5d1bd60e091e889ffe056131c084bd47699c59bd404e11b0ce8ec3c5b5c800d8d07c318c6d3abd2e75585d4168079c70c
6
+ metadata.gz: c3ef737a7b4af9f94413759c4d7c1dec136f727f3bf7963cc3b7daaf26d42600046deca40cd28c2ba8685b7d54e4efbb841194fe272218b24a70abf76b359e51
7
+ data.tar.gz: 691a8b02e96485959653c84fc5fa83d586a569e2866a94a30a490de9031a37beea9c96d71e8e244a9bfde0a779f6066cea3cd3d53253142da6d2c95a702b38d2
@@ -4,6 +4,10 @@ Changes and additions to the library will be listed here.
4
4
 
5
5
  ## Unreleased
6
6
 
7
+ ## v0.3.4
8
+
9
+ - Make the producer buffer more resilient in the face of isolated topic errors.
10
+
7
11
  ## v0.3.3
8
12
 
9
13
  - Allow clearing a producer's buffer (Martin Nowak).
@@ -9,7 +9,7 @@ KAFKA_CLIENT_CERT = ENV.fetch("KAFKA_CLIENT_CERT")
9
9
  KAFKA_CLIENT_CERT_KEY = ENV.fetch("KAFKA_CLIENT_CERT_KEY")
10
10
  KAFKA_SERVER_CERT = ENV.fetch("KAFKA_SERVER_CERT")
11
11
  KAFKA_URL = ENV.fetch("KAFKA_URL")
12
- KAFKA_BROKERS = KAFKA_URL.gsub("kafka+ssl://", "").split(",")
12
+ KAFKA_BROKERS = KAFKA_URL
13
13
  KAFKA_TOPIC = "test-messages"
14
14
 
15
15
  NUM_THREADS = 4
@@ -9,7 +9,7 @@ KAFKA_CLIENT_CERT = ENV.fetch("KAFKA_CLIENT_CERT")
9
9
  KAFKA_CLIENT_CERT_KEY = ENV.fetch("KAFKA_CLIENT_CERT_KEY")
10
10
  KAFKA_SERVER_CERT = ENV.fetch("KAFKA_SERVER_CERT")
11
11
  KAFKA_URL = ENV.fetch("KAFKA_URL")
12
- KAFKA_BROKERS = KAFKA_URL.gsub("kafka+ssl://", "").split(",")
12
+ KAFKA_BROKERS = KAFKA_URL
13
13
  KAFKA_TOPIC = "test-messages"
14
14
 
15
15
  NUM_THREADS = 20
@@ -14,7 +14,7 @@ require "kafka"
14
14
  # with e.g. `$stderr` if you want to see what's happening under the hood.
15
15
  logger = Logger.new(StringIO.new)
16
16
 
17
- brokers = ENV.fetch("KAFKA_BROKERS").split(",")
17
+ brokers = ENV.fetch("KAFKA_BROKERS")
18
18
 
19
19
  # Make sure to create this topic in your Kafka cluster or configure the
20
20
  # cluster to auto-create topics.
@@ -11,7 +11,7 @@ $LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
11
11
  require "kafka"
12
12
 
13
13
  logger = Logger.new($stderr)
14
- brokers = ENV.fetch("KAFKA_BROKERS").split(",")
14
+ brokers = ENV.fetch("KAFKA_BROKERS")
15
15
 
16
16
  # Make sure to create this topic in your Kafka cluster or configure the
17
17
  # cluster to auto-create topics.
@@ -5,7 +5,7 @@ $LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
5
5
  require "kafka"
6
6
 
7
7
  logger = Logger.new($stderr)
8
- brokers = ENV.fetch("KAFKA_BROKERS").split(",")
8
+ brokers = ENV.fetch("KAFKA_BROKERS")
9
9
 
10
10
  # Make sure to create this topic in your Kafka cluster or configure the
11
11
  # cluster to auto-create topics.
@@ -15,8 +15,10 @@ module Kafka
15
15
 
16
16
  # Initializes a new Kafka client.
17
17
  #
18
- # @param seed_brokers [Array<String>] the list of brokers used to initialize
19
- # the client.
18
+ # @param seed_brokers [Array<String>, String] the list of brokers used to initialize
19
+ # the client. Either an Array of connections, or a comma-separated string of connections.
20
+ # Connections can either be a string of "host:port" or a full URI with a scheme.
21
+ # If there's a scheme it's ignored and only host/port are used.
20
22
  #
21
23
  # @param client_id [String] the identifier for this application.
22
24
  #
@@ -59,7 +61,7 @@ module Kafka
59
61
  )
60
62
 
61
63
  @cluster = Cluster.new(
62
- seed_brokers: seed_brokers,
64
+ seed_brokers: normalize_seed_brokers(seed_brokers),
63
65
  broker_pool: broker_pool,
64
66
  logger: @logger,
65
67
  )
@@ -307,5 +309,21 @@ module Kafka
307
309
 
308
310
  ssl_context
309
311
  end
312
+
313
+ def normalize_seed_brokers(seed_brokers)
314
+ if seed_brokers.is_a?(String)
315
+ seed_brokers = seed_brokers.split(",")
316
+ end
317
+ brokers = []
318
+ seed_brokers.each do |connection|
319
+ if connection =~ /:\/\//
320
+ u = URI.parse(connection)
321
+ brokers << "#{u.host}:#{u.port}"
322
+ else
323
+ brokers << connection
324
+ end
325
+ end
326
+ brokers
327
+ end
310
328
  end
311
329
  end
@@ -1,8 +1,5 @@
1
1
  module Kafka
2
2
 
3
- # A pending message queue holds messages that have not yet been assigned to
4
- # a partition. It's designed to only remove messages once they've been
5
- # successfully handled.
6
3
  class PendingMessageQueue
7
4
  attr_reader :size, :bytesize
8
5
 
@@ -26,22 +23,17 @@ module Kafka
26
23
  @bytesize = 0
27
24
  end
28
25
 
29
- # Yields each message in the queue to the provided block, removing the
30
- # message after the block has processed it. If the block raises an
31
- # exception, the message will be retained in the queue.
26
+ def replace(messages)
27
+ clear
28
+ messages.each {|message| write(message) }
29
+ end
30
+
31
+ # Yields each message in the queue.
32
32
  #
33
33
  # @yieldparam [PendingMessage] message
34
34
  # @return [nil]
35
- def dequeue_each(&block)
36
- until @messages.empty?
37
- message = @messages.first
38
-
39
- yield message
40
-
41
- @size -= 1
42
- @bytesize -= message.bytesize
43
- @messages.shift
44
- end
35
+ def each(&block)
36
+ @messages.each(&block)
45
37
  end
46
38
  end
47
39
  end
@@ -65,9 +65,8 @@ module Kafka
65
65
  rescue Kafka::Error => e
66
66
  @logger.error "Could not connect to leader for partition #{topic}/#{partition}: #{e.message}"
67
67
 
68
- @instrumenter.instrument("partition_error.producer", {
68
+ @instrumenter.instrument("topic_error.producer", {
69
69
  topic: topic,
70
- partition: partition,
71
70
  exception: [e.class.to_s, e.message],
72
71
  })
73
72
 
@@ -118,9 +117,8 @@ module Kafka
118
117
  begin
119
118
  Protocol.handle_error(partition_info.error_code)
120
119
  rescue ProtocolError => e
121
- @instrumenter.instrument("partition_error.producer", {
120
+ @instrumenter.instrument("topic_error.producer", {
122
121
  topic: topic,
123
- partition: partition,
124
122
  exception: [e.class.to_s, e.message],
125
123
  })
126
124
 
@@ -327,25 +327,40 @@ module Kafka
327
327
  end
328
328
 
329
329
  def assign_partitions!
330
- @pending_message_queue.dequeue_each do |message|
330
+ failed_messages = []
331
+
332
+ @pending_message_queue.each do |message|
331
333
  partition = message.partition
332
334
 
333
- if partition.nil?
334
- partition_count = @cluster.partitions_for(message.topic).count
335
- partition = Partitioner.partition_for_key(partition_count, message)
335
+ begin
336
+ if partition.nil?
337
+ partition_count = @cluster.partitions_for(message.topic).count
338
+ partition = Partitioner.partition_for_key(partition_count, message)
339
+ end
340
+
341
+ @buffer.write(
342
+ value: message.value,
343
+ key: message.key,
344
+ topic: message.topic,
345
+ partition: partition,
346
+ create_time: message.create_time,
347
+ )
348
+ rescue Kafka::Error => e
349
+ @instrumenter.instrument("topic_error.producer", {
350
+ topic: message.topic,
351
+ exception: [e.class.to_s, e.message],
352
+ })
353
+
354
+ failed_messages << message
336
355
  end
356
+ end
337
357
 
338
- @buffer.write(
339
- value: message.value,
340
- key: message.key,
341
- topic: message.topic,
342
- partition: partition,
343
- create_time: message.create_time,
344
- )
358
+ if failed_messages.any?
359
+ @logger.error "Failed to assign partitions to #{failed_messages.count} messages"
360
+ @cluster.mark_as_stale!
345
361
  end
346
- rescue Kafka::Error => e
347
- @logger.error "Failed to assign pending message to a partition: #{e}"
348
- @cluster.mark_as_stale!
362
+
363
+ @pending_message_queue.replace(failed_messages)
349
364
  end
350
365
  end
351
366
  end
@@ -1,3 +1,3 @@
1
1
  module Kafka
2
- VERSION = "0.3.3"
2
+ VERSION = "0.3.4"
3
3
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ruby-kafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.3
4
+ version: 0.3.4
5
5
  platform: ruby
6
6
  authors:
7
7
  - Daniel Schierbeck