ruby-kafka 0.7.0.alpha4 → 0.7.0.beta1
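
In short: this release adds support for Kafka record headers across the produce and consume paths (#604), and the async producer now logs and instruments delivery failures instead of silently swallowing them (#603).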

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 93553029514a22802156abc20e70cf63263f2ddc6bfe4312e11bbb629eefd017
-  data.tar.gz: 14c14261e50408e8ca0b0c5f14cac94939f6756bc8c5499fd2af85280218dc80
+  metadata.gz: 1662d1f7534da36a4a6f5ade9e194fd5142034ba4b53d1b6291fcf233e7eb684
+  data.tar.gz: 36b9ecb999f55cfd797c23da795c214c7b9712b42a88e6343024e0e17a123eac
 SHA512:
-  metadata.gz: 264f0d854be944a050f7392f43c231a3499d786b5d27bf772a6a2c7b05ed7d496f8c0855e661e4e263667abe9b2d113740d92d842310052e53797588a0375034
-  data.tar.gz: ea31b08620b1451ac663fd16eeab8816948a3a421218282ece5c2b94417931c926ffdcbafb0436e414a3136c9ecab0442a3b477c3787d106f3380c9e18f2fcb0
+  metadata.gz: 8b22b391b89199f8001401069f5040bac248866f82b8bd1f69ecf3d67d947ef7f54f6d933a68a9114c2a28f54097be4cedb3c74743bae1b231f313a4427ad257
+  data.tar.gz: '0318b5a5cd085ea2c96e7e1bf3c91d914c3898d555b6bdf5681851907e0dec4e2f3ab39c95bf8c289558c41d0c901b243ea7e240e2271235769a2378069e1bbc'
data/CHANGELOG.md CHANGED
@@ -5,6 +5,20 @@ Changes and additions to the library will be listed here.
 ## Unreleased
 
 - Drop support for Kafka 0.10 in favor of native support for Kafka 0.11.
+- Support record headers (#604).
+- Add instrumenter and logger when async message delivery fails (#603).
+
+## 0.6.7
+
+- Handle case where consumer doesn't know about the topic (#597 + 0e302cbd0f31315bf81c1d1645520413ad6b58f0)
+
+## v0.6.5
+
+- Fix bug related to partition assignment.
+
+## v0.6.4
+
+- Fix bug that caused consumers to jump back and reprocess messages (#595).
 
 ## v0.6.3
 
data/lib/kafka/async_producer.rb CHANGED
@@ -241,8 +241,10 @@ module Kafka
 
      def deliver_messages
        @producer.deliver_messages
-     rescue DeliveryFailed, ConnectionError
-       # Failed to deliver messages -- nothing to do but try again later.
+     rescue DeliveryFailed, ConnectionError => e
+       # Failed to deliver messages -- nothing to do but log and try again later.
+       @logger.error("Failed to asynchronously deliver messages: #{e.message}")
+       @instrumenter.instrument("error.async_producer", { error: e })
      end
 
      def threshold_reached?
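
The new `error.async_producer` event is emitted through the client's instrumenter, which publishes via ActiveSupport::Notifications. A minimal subscriber sketch, assuming the fully qualified event name comes out as `error.async_producer.kafka` (the `.kafka` suffix is an assumption based on how ruby-kafka's Instrumenter namespaces its events):

```ruby
require "active_support/notifications"

ActiveSupport::Notifications.subscribe("error.async_producer.kafka") do |_name, _start, _finish, _id, payload|
  # payload[:error] is the DeliveryFailed or ConnectionError instance.
  error = payload[:error]
  warn "Async delivery failed: #{error.class}: #{error.message}"
end
```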
data/lib/kafka/client.rb CHANGED
@@ -109,6 +109,7 @@ module Kafka
    #
    # @param value [String, nil] the message value.
    # @param key [String, nil] the message key.
+   # @param headers [Hash<String, String>] the headers for the message.
    # @param topic [String] the topic that the message should be written to.
    # @param partition [Integer, nil] the partition that the message should be written
    #   to, or `nil` if either `partition_key` is passed or the partition should be
@@ -118,16 +119,17 @@ module Kafka
    # @param retries [Integer] the number of times to retry the delivery before giving
    #   up.
    # @return [nil]
-   def deliver_message(value, key: nil, topic:, partition: nil, partition_key: nil, retries: 1)
+   def deliver_message(value, key: nil, headers: {}, topic:, partition: nil, partition_key: nil, retries: 1)
      create_time = Time.now
 
      message = PendingMessage.new(
-       value,
-       key,
-       topic,
-       partition,
-       partition_key,
-       create_time,
+       value: value,
+       key: key,
+       headers: headers,
+       topic: topic,
+       partition: partition,
+       partition_key: partition_key,
+       create_time: create_time
      )
 
      if partition.nil?
@@ -140,6 +142,7 @@ module Kafka
      buffer.write(
        value: message.value,
        key: message.key,
+       headers: message.headers,
        topic: message.topic,
        partition: partition,
        create_time: message.create_time,
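
With this change, `Kafka::Client#deliver_message` accepts a `headers:` hash of `String => String` pairs. A minimal sketch; the broker address, topic, and header values are placeholders:

```ruby
require "kafka"

kafka = Kafka.new(["localhost:9092"]) # placeholder broker address

kafka.deliver_message(
  "hello world",
  topic: "greetings",                    # placeholder topic
  headers: { "request-id" => "abc-123" } # illustrative header
)
```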
data/lib/kafka/consumer.rb CHANGED
@@ -37,6 +37,7 @@ module Kafka
   #       puts message.topic
   #       puts message.partition
   #       puts message.key
+  #       puts message.headers
   #       puts message.value
   #       puts message.offset
   #     end
@@ -213,6 +214,7 @@ module Kafka
            create_time: message.create_time,
            key: message.key,
            value: message.value,
+           headers: message.headers
          }
 
          # Instrument an event immediately so that subscribers don't have to wait until
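
Since the headers now travel in the `process_message` instrumentation payload, monitoring code can observe them without touching the processing loop. A sketch, assuming the event is published as `process_message.consumer.kafka` and that the payload also carries `:topic`, `:partition`, and `:offset` as in earlier releases:

```ruby
require "active_support/notifications"

ActiveSupport::Notifications.subscribe("process_message.consumer.kafka") do |*, payload|
  # :headers is the newly added key alongside :key, :value, and :create_time.
  puts "#{payload[:topic]}/#{payload[:partition]}@#{payload[:offset]} headers=#{payload[:headers].inspect}"
end
```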
data/lib/kafka/fetched_message.rb CHANGED
@@ -34,6 +34,11 @@ module Kafka
      @message.create_time
    end
 
+   # @return [Hash<String, String>] the headers of the message.
+   def headers
+     @message.headers
+   end
+
    # @return [Boolean] whether this record is a control record
    def is_control_record
      @message.is_control_record
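
On the consuming side, `FetchedMessage#headers` exposes the decoded headers directly. A sketch, reusing the `kafka` client from the earlier example (group and topic names are placeholders):

```ruby
consumer = kafka.consumer(group_id: "greeters")
consumer.subscribe("greetings")

consumer.each_message do |message|
  # headers is a Hash of String => String; empty if the producer set none.
  request_id = message.headers["request-id"]
  puts "#{message.value} (request-id: #{request_id})"
end
```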
data/lib/kafka/message_buffer.rb CHANGED
@@ -16,8 +16,8 @@ module Kafka
      @bytesize = 0
    end
 
-   def write(value:, key:, topic:, partition:, create_time: Time.now)
-     message = Protocol::Record.new(key: key, value: value, create_time: create_time)
+   def write(value:, key:, topic:, partition:, create_time: Time.now, headers: {})
+     message = Protocol::Record.new(key: key, value: value, create_time: create_time, headers: headers)
 
      buffer_for(topic, partition) << message
 
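Note that `headers:` defaults to an empty hash, so existing callers of `MessageBuffer#write` are unaffected. A sketch of both call styles (this is an internal class, shown only for illustration):

```ruby
require "kafka"

buffer = Kafka::MessageBuffer.new

# Pre-existing call style still works; headers default to {}.
buffer.write(value: "hello", key: nil, topic: "greetings", partition: 0)

# New call style attaches headers to the underlying Protocol::Record.
buffer.write(value: "hi", key: nil, topic: "greetings", partition: 0,
             headers: { "v" => "2" })
```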
data/lib/kafka/pending_message.rb CHANGED
@@ -2,11 +2,12 @@
 
 module Kafka
   class PendingMessage
-    attr_reader :value, :key, :topic, :partition, :partition_key, :create_time, :bytesize
+    attr_reader :value, :key, :headers, :topic, :partition, :partition_key, :create_time, :bytesize
 
-    def initialize(value, key, topic, partition, partition_key, create_time)
+    def initialize(value:, key:, headers: {}, topic:, partition:, partition_key:, create_time:)
      @value = value
      @key = key
+     @headers = headers
      @topic = topic
      @partition = partition
      @partition_key = partition_key
@@ -18,6 +19,7 @@ module Kafka
      @value == other.value &&
      @key == other.key &&
      @topic == other.topic &&
+     @headers == other.headers &&
      @partition == other.partition &&
      @partition_key == other.partition_key &&
      @create_time == other.create_time &&
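
Moving `PendingMessage` from six positional arguments to keyword arguments makes call sites self-describing and lets `headers` slot in without silently shifting argument order. A sketch of the new signature (internal class, shown only for illustration):

```ruby
require "kafka"

message = Kafka::PendingMessage.new(
  value: "hello",
  key: nil,
  headers: { "request-id" => "abc-123" },
  topic: "greetings",
  partition: nil,
  partition_key: nil,
  create_time: Time.now
)

message.headers # => { "request-id" => "abc-123" }
```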
data/lib/kafka/producer.rb CHANGED
@@ -172,6 +172,7 @@ module Kafka
    #
    # @param value [String] the message data.
    # @param key [String] the message key.
+   # @param headers [Hash<String, String>] the headers for the message.
    # @param topic [String] the topic that the message should be written to.
    # @param partition [Integer] the partition that the message should be written to.
    # @param partition_key [String] the key that should be used to assign a partition.
@@ -179,14 +180,15 @@ module Kafka
    #
    # @raise [BufferOverflow] if the maximum buffer size has been reached.
    # @return [nil]
-   def produce(value, key: nil, topic:, partition: nil, partition_key: nil, create_time: Time.now)
+   def produce(value, key: nil, headers: {}, topic:, partition: nil, partition_key: nil, create_time: Time.now)
      message = PendingMessage.new(
-       value && value.to_s,
-       key && key.to_s,
-       topic.to_s,
-       partition && Integer(partition),
-       partition_key && partition_key.to_s,
-       create_time,
+       value: value && value.to_s,
+       key: key && key.to_s,
+       headers: headers,
+       topic: topic.to_s,
+       partition: partition && Integer(partition),
+       partition_key: partition_key && partition_key.to_s,
+       create_time: create_time
      )
 
      if buffer_size >= @max_buffer_size
@@ -354,6 +356,7 @@ module Kafka
      @buffer.write(
        value: message.value,
        key: message.key,
+       headers: message.headers,
        topic: message.topic,
        partition: partition,
        create_time: message.create_time,
@@ -390,12 +393,13 @@ module Kafka
      @buffer.each do |topic, partition, messages_for_partition|
        messages_for_partition.each do |message|
          messages << PendingMessage.new(
-           message.value,
-           message.key,
-           topic,
-           partition,
-           nil,
-           message.create_time
+           value: message.value,
+           key: message.key,
+           headers: message.headers,
+           topic: topic,
+           partition: partition,
+           partition_key: nil,
+           create_time: message.create_time
          )
        end
      end
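
The buffered producer gets the same `headers:` keyword on `#produce`. A minimal sketch, again reusing the `kafka` client from the `deliver_message` example:

```ruby
producer = kafka.producer

producer.produce(
  "hello world",
  topic: "greetings",
  headers: { "request-id" => "abc-123" }
)

producer.deliver_messages
```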
data/lib/kafka/protocol/record.rb CHANGED
@@ -40,8 +40,8 @@ module Kafka
        record_encoder.write_varint_bytes(@value)
 
        record_encoder.write_varint_array(@headers.to_a) do |header_key, header_value|
-         record_encoder.write_varint_string(header_key)
-         record_encoder.write_varint_bytes(header_value)
+         record_encoder.write_varint_string(header_key.to_s)
+         record_encoder.write_varint_bytes(header_value.to_s)
        end
 
        encoder.write_varint_bytes(record_buffer.string)
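
The added `.to_s` calls coerce header keys and values to strings at encode time, so symbol keys or non-string values no longer have to be stringified by the caller. For example:

```ruby
# Symbol key and integer value are stringified when the record is encoded;
# on the wire this header becomes "retry_count" => "2".
producer.produce("hello", topic: "greetings", headers: { retry_count: 2 })
```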
data/lib/kafka/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Kafka
-  VERSION = "0.7.0.alpha4"
+  VERSION = "0.7.0.beta1"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.7.0.alpha4
+  version: 0.7.0.beta1
 platform: ruby
 authors:
 - Daniel Schierbeck
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2018-06-12 00:00:00.000000000 Z
+date: 2018-06-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: digest-crc