ruby-kafka 0.4.4 → 0.5.0.beta2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 3f09ed3cd80d943c7aa595e43c3de6fd1bd0f883
-  data.tar.gz: 5906a940b05edf1ee429c184e03d4f2490b2454a
+  metadata.gz: d91d5937445f7d3e85fd36801ac52d91f19fb192
+  data.tar.gz: b7baea6a96bd70ba6ec9ff4ad42e206c22e53c7e
 SHA512:
-  metadata.gz: e352a9d47347f413ffafe9e89fca1369a6aa2de17da5b65de2adf3ba659784aea6939b127411626f7889941e084d57d4329b047e5aba26f83d31a5f312b3caaf
-  data.tar.gz: c1e97414fa1a314ec8e6f9f63fe992544d50c3dd6c0650ac1c2f76ba5866a0c3ed3457f38d71021f9669e6dcc904498a2f9c6ee63542234e94fd5ff000ff4c27
+  metadata.gz: 9e0dac8db89b3002a49538f0daf34c7df3c821a22eae2d1615d4bbc97c25f2b4312a0f7cab1c0682a96079213ee53e85e64784a01096d2ca252c9726635d9236
+  data.tar.gz: ef97106a320e4c933189a935f42f8d383dc0619176cee8df5f8b30c060adee157de9b3293e522795f2f276fd063140f7e2284db4e1ee3343c6104eb0773b0468
data/CHANGELOG.md CHANGED
@@ -4,13 +4,16 @@ Changes and additions to the library will be listed here.
 
 ## Unreleased
 
-## v0.4.4
+## v0.5.0
 
-- Include the offset lag in batch consumer metrics (Statsd).
+- Drops support for Kafka 0.9 in favor of Kafka 0.10 (#381)!
+- Handle cases where there are no partitions to fetch from by sleeping a bit (#439).
+- Handle problems with the broker cache (#440).
 
 ## v0.4.3
 
 - Restart the async producer thread automatically after errors.
+- Include the offset lag in batch consumer metrics (Statsd).
 - Make the default `max_wait_time` more sane.
 - Fix issue with cached default offset lookups (#431).
 - Upgrade to Datadog client version 3.
data/circle.yml CHANGED
@@ -11,7 +11,7 @@ machine:
 dependencies:
   pre:
     - docker -v
-    - docker pull ches/kafka:0.9.0.1
+    - docker pull ches/kafka:0.10.0.0
     - docker pull jplock/zookeeper:3.4.6
 
 test:
data/lib/kafka/consumer.rb CHANGED
@@ -216,8 +216,6 @@ module Kafka
 
           # We may not have received any messages, but it's still a good idea to
           # commit offsets if we've processed messages in the last set of batches.
-          # This also ensures the offsets are retained if we haven't read any messages
-          # since the offset retention period has elapsed.
           @offset_manager.commit_offsets_if_necessary
         end
       end
@@ -281,12 +279,6 @@ module Kafka
 
           return if !@running
         end
-
-        # We may not have received any messages, but it's still a good idea to
-        # commit offsets if we've processed messages in the last set of batches.
-        # This also ensures the offsets are retained if we haven't read any messages
-        # since the offset retention period has elapsed.
-        @offset_manager.commit_offsets_if_necessary
       end
     end
 
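
Both hunks touch `commit_offsets_if_necessary`, the offset manager's throttled commit path. For readers unfamiliar with it, a rough sketch of the idea (a hypothetical simplification, not the library's actual implementation):

    # Hypothetical sketch: commit only when the commit interval has elapsed
    # or enough messages have been processed since the last commit.
    def commit_offsets_if_necessary
      interval_elapsed = seconds_since_last_commit >= @commit_interval
      over_threshold   = @uncommitted_message_count >= @commit_threshold

      commit_offsets if interval_elapsed || over_threshold
    end
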
data/lib/kafka/fetch_operation.rb CHANGED
@@ -94,6 +94,7 @@ module Kafka
             topic: fetched_topic.name,
             partition: fetched_partition.partition,
             offset: message.offset,
+            create_time: message.create_time,
           )
         }
 
data/lib/kafka/fetched_message.rb CHANGED
@@ -16,12 +16,16 @@ module Kafka
     # @return [Integer] the offset of the message in the partition.
     attr_reader :offset
 
-    def initialize(value:, key:, topic:, partition:, offset:)
+    # @return [Time] the timestamp of the message.
+    attr_reader :create_time
+
+    def initialize(value:, key:, topic:, partition:, offset:, create_time:)
       @value = value
       @key = key
       @topic = topic
       @partition = partition
       @offset = offset
+      @create_time = create_time
     end
   end
 end
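
With `create_time` exposed on `FetchedMessage`, consumers can read each message's v1-format timestamp directly. A minimal usage sketch, assuming a reachable broker at `localhost:9092`, a consumer group `my-group`, and a topic `my-topic` (all hypothetical):

    require "kafka"

    kafka = Kafka.new(seed_brokers: ["localhost:9092"])

    consumer = kafka.consumer(group_id: "my-group")
    consumer.subscribe("my-topic")

    consumer.each_message do |message|
      # create_time is a Time object decoded from the message's
      # millisecond wire-format timestamp.
      puts "#{message.offset}: #{message.value} (created at #{message.create_time})"
    end
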
data/lib/kafka/protocol/fetch_request.rb CHANGED
@@ -30,6 +30,10 @@ module Kafka
       FETCH_API
     end
 
+    def api_version
+      2
+    end
+
     def response_class
       Protocol::FetchResponse
     end
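
The `api_version` returned here is written into the standard header that precedes every Kafka request on the wire. A standalone sketch of that header layout (illustrative only, not the library's actual encoder):

    # Request header per the Kafka protocol: ApiKey (int16), ApiVersion (int16),
    # CorrelationId (int32), ClientId (int16 length + bytes), all big-endian.
    def encode_request_header(api_key:, api_version:, correlation_id:, client_id:)
      [api_key, api_version, correlation_id, client_id.bytesize].pack("s>s>l>s>") + client_id
    end

    # A v2 fetch request (fetch is API key 1) would begin with:
    encode_request_header(api_key: 1, api_version: 2, correlation_id: 42, client_id: "ruby-kafka")
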
data/lib/kafka/protocol/fetch_response.rb CHANGED
@@ -38,11 +38,14 @@ module Kafka
 
     attr_reader :topics
 
-    def initialize(topics: [])
+    def initialize(topics: [], throttle_time_ms: 0)
       @topics = topics
+      @throttle_time_ms = throttle_time_ms
     end
 
     def self.decode(decoder)
+      throttle_time_ms = decoder.int32
+
       topics = decoder.array do
         topic_name = decoder.string
 
@@ -68,7 +71,7 @@ module Kafka
         )
       end
 
-      new(topics: topics)
+      new(topics: topics, throttle_time_ms: throttle_time_ms)
     end
   end
 end
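
Note the field ordering: in v1+ fetch responses, `throttle_time_ms` is the first field, so it is read before the topic array; in the v2 produce response further down, the same field trails the topic array, which is why the corresponding `decoder.int32` sits at the end of `decode` there.
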
data/lib/kafka/protocol/message.rb CHANGED
@@ -6,15 +6,16 @@ module Kafka
 
     # ## API Specification
     #
-    #     Message => Crc MagicByte Attributes Key Value
+    #     Message => Crc MagicByte Attributes Timestamp Key Value
     #       Crc => int32
     #       MagicByte => int8
     #       Attributes => int8
+    #       Timestamp => int64, in ms
     #       Key => bytes
     #       Value => bytes
     #
     class Message
-      MAGIC_BYTE = 0
+      MAGIC_BYTE = 1
 
       attr_reader :key, :value, :codec_id, :offset
 
@@ -71,6 +72,7 @@ module Kafka
       end
 
       attributes = message_decoder.int8
+      timestamp = message_decoder.int64
       key = message_decoder.bytes
       value = message_decoder.bytes
 
@@ -78,7 +80,7 @@ module Kafka
       # attributes.
       codec_id = attributes & 0b111
 
-      new(key: key, value: value, codec_id: codec_id, offset: offset)
+      new(key: key, value: value, codec_id: codec_id, offset: offset, create_time: Time.at(timestamp / 1000.0))
     end
 
     private
@@ -102,6 +104,7 @@ module Kafka
 
       encoder.write_int8(MAGIC_BYTE)
      encoder.write_int8(@codec_id)
+      encoder.write_int64((@create_time.to_f * 1000).to_i)
       encoder.write_bytes(@key)
       encoder.write_bytes(@value)
 
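
Because the wire format stores the timestamp as integer milliseconds, anything finer is truncated on encode. A small round-trip illustration in plain Ruby (no Kafka required):

    t = Time.at(1508745600.123456)  # arbitrary example time

    ms = (t.to_f * 1000).to_i       # encode: 1508745600123
    decoded = Time.at(ms / 1000.0)  # decode: same instant at millisecond precision

    (t.to_f - decoded.to_f).abs < 0.001  # => true; microseconds are lost
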
data/lib/kafka/protocol/produce_request.rb CHANGED
@@ -40,6 +40,10 @@ module Kafka
       PRODUCE_API
     end
 
+    def api_version
+      2
+    end
+
     def response_class
       requires_acks? ? Protocol::ProduceResponse : nil
     end
data/lib/kafka/protocol/produce_response.rb CHANGED
@@ -11,19 +11,21 @@ module Kafka
     end
 
     class PartitionInfo
-      attr_reader :partition, :error_code, :offset
+      attr_reader :partition, :error_code, :offset, :timestamp
 
-      def initialize(partition:, error_code:, offset:)
+      def initialize(partition:, error_code:, offset:, timestamp:)
        @partition = partition
         @error_code = error_code
         @offset = offset
+        @timestamp = timestamp
       end
     end
 
-    attr_reader :topics
+    attr_reader :topics, :throttle_time_ms
 
-    def initialize(topics: [])
+    def initialize(topics: [], throttle_time_ms: 0)
       @topics = topics
+      @throttle_time_ms = throttle_time_ms
     end
 
     def each_partition
@@ -43,13 +45,16 @@ module Kafka
           partition: decoder.int32,
           error_code: decoder.int16,
          offset: decoder.int64,
+          timestamp: Time.at(decoder.int64 / 1000.0),
         )
       end
 
       TopicInfo.new(topic: topic, partitions: partitions)
     end
 
-    new(topics: topics)
+    throttle_time_ms = decoder.int32
+
+    new(topics: topics, throttle_time_ms: throttle_time_ms)
   end
 end
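
Per the Kafka 0.10 protocol, the per-partition timestamp in a v2 produce response is the broker's log-append time only for topics configured with LogAppendTime; for CreateTime topics the broker returns -1. With the decoding above, that sentinel comes back as a Time fractionally before the Unix epoch rather than nil, so pre-epoch values effectively mean "no timestamp".
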
data/lib/kafka/statsd.rb CHANGED
@@ -97,7 +97,6 @@ module Kafka
     end
 
     def process_batch(event)
-      lag = event.payload.fetch(:offset_lag)
       messages = event.payload.fetch(:message_count)
       client = event.payload.fetch(:client_id)
       group_id = event.payload.fetch(:group_id)
@@ -110,8 +109,6 @@ module Kafka
         timing("consumer.#{client}.#{group_id}.#{topic}.#{partition}.process_batch.latency", event.duration)
         count("consumer.#{client}.#{group_id}.#{topic}.#{partition}.messages", messages)
       end
-
-      gauge("consumer.#{client}.#{group_id}.#{topic}.#{partition}.lag", lag)
     end
 
     def join_group(event)
data/lib/kafka/version.rb CHANGED
@@ -1,3 +1,3 @@
 module Kafka
-  VERSION = "0.4.4"
+  VERSION = "0.5.0.beta2"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.4.4
+  version: 0.5.0.beta2
 platform: ruby
 authors:
 - Daniel Schierbeck
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2017-11-06 00:00:00.000000000 Z
+date: 2017-10-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -376,12 +376,12 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: 2.1.0
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.3.1
 requirements: []
 rubyforge_project:
-rubygems_version: 2.6.13
+rubygems_version: 2.6.11
 signing_key:
 specification_version: 4
 summary: A client library for the Kafka distributed commit log.
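
The flip from `>= 0` to `> 1.3.1` is not hand-written: RubyGems emits `required_rubygems_version > 1.3.1` for any gem whose version is a prerelease, so it simply tracks the bump to 0.5.0.beta2. The release date also moves backward because 0.5.0.beta2 (2017-10-23) was published before the 0.4.4 patch release (2017-11-06).
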