ruby-kafka 0.4.4 → 0.5.0.beta2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -2
- data/circle.yml +1 -1
- data/lib/kafka/consumer.rb +0 -8
- data/lib/kafka/fetch_operation.rb +1 -0
- data/lib/kafka/fetched_message.rb +5 -1
- data/lib/kafka/protocol/fetch_request.rb +4 -0
- data/lib/kafka/protocol/fetch_response.rb +5 -2
- data/lib/kafka/protocol/message.rb +6 -3
- data/lib/kafka/protocol/produce_request.rb +4 -0
- data/lib/kafka/protocol/produce_response.rb +10 -5
- data/lib/kafka/statsd.rb +0 -3
- data/lib/kafka/version.rb +1 -1
- metadata +5 -5
checksums.yaml
CHANGED
```diff
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d91d5937445f7d3e85fd36801ac52d91f19fb192
+  data.tar.gz: b7baea6a96bd70ba6ec9ff4ad42e206c22e53c7e
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9e0dac8db89b3002a49538f0daf34c7df3c821a22eae2d1615d4bbc97c25f2b4312a0f7cab1c0682a96079213ee53e85e64784a01096d2ca252c9726635d9236
+  data.tar.gz: ef97106a320e4c933189a935f42f8d383dc0619176cee8df5f8b30c060adee157de9b3293e522795f2f276fd063140f7e2284db4e1ee3343c6104eb0773b0468
```
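These are the SHA1 and SHA512 digests of the two archives packed inside the `.gem` file. A sketch for verifying a downloaded copy, assuming you've untarred the `.gem` into the current directory:

```ruby
require "digest"

# Sketch: a .gem is a tar archive containing metadata.gz, data.tar.gz and
# checksums.yaml.gz. After untarring one, these digests should match the diff.
%w(metadata.gz data.tar.gz).each do |file|
  puts "#{file} SHA1:   #{Digest::SHA1.file(file).hexdigest}"
  puts "#{file} SHA512: #{Digest::SHA512.file(file).hexdigest}"
end
```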
data/CHANGELOG.md
CHANGED
```diff
@@ -4,13 +4,16 @@ Changes and additions to the library will be listed here.
 
 ## Unreleased
 
-## v0.
+## v0.5.0
 
--
+- Drops support for Kafka 0.9 in favor of Kafka 0.10 (#381)!
+- Handle cases where there are no partitions to fetch from by sleeping a bit (#439).
+- Handle problems with the broker cache (#440).
 
 ## v0.4.3
 
 - Restart the async producer thread automatically after errors.
+- Include the offset lag in batch consumer metrics (Statsd).
 - Make the default `max_wait_time` more sane.
 - Fix issue with cached default offset lookups (#431).
 - Upgrade to Datadog client version 3.
```
data/circle.yml
CHANGED
data/lib/kafka/consumer.rb
CHANGED
```diff
@@ -216,8 +216,6 @@ module Kafka
 
         # We may not have received any messages, but it's still a good idea to
         # commit offsets if we've processed messages in the last set of batches.
-        # This also ensures the offsets are retained if we haven't read any messages
-        # since the offset retention period has elapsed.
         @offset_manager.commit_offsets_if_necessary
       end
     end
@@ -281,12 +279,6 @@ module Kafka
 
           return if !@running
         end
-
-        # We may not have received any messages, but it's still a good idea to
-        # commit offsets if we've processed messages in the last set of batches.
-        # This also ensures the offsets are retained if we haven't read any messages
-        # since the offset retention period has elapsed.
-        @offset_manager.commit_offsets_if_necessary
       end
     end
 
```
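Both hunks prune the offset-retention comment; the second also drops what looks like a redundant trailing `commit_offsets_if_necessary` call, leaving one commit per poll loop. For context, here is a minimal sketch of the "commit if necessary" pattern, with illustrative names and thresholds rather than ruby-kafka's internals:

```ruby
# A minimal sketch of interval/threshold-based "commit if necessary".
# All names and defaults here are illustrative, not ruby-kafka's own.
class SketchOffsetManager
  def initialize(commit_interval: 10, commit_threshold: 100)
    @commit_interval = commit_interval   # seconds between forced commits
    @commit_threshold = commit_threshold # processed messages before forcing
    @uncommitted = 0
    @last_commit = Time.now
  end

  def mark_as_processed
    @uncommitted += 1
  end

  def commit_offsets_if_necessary
    interval_elapsed = Time.now - @last_commit >= @commit_interval
    backlog_reached  = @uncommitted >= @commit_threshold
    return unless interval_elapsed || backlog_reached

    # A real implementation would issue an OffsetCommit request here.
    @uncommitted = 0
    @last_commit = Time.now
  end
end
```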
data/lib/kafka/fetched_message.rb
CHANGED

```diff
@@ -16,12 +16,16 @@ module Kafka
     # @return [Integer] the offset of the message in the partition.
     attr_reader :offset
 
-    def initialize(value:, key:, topic:, partition:, offset:)
+    # @return [Time] the timestamp of the message.
+    attr_reader :create_time
+
+    def initialize(value:, key:, topic:, partition:, offset:, create_time:)
      @value = value
      @key = key
      @topic = topic
      @partition = partition
      @offset = offset
+     @create_time = create_time
    end
  end
end
```
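`FetchedMessage` now exposes the broker timestamp as `create_time`. A consumption sketch, assuming a broker on localhost:9092 and a hypothetical `greetings` topic, reads it like any other attribute:

```ruby
require "kafka"

# Sketch: localhost:9092 and the "greetings" topic are assumptions for
# illustration. Only message.create_time is new in this release.
kafka = Kafka.new(seed_brokers: ["localhost:9092"], client_id: "example")
consumer = kafka.consumer(group_id: "example-group")
consumer.subscribe("greetings")

consumer.each_message do |message|
  puts "#{message.topic}/#{message.partition}@#{message.offset} " \
       "created at #{message.create_time}"
end
```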
data/lib/kafka/protocol/fetch_response.rb
CHANGED

```diff
@@ -38,11 +38,14 @@ module Kafka
 
      attr_reader :topics
 
-     def initialize(topics: [])
+     def initialize(topics: [], throttle_time_ms: 0)
        @topics = topics
+       @throttle_time_ms = throttle_time_ms
      end
 
      def self.decode(decoder)
+       throttle_time_ms = decoder.int32
+
        topics = decoder.array do
          topic_name = decoder.string
 
@@ -68,7 +71,7 @@ module Kafka
          )
        end
 
-       new(topics: topics)
+       new(topics: topics, throttle_time_ms: throttle_time_ms)
      end
    end
  end
```
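In the Kafka 0.10-era Fetch response, `throttle_time_ms` precedes the topic array, so `decode` must consume it first. A self-contained sketch of just that framing, using `StringIO` and big-endian unpacking in place of the library's decoder:

```ruby
require "stringio"

# Framing sketch only: an int32 throttle_time_ms, then the topic array
# (represented here by just its int32 length). Not the library's Decoder.
buffer = StringIO.new([100, 0].pack("l>l>")) # 100ms throttle, 0 topics

throttle_time_ms = buffer.read(4).unpack1("l>")
topic_count      = buffer.read(4).unpack1("l>")

puts "throttled #{throttle_time_ms}ms, #{topic_count} topics"
```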
data/lib/kafka/protocol/message.rb
CHANGED

```diff
@@ -6,15 +6,16 @@ module Kafka
 
     # ## API Specification
     #
-    #     Message => Crc MagicByte Attributes Key Value
+    #     Message => Crc MagicByte Attributes Timestamp Key Value
     #       Crc => int32
     #       MagicByte => int8
     #       Attributes => int8
+    #       Timestamp => int64, in ms
     #       Key => bytes
     #       Value => bytes
     #
     class Message
-      MAGIC_BYTE =
+      MAGIC_BYTE = 1
 
       attr_reader :key, :value, :codec_id, :offset
 
@@ -71,6 +72,7 @@ module Kafka
        end
 
        attributes = message_decoder.int8
+       timestamp = message_decoder.int64
        key = message_decoder.bytes
        value = message_decoder.bytes
 
@@ -78,7 +80,7 @@ module Kafka
        # attributes.
        codec_id = attributes & 0b111
 
-       new(key: key, value: value, codec_id: codec_id, offset: offset)
+       new(key: key, value: value, codec_id: codec_id, offset: offset, create_time: Time.at(timestamp/1000.0))
      end
 
      private
@@ -102,6 +104,7 @@ module Kafka
 
        encoder.write_int8(MAGIC_BYTE)
        encoder.write_int8(@codec_id)
+       encoder.write_int64((@create_time.to_f*1000).to_i)
        encoder.write_bytes(@key)
        encoder.write_bytes(@value)
 
```
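Both directions convert between `Time` and int64 milliseconds, so anything below millisecond precision is dropped on the wire. A quick round-trip sketch:

```ruby
# Round-trip sketch of the conversion above: Time -> int64 ms -> Time.
t  = Time.now
ms = (t.to_f * 1000).to_i  # what the encoder writes
t2 = Time.at(ms / 1000.0)  # what the decoder reconstructs

drift = (t - t2).abs
puts drift < 0.001  # => true; sub-millisecond precision is lost
```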
data/lib/kafka/protocol/produce_response.rb
CHANGED

```diff
@@ -11,19 +11,21 @@ module Kafka
      end
 
      class PartitionInfo
-       attr_reader :partition, :error_code, :offset
+       attr_reader :partition, :error_code, :offset, :timestamp
 
-       def initialize(partition:, error_code:, offset:)
+       def initialize(partition:, error_code:, offset:, timestamp:)
          @partition = partition
          @error_code = error_code
          @offset = offset
+         @timestamp = timestamp
        end
      end
 
-     attr_reader :topics
+     attr_reader :topics, :throttle_time_ms
 
-     def initialize(topics: [])
+     def initialize(topics: [], throttle_time_ms: 0)
        @topics = topics
+       @throttle_time_ms = throttle_time_ms
      end
 
      def each_partition
@@ -43,13 +45,16 @@ module Kafka
            partition: decoder.int32,
            error_code: decoder.int16,
            offset: decoder.int64,
+           timestamp: Time.at(decoder.int64/1000.0),
          )
        end
 
        TopicInfo.new(topic: topic, partitions: partitions)
      end
 
-      new(topics: topics)
+      throttle_time_ms = decoder.int32
+
+      new(topics: topics, throttle_time_ms: throttle_time_ms)
    end
  end
end
```
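Note the framing difference from the Fetch response above: here `throttle_time_ms` trails the topic array rather than leading it. A mirror-image sketch of the ordering:

```ruby
require "stringio"

# Framing sketch only: the topic array (an empty one, length 0) comes first,
# throttle_time_ms last (the reverse of the fetch response).
buffer = StringIO.new([0, 250].pack("l>l>"))

topic_count      = buffer.read(4).unpack1("l>")
throttle_time_ms = buffer.read(4).unpack1("l>")

puts "#{topic_count} topics, throttled #{throttle_time_ms}ms"
```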
data/lib/kafka/statsd.rb
CHANGED
```diff
@@ -97,7 +97,6 @@ module Kafka
     end
 
     def process_batch(event)
-      lag = event.payload.fetch(:offset_lag)
       messages = event.payload.fetch(:message_count)
       client = event.payload.fetch(:client_id)
       group_id = event.payload.fetch(:group_id)
@@ -110,8 +109,6 @@ module Kafka
         timing("consumer.#{client}.#{group_id}.#{topic}.#{partition}.process_batch.latency", event.duration)
         count("consumer.#{client}.#{group_id}.#{topic}.#{partition}.messages", messages)
       end
-
-      gauge("consumer.#{client}.#{group_id}.#{topic}.#{partition}.lag", lag)
     end
 
     def join_group(event)
```
|
data/lib/kafka/version.rb
CHANGED
metadata
CHANGED
```diff
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.4.4
+  version: 0.5.0.beta2
 platform: ruby
 authors:
 - Daniel Schierbeck
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2017-
+date: 2017-10-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -376,12 +376,12 @@ required_ruby_version: !ruby/object:Gem::Requirement
     version: 2.1.0
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - "
+  - - ">"
     - !ruby/object:Gem::Version
-      version:
+      version: 1.3.1
 requirements: []
 rubyforge_project:
-rubygems_version: 2.6.
+rubygems_version: 2.6.11
 signing_key:
 specification_version: 4
 summary: A client library for the Kafka distributed commit log.
```
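The `">" 1.3.1` rubygems requirement is what RubyGems stamps on prerelease versions, so a bare `gem "ruby-kafka"` will never resolve to this beta. A minimal Gemfile sketch for opting in:

```ruby
# Gemfile: prerelease gems must be requested explicitly; a bare
# requirement resolves only to stable releases.
source "https://rubygems.org"

gem "ruby-kafka", "0.5.0.beta2"
```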