ruby-kafka 0.5.1.beta1 → 0.5.1.beta2
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +1 -0
- data/lib/kafka/connection.rb +2 -2
- data/lib/kafka/consumer.rb +1 -0
- data/lib/kafka/datadog.rb +9 -2
- data/lib/kafka/offset_manager.rb +3 -1
- data/lib/kafka/statsd.rb +10 -2
- data/lib/kafka/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: aee148ccfa605884f691d00ae8c82432a6d6bca1
|
4
|
+
data.tar.gz: 86f831463bc276e54a72d67e64aff5847ea41ec7
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: d28036dc879a07b3580db2cece96241b5002dca3bff5e7de109cbe699e15e893a4e1ddd13283ead236a5eca9e9b25db1e0bd7de02d5e4f8c5ace24e148c89c10
|
7
|
+
data.tar.gz: f3c969dfcd6cb2dca36a98a1cd80cfd20f63780e9b762fe96d6f8b39ee98ad376d8b7e641f97d5d15a647b2d63688168ec84636386d302ccbbcfbb00d1f4b093
|
data/CHANGELOG.md
CHANGED
@@ -11,6 +11,7 @@ Requires Kafka 0.10.1+ due to usage of a few new APIs.
|
|
11
11
|
- Make `#deliver_message` more resilient using retries and backoff.
|
12
12
|
- Add support for SASL SCRAM authentication (#465).
|
13
13
|
- Refactor and simplify SASL code.
|
14
|
+
- Fix issue when a consumer resets a partition to its default offset.
|
14
15
|
|
15
16
|
## v0.5.0
|
16
17
|
|
data/lib/kafka/connection.rb
CHANGED
@@ -89,11 +89,11 @@ module Kafka
|
|
89
89
|
response_size: 0,
|
90
90
|
}
|
91
91
|
|
92
|
+
raise IdleConnection if idle?
|
93
|
+
|
92
94
|
@instrumenter.instrument("request.connection", notification) do
|
93
95
|
open unless open?
|
94
96
|
|
95
|
-
raise IdleConnection if idle?
|
96
|
-
|
97
97
|
@correlation_id += 1
|
98
98
|
|
99
99
|
@logger.debug "Sending #{api_name} API request #{@correlation_id} to #{to_s}"
|
data/lib/kafka/consumer.rb
CHANGED
data/lib/kafka/datadog.rb
CHANGED
@@ -122,7 +122,9 @@ module Kafka
|
|
122
122
|
|
123
123
|
class ConsumerSubscriber < StatsdSubscriber
|
124
124
|
def process_message(event)
|
125
|
-
|
125
|
+
offset_lag = event.payload.fetch(:offset_lag)
|
126
|
+
create_time = event.payload.fetch(:create_time)
|
127
|
+
time_lag = create_time && ((Time.now - create_time) * 1000).to_i
|
126
128
|
|
127
129
|
tags = {
|
128
130
|
client: event.payload.fetch(:client_id),
|
@@ -138,7 +140,12 @@ module Kafka
|
|
138
140
|
increment("consumer.messages", tags: tags)
|
139
141
|
end
|
140
142
|
|
141
|
-
gauge("consumer.lag", event.payload.fetch(:offset_lag), tags: tags)
|
143
|
+
gauge("consumer.lag", offset_lag, tags: tags)
|
144
|
+
|
145
|
+
# Not all messages have timestamps.
|
146
|
+
if time_lag
|
147
|
+
gauge("consumer.time_lag", time_lag, tags: tags)
|
148
|
+
end
|
142
149
|
end
|
143
150
|
|
144
151
|
def process_batch(event)
|
data/lib/kafka/offset_manager.rb
CHANGED
@@ -66,7 +66,9 @@ module Kafka
|
|
66
66
|
# Remove any cached offset, in case things have changed broker-side.
|
67
67
|
clear_resolved_offset(topic)
|
68
68
|
|
69
|
-
|
69
|
+
offset = resolve_offset(topic, partition)
|
70
|
+
|
71
|
+
seek_to(topic, partition, offset)
|
70
72
|
end
|
71
73
|
|
72
74
|
# Move the consumer's position in the partition to the specified offset.
|
data/lib/kafka/statsd.rb
CHANGED
@@ -80,12 +80,15 @@ module Kafka
|
|
80
80
|
|
81
81
|
class ConsumerSubscriber < StatsdSubscriber
|
82
82
|
def process_message(event)
|
83
|
-
|
83
|
+
offset_lag = event.payload.fetch(:offset_lag)
|
84
|
+
create_time = event.payload.fetch(:create_time)
|
84
85
|
client = event.payload.fetch(:client_id)
|
85
86
|
group_id = event.payload.fetch(:group_id)
|
86
87
|
topic = event.payload.fetch(:topic)
|
87
88
|
partition = event.payload.fetch(:partition)
|
88
89
|
|
90
|
+
time_lag = create_time && ((Time.now - create_time) * 1000).to_i
|
91
|
+
|
89
92
|
if event.payload.key?(:exception)
|
90
93
|
increment("consumer.#{client}.#{group_id}.#{topic}.#{partition}.process_message.errors")
|
91
94
|
else
|
@@ -93,7 +96,12 @@ module Kafka
|
|
93
96
|
increment("consumer.#{client}.#{group_id}.#{topic}.#{partition}.messages")
|
94
97
|
end
|
95
98
|
|
96
|
-
gauge("consumer.#{client}.#{group_id}.#{topic}.#{partition}.lag", event.payload.fetch(:offset_lag))
|
99
|
+
gauge("consumer.#{client}.#{group_id}.#{topic}.#{partition}.lag", offset_lag)
|
100
|
+
|
101
|
+
# Not all messages have timestamps.
|
102
|
+
if time_lag
|
103
|
+
gauge("consumer.#{client}.#{group_id}.#{topic}.#{partition}.time_lag", time_lag)
|
104
|
+
end
|
97
105
|
end
|
98
106
|
|
99
107
|
def process_batch(event)
|
data/lib/kafka/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: ruby-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.5.1.beta1
|
4
|
+
version: 0.5.1.beta2
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Daniel Schierbeck
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2017-11-
|
11
|
+
date: 2017-11-10 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: bundler
|