ruby-kafka 0.4.1 → 0.4.2
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +2 -0
- data/Gemfile.lock +2 -2
- data/lib/kafka/async_producer.rb +15 -3
- data/lib/kafka/client.rb +13 -6
- data/lib/kafka/cluster.rb +6 -1
- data/lib/kafka/connection.rb +3 -1
- data/lib/kafka/connection_builder.rb +4 -48
- data/lib/kafka/consumer_group.rb +31 -23
- data/lib/kafka/datadog.rb +51 -0
- data/lib/kafka/offset_manager.rb +2 -2
- data/lib/kafka/sasl_authenticator.rb +56 -0
- data/lib/kafka/statsd.rb +40 -0
- data/lib/kafka/version.rb +1 -1
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 5c001c97f73b001e170f23d7fd970781aa6bb196
|
4
|
+
data.tar.gz: 39f0b03b56ad97eee788fac63ec467599ad8cd8e
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 6f9312f25ab7493d5803b581b22af270399f0f5ff2fbf4012da1f4a30525c430fb1fa21085cb4042aa4b59074a0abe3af9038ee6e97f0b48770baa50eaab6393
|
7
|
+
data.tar.gz: 7523013dbc08d7a193faf6b2b2419a6704cb4b7918f9ed993545b534e356084a765a1c8cfba937a55d7236c26985e3cf252cbe50df6c3c98a45e1bc47e32caad
|
data/CHANGELOG.md
CHANGED
data/Gemfile.lock
CHANGED
data/lib/kafka/async_producer.rb
CHANGED
@@ -83,6 +83,8 @@ module Kafka
|
|
83
83
|
queue: @queue,
|
84
84
|
producer: sync_producer,
|
85
85
|
delivery_threshold: delivery_threshold,
|
86
|
+
instrumenter: instrumenter,
|
87
|
+
logger: logger,
|
86
88
|
)
|
87
89
|
|
88
90
|
# The timer will no-op if the delivery interval is zero.
|
@@ -180,10 +182,12 @@ module Kafka
|
|
180
182
|
end
|
181
183
|
|
182
184
|
class Worker
|
183
|
-
def initialize(queue:, producer:, delivery_threshold:)
|
185
|
+
def initialize(queue:, producer:, delivery_threshold:, instrumenter:, logger:)
|
184
186
|
@queue = queue
|
185
187
|
@producer = producer
|
186
188
|
@delivery_threshold = delivery_threshold
|
189
|
+
@instrumenter = instrumenter
|
190
|
+
@logger = logger
|
187
191
|
end
|
188
192
|
|
189
193
|
def run
|
@@ -197,8 +201,16 @@ module Kafka
|
|
197
201
|
when :deliver_messages
|
198
202
|
deliver_messages
|
199
203
|
when :shutdown
|
200
|
-
|
201
|
-
|
204
|
+
begin
|
205
|
+
# Deliver any pending messages first.
|
206
|
+
@producer.deliver_messages
|
207
|
+
rescue Error => e
|
208
|
+
@logger.error("Failed to deliver messages during shutdown: #{e.message}")
|
209
|
+
|
210
|
+
@instrumenter.instrument("drop_messages.async_producer", {
|
211
|
+
message_count: @producer.buffer_size + @queue.size,
|
212
|
+
})
|
213
|
+
end
|
202
214
|
|
203
215
|
# Stop the run loop.
|
204
216
|
break
|
data/lib/kafka/client.rb
CHANGED
@@ -10,6 +10,7 @@ require "kafka/fetched_message"
|
|
10
10
|
require "kafka/fetch_operation"
|
11
11
|
require "kafka/connection_builder"
|
12
12
|
require "kafka/instrumenter"
|
13
|
+
require "kafka/sasl_authenticator"
|
13
14
|
|
14
15
|
module Kafka
|
15
16
|
class Client
|
@@ -58,6 +59,15 @@ module Kafka
|
|
58
59
|
|
59
60
|
ssl_context = build_ssl_context(ssl_ca_cert_file_path, ssl_ca_cert, ssl_client_cert, ssl_client_cert_key)
|
60
61
|
|
62
|
+
sasl_authenticator = SaslAuthenticator.new(
|
63
|
+
sasl_gssapi_principal: sasl_gssapi_principal,
|
64
|
+
sasl_gssapi_keytab: sasl_gssapi_keytab,
|
65
|
+
sasl_plain_authzid: sasl_plain_authzid,
|
66
|
+
sasl_plain_username: sasl_plain_username,
|
67
|
+
sasl_plain_password: sasl_plain_password,
|
68
|
+
logger: @logger
|
69
|
+
)
|
70
|
+
|
61
71
|
@connection_builder = ConnectionBuilder.new(
|
62
72
|
client_id: client_id,
|
63
73
|
connect_timeout: connect_timeout,
|
@@ -65,11 +75,7 @@ module Kafka
|
|
65
75
|
ssl_context: ssl_context,
|
66
76
|
logger: @logger,
|
67
77
|
instrumenter: @instrumenter,
|
68
|
-
|
69
|
-
sasl_gssapi_keytab: sasl_gssapi_keytab,
|
70
|
-
sasl_plain_authzid: sasl_plain_authzid,
|
71
|
-
sasl_plain_username: sasl_plain_username,
|
72
|
-
sasl_plain_password: sasl_plain_password
|
78
|
+
sasl_authenticator: sasl_authenticator
|
73
79
|
)
|
74
80
|
|
75
81
|
@cluster = initialize_cluster
|
@@ -248,7 +254,8 @@ module Kafka
|
|
248
254
|
logger: @logger,
|
249
255
|
group_id: group_id,
|
250
256
|
session_timeout: session_timeout,
|
251
|
-
retention_time: retention_time
|
257
|
+
retention_time: retention_time,
|
258
|
+
instrumenter: instrumenter,
|
252
259
|
)
|
253
260
|
|
254
261
|
offset_manager = OffsetManager.new(
|
data/lib/kafka/cluster.rb
CHANGED
@@ -193,6 +193,8 @@ module Kafka
|
|
193
193
|
# @raise [ConnectionError] if none of the nodes in `seed_brokers` are available.
|
194
194
|
# @return [Protocol::MetadataResponse] the cluster metadata.
|
195
195
|
def fetch_cluster_info
|
196
|
+
errors = []
|
197
|
+
|
196
198
|
@seed_brokers.shuffle.each do |node|
|
197
199
|
@logger.info "Fetching cluster metadata from #{node}"
|
198
200
|
|
@@ -207,12 +209,15 @@ module Kafka
|
|
207
209
|
return cluster_info
|
208
210
|
rescue Error => e
|
209
211
|
@logger.error "Failed to fetch metadata from #{node}: #{e}"
|
212
|
+
errors << [node, e]
|
210
213
|
ensure
|
211
214
|
broker.disconnect unless broker.nil?
|
212
215
|
end
|
213
216
|
end
|
214
217
|
|
215
|
-
|
218
|
+
error_description = errors.map {|node, exception| "- #{node}: #{exception}" }.join("\n")
|
219
|
+
|
220
|
+
raise ConnectionError, "Could not connect to any of the seed brokers:\n#{error_description}"
|
216
221
|
end
|
217
222
|
|
218
223
|
def connect_to_broker(broker_id)
|
data/lib/kafka/connection.rb
CHANGED
@@ -48,7 +48,7 @@ module Kafka
|
|
48
48
|
# broker. Default is 10 seconds.
|
49
49
|
#
|
50
50
|
# @return [Connection] a new connection.
|
51
|
-
def initialize(host:, port:, client_id:, logger:, instrumenter:, connect_timeout: nil, socket_timeout: nil, ssl_context: nil)
|
51
|
+
def initialize(host:, port:, client_id:, logger:, instrumenter:, sasl_authenticator:, connect_timeout: nil, socket_timeout: nil, ssl_context: nil)
|
52
52
|
@host, @port, @client_id = host, port, client_id
|
53
53
|
@logger = logger
|
54
54
|
@instrumenter = instrumenter
|
@@ -56,6 +56,7 @@ module Kafka
|
|
56
56
|
@connect_timeout = connect_timeout || CONNECT_TIMEOUT
|
57
57
|
@socket_timeout = socket_timeout || SOCKET_TIMEOUT
|
58
58
|
@ssl_context = ssl_context
|
59
|
+
@sasl_authenticator = sasl_authenticator
|
59
60
|
end
|
60
61
|
|
61
62
|
def to_s
|
@@ -128,6 +129,7 @@ module Kafka
|
|
128
129
|
@correlation_id = 0
|
129
130
|
|
130
131
|
@last_request = nil
|
132
|
+
@sasl_authenticator.authenticate!(self)
|
131
133
|
rescue Errno::ETIMEDOUT => e
|
132
134
|
@logger.error "Timed out while trying to connect to #{self}: #{e}"
|
133
135
|
raise ConnectionError, e
|
@@ -1,20 +1,13 @@
|
|
1
|
-
require 'kafka/sasl_gssapi_authenticator'
|
2
|
-
require 'kafka/sasl_plain_authenticator'
|
3
|
-
|
4
1
|
module Kafka
|
5
2
|
class ConnectionBuilder
|
6
|
-
def initialize(client_id:, logger:, instrumenter:, connect_timeout:, socket_timeout:, ssl_context:,
|
3
|
+
def initialize(client_id:, logger:, instrumenter:, connect_timeout:, socket_timeout:, ssl_context:, sasl_authenticator:)
|
7
4
|
@client_id = client_id
|
8
5
|
@logger = logger
|
9
6
|
@instrumenter = instrumenter
|
10
7
|
@connect_timeout = connect_timeout
|
11
8
|
@socket_timeout = socket_timeout
|
12
9
|
@ssl_context = ssl_context
|
13
|
-
@sasl_gssapi_principal = sasl_gssapi_principal
|
14
|
-
@sasl_gssapi_keytab = sasl_gssapi_keytab
|
15
|
-
@sasl_plain_authzid = sasl_plain_authzid
|
16
|
-
@sasl_plain_username = sasl_plain_username
|
17
|
-
@sasl_plain_password = sasl_plain_password
|
10
|
+
@sasl_authenticator = sasl_authenticator
|
18
11
|
end
|
19
12
|
|
20
13
|
def build_connection(host, port)
|
@@ -26,49 +19,12 @@ module Kafka
|
|
26
19
|
socket_timeout: @socket_timeout,
|
27
20
|
logger: @logger,
|
28
21
|
instrumenter: @instrumenter,
|
29
|
-
ssl_context: @ssl_context
|
22
|
+
ssl_context: @ssl_context,
|
23
|
+
sasl_authenticator: @sasl_authenticator
|
30
24
|
)
|
31
25
|
|
32
|
-
if authenticate_using_sasl_gssapi?
|
33
|
-
sasl_gssapi_authenticate(connection)
|
34
|
-
elsif authenticate_using_sasl_plain?
|
35
|
-
sasl_plain_authenticate(connection)
|
36
|
-
end
|
37
|
-
|
38
26
|
connection
|
39
27
|
end
|
40
28
|
|
41
|
-
private
|
42
|
-
|
43
|
-
def sasl_gssapi_authenticate(connection)
|
44
|
-
auth = SaslGssapiAuthenticator.new(
|
45
|
-
connection: connection,
|
46
|
-
logger: @logger,
|
47
|
-
sasl_gssapi_principal: @sasl_gssapi_principal,
|
48
|
-
sasl_gssapi_keytab: @sasl_gssapi_keytab
|
49
|
-
)
|
50
|
-
|
51
|
-
auth.authenticate!
|
52
|
-
end
|
53
|
-
|
54
|
-
def sasl_plain_authenticate(connection)
|
55
|
-
auth = SaslPlainAuthenticator.new(
|
56
|
-
connection: connection,
|
57
|
-
logger: @logger,
|
58
|
-
authzid: @sasl_plain_authzid,
|
59
|
-
username: @sasl_plain_username,
|
60
|
-
password: @sasl_plain_password
|
61
|
-
)
|
62
|
-
|
63
|
-
auth.authenticate!
|
64
|
-
end
|
65
|
-
|
66
|
-
def authenticate_using_sasl_gssapi?
|
67
|
-
!@ssl_context && @sasl_gssapi_principal && !@sasl_gssapi_principal.empty?
|
68
|
-
end
|
69
|
-
|
70
|
-
def authenticate_using_sasl_plain?
|
71
|
-
@sasl_plain_authzid && @sasl_plain_username && @sasl_plain_password
|
72
|
-
end
|
73
29
|
end
|
74
30
|
end
|
data/lib/kafka/consumer_group.rb
CHANGED
@@ -5,11 +5,12 @@ module Kafka
|
|
5
5
|
class ConsumerGroup
|
6
6
|
attr_reader :assigned_partitions, :generation_id
|
7
7
|
|
8
|
-
def initialize(cluster:, logger:, group_id:, session_timeout:, retention_time:)
|
8
|
+
def initialize(cluster:, logger:, group_id:, session_timeout:, retention_time:, instrumenter:)
|
9
9
|
@cluster = cluster
|
10
10
|
@logger = logger
|
11
11
|
@group_id = group_id
|
12
12
|
@session_timeout = session_timeout
|
13
|
+
@instrumenter = instrumenter
|
13
14
|
@member_id = ""
|
14
15
|
@generation_id = nil
|
15
16
|
@members = {}
|
@@ -53,7 +54,10 @@ module Kafka
|
|
53
54
|
|
54
55
|
def leave
|
55
56
|
@logger.info "Leaving group `#{@group_id}`"
|
56
|
-
|
57
|
+
|
58
|
+
@instrumenter.instrument("leave_group.consumer", group_id: @group_id) do
|
59
|
+
coordinator.leave_group(group_id: @group_id, member_id: @member_id)
|
60
|
+
end
|
57
61
|
rescue ConnectionError
|
58
62
|
end
|
59
63
|
|
@@ -108,18 +112,20 @@ module Kafka
|
|
108
112
|
def join_group
|
109
113
|
@logger.info "Joining group `#{@group_id}`"
|
110
114
|
|
111
|
-
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
115
|
+
@instrumenter.instrument("join_group.consumer", group_id: @group_id) do
|
116
|
+
response = coordinator.join_group(
|
117
|
+
group_id: @group_id,
|
118
|
+
session_timeout: @session_timeout,
|
119
|
+
member_id: @member_id,
|
120
|
+
)
|
116
121
|
|
117
|
-
|
122
|
+
Protocol.handle_error(response.error_code)
|
118
123
|
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
124
|
+
@generation_id = response.generation_id
|
125
|
+
@member_id = response.member_id
|
126
|
+
@leader_id = response.leader_id
|
127
|
+
@members = response.members
|
128
|
+
end
|
123
129
|
|
124
130
|
@logger.info "Joined group `#{@group_id}` with member id `#{@member_id}`"
|
125
131
|
rescue UnknownMemberId
|
@@ -147,20 +153,22 @@ module Kafka
|
|
147
153
|
)
|
148
154
|
end
|
149
155
|
|
150
|
-
|
151
|
-
|
152
|
-
|
153
|
-
|
154
|
-
|
155
|
-
|
156
|
+
@instrumenter.instrument("sync_group.consumer", group_id: @group_id) do
|
157
|
+
response = coordinator.sync_group(
|
158
|
+
group_id: @group_id,
|
159
|
+
generation_id: @generation_id,
|
160
|
+
member_id: @member_id,
|
161
|
+
group_assignment: group_assignment,
|
162
|
+
)
|
156
163
|
|
157
|
-
|
164
|
+
Protocol.handle_error(response.error_code)
|
158
165
|
|
159
|
-
|
160
|
-
|
161
|
-
|
166
|
+
response.member_assignment.topics.each do |topic, assigned_partitions|
|
167
|
+
@logger.info "Partitions assigned for `#{topic}`: #{assigned_partitions.join(', ')}"
|
168
|
+
end
|
162
169
|
|
163
|
-
|
170
|
+
@assigned_partitions.replace(response.member_assignment.topics)
|
171
|
+
end
|
164
172
|
end
|
165
173
|
|
166
174
|
def coordinator
|
data/lib/kafka/datadog.rb
CHANGED
@@ -130,6 +130,45 @@ module Kafka
|
|
130
130
|
end
|
131
131
|
end
|
132
132
|
|
133
|
+
def join_group(event)
|
134
|
+
tags = {
|
135
|
+
client: event.payload.fetch(:client_id),
|
136
|
+
group_id: event.payload.fetch(:group_id),
|
137
|
+
}
|
138
|
+
|
139
|
+
timing("consumer.join_group", event.duration, tags: tags)
|
140
|
+
|
141
|
+
if event.payload.key?(:exception)
|
142
|
+
increment("consumer.join_group.errors", tags: tags)
|
143
|
+
end
|
144
|
+
end
|
145
|
+
|
146
|
+
def sync_group(event)
|
147
|
+
tags = {
|
148
|
+
client: event.payload.fetch(:client_id),
|
149
|
+
group_id: event.payload.fetch(:group_id),
|
150
|
+
}
|
151
|
+
|
152
|
+
timing("consumer.sync_group", event.duration, tags: tags)
|
153
|
+
|
154
|
+
if event.payload.key?(:exception)
|
155
|
+
increment("consumer.sync_group.errors", tags: tags)
|
156
|
+
end
|
157
|
+
end
|
158
|
+
|
159
|
+
def leave_group(event)
|
160
|
+
tags = {
|
161
|
+
client: event.payload.fetch(:client_id),
|
162
|
+
group_id: event.payload.fetch(:group_id),
|
163
|
+
}
|
164
|
+
|
165
|
+
timing("consumer.leave_group", event.duration, tags: tags)
|
166
|
+
|
167
|
+
if event.payload.key?(:exception)
|
168
|
+
increment("consumer.leave_group.errors", tags: tags)
|
169
|
+
end
|
170
|
+
end
|
171
|
+
|
133
172
|
attach_to "consumer.kafka"
|
134
173
|
end
|
135
174
|
|
@@ -145,6 +184,7 @@ module Kafka
|
|
145
184
|
|
146
185
|
tags = {
|
147
186
|
client: client,
|
187
|
+
topic: topic,
|
148
188
|
}
|
149
189
|
|
150
190
|
# This gets us the write rate.
|
@@ -226,6 +266,7 @@ module Kafka
|
|
226
266
|
|
227
267
|
tags = {
|
228
268
|
client: client,
|
269
|
+
topic: topic,
|
229
270
|
}
|
230
271
|
|
231
272
|
# This gets us the avg/max queue size per producer.
|
@@ -244,6 +285,16 @@ module Kafka
|
|
244
285
|
increment("async_producer.produce.errors", tags: tags)
|
245
286
|
end
|
246
287
|
|
288
|
+
def drop_messages(event)
|
289
|
+
tags = {
|
290
|
+
client: event.payload.fetch(:client_id),
|
291
|
+
}
|
292
|
+
|
293
|
+
message_count = event.payload.fetch(:message_count)
|
294
|
+
|
295
|
+
count("async_producer.dropped_messages", message_count, tags: tags)
|
296
|
+
end
|
297
|
+
|
247
298
|
attach_to "async_producer.kafka"
|
248
299
|
end
|
249
300
|
end
|
data/lib/kafka/offset_manager.rb
CHANGED
@@ -30,9 +30,9 @@ module Kafka
|
|
30
30
|
@processed_offsets[topic] ||= {}
|
31
31
|
|
32
32
|
# The committed offset should always be the offset of the next message that the
|
33
|
-
# application will read, thus adding one to the last message processed
|
33
|
+
# application will read, thus adding one to the last message processed.
|
34
34
|
@processed_offsets[topic][partition] = offset + 1
|
35
|
-
@logger.debug "Marking #{topic}/#{partition}:#{offset} as committed"
|
35
|
+
@logger.debug "Marking #{topic}/#{partition}:#{offset} as processed"
|
36
36
|
end
|
37
37
|
|
38
38
|
def seek_to_default(topic, partition)
|
@@ -0,0 +1,56 @@
|
|
1
|
+
require 'kafka/sasl_gssapi_authenticator'
|
2
|
+
require 'kafka/sasl_plain_authenticator'
|
3
|
+
|
4
|
+
module Kafka
|
5
|
+
class SaslAuthenticator
|
6
|
+
def initialize(logger:, sasl_gssapi_principal:, sasl_gssapi_keytab:, sasl_plain_authzid:, sasl_plain_username:, sasl_plain_password:)
|
7
|
+
@logger = logger
|
8
|
+
@sasl_gssapi_principal = sasl_gssapi_principal
|
9
|
+
@sasl_gssapi_keytab = sasl_gssapi_keytab
|
10
|
+
@sasl_plain_authzid = sasl_plain_authzid
|
11
|
+
@sasl_plain_username = sasl_plain_username
|
12
|
+
@sasl_plain_password = sasl_plain_password
|
13
|
+
end
|
14
|
+
|
15
|
+
def authenticate!(connection)
|
16
|
+
if authenticate_using_sasl_gssapi?
|
17
|
+
sasl_gssapi_authenticate(connection)
|
18
|
+
elsif authenticate_using_sasl_plain?
|
19
|
+
sasl_plain_authenticate(connection)
|
20
|
+
end
|
21
|
+
end
|
22
|
+
|
23
|
+
private
|
24
|
+
|
25
|
+
def sasl_gssapi_authenticate(connection)
|
26
|
+
auth = SaslGssapiAuthenticator.new(
|
27
|
+
connection: connection,
|
28
|
+
logger: @logger,
|
29
|
+
sasl_gssapi_principal: @sasl_gssapi_principal,
|
30
|
+
sasl_gssapi_keytab: @sasl_gssapi_keytab
|
31
|
+
)
|
32
|
+
|
33
|
+
auth.authenticate!
|
34
|
+
end
|
35
|
+
|
36
|
+
def sasl_plain_authenticate(connection)
|
37
|
+
auth = SaslPlainAuthenticator.new(
|
38
|
+
connection: connection,
|
39
|
+
logger: @logger,
|
40
|
+
authzid: @sasl_plain_authzid,
|
41
|
+
username: @sasl_plain_username,
|
42
|
+
password: @sasl_plain_password
|
43
|
+
)
|
44
|
+
|
45
|
+
auth.authenticate!
|
46
|
+
end
|
47
|
+
|
48
|
+
def authenticate_using_sasl_gssapi?
|
49
|
+
!@ssl_context && @sasl_gssapi_principal && !@sasl_gssapi_principal.empty?
|
50
|
+
end
|
51
|
+
|
52
|
+
def authenticate_using_sasl_plain?
|
53
|
+
@sasl_plain_authzid && @sasl_plain_username && @sasl_plain_password
|
54
|
+
end
|
55
|
+
end
|
56
|
+
end
|
data/lib/kafka/statsd.rb
CHANGED
@@ -111,6 +111,39 @@ module Kafka
|
|
111
111
|
end
|
112
112
|
end
|
113
113
|
|
114
|
+
def join_group(event)
|
115
|
+
client = event.payload.fetch(:client_id)
|
116
|
+
group_id = event.payload.fetch(:group_id)
|
117
|
+
|
118
|
+
timing("consumer.#{client}.#{group_id}.join_group", event.duration)
|
119
|
+
|
120
|
+
if event.payload.key?(:exception)
|
121
|
+
increment("consumer.#{client}.#{group_id}.join_group.errors")
|
122
|
+
end
|
123
|
+
end
|
124
|
+
|
125
|
+
def sync_group(event)
|
126
|
+
client = event.payload.fetch(:client_id)
|
127
|
+
group_id = event.payload.fetch(:group_id)
|
128
|
+
|
129
|
+
timing("consumer.#{client}.#{group_id}.sync_group", event.duration)
|
130
|
+
|
131
|
+
if event.payload.key?(:exception)
|
132
|
+
increment("consumer.#{client}.#{group_id}.sync_group.errors")
|
133
|
+
end
|
134
|
+
end
|
135
|
+
|
136
|
+
def leave_group(event)
|
137
|
+
client = event.payload.fetch(:client_id)
|
138
|
+
group_id = event.payload.fetch(:group_id)
|
139
|
+
|
140
|
+
timing("consumer.#{client}.#{group_id}.leave_group", event.duration)
|
141
|
+
|
142
|
+
if event.payload.key?(:exception)
|
143
|
+
increment("consumer.#{client}.#{group_id}.leave_group.errors")
|
144
|
+
end
|
145
|
+
end
|
146
|
+
|
114
147
|
attach_to "consumer.kafka"
|
115
148
|
end
|
116
149
|
|
@@ -205,6 +238,13 @@ module Kafka
|
|
205
238
|
increment("async_producer.#{client}.#{topic}.produce.errors")
|
206
239
|
end
|
207
240
|
|
241
|
+
def drop_messages(event)
|
242
|
+
client = event.payload.fetch(:client_id)
|
243
|
+
message_count = event.payload.fetch(:message_count)
|
244
|
+
|
245
|
+
count("async_producer.#{client}.dropped_messages", message_count)
|
246
|
+
end
|
247
|
+
|
208
248
|
attach_to "async_producer.kafka"
|
209
249
|
end
|
210
250
|
end
|
data/lib/kafka/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: ruby-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.4.1
|
4
|
+
version: 0.4.2
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Daniel Schierbeck
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2017-08
|
11
|
+
date: 2017-09-08 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: bundler
|
@@ -322,6 +322,7 @@ files:
|
|
322
322
|
- lib/kafka/protocol/sync_group_response.rb
|
323
323
|
- lib/kafka/protocol/topic_metadata_request.rb
|
324
324
|
- lib/kafka/round_robin_assignment_strategy.rb
|
325
|
+
- lib/kafka/sasl_authenticator.rb
|
325
326
|
- lib/kafka/sasl_gssapi_authenticator.rb
|
326
327
|
- lib/kafka/sasl_plain_authenticator.rb
|
327
328
|
- lib/kafka/snappy_codec.rb
|