ruby-kafka 0.3.0 → 0.3.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: 87132b87fc32443be48994590b059a88b6cc5fea
-   data.tar.gz: c32e7ed87e9dcc7c2ba6f3128db0aadb123d1962
+   metadata.gz: 2842846dbe233b635e3dae0c3e14d9d660d19089
+   data.tar.gz: f7a9b7a9aea3df0e3175b0b239fe9d72f743fed5
  SHA512:
-   metadata.gz: 84d5def3fa8963f928d2e59239e9c2678cfbe2ba904ad67bc13080a16b861bb5296394c442c14853144d065a4b86f205a8ba378e28302f67f8b80c5eaf0b7100
-   data.tar.gz: b39147eb0fd72f1753af38831871a00f597d0191a70f3f47734f87a2212b7d1500c0b1e712c36755b7ab09b27d66a26ea020f6a2fc413ea9ed9a0e2e15449341
+   metadata.gz: a4c354e1677e76d0d0eb5483fd56b9d0fba7b8d16baf0f1fe450960977e8c18edba2c00d0a650168d280d03784170e078ae77e7914b6451bd20fc1784aea4867
+   data.tar.gz: 25e871eb218789b094a167371560db89edb1be38387969c2c74fa892701c527abb9d1cd9be0b14b41b65e4805cbbab19dd729c56c963b7555f6d7e15136ea20d
data/CHANGELOG.md CHANGED
@@ -4,6 +4,11 @@ Changes and additions to the library will be listed here.
 
  ## Unreleased
 
+ ## v0.3.1
+
+ - Simplify the heartbeat algorithm.
+ - Handle partial messages at the end of message sets received from the brokers.
+
  ## v0.3.0
 
  - Add support for encryption and authentication with SSL (Tom Crayford).
data/README.md CHANGED
@@ -18,11 +18,11 @@ Although parts of this library work with Kafka 0.8 – specifically, the Produce
  5. [Message Delivery Guarantees](#message-delivery-guarantees)
  6. [Compression](#compression)
  2. [Consuming Messages from Kafka](#consuming-messages-from-kafka)
- 3. [Logging](#logging)
- 4. [Understanding Timeouts](#understanding-timeouts)
- 5. [Encryption and Authentication using SSL](#encryption-and-authentication-using-ssl)
- 6. [Development](#development)
- 7. [Roadmap](#roadmap)
+ 3. [Logging](#logging)
+ 4. [Understanding Timeouts](#understanding-timeouts)
+ 5. [Encryption and Authentication using SSL](#encryption-and-authentication-using-ssl)
+ 3. [Development](#development)
+ 4. [Roadmap](#roadmap)
 
  ## Installation
 
data/examples/firehose-consumer.rb ADDED
@@ -0,0 +1,63 @@
+ $LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
+
+ require "kafka"
+
+ KAFKA_CLIENT_CERT = ENV.fetch("KAFKA_CLIENT_CERT")
+ KAFKA_CLIENT_CERT_KEY = ENV.fetch("KAFKA_CLIENT_CERT_KEY")
+ KAFKA_SERVER_CERT = ENV.fetch("KAFKA_SERVER_CERT")
+ KAFKA_URL = ENV.fetch("KAFKA_URL")
+ KAFKA_BROKERS = KAFKA_URL.gsub("kafka+ssl://", "").split(",")
+ KAFKA_TOPIC = "test-messages"
+
+ NUM_THREADS = 4
+
+ queue = Queue.new
+
+ threads = NUM_THREADS.times.map do |worker_id|
+   Thread.new do
+     logger = Logger.new($stderr)
+     logger.level = Logger::INFO
+
+     logger.formatter = proc {|severity, datetime, progname, msg|
+       "[#{worker_id}] #{severity.ljust(5)} -- #{msg}\n"
+     }
+
+     kafka = Kafka.new(
+       seed_brokers: KAFKA_BROKERS,
+       logger: logger,
+       connect_timeout: 30,
+       socket_timeout: 30,
+       ssl_client_cert: KAFKA_CLIENT_CERT,
+       ssl_client_cert_key: KAFKA_CLIENT_CERT_KEY,
+       ssl_ca_cert: KAFKA_SERVER_CERT,
+     )
+
+     consumer = kafka.consumer(group_id: "firehose")
+     consumer.subscribe(KAFKA_TOPIC)
+
+     begin
+       i = 0
+       consumer.each_message do |message|
+         i += 1
+
+         if i % 1000 == 0
+           queue << i
+           i = 0
+         end
+
+         sleep 0.01
+       end
+     ensure
+       consumer.shutdown
+     end
+   end
+ end
+
+ threads.each {|t| t.abort_on_exception = true }
+
+ received_messages = 0
+
+ loop do
+   received_messages += queue.pop
+   puts "===> Received #{received_messages} messages"
+ end
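
The new example fans consumption out across four threads, each with its own `Kafka` client and a consumer in the same `firehose` group, so Kafka spreads the partitions of the `test-messages` topic across the threads. Every 1,000 messages a thread pushes its count onto the shared `Queue`, and the main thread simply sums and prints the running total.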
@@ -1,6 +1,9 @@
  $LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
 
  require "kafka"
+ require "dotenv"
+
+ Dotenv.load
 
  KAFKA_CLIENT_CERT = ENV.fetch("KAFKA_CLIENT_CERT")
  KAFKA_CLIENT_CERT_KEY = ENV.fetch("KAFKA_CLIENT_CERT_KEY")
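
`Dotenv.load` pulls a local `.env` file into `ENV` before the `ENV.fetch` calls below run, so the broker URL and certificates can live in an untracked file during development instead of being exported by hand; where the variables are already set in the environment, the call is a harmless no-op.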
@@ -57,6 +57,9 @@ module Kafka
    @group = group
    @offset_manager = offset_manager
    @session_timeout = session_timeout
+
+   # Send two heartbeats in each session window, just to be sure.
+   @heartbeat_interval = @session_timeout / 2
  end
 
  # Subscribes the consumer to a topic.
@@ -122,6 +125,9 @@ module Kafka
  rescue RebalanceInProgress
    @logger.error "Group is rebalancing; rejoining"
    join_group
+ rescue IllegalGeneration
+   @logger.error "Group has transitioned to a new generation; rejoining"
+   join_group
  end
  end
  end
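
Rescuing `IllegalGeneration` covers the case where the broker has already moved the group on to a new generation: requests tagged with the old generation id are rejected with that error, and the only useful recovery, as with `RebalanceInProgress`, is to rejoin the group and pick up a fresh generation id and partition assignment.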
@@ -197,9 +203,9 @@ module Kafka
  # `session_timeout`.
  #
  def send_heartbeat_if_necessary
-   @last_heartbeat ||= Time.at(0)
+   @last_heartbeat ||= Time.now
 
-   if @last_heartbeat <= Time.now - @session_timeout + 2
+   if Time.now > @last_heartbeat + @heartbeat_interval
      @group.heartbeat
      @last_heartbeat = Time.now
    end
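
Taken together with the `@heartbeat_interval` hunk above, the simplified check just asks whether half a session window has passed since the last heartbeat, rather than deriving a deadline from the session timeout on every call. A minimal standalone sketch of that timing logic (the stand-in `group` object and the 4-second session timeout are illustrative, not code from the gem):

    class HeartbeatTimer
      def initialize(group, session_timeout:)
        @group = group
        # Send two heartbeats in each session window, just to be sure.
        @heartbeat_interval = session_timeout / 2
      end

      # Mirrors the new send_heartbeat_if_necessary: only heartbeat once the
      # interval has fully elapsed since the previous heartbeat.
      def send_heartbeat_if_necessary
        @last_heartbeat ||= Time.now

        if Time.now > @last_heartbeat + @heartbeat_interval
          @group.heartbeat
          @last_heartbeat = Time.now
        end
      end
    end

    group = Object.new
    def group.heartbeat; puts "heartbeat!"; end

    timer = HeartbeatTimer.new(group, session_timeout: 4)
    10.times { timer.send_heartbeat_if_necessary; sleep 1 }  # beats roughly every 2 seconds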
@@ -28,6 +28,10 @@ module Kafka
  end
 
  def join
+   if @topics.empty?
+     raise Kafka::Error, "Cannot join group without at least one topic subscription"
+   end
+
    join_group
    synchronize
  rescue NotCoordinatorForGroup
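
From the application's point of view, the guard means a group member that never subscribed to a topic fails fast with a descriptive error instead of silently joining a group it can do nothing with. Presumably something along these lines now raises as soon as the group is joined (a sketch, not a test from the gem):

    kafka = Kafka.new(seed_brokers: ["kafka1:9092"])
    consumer = kafka.consumer(group_id: "greetings")

    # No consumer.subscribe(...) call, so joining the group raises
    # Kafka::Error: Cannot join group without at least one topic subscription
    consumer.each_message do |message|
      puts message.value
    end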
@@ -42,7 +46,7 @@ module Kafka
  end
 
  def leave
-   @logger.info "[#{@member_id}] Leaving group `#{@group_id}`"
+   @logger.info "Leaving group `#{@group_id}`"
    coordinator.leave_group(group_id: @group_id, member_id: @member_id)
  rescue ConnectionError
  end
@@ -70,7 +74,7 @@ module Kafka
  end
 
  def heartbeat
-   @logger.info "[#{@member_id}] Sending heartbeat..."
+   @logger.info "Sending heartbeat..."
 
    response = coordinator.heartbeat(
      group_id: @group_id,
@@ -99,7 +103,7 @@ module Kafka
  @leader_id = response.leader_id
  @members = response.members
 
- @logger.info "[#{@member_id}] Joined group `#{@group_id}` with member id `#{@member_id}`"
+ @logger.info "Joined group `#{@group_id}` with member id `#{@member_id}`"
  rescue UnknownMemberId
  @logger.error "Failed to join group; resetting member id and retrying in 1s..."
 
@@ -117,7 +121,7 @@ module Kafka
  group_assignment = {}
 
  if group_leader?
-   @logger.info "[#{@member_id}] Chosen as leader of group `#{@group_id}`"
+   @logger.info "Chosen as leader of group `#{@group_id}`"
 
    group_assignment = @assignment_strategy.assign(
      members: @members.keys,
@@ -135,7 +139,7 @@ module Kafka
  Protocol.handle_error(response.error_code)
 
  response.member_assignment.topics.each do |topic, assigned_partitions|
-   @logger.info "[#{@member_id}] Partitions assigned for `#{topic}`: #{assigned_partitions.join(', ')}"
+   @logger.info "Partitions assigned for `#{topic}`: #{assigned_partitions.join(', ')}"
  end
 
  @assigned_partitions.replace(response.member_assignment.topics)
@@ -27,13 +27,17 @@ module Kafka
  fetched_messages = []
 
  until decoder.eof?
-   message = Message.decode(decoder)
-
-   if message.compressed?
-     wrapped_message_set = message.decompress
-     fetched_messages.concat(wrapped_message_set.messages)
-   else
-     fetched_messages << message
+   begin
+     message = Message.decode(decoder)
+
+     if message.compressed?
+       wrapped_message_set = message.decompress
+       fetched_messages.concat(wrapped_message_set.messages)
+     else
+       fetched_messages << message
+     end
+   rescue EOFError
+     # We tried to decode a partial message; just skip it.
    end
  end
 
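
Partial messages exist because a fetch response contains at most the requested number of bytes per partition, so the last message in a message set is frequently cut off mid-way; the client is expected to drop the fragment and receive it in full on the next fetch from the same offset. A self-contained sketch of the same skip-the-truncated-tail idea, using a made-up length-prefixed format rather than Kafka's actual wire protocol:

    require "stringio"

    # Decodes length-prefixed payloads (4-byte big-endian size, then data) and
    # silently drops a truncated final entry, like the rescue above.
    def decode_messages(buffer)
      io = StringIO.new(buffer)
      messages = []

      until io.eof?
        begin
          header = io.read(4)
          raise EOFError if header.nil? || header.bytesize < 4
          size = header.unpack1("N")

          payload = io.read(size)
          raise EOFError if payload.nil? || payload.bytesize < size

          messages << payload
        rescue EOFError
          break # the tail was truncated; it will arrive intact in the next fetch
        end
      end

      messages
    end

    complete  = [5, "hello", 5, "world"].pack("Na5Na5")
    truncated = complete + [5, "tru"].pack("Na3")  # final message cut short

    p decode_messages(truncated)  # => ["hello", "world"]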
@@ -1,3 +1,3 @@
  module Kafka
-   VERSION = "0.3.0"
+   VERSION = "0.3.1"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: ruby-kafka
  version: !ruby/object:Gem::Version
-   version: 0.3.0
+   version: 0.3.1
  platform: ruby
  authors:
  - Daniel Schierbeck
@@ -163,6 +163,7 @@ files:
  - bin/console
  - bin/setup
  - circle.yml
+ - examples/firehose-consumer.rb
  - examples/firehose-producer.rb
  - examples/simple-consumer.rb
  - examples/simple-producer.rb