ruby-kafka 0.1.0 → 0.1.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +10 -10
- data/lib/kafka/broker.rb +1 -1
- data/lib/kafka/client.rb +2 -1
- data/lib/kafka/connection.rb +27 -8
- data/lib/kafka/protocol/decoder.rb +1 -1
- data/lib/kafka/socket_with_timeout.rb +10 -9
- data/lib/kafka/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: a0a1febaafd77046f2840beb708d3ad0d6d4520b
|
4
|
+
data.tar.gz: 5a5824f5799db3d6f7f00193baa3c3a9b4aa6a93
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 8617964bb38c05859f8fed11c77aa6b906ddf2d4bdd86a4e0ab003e4ba797454f886bf2e9611270de30790a4e24cb0bab94d70274fd020fed148fa81d2868d0a
|
7
|
+
data.tar.gz: acc55fc5c0afcf0c9f46e33b04dcd0ec47a1de7fc6f63819d1ac2cdb4b5c46b347a1e570494aec08fc09993ebfa4ae6fbb260514772a1aa49159c6c8ed1a3e46
|
data/README.md
CHANGED
@@ -1,8 +1,10 @@
|
|
1
|
-
#
|
1
|
+
# ruby-kafka
|
2
|
+
|
3
|
+
[![Circle CI](https://circleci.com/gh/zendesk/ruby-kafka.svg?style=shield)](https://circleci.com/gh/zendesk/ruby-kafka/tree/master)
|
2
4
|
|
3
5
|
A Ruby client library for the Kafka distributed log system. The focus of this library will be operational simplicity, with good logging and metrics that can make debugging issues easier.
|
4
6
|
|
5
|
-
|
7
|
+
Currently, only the Producer API has been implemented, but a fully-fledged Consumer implementation compatible with Kafka 0.9 is on the roadmap.
|
6
8
|
|
7
9
|
## Installation
|
8
10
|
|
@@ -22,9 +24,9 @@ Or install it yourself as:
|
|
22
24
|
|
23
25
|
## Usage
|
24
26
|
|
25
|
-
Currently, only the Producer API is supported. A Kafka 0.9 compatible Consumer API is on the roadmap.
|
26
|
-
|
27
27
|
```ruby
|
28
|
+
require "kafka"
|
29
|
+
|
28
30
|
# A client must be initialized with at least one Kafka broker. Each client keeps
|
29
31
|
# a separate pool of broker connections. Don't use the same client from more than
|
30
32
|
# one thread.
|
@@ -68,16 +70,14 @@ After checking out the repo, run `bin/setup` to install dependencies. Then, run
|
|
68
70
|
|
69
71
|
## Roadmap
|
70
72
|
|
71
|
-
|
73
|
+
The current stable release is v0.1. This release is running in production at Zendesk, but it's still not recommended that you use it when data loss is unacceptable. It will take a little while until all edge cases have been uncovered and handled.
|
72
74
|
|
73
|
-
|
74
|
-
|
75
|
-
We need to actually run this in production for a while before we can say that it won't lose data, so initially the library should only be deployed for non-critical use cases.
|
76
|
-
|
77
|
-
The API may also be changed.
|
75
|
+
The API may still be changed in v0.2.
|
78
76
|
|
79
77
|
### v0.2: Stable Producer API
|
80
78
|
|
79
|
+
Target date: end of February.
|
80
|
+
|
81
81
|
The API should now have stabilized and the library should be battle tested enough to deploy for critical use cases.
|
82
82
|
|
83
83
|
### v1.0: Consumer API
|
data/lib/kafka/broker.rb
CHANGED
@@ -38,7 +38,7 @@ module Kafka
|
|
38
38
|
Protocol.handle_error(partition.partition_error_code)
|
39
39
|
rescue ReplicaNotAvailable
|
40
40
|
# This error can be safely ignored per the protocol specification.
|
41
|
-
@logger.warn "Replica not available for
|
41
|
+
@logger.warn "Replica not available for #{topic.topic_name}/#{partition.partition_id}"
|
42
42
|
end
|
43
43
|
end
|
44
44
|
end
|
data/lib/kafka/client.rb
CHANGED
@@ -3,6 +3,7 @@ require "kafka/producer"
|
|
3
3
|
|
4
4
|
module Kafka
|
5
5
|
class Client
|
6
|
+
DEFAULT_CLIENT_ID = "ruby-kafka"
|
6
7
|
|
7
8
|
# Initializes a new Kafka client.
|
8
9
|
#
|
@@ -17,7 +18,7 @@ module Kafka
|
|
17
18
|
# connections. See {BrokerPool#initialize}.
|
18
19
|
#
|
19
20
|
# @return [Client]
|
20
|
-
def initialize(seed_brokers:, client_id
|
21
|
+
def initialize(seed_brokers:, client_id: DEFAULT_CLIENT_ID, logger:, socket_timeout: nil)
|
21
22
|
@logger = logger
|
22
23
|
|
23
24
|
@broker_pool = BrokerPool.new(
|
data/lib/kafka/connection.rb
CHANGED
@@ -38,7 +38,15 @@ module Kafka
|
|
38
38
|
|
39
39
|
@logger.info "Opening connection to #{@host}:#{@port} with client id #{@client_id}..."
|
40
40
|
|
41
|
-
|
41
|
+
connect
|
42
|
+
end
|
43
|
+
|
44
|
+
def to_s
|
45
|
+
"#{@host}:#{@port}"
|
46
|
+
end
|
47
|
+
|
48
|
+
def connect
|
49
|
+
@socket = SocketWithTimeout.new(@host, @port, connect_timeout: @connect_timeout, timeout: @socket_timeout)
|
42
50
|
|
43
51
|
@encoder = Kafka::Protocol::Encoder.new(@socket)
|
44
52
|
@decoder = Kafka::Protocol::Decoder.new(@socket)
|
@@ -46,20 +54,23 @@ module Kafka
|
|
46
54
|
# Correlation id is initialized to zero and bumped for each request.
|
47
55
|
@correlation_id = 0
|
48
56
|
rescue Errno::ETIMEDOUT => e
|
49
|
-
@logger.error "Timed out while trying to connect to #{
|
57
|
+
@logger.error "Timed out while trying to connect to #{self}: #{e}"
|
50
58
|
raise ConnectionError, e
|
51
59
|
rescue SocketError, Errno::ECONNREFUSED => e
|
52
|
-
@logger.error "Failed to connect to #{
|
60
|
+
@logger.error "Failed to connect to #{self}: #{e}"
|
53
61
|
raise ConnectionError, e
|
54
62
|
end
|
55
63
|
|
56
|
-
def
|
57
|
-
|
64
|
+
def connected?
|
65
|
+
!@socket.nil?
|
58
66
|
end
|
59
67
|
|
60
68
|
def close
|
61
69
|
@logger.debug "Closing socket to #{to_s}"
|
62
|
-
|
70
|
+
|
71
|
+
@socket.close if @socket
|
72
|
+
|
73
|
+
@socket = nil
|
63
74
|
end
|
64
75
|
|
65
76
|
# Sends a request over the connection.
|
@@ -70,6 +81,8 @@ module Kafka
|
|
70
81
|
#
|
71
82
|
# @return [Object] the response that was decoded by `response_class`.
|
72
83
|
def request(api_key, request, response_class)
|
84
|
+
connect unless connected?
|
85
|
+
|
73
86
|
write_request(api_key, request)
|
74
87
|
|
75
88
|
unless response_class.nil?
|
@@ -89,6 +102,12 @@ module Kafka
|
|
89
102
|
end
|
90
103
|
end
|
91
104
|
end
|
105
|
+
rescue Errno::EPIPE, Errno::ECONNRESET, Errno::ETIMEDOUT, EOFError => e
|
106
|
+
@logger.error "Connection error: #{e}"
|
107
|
+
|
108
|
+
close
|
109
|
+
|
110
|
+
raise ConnectionError, "Connection error: #{e}"
|
92
111
|
end
|
93
112
|
|
94
113
|
private
|
@@ -117,7 +136,7 @@ module Kafka
|
|
117
136
|
nil
|
118
137
|
rescue Errno::ETIMEDOUT
|
119
138
|
@logger.error "Timed out while writing request #{@correlation_id}"
|
120
|
-
raise
|
139
|
+
raise
|
121
140
|
end
|
122
141
|
|
123
142
|
# Reads a response from the connection.
|
@@ -142,7 +161,7 @@ module Kafka
|
|
142
161
|
return correlation_id, response
|
143
162
|
rescue Errno::ETIMEDOUT
|
144
163
|
@logger.error "Timed out while waiting for response #{@correlation_id}"
|
145
|
-
raise
|
164
|
+
raise
|
146
165
|
end
|
147
166
|
end
|
148
167
|
end
|
@@ -15,12 +15,15 @@ module Kafka
|
|
15
15
|
#
|
16
16
|
# @param host [String]
|
17
17
|
# @param port [Integer]
|
18
|
-
# @param
|
18
|
+
# @param connect_timeout [Integer] the connection timeout, in seconds.
|
19
|
+
# @param timeout [Integer] the read and write timeout, in seconds.
|
19
20
|
# @raise [Errno::ETIMEDOUT] if the timeout is exceeded.
|
20
|
-
def initialize(host, port, timeout: nil)
|
21
|
+
def initialize(host, port, connect_timeout: nil, timeout: nil)
|
21
22
|
addr = Socket.getaddrinfo(host, nil)
|
22
23
|
sockaddr = Socket.pack_sockaddr_in(port, addr[0][3])
|
23
24
|
|
25
|
+
@timeout = timeout
|
26
|
+
|
24
27
|
@socket = Socket.new(Socket.const_get(addr[0][0]), Socket::SOCK_STREAM, 0)
|
25
28
|
@socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1)
|
26
29
|
|
@@ -32,7 +35,7 @@ module Kafka
|
|
32
35
|
rescue IO::WaitWritable
|
33
36
|
# IO.select will block until the socket is writable or the timeout
|
34
37
|
# is exceeded, whichever comes first.
|
35
|
-
unless IO.select(nil, [@socket], nil,
|
38
|
+
unless IO.select(nil, [@socket], nil, connect_timeout)
|
36
39
|
# IO.select returns nil when the socket is not ready before timeout
|
37
40
|
# seconds have elapsed
|
38
41
|
@socket.close
|
@@ -51,11 +54,10 @@ module Kafka
|
|
51
54
|
# Reads bytes from the socket, possible with a timeout.
|
52
55
|
#
|
53
56
|
# @param num_bytes [Integer] the number of bytes to read.
|
54
|
-
# @param timeout [Integer] the number of seconds to wait before timing out.
|
55
57
|
# @raise [Errno::ETIMEDOUT] if the timeout is exceeded.
|
56
58
|
# @return [String] the data that was read from the socket.
|
57
|
-
def read(num_bytes, timeout: nil)
|
58
|
-
unless IO.select([@socket], nil, nil, timeout)
|
59
|
+
def read(num_bytes)
|
60
|
+
unless IO.select([@socket], nil, nil, @timeout)
|
59
61
|
raise Errno::ETIMEDOUT
|
60
62
|
end
|
61
63
|
|
@@ -65,11 +67,10 @@ module Kafka
|
|
65
67
|
# Writes bytes to the socket, possible with a timeout.
|
66
68
|
#
|
67
69
|
# @param bytes [String] the data that should be written to the socket.
|
68
|
-
# @param timeout [Integer] the number of seconds to wait before timing out.
|
69
70
|
# @raise [Errno::ETIMEDOUT] if the timeout is exceeded.
|
70
71
|
# @return [Integer] the number of bytes written.
|
71
|
-
def write(bytes, timeout: nil)
|
72
|
-
unless IO.select(nil, [@socket], nil, timeout)
|
72
|
+
def write(bytes)
|
73
|
+
unless IO.select(nil, [@socket], nil, @timeout)
|
73
74
|
raise Errno::ETIMEDOUT
|
74
75
|
end
|
75
76
|
|
data/lib/kafka/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: ruby-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.1.0
|
4
|
+
version: 0.1.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Daniel Schierbeck
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2016-02-
|
11
|
+
date: 2016-02-04 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: bundler
|