jruby-kafka 0.2.0-java → 0.2.1-java
- checksums.yaml +4 -4
- data/lib/jruby-kafka.iml +13 -0
- data/lib/jruby-kafka.rb +4 -4
- data/lib/jruby-kafka/consumer.rb +15 -14
- data/lib/jruby-kafka/error.rb +1 -1
- data/lib/jruby-kafka/group.rb +53 -53
- data/lib/jruby-kafka/jruby-kafka.iml +13 -0
- data/lib/jruby-kafka/producer.rb +28 -19
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c112e1cf70476e795e3f9eed4d2e5c4f770d8534
+  data.tar.gz: 628055947aa72d34e19f9a59addc5e83f831643a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d786ffd66d26ceb1de0406d28f5598e1f7f734e42c2c6ed65c30f76a9cbdabb9da817d6160bdfc4433488541a414b55acac44992463841b9952b16293fbf2dff
+  data.tar.gz: 059b12bec092caf78a7cd1b783eb783b615995e2f76a81ed81427d883e4a825c6a5d61d04eb5805465062840147d27d6abfdcac9ab87b2dcf6396bbd0bce3baa
data/lib/jruby-kafka.iml
ADDED
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="RUBY_MODULE" version="4">
+  <component name="CompassSettings">
+    <option name="compassSupportEnabled" value="true" />
+  </component>
+  <component name="NewModuleRootManager" inherit-compiler-output="true">
+    <exclude-output />
+    <content url="file://$MODULE_DIR$" />
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>
+
data/lib/jruby-kafka.rb
CHANGED
@@ -7,12 +7,12 @@
 #
 if not defined? JBUNDLER_CLASSPATH and ENV['KAFKA_PATH']
   require 'jruby-kafka/loader'
-  Kafka.load_jars
+  Kafka.load_jars
 end
 
-require
-require
-require
+require 'jruby-kafka/consumer'
+require 'jruby-kafka/group'
+require 'jruby-kafka/producer'
 
 module Kafka
 end
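The tidied require lines above also show how the gem bootstraps: the Kafka jars come either from JBundler's classpath or from a local Kafka checkout named by KAFKA_PATH. A minimal loading sketch, assuming a local install path (the path below is illustrative only):

```ruby
# Minimal sketch, assuming a local Kafka build at an assumed location.
ENV['KAFKA_PATH'] ||= '/opt/kafka'

# With no JBUNDLER_CLASSPATH defined, requiring the gem falls through to
# Kafka.load_jars and loads the jars found under KAFKA_PATH, then pulls in
# the consumer, group and producer classes.
require 'jruby-kafka'
```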
data/lib/jruby-kafka/consumer.rb
CHANGED
@@ -1,33 +1,34 @@
-require
-require
-
-java_import 'kafka.consumer.ConsumerIterator'
-java_import 'kafka.consumer.KafkaStream'
-java_import 'kafka.common.ConsumerRebalanceFailedException'
-java_import 'kafka.consumer.ConsumerTimeoutException'
+require 'java'
+require 'jruby-kafka/namespace'
 
+# noinspection JRubyStringImportInspection
 class Kafka::Consumer
+  java_import 'kafka.consumer.ConsumerIterator'
+  java_import 'kafka.consumer.KafkaStream'
+  java_import 'kafka.common.ConsumerRebalanceFailedException'
+  java_import 'kafka.consumer.ConsumerTimeoutException'
+
   include Java::JavaLang::Runnable
   java_signature 'void run()'
 
   @m_stream
-  @
+  @m_thread_number
   @m_queue
 
-  def initialize(a_stream,
-    @
+  def initialize(a_stream, a_thread_number, a_queue, restart_on_exception, a_sleep_ms)
+    @m_thread_number = a_thread_number
     @m_stream = a_stream
     @m_queue = a_queue
-    @m_restart_on_exception =
+    @m_restart_on_exception = restart_on_exception
     @m_sleep_ms = 1.0 / 1000.0 * Float(a_sleep_ms)
   end
 
   def run
-    it = @m_stream.iterator
+    it = @m_stream.iterator
     begin
-      while it.hasNext
+      while it.hasNext
         begin
-          @m_queue << it.next
+          @m_queue << it.next.message
         end
       end
     rescue Exception => e
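The one behavioral change in this file is `@m_queue << it.next.message`: the consumer thread now enqueues the raw message payload instead of the iterator's MessageAndMetadata wrapper. A hedged reader-side sketch; the queue setup and byte conversion are assumptions for illustration:

```ruby
# Sketch of the reading side, assuming a queue shared with Kafka::Group.
queue = SizedQueue.new(20)

# ... after Kafka::Group#run has started consumer threads on this queue ...
payload = queue.pop                    # now the payload itself, not MessageAndMetadata
puts String.from_java_bytes(payload)   # Kafka 0.8 payloads arrive as Java byte[]
```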
data/lib/jruby-kafka/error.rb
CHANGED
data/lib/jruby-kafka/group.rb
CHANGED
@@ -1,16 +1,17 @@
 # basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/Consumer+Group+Example
 
-require
+require 'java'
 
-require
-require
-require
-
-java_import 'java.util.concurrent.ExecutorService'
-java_import 'java.util.concurrent.Executors'
-java_import 'org.I0Itec.zkclient.exception.ZkException'
+require 'jruby-kafka/namespace'
+require 'jruby-kafka/consumer'
+require 'jruby-kafka/error'
 
+# noinspection JRubyStringImportInspection
 class Kafka::Group
+  java_import 'java.util.concurrent.ExecutorService'
+  java_import 'java.util.concurrent.Executors'
+  java_import 'org.I0Itec.zkclient.exception.ZkException'
+
   @consumer
   @executor
   @topic
@@ -136,81 +137,80 @@ class Kafka::Group
     end
   end
 
-  private
-  def validate_required_arguments(options={})
-    [:zk_connect, :group_id, :topic_id].each do |opt|
-      raise(ArgumentError, "#{opt} is required.") unless options[opt]
-    end
-  end
-
   public
-
+
+  def shutdown
     if @consumer
-      @consumer.shutdown
+      @consumer.shutdown
     end
     if @executor
-      @executor.shutdown
+      @executor.shutdown
     end
     @running = false
   end
 
-
-  def run(a_numThreads, a_queue)
+  def run(a_num_threads, a_queue)
     begin
       if @auto_offset_reset == 'smallest'
         Java::kafka::utils::ZkUtils.maybeDeletePath(@zk_connect, "/consumers/#{@group_id}")
       end
 
-      @consumer = Java::kafka::consumer::Consumer.createJavaConsumerConnector(
+      @consumer = Java::kafka::consumer::Consumer.createJavaConsumerConnector(create_consumer_config)
     rescue ZkException => e
       raise KafkaError.new(e), "Got ZkException: #{e}"
     end
-
-    thread_value =
-
-
-    streams = Array.new(
+    topic_count_map = java.util.HashMap.new
+    thread_value = a_num_threads.to_java Java::int
+    topic_count_map.put(@topic, thread_value)
+    consumer_map = @consumer.createMessageStreams(topic_count_map)
+    streams = Array.new(consumer_map[@topic])
 
-    @executor = Executors.newFixedThreadPool(
+    @executor = Executors.newFixedThreadPool(a_num_threads)
     @executor_submit = @executor.java_method(:submit, [Java::JavaLang::Runnable.java_class])
 
-
-
-    @executor_submit.call(Kafka::Consumer.new(stream,
-
+    thread_number = 0
+    streams.each do |stream|
+      @executor_submit.call(Kafka::Consumer.new(stream, thread_number, a_queue, @consumer_restart_on_error, @consumer_restart_sleep_ms))
+      thread_number += 1
     end
     @running = true
   end
 
-  public
   def running?
     @running
   end
 
   private
-
-
-
-
-
-
-
-
-    properties.
-    properties.put(
-    properties.put(
-    properties.put(
-    properties.put(
-    properties.put(
-    properties.put(
-    properties.put(
-    properties.put(
-    properties.put(
-    properties.put(
-    properties.put(
+
+  def validate_required_arguments(options={})
+    [:zk_connect, :group_id, :topic_id].each do |opt|
+      raise(ArgumentError, "#{opt} is required.") unless options[opt]
+    end
+  end
+
+  def create_consumer_config
+    properties = java.util.Properties.new
+    properties.put('zookeeper.connect', @zk_connect)
+    properties.put('group.id', @group_id)
+    properties.put('zookeeper.connection.timeout.ms', @zk_connect_timeout)
+    properties.put('zookeeper.session.timeout.ms', @zk_session_timeout)
+    properties.put('zookeeper.sync.time.ms', @zk_sync_time)
+    properties.put('auto.commit.interval.ms', @auto_commit_interval)
+    properties.put('auto.offset.reset', @auto_offset_reset)
+    properties.put('rebalance.max.retries', @rebalance_max_retries)
+    properties.put('rebalance.backoff.ms', @rebalance_backoff_ms)
+    properties.put('socket.timeout.ms', @socket_timeout_ms)
+    properties.put('socket.receive.buffer.bytes', @socket_receive_buffer_bytes)
+    properties.put('fetch.message.max.bytes', @fetch_message_max_bytes)
+    properties.put('auto.commit.enable', @auto_commit_enable)
+    properties.put('queued.max.message.chunks', @queued_max_message_chunks)
+    properties.put('fetch.min.bytes', @fetch_min_bytes)
+    properties.put('fetch.wait.max.ms', @fetch_wait_max_ms)
+    properties.put('refresh.leader.backoff.ms', @refresh_leader_backoff_ms)
+    properties.put('consumer.timeout.ms', @consumer_timeout_ms)
     unless @consumer_id.nil?
       properties.put('consumer.id', @consumer_id)
    end
-
+    Java::kafka::consumer::ConsumerConfig.new(properties)
  end
end
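Taken together, the group changes move consumer-config construction into the private create_consumer_config method and snake_case the run signature. A hedged usage sketch under the 0.2.1 API; the ZooKeeper address, group, and topic names are assumptions for illustration:

```ruby
# Usage sketch for the refactored Kafka::Group; connection details assumed.
require 'jruby-kafka'

queue = SizedQueue.new(20)
group = Kafka::Group.new(
  :zk_connect => 'localhost:2181',   # required
  :group_id   => 'example-group',    # required
  :topic_id   => 'example-topic'     # required
)

group.run(2, queue)                  # note the renamed a_num_threads argument
message = queue.pop                  # payloads pushed by the consumer threads
group.shutdown
```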
data/lib/jruby-kafka/jruby-kafka.iml
ADDED
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="RUBY_MODULE" version="4">
+  <component name="CompassSettings">
+    <option name="compassSupportEnabled" value="true" />
+  </component>
+  <component name="NewModuleRootManager" inherit-compiler-output="true">
+    <exclude-output />
+    <content url="file://$MODULE_DIR$" />
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>
+
data/lib/jruby-kafka/producer.rb
CHANGED
@@ -1,22 +1,24 @@
 # basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/0.8.0+Producer+Example
 
-require
+require 'java'
 
-require
-require
+require 'jruby-kafka/namespace'
+require 'jruby-kafka/error'
 
+# noinspection JRubyStringImportInspection
 class Kafka::Producer
+  extend Gem::Deprecate
   java_import 'kafka.producer.ProducerConfig'
   java_import 'kafka.producer.KeyedMessage'
-
+  KAFKA_PRODUCER = Java::kafka.javaapi.producer.Producer
   java_import 'kafka.message.NoCompressionCodec'
   java_import 'kafka.message.GZIPCompressionCodec'
   java_import 'kafka.message.SnappyCompressionCodec'
 
   VALIDATIONS = {
-    'request.required.acks' => %w[ 0 1 -1 ],
-    'required.codecs' => [NoCompressionCodec.name, GZIPCompressionCodec.name, SnappyCompressionCodec.name],
-    'producer.type' => %w[ sync async ]
+    :'request.required.acks' => %w[ 0 1 -1 ],
+    :'required.codecs' => [NoCompressionCodec.name, GZIPCompressionCodec.name, SnappyCompressionCodec.name],
+    :'producer.type' => %w[ sync async ]
   }
 
   REQUIRED = %w[
@@ -42,33 +44,40 @@ class Kafka::Producer
   # options:
   # metadata_broker_list: ["localhost:9092"] - REQUIRED: a seed list of kafka brokers
   def initialize(opts = {})
-    @options = opts.reduce({}) do |
-
-
+    @options = opts.reduce({}) do |opts_array, (k, v)|
+      unless v.nil?
+        opts_array[k.to_s.gsub(/_/, '.')] = v
+      end
+      opts_array
     end
     if options['broker.list']
       options['metadata.broker.list'] = options.delete 'broker.list'
     end
-    if options['compressed.topics'].to_s == 'none'
-      options.delete 'compressed.topics'
-    end
     if options['metadata.broker.list'].is_a? Array
       options['metadata.broker.list'] = options['metadata.broker.list'].join(',')
     end
+    if options['compressed.topics'].is_a? Array
+      options['compressed.topics'] = options['compressed.topics'].join(',')
+    end
     validate_arguments
-    @send_method = proc { throw StandardError.new
+    @send_method = proc { throw StandardError.new 'Producer is not connected' }
   end
 
   def connect
-    @producer =
+    @producer = KAFKA_PRODUCER.new(create_producer_config)
     @send_method = producer.java_method :send, [KeyedMessage]
   end
 
   # throws FailedToSendMessageException or if not connected, StandardError.
-  def
+  def send_msg(topic, key, msg)
     send_method.call(KeyedMessage.new(topic, key, msg))
   end
 
+  def sendMsg(topic, key, msg)
+    send_msg(topic, key, msg)
+  end
+  deprecate :sendMsg, :send_msg, 2015, 01
+
   def close
     @producer.close
   end
@@ -87,9 +96,9 @@ class Kafka::Producer
     end
   end
 
-  def
-    properties = java.util.Properties.new
+  def create_producer_config
+    properties = java.util.Properties.new
     options.each { |opt, value| properties.put opt, value.to_s }
-
+    ProducerConfig.new(properties)
   end
 end
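The producer's public API change is the rename of sendMsg to send_msg, with the old camelCase name kept as a Gem::Deprecate-wrapped alias slated for removal in 2015-01. A hedged sending sketch; the broker address and topic name are assumptions for illustration:

```ruby
# Usage sketch for the renamed API; broker and topic are assumed values.
require 'jruby-kafka'

producer = Kafka::Producer.new(:metadata_broker_list => ['localhost:9092'])
producer.connect                     # must connect before sending
producer.send_msg('example-topic', nil, 'hello from jruby-kafka')
producer.sendMsg('example-topic', nil, 'still works, but warns')  # deprecated alias
producer.close
```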
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: jruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.2.0
+  version: 0.2.1
 platform: java
 authors:
 - Joseph Lawson
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-09-
+date: 2014-09-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: jbundler
@@ -31,10 +31,12 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
+- lib/jruby-kafka.iml
 - lib/jruby-kafka.rb
 - lib/jruby-kafka/consumer.rb
 - lib/jruby-kafka/error.rb
 - lib/jruby-kafka/group.rb
+- lib/jruby-kafka/jruby-kafka.iml
 - lib/jruby-kafka/loader.rb
 - lib/jruby-kafka/namespace.rb
 - lib/jruby-kafka/producer.rb