jruby-kafka 0.2.0-java → 0.2.1-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: 25c550cc97c9b34ca77bd8b4b6e7db5fa18a6835
-   data.tar.gz: c20ccd7aab41df5a3738239b4beaba87db81df29
+   metadata.gz: c112e1cf70476e795e3f9eed4d2e5c4f770d8534
+   data.tar.gz: 628055947aa72d34e19f9a59addc5e83f831643a
  SHA512:
-   metadata.gz: 375d163f4a8a12828c83911c7763a24e70c7e3e4e1ceaa0eb1882543f18b3a1a370b27fcd24ab818484cd1c2f335d7d4da5c69e9eddd0634187bb04a24ce3055
-   data.tar.gz: 4533509f03e2f893e472cf9026d284a08e3e5999be1f6fb6ddfbc078deb58e5e31fc90b71145150ec59478b478e98edb844467c42ac6756a742c7a723feaf755
+   metadata.gz: d786ffd66d26ceb1de0406d28f5598e1f7f734e42c2c6ed65c30f76a9cbdabb9da817d6160bdfc4433488541a414b55acac44992463841b9952b16293fbf2dff
+   data.tar.gz: 059b12bec092caf78a7cd1b783eb783b615995e2f76a81ed81427d883e4a825c6a5d61d04eb5805465062840147d27d6abfdcac9ab87b2dcf6396bbd0bce3baa

data/lib/jruby-kafka.iml ADDED
@@ -0,0 +1,13 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <module type="RUBY_MODULE" version="4">
+   <component name="CompassSettings">
+     <option name="compassSupportEnabled" value="true" />
+   </component>
+   <component name="NewModuleRootManager" inherit-compiler-output="true">
+     <exclude-output />
+     <content url="file://$MODULE_DIR$" />
+     <orderEntry type="inheritedJdk" />
+     <orderEntry type="sourceFolder" forTests="false" />
+   </component>
+ </module>
+

data/lib/jruby-kafka.rb CHANGED
@@ -7,12 +7,12 @@
  #
  if not defined? JBUNDLER_CLASSPATH and ENV['KAFKA_PATH']
    require 'jruby-kafka/loader'
-   Kafka.load_jars()
+   Kafka.load_jars
  end

- require "jruby-kafka/consumer"
- require "jruby-kafka/group"
- require "jruby-kafka/producer"
+ require 'jruby-kafka/consumer'
+ require 'jruby-kafka/group'
+ require 'jruby-kafka/producer'

  module Kafka
  end
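
As the branch above shows, the gem only loads the Kafka jars itself when JBundler is not managing the classpath and KAFKA_PATH is set. A minimal sketch of that path; the /opt/kafka location is a hypothetical local Kafka install, not something the gem ships:

    # KAFKA_PATH must point at a directory whose jars Kafka.load_jars can find.
    ENV['KAFKA_PATH'] = '/opt/kafka'   # hypothetical install path
    require 'jruby-kafka'              # falls into the Kafka.load_jars branch above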

data/lib/jruby-kafka/consumer.rb CHANGED
@@ -1,33 +1,34 @@
- require "java"
- require "jruby-kafka/namespace"
-
- java_import 'kafka.consumer.ConsumerIterator'
- java_import 'kafka.consumer.KafkaStream'
- java_import 'kafka.common.ConsumerRebalanceFailedException'
- java_import 'kafka.consumer.ConsumerTimeoutException'
+ require 'java'
+ require 'jruby-kafka/namespace'

+ # noinspection JRubyStringImportInspection
  class Kafka::Consumer
+   java_import 'kafka.consumer.ConsumerIterator'
+   java_import 'kafka.consumer.KafkaStream'
+   java_import 'kafka.common.ConsumerRebalanceFailedException'
+   java_import 'kafka.consumer.ConsumerTimeoutException'
+
    include Java::JavaLang::Runnable
    java_signature 'void run()'

    @m_stream
-   @m_threadNumber
+   @m_thread_number
    @m_queue

-   def initialize(a_stream, a_threadNumber, a_queue, a_bool_restart_on_exception, a_sleep_ms)
-     @m_threadNumber = a_threadNumber
+   def initialize(a_stream, a_thread_number, a_queue, restart_on_exception, a_sleep_ms)
+     @m_thread_number = a_thread_number
      @m_stream = a_stream
      @m_queue = a_queue
-     @m_restart_on_exception = a_bool_restart_on_exception
+     @m_restart_on_exception = restart_on_exception
      @m_sleep_ms = 1.0 / 1000.0 * Float(a_sleep_ms)
    end

    def run
-     it = @m_stream.iterator()
+     it = @m_stream.iterator
      begin
-       while it.hasNext()
+       while it.hasNext
          begin
-           @m_queue << it.next().message()
+           @m_queue << it.next.message
          end
        end
      rescue Exception => e
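
Kafka::Consumer is a java.lang.Runnable that drains one KafkaStream into a shared queue, so it is normally built by Kafka::Group and handed to a thread pool. A sketch of that wiring, assuming a `stream` already obtained from createMessageStreams as in Kafka::Group#run below:

    require 'jruby-kafka'
    require 'thread'

    # `stream` is assumed to be one kafka.consumer.KafkaStream taken from
    # the map returned by createMessageStreams.
    queue = SizedQueue.new(20)
    # args: stream, thread number, sink queue, restart-on-exception flag,
    # and milliseconds to sleep before restarting after an error
    consumer = Kafka::Consumer.new(stream, 0, queue, true, 1000)

    executor = java.util.concurrent.Executors.newFixedThreadPool(1)
    # resolve the Runnable overload of submit, as Kafka::Group does
    submit = executor.java_method(:submit, [java.lang.Runnable.java_class])
    submit.call(consumer)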

data/lib/jruby-kafka/error.rb CHANGED
@@ -1,4 +1,4 @@
- require "jruby-kafka/namespace"
+ require 'jruby-kafka/namespace'

  class KafkaError < StandardError
    attr_reader :object

data/lib/jruby-kafka/group.rb CHANGED
@@ -1,16 +1,17 @@
  # basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/Consumer+Group+Example

- require "java"
+ require 'java'

- require "jruby-kafka/namespace"
- require "jruby-kafka/consumer"
- require "jruby-kafka/error"
-
- java_import 'java.util.concurrent.ExecutorService'
- java_import 'java.util.concurrent.Executors'
- java_import 'org.I0Itec.zkclient.exception.ZkException'
+ require 'jruby-kafka/namespace'
+ require 'jruby-kafka/consumer'
+ require 'jruby-kafka/error'

+ # noinspection JRubyStringImportInspection
  class Kafka::Group
+   java_import 'java.util.concurrent.ExecutorService'
+   java_import 'java.util.concurrent.Executors'
+   java_import 'org.I0Itec.zkclient.exception.ZkException'
+
    @consumer
    @executor
    @topic
@@ -136,81 +137,80 @@ class Kafka::Group
      end
    end

-   private
-   def validate_required_arguments(options={})
-     [:zk_connect, :group_id, :topic_id].each do |opt|
-       raise(ArgumentError, "#{opt} is required.") unless options[opt]
-     end
-   end
-
    public
-   def shutdown()
+
+   def shutdown
      if @consumer
-       @consumer.shutdown()
+       @consumer.shutdown
      end
      if @executor
-       @executor.shutdown()
+       @executor.shutdown
      end
      @running = false
    end

-   public
-   def run(a_numThreads, a_queue)
+   def run(a_num_threads, a_queue)
      begin
        if @auto_offset_reset == 'smallest'
          Java::kafka::utils::ZkUtils.maybeDeletePath(@zk_connect, "/consumers/#{@group_id}")
        end

-       @consumer = Java::kafka::consumer::Consumer.createJavaConsumerConnector(createConsumerConfig())
+       @consumer = Java::kafka::consumer::Consumer.createJavaConsumerConnector(create_consumer_config)
      rescue ZkException => e
        raise KafkaError.new(e), "Got ZkException: #{e}"
      end
-     topicCountMap = java.util.HashMap.new()
-     thread_value = a_numThreads.to_java Java::int
-     topicCountMap.put(@topic, thread_value)
-     consumerMap = @consumer.createMessageStreams(topicCountMap)
-     streams = Array.new(consumerMap[@topic])
+     topic_count_map = java.util.HashMap.new
+     thread_value = a_num_threads.to_java Java::int
+     topic_count_map.put(@topic, thread_value)
+     consumer_map = @consumer.createMessageStreams(topic_count_map)
+     streams = Array.new(consumer_map[@topic])

-     @executor = Executors.newFixedThreadPool(a_numThreads)
+     @executor = Executors.newFixedThreadPool(a_num_threads)
      @executor_submit = @executor.java_method(:submit, [Java::JavaLang::Runnable.java_class])

-     threadNumber = 0
-     for stream in streams
-       @executor_submit.call(Kafka::Consumer.new(stream, threadNumber, a_queue, @consumer_restart_on_error, @consumer_restart_sleep_ms))
-       threadNumber += 1
+     thread_number = 0
+     streams.each do |stream|
+       @executor_submit.call(Kafka::Consumer.new(stream, thread_number, a_queue, @consumer_restart_on_error, @consumer_restart_sleep_ms))
+       thread_number += 1
      end
      @running = true
    end

-   public
    def running?
      @running
    end

    private
-   def createConsumerConfig()
-     properties = java.util.Properties.new()
-     properties.put("zookeeper.connect", @zk_connect)
-     properties.put("group.id", @group_id)
-     properties.put("zookeeper.connection.timeout.ms", @zk_connect_timeout)
-     properties.put("zookeeper.session.timeout.ms", @zk_session_timeout)
-     properties.put("zookeeper.sync.time.ms", @zk_sync_time)
-     properties.put("auto.commit.interval.ms", @auto_commit_interval)
-     properties.put("auto.offset.reset", @auto_offset_reset)
-     properties.put("rebalance.max.retries", @rebalance_max_retries)
-     properties.put("rebalance.backoff.ms", @rebalance_backoff_ms)
-     properties.put("socket.timeout.ms", @socket_timeout_ms)
-     properties.put("socket.receive.buffer.bytes", @socket_receive_buffer_bytes)
-     properties.put("fetch.message.max.bytes", @fetch_message_max_bytes)
-     properties.put("auto.commit.enable", @auto_commit_enable)
-     properties.put("queued.max.message.chunks", @queued_max_message_chunks)
-     properties.put("fetch.min.bytes", @fetch_min_bytes)
-     properties.put("fetch.wait.max.ms", @fetch_wait_max_ms)
-     properties.put("refresh.leader.backoff.ms", @refresh_leader_backoff_ms)
-     properties.put("consumer.timeout.ms", @consumer_timeout_ms)
+
+   def validate_required_arguments(options={})
+     [:zk_connect, :group_id, :topic_id].each do |opt|
+       raise(ArgumentError, "#{opt} is required.") unless options[opt]
+     end
+   end
+
+   def create_consumer_config
+     properties = java.util.Properties.new
+     properties.put('zookeeper.connect', @zk_connect)
+     properties.put('group.id', @group_id)
+     properties.put('zookeeper.connection.timeout.ms', @zk_connect_timeout)
+     properties.put('zookeeper.session.timeout.ms', @zk_session_timeout)
+     properties.put('zookeeper.sync.time.ms', @zk_sync_time)
+     properties.put('auto.commit.interval.ms', @auto_commit_interval)
+     properties.put('auto.offset.reset', @auto_offset_reset)
+     properties.put('rebalance.max.retries', @rebalance_max_retries)
+     properties.put('rebalance.backoff.ms', @rebalance_backoff_ms)
+     properties.put('socket.timeout.ms', @socket_timeout_ms)
+     properties.put('socket.receive.buffer.bytes', @socket_receive_buffer_bytes)
+     properties.put('fetch.message.max.bytes', @fetch_message_max_bytes)
+     properties.put('auto.commit.enable', @auto_commit_enable)
+     properties.put('queued.max.message.chunks', @queued_max_message_chunks)
+     properties.put('fetch.min.bytes', @fetch_min_bytes)
+     properties.put('fetch.wait.max.ms', @fetch_wait_max_ms)
+     properties.put('refresh.leader.backoff.ms', @refresh_leader_backoff_ms)
+     properties.put('consumer.timeout.ms', @consumer_timeout_ms)
      unless @consumer_id.nil?
        properties.put('consumer.id', @consumer_id)
      end
-     return Java::kafka::consumer::ConsumerConfig.new(properties)
+     Java::kafka::consumer::ConsumerConfig.new(properties)
    end
  end
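
Put together, Kafka::Group wires a ZooKeeper-coordinated consumer group to a pool of Kafka::Consumer threads that all feed one queue. A minimal usage sketch following the Consumer Group Example the file references; the ZooKeeper address, group id, and topic name are placeholders:

    require 'jruby-kafka'
    require 'thread'

    queue = SizedQueue.new(20)
    group = Kafka::Group.new(
      :zk_connect => 'localhost:2181', # required
      :group_id   => 'example-group',  # required
      :topic_id   => 'example-topic'   # required
    )
    group.run(2, queue)         # two Kafka::Consumer threads feed the queue
    10.times { puts queue.pop } # consume a few messages
    group.shutdown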

data/lib/jruby-kafka/jruby-kafka.iml ADDED
@@ -0,0 +1,13 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <module type="RUBY_MODULE" version="4">
+   <component name="CompassSettings">
+     <option name="compassSupportEnabled" value="true" />
+   </component>
+   <component name="NewModuleRootManager" inherit-compiler-output="true">
+     <exclude-output />
+     <content url="file://$MODULE_DIR$" />
+     <orderEntry type="inheritedJdk" />
+     <orderEntry type="sourceFolder" forTests="false" />
+   </component>
+ </module>
+

data/lib/jruby-kafka/producer.rb CHANGED
@@ -1,22 +1,24 @@
  # basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/0.8.0+Producer+Example

- require "java"
+ require 'java'

- require "jruby-kafka/namespace"
- require "jruby-kafka/error"
+ require 'jruby-kafka/namespace'
+ require 'jruby-kafka/error'

+ # noinspection JRubyStringImportInspection
  class Kafka::Producer
+   extend Gem::Deprecate
    java_import 'kafka.producer.ProducerConfig'
    java_import 'kafka.producer.KeyedMessage'
-   KafkaProducer = Java::kafka.javaapi.producer.Producer
+   KAFKA_PRODUCER = Java::kafka.javaapi.producer.Producer
    java_import 'kafka.message.NoCompressionCodec'
    java_import 'kafka.message.GZIPCompressionCodec'
    java_import 'kafka.message.SnappyCompressionCodec'

    VALIDATIONS = {
-     'request.required.acks' => %w[ 0 1 -1 ],
-     'required.codecs' => [NoCompressionCodec.name, GZIPCompressionCodec.name, SnappyCompressionCodec.name],
-     'producer.type' => %w[ sync async ]
+     :'request.required.acks' => %w[ 0 1 -1 ],
+     :'required.codecs' => [NoCompressionCodec.name, GZIPCompressionCodec.name, SnappyCompressionCodec.name],
+     :'producer.type' => %w[ sync async ]
    }

    REQUIRED = %w[
@@ -42,33 +44,40 @@ class Kafka::Producer
    # options:
    #   metadata_broker_list: ["localhost:9092"] - REQUIRED: a seed list of kafka brokers
    def initialize(opts = {})
-     @options = opts.reduce({}) do |opts, (k, v)|
-       opts[k.to_s.gsub(/_/, '.')] = v
-       opts
+     @options = opts.reduce({}) do |opts_array, (k, v)|
+       unless v.nil?
+         opts_array[k.to_s.gsub(/_/, '.')] = v
+       end
+       opts_array
      end
      if options['broker.list']
        options['metadata.broker.list'] = options.delete 'broker.list'
      end
-     if options['compressed.topics'].to_s == 'none'
-       options.delete 'compressed.topics'
-     end
      if options['metadata.broker.list'].is_a? Array
        options['metadata.broker.list'] = options['metadata.broker.list'].join(',')
      end
+     if options['compressed.topics'].is_a? Array
+       options['compressed.topics'] = options['compressed.topics'].join(',')
+     end
      validate_arguments
-     @send_method = proc { throw StandardError.new "Producer is not connected" }
+     @send_method = proc { throw StandardError.new 'Producer is not connected' }
    end

    def connect
-     @producer = KafkaProducer.new(createProducerConfig)
+     @producer = KAFKA_PRODUCER.new(create_producer_config)
      @send_method = producer.java_method :send, [KeyedMessage]
    end

    # throws FailedToSendMessageException or if not connected, StandardError.
-   def sendMsg(topic, key, msg)
+   def send_msg(topic, key, msg)
      send_method.call(KeyedMessage.new(topic, key, msg))
    end

+   def sendMsg(topic, key, msg)
+     send_msg(topic, key, msg)
+   end
+   deprecate :sendMsg, :send_msg, 2015, 01
+
    def close
      @producer.close
    end
@@ -87,9 +96,9 @@ class Kafka::Producer
      end
    end

-   def createProducerConfig
-     properties = java.util.Properties.new()
+   def create_producer_config
+     properties = java.util.Properties.new
      options.each { |opt, value| properties.put opt, value.to_s }
-     return ProducerConfig.new(properties)
+     ProducerConfig.new(properties)
    end
  end
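
The producer API now prefers the snake_case send_msg; the old camelCase sendMsg still works but warns through Gem::Deprecate. A minimal sketch of the 0.2.1 producer flow; the broker address, topic name, and serializer choice are placeholders, not gem defaults:

    require 'jruby-kafka'

    producer = Kafka::Producer.new(
      :metadata_broker_list => ['localhost:9092'],              # required seed brokers
      :serializer_class     => 'kafka.serializer.StringEncoder' # send plain strings
    )
    producer.connect   # must precede send_msg, or StandardError is thrown
    producer.send_msg('example-topic', nil, 'hello from jruby-kafka')
    producer.close
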
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: jruby-kafka
  version: !ruby/object:Gem::Version
-   version: 0.2.0
+   version: 0.2.1
  platform: java
  authors:
  - Joseph Lawson
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-09-21 00:00:00.000000000 Z
+ date: 2014-09-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: jbundler
@@ -31,10 +31,12 @@ executables: []
  extensions: []
  extra_rdoc_files: []
  files:
+ - lib/jruby-kafka.iml
  - lib/jruby-kafka.rb
  - lib/jruby-kafka/consumer.rb
  - lib/jruby-kafka/error.rb
  - lib/jruby-kafka/group.rb
+ - lib/jruby-kafka/jruby-kafka.iml
  - lib/jruby-kafka/loader.rb
  - lib/jruby-kafka/namespace.rb
  - lib/jruby-kafka/producer.rb