jruby-kafka 0.0.1
- checksums.yaml +7 -0
- data/lib/jruby-kafka.rb +6 -0
- data/lib/jruby-kafka/config.rb +8 -0
- data/lib/jruby-kafka/consumer.rb +27 -0
- data/lib/jruby-kafka/group.rb +98 -0
- data/lib/jruby-kafka/namespace.rb +3 -0
- metadata +50 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 432b700c2a60813577f72680a4d7921959a07ebd
+  data.tar.gz: d99addc2c02cba26700a0c9b7172aeb559aaf082
+SHA512:
+  metadata.gz: 7d460eab892d5f04cbe2a9a61b751a54bc0af22b78b886e02fbd9d0bf2890c4264d683e8dca95112bd2d091816c681a3ba368e52815ffe063df9009882030292
+  data.tar.gz: b32eeb94554fe4d39379c2a283aa995914bbd1ea858aa0b1781477b8c9f6ca31c7af4ecd93cdc45360f01e13c4627cc5e304cb861a334c47e48dab2889d71e1d
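The two entries are the gzipped metadata and the data archive packed inside the .gem tarball; each gets a SHA1 and a SHA512 digest. A minimal verification sketch (not part of the gem), assuming the entries have already been extracted from the .gem tar:

require "digest"

# compare the recorded SHA512 of metadata.gz against a freshly computed digest
recorded = "7d460eab892d5f04cbe2a9a61b751a54bc0af22b78b886e02fbd9d0bf2890c4264d683e8dca95112bd2d091816c681a3ba368e52815ffe063df9009882030292"
puts Digest::SHA512.file("metadata.gz").hexdigest == recorded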
data/lib/jruby-kafka/consumer.rb
ADDED
@@ -0,0 +1,27 @@
+require "java"
+require "jruby-kafka/namespace"
+
+java_import 'kafka.consumer.ConsumerIterator'
+java_import 'kafka.consumer.KafkaStream'
+
+class Kafka::Consumer
+  include Java::JavaLang::Runnable
+  java_signature 'void run()'
+
+  @m_stream
+  @m_threadNumber
+  @m_queue
+
+  def initialize(a_stream, a_threadNumber, a_queue)
+    @m_threadNumber = a_threadNumber
+    @m_stream = a_stream
+    @m_queue = a_queue
+  end
+
+  def run
+    it = @m_stream.iterator()
+    while it.hasNext()
+      @m_queue << it.next().message()
+    end
+  end
+end
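Kafka::Consumer is a java.lang.Runnable: each instance drains one KafkaStream into a shared queue, so it is normally handed to an executor rather than run directly. A minimal usage sketch (not part of the gem; `stream` stands in for a KafkaStream obtained from a consumer connector, as Kafka::Group does below):

queue = Queue.new                                 # thread-safe queue shared across consumer threads
consumer = Kafka::Consumer.new(stream, 0, queue)  # thread number 0; `stream` is an assumed KafkaStream
Thread.new { consumer.run }                       # or submit to a java.util.concurrent executor
message = queue.pop                               # blocks until a message arrives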
data/lib/jruby-kafka/group.rb
ADDED
@@ -0,0 +1,98 @@
+# basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/Consumer+Group+Example
+
+require "java"
+
+require "jruby-kafka/namespace"
+require "jruby-kafka/consumer"
+
+java_import 'java.util.concurrent.ExecutorService'
+java_import 'java.util.concurrent.Executors'
+
+class Kafka::Group
+  @consumer
+  @executor
+  @topic
+  @auto_offset_reset
+  @zk_connect
+  @group_id
+
+  # Create a Kafka client group
+  #
+  # options:
+  # :zk_connect_opt => "localhost:2181" - REQUIRED: The connection string for the
+  #   zookeeper connection in the form host:port. Multiple URLS can be given to allow fail-over.
+  # :group_id_opt => "group" - REQUIRED: The group id to consume on.
+  # :topic_id_opt => "topic" - REQUIRED: The topic id to consume on.
+  # :reset_beginning_opt => "from-beginning" - (optional) If the consumer does not already have an established offset
+  #   to consume from, start with the earliest message present in the log rather than the latest message.
+  def initialize(options={})
+    validate_required_arguments(options)
+
+    @zk_connect = options[:zk_connect_opt]
+    @group_id = options[:group_id_opt]
+    @topic = options[:topic_id_opt]
+
+
+    if options[:reset_beginning_opt]
+      if options[:reset_beginning_opt] == 'from-beginning'
+        @auto_offset_reset = 'smallest'
+      else
+        @auto_offset_reset = 'largest'
+      end
+    else
+      @auto_offset_reset = 'largest'
+    end
+
+    if @auto_offset_reset == 'smallest'
+      Java::kafka::utils::ZkUtils.maybeDeletePath(@zk_connect, "/consumers/#{@group_id}")
+    end
+
+    @consumer = Java::kafka::consumer::Consumer.createJavaConsumerConnector(createConsumerConfig())
+  end
+
+  private
+  def validate_required_arguments(options={})
+    [:zk_connect_opt, :group_id_opt, :topic_id_opt].each do |opt|
+      raise(ArgumentError, "#{opt} is required.") unless options[opt]
+    end
+  end
+
+  public
+  def shutdown()
+    if @consumer
+      @consumer.shutdown()
+    end
+    if @executor
+      @executor.shutdown()
+    end
+  end
+
+  public
+  def run(a_numThreads, a_queue)
+    topicCountMap = java.util.HashMap.new()
+    thread_value = a_numThreads.to_java Java::int
+    topicCountMap.put(@topic, thread_value)
+    consumerMap = @consumer.createMessageStreams(topicCountMap)
+    streams = Array.new(consumerMap[@topic])
+
+    @executor = Executors.newFixedThreadPool(a_numThreads)
+
+    threadNumber = 0
+    for stream in streams
+      @executor.submit(Kafka::Consumer.new(stream, threadNumber, a_queue))
+      threadNumber += 1
+    end
+  end
+
+  private
+  def createConsumerConfig()
+    properties = java.util.Properties.new()
+    properties.put("zookeeper.connect", @zk_connect)
+    properties.put("group.id", @group_id)
+    properties.put("zookeeper.session.timeout.ms", "400")
+    properties.put("zookeeper.sync.time.ms", "200")
+    properties.put("auto.commit.interval.ms", "1000")
+    properties.put("auto.offset.reset", @auto_offset_reset)
+    return Java::kafka::consumer::ConsumerConfig.new(properties)
+  end
+end
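Putting the two classes together: Kafka::Group owns the ZooKeeper-backed consumer connector and fans the topic's streams out to Kafka::Consumer threads. A hedged end-to-end sketch (not shipped with the gem; the ZooKeeper address, group id, and topic below are placeholder values):

require "jruby-kafka"

queue = Queue.new
group = Kafka::Group.new(
  :zk_connect_opt      => "localhost:2181",
  :group_id_opt        => "logstash",
  :topic_id_opt        => "test",
  :reset_beginning_opt => "from-beginning"  # optional: rewind to the earliest offset
)

group.run(2, queue)         # two consumer threads feeding `queue`
5.times { puts queue.pop }  # pop blocks until a message arrives
group.shutdown              # stops the connector and the thread pool

Note that `from-beginning` also deletes the group's ZooKeeper offset path before connecting, so the rewind takes effect even if the group had previously committed offsets.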
metadata
ADDED
@@ -0,0 +1,50 @@
+--- !ruby/object:Gem::Specification
+name: jruby-kafka
+version: !ruby/object:Gem::Version
+  version: 0.0.1
+platform: ruby
+authors:
+- Joseph Lawson
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2013-10-02 00:00:00.000000000 Z
+dependencies: []
+description: this is primarily to be used as an interface for logstash
+email:
+- joe@joekiller.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- lib/jruby-kafka.rb
+- lib/jruby-kafka/config.rb
+- lib/jruby-kafka/consumer.rb
+- lib/jruby-kafka/group.rb
+- lib/jruby-kafka/namespace.rb
+homepage: https://github.com/joekiller/jruby-kafka
+licenses:
+- Apache 2.0
+metadata: {}
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.0.3
+signing_key:
+specification_version: 4
+summary: jruby Kafka wrapper
+test_files: []
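The YAML above is what `gem build` serializes from the project's gemspec. A speculative reconstruction (the actual jruby-kafka.gemspec is not part of this listing; field values are copied from the metadata, and the file-list glob is an assumption):

Gem::Specification.new do |s|
  s.name        = "jruby-kafka"
  s.version     = "0.0.1"
  s.authors     = ["Joseph Lawson"]
  s.email       = ["joe@joekiller.com"]
  s.homepage    = "https://github.com/joekiller/jruby-kafka"
  s.licenses    = ["Apache 2.0"]
  s.summary     = "jruby Kafka wrapper"
  s.description = "this is primarily to be used as an interface for logstash"
  s.files       = Dir["lib/**/*.rb"]  # would expand to the five files listed above
end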