jruby-kafka 1.7.2-java → 2.0-java

Files changed (30)
  1. checksums.yaml +5 -13
  2. data/lib/com/101tec/zkclient/maven-metadata-local.xml +12 -0
  3. data/lib/com/yammer/metrics/metrics-core/maven-metadata-local.xml +12 -0
  4. data/lib/io/netty/netty/maven-metadata-local.xml +12 -0
  5. data/lib/jline/jline/maven-metadata-local.xml +12 -0
  6. data/lib/jruby-kafka/consumer.rb +107 -33
  7. data/lib/jruby-kafka/error.rb +1 -1
  8. data/lib/jruby-kafka/kafka-producer.rb +23 -5
  9. data/lib/jruby-kafka/namespace.rb +3 -0
  10. data/lib/jruby-kafka/producer.rb +2 -3
  11. data/lib/jruby-kafka.rb +3 -2
  12. data/lib/jruby-kafka_jars.rb +16 -36
  13. data/lib/junit/junit/maven-metadata-local.xml +12 -0
  14. data/lib/log4j/log4j/maven-metadata-local.xml +12 -0
  15. data/lib/net/jpountz/lz4/lz4/maven-metadata-local.xml +12 -0
  16. data/lib/net/sf/jopt-simple/jopt-simple/maven-metadata-local.xml +12 -0
  17. data/lib/org/apache/kafka/kafka-clients/maven-metadata-local.xml +12 -0
  18. data/lib/org/apache/kafka/kafka_2.10/maven-metadata-local.xml +12 -0
  19. data/lib/org/apache/zookeeper/zookeeper/maven-metadata-local.xml +12 -0
  20. data/lib/org/scala-lang/scala-library/maven-metadata-local.xml +12 -0
  21. data/lib/org/slf4j/slf4j-api/maven-metadata-local.xml +12 -0
  22. data/lib/org/slf4j/slf4j-log4j12/maven-metadata-local.xml +12 -0
  23. data/lib/org/xerial/snappy/snappy-java/maven-metadata-local.xml +12 -0
  24. data/lib/rubygems/jar-dependencies/maven-metadata-local.xml +12 -0
  25. data/lib/rubygems/rake/maven-metadata-local.xml +12 -0
  26. data/lib/rubygems/ruby-maven/maven-metadata-local.xml +12 -0
  27. data/lib/rubygems/ruby-maven-libs/maven-metadata-local.xml +12 -0
  28. metadata +47 -43
  29. data/lib/jruby-kafka/group.rb +0 -257
  30. data/lib/jruby-kafka/version.rb +0 -4
checksums.yaml CHANGED
@@ -1,15 +1,7 @@
  ---
- !binary "U0hBMQ==":
-   metadata.gz: !binary |-
-     YTEyMzAyMjc4MTQ0YjIyMDI5MTNlN2EzNzA1Y2VkODA1OWEzNGMwNw==
-   data.tar.gz: !binary |-
-     MjE4OTEyYmU1MDQ4MmYzODg5NTcwMzM4YzMyN2U3M2QxYTQ5OWZiNw==
+ SHA1:
+   metadata.gz: ac687c16b318e83142d9b294a3767a91d1b948c8
+   data.tar.gz: 0cf05c7b5dfde8ba46d7bf367fd1a53855614a21
  SHA512:
-   metadata.gz: !binary |-
-     MTNiYzQ0N2Q3NDBhM2EyZmEyZDI3MjMwMTAwNzVmYWEwZmQ4OTlmMGY3Zjky
-     Nzc2ZjYyMTNlMWY1ZjBhZTY3NjRlMGRhMGUwMTQ3MDExMjczZWFiY2I4MWE1
-     MjhjYTVjOWFjZWIzYTNhZGE4ODNjNDg1OGY4NjAwMzcwM2U2YmQ=
-   data.tar.gz: !binary |-
-     OWM2MGM5NGQ0ZDljNzRlMjExMWI2ODgwYzIyM2FhYmNlMDMwNDVkNWExOTQ4
-     MGRhODQ2M2ViMDllNzJlZTljNjcyMjgzYzliZWEyNzRmZTZmZTlkZTA4Mjk2
-     MDQwNzY5ZDA0Njc0MGMyODUzZDc2ZmNjNTY3ODgwODVjMGY5NjU=
+   metadata.gz: d9035f2bfcd3a193eb26fd1a83de291b4dda0934e7e50d15cb50f96d3da43147f6acb8e9b2c81483dd8e73b643d2178b9dcdaeed90a0b58bf0ffbf562dd7a1ea
+   data.tar.gz: 315a7c39f1b98d4c3c307bc69c892148136d1d320f5bb50a3ac6b3fe2ebb69a288d53649679b1858e256036af19518684d1153c508d7974db93dca7a56961760
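
The checksum format change is rubygems dropping base64-wrapped !binary digests for plain hex. If you want to recompute the SHA512 values above for a downloaded gem, a minimal sketch follows; the gem filename is an assumption, and the two members inspected are the standard metadata.gz and data.tar.gz entries of a .gem tar archive.

    # Sketch: recompute the SHA512 digests listed above for a local gem file.
    # 'jruby-kafka-2.0-java.gem' is an assumed filename.
    require 'digest'
    require 'rubygems/package'

    File.open('jruby-kafka-2.0-java.gem', 'rb') do |gem_file|
      Gem::Package::TarReader.new(gem_file).each do |entry|
        next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
        puts "#{entry.full_name}: #{Digest::SHA512.hexdigest(entry.read)}"
      end
    end
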
data/lib/com/101tec/zkclient/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>com.101tec</groupId>
+   <artifactId>zkclient</artifactId>
+   <versioning>
+     <release>0.3</release>
+     <versions>
+       <version>0.3</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/com/yammer/metrics/metrics-core/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>com.yammer.metrics</groupId>
+   <artifactId>metrics-core</artifactId>
+   <versioning>
+     <release>2.2.0</release>
+     <versions>
+       <version>2.2.0</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/io/netty/netty/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>io.netty</groupId>
+   <artifactId>netty</artifactId>
+   <versioning>
+     <release>3.7.0.Final</release>
+     <versions>
+       <version>3.7.0.Final</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/jline/jline/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>jline</groupId>
+   <artifactId>jline</artifactId>
+   <versioning>
+     <release>0.9.94</release>
+     <versions>
+       <version>0.9.94</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/jruby-kafka/consumer.rb CHANGED
@@ -1,41 +1,115 @@
  require 'java'
- require 'jruby-kafka/version'
+ require 'jruby-kafka/namespace'

- # noinspection JRubyStringImportInspection
  class Kafka::Consumer
-   java_import 'kafka.consumer.ConsumerIterator'
-   java_import 'kafka.consumer.KafkaStream'
-   java_import 'kafka.common.ConsumerRebalanceFailedException'
-   java_import 'kafka.consumer.ConsumerTimeoutException'
-
-   include Java::JavaLang::Runnable
-   java_signature 'void run()'
-
-   def initialize(a_stream, a_thread_number, a_queue, restart_on_exception, a_sleep_ms)
-     @m_thread_number = a_thread_number
-     @m_stream = a_stream
-     @m_queue = a_queue
-     @m_restart_on_exception = restart_on_exception
-     @m_sleep_ms = 1.0 / 1000.0 * Float(a_sleep_ms)
+   # Create a Kafka high-level consumer.
+   #
+   # @param [Hash] config the consumer configuration.
+   #
+   # @option config [String] :zookeeper_connect The connection string for the zookeeper connection in
+   #   the form "host:port[path][,host:port[path]]*". Required.
+   # @option config [String] :group_id The consumer group this instance is a member of. Required.
+   # @option config [String] :topic The topic to consume from.
+   # @option config [String] :include_topics The inclusive (white) regular expression filter matching
+   #   topics to consume from.
+   # @option config [String] :exclude_topics The exclusive (black) regular expression filter matching
+   #   topics not to consume from.
+   # @option config [Integer] :num_streams (1) The number of message streams to create.
+   # @option config [String] :key_decoder ('kafka.serializer.DefaultDecoder') Java class name for
+   #   message key decoder.
+   # @option config [String] :msg_decoder ('kafka.serializer.DefaultDecoder') Java class name for
+   #   message value decoder.
+   #
+   # One and only one of :topic, :include_topics, or :exclude_topics must be provided.
+   #
+   # For other configuration properties and their default values see
+   # https://kafka.apache.org/08/configuration.html#consumerconfigs and
+   # https://github.com/apache/kafka/blob/0.8.2.2/core/src/main/scala/kafka/consumer/ConsumerConfig.scala#L90-L182.
+   #
+   def initialize(config={})
+     validate_arguments config
+
+     @properties = config.clone
+     @topic = @properties.delete :topic
+     @include_topics = @properties.delete :include_topics
+     @exclude_topics = @properties.delete :exclude_topics
+     @num_streams = (@properties.delete(:num_streams) || 1).to_java Java::int
+     @key_decoder = @properties.delete(:key_decoder) || 'kafka.serializer.DefaultDecoder'
+     @msg_decoder = @properties.delete(:msg_decoder) || 'kafka.serializer.DefaultDecoder'
+
+     @consumer = Java::KafkaConsumer::Consumer.createJavaConsumerConnector create_config
    end

-   def run
-     it = @m_stream.iterator
-     begin
-       while it.hasNext
-         begin
-           @m_queue << it.next
-         end
-       end
-     rescue Exception => e
-       puts("#{self.class.name} caught exception: #{e.class.name}")
-       puts(e.message) if e.message != ''
-       if @m_restart_on_exception
-         sleep(@m_sleep_ms)
-         retry
-       else
-         raise e
-       end
+   # Start fetching messages.
+   #
+   # @return [Array<Java::KafkaConsumer::KafkaStream>] list of streams, as specified by the
+   #   :num_streams configuration parameter. A stream is essentially a queue of incoming messages
+   #   from Kafka topic partitions.
+   #
+   # @see http://apache.osuosl.org/kafka/0.8.2.2/scaladoc/index.html#kafka.consumer.KafkaStream
+   #
+   # @note KafkaStream instances are not thread-safe.
+   def message_streams
+     key_decoder_i = Java::JavaClass.for_name(@key_decoder).
+       constructor('kafka.utils.VerifiableProperties').new_instance nil
+     msg_decoder_i = Java::JavaClass.for_name(@msg_decoder).
+       constructor('kafka.utils.VerifiableProperties').new_instance nil
+
+     if @topic
+       topic_count_map = java.util.HashMap.new @topic => @num_streams
+       @consumer.
+         createMessageStreams(topic_count_map, key_decoder_i, msg_decoder_i)[@topic].
+         to_a
+
+     else
+       filter = @include_topics ?
+         Java::KafkaConsumer::Whitelist.new(@include_topics) :
+         Java::KafkaConsumer::Blacklist.new(@exclude_topics)
+
+       @consumer.
+         createMessageStreamsByFilter(filter, @num_streams, key_decoder_i, msg_decoder_i).
+         to_a
+
      end
    end
+
+   # Commit the offsets of all topic partitions connected by this consumer.
+   #
+   # Useful for when the :auto_commit_enable configuration parameter is false.
+   #
+   # @return void
+   def commitOffsets
+     @consumer.commitOffsets
+   end
+
+   # Shutdown the consumer.
+   #
+   # @return void
+   def shutdown
+     @consumer.shutdown if @consumer
+     nil
+   end
+
+   private
+
+   def validate_arguments(options)
+     [:zookeeper_connect, :group_id].each do |opt|
+       raise ArgumentError, "Parameter :#{opt} is required." unless options[opt]
+     end
+
+     unless [ options[:topic], options[:include_topics], options[:exclude_topics] ].one?
+       raise ArgumentError, "Exactly one of :topic, :include_topics, :exclude_topics is required."
+     end
+   end
+
+   def create_config
+     properties = java.util.Properties.new
+     @properties.each do |k,v|
+       k = k.to_s.gsub '_', '.'
+       v = v.to_s
+       properties.setProperty k, v
+     end
+     Java::KafkaConsumer::ConsumerConfig.new properties
+   end
  end
+
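
The rewrite above replaces the Runnable-per-stream consumer with an explicit message_streams API. A minimal usage sketch; host, group, and topic names are placeholders, and the per-stream threading is one possible consumption pattern, not part of the gem:

    require 'jruby-kafka'

    consumer = Kafka::Consumer.new(
      :zookeeper_connect => 'localhost:2181',  # placeholder
      :group_id          => 'my-group',        # placeholder
      :topic             => 'my-topic',        # placeholder
      :num_streams       => 2
    )

    # Each KafkaStream is a Java Iterable of MessageAndMetadata; consume each
    # on its own thread, since streams are not thread-safe.
    threads = consumer.message_streams.map do |stream|
      Thread.new do
        stream.each { |m| puts String.from_java_bytes(m.message) }
      end
    end

    threads.each(&:join)
    consumer.shutdown
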
data/lib/jruby-kafka/error.rb CHANGED
@@ -1,4 +1,4 @@
- require 'jruby-kafka/version'
+ require 'jruby-kafka/namespace'

  class KafkaError < StandardError
    attr_reader :object
data/lib/jruby-kafka/kafka-producer.rb CHANGED
@@ -1,9 +1,10 @@
- require 'jruby-kafka/version'
+ require 'jruby-kafka/namespace'
  require 'jruby-kafka/error'

  # noinspection JRubyStringImportInspection
  class Kafka::KafkaProducer
    java_import 'org.apache.kafka.clients.producer.ProducerRecord'
+   java_import 'org.apache.kafka.clients.producer.Callback'
    KAFKA_PRODUCER = Java::org.apache.kafka.clients.producer.KafkaProducer

    VALIDATIONS = {
@@ -27,7 +28,19 @@ class Kafka::KafkaProducer
      send.buffer.bytes timeout.ms value.serializer
    ]

-   attr_reader :producer, :send_method, :options
+   class RubyCallback
+     include Callback
+
+     def initialize(cb)
+       @cb = cb
+     end
+
+     def onCompletion(metadata, exception)
+       @cb.call(metadata, exception)
+     end
+   end
+
+   attr_reader :producer, :send_method, :send_cb_method, :options

    def initialize(opts = {})
      @options = opts.reduce({}) do |opts_array, (k, v)|
@@ -37,17 +50,22 @@ class Kafka::KafkaProducer
        opts_array
      end
      validate_arguments
-     @send_method = proc { throw StandardError.new 'Producer is not connected' }
+     @send_method = @send_cb_method = proc { throw StandardError.new 'Producer is not connected' }
    end

    def connect
      @producer = KAFKA_PRODUCER.new(create_producer_config)
      @send_method = producer.java_method :send, [ProducerRecord]
+     @send_cb_method = producer.java_method :send, [ProducerRecord, Callback]
    end

    # throws FailedToSendMessageException or if not connected, StandardError.
-   def send_msg(topic, partition, key, value)
-     send_method.call(ProducerRecord.new(topic, partition, key, value))
+   def send_msg(topic, partition, key, value, &block)
+     if block
+       send_cb_method.call(ProducerRecord.new(topic, partition, key, value), RubyCallback.new(block))
+     else
+       send_method.call(ProducerRecord.new(topic, partition, key, value))
+     end
    end

    def close
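
The new &block parameter on send_msg exposes the Java producer's asynchronous Callback: RubyCallback adapts a Ruby proc to the interface, and onCompletion receives a RecordMetadata plus an exception (nil on success). A hedged usage sketch; the broker address is a placeholder, and the underscore option keys mirroring the standard kafka-clients producer configs are an assumption:

    producer = Kafka::KafkaProducer.new(
      :bootstrap_servers => 'localhost:9092',  # placeholder broker
      :key_serializer    => 'org.apache.kafka.common.serialization.StringSerializer',
      :value_serializer  => 'org.apache.kafka.common.serialization.StringSerializer'
    )
    producer.connect

    producer.send_msg('my-topic', nil, nil, 'hello') do |metadata, exception|
      if exception
        warn "send failed: #{exception}"
      else
        puts "acked at partition #{metadata.partition}, offset #{metadata.offset}"
      end
    end
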
data/lib/jruby-kafka/namespace.rb ADDED
@@ -0,0 +1,3 @@
+ module Kafka
+
+ end
data/lib/jruby-kafka/producer.rb CHANGED
@@ -1,5 +1,5 @@
  # basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/0.8.0+Producer+Example
- require 'jruby-kafka/version'
+ require 'jruby-kafka/namespace'
  require 'jruby-kafka/error'

  # noinspection JRubyStringImportInspection
@@ -11,11 +11,10 @@ class Kafka::Producer
    java_import 'kafka.message.NoCompressionCodec'
    java_import 'kafka.message.GZIPCompressionCodec'
    java_import 'kafka.message.SnappyCompressionCodec'
-   java_import 'kafka.message.LZ4CompressionCodec'

    VALIDATIONS = {
      :'request.required.acks' => %w[ 0 1 -1 ],
-     :'required.codecs' => [NoCompressionCodec.name, GZIPCompressionCodec.name, SnappyCompressionCodec.name, LZ4CompressionCodec.name],
+     :'required.codecs' => [NoCompressionCodec.name, GZIPCompressionCodec.name, SnappyCompressionCodec.name],
      :'producer.type' => %w[ sync async ]
    }
data/lib/jruby-kafka.rb CHANGED
@@ -1,6 +1,7 @@
- require 'jruby-kafka/version'
  require 'jruby-kafka_jars.rb'
  require 'jruby-kafka/consumer'
- require 'jruby-kafka/group'
  require 'jruby-kafka/producer'
  require 'jruby-kafka/kafka-producer'
+
+ module Kafka
+ end
data/lib/jruby-kafka_jars.rb CHANGED
@@ -1,38 +1,18 @@
  # this is a generated file, to avoid over-writing it just delete this comment
- begin
-   require 'jar_dependencies'
- rescue LoadError
-   require 'log4j/log4j/1.2.17/log4j-1.2.17.jar'
-   require 'org/apache/kafka/kafka_2.10/0.8.2.2/kafka_2.10-0.8.2.2.jar'
-   require 'org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar'
-   require 'org/slf4j/slf4j-log4j12/1.7.13/slf4j-log4j12-1.7.13.jar'
-   require 'org/slf4j/slf4j-api/1.7.13/slf4j-api-1.7.13.jar'
-   require 'net/sf/jopt-simple/jopt-simple/3.2/jopt-simple-3.2.jar'
-   require 'io/netty/netty/3.7.0.Final/netty-3.7.0.Final.jar'
-   require 'net/jpountz/lz4/lz4/1.2.0/lz4-1.2.0.jar'
-   require 'junit/junit/3.8.1/junit-3.8.1.jar'
-   require 'org/apache/kafka/kafka-clients/0.8.2.2/kafka-clients-0.8.2.2.jar'
-   require 'com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar'
-   require 'jline/jline/0.9.94/jline-0.9.94.jar'
-   require 'org/xerial/snappy/snappy-java/1.1.1.7/snappy-java-1.1.1.7.jar'
-   require 'org/scala-lang/scala-library/2.10.4/scala-library-2.10.4.jar'
-   require 'com/101tec/zkclient/0.3/zkclient-0.3.jar'
- end
+ require 'jar_dependencies'

- if defined? Jars
-   require_jar( 'log4j', 'log4j', '1.2.17' )
-   require_jar( 'org.apache.kafka', 'kafka_2.10', '0.8.2.2' )
-   require_jar( 'org.apache.zookeeper', 'zookeeper', '3.4.6' )
-   require_jar( 'org.slf4j', 'slf4j-log4j12', '1.7.13' )
-   require_jar( 'org.slf4j', 'slf4j-api', '1.7.13' )
-   require_jar( 'net.sf.jopt-simple', 'jopt-simple', '3.2' )
-   require_jar( 'io.netty', 'netty', '3.7.0.Final' )
-   require_jar( 'net.jpountz.lz4', 'lz4', '1.2.0' )
-   require_jar( 'junit', 'junit', '3.8.1' )
-   require_jar( 'org.apache.kafka', 'kafka-clients', '0.8.2.2' )
-   require_jar( 'com.yammer.metrics', 'metrics-core', '2.2.0' )
-   require_jar( 'jline', 'jline', '0.9.94' )
-   require_jar( 'org.xerial.snappy', 'snappy-java', '1.1.1.7' )
-   require_jar( 'org.scala-lang', 'scala-library', '2.10.4' )
-   require_jar( 'com.101tec', 'zkclient', '0.3' )
- end
+ require_jar( 'io.netty', 'netty', '3.7.0.Final' )
+ require_jar( 'log4j', 'log4j', '1.2.17' )
+ require_jar( 'org.xerial.snappy', 'snappy-java', '1.1.1.7' )
+ require_jar( 'jline', 'jline', '0.9.94' )
+ require_jar( 'net.sf.jopt-simple', 'jopt-simple', '3.2' )
+ require_jar( 'org.slf4j', 'slf4j-api', '1.7.13' )
+ require_jar( 'org.apache.kafka', 'kafka-clients', '0.8.2.2' )
+ require_jar( 'junit', 'junit', '3.8.1' )
+ require_jar( 'com.101tec', 'zkclient', '0.3' )
+ require_jar( 'com.yammer.metrics', 'metrics-core', '2.2.0' )
+ require_jar( 'org.apache.zookeeper', 'zookeeper', '3.4.6' )
+ require_jar( 'org.apache.kafka', 'kafka_2.10', '0.8.2.2' )
+ require_jar( 'org.slf4j', 'slf4j-log4j12', '1.7.13' )
+ require_jar( 'org.scala-lang', 'scala-library', '2.10.4' )
+ require_jar( 'net.jpountz.lz4', 'lz4', '1.2.0' )
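
The regenerated jars file now requires jar_dependencies unconditionally and declares every jar through require_jar, which resolves a vendored or locally installed jar by its Maven coordinates and loads it onto the JRuby classpath. A sketch of the equivalence; the vendored-path form is what the old rescue LoadError branch did by hand:

    require 'jar_dependencies'

    # Looks the jar up by group id, artifact id, and version ...
    require_jar('org.apache.kafka', 'kafka-clients', '0.8.2.2')
    # ... roughly what the old fallback achieved by requiring the vendored path:
    # require 'org/apache/kafka/kafka-clients/0.8.2.2/kafka-clients-0.8.2.2.jar'
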
data/lib/junit/junit/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>junit</groupId>
+   <artifactId>junit</artifactId>
+   <versioning>
+     <release>3.8.1</release>
+     <versions>
+       <version>3.8.1</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/log4j/log4j/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>log4j</groupId>
+   <artifactId>log4j</artifactId>
+   <versioning>
+     <release>1.2.17</release>
+     <versions>
+       <version>1.2.17</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/net/jpountz/lz4/lz4/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>net.jpountz.lz4</groupId>
+   <artifactId>lz4</artifactId>
+   <versioning>
+     <release>1.2.0</release>
+     <versions>
+       <version>1.2.0</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/net/sf/jopt-simple/jopt-simple/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>net.sf.jopt-simple</groupId>
+   <artifactId>jopt-simple</artifactId>
+   <versioning>
+     <release>3.2</release>
+     <versions>
+       <version>3.2</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/org/apache/kafka/kafka-clients/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>org.apache.kafka</groupId>
+   <artifactId>kafka-clients</artifactId>
+   <versioning>
+     <release>0.8.2.2</release>
+     <versions>
+       <version>0.8.2.2</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/org/apache/kafka/kafka_2.10/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>org.apache.kafka</groupId>
+   <artifactId>kafka_2.10</artifactId>
+   <versioning>
+     <release>0.8.2.2</release>
+     <versions>
+       <version>0.8.2.2</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/org/apache/zookeeper/zookeeper/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>org.apache.zookeeper</groupId>
+   <artifactId>zookeeper</artifactId>
+   <versioning>
+     <release>3.4.6</release>
+     <versions>
+       <version>3.4.6</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/org/scala-lang/scala-library/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>org.scala-lang</groupId>
+   <artifactId>scala-library</artifactId>
+   <versioning>
+     <release>2.10.4</release>
+     <versions>
+       <version>2.10.4</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/org/slf4j/slf4j-api/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>org.slf4j</groupId>
+   <artifactId>slf4j-api</artifactId>
+   <versioning>
+     <release>1.7.13</release>
+     <versions>
+       <version>1.7.13</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/org/slf4j/slf4j-log4j12/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>org.slf4j</groupId>
+   <artifactId>slf4j-log4j12</artifactId>
+   <versioning>
+     <release>1.7.13</release>
+     <versions>
+       <version>1.7.13</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/org/xerial/snappy/snappy-java/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>org.xerial.snappy</groupId>
+   <artifactId>snappy-java</artifactId>
+   <versioning>
+     <release>1.1.1.7</release>
+     <versions>
+       <version>1.1.1.7</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/rubygems/jar-dependencies/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>rubygems</groupId>
+   <artifactId>jar-dependencies</artifactId>
+   <versioning>
+     <release>0.3.1</release>
+     <versions>
+       <version>0.3.1</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/rubygems/rake/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>rubygems</groupId>
+   <artifactId>rake</artifactId>
+   <versioning>
+     <release>10.4.2</release>
+     <versions>
+       <version>10.4.2</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/rubygems/ruby-maven/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>rubygems</groupId>
+   <artifactId>ruby-maven</artifactId>
+   <versioning>
+     <release>3.3.8</release>
+     <versions>
+       <version>3.3.8</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
data/lib/rubygems/ruby-maven-libs/maven-metadata-local.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata>
+   <groupId>rubygems</groupId>
+   <artifactId>ruby-maven-libs</artifactId>
+   <versioning>
+     <release>3.3.3</release>
+     <versions>
+       <version>3.3.3</version>
+     </versions>
+     <lastUpdated>20151231160701</lastUpdated>
+   </versioning>
+ </metadata>
metadata CHANGED
@@ -1,72 +1,58 @@
  --- !ruby/object:Gem::Specification
  name: jruby-kafka
  version: !ruby/object:Gem::Version
-   version: 1.7.2
+   version: '2.0'
  platform: java
  authors:
  - Joseph Lawson
- autorequire: 
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-08-12 00:00:00.000000000 Z
+ date: 2015-12-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
-   name: jar-dependencies
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: 0.3.2
-   type: :development
+         version: '0'
+   name: jar-dependencies
    prerelease: false
+   type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: 0.3.2
+         version: '0'
  - !ruby/object:Gem::Dependency
-   name: rake
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '10.5'
-   type: :development
+         version: '3.3'
+   name: ruby-maven
    prerelease: false
+   type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '10.5'
+         version: '3.3'
  - !ruby/object:Gem::Dependency
-   name: rspec
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '3.4'
-   type: :development
+         version: '10.4'
+   name: rake
    prerelease: false
-   version_requirements: !ruby/object:Gem::Requirement
-     requirements:
-     - - ~>
-       - !ruby/object:Gem::Version
-         version: '3.4'
- - !ruby/object:Gem::Dependency
-   name: ruby-maven
-   requirement: !ruby/object:Gem::Requirement
-     requirements:
-     - - ~>
-       - !ruby/object:Gem::Version
-         version: '3.3'
    type: :development
-   prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '3.3'
- description: JRuby wrapper for Kafka
+         version: '10.4'
+ description: this is primarily to be used as an interface for logstash
  email:
  - joe@joekiller.com
  executables: []
@@ -74,52 +60,70 @@ extensions: []
  extra_rdoc_files: []
  files:
  - lib/com/101tec/zkclient/0.3/zkclient-0.3.jar
+ - lib/com/101tec/zkclient/maven-metadata-local.xml
  - lib/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar
+ - lib/com/yammer/metrics/metrics-core/maven-metadata-local.xml
  - lib/io/netty/netty/3.7.0.Final/netty-3.7.0.Final.jar
+ - lib/io/netty/netty/maven-metadata-local.xml
  - lib/jline/jline/0.9.94/jline-0.9.94.jar
+ - lib/jline/jline/maven-metadata-local.xml
  - lib/jruby-kafka.rb
  - lib/jruby-kafka/consumer.rb
  - lib/jruby-kafka/error.rb
- - lib/jruby-kafka/group.rb
  - lib/jruby-kafka/kafka-producer.rb
+ - lib/jruby-kafka/namespace.rb
  - lib/jruby-kafka/producer.rb
- - lib/jruby-kafka/version.rb
  - lib/jruby-kafka_jars.rb
  - lib/junit/junit/3.8.1/junit-3.8.1.jar
+ - lib/junit/junit/maven-metadata-local.xml
  - lib/log4j/log4j/1.2.17/log4j-1.2.17.jar
+ - lib/log4j/log4j/maven-metadata-local.xml
  - lib/net/jpountz/lz4/lz4/1.2.0/lz4-1.2.0.jar
+ - lib/net/jpountz/lz4/lz4/maven-metadata-local.xml
  - lib/net/sf/jopt-simple/jopt-simple/3.2/jopt-simple-3.2.jar
+ - lib/net/sf/jopt-simple/jopt-simple/maven-metadata-local.xml
  - lib/org/apache/kafka/kafka-clients/0.8.2.2/kafka-clients-0.8.2.2.jar
+ - lib/org/apache/kafka/kafka-clients/maven-metadata-local.xml
  - lib/org/apache/kafka/kafka_2.10/0.8.2.2/kafka_2.10-0.8.2.2.jar
+ - lib/org/apache/kafka/kafka_2.10/maven-metadata-local.xml
  - lib/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar
+ - lib/org/apache/zookeeper/zookeeper/maven-metadata-local.xml
  - lib/org/scala-lang/scala-library/2.10.4/scala-library-2.10.4.jar
+ - lib/org/scala-lang/scala-library/maven-metadata-local.xml
  - lib/org/slf4j/slf4j-api/1.7.13/slf4j-api-1.7.13.jar
+ - lib/org/slf4j/slf4j-api/maven-metadata-local.xml
  - lib/org/slf4j/slf4j-log4j12/1.7.13/slf4j-log4j12-1.7.13.jar
+ - lib/org/slf4j/slf4j-log4j12/maven-metadata-local.xml
  - lib/org/xerial/snappy/snappy-java/1.1.1.7/snappy-java-1.1.1.7.jar
+ - lib/org/xerial/snappy/snappy-java/maven-metadata-local.xml
+ - lib/rubygems/jar-dependencies/maven-metadata-local.xml
+ - lib/rubygems/rake/maven-metadata-local.xml
+ - lib/rubygems/ruby-maven-libs/maven-metadata-local.xml
+ - lib/rubygems/ruby-maven/maven-metadata-local.xml
  homepage: https://github.com/joekiller/jruby-kafka
  licenses:
  - Apache 2.0
  metadata: {}
- post_install_message: 
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
-   - - ! '>='
+   - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
-   - - ! '>='
+   - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  requirements:
  - jar 'org.apache.kafka:kafka_2.10', '0.8.2.2'
  - jar 'org.slf4j:slf4j-log4j12', '1.7.13'
- rubyforge_project: 
- rubygems_version: 2.4.5
- signing_key: 
+ rubyforge_project:
+ rubygems_version: 2.4.8
+ signing_key:
  specification_version: 4
  summary: jruby Kafka wrapper
  test_files: []
data/lib/jruby-kafka/group.rb DELETED
@@ -1,257 +0,0 @@
- # basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/Consumer+Group+Example
- require 'jruby-kafka/version'
- require 'jruby-kafka/consumer'
- require 'jruby-kafka/error'
-
- # noinspection JRubyStringImportInspection
- class Kafka::Group
-   java_import 'java.util.concurrent.ExecutorService'
-   java_import 'java.util.concurrent.Executors'
-   java_import 'org.I0Itec.zkclient.exception.ZkException'
-
-   # Create a Kafka client group
-   #
-   # options:
-   # :zk_connect => "localhost:2181" - REQUIRED: The connection string for the
-   #   zookeeper connection in the form host:port. Multiple URLS can be given to allow fail-over.
-   # :zk_connect_timeout => "6000" - (optional) The max time that the client waits while establishing a connection to zookeeper.
-   # :group_id => "group" - REQUIRED: The group id to consume on.
-   # :topic_id => "topic" - REQUIRED: The topic id to consume on.
-   # :reset_beginning => "from-beginning" - (optional) reset the consumer group to start at the
-   #   earliest message present in the log by clearing any offsets for the group stored in Zookeeper.
-   # :auto_offset_reset => "smallest" or "largest" - (optional, default 'largest') If the consumer does not already
-   #   have an established offset to consume from, start with the earliest message present in the log (smallest) or
-   #   after the last message in the log (largest).
-   # :consumer_restart_on_error => "true" - (optional) Controls if consumer threads are to restart on caught exceptions.
-   #   exceptions are logged.
-   def initialize(options={})
-     validate_required_arguments(options)
-
-     @zk_connect = options[:zk_connect]
-     @group_id = options[:group_id]
-     @topic = options[:topic_id]
-     @topics_allowed = options[:allow_topics]
-     @topics_filtered = options[:filter_topics]
-     @zk_session_timeout = '6000'
-     @zk_connect_timeout = '6000'
-     @zk_sync_time = '2000'
-     @reset_beginning = nil
-     @auto_offset_reset = 'largest'
-     @auto_commit_interval = '1000'
-     @running = false
-     @rebalance_max_retries = '4'
-     @rebalance_backoff_ms = '2000'
-     @socket_timeout_ms = "#{30 * 1000}"
-     @socket_receive_buffer_bytes = "#{64 * 1024}"
-     @fetch_message_max_bytes = "#{1024 * 1024}"
-     @auto_commit_enable = "#{true}"
-     @queued_max_message_chunks = '10'
-     @fetch_min_bytes = '1'
-     @fetch_wait_max_ms = '100'
-     @refresh_leader_backoff_ms = '200'
-     @consumer_timeout_ms = '-1'
-     @consumer_restart_on_error = "#{false}"
-     @consumer_restart_sleep_ms = '0'
-     @consumer_id = nil
-     @key_decoder_class = "kafka.serializer.DefaultDecoder"
-     @value_decoder_class = "kafka.serializer.DefaultDecoder"
-     @dual_commit_enabled = "#{true}"
-     @offsets_storage = "zookeeper"
-
-     if options[:zk_connect_timeout]
-       @zk_connect_timeout = "#{options[:zk_connect_timeout]}"
-     end
-     if options[:zk_session_timeout]
-       @zk_session_timeout = "#{options[:zk_session_timeout]}"
-     end
-     if options[:zk_sync_time]
-       @zk_sync_time = "#{options[:zk_sync_time]}"
-     end
-     if options[:auto_commit_interval]
-       @auto_commit_interval = "#{options[:auto_commit_interval]}"
-     end
-
-     if options[:rebalance_max_retries]
-       @rebalance_max_retries = "#{options[:rebalance_max_retries]}"
-     end
-
-     if options[:rebalance_backoff_ms]
-       @rebalance_backoff_ms = "#{options[:rebalance_backoff_ms]}"
-     end
-
-     if options[:socket_timeout_ms]
-       @socket_timeout_ms = "#{options[:socket_timeout_ms]}"
-     end
-
-     if options[:socket_receive_buffer_bytes]
-       @socket_receive_buffer_bytes = "#{options[:socket_receive_buffer_bytes]}"
-     end
-
-     if options[:fetch_message_max_bytes]
-       @fetch_message_max_bytes = "#{options[:fetch_message_max_bytes]}"
-     end
-
-     if options[:auto_commit_enable]
-       @auto_commit_enable = "#{options[:auto_commit_enable]}"
-     end
-
-     if options[:queued_max_message_chunks]
-       @queued_max_message_chunks = "#{options[:queued_max_message_chunks]}"
-     end
-
-     if options[:fetch_min_bytes]
-       @fetch_min_bytes = "#{options[:fetch_min_bytes]}"
-     end
-
-     if options[:fetch_wait_max_ms]
-       @fetch_wait_max_ms = "#{options[:fetch_wait_max_ms]}"
-     end
-
-     if options[:refresh_leader_backoff_ms]
-       @refresh_leader_backoff_ms = "#{options[:refresh_leader_backoff_ms]}"
-     end
-
-     if options[:consumer_timeout_ms]
-       @consumer_timeout_ms = "#{options[:consumer_timeout_ms]}"
-     end
-
-     if options[:consumer_restart_on_error]
-       @consumer_restart_on_error = "#{options[:consumer_restart_on_error]}"
-     end
-
-     if options[:consumer_restart_sleep_ms]
-       @consumer_restart_sleep_ms = "#{options[:consumer_restart_sleep_ms]}"
-     end
-
-     if options[:auto_offset_reset]
-       @auto_offset_reset = "#{options[:auto_offset_reset]}"
-     end
-
-     if options[:key_decoder_class]
-       @key_decoder_class = "#{options[:key_decoder_class]}"
-     end
-
-     if options[:value_decoder_class]
-       @value_decoder_class = "#{options[:value_decoder_class]}"
-     end
-
-     if options[:reset_beginning]
-       if not options[:auto_offset_reset] || options[:auto_offset_reset] != 'smallest'
-         raise KafkaError.new('reset_beginning => from-beginning must be used with auto_offset_reset => smallest')
-       end
-       @reset_beginning = "#{options[:reset_beginning]}"
-     end
-
-     if options[:consumer_id]
-       @consumer_id = options[:consumer_id]
-     end
-
-     if options[:dual_commit_enabled]
-       @dual_commit_enabled = "#{options[:dual_commit_enabled]}"
-     end
-     if options[:offsets_storage]
-       @offsets_storage = "#{options[:offsets_storage]}"
-     end
-   end
-
-   public
-
-   def shutdown
-     if @consumer
-       @consumer.shutdown
-     end
-     if @executor
-       @executor.shutdown
-     end
-     @running = false
-   end
-
-   def run(a_num_threads, a_queue)
-     begin
-       if @reset_beginning == 'from-beginning'
-         Java::kafka::utils::ZkUtils.maybeDeletePath(@zk_connect, "/consumers/#{@group_id}")
-       end
-
-       @consumer = Java::kafka::consumer::Consumer.createJavaConsumerConnector(create_consumer_config)
-     rescue ZkException => e
-       raise KafkaError.new(e), "Got ZkException: #{e}"
-     end
-
-     thread_value = a_num_threads.to_java Java::int
-     streams = get_streams(thread_value)
-
-     @executor = Executors.newFixedThreadPool(a_num_threads)
-     @executor_submit = @executor.java_method(:submit, [Java::JavaLang::Runnable.java_class])
-
-     thread_number = 0
-     streams.each do |stream|
-       @executor_submit.call(Kafka::Consumer.new(stream, thread_number, a_queue, @consumer_restart_on_error, @consumer_restart_sleep_ms))
-       thread_number += 1
-     end
-     @running = true
-   end
-
-   def running?
-     @running
-   end
-
-   private
-
-   def validate_required_arguments(options={})
-     [:zk_connect, :group_id].each do |opt|
-       raise(ArgumentError, "#{opt} is required.") unless options[opt]
-     end
-     unless [ options[:topic_id],
-              options[:allow_topics],
-              options[:filter_topics] ].compact.length == 1
-       raise(ArgumentError,
-             "exactly one of topic_id, allow_topics, filter_topics is required.")
-     end
-   end
-
-   def get_streams(threads)
-     constructor_param_class_name = "kafka.utils.VerifiableProperties"
-     key_decoder_instance = Java::JavaClass.for_name(@key_decoder_class).constructor(constructor_param_class_name).new_instance(nil)
-     value_decoder_instance = Java::JavaClass.for_name(@value_decoder_class).constructor(constructor_param_class_name).new_instance(nil)
-     if @topic
-       topic_count_map = java.util.HashMap.new
-       topic_count_map.put(@topic, threads)
-       consumer_map = @consumer.createMessageStreams(topic_count_map, key_decoder_instance, value_decoder_instance)
-       Array.new(consumer_map[@topic])
-     elsif @topics_allowed
-       filter = Java::kafka::consumer::Whitelist.new(@topics_allowed)
-       Array.new(@consumer.createMessageStreamsByFilter(filter, threads, key_decoder_instance, value_decoder_instance))
-     else # @topics_filtered
-       filter = Java::kafka::consumer::Blacklist.new(@topics_filtered)
-       Array.new(@consumer.createMessageStreamsByFilter(filter, threads, key_decoder_instance, value_decoder_instance))
-     end
-   end
-
-   def create_consumer_config
-     properties = java.util.Properties.new
-     properties.put('zookeeper.connect', @zk_connect)
-     properties.put('group.id', @group_id)
-     properties.put('zookeeper.connection.timeout.ms', @zk_connect_timeout)
-     properties.put('zookeeper.session.timeout.ms', @zk_session_timeout)
-     properties.put('zookeeper.sync.time.ms', @zk_sync_time)
-     properties.put('auto.commit.interval.ms', @auto_commit_interval)
-     properties.put('auto.offset.reset', @auto_offset_reset)
-     properties.put('rebalance.max.retries', @rebalance_max_retries)
-     properties.put('rebalance.backoff.ms', @rebalance_backoff_ms)
-     properties.put('socket.timeout.ms', @socket_timeout_ms)
-     properties.put('socket.receive.buffer.bytes', @socket_receive_buffer_bytes)
-     properties.put('fetch.message.max.bytes', @fetch_message_max_bytes)
-     properties.put('auto.commit.enable', @auto_commit_enable)
-     properties.put('queued.max.message.chunks', @queued_max_message_chunks)
-     properties.put('fetch.min.bytes', @fetch_min_bytes)
-     properties.put('fetch.wait.max.ms', @fetch_wait_max_ms)
-     properties.put('refresh.leader.backoff.ms', @refresh_leader_backoff_ms)
-     properties.put('consumer.timeout.ms', @consumer_timeout_ms)
-     properties.put('dual.commit.enabled', @dual_commit_enabled)
-     properties.put('offsets.storage', @offsets_storage)
-     unless @consumer_id.nil?
-       properties.put('consumer.id', @consumer_id)
-     end
-     Java::kafka::consumer::ConsumerConfig.new(properties)
-   end
- end
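
With Kafka::Group deleted, its run(a_num_threads, a_queue) loop maps onto the new Kafka::Consumer. A rough migration sketch; the option renames follow the two initializers shown in this diff (:zk_connect → :zookeeper_connect, :topic_id → :topic, thread count → :num_streams), and the queue-feeding threads reproduce what Group's executor did:

    queue = Queue.new
    consumer = Kafka::Consumer.new(
      :zookeeper_connect => 'localhost:2181',  # placeholder
      :group_id          => 'my-group',
      :topic             => 'my-topic',
      :num_streams       => 2
    )

    threads = consumer.message_streams.map do |stream|
      Thread.new { stream.each { |message| queue << message } }
    end
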
data/lib/jruby-kafka/version.rb DELETED
@@ -1,4 +0,0 @@
- module Kafka
-   VERSION = '1.7.2'.freeze
-   JAR_DEPENDENCIES_VERSION = '0.3.2'.freeze
- end