jruby-kafka 2.2.2-java → 3.0-java
- checksums.yaml +5 -13
- data/lib/com/101tec/zkclient/0.7/zkclient-0.7.jar +0 -0
- data/lib/jruby-kafka/consumer.rb +16 -18
- data/lib/jruby-kafka/kafka-producer.rb +47 -6
- data/lib/jruby-kafka/producer.rb +57 -12
- data/lib/jruby-kafka/version.rb +4 -0
- data/lib/jruby-kafka.rb +1 -1
- data/lib/jruby-kafka_jars.rb +13 -13
- data/lib/org/apache/kafka/kafka-clients/0.9.0.0/kafka-clients-0.9.0.0.jar +0 -0
- data/lib/org/apache/kafka/kafka_2.11/0.9.0.0/kafka_2.11-0.9.0.0.jar +0 -0
- data/lib/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar +0 -0
- data/lib/org/scala-lang/modules/scala-xml_2.11/{1.0.2/scala-xml_2.11-1.0.2.jar → 1.0.4/scala-xml_2.11-1.0.4.jar} +0 -0
- data/lib/org/scala-lang/scala-library/2.11.7/scala-library-2.11.7.jar +0 -0
- metadata +25 -39
- data/lib/com/101tec/zkclient/0.3/zkclient-0.3.jar +0 -0
- data/lib/jruby-kafka/utility.rb +0 -20
- data/lib/org/apache/kafka/kafka-clients/0.8.2.2/kafka-clients-0.8.2.2.jar +0 -0
- data/lib/org/apache/kafka/kafka_2.11/0.8.2.2/kafka_2.11-0.8.2.2.jar +0 -0
- data/lib/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.2/scala-parser-combinators_2.11-1.0.2.jar +0 -0
- data/lib/org/scala-lang/scala-library/2.11.5/scala-library-2.11.5.jar +0 -0
checksums.yaml
CHANGED
@@ -1,15 +1,7 @@
 ---
-
-  metadata.gz:
-
-  data.tar.gz: !binary |-
-    ODY0Y2MyNTQyN2QzMjg4N2M3MTVmZDZkNTMzYjA2YTE0NDk0MjYwNQ==
+SHA1:
+  metadata.gz: 3eea2f7be96fb9132520a94b6c71b9175e0518e1
+  data.tar.gz: 520858d8b78e96da2ffc5fcbb30d9e35dfe59598
 SHA512:
-  metadata.gz:
-
-    MjdlNjM2NWIwMzRkYjE5ZWNmNDJkMWQ2OWYxYmY3ZTIyOGYzMGFjNTUzNWFl
-    ODg0YWZkNjdjNjI0ZTFlN2Q2OTdkNzY0NzkwOTg5MmRjNDRkMWU=
-  data.tar.gz: !binary |-
-    MzgwNDgzMWNhYWZkNGI5YjFlNmVhYjkxZjczNzQ3MjM3NTUyNTAwZjQ5OThi
-    YjUxYzk3MjVjOGQyMTYyNDUwNDgwNGI0YzhkNzg0ZGMxOTAyYjUxMjU4YWFk
-    ZmIyNmFmMmQ0OTllY2I0NGIwMWRlMGI1NDIwY2NjMzU0M2FmMDg=
+  metadata.gz: 3b55b5c2671ae57091c253769ec2d929ca020ffd9606006070ae6159944d1cea0fca13a7895c7e70068f85d2835478bcb7ac917a1a4fa759a1ef9ac84ea53175
+  data.tar.gz: f0c96aba4dec69a653a0ef8decc866f1ac24c09d20fa9cc6c60e003a003b0e42db9e770a983a4f3128da3d5a07a811a9dfef4ad3db086b8e73435b7994244452
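The change above replaces the old base64-wrapped !binary YAML digests with plain hex SHA1/SHA512 strings. The digests cover the two members inside the gem archive, not the .gem file itself. A minimal verification sketch, assuming a locally downloaded copy of the gem (the filename is hypothetical, not part of this release):

require 'digest'
require 'rubygems/package'

# A .gem is a tar archive containing metadata.gz and data.tar.gz, the two
# files recorded in checksums.yaml. 'jruby-kafka-3.0-java.gem' is a
# hypothetical local path.
File.open('jruby-kafka-3.0-java.gem', 'rb') do |io|
  tar = Gem::Package::TarReader.new(io)
  tar.each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    data = entry.read
    puts "#{entry.full_name} SHA1:   #{Digest::SHA1.hexdigest(data)}"
    puts "#{entry.full_name} SHA512: #{Digest::SHA512.hexdigest(data)}"
  end
end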
data/lib/jruby-kafka/consumer.rb
CHANGED
@@ -1,14 +1,8 @@
 require 'java'
 require 'jruby-kafka/namespace'
-require 'jruby-kafka/utility'

 class Kafka::Consumer
   java_import 'org.I0Itec.zkclient.exception.ZkException'
-  java_import 'kafka.consumer.ConsumerConfig'
-  java_import 'kafka.consumer.Consumer'
-  java_import 'kafka.consumer.Whitelist'
-  java_import 'kafka.consumer.Blacklist'
-  java_import 'kafka.utils.ZkUtils'
   # Create a Kafka high-level consumer.
   #
   # @param [Hash] config the consumer configuration.
@@ -46,14 +40,8 @@ class Kafka::Consumer
     @key_decoder = @properties.delete(:key_decoder) || 'kafka.serializer.DefaultDecoder'
     @msg_decoder = @properties.delete(:msg_decoder) || 'kafka.serializer.DefaultDecoder'
     @reset_beginning = @properties.delete :reset_beginning
-    @consumer = nil
-  end

-
-  #
-  # @return void
-  def connect
-    @consumer = Consumer.createJavaConsumerConnector ConsumerConfig.new Kafka::Utility.java_properties @properties
+    @consumer = Java::KafkaConsumer::Consumer.createJavaConsumerConnector create_config
   end

   # Start fetching messages.
@@ -66,10 +54,9 @@ class Kafka::Consumer
   #
   # @note KafkaStreams instances are not thread-safe.
   def message_streams
-    connect if @consumer.nil?
     begin
       if @reset_beginning == 'from-beginning'
-        ZkUtils.maybeDeletePath(@properties[:zookeeper_connect], "/consumers/#{@properties[:group_id]}")
+        Java::kafka::utils::ZkUtils.maybeDeletePath(@properties[:zookeeper_connect], "/consumers/#{@properties[:group_id]}")
       end
     rescue ZkException => e
       raise KafkaError.new(e), "Got ZkException: #{e}"
@@ -87,8 +74,8 @@ class Kafka::Consumer

     else
       filter = @include_topics ?
-        Whitelist.new(@include_topics) :
-        Blacklist.new(@exclude_topics)
+        Java::KafkaConsumer::Whitelist.new(@include_topics) :
+        Java::KafkaConsumer::Blacklist.new(@exclude_topics)

       @consumer.
         createMessageStreamsByFilter(filter, @num_streams, key_decoder_i, msg_decoder_i).
@@ -111,11 +98,11 @@ class Kafka::Consumer
   # @return void
   def shutdown
     @consumer.shutdown if @consumer
-    @consumer = nil
     nil
   end

   private
+
   def validate_arguments(options)
     [:zookeeper_connect, :group_id].each do |opt|
       raise ArgumentError, "Parameter :#{opt} is required." unless options[opt]
@@ -131,4 +118,15 @@ class Kafka::Consumer
       end
     end
   end
+
+  def create_config
+    properties = java.util.Properties.new
+    @properties.each do |k,v|
+      k = k.to_s.gsub '_', '.'
+      v = v.to_s
+      properties.setProperty k, v
+    end
+    Java::KafkaConsumer::ConsumerConfig.new properties
+  end
 end
+
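Taken together, the consumer changes fold the old connect step into initialize (the Java consumer connector is now created eagerly from the private create_config) and drop the kafka.consumer java_imports in favor of fully qualified Java:: references. A minimal usage sketch against the 3.0 API; the required option names come from the diff above, while the host, topic, and stream-iteration details are illustrative assumptions:

require 'jruby-kafka'

# :zookeeper_connect and :group_id are validated as required; underscored
# keys become dotted Kafka properties via create_config. The host and
# topic values here are assumptions for illustration.
consumer = Kafka::Consumer.new(
  zookeeper_connect: 'localhost:2181',
  group_id:          'example-group',
  include_topics:    'example-topic',   # builds a Whitelist filter
  num_streams:       1
)

# Each stream yields MessageAndMetadata objects; #message returns the raw
# bytes under the default decoder (iteration details are assumptions).
consumer.message_streams.each do |stream|
  stream.each { |pair| puts String.from_java_bytes(pair.message) }
end

consumer.shutdown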
data/lib/jruby-kafka/kafka-producer.rb
CHANGED
@@ -1,6 +1,5 @@
 require 'jruby-kafka/namespace'
 require 'jruby-kafka/error'
-require 'jruby-kafka/utility'

 # noinspection JRubyStringImportInspection
 class Kafka::KafkaProducer
@@ -8,8 +7,25 @@ class Kafka::KafkaProducer
   java_import 'org.apache.kafka.clients.producer.Callback'
   KAFKA_PRODUCER = Java::org.apache.kafka.clients.producer.KafkaProducer

-
-
+  VALIDATIONS = {
+    :'required.codecs' => %w[
+      none gzip snappy lz4
+    ]
+  }
+
+  REQUIRED = %w[
+    bootstrap.servers key.serializer
+  ]
+
+  KNOWN = %w[
+    acks batch.size block.on.buffer.full
+    bootstrap.servers buffer.memory client.id
+    compression.type key.serializer linger.ms
+    max.in.flight.requests.per.connection max.request.size
+    metadata.fetch.timeout.ms metadata.max.age.ms metric.reporters
+    metrics.num.samples metrics.sample.window.ms receive.buffer.bytes
+    reconnect.backoff.ms retries retry.backoff.ms
+    send.buffer.bytes timeout.ms value.serializer
   ]

   class RubyCallback
@@ -27,13 +43,18 @@ class Kafka::KafkaProducer
   attr_reader :producer, :send_method, :send_cb_method, :options

   def initialize(opts = {})
-
-
+    @options = opts.reduce({}) do |opts_array, (k, v)|
+      unless v.nil?
+        opts_array[k.to_s.gsub(/_/, '.')] = v
+      end
+      opts_array
+    end
+    validate_arguments
     @send_method = @send_cb_method = proc { throw StandardError.new 'Producer is not connected' }
   end

   def connect
-    @producer = KAFKA_PRODUCER.new(
+    @producer = KAFKA_PRODUCER.new(create_producer_config)
     @send_method = producer.java_method :send, [ProducerRecord]
     @send_cb_method = producer.java_method :send, [ProducerRecord, Callback.java_class]
   end
@@ -50,4 +71,24 @@ class Kafka::KafkaProducer
   def close
     @producer.close
   end
+
+  private
+
+  def validate_arguments
+    errors = []
+    missing = REQUIRED.reject { |opt| options[opt] }
+    errors = ["Required settings: #{ missing.join(', ')}"] if missing.any?
+    invalid = VALIDATIONS.reject { |opt, valid| options[opt].nil? or valid.include? options[opt].to_s }
+    errors += invalid.map { |opt, valid| "#{ opt } should be one of: [#{ valid.join(', ')}]" }
+    fail StandardError.new "Invalid configuration arguments: #{ errors.join('; ') }" if errors.any?
+    options.keys.each do |opt|
+      STDERR.puts "WARNING: Unknown configuration key: #{opt}" unless KNOWN.include? opt
+    end
+  end
+
+  def create_producer_config
+    properties = java.util.Properties.new
+    options.each { |opt, value| properties.put opt, value.to_s }
+    properties
+  end
 end
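The new-API producer's constructor now normalizes underscored option keys to dotted names, drops nil values, and validates against REQUIRED and KNOWN before any connection is made. A minimal sketch of the resulting call pattern; the serializer class names and the direct use of send_method are assumptions for illustration, not documented API:

require 'jruby-kafka'

producer = Kafka::KafkaProducer.new(
  bootstrap_servers: 'localhost:9092',  # normalized to 'bootstrap.servers'
  key_serializer:    'org.apache.kafka.common.serialization.StringSerializer',
  value_serializer:  'org.apache.kafka.common.serialization.StringSerializer'
)
producer.connect  # binds send_method to the Java producer's send(ProducerRecord)

# send_method is exposed via attr_reader above; calling it directly is an
# illustrative assumption. Topic/key/value are hypothetical.
record = Java::org.apache.kafka.clients.producer.ProducerRecord.new(
  'example-topic', 'key', 'value'
)
producer.send_method.call(record)
producer.close

Omitting bootstrap_servers or key_serializer now fails fast in initialize with "Invalid configuration arguments: Required settings: ..." instead of surfacing later from the Java client.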
data/lib/jruby-kafka/producer.rb
CHANGED
@@ -1,7 +1,6 @@
 # basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/0.8.0+Producer+Example
 require 'jruby-kafka/namespace'
 require 'jruby-kafka/error'
-require 'jruby-kafka/utility'

 # noinspection JRubyStringImportInspection
 class Kafka::Producer
@@ -9,9 +8,30 @@ class Kafka::Producer
   java_import 'kafka.producer.ProducerConfig'
   java_import 'kafka.producer.KeyedMessage'
   KAFKA_PRODUCER = Java::kafka.javaapi.producer.Producer
+  java_import 'kafka.message.NoCompressionCodec'
+  java_import 'kafka.message.GZIPCompressionCodec'
+  java_import 'kafka.message.SnappyCompressionCodec'

-
-  :
+  VALIDATIONS = {
+    :'request.required.acks' => %w[ 0 1 -1 ],
+    :'required.codecs' => [NoCompressionCodec.name, GZIPCompressionCodec.name, SnappyCompressionCodec.name],
+    :'producer.type' => %w[ sync async ]
+  }
+
+  REQUIRED = %w[
+    metadata.broker.list
+  ]
+
+  # List of all available options extracted from http://kafka.apache.org/documentation.html#producerconfigs Apr. 27, 2014
+  # If new options are added, they should just work. Please add them to the list so that we can get handy warnings.
+  KNOWN = %w[
+    metadata.broker.list request.required.acks request.timeout.ms
+    producer.type serializer.class key.serializer.class
+    partitioner.class compression.codec compressed.topics
+    message.send.max.retries retry.backoff.ms topic.metadata.refresh.interval.ms
+    queue.buffering.max.ms queue.buffering.max.messages queue.enqueue.timeout.ms
+    batch.num.messages send.buffer.bytes client.id
+    broker.list serializer.encoding
   ]

   attr_reader :producer, :send_method, :options
@@ -21,22 +41,27 @@ class Kafka::Producer
   # options:
   # metadata_broker_list: ["localhost:9092"] - REQUIRED: a seed list of kafka brokers
   def initialize(opts = {})
-    @options = opts
-
-
+    @options = opts.reduce({}) do |opts_array, (k, v)|
+      unless v.nil?
+        opts_array[k.to_s.gsub(/_/, '.')] = v
+      end
+      opts_array
     end
-    if options[
-      options[
+    if options['broker.list']
+      options['metadata.broker.list'] = options.delete 'broker.list'
     end
-    if options[
-      options[
+    if options['metadata.broker.list'].is_a? Array
+      options['metadata.broker.list'] = options['metadata.broker.list'].join(',')
     end
-
+    if options['compressed.topics'].is_a? Array
+      options['compressed.topics'] = options['compressed.topics'].join(',')
+    end
+    validate_arguments
     @send_method = proc { throw StandardError.new 'Producer is not connected' }
   end

   def connect
-    @producer = KAFKA_PRODUCER.new(
+    @producer = KAFKA_PRODUCER.new(create_producer_config)
     @send_method = producer.java_method :send, [KeyedMessage]
   end

@@ -53,4 +78,24 @@ class Kafka::Producer
   def close
     @producer.close
   end
+
+  private
+
+  def validate_arguments
+    errors = []
+    missing = REQUIRED.reject { |opt| options[opt] }
+    errors = ["Required settings: #{ missing.join(', ')}"] if missing.any?
+    invalid = VALIDATIONS.reject { |opt, valid| options[opt].nil? or valid.include? options[opt].to_s }
+    errors += invalid.map { |opt, valid| "#{ opt } should be one of: [#{ valid.join(', ')}]" }
+    fail StandardError.new "Invalid configuration arguments: #{ errors.join('; ') }" if errors.any?
+    options.keys.each do |opt|
+      STDERR.puts "WARNING: Unknown configuration key: #{opt}" unless KNOWN.include? opt
+    end
+  end
+
+  def create_producer_config
+    properties = java.util.Properties.new
+    options.each { |opt, value| properties.put opt, value.to_s }
+    ProducerConfig.new(properties)
+  end
 end
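The old-API producer gets the same treatment: keys are normalized to dotted names, broker.list is aliased to metadata.broker.list, array values are comma-joined, and configuration is validated in initialize instead of at connect time. A minimal sketch; the StringEncoder serializer and the direct send_method call are illustrative assumptions:

require 'jruby-kafka'

producer = Kafka::Producer.new(
  broker_list:      ['localhost:9092'],  # aliased to 'metadata.broker.list', joined with ','
  serializer_class: 'kafka.serializer.StringEncoder'
)
producer.connect

# KeyedMessage(topic, message) from the Kafka 0.8-style Scala API;
# calling send_method directly is an illustrative assumption.
message = Java::kafka.producer.KeyedMessage.new('example-topic', 'hello from jruby-kafka')
producer.send_method.call(message)
producer.close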
data/lib/jruby-kafka.rb
CHANGED
data/lib/jruby-kafka_jars.rb
CHANGED
@@ -1,20 +1,20 @@
 # this is a generated file, to avoid over-writing it just delete this comment
 require 'jar_dependencies'

-require_jar( 'org.scala-lang.modules', 'scala-parser-combinators_2.11', '1.0.2' )
-require_jar( 'log4j', 'log4j', '1.2.17' )
-require_jar( 'org.scala-lang', 'scala-library', '2.11.5' )
-require_jar( 'org.apache.zookeeper', 'zookeeper', '3.4.6' )
-require_jar( 'org.slf4j', 'slf4j-log4j12', '1.7.13' )
-require_jar( 'org.scala-lang.modules', 'scala-xml_2.11', '1.0.2' )
-require_jar( 'org.slf4j', 'slf4j-api', '1.7.13' )
 require_jar( 'io.netty', 'netty', '3.7.0.Final' )
+require_jar( 'log4j', 'log4j', '1.2.17' )
+require_jar( 'com.101tec', 'zkclient', '0.7' )
+require_jar( 'org.xerial.snappy', 'snappy-java', '1.1.1.7' )
 require_jar( 'net.sf.jopt-simple', 'jopt-simple', '3.2' )
-require_jar( '
+require_jar( 'jline', 'jline', '0.9.94' )
+require_jar( 'org.scala-lang', 'scala-library', '2.11.7' )
+require_jar( 'org.apache.kafka', 'kafka-clients', '0.9.0.0' )
+require_jar( 'org.slf4j', 'slf4j-api', '1.7.13' )
+require_jar( 'org.apache.kafka', 'kafka_2.11', '0.9.0.0' )
 require_jar( 'junit', 'junit', '3.8.1' )
-require_jar( 'org.apache.kafka', 'kafka-clients', '0.8.2.2' )
 require_jar( 'com.yammer.metrics', 'metrics-core', '2.2.0' )
-require_jar( 'org.apache.
-require_jar( '
-require_jar( 'org.
-require_jar( '
+require_jar( 'org.apache.zookeeper', 'zookeeper', '3.4.6' )
+require_jar( 'org.slf4j', 'slf4j-log4j12', '1.7.13' )
+require_jar( 'org.scala-lang.modules', 'scala-parser-combinators_2.11', '1.0.4' )
+require_jar( 'org.scala-lang.modules', 'scala-xml_2.11', '1.0.4' )
+require_jar( 'net.jpountz.lz4', 'lz4', '1.2.0' )
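Each entry above is loaded by require_jar from the jar-dependencies gem, which resolves the Maven coordinates against the jars vendored under this gem's lib/ tree (listed in the metadata file list below) and puts them on the JRuby classpath. The 3.0 list tracks the Kafka 0.9.0.0 dependency set: kafka_2.11/kafka-clients 0.9.0.0, zkclient 0.7, Scala 2.11.7, and the new snappy and lz4 jars replace their 0.8.2.2-era counterparts. A sketch of roughly what one such call amounts to; the explicit jar-path form is an assumption for illustration:

require 'jar_dependencies'
require_jar( 'org.apache.kafka', 'kafka-clients', '0.9.0.0' )

# Roughly equivalent when the jar is vendored with the gem; in JRuby,
# require can load a .jar directly (illustrative path):
# require 'org/apache/kafka/kafka-clients/0.9.0.0/kafka-clients-0.9.0.0.jar'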
metadata
CHANGED
@@ -1,93 +1,79 @@
 --- !ruby/object:Gem::Specification
 name: jruby-kafka
 version: !ruby/object:Gem::Version
-  version:
+  version: '3.0'
 platform: java
 authors:
 - Joseph Lawson
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-02-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name: concurrent-ruby
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - <
-      - !ruby/object:Gem::Version
-        version: '2.0'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - <
-      - !ruby/object:Gem::Version
-        version: '2.0'
-- !ruby/object:Gem::Dependency
-  name: jar-dependencies
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
         version: 0.3.2
-
+  name: jar-dependencies
   prerelease: false
+  type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
         version: 0.3.2
 - !ruby/object:Gem::Dependency
-  name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
         version: '10.5'
-
+  name: rake
   prerelease: false
+  type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
         version: '10.5'
 - !ruby/object:Gem::Dependency
-  name: rspec
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
         version: '3.4'
-
+  name: rspec
   prerelease: false
+  type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
         version: '3.4'
 - !ruby/object:Gem::Dependency
-  name: ruby-maven
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
         version: '3.3'
-
+  name: ruby-maven
   prerelease: false
+  type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
         version: '3.3'
-description:
+description: this is primarily to be used as an interface for logstash
 email:
 - joe@joekiller.com
 executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- lib/com/101tec/zkclient/0.
+- lib/com/101tec/zkclient/0.7/zkclient-0.7.jar
 - lib/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar
 - lib/io/netty/netty/3.7.0.Final/netty-3.7.0.Final.jar
 - lib/jline/jline/0.9.94/jline-0.9.94.jar
@@ -97,18 +83,18 @@ files:
 - lib/jruby-kafka/kafka-producer.rb
 - lib/jruby-kafka/namespace.rb
 - lib/jruby-kafka/producer.rb
-- lib/jruby-kafka/
+- lib/jruby-kafka/version.rb
 - lib/jruby-kafka_jars.rb
 - lib/junit/junit/3.8.1/junit-3.8.1.jar
 - lib/log4j/log4j/1.2.17/log4j-1.2.17.jar
 - lib/net/jpountz/lz4/lz4/1.2.0/lz4-1.2.0.jar
 - lib/net/sf/jopt-simple/jopt-simple/3.2/jopt-simple-3.2.jar
-- lib/org/apache/kafka/kafka-clients/0.
-- lib/org/apache/kafka/kafka_2.11/0.
+- lib/org/apache/kafka/kafka-clients/0.9.0.0/kafka-clients-0.9.0.0.jar
+- lib/org/apache/kafka/kafka_2.11/0.9.0.0/kafka_2.11-0.9.0.0.jar
 - lib/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar
-- lib/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.
-- lib/org/scala-lang/modules/scala-xml_2.11/1.0.
-- lib/org/scala-lang/scala-library/2.11.
+- lib/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar
+- lib/org/scala-lang/modules/scala-xml_2.11/1.0.4/scala-xml_2.11-1.0.4.jar
+- lib/org/scala-lang/scala-library/2.11.7/scala-library-2.11.7.jar
 - lib/org/slf4j/slf4j-api/1.7.13/slf4j-api-1.7.13.jar
 - lib/org/slf4j/slf4j-log4j12/1.7.13/slf4j-log4j12-1.7.13.jar
 - lib/org/xerial/snappy/snappy-java/1.1.1.7/snappy-java-1.1.1.7.jar
@@ -116,26 +102,26 @@ homepage: https://github.com/joekiller/jruby-kafka
 licenses:
 - Apache 2.0
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - '>='
     - !ruby/object:Gem::Version
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - '>='
     - !ruby/object:Gem::Version
       version: '0'
 requirements:
-- jar 'org.apache.kafka:kafka_2.11', '0.
+- jar 'org.apache.kafka:kafka_2.11', '0.9.0.0'
 - jar 'org.slf4j:slf4j-log4j12', '1.7.13'
-rubyforge_project:
+rubyforge_project:
 rubygems_version: 2.4.5
-signing_key:
+signing_key:
 specification_version: 4
 summary: jruby Kafka wrapper
 test_files: []
data/lib/jruby-kafka/utility.rb
DELETED
@@ -1,20 +0,0 @@
-require 'java'
-require 'jruby-kafka/namespace'
-
-class Kafka::Utility
-  def self.java_properties(properties)
-    java_properties = java.util.Properties.new
-    properties.each do |k,v|
-      k = k.to_s.gsub '_', '.'
-      v = v.to_s
-      java_properties.setProperty k, v
-    end
-    java_properties
-  end
-
-  def self.validate_arguments(required_options, options)
-    required_options.each do |opt|
-      raise ArgumentError, "Parameter :#{opt} is required." unless options[opt]
-    end
-  end
-end
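Kafka::Utility disappears because both halves were inlined: java_properties became the private create_config / create_producer_config methods shown above, and validate_arguments became per-class validation. The underscore-to-dot convention it established still governs every option key; a standalone sketch of that conversion (the sample keys are illustrative):

require 'java'

# Underscored Ruby option keys become dotted Java property names.
props = java.util.Properties.new
{ zookeeper_connect: 'localhost:2181', group_id: 'example-group' }.each do |k, v|
  props.setProperty(k.to_s.gsub('_', '.'), v.to_s)
end
props.getProperty('zookeeper.connect')  # => "localhost:2181"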