jruby-kafka 3.2-java → 3.3-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 5d8bd0569f81f64429f450dfc7fb1ba4535da82b
-  data.tar.gz: 5bacffc343af3b196e2841f49b68cd8c1c6e80dc
+  metadata.gz: 4d17ccab91e382e75bded41bc235dfcf54384034
+  data.tar.gz: 0942e26a59f26f83b4e329cf832efd862aa450b2
 SHA512:
-  metadata.gz: da3dcfb455b352a7ac92ab2fcc77be7c69a7d675a34c1ebf7ad30c48a2fc356e492478c4d2d34f3b1502149b2fe0d9dfba60eb10a14401386568a40ff68a595d
-  data.tar.gz: 279da18f0d1cedb2db4e04956ee585a21302894330149d531e6ab5c42a924cec145f852aa54ea2e5284a7c2a3d4e3d21ffddb190ea3832b06aa905dc1d4b3acb
+  metadata.gz: f6865c7fb4f91557852f6464e4b5f6739727082e7b07ca387667b0d925e173c59e8dc2a1a6b8bcad4546c9e234080754a7f6916722a000eea7c8aa8a4800c904
+  data.tar.gz: ecb86c2213e38e6623f7fdc93605820ca19b7810a82a205c6033d6e093ff62fe3202ee902e5221724655f3e6699736e7692de2d7e40fa03eba4acb49180f148f
data/lib/jruby-kafka.rb CHANGED
@@ -4,3 +4,4 @@ require 'jruby-kafka/consumer'
 require 'jruby-kafka/producer'
 require 'jruby-kafka/kafka-producer'
 require 'jruby-kafka/kafka-consumer'
+require 'jruby-kafka/utility'
data/lib/jruby-kafka/consumer.rb CHANGED
@@ -1,8 +1,14 @@
 require 'java'
 require 'jruby-kafka/namespace'
+require 'jruby-kafka/utility'
 
 class Kafka::Consumer
   java_import 'org.I0Itec.zkclient.exception.ZkException'
+  java_import 'kafka.consumer.ConsumerConfig'
+  java_import 'kafka.consumer.Consumer'
+  java_import 'kafka.consumer.Whitelist'
+  java_import 'kafka.consumer.Blacklist'
+  java_import 'kafka.utils.ZkUtils'
   # Create a Kafka high-level consumer.
   #
   # @param [Hash] config the consumer configuration.
@@ -41,7 +47,7 @@ class Kafka::Consumer
     @msg_decoder = @properties.delete(:msg_decoder) || 'kafka.serializer.DefaultDecoder'
     @reset_beginning = @properties.delete :reset_beginning
 
-    @consumer = Java::KafkaConsumer::Consumer.createJavaConsumerConnector create_config
+    @consumer = Consumer.createJavaConsumerConnector ConsumerConfig.new Kafka::Utility.java_properties @properties
   end
 
   # Start fetching messages.
@@ -56,7 +62,7 @@
   def message_streams
     begin
       if @reset_beginning == 'from-beginning'
-        Java::kafka::utils::ZkUtils.maybeDeletePath(@properties[:zookeeper_connect], "/consumers/#{@properties[:group_id]}")
+        ZkUtils.maybeDeletePath(@properties[:zookeeper_connect], "/consumers/#{@properties[:group_id]}")
       end
     rescue ZkException => e
       raise KafkaError.new(e), "Got ZkException: #{e}"
@@ -74,8 +80,8 @@
 
     else
       filter = @include_topics ?
-        Java::KafkaConsumer::Whitelist.new(@include_topics) :
-        Java::KafkaConsumer::Blacklist.new(@exclude_topics)
+        Whitelist.new(@include_topics) :
+        Blacklist.new(@exclude_topics)
 
       @consumer.
         createMessageStreamsByFilter(filter, @num_streams, key_decoder_i, msg_decoder_i).
@@ -118,15 +124,5 @@ class Kafka::Consumer
       end
     end
   end
-
-  def create_config
-    properties = java.util.Properties.new
-    @properties.each do |k,v|
-      k = k.to_s.gsub '_', '.'
-      v = v.to_s
-      properties.setProperty k, v
-    end
-    Java::KafkaConsumer::ConsumerConfig.new properties
-  end
 end
 
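The net effect for Kafka::Consumer: the Java classes are imported once at the top, and the former create_config helper gives way to the shared Kafka::Utility.java_properties conversion. A sketch of the unchanged public construction path, assuming the Kafka 0.8-era jars are on the classpath (option values are illustrative):

    require 'jruby-kafka'

    # Underscored keys are rewritten to Kafka's dotted names
    # by Kafka::Utility.java_properties (:group_id -> group.id).
    consumer = Kafka::Consumer.new(
      zookeeper_connect: 'localhost:2181',
      group_id:          'logstash',
      include_topics:    'events'   # feeds the Whitelist filter above
    )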
data/lib/jruby-kafka/kafka-consumer.rb CHANGED
@@ -1,8 +1,14 @@
 require 'java'
 require 'jruby-kafka/namespace'
-require "concurrent"
+require 'concurrent'
+require 'jruby-kafka/utility'
 
 class Kafka::KafkaConsumer
+
+  REQUIRED = [
+    :bootstrap_servers, :key_deserializer, :value_deserializer
+  ]
+
   KAFKA_CONSUMER = Java::org.apache.kafka.clients.consumer.KafkaConsumer
   # Create a Kafka high-level consumer.
   #
@@ -19,11 +25,11 @@ class Kafka::KafkaConsumer
   # https://kafka.apache.org/090/javadoc/org/apache/kafka/clients/consumer/ConsumerConfig.html.
   #
   def initialize(config={})
-    validate_arguments config
+    Kafka::Utility.validate_arguments REQUIRED, config
    @properties = config.clone
     @topics = @properties.delete :topics
     @stop_called = Concurrent::AtomicBoolean.new(false)
-    @consumer = KAFKA_CONSUMER.new(create_config)
+    @consumer = KAFKA_CONSUMER.new(Kafka::Utility.java_properties @properties)
     @subscribed = false
     subscribe
   end
@@ -55,23 +61,5 @@
   def close
     @consumer.close
   end
-
-  private
-
-  def validate_arguments(options)
-    [:bootstrap_servers, :key_deserializer, :value_deserializer].each do |opt|
-      raise ArgumentError, "Parameter :#{opt} is required." unless options[opt]
-    end
-  end
-
-  def create_config
-    properties = java.util.Properties.new
-    @properties.each do |k,v|
-      k = k.to_s.gsub '_', '.'
-      v = v.to_s
-      properties.setProperty k, v
-    end
-    properties
-  end
 end
 
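Kafka::KafkaConsumer now validates against the REQUIRED constant through the shared helper, so a missing key raises the same ArgumentError the old private method did. A sketch (broker address and topic are illustrative; the deserializer class names are the stock Kafka 0.9 string deserializers):

    require 'jruby-kafka'

    consumer = Kafka::KafkaConsumer.new(
      bootstrap_servers:  'localhost:9092',
      key_deserializer:   'org.apache.kafka.common.serialization.StringDeserializer',
      value_deserializer: 'org.apache.kafka.common.serialization.StringDeserializer',
      group_id:           'demo',
      topics:             ['events']  # deleted from the hash before it reaches Kafka
    )
    # Omitting any of the three REQUIRED keys raises, e.g.:
    #   ArgumentError: Parameter :value_deserializer is required.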
data/lib/jruby-kafka/kafka-producer.rb CHANGED
@@ -1,5 +1,6 @@
 require 'jruby-kafka/namespace'
 require 'jruby-kafka/error'
+require 'jruby-kafka/utility'
 
 # noinspection JRubyStringImportInspection
 class Kafka::KafkaProducer
@@ -7,25 +8,8 @@ class Kafka::KafkaProducer
   java_import 'org.apache.kafka.clients.producer.Callback'
   KAFKA_PRODUCER = Java::org.apache.kafka.clients.producer.KafkaProducer
 
-  VALIDATIONS = {
-    :'required.codecs' => %w[
-      none gzip snappy lz4
-    ]
-  }
-
-  REQUIRED = %w[
-    bootstrap.servers key.serializer
-  ]
-
-  KNOWN = %w[
-    acks batch.size block.on.buffer.full
-    bootstrap.servers buffer.memory client.id
-    compression.type key.serializer linger.ms
-    max.in.flight.requests.per.connection max.request.size
-    metadata.fetch.timeout.ms metadata.max.age.ms metric.reporters
-    metrics.num.samples metrics.sample.window.ms receive.buffer.bytes
-    reconnect.backoff.ms retries retry.backoff.ms
-    send.buffer.bytes timeout.ms value.serializer
+  REQUIRED = [
+    :bootstrap_servers, :key_serializer
   ]
 
   class RubyCallback
@@ -43,18 +27,13 @@
   attr_reader :producer, :send_method, :send_cb_method, :options
 
   def initialize(opts = {})
-    @options = opts.reduce({}) do |opts_array, (k, v)|
-      unless v.nil?
-        opts_array[k.to_s.gsub(/_/, '.')] = v
-      end
-      opts_array
-    end
-    validate_arguments
+    Kafka::Utility.validate_arguments REQUIRED, opts
+    @options = opts
     @send_method = @send_cb_method = proc { throw StandardError.new 'Producer is not connected' }
   end
 
   def connect
-    @producer = KAFKA_PRODUCER.new(create_producer_config)
+    @producer = KAFKA_PRODUCER.new(Kafka::Utility.java_properties @options)
     @send_method = producer.java_method :send, [ProducerRecord]
     @send_cb_method = producer.java_method :send, [ProducerRecord, Callback.java_class]
   end
@@ -71,24 +50,4 @@
   def close
     @producer.close
   end
-
-  private
-
-  def validate_arguments
-    errors = []
-    missing = REQUIRED.reject { |opt| options[opt] }
-    errors = ["Required settings: #{ missing.join(', ')}"] if missing.any?
-    invalid = VALIDATIONS.reject { |opt, valid| options[opt].nil? or valid.include? options[opt].to_s }
-    errors += invalid.map { |opt, valid| "#{ opt } should be one of: [#{ valid.join(', ')}]" }
-    fail StandardError.new "Invalid configuration arguments: #{ errors.join('; ') }" if errors.any?
-    options.keys.each do |opt|
-      STDERR.puts "WARNING: Unknown configuration key: #{opt}" unless KNOWN.include? opt
-    end
-  end
-
-  def create_producer_config
-    properties = java.util.Properties.new
-    options.each { |opt, value| properties.put opt, value.to_s }
-    properties
-  end
 end
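Two behavior changes ride along with the deduplication in Kafka::KafkaProducer: the codec whitelist and the unknown-key warning are dropped entirely, and options are now expected as underscore symbols (the dotted-string rewrite that used to happen in initialize now happens in connect, and nil values are no longer filtered out). A sketch under those assumptions (values illustrative):

    require 'jruby-kafka'

    producer = Kafka::KafkaProducer.new(
      bootstrap_servers: 'localhost:9092',  # REQUIRED
      key_serializer:    'org.apache.kafka.common.serialization.StringSerializer',  # REQUIRED
      value_serializer:  'org.apache.kafka.common.serialization.StringSerializer'
    )
    producer.connect  # options become java.util.Properties only at this point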
data/lib/jruby-kafka/producer.rb CHANGED
@@ -1,6 +1,7 @@
 # basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/0.8.0+Producer+Example
 require 'jruby-kafka/namespace'
 require 'jruby-kafka/error'
+require 'jruby-kafka/utility'
 
 # noinspection JRubyStringImportInspection
 class Kafka::Producer
@@ -8,30 +9,9 @@ class Kafka::Producer
   java_import 'kafka.producer.ProducerConfig'
   java_import 'kafka.producer.KeyedMessage'
   KAFKA_PRODUCER = Java::kafka.javaapi.producer.Producer
-  java_import 'kafka.message.NoCompressionCodec'
-  java_import 'kafka.message.GZIPCompressionCodec'
-  java_import 'kafka.message.SnappyCompressionCodec'
 
-  VALIDATIONS = {
-    :'request.required.acks' => %w[ 0 1 -1 ],
-    :'required.codecs' => [NoCompressionCodec.name, GZIPCompressionCodec.name, SnappyCompressionCodec.name],
-    :'producer.type' => %w[ sync async ]
-  }
-
-  REQUIRED = %w[
-    metadata.broker.list
-  ]
-
-  # List of all available options extracted from http://kafka.apache.org/documentation.html#producerconfigs Apr. 27, 2014
-  # If new options are added, they should just work. Please add them to the list so that we can get handy warnings.
-  KNOWN = %w[
-    metadata.broker.list request.required.acks request.timeout.ms
-    producer.type serializer.class key.serializer.class
-    partitioner.class compression.codec compressed.topics
-    message.send.max.retries retry.backoff.ms topic.metadata.refresh.interval.ms
-    queue.buffering.max.ms queue.buffering.max.messages queue.enqueue.timeout.ms
-    batch.num.messages send.buffer.bytes client.id
-    broker.list serializer.encoding
+  REQUIRED = [
+    :metadata_broker_list
   ]
 
   attr_reader :producer, :send_method, :options
@@ -41,27 +21,22 @@
   # options:
   # metadata_broker_list: ["localhost:9092"] - REQUIRED: a seed list of kafka brokers
   def initialize(opts = {})
-    @options = opts.reduce({}) do |opts_array, (k, v)|
-      unless v.nil?
-        opts_array[k.to_s.gsub(/_/, '.')] = v
-      end
-      opts_array
+    @options = opts
+    if options[:broker_list]
+      options[:metadata_broker_list] = options.delete :broker_list
     end
-    if options['broker.list']
-      options['metadata.broker.list'] = options.delete 'broker.list'
+    if options[:metadata_broker_list].is_a? Array
+      options[:metadata_broker_list] = options[:metadata_broker_list].join(',')
     end
-    if options['metadata.broker.list'].is_a? Array
-      options['metadata.broker.list'] = options['metadata.broker.list'].join(',')
+    if options[:compressed_topics].is_a? Array
+      options[:compressed_topics] = options[:compressed_topics].join(',')
     end
-    if options['compressed.topics'].is_a? Array
-      options['compressed.topics'] = options['compressed.topics'].join(',')
-    end
-    validate_arguments
+    Kafka::Utility.validate_arguments REQUIRED, options
     @send_method = proc { throw StandardError.new 'Producer is not connected' }
   end
 
   def connect
-    @producer = KAFKA_PRODUCER.new(create_producer_config)
+    @producer = KAFKA_PRODUCER.new(ProducerConfig.new Kafka::Utility.java_properties @options)
     @send_method = producer.java_method :send, [KeyedMessage]
   end
 
@@ -78,24 +53,4 @@
   def close
     @producer.close
   end
-
-  private
-
-  def validate_arguments
-    errors = []
-    missing = REQUIRED.reject { |opt| options[opt] }
-    errors = ["Required settings: #{ missing.join(', ')}"] if missing.any?
-    invalid = VALIDATIONS.reject { |opt, valid| options[opt].nil? or valid.include? options[opt].to_s }
-    errors += invalid.map { |opt, valid| "#{ opt } should be one of: [#{ valid.join(', ')}]" }
-    fail StandardError.new "Invalid configuration arguments: #{ errors.join('; ') }" if errors.any?
-    options.keys.each do |opt|
-      STDERR.puts "WARNING: Unknown configuration key: #{opt}" unless KNOWN.include? opt
-    end
-  end
-
-  def create_producer_config
-    properties = java.util.Properties.new
-    options.each { |opt, value| properties.put opt, value.to_s }
-    ProducerConfig.new(properties)
-  end
 end
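Kafka::Producer keeps its aliasing and array handling, re-keyed from dotted strings to underscore symbols: :broker_list is renamed to :metadata_broker_list before the Array check, so either spelling may be passed as an array and is joined into a CSV string. A sketch (broker addresses illustrative; send_msg(topic, key, message) is the send call the gem's README documents):

    require 'jruby-kafka'

    producer = Kafka::Producer.new(
      broker_list:      ['localhost:9092', 'localhost:9093'],  # -> metadata.broker.list=localhost:9092,localhost:9093
      serializer_class: 'kafka.serializer.StringEncoder'
    )
    producer.connect
    producer.send_msg('events', nil, 'hello')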
data/lib/jruby-kafka/utility.rb ADDED
@@ -0,0 +1,20 @@
+require 'java'
+require 'jruby-kafka/namespace'
+
+class Kafka::Utility
+  def self.java_properties(properties)
+    java_properties = java.util.Properties.new
+    properties.each do |k,v|
+      k = k.to_s.gsub '_', '.'
+      v = v.to_s
+      java_properties.setProperty k, v
+    end
+    java_properties
+  end
+
+  def self.validate_arguments(required_options, options)
+    required_options.each do |opt|
+      raise ArgumentError, "Parameter :#{opt} is required." unless options[opt]
+    end
+  end
+end
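These two class methods consolidate the near-identical private helpers previously duplicated across Consumer, KafkaConsumer, KafkaProducer, and Producer. A minimal sketch of their behavior, assuming a JRuby runtime with the gem on the load path (option values are illustrative):

    require 'jruby-kafka/utility'

    opts = { bootstrap_servers: 'localhost:9092', group_id: 'demo' }

    # Underscored symbol keys become dotted property names; values are stringified.
    props = Kafka::Utility.java_properties(opts)
    props.getProperty('bootstrap.servers')  # => "localhost:9092"
    props.getProperty('group.id')           # => "demo"

    # Required keys are checked by presence and fail fast:
    Kafka::Utility.validate_arguments([:key_serializer], opts)
    # => ArgumentError: Parameter :key_serializer is required.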
data/lib/jruby-kafka/version.rb CHANGED
@@ -1,4 +1,4 @@
 module Kafka
-  VERSION = '3.2'.freeze
+  VERSION = '3.3'.freeze
   JAR_DEPENDENCIES_VERSION = '0.3.2'.freeze
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: jruby-kafka
 version: !ruby/object:Gem::Version
-  version: '3.2'
+  version: '3.3'
 platform: java
 authors:
 - Joseph Lawson
@@ -98,6 +98,7 @@ files:
 - lib/jruby-kafka/kafka-producer.rb
 - lib/jruby-kafka/namespace.rb
 - lib/jruby-kafka/producer.rb
+- lib/jruby-kafka/utility.rb
 - lib/jruby-kafka/version.rb
 - lib/jruby-kafka_jars.rb
 - lib/junit/junit/3.8.1/junit-3.8.1.jar