jruby-kafka 0.0.12 → 0.1.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 845abef79fc7de08bcbbd20a8de93271fed609e0
- data.tar.gz: 01238f1173d6cca7ccade3968d1847c738bf2290
+ metadata.gz: 757a58d558b7fce703472286dfb4a66aa180f2b7
+ data.tar.gz: 476ddc498398b11bb4c4e0b93f193ca77151c0a4
  SHA512:
- metadata.gz: a862036fd6264456f42a0a993fd985d37b6029f684edf2e03c11d069fd706cfaa6da94d9b7305b1e88fa934ebae3108b68a716a2ea94941ee08dc50144626804
- data.tar.gz: 40c0ce24de712ed6cee5868137628f7070bae96135f02fbbb583c549bf2b2c0c05ac47dabbfc0122fc355d4272bd4c89586644f506bd42c4cfb8a032253c182d
+ metadata.gz: 6e5fcb1d6247fbf889c841dc0d6d68a6874c1566b644a72f11b35730d1043db29feec2b1ac1df9dae99ec0cffa919b079817771417862eb921b96cf5ba207275
+ data.tar.gz: c6a86417f4fee5919c5112214af5ff55f245384d7273da1b6b6e62660a46dd2b289e89f8d5c0ea65998249c601e75e119b982105411ecd89600ae96b11f2db50
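These digests can be checked against a downloaded gem. A minimal sketch using Ruby's standard Digest library; nothing here is part of the gem itself, and it assumes the .gem archive has already been unpacked so metadata.gz and data.tar.gz are on disk:

    require 'digest'

    # A .gem file is a plain tar archive containing metadata.gz and
    # data.tar.gz; unpack it first, e.g. `tar -xf jruby-kafka-0.1.0.gem`,
    # then compare the printed digests with checksums.yaml above.
    %w[metadata.gz data.tar.gz].each do |member|
      puts "#{member} SHA1:   #{Digest::SHA1.file(member).hexdigest}"
      puts "#{member} SHA512: #{Digest::SHA512.file(member).hexdigest}"
    end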
lib/jruby-kafka/group.rb CHANGED
@@ -55,6 +55,7 @@ class Kafka::Group
  @consumer_timeout_ms = '-1'
  @consumer_restart_on_error = "#{false}"
  @consumer_restart_sleep_ms = '0'
+ @consumer_id = nil
 
  if options[:zk_connect_timeout]
  @zk_connect_timeout = "#{options[:zk_connect_timeout]}"
@@ -129,6 +130,10 @@ class Kafka::Group
  @auto_offset_reset = 'largest'
  end
  end
+
+ if options[:consumer_id]
+ @consumer_id = options[:consumer_id]
+ end
  end
 
  private
@@ -203,6 +208,9 @@ class Kafka::Group
  properties.put("fetch.wait.max.ms", @fetch_wait_max_ms)
  properties.put("refresh.leader.backoff.ms", @refresh_leader_backoff_ms)
  properties.put("consumer.timeout.ms", @consumer_timeout_ms)
+ unless @consumer_id.nil?
+ properties.put('consumer.id', @consumer_id)
+ end
  return Java::kafka::consumer::ConsumerConfig.new(properties)
  end
  end
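The net effect of these three hunks: Kafka::Group now accepts a :consumer_id option and forwards it to the underlying kafka.consumer.ConsumerConfig as the consumer.id property, but only when one was supplied. A minimal usage sketch; the :zk_connect, :group_id, and :topic_id options are assumed from the gem's existing consumer interface and are not part of this diff:

    require 'jruby-kafka'

    # Only :consumer_id is new in 0.1.0; when omitted, the 'consumer.id'
    # property is left unset and Kafka assigns one automatically.
    group = Kafka::Group.new(
      :zk_connect  => 'localhost:2181',  # assumed existing option
      :group_id    => 'logstash',        # assumed existing option
      :topic_id    => 'events',          # assumed existing option
      :consumer_id => 'worker-1'         # new: forwarded as 'consumer.id'
    )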
lib/jruby-kafka/producer.rb CHANGED
@@ -8,28 +8,41 @@ require "jruby-kafka/error"
  java_import 'kafka.common.FailedToSendMessageException'
 
  class Kafka::Producer
- @topic
-
  # Create a Kafka Producer
  #
  # options:
- # :topic_id => "topic" - REQUIRED: The topic id to consume on.
  # :broker_list => "localhost:9092" - REQUIRED: a seed list of kafka brokers
  def initialize(options={})
  validate_required_arguments(options)
 
- @brokers = options[:broker_list]
- @serializer_class = 'kafka.serializer.StringEncoder'
+ @metadata_broker_list = options[:broker_list]
+ @serializer_class = nil
  @partitioner_class = nil
- @request_required_acks = '0'
- @compression_codec = "#{Java::kafka::message::NoCompressionCodec.name}"
- @compressed_topics = ''
+ @request_required_acks = nil
+ @compression_codec = nil
+ @compressed_topics = nil
+ @request_timeout_ms = nil
+ @producer_type = nil
+ @key_serializer_class = nil
+ @message_send_max_retries = nil
+ @retry_backoff_ms = nil
+ @topic_metadata_refresh_interval_ms = nil
+ @queue_buffering_max_ms = nil
+ @queue_buffering_max_messages = nil
+ @queue_enqueue_timeout_ms = nil
+ @batch_num_messages = nil
+ @send_buffer_bytes = nil
+ @client_id = nil
 
  if options[:partitioner_class]
  @partitioner_class = "#{options[:partitioner_class]}"
  end
 
  if options[:request_required_acks]
+ valid_acks = %w{ 0 1 -1 }
+ if not valid_acks.include? "#{options[:request_required_acks]}"
+ raise(ArgumentError, "#{options[:request_required_acks]} is not a valid request_required_acks value: #{valid_acks}")
+ end
  @request_required_acks = "#{options[:request_required_acks]}"
  end
 
@@ -37,16 +50,72 @@ class Kafka::Producer
  required_codecs = ["#{Java::kafka::message::NoCompressionCodec.name}",
  "#{Java::kafka::message::GZIPCompressionCodec.name}",
  "#{Java::kafka::message::SnappyCompressionCodec.name}"]
- if not required_codecs.include? options[:compression_codec]
+ if not required_codecs.include? "#{options[:compression_codec]}"
  raise(ArgumentError, "#{options[:compression_codec]} is not one of required codecs: #{required_codecs}")
  end
- @compression_codec = options[:compression_codec]
+ @compression_codec = "#{options[:compression_codec]}"
  end
 
  if options[:compressed_topics]
  if @compression_codec != 'none'
- @compressed_topics = options[:compressed_topics]
+ @compressed_topics = "#{options[:compressed_topics]}"
+ end
+ end
+
+ if options[:request_timeout_ms]
+ @request_timeout_ms = "#{options[:request_timeout_ms]}"
+ end
+
+ if options[:producer_type]
+ valid_producer_types = %w{ sync async }
+ if not valid_producer_types.include? "#{options[:producer_type]}"
+ raise(ArgumentError, "#{options[:producer_type]} is not a valid producer type: #{valid_producer_types}")
  end
+ @producer_type = "#{options[:producer_type]}"
+ end
+
+ if options[:serializer_class]
+ @serializer_class = "#{options[:serializer_class]}"
+ end
+
+ if options[:key_serializer_class]
+ @key_serializer_class = "#{options[:key_serializer_class]}"
+ end
+
+ if options[:message_send_max_retries]
+ @message_send_max_retries = "#{options[:message_send_max_retries]}"
+ end
+
+ if options[:retry_backoff_ms]
+ @retry_backoff_ms = "#{options[:retry_backoff_ms]}"
+ end
+
+ if options[:topic_metadata_refresh_interval_ms]
+ @topic_metadata_refresh_interval_ms = "#{options[:topic_metadata_refresh_interval_ms]}"
+ end
+
+ if options[:queue_buffering_max_ms]
+ @queue_buffering_max_ms = "#{options[:queue_buffering_max_ms]}"
+ end
+
+ if options[:queue_buffering_max_messages]
+ @queue_buffering_max_messages = "#{options[:queue_buffering_max_messages]}"
+ end
+
+ if options[:queue_enqueue_timeout_ms]
+ @queue_enqueue_timeout_ms = "#{options[:queue_enqueue_timeout_ms]}"
+ end
+
+ if options[:batch_num_messages]
+ @batch_num_messages = "#{options[:batch_num_messages]}"
+ end
+
+ if options[:send_buffer_bytes]
+ @send_buffer_bytes = "#{options[:send_buffer_bytes]}"
+ end
+
+ if options[:client_id]
+ @client_id = "#{options[:client_id]}"
  end
  end
 
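Construction-time validation is new here: a bad :request_required_acks, :producer_type, or :compression_codec now raises an ArgumentError immediately instead of failing later inside the Java client. A short sketch using the option names from this diff (values chosen for illustration):

    require 'jruby-kafka'

    producer = Kafka::Producer.new(
      :broker_list           => 'localhost:9092',
      :request_required_acks => 1,        # must stringify to '0', '1', or '-1'
      :producer_type         => 'async',  # must be 'sync' or 'async'
      :compression_codec     => 'snappy'  # 'none', 'gzip', or 'snappy'
    )

    # Invalid values now fail fast:
    Kafka::Producer.new(:broker_list => 'localhost:9092',
                        :request_required_acks => 2)
    # => ArgumentError: 2 is not a valid request_required_acks value: ["0", "1", "-1"]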
@@ -79,14 +148,58 @@ class Kafka::Producer
  def createProducerConfig()
  # TODO lots more options avaiable here: http://kafka.apache.org/documentation.html#producerconfigs
  properties = java.util.Properties.new()
- properties.put("metadata.broker.list", @brokers)
- properties.put("request.required.acks", @request_required_acks)
- if not @partitioner_class.nil?
+ properties.put("metadata.broker.list", @metadata_broker_list)
+ unless @request_required_acks.nil?
+ properties.put("request.required.acks", @request_required_acks)
+ end
+ unless @partitioner_class.nil?
  properties.put("partitioner.class", @partitioner_class)
  end
- properties.put("serializer.class", @serializer_class)
- properties.put("compression.codec", @compression_codec)
- properties.put("compressed.topics", @compressed_topics)
+ unless @key_serializer_class.nil?
+ properties.put("key.serializer.class", @key_serializer_class)
+ end
+ unless @request_timeout_ms.nil?
+ properties.put("request.timeout.ms", @request_timeout_ms)
+ end
+ unless @producer_type.nil?
+ properties.put('producer.type', @producer_type)
+ end
+ unless @serializer_class.nil?
+ properties.put("serializer.class", @serializer_class)
+ end
+ unless @compression_codec.nil?
+ properties.put("compression.codec", @compression_codec)
+ end
+ unless @compressed_topics.nil?
+ properties.put("compressed.topics", @compressed_topics)
+ end
+ unless @message_send_max_retries.nil?
+ properties.put("message.send.max.retries", @message_send_max_retries)
+ end
+ unless @retry_backoff_ms.nil?
+ properties.put('retry.backoff.ms', @retry_backoff_ms)
+ end
+ unless @topic_metadata_refresh_interval_ms.nil?
+ properties.put('topic.metadata.refresh.interval.ms', @topic_metadata_refresh_interval_ms)
+ end
+ unless @queue_buffering_max_ms.nil?
+ properties.put('queue.buffering.max.ms', @queue_buffering_max_ms)
+ end
+ unless @queue_buffering_max_messages.nil?
+ properties.put('queue.buffering.max.messages', @queue_buffering_max_messages)
+ end
+ unless @queue_enqueue_timeout_ms.nil?
+ properties.put('queue.enqueue.timeout.ms', @queue_enqueue_timeout_ms)
+ end
+ unless @batch_num_messages.nil?
+ properties.put('batch.num.messages', @batch_num_messages)
+ end
+ unless @send_buffer_bytes.nil?
+ properties.put('send.buffer.bytes', @send_buffer_bytes)
+ end
+ unless @client_id.nil?
+ properties.put('client.id', @client_id)
+ end
  return Java::kafka::producer::ProducerConfig.new(properties)
  end
  end
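Every optional setting now follows the same nil-guard pattern, so createProducerConfig only populates properties the caller explicitly set and leaves everything else to the Kafka client's own defaults. The repetition could be folded into a small helper; a hypothetical refactoring sketch (put_unless_nil is not part of the gem):

    # Hypothetical helper mirroring the repeated `unless @x.nil?` blocks above.
    def put_unless_nil(properties, key, value)
      properties.put(key, value) unless value.nil?
    end

    properties = java.util.Properties.new
    properties.put('metadata.broker.list', @metadata_broker_list)  # always set
    put_unless_nil(properties, 'request.required.acks', @request_required_acks)
    put_unless_nil(properties, 'producer.type', @producer_type)
    # ...and likewise for the remaining optional settings...
    Java::kafka::producer::ProducerConfig.new(properties)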
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: jruby-kafka
  version: !ruby/object:Gem::Version
- version: 0.0.12
+ version: 0.1.0
  platform: ruby
  authors:
  - Joseph Lawson
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-01-23 00:00:00.000000000 Z
+ date: 2014-04-16 00:00:00.000000000 Z
  dependencies: []
  description: this is primarily to be used as an interface for logstash
  email:
@@ -18,14 +18,11 @@ extensions: []
  extra_rdoc_files: []
  files:
  - lib/jruby-kafka.rb
- - lib/jruby-kafka/client.rb
- - lib/jruby-kafka/config.rb
  - lib/jruby-kafka/consumer.rb
  - lib/jruby-kafka/error.rb
  - lib/jruby-kafka/group.rb
  - lib/jruby-kafka/namespace.rb
  - lib/jruby-kafka/producer.rb
- - lib/kafka.rb
  homepage: https://github.com/joekiller/jruby-kafka
  licenses:
  - Apache 2.0
@@ -47,7 +44,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.2.1
+ rubygems_version: 2.2.2
  signing_key:
  specification_version: 4
  summary: jruby Kafka wrapper
lib/jruby-kafka/client.rb DELETED
@@ -1,6 +0,0 @@
- require "java"
- require "jruby-kafka/namespace"
-
- class Kafka::Client
-
- end
lib/jruby-kafka/config.rb DELETED
@@ -1,8 +0,0 @@
- require "java"
- require "jruby-kafka/namespace"
-
- class Kafka::Config
- def initialize(options={})
-
- end
- end
lib/kafka.rb DELETED
@@ -1,5 +0,0 @@
- require "jruby-kafka/client"
-
- module Kafka
-
- end