jruby-kafka 0.0.10 → 0.0.11

Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/jruby-kafka/producer.rb +21 -79
  3. metadata +8 -8
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: b854790fa9d012c35cb74d4ca6b6db7f5c568a2f
-  data.tar.gz: 5f98478a4cfc09730a1ce092108d72798b6e8075
+  metadata.gz: cc47e80137f834c4fe3b60e645448de109195051
+  data.tar.gz: 7dcefe6ff7852eb7e990368f24a41a5c9e5d61d1
 SHA512:
-  metadata.gz: b46e5f6cfa518c2b91a614b956d0a617d066aa80922c391ea1fa298f987e466b5a3199c5f97ff86f31cbcb5081f1e04786f836754d10f27b0d4ce8d93c183cb6
-  data.tar.gz: 69ea85ee730a74ccb3b7e3159701d090c521d301663d4ac9cd4dbc6cfeff6bc5c352d8fceea2530c68faa7d7438f8cd8a895faed3a5b25d7a9a8d9376424cecc
+  metadata.gz: 5f1569001ee9290610c2fc892cc59a8c5fe6fdbbccd8928e8006add149e5380ecca80a24f8830b9365299ff69c52fc12318b510d59e6a08aa47e534ef1adbe43
+  data.tar.gz: e26ac69b20b67360b600b62a0aad650322752ec136796fc4bf7425c363e7891902ecf6cc5489afd3e098a00f4559b0d6c3b161b385fc76479a62080a3accf510
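
For context, checksums.yaml sits inside the packaged .gem and records SHA1 and SHA512 digests of the archive's metadata.gz and data.tar.gz entries, so all four values change with every release. A minimal sketch for recomputing them after unpacking the gem; the .gem filename is an assumption:

  require 'digest'

  # A .gem is a plain tar archive; unpack it first, e.g.:
  #   tar -xf jruby-kafka-0.0.11.gem   # yields metadata.gz, data.tar.gz, checksums.yaml.gz
  %w[metadata.gz data.tar.gz].each do |entry|
    puts "SHA1   #{entry}: #{Digest::SHA1.file(entry).hexdigest}"
    puts "SHA512 #{entry}: #{Digest::SHA512.file(entry).hexdigest}"
  end
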
data/lib/jruby-kafka/producer.rb CHANGED
@@ -5,126 +5,68 @@ require "java"
 require "jruby-kafka/namespace"
 require "jruby-kafka/error"
 
-java_import 'org.I0Itec.zkclient.exception.ZkException'
+java_import 'kafka.common.FailedToSendMessageException'
 
 class Kafka::Producer
   @topic
-  @zk_connect
 
   # Create a Kafka Producer
   #
   # options:
-  # :zk_connect => "localhost:2181" - REQUIRED: The connection string for the
-  # zookeeper connection in the form host:port. Multiple URLS can be given to allow fail-over.
-  # :zk_connect_timeout => "6000" - (optional) The max time that the client waits while establishing a connection to zookeeper.
   # :topic_id => "topic" - REQUIRED: The topic id to consume on.
   # :broker_list => "localhost:9092" - REQUIRED: a seed list of kafka brokers
   def initialize(options={})
     validate_required_arguments(options)
 
-    @zk_connect = options[:zk_connect]
-    @topic = options[:topic_id]
     @brokers = options[:broker_list]
-    @zk_session_timeout = '6000'
-    @zk_connect_timeout = '6000'
-    @zk_sync_time = '2000'
-    @auto_offset_reset = 'largest'
-    @auto_commit_interval = '1000'
-    @running = false
-    @rebalance_max_retries = '4'
-    @rebalance_backoff_ms = '2000'
-    @socket_timeout_ms = "#{30 * 1000}"
-    @socket_receive_buffer_bytes = "#{64 * 1024}"
-    @auto_commit_enable = "#{true}"
-    @queued_max_message_chunks = '10'
-    @refresh_leader_backoff_ms = '200'
-    @consumer_timeout_ms = '-1'
+    @serializer_class = 'kafka.serializer.StringEncoder'
+    @partitioner_class = nil
+    @request_required_acks = '0'
 
-    if options[:zk_connect_timeout]
-      @zk_connect_timeout = "#{options[:zk_connect_timeout]}"
-    end
-    if options[:zk_session_timeout]
-      @zk_session_timeout = "#{options[:zk_session_timeout]}"
-    end
-    if options[:zk_sync_time]
-      @zk_sync_time = "#{options[:zk_sync_time]}"
-    end
-    if options[:auto_commit_interval]
-      @auto_commit_interval = "#{options[:auto_commit_interval]}"
-    end
-
-    if options[:rebalance_max_retries]
-      @rebalance_max_retries = "#{options[:rebalance_max_retries]}"
-    end
-
-    if options[:rebalance_backoff_ms]
-      @rebalance_backoff_ms = "#{options[:rebalance_backoff_ms]}"
-    end
-
-    if options[:socket_timeout_ms]
-      @socket_timeout_ms = "#{options[:socket_timeout_ms]}"
-    end
-
-    if options[:socket_receive_buffer_bytes]
-      @socket_receive_buffer_bytes = "#{options[:socket_receive_buffer_bytes]}"
-    end
-
-    if options[:auto_commit_enable]
-      @auto_commit_enable = "#{options[:auto_commit_enable]}"
-    end
-
-    if options[:refresh_leader_backoff_ms]
-      @refresh_leader_backoff_ms = "#{options[:refresh_leader_backoff_ms]}"
+    if options[:partitioner_class]
+      @partitioner_class = "#{options[:partitioner_class]}"
     end
 
-    if options[:consumer_timeout_ms]
-      @consumer_timeout_ms = "#{options[:consumer_timeout_ms]}"
+    if options[:request_required_acks]
+      @request_required_acks = "#{options[:request_required_acks]}"
     end
-
   end
 
   private
   def validate_required_arguments(options={})
-    [:zk_connect, :broker_list, :topic_id].each do |opt|
+    [:broker_list].each do |opt|
      raise(ArgumentError, "#{opt} is required.") unless options[opt]
     end
   end
 
-  public
-  def shutdown()
-    @running = false
-  end
-
   public
   def connect()
     @producer = Java::kafka::producer::Producer.new(createProducerConfig)
   end
 
   public
-  def sendMsg(key,msg)
-    m = Java::kafka::producer::KeyedMessage.new(topic=@topic,key=key, message=msg)
+  def sendMsg(topic, key, msg)
+    m = Java::kafka::producer::KeyedMessage.new(topic=topic, key=key, message=msg)
     #the send message for a producer is scala varargs, which doesn't seem to play nice w/ jruby
     # this is the best I could come up with
     ms = Java::scala::collection::immutable::Vector.new(0,0,0)
     ms = ms.append_front(m)
-    @producer.send(ms)
-  end
-
-  public
-  def running?
-    @running
+    begin
+      @producer.send(ms)
+    rescue FailedToSendMessageException => e
+      raise KafkaError.new(e), "Got FailedToSendMessageException: #{e}"
+    end
   end
 
   def createProducerConfig()
     # TODO lots more options avaiable here: http://kafka.apache.org/documentation.html#producerconfigs
     properties = java.util.Properties.new()
-    properties.put("zookeeper.connect", @zk_connect)
-    properties.put("zookeeper.connection.timeout.ms", @zk_connect_timeout)
-    properties.put("zookeeper.session.timeout.ms", @zk_session_timeout)
-    properties.put("zookeeper.sync.time.ms", @zk_sync_time)
-    properties.put("serializer.class", "kafka.serializer.StringEncoder")
-    properties.put("request.required.acks", "1")
     properties.put("metadata.broker.list", @brokers)
+    properties.put("request.required.acks", @request_required_acks)
+    if not @partitioner_class.nil?
+      properties.put("partitioner.class", @partitioner_class)
+    end
+    properties.put("serializer.class", @serializer_class)
     return Java::kafka::producer::ProducerConfig.new(properties)
   end
 end
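
The net effect of this change: the producer no longer talks to ZooKeeper at all (only :broker_list is required, feeding metadata.broker.list directly), the topic moves from the constructor to each sendMsg call, and a failed send now surfaces as the gem's KafkaError (defined in jruby-kafka/error) instead of a raw Java FailedToSendMessageException. Note that the :topic_id doc comment above initialize still says REQUIRED even though the option is no longer read. A minimal usage sketch against the 0.0.11 API, assuming the Kafka jars are already on the JRuby classpath; the broker address, topic, key, and message are placeholders:

  require 'jruby-kafka'

  # Only :broker_list is required now. :request_required_acks defaults to '0'
  # (fire-and-forget); '1' waits for the partition leader to acknowledge.
  producer = Kafka::Producer.new(
    :broker_list           => 'localhost:9092',
    :request_required_acks => '1'
  )
  producer.connect

  begin
    # The topic is now passed per message rather than fixed at construction.
    producer.sendMsg('test-topic', 'key-1', 'hello from 0.0.11')
  rescue KafkaError => e
    warn "send failed: #{e.message}"
  end
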
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: jruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.0.10
+  version: 0.0.11
 platform: ruby
 authors:
 - Joseph Lawson
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-01-13 00:00:00.000000000 Z
+date: 2014-01-17 00:00:00.000000000 Z
 dependencies: []
 description: this is primarily to be used as an interface for logstash
 email:
@@ -18,7 +18,6 @@ extensions: []
 extra_rdoc_files: []
 files:
 - lib/jruby-kafka.rb
-- lib/kafka.rb
 - lib/jruby-kafka/client.rb
 - lib/jruby-kafka/config.rb
 - lib/jruby-kafka/consumer.rb
@@ -26,11 +25,12 @@ files:
 - lib/jruby-kafka/group.rb
 - lib/jruby-kafka/namespace.rb
 - lib/jruby-kafka/producer.rb
+- lib/kafka.rb
 homepage: https://github.com/joekiller/jruby-kafka
 licenses:
 - Apache 2.0
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -46,9 +46,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.0.3
-signing_key:
+rubyforge_project:
+rubygems_version: 2.2.1
+signing_key:
 specification_version: 4
 summary: jruby Kafka wrapper
 test_files: []
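
To pull in the new API from Bundler, a one-line pin is enough. This is standard Bundler usage, not anything specific to this gem; the :platforms guard reflects that jruby-kafka wraps the Kafka Java client and so needs JRuby:

  # Gemfile
  gem 'jruby-kafka', '0.0.11', :platforms => :jruby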