jruby-kafka 0.0.10 → 0.0.11
- checksums.yaml +4 -4
- data/lib/jruby-kafka/producer.rb +21 -79
- metadata +8 -8
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cc47e80137f834c4fe3b60e645448de109195051
+  data.tar.gz: 7dcefe6ff7852eb7e990368f24a41a5c9e5d61d1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5f1569001ee9290610c2fc892cc59a8c5fe6fdbbccd8928e8006add149e5380ecca80a24f8830b9365299ff69c52fc12318b510d59e6a08aa47e534ef1adbe43
+  data.tar.gz: e26ac69b20b67360b600b62a0aad650322752ec136796fc4bf7425c363e7891902ecf6cc5489afd3e098a00f4559b0d6c3b161b385fc76479a62080a3accf510
data/lib/jruby-kafka/producer.rb
CHANGED

@@ -5,126 +5,68 @@ require "java"
 require "jruby-kafka/namespace"
 require "jruby-kafka/error"
 
-java_import '
+java_import 'kafka.common.FailedToSendMessageException'
 
 class Kafka::Producer
   @topic
-  @zk_connect
 
   # Create a Kafka Producer
   #
   # options:
-  # :zk_connect => "localhost:2181" - REQUIRED: The connection string for the
-  #   zookeeper connection in the form host:port. Multiple URLS can be given to allow fail-over.
-  # :zk_connect_timeout => "6000" - (optional) The max time that the client waits while establishing a connection to zookeeper.
   # :topic_id => "topic" - REQUIRED: The topic id to consume on.
   # :broker_list => "localhost:9092" - REQUIRED: a seed list of kafka brokers
   def initialize(options={})
     validate_required_arguments(options)
 
-    @zk_connect = options[:zk_connect]
-    @topic = options[:topic_id]
     @brokers = options[:broker_list]
-    @
-    @
-    @
-    @auto_offset_reset = 'largest'
-    @auto_commit_interval = '1000'
-    @running = false
-    @rebalance_max_retries = '4'
-    @rebalance_backoff_ms = '2000'
-    @socket_timeout_ms = "#{30 * 1000}"
-    @socket_receive_buffer_bytes = "#{64 * 1024}"
-    @auto_commit_enable = "#{true}"
-    @queued_max_message_chunks = '10'
-    @refresh_leader_backoff_ms = '200'
-    @consumer_timeout_ms = '-1'
+    @serializer_class = 'kafka.serializer.StringEncoder'
+    @partitioner_class = nil
+    @request_required_acks = '0'
 
-    if options[:
-      @
-    end
-    if options[:zk_session_timeout]
-      @zk_session_timeout = "#{options[:zk_session_timeout]}"
-    end
-    if options[:zk_sync_time]
-      @zk_sync_time = "#{options[:zk_sync_time]}"
-    end
-    if options[:auto_commit_interval]
-      @auto_commit_interval = "#{options[:auto_commit_interval]}"
-    end
-
-    if options[:rebalance_max_retries]
-      @rebalance_max_retries = "#{options[:rebalance_max_retries]}"
-    end
-
-    if options[:rebalance_backoff_ms]
-      @rebalance_backoff_ms = "#{options[:rebalance_backoff_ms]}"
-    end
-
-    if options[:socket_timeout_ms]
-      @socket_timeout_ms = "#{options[:socket_timeout_ms]}"
-    end
-
-    if options[:socket_receive_buffer_bytes]
-      @socket_receive_buffer_bytes = "#{options[:socket_receive_buffer_bytes]}"
-    end
-
-    if options[:auto_commit_enable]
-      @auto_commit_enable = "#{options[:auto_commit_enable]}"
-    end
-
-    if options[:refresh_leader_backoff_ms]
-      @refresh_leader_backoff_ms = "#{options[:refresh_leader_backoff_ms]}"
+    if options[:partitioner_class]
+      @partitioner_class = "#{options[:partitioner_class]}"
     end
 
-    if options[:
-      @
+    if options[:request_required_acks]
+      @request_required_acks = "#{options[:request_required_acks]}"
     end
-
   end
 
   private
   def validate_required_arguments(options={})
-    [:
+    [:broker_list].each do |opt|
       raise(ArgumentError, "#{opt} is required.") unless options[opt]
     end
   end
 
-  public
-  def shutdown()
-    @running = false
-  end
-
   public
   def connect()
     @producer = Java::kafka::producer::Producer.new(createProducerConfig)
   end
 
   public
-  def sendMsg(key,msg)
-    m = Java::kafka::producer::KeyedMessage.new(topic
+  def sendMsg(topic, key, msg)
+    m = Java::kafka::producer::KeyedMessage.new(topic=topic, key=key, message=msg)
     #the send message for a producer is scala varargs, which doesn't seem to play nice w/ jruby
     # this is the best I could come up with
     ms = Java::scala::collection::immutable::Vector.new(0,0,0)
     ms = ms.append_front(m)
-
-
-
-
-
-    @running
+    begin
+      @producer.send(ms)
+    rescue FailedToSendMessageException => e
+      raise KafkaError.new(e), "Got FailedToSendMessageException: #{e}"
+    end
   end
 
   def createProducerConfig()
     # TODO lots more options avaiable here: http://kafka.apache.org/documentation.html#producerconfigs
     properties = java.util.Properties.new()
-    properties.put("zookeeper.connect", @zk_connect)
-    properties.put("zookeeper.connection.timeout.ms", @zk_connect_timeout)
-    properties.put("zookeeper.session.timeout.ms", @zk_session_timeout)
-    properties.put("zookeeper.sync.time.ms", @zk_sync_time)
-    properties.put("serializer.class", "kafka.serializer.StringEncoder")
-    properties.put("request.required.acks", "1")
     properties.put("metadata.broker.list", @brokers)
+    properties.put("request.required.acks", @request_required_acks)
+    if not @partitioner_class.nil?
+      properties.put("partitioner.class", @partitioner_class)
+    end
+    properties.put("serializer.class", @serializer_class)
     return Java::kafka::producer::ProducerConfig.new(properties)
   end
 end
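The producer rewrite above is a breaking change for callers: the zookeeper and consumer-style options and the shutdown method are gone, :broker_list is the only validated option, and sendMsg now takes the topic as its first argument instead of a :topic_id captured at construction time. A minimal usage sketch against the 0.0.11 API (broker address, topic, key, and payload are placeholder values, and the Kafka 0.8 client jars are assumed to be on the JRuby classpath):

    require 'jruby-kafka'

    # :broker_list is the only option validated in 0.0.11; the zookeeper
    # settings accepted by 0.0.10 are no longer used by the producer.
    producer = Kafka::Producer.new(
      :broker_list => 'localhost:9092',    # placeholder broker address
      :request_required_acks => '1'        # optional; defaults to '0'
    )
    producer.connect

    # the topic now travels with each message rather than with the producer
    producer.sendMsg('example-topic', 'example-key', 'hello from 0.0.11')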
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: jruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.0.10
+  version: 0.0.11
 platform: ruby
 authors:
 - Joseph Lawson
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-01-
+date: 2014-01-17 00:00:00.000000000 Z
 dependencies: []
 description: this is primarily to be used as an interface for logstash
 email:
@@ -18,7 +18,6 @@ extensions: []
 extra_rdoc_files: []
 files:
 - lib/jruby-kafka.rb
-- lib/kafka.rb
 - lib/jruby-kafka/client.rb
 - lib/jruby-kafka/config.rb
 - lib/jruby-kafka/consumer.rb
@@ -26,11 +25,12 @@ files:
 - lib/jruby-kafka/group.rb
 - lib/jruby-kafka/namespace.rb
 - lib/jruby-kafka/producer.rb
+- lib/kafka.rb
 homepage: https://github.com/joekiller/jruby-kafka
 licenses:
 - Apache 2.0
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -46,9 +46,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.
-signing_key:
+rubyforge_project:
+rubygems_version: 2.2.1
+signing_key:
 specification_version: 4
 summary: jruby Kafka wrapper
 test_files: []
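At the packaging level the release is an ordinary version bump; a minimal Gemfile entry for a JRuby project, assuming the standard rubygems.org source, would look like:

    source 'https://rubygems.org'

    gem 'jruby-kafka', '0.0.11'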