jruby-kafka 1.0.0.beta-java → 1.1.0.beta-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 858a7e74d47a3e26ef8593d31d8df34f1e586a11
- data.tar.gz: c7041f8ae3d5d8c4e03b4ae91dadc53abd8fc6d7
+ metadata.gz: 77a0012bfe5724b42999e603732764a6730bab39
+ data.tar.gz: 0a204a31ab2b78d8809f27d725a2a01fd520112d
  SHA512:
- metadata.gz: 3d3811e0c53babc95b5c3934f98b4dffa1a775d51f01750b05ae7ed89a1a34ef846d8c4be58b63bb54c284f095af4158fbf085757f61fa6ef67d815e886e1f77
- data.tar.gz: ab497a15f75483d28fded861fb1273ace051b7a891a288a43363061bb318a09f1991fed7eb6da9bd7271f194bd2a6c9dc4c5e8af0b40a7254dd7070a7641c501
+ metadata.gz: 45b293197c14ee6409d647cacab10aa644153eabe8f8d37e2ad23f6bdb22d0e435252033423e2da6f921694e18abd8b3297d20164fd995a1f46ddbcf00840e25
+ data.tar.gz: f50b5d54e5a24f3b502cdd01c21bb047337cf599508738e8d78fda740bd20326393eb120fab4872301ef756927f961c0b2ab223e8c3a03244a6aae64b7a64bcd
lib/jruby-kafka/consumer.rb CHANGED
@@ -1,3 +1,4 @@
+ require 'java'
  require 'jruby-kafka/namespace'
 
  # noinspection JRubyStringImportInspection
@@ -10,35 +11,31 @@ class Kafka::Consumer
  include Java::JavaLang::Runnable
  java_signature 'void run()'
 
- @m_stream
- @m_thread_number
- @m_queue
+ def initialize(a_stream, a_thread_number, a_queue, restart_on_exception, a_sleep_ms)
+ @m_thread_number = a_thread_number
+ @m_stream = a_stream
+ @m_queue = a_queue
+ @m_restart_on_exception = restart_on_exception
+ @m_sleep_ms = 1.0 / 1000.0 * Float(a_sleep_ms)
+ end
 
- def initialize(a_stream, a_thread_number, a_queue, restart_on_exception, a_sleep_ms)
- @m_thread_number = a_thread_number
- @m_stream = a_stream
- @m_queue = a_queue
- @m_restart_on_exception = restart_on_exception
- @m_sleep_ms = 1.0 / 1000.0 * Float(a_sleep_ms)
- end
-
- def run
- it = @m_stream.iterator
- begin
- while it.hasNext
- begin
- @m_queue << it.next.message
- end
- end
- rescue Exception => e
- puts("#{self.class.name} caught exception: #{e.class.name}")
- puts(e.message) if e.message != ''
- if @m_restart_on_exception == 'true'
- sleep(@m_sleep_ms)
- retry
- else
- raise e
+ def run
+ it = @m_stream.iterator
+ begin
+ while it.hasNext
+ begin
+ @m_queue << it.next
  end
  end
+ rescue Exception => e
+ puts("#{self.class.name} caught exception: #{e.class.name}")
+ puts(e.message) if e.message != ''
+ if @m_restart_on_exception
+ sleep(@m_sleep_ms)
+ retry
+ else
+ raise e
+ end
  end
+ end
  end
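With this change the consumer thread now enqueues the whole Kafka message object (it.next) instead of only its payload, so code reading from the queue unwraps the payload itself and can also see the topic, partition and offset. A minimal sketch of draining such a queue, assuming Kafka 0.8's MessageAndMetadata interface; the connection values are placeholders:

# Sketch only: queue entries are assumed to be kafka.message.MessageAndMetadata
# objects (Kafka 0.8.x), so the payload is unwrapped with .message.
require 'thread'
require 'jruby-kafka'

queue = SizedQueue.new(20)
group = Kafka::Group.new(:zk_connect => 'localhost:2181',
                         :group_id   => 'example_group',
                         :topic_id   => 'example_topic')
group.run(1, queue)

loop do
  entry   = queue.pop                              # MessageAndMetadata, not raw bytes
  payload = String.from_java_bytes(entry.message)  # payload arrives as a Java byte[]
  puts "#{entry.topic}/#{entry.partition}@#{entry.offset}: #{payload}"
end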
lib/jruby-kafka/group.rb CHANGED
@@ -9,13 +9,6 @@ class Kafka::Group
  java_import 'java.util.concurrent.Executors'
  java_import 'org.I0Itec.zkclient.exception.ZkException'
 
- @consumer
- @executor
- @topic
- @auto_offset_reset
- @zk_connect
- @group_id
-
  # Create a Kafka client group
  #
  # options:
@@ -24,8 +17,11 @@ class Kafka::Group
  # :zk_connect_timeout => "6000" - (optional) The max time that the client waits while establishing a connection to zookeeper.
  # :group_id => "group" - REQUIRED: The group id to consume on.
  # :topic_id => "topic" - REQUIRED: The topic id to consume on.
- # :reset_beginning => "from-beginning" - (optional) If the consumer does not already have an established offset
- # to consume from, start with the earliest message present in the log rather than the latest message.
+ # :reset_beginning => "from-beginning" - (optional) reset the consumer group to start at the
+ # earliest message present in the log by clearing any offsets for the group stored in Zookeeper.
+ # :auto_offset_reset => "smallest" or "largest" - (optional, default 'largest') If the consumer does not already
+ # have an established offset to consume from, start with the earliest message present in the log (smallest) or
+ # after the last message in the log (largest).
  # :consumer_restart_on_error => "true" - (optional) Controls if consumer threads are to restart on caught exceptions.
  # exceptions are logged.
  def initialize(options={})
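The reworked docs above split the one-off group reset (:reset_beginning) from the offset policy (:auto_offset_reset); the validation added further down enforces that the two are used together. A hypothetical constructor call reflecting that pairing (only the option names come from this file, the values are placeholders):

group = Kafka::Group.new(
  :zk_connect                => 'localhost:2181',
  :group_id                  => 'my_group',
  :topic_id                  => 'my_topic',
  :auto_offset_reset         => 'smallest',        # where to start when no offset is stored
  :reset_beginning           => 'from-beginning',  # clear stored offsets; requires 'smallest'
  :consumer_restart_on_error => 'true',            # restart consumer threads on caught exceptions
  :consumer_restart_sleep_ms => '500'
)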
@@ -34,9 +30,12 @@ class Kafka::Group
  @zk_connect = options[:zk_connect]
  @group_id = options[:group_id]
  @topic = options[:topic_id]
+ @topics_allowed = options[:allow_topics]
+ @topics_filtered = options[:filter_topics]
  @zk_session_timeout = '6000'
  @zk_connect_timeout = '6000'
  @zk_sync_time = '2000'
+ @reset_beginning = nil
  @auto_offset_reset = 'largest'
  @auto_commit_interval = '1000'
  @running = false
@@ -120,13 +119,15 @@ class Kafka::Group
  @consumer_restart_sleep_ms = "#{options[:consumer_restart_sleep_ms]}"
  end
 
+ if options[:auto_offset_reset]
+ @auto_offset_reset = "#{options[:auto_offset_reset]}"
+ end
 
  if options[:reset_beginning]
- if options[:reset_beginning] == 'from-beginning'
- @auto_offset_reset = 'smallest'
- else
- @auto_offset_reset = 'largest'
+ if not options[:auto_offset_reset] || options[:auto_offset_reset] != 'smallest'
+ raise KafkaError.new('reset_beginning => from-beginning must be used with auto_offset_reset => smallest')
  end
+ @reset_beginning = "#{options[:reset_beginning]}"
  end
 
  if options[:consumer_id]
@@ -148,7 +149,7 @@ class Kafka::Group
 
  def run(a_num_threads, a_queue)
  begin
- if @auto_offset_reset == 'smallest'
+ if @reset_beginning == 'from-beginning'
  Java::kafka::utils::ZkUtils.maybeDeletePath(@zk_connect, "/consumers/#{@group_id}")
  end
 
@@ -156,11 +157,9 @@ class Kafka::Group
  rescue ZkException => e
  raise KafkaError.new(e), "Got ZkException: #{e}"
  end
- topic_count_map = java.util.HashMap.new
+
  thread_value = a_num_threads.to_java Java::int
- topic_count_map.put(@topic, thread_value)
- consumer_map = @consumer.createMessageStreams(topic_count_map)
- streams = Array.new(consumer_map[@topic])
+ streams = get_streams(thread_value)
 
  @executor = Executors.newFixedThreadPool(a_num_threads)
  @executor_submit = @executor.java_method(:submit, [Java::JavaLang::Runnable.java_class])
@@ -180,9 +179,30 @@ class Kafka::Group
  private
 
  def validate_required_arguments(options={})
- [:zk_connect, :group_id, :topic_id].each do |opt|
+ [:zk_connect, :group_id].each do |opt|
  raise(ArgumentError, "#{opt} is required.") unless options[opt]
  end
+ unless [ options[:topic_id],
+ options[:allow_topics],
+ options[:filter_topics] ].compact.length == 1
+ raise(ArgumentError,
+ "exactly one of topic_id, allow_topics, filter_topics is required.")
+ end
+ end
+
+ def get_streams(threads)
+ if @topic
+ topic_count_map = java.util.HashMap.new
+ topic_count_map.put(@topic, threads)
+ consumer_map = @consumer.createMessageStreams(topic_count_map)
+ Array.new(consumer_map[@topic])
+ elsif @topics_allowed
+ filter = Java::kafka::consumer::Whitelist.new(@topics_allowed)
+ Array.new(@consumer.createMessageStreamsByFilter(filter, threads))
+ else # @topics_filtered
+ filter = Java::kafka::consumer::Blacklist.new(@topics_filtered)
+ Array.new(@consumer.createMessageStreamsByFilter(filter, threads))
+ end
  end
 
  def create_consumer_config
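get_streams is new: besides a single :topic_id, a group can now be fed by a whitelist (:allow_topics) or blacklist (:filter_topics) pattern, and validate_required_arguments insists on exactly one of the three. A short sketch, assuming the strings are passed straight through to Kafka's Whitelist/Blacklist regex filters:

# Assumed regex patterns; exactly one of :topic_id, :allow_topics, :filter_topics may be given.
whitelisted = Kafka::Group.new(:zk_connect   => 'localhost:2181',
                               :group_id     => 'wildcard_group',
                               :allow_topics => 'events\..*')    # consume only topics matching the pattern

blacklisted = Kafka::Group.new(:zk_connect    => 'localhost:2181',
                               :group_id      => 'catch_all_group',
                               :filter_topics => 'internal\..*') # consume everything except matches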
lib/jruby-kafka_jars.rb CHANGED
@@ -1,15 +1,15 @@
  # this is a generated file, to avoid over-writing it just delete this comment
  require 'jar_dependencies'
 
- require_jar( 'log4j', 'log4j', '1.2.17' )
- require_jar( 'org.slf4j', 'slf4j-api', '1.7.5' )
- require_jar( 'org.slf4j', 'slf4j-log4j12', '1.7.5' )
+ require_jar( 'org.scala-lang', 'scala-library', '2.9.2' )
+ require_jar( 'junit', 'junit', '3.8.1' )
+ require_jar( 'com.101tec', 'zkclient', '0.3' )
  require_jar( 'com.yammer.metrics', 'metrics-core', '2.2.0' )
+ require_jar( 'org.slf4j', 'slf4j-api', '1.7.10' )
+ require_jar( 'org.slf4j', 'slf4j-log4j12', '1.7.10' )
+ require_jar( 'log4j', 'log4j', '1.2.17' )
+ require_jar( 'jline', 'jline', '0.9.94' )
+ require_jar( 'net.sf.jopt-simple', 'jopt-simple', '3.2' )
  require_jar( 'org.apache.zookeeper', 'zookeeper', '3.3.4' )
- require_jar( 'org.scala-lang', 'scala-library', '2.9.2' )
  require_jar( 'org.apache.kafka', 'kafka_2.9.2', '0.8.1.1' )
- require_jar( 'net.sf.jopt-simple', 'jopt-simple', '3.2' )
- require_jar( 'jline', 'jline', '0.9.94' )
- require_jar( 'com.101tec', 'zkclient', '0.3' )
  require_jar( 'org.xerial.snappy', 'snappy-java', '1.0.5' )
- require_jar( 'junit', 'junit', '3.8.1' )
metadata CHANGED
@@ -1,57 +1,57 @@
  --- !ruby/object:Gem::Specification
  name: jruby-kafka
  version: !ruby/object:Gem::Version
- version: 1.0.0.beta
+ version: 1.1.0.beta
  platform: java
  authors:
  - Joseph Lawson
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-01-19 00:00:00.000000000 Z
+ date: 2015-02-19 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: jar-dependencies
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ~>
- - !ruby/object:Gem::Version
- version: '0.1'
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
  version: '0.1'
+ name: jar-dependencies
  prerelease: false
  type: :runtime
- - !ruby/object:Gem::Dependency
- name: ruby-maven
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
- version: '3.1'
+ version: '0.1'
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
  version: '3.1'
+ name: ruby-maven
  prerelease: false
  type: :development
- - !ruby/object:Gem::Dependency
- name: rake
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
- version: '10.4'
+ version: '3.1'
+ - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
  version: '10.4'
+ name: rake
  prerelease: false
  type: :development
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ~>
+ - !ruby/object:Gem::Version
+ version: '10.4'
  description: this is primarily to be used as an interface for logstash
  email:
  - joe@joekiller.com
@@ -59,36 +59,38 @@ executables: []
  extensions: []
  extra_rdoc_files: []
  files:
- - lib/com/101tec/zkclient/0.3/zkclient-0.3.jar
- - lib/com/101tec/zkclient/maven-metadata-local.xml
- - lib/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar
- - lib/com/yammer/metrics/metrics-core/maven-metadata-local.xml
- - lib/jline/jline/0.9.94/jline-0.9.94.jar
- - lib/jline/jline/maven-metadata-local.xml
  - lib/jruby-kafka.rb
+ - lib/jruby-kafka_jars.rb
  - lib/jruby-kafka/consumer.rb
  - lib/jruby-kafka/error.rb
  - lib/jruby-kafka/group.rb
  - lib/jruby-kafka/namespace.rb
  - lib/jruby-kafka/producer.rb
- - lib/jruby-kafka_jars.rb
+ - lib/com/101tec/zkclient/0.3/zkclient-0.3.jar
+ - lib/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar
+ - lib/jline/jline/0.9.94/jline-0.9.94.jar
  - lib/junit/junit/3.8.1/junit-3.8.1.jar
- - lib/junit/junit/maven-metadata-local.xml
  - lib/log4j/log4j/1.2.17/log4j-1.2.17.jar
- - lib/log4j/log4j/maven-metadata-local.xml
  - lib/net/sf/jopt-simple/jopt-simple/3.2/jopt-simple-3.2.jar
- - lib/net/sf/jopt-simple/jopt-simple/maven-metadata-local.xml
  - lib/org/apache/kafka/kafka_2.9.2/0.8.1.1/kafka_2.9.2-0.8.1.1.jar
- - lib/org/apache/kafka/kafka_2.9.2/maven-metadata-local.xml
  - lib/org/apache/zookeeper/zookeeper/3.3.4/zookeeper-3.3.4.jar
- - lib/org/apache/zookeeper/zookeeper/maven-metadata-local.xml
  - lib/org/scala-lang/scala-library/2.9.2/scala-library-2.9.2.jar
- - lib/org/scala-lang/scala-library/maven-metadata-local.xml
+ - lib/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar
  - lib/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar
- - lib/org/slf4j/slf4j-api/maven-metadata-local.xml
+ - lib/org/slf4j/slf4j-log4j12/1.7.10/slf4j-log4j12-1.7.10.jar
  - lib/org/slf4j/slf4j-log4j12/1.7.5/slf4j-log4j12-1.7.5.jar
- - lib/org/slf4j/slf4j-log4j12/maven-metadata-local.xml
  - lib/org/xerial/snappy/snappy-java/1.0.5/snappy-java-1.0.5.jar
+ - lib/com/101tec/zkclient/maven-metadata-local.xml
+ - lib/com/yammer/metrics/metrics-core/maven-metadata-local.xml
+ - lib/jline/jline/maven-metadata-local.xml
+ - lib/junit/junit/maven-metadata-local.xml
+ - lib/log4j/log4j/maven-metadata-local.xml
+ - lib/net/sf/jopt-simple/jopt-simple/maven-metadata-local.xml
+ - lib/org/apache/kafka/kafka_2.9.2/maven-metadata-local.xml
+ - lib/org/apache/zookeeper/zookeeper/maven-metadata-local.xml
+ - lib/org/scala-lang/scala-library/maven-metadata-local.xml
+ - lib/org/slf4j/slf4j-api/maven-metadata-local.xml
+ - lib/org/slf4j/slf4j-log4j12/maven-metadata-local.xml
  - lib/org/xerial/snappy/snappy-java/maven-metadata-local.xml
  homepage: https://github.com/joekiller/jruby-kafka
  licenses:
@@ -110,9 +112,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: 1.3.1
  requirements:
  - jar 'org.apache.kafka:kafka_2.9.2', '0.8.1.1'
- - jar 'org.slf4j:slf4j-log4j12', '1.7.5'
+ - jar 'org.slf4j:slf4j-log4j12', '1.7.10'
  rubyforge_project:
- rubygems_version: 2.4.5
+ rubygems_version: 2.1.9
  signing_key:
  specification_version: 4
  summary: jruby Kafka wrapper