heller 0.0.3-java → 0.2.0-java

data/lib/heller/producer_configuration.rb ADDED
@@ -0,0 +1,44 @@
+ # encoding: utf-8
+
+ module Heller
+   class ProducerConfiguration < Configuration
+
+     protected
+
+     def key_mappings
+       @key_mappings ||= {
+         brokers: 'metadata.broker.list',
+         type: 'producer.type',
+         serializer: 'serializer.class',
+         key_serializer: 'key.serializer.class',
+         partitioner: 'partitioner.class',
+         compression: 'compression.codec',
+         compressed_topics: 'compressed.topics',
+         num_retries: 'message.send.max.retries',
+         retry_backoff: 'retry.backoff.ms',
+         metadata_refresh_interval: 'topic.metadata.refresh.interval.ms',
+         batch_size: 'batch.num.messages',
+         client_id: 'client.id',
+         request_timeout: 'request.timeout.ms',
+         buffer_limit: 'queue.buffering.max.messages',
+         buffer_timeout: 'queue.buffering.max.ms',
+         enqueue_timeout: 'queue.enqueue.timeout.ms',
+         socket_buffer: 'send.buffer.bytes',
+         ack: 'request.required.acks',
+       }.freeze
+     end
+
+     def defaults
+       {
+         brokers: 'localhost:9092',
+         type: :sync,
+         serializer: 'kafka.serializer.StringEncoder',
+         ack: -1
+       }
+     end
+
+     def kafka_config_class
+       Kafka::Producer::ProducerConfig
+     end
+   end
+ end
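The key mappings above let callers use short Symbol keys while the resulting Java config sees the full Kafka property names. A minimal usage sketch (not part of the diff; it assumes the Configuration base class exposes #to_java and builds the class returned by #kafka_config_class, as the specs further down suggest — the broker list and option values are illustrative):

# Hypothetical example: Symbol keys are translated via key_mappings.
configuration = Heller::ProducerConfiguration.new(
  brokers: 'localhost:9092,localhost:9093', # => 'metadata.broker.list'
  type: :async,                             # => 'producer.type'
  batch_size: 200,                          # => 'batch.num.messages'
  ack: 1                                    # => 'request.required.acks'
)
producer_config = configuration.to_java # => a Kafka::Producer::ProducerConfig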
data/lib/heller/topic_metadata_response.rb ADDED
@@ -0,0 +1,66 @@
+ # encoding: utf-8
+
+ module Heller
+   class TopicMetadataResponse
+     include Enumerable
+
+     def initialize(underlying)
+       @underlying = underlying
+
+       @cache = Hash.new.tap do |h|
+         CACHES.each do |type|
+           h[type] = Hash.new { |hash, topic| hash[topic] = {} }
+         end
+       end
+     end
+
+     def each(&block)
+       metadata.each do |topic_metadata|
+         topic_metadata.partitions_metadata.each do |partition_metadata|
+           yield topic_metadata.topic, partition_metadata
+         end
+       end
+     end
+
+     def metadata
+       @underlying.topics_metadata
+     end
+
+     def leader_for(topic, partition)
+       with_cache(:leader, topic, partition)
+     end
+
+     def isr_for(topic, partition)
+       with_cache(:isr, topic, partition)
+     end
+     alias_method :in_sync_replicas_for, :isr_for
+
+     private
+
+     CACHES = [:leader, :isr].freeze
+
+     def with_cache(type, topic, partition)
+       return @cache[type][topic][partition] if @cache[type][topic][partition]
+
+       partition_metadata = locate_partition_metadata(topic, partition)
+
+       if partition_metadata
+         @cache[type][topic][partition] = partition_metadata.send(type)
+       else
+         raise NoSuchTopicPartitionCombinationError, "Cannot find (#{topic}:#{partition}) combination"
+       end
+     end
+
+     def locate_partition_metadata(topic, partition)
+       metadata.each do |tm|
+         if tm.topic == topic
+           tm.partitions_metadata.each do |pm|
+             return pm if pm.partition_id == partition
+           end
+         end
+       end
+
+       nil
+     end
+   end
+ end
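For context (illustrative, not part of the diff): `response` below is a hypothetical TopicMetadataResponse instance, such as one built from a consumer's topic metadata request, and 'events' is a made-up topic name.

# Enumerable support yields each (topic, partition metadata) pair.
response.each do |topic, partition_metadata|
  puts "#{topic}:#{partition_metadata.partition_id}"
end

# Lookups are memoized per (topic, partition) pair; an unknown
# combination raises Heller::NoSuchTopicPartitionCombinationError.
leader   = response.leader_for('events', 0)
replicas = response.in_sync_replicas_for('events', 0)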
data/lib/heller/version.rb ADDED
@@ -0,0 +1,5 @@
+ # encoding: utf-8
+
+ module Heller
+   VERSION = '0.2.0'.freeze
+ end
data/lib/heller/zookeeper_consumer.rb ADDED
@@ -0,0 +1,47 @@
+ # encoding: utf-8
+
+ module Heller
+   class ZookeeperConsumer
+     def initialize(zk_hosts, options, consumer_impl=Kafka::Consumer::Consumer)
+       @consumer = create_consumer(consumer_impl, options.merge(zk_connect: zk_hosts))
+     end
+
+     def create_streams(topic_count_map, options={})
+       if options[:key_decoder] && options[:value_decoder]
+         @consumer.create_message_streams(convert_longs(topic_count_map), *options.values_at(:key_decoder, :value_decoder))
+       else
+         @consumer.create_message_streams(convert_longs(topic_count_map))
+       end
+     end
+
+     def create_streams_by_filter(filter, num_streams, options={})
+       whitelist = Kafka::Consumer::Whitelist.new(filter)
+       if options[:key_decoder] && options[:value_decoder]
+         @consumer.create_message_streams_by_filter(whitelist, num_streams, *options.values_at(:key_decoder, :value_decoder)).to_a
+       else
+         @consumer.create_message_streams_by_filter(whitelist, num_streams).to_a
+       end
+     end
+
+     def commit
+       @consumer.commit_offsets
+     end
+
+     def close
+       @consumer.shutdown
+     end
+     alias_method :shutdown, :close
+
+     private
+
+     def convert_longs(hash)
+       hash.each_with_object({}) do |(k, v), acc|
+         acc[k] = v.to_java(:int)
+       end
+     end
+
+     def create_consumer(consumer_impl, options)
+       consumer_impl.createJavaConsumerConnector(ConsumerConfiguration.new(options).to_java)
+     end
+   end
+ end
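A minimal consumption sketch (assumptions: a local ZooKeeper, a topic named 'events', and a group_id passed through the options hash, which the underlying ConsumerConfig requires — names and values are illustrative):

consumer = Heller::ZookeeperConsumer.new('localhost:2181', group_id: 'sketch-group')

# One stream for 'events'; stream counts are converted to Java ints
# by #convert_longs before reaching the Java consumer API.
streams = consumer.create_streams('events' => 1)
streams['events'].first.each do |message_and_metadata|
  puts message_and_metadata.message
end

consumer.commit # commit consumed offsets to ZooKeeper
consumer.close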
data/lib/heller.rb CHANGED
@@ -2,9 +2,28 @@
 
 require 'kafka'
 
+
 module Heller
-   java_import 'java.util.ArrayList'
+   java_import 'java.util.ArrayList'
+   java_import 'java.util.Properties'
+   java_import 'java.lang.IllegalArgumentException'
+   java_import 'java.util.NoSuchElementException'
+
+   HellerError = Class.new(StandardError)
+   NoSuchTopicPartitionCombinationError = Class.new(HellerError)
 end
 
- require 'heller/consumer'
+ require 'heller/configuration'
+ require 'heller/consumer_configuration'
 require 'heller/producer'
+ require 'heller/producer_configuration'
+ require 'heller/errors'
+ require 'heller/fetch_request'
+ require 'heller/fetch_response'
+ require 'heller/topic_metadata_response'
+ require 'heller/message'
+ require 'heller/message_set_enumerator'
+ require 'heller/offset_request'
+ require 'heller/offset_response'
+ require 'heller/consumer'
+ require 'heller/zookeeper_consumer'
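Both new error classes descend from StandardError via HellerError, so callers can rescue the base class to catch any library-specific failure. A brief sketch (`metadata` is a hypothetical TopicMetadataResponse; the topic and partition are made up):

begin
  metadata.leader_for('no-such-topic', 42)
rescue Heller::HellerError => e
  # Catches NoSuchTopicPartitionCombinationError and any future subclasses.
  warn "Heller operation failed: #{e.message}"
end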
data/lib/kafka.rb CHANGED
@@ -1,32 +1,55 @@
 # encoding: utf-8
 
- require 'java'
+ require 'slf4j-jars'
 require 'kafka-jars'
 
+
 module Kafka
-   module Api
-     java_import 'kafka.api.FetchRequest'
-     java_import 'kafka.api.MultiFetchRequest'
-     java_import 'kafka.api.MultiFetchResponse'
-   end
-
-   module Consumer
-     java_import 'kafka.javaapi.consumer.SimpleConsumer'
-   end
-
-   module Message
-     java_import 'kafka.message.Message'
-     java_import 'kafka.javaapi.message.MessageSet'
-     java_import 'kafka.javaapi.message.ByteBufferMessageSet'
-   end
-
-   module Producer
-     java_import 'kafka.producer.ProducerConfig'
-     java_import 'kafka.producer.SyncProducerConfig'
-
-     java_import 'kafka.javaapi.producer.ProducerData'
-
-     java_import 'kafka.javaapi.producer.SyncProducer'
-     java_import 'kafka.javaapi.producer.Producer'
-   end
- end
+   module Api
+     java_import 'kafka.api.FetchRequest'
+     java_import 'kafka.api.FetchRequestBuilder'
+     java_import 'kafka.api.PartitionOffsetRequestInfo'
+     java_import 'kafka.api.OffsetRequest'
+   end
+
+   module JavaApi
+     java_import 'kafka.javaapi.FetchResponse'
+     java_import 'kafka.javaapi.OffsetRequest'
+     java_import 'kafka.javaapi.OffsetResponse'
+     java_import 'kafka.javaapi.PartitionMetadata'
+     java_import 'kafka.javaapi.TopicMetadata'
+     java_import 'kafka.javaapi.TopicMetadataRequest'
+     java_import 'kafka.javaapi.TopicMetadataResponse'
+   end
+
+   module Common
+     java_import 'kafka.common.TopicAndPartition'
+   end
+
+   module Serializer
+     java_import 'kafka.serializer.StringEncoder'
+     java_import 'kafka.serializer.StringDecoder'
+   end
+
+   module Consumer
+     include_package 'kafka.consumer'
+     java_import 'kafka.javaapi.consumer.SimpleConsumer'
+   end
+
+   module Message
+     java_import 'kafka.message.Message'
+     java_import 'kafka.javaapi.message.MessageSet'
+     java_import 'kafka.javaapi.message.ByteBufferMessageSet'
+   end
+
+   module Producer
+     java_import 'kafka.javaapi.producer.Producer'
+     java_import 'kafka.producer.ProducerConfig'
+     java_import 'kafka.producer.SyncProducerConfig'
+     java_import 'kafka.producer.KeyedMessage'
+   end
+
+   module Errors
+     java_import 'kafka.common.ErrorMapping'
+   end
+ end
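These imports expose the Java classes under Ruby namespaces, and include_package makes everything in the kafka.consumer package (Whitelist, ConsumerConfig, Consumer, and so on) resolvable on demand. Illustrative only; the topic name and filter are made up:

topic_and_partition = Kafka::Common::TopicAndPartition.new('events', 0)
whitelist = Kafka::Consumer::Whitelist.new('events.*') # resolved via include_package 'kafka.consumer'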
data/spec/heller/consumer_configuration_spec.rb ADDED
@@ -0,0 +1,196 @@
+ # encoding: utf-8
+
+ require 'spec_helper'
+
+
+ module Heller
+   describe ConsumerConfiguration do
+     let :configuration do
+       described_class.new(options).to_java
+     end
+
+     describe '#to_java' do
+       shared_examples_for 'a ConsumerConfiguration' do
+         it 'returns a Kafka::Consumer::ConsumerConfig object' do
+           expect(configuration).to be_a(Kafka::Consumer::ConsumerConfig)
+         end
+
+         it 'sets #auto_commit_enable' do
+           expect(configuration.auto_commit_enable).to be false
+         end
+
+         it 'sets #auto_commit_interval_ms' do
+           expect(configuration.auto_commit_interval_ms).to eq(1000)
+         end
+
+         it 'sets #auto_offset_reset' do
+           expect(configuration.auto_offset_reset).to eq('smallest')
+         end
+
+         it 'sets #client_id' do
+           expect(configuration.client_id).to eq('spec-client-id')
+         end
+
+         it 'sets #consumer_timeout_ms' do
+           expect(configuration.consumer_timeout_ms).to eq(30)
+         end
+
+         it 'sets #fetch_min_bytes' do
+           expect(configuration.fetch_min_bytes).to eq(12345)
+         end
+
+         it 'sets #fetch_message_max_bytes' do
+           expect(configuration.fetch_message_max_bytes).to eq(2048)
+         end
+
+         it 'sets #fetch_wait_max_ms' do
+           expect(configuration.fetch_wait_max_ms).to eq(54321)
+         end
+
+         it 'sets #group_id' do
+           expect(configuration.group_id).to eq('spec-group')
+         end
+
+         it 'sets #num_consumer_fetchers' do
+           expect(configuration.num_consumer_fetchers).to eq(10)
+         end
+
+         it 'sets #queued_max_messages' do
+           expect(configuration.queued_max_messages).to eq(1500)
+         end
+
+         it 'sets #rebalance_backoff_ms' do
+           expect(configuration.rebalance_backoff_ms).to eq(125)
+         end
+
+         it 'sets #rebalance_max_retries' do
+           expect(configuration.rebalance_max_retries).to eq(15)
+         end
+
+         it 'sets #socket_receive_buffer_bytes' do
+           expect(configuration.socket_receive_buffer_bytes).to eq(4096)
+         end
+
+         it 'sets #socket_timeout_ms' do
+           expect(configuration.socket_timeout_ms).to eq(100)
+         end
+
+         it 'sets #zk_connect' do
+           expect(configuration.zk_connect).to eq('localhost:2181')
+         end
+
+         it 'sets #zk_session_timeout_ms' do
+           expect(configuration.zk_session_timeout_ms).to eq(125)
+         end
+
+         it 'sets #zk_connection_timeout_ms' do
+           expect(configuration.zk_connection_timeout_ms).to eq(150)
+         end
+
+         it 'sets #zk_sync_time_ms' do
+           expect(configuration.zk_sync_time_ms).to eq(100)
+         end
+
+         it 'sets #partition_assignment_strategy' do
+           expect(configuration.partition_assignment_strategy).to eq('range')
+         end
+
+         it 'sets #offsets_storage' do
+           expect(configuration.offsets_storage).to eq('kafka')
+         end
+
+         it 'sets #offsets_channel_backoff_ms' do
+           expect(configuration.offsets_channel_backoff_ms).to eq(1000)
+         end
+
+         it 'sets #offsets_channel_socket_timeout_ms' do
+           expect(configuration.offsets_channel_socket_timeout_ms).to eq(1500)
+         end
+
+         it 'sets #offsets_commit_max_retries' do
+           expect(configuration.offsets_commit_max_retries).to eq(15)
+         end
+
+         it 'sets #dual_commit_enabled' do
+           expect(configuration.dual_commit_enabled).to be true
+         end
+
+         it 'sets #exclude_internal_topics' do
+           expect(configuration.exclude_internal_topics).to be false
+         end
+       end
+
+       context 'with Symbol keys' do
+         let :options do
+           {
+             auto_commit: false,
+             auto_commit_interval: 1000,
+             auto_reset_offset: :smallest,
+             client_id: 'spec-client-id',
+             fetch_message_max_bytes: 2048,
+             fetch_min_bytes: 12345,
+             fetch_max_wait: 54321,
+             group_id: 'spec-group',
+             num_fetchers: 10,
+             max_queued_message_chunks: 1500,
+             rebalance_retries: 15,
+             rebalance_retry_backoff: 125,
+             receive_buffer: 4096,
+             refresh_leader_backoff: 250,
+             socket_timeout: 100,
+             timeout: 30,
+             zk_connect: 'localhost:2181',
+             zk_session_timeout: 125,
+             zk_connection_timeout: 150,
+             zk_sync_time: 100,
+             partition_assignment_strategy: 'range',
+             offsets_storage: 'kafka',
+             offsets_channel_backoff_ms: 1000,
+             offsets_channel_socket_timeout_ms: 1500,
+             offsets_commit_max_retries: 15,
+             dual_commit_enabled: true,
+             exclude_internal_topics: false,
+           }
+         end
+
+         it_behaves_like 'a ConsumerConfiguration'
+       end
+
+       context 'when given String keys' do
+         let :options do
+           {
+             'auto_commit' => false,
+             'auto_commit_interval' => 1000,
+             'auto_reset_offset' => :smallest,
+             'client_id' => 'spec-client-id',
+             'fetch_message_max_bytes' => 2048,
+             'fetch_min_bytes' => 12345,
+             'fetch_max_wait' => 54321,
+             'group_id' => 'spec-group',
+             'num_fetchers' => 10,
+             'max_queued_message_chunks' => 1500,
+             'rebalance_retries' => 15,
+             'rebalance_retry_backoff' => 125,
+             'receive_buffer' => 4096,
+             'refresh_leader_backoff' => 250,
+             'socket_timeout' => 100,
+             'timeout' => 30,
+             'zk_connect' => 'localhost:2181',
+             'zk_session_timeout' => 125,
+             'zk_connection_timeout' => 150,
+             'zk_sync_time' => 100,
+             'partition_assignment_strategy' => 'range',
+             'offsets_storage' => 'kafka',
+             'offsets_channel_backoff_ms' => 1000,
+             'offsets_channel_socket_timeout_ms' => 1500,
+             'offsets_commit_max_retries' => 15,
+             'dual_commit_enabled' => true,
+             'exclude_internal_topics' => false,
+           }
+         end
+
+         it_behaves_like 'a ConsumerConfiguration'
+       end
+     end
+   end
+ end