logstash-kafka 0.7.3-java → 0.7.4-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/logstash/outputs/kafka.rb +34 -22
  3. metadata +2 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: d7b82a85fc1104b64c4f624d47552563c6c525a8
4
- data.tar.gz: d724c2364a9953ed92b7ac544b8df547bdbb8f35
3
+ metadata.gz: a520ddbf6cb6a908962b69b3fbbb89de78134f29
4
+ data.tar.gz: 19634df5edd8eabdd8e1dc8599eeff9c9ee15b24
5
5
  SHA512:
6
- metadata.gz: 0daf018fbc3a6559a1c808e48a330fb9ba8472573b4417c6f3e3fcd1c08df4a691c7c7c65ac4c440b755cbb55fd2bd23b56a477f39873384e5122c8d9fa484b1
7
- data.tar.gz: e133e09305b42af3815edd60f216f046b62acbd44eefc630e757269d149c8f919d26dd4c89667f5fd6897ac145c8c606c3a1dea2edef9ccfc269d904930b5424
6
+ metadata.gz: 8ba628001dadd30db6a48b6fa2de474009ccb5e9c83edf457ddf7f0ec74d5905686884ab90cdbefd6f2573657e3a00b19fba68c2b7cc9312a836f8730c7fc5db
7
+ data.tar.gz: b6db481e43f90f4cb708d3c3d129ef689c51f3282e003a68578145d22d02f296c6860b0070b0eac0f2c266141e5a3957e45dc8e8d0c7546cc9aa82edd607dc7f
@@ -64,7 +64,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
64
64
  # throughput) but open the possibility of a failure of the client machine dropping unsent data.
65
65
  config :producer_type, :validate => %w( sync async ), :default => 'sync'
66
66
  # The serializer class for keys (defaults to the same as for messages if nothing is given)
67
- config :key_serializer_class, :validate => :string, :default => nil
67
+ config :key_serializer_class, :validate => :string, :default => 'kafka.serializer.StringEncoder'
68
68
  # This property will cause the producer to automatically retry a failed send request. This
69
69
  # property specifies the number of retries when such failures occur. Note that setting a
70
70
  # non-zero value here can lead to duplicates in the case of network errors that cause a message
@@ -101,30 +101,40 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
101
101
  config :send_buffer_bytes, :validate => :number, :default => 100 * 1024
102
102
  # The client id is a user-specified string sent in each request to help trace calls. It should
103
103
  # logically identify the application making the request.
104
- config :client_id, :validate => :string, :default => ''
104
+ config :client_id, :validate => :string, :default => ""
105
+ # Provides a way to specify a partition key as a string. To specify a partition key for
106
+ # Kafka, configure a format that will produce the key as a string. Defaults key_serializer to
107
+ # kafka.serializer.StringEncoder to match. For example, to partition by host:
108
+ # output {
109
+ # kafka {
110
+ # partition_key_format => "%{host}"
111
+ # }
112
+ # }
113
+ config :partition_key_format, :validate => :string, :default => nil
105
114
 
106
115
  public
107
116
  def register
108
117
  LogStash::Logger.setup_log4j(@logger)
109
118
  options = {
110
- :broker_list => @broker_list,
111
- :compression_codec => @compression_codec,
112
- :compressed_topics => @compressed_topics,
113
- :request_required_acks => @request_required_acks,
114
- :serializer_class => @serializer_class,
115
- :partitioner_class => @partitioner_class,
116
- :request_timeout_ms => @request_timeout_ms,
117
- :producer_type => @producer_type,
118
- :key_serializer_class => @key_serializer_class,
119
- :message_send_max_retries => @message_send_max_retries,
120
- :retry_backoff_ms => @retry_backoff_ms,
121
- :topic_metadata_refresh_interval_ms => @topic_metadata_refresh_interval_ms,
122
- :queue_buffering_max_ms => @queue_buffering_max_ms,
123
- :queue_buffering_max_messages => @queue_buffering_max_messages,
124
- :queue_enqueue_timeout_ms => @queue_enqueue_timeout_ms,
125
- :batch_num_messages => @batch_num_messages,
126
- :send_buffer_bytes => @send_buffer_bytes,
127
- :client_id => @client_id
119
+ :broker_list => @broker_list,
120
+ :compression_codec => @compression_codec,
121
+ :compressed_topics => @compressed_topics,
122
+ :request_required_acks => @request_required_acks,
123
+ :serializer_class => @serializer_class,
124
+ :partitioner_class => @partitioner_class,
125
+ :request_timeout_ms => @request_timeout_ms,
126
+ :producer_type => @producer_type,
127
+ :key_serializer_class => @key_serializer_class,
128
+ :message_send_max_retries => @message_send_max_retries,
129
+ :retry_backoff_ms => @retry_backoff_ms,
130
+ :topic_metadata_refresh_interval_ms => @topic_metadata_refresh_interval_ms,
131
+ :queue_buffering_max_ms => @queue_buffering_max_ms,
132
+ :queue_buffering_max_messages => @queue_buffering_max_messages,
133
+ :queue_enqueue_timeout_ms => @queue_enqueue_timeout_ms,
134
+ :batch_num_messages => @batch_num_messages,
135
+ :send_buffer_bytes => @send_buffer_bytes,
136
+ :client_id => @client_id,
137
+ :partition_key_format => @partition_key_format
128
138
  }
129
139
  @producer = Kafka::Producer.new(options)
130
140
  @producer.connect
@@ -133,7 +143,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
133
143
 
134
144
  @codec.on_event do |event, data|
135
145
  begin
136
- @producer.send_msg(event.sprintf(@topic_id),nil,data)
146
+ @producer.send_msg(event.sprintf(@topic_id),@partition_key,data)
137
147
  rescue LogStash::ShutdownSignal
138
148
  @logger.info('Kafka producer got shutdown signal')
139
149
  rescue => e
@@ -149,9 +159,11 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
149
159
  finished
150
160
  return
151
161
  end
162
+ @partition_key = if @partition_key_format.nil? then nil else event.sprintf(@partition_key_format) end
152
163
  @codec.encode(event)
164
+ @partition_key = nil
153
165
  end
154
-
166
+
155
167
  def teardown
156
168
  @producer.close
157
169
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-kafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.7.3
4
+ version: 0.7.4
5
5
  platform: java
6
6
  authors:
7
7
  - Joseph Lawson
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2015-02-19 00:00:00.000000000 Z
11
+ date: 2015-02-21 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement