logstash-output-kafka 0.1.3 → 0.1.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: 57ebec7023e23af3c9b3ce4f206548cf103ef678
-   data.tar.gz: b0dfbaec82a03889eca7340a56b9663fa27596c9
+   metadata.gz: f0219f2b0524405c4045d48117b5e583c5b9ca04
+   data.tar.gz: bda2fcf985eb617376f7189f2df22341681b3eef
  SHA512:
-   metadata.gz: b4bdfc05c7f2033b992eddf09d27b1440f4bd4ff0ac6a44f1146885fa442a39327ef46fd7b4db95625c30e04f27c5547781893fccc3fcabee37ee54cab001f42
-   data.tar.gz: 374c0a5f8e8ea8020bea284cbe0b85341d6c1a84e2854de50001e34bd7242bb6441bd3ba21b3eaca83a4f647e24ccaff8ac81ca220ae8ea6e40958e20600d8ce
+   metadata.gz: a94e343d2bf56452fb781b549481e8219a13cc12e3c08af0c41aa2bca3179023c65303e84fb7ed2528e4599b5f55434f4aa183e4adef2a8cb2a645567cb66739
+   data.tar.gz: 13afb751837c1cedafb92132de1374c0a8bcfbebbc0cffb34e35dcd831cdd5d0ccf6e181429ffd7959ec339dabae38e901a463a07df789e4b4c65b44ce6070d4
data/CONTRIBUTORS ADDED
@@ -0,0 +1,16 @@
+ The following is a list of people who have contributed ideas, code, bug
+ reports, or in general have helped logstash along its way.
+
+ Contributors:
+ * Joseph Lawson (joekiller)
+ * João Duarte (jsvd)
+ * Kurt Hurtado (kurtado)
+ * Pier-Hugues Pellerin (ph)
+ * Richard Pijnenburg (electrical)
+ * Suyog Rao (suyograo)
+ * Tal Levy (talevy)
+
+ Note: If you've sent us patches, bug reports, or otherwise contributed to
+ Logstash, and you aren't on the list above and want to be, please let us know
+ and we'll make sure you're here. Contributions from folks like you are what make
+ open source awesome.
data/DEVELOPER.md ADDED
@@ -0,0 +1,56 @@
+ logstash-output-kafka
+ ====================
+
+ Apache Kafka output for Logstash. This output will produce messages to a Kafka topic using the producer API exposed by Kafka.
+
+ For more information about Kafka, refer to this [documentation](http://kafka.apache.org/documentation.html)
+
+ Information about the producer API can be found [here](http://kafka.apache.org/documentation.html#apidesign)
+
+ Logstash Configuration
+ ====================
+
+ See http://kafka.apache.org/documentation.html#producerconfigs for details about the Kafka producer options.
+
+     output {
+       kafka {
+         topic_id => ... # string (required), the topic to produce the messages to
+         broker_list => ... # string (optional), default: "localhost:9092", used for bootstrapping; the producer will only use it for getting metadata
+         compression_codec => ... # string (optional), one of ["none", "gzip", "snappy"], default: "none"
+         compressed_topics => ... # string (optional), default: "", sets whether compression should be turned on for particular topics
+         request_required_acks => ... # number (optional), one of [-1, 0, 1], default: 0, controls when a produce request is considered completed
+         serializer_class => ... # string (optional), default: "kafka.serializer.StringEncoder", the serializer class for messages; the default encoder takes a byte[] and returns the same byte[]
+         partitioner_class => ... # string (optional), default: "kafka.producer.DefaultPartitioner"
+         request_timeout_ms => ... # number (optional), default: 10000
+         producer_type => ... # string (optional), one of ["sync", "async"], default: "sync"
+         key_serializer_class => ... # string (optional), default: nil
+         message_send_max_retries => ... # number (optional), default: 3
+         retry_backoff_ms => ... # number (optional), default: 100
+         topic_metadata_refresh_interval_ms => ... # number (optional), default: 600 * 1000
+         queue_buffering_max_ms => ... # number (optional), default: 5000
+         queue_buffering_max_messages => ... # number (optional), default: 10000
+         queue_enqueue_timeout_ms => ... # number (optional), default: -1
+         batch_num_messages => ... # number (optional), default: 200
+         send_buffer_bytes => ... # number (optional), default: 100 * 1024
+         client_id => ... # string (optional), default: ""
+       }
+     }
+
+ The default codec for outputs is json. If you select the plain codec, Logstash will encode your messages with not only the message itself but also a timestamp and hostname. If you want nothing but the message itself to pass through, configure the output along these lines:
+
+     output {
+       kafka {
+         codec => plain {
+           format => "%{message}"
+         }
+       }
+     }
+
+ Dependencies
+ ====================
+
+ * Apache Kafka version 0.8.1.1
+ * jruby-kafka library
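As a quick illustration of the reference above: `topic_id` is the only required option and `broker_list` already defaults to `localhost:9092`, so a minimal working output reduces to the sketch below (the topic name is a placeholder, not part of the gem):

```
output {
  kafka {
    # optional; shown here with its default value
    broker_list => "localhost:9092"
    # hypothetical topic name; topic_id is the only required setting
    topic_id => "logstash"
  }
}
```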
data/LICENSE CHANGED
@@ -1,4 +1,4 @@
- Copyright (c) 2012-2014 Elasticsearch <http://www.elasticsearch.org>
+ Copyright (c) 2012-2015 Elasticsearch <http://www.elasticsearch.org>
 
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
data/README.md CHANGED
@@ -1,56 +1,95 @@
- logstash-output-kafka
- ====================
-
- Apache Kafka output for Logstash. This output will produce messages to a Kafka topic using the producer API exposed by Kafka.
-
- For more information about Kafka, refer to this [documentation](http://kafka.apache.org/documentation.html)
-
- Information about producer API can be found [here](http://kafka.apache.org/documentation.html#apidesign)
-
- Logstash Configuration
- ====================
-
- See http://kafka.apache.org/documentation.html#producerconfigs for details about the Kafka producer options.
-
-     output {
-       kafka {
-         topic_id => ... # string (required), The topic to produce the messages to
-         broker_list => ... # string (optional), default: "localhost:9092", This is for bootstrapping and the producer will only use it for getting metadata
-         compression_codec => ... # string (optional), one of ["none", "gzip", "snappy"], default: "none"
-         compressed_topics => ... # string (optional), default: "", This parameter allows you to set whether compression should be turned on for particular
-         request_required_acks => ... # number (optional), one of [-1, 0, 1], default: 0, This value controls when a produce request is considered completed
-         serializer_class => ... # string, (optional) default: "kafka.serializer.StringEncoder", The serializer class for messages. The default encoder takes a byte[] and returns the same byte[]
-         partitioner_class => ... # string (optional) default: "kafka.producer.DefaultPartitioner"
-         request_timeout_ms => ... # number (optional) default: 10000
-         producer_type => ... # string (optional), one of ["sync", "async"] default => 'sync'
-         key_serializer_class => ... # string (optional) default: nil
-         message_send_max_retries => ... # number (optional) default: 3
-         retry_backoff_ms => ... # number (optional) default: 100
-         topic_metadata_refresh_interval_ms => ... # number (optional) default: 600 * 1000
-         queue_buffering_max_ms => ... # number (optional) default: 5000
-         queue_buffering_max_messages => ... # number (optional) default: 10000
-         queue_enqueue_timeout_ms => ... # number (optional) default: -1
-         batch_num_messages => ... # number (optional) default: 200
-         send_buffer_bytes => ... # number (optional) default: 100 * 1024
-         client_id => ... # string (optional) default: ""
-       }
-     }
-
- The default codec is json for outputs. If you select a codec of plain, logstash will encode your messages with not only the message
- but also with a timestamp and hostname. If you do not want anything but your message passing through, you should make
- the output configuration something like:
-
-     output {
-       kafka {
-         codec => plain {
-           format => "%{message}"
-         }
-       }
-     }
-
-
- Dependencies
- ====================
-
- * Apache Kafka version 0.8.1.1
- * jruby-kafka library
+ # Logstash Plugin
+
+ This is a plugin for [Logstash](https://github.com/elasticsearch/logstash).
+
+ It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way.
+
+ ## Documentation
+
+ Logstash provides infrastructure to automatically generate documentation for this plugin. We use the asciidoc format to write documentation, so any comments in the source code will first be converted into asciidoc and then into html. All plugin documentation is placed under one [central location](http://www.elasticsearch.org/guide/en/logstash/current/).
+
+ - For formatting code or config examples, you can use the asciidoc `[source,ruby]` directive
+ - For more asciidoc formatting tips, see the excellent reference here https://github.com/elasticsearch/docs#asciidoc-guide
+
+ ## Need Help?
+
+ Need help? Try #logstash on freenode IRC or the logstash-users@googlegroups.com mailing list.
+
+ ## Developing
+
+ ### 1. Plugin Development and Testing
+
+ #### Code
+ - To get started, you'll need JRuby with the Bundler gem installed.
+
+ - Create a new plugin or clone an existing one from the GitHub [logstash-plugins](https://github.com/logstash-plugins) organization.
+
+ - Install dependencies
+ ```sh
+ bundle install
+ ```
+
+ #### Test
+
+ ```sh
+ bundle exec rspec
+ ```
+
+ The Logstash code required to run the tests/specs is specified in the `Gemfile` by a line similar to:
+ ```ruby
+ gem "logstash", :github => "elasticsearch/logstash", :branch => "1.5"
+ ```
+ To test against another version or a local Logstash, edit the `Gemfile` to specify an alternative location, for example:
+ ```ruby
+ gem "logstash", :github => "elasticsearch/logstash", :ref => "master"
+ ```
+ ```ruby
+ gem "logstash", :path => "/your/local/logstash"
+ ```
+
+ Then update your dependencies and run your tests:
+
+ ```sh
+ bundle install
+ bundle exec rspec
+ ```
+
+ ### 2. Running your unpublished Plugin in Logstash
+
+ #### 2.1 Run in a local Logstash clone
+
+ - Edit Logstash `tools/Gemfile` and add the local plugin path, for example:
+ ```ruby
+ gem "logstash-filter-awesome", :path => "/your/local/logstash-filter-awesome"
+ ```
+ - Update Logstash dependencies
+ ```sh
+ rake vendor:gems
+ ```
+ - Run Logstash with your plugin
+ ```sh
+ bin/logstash -e 'filter {awesome {}}'
+ ```
+ At this point any modifications to the plugin code will be applied to this local Logstash setup. After modifying the plugin, simply rerun Logstash.
+
+ #### 2.2 Run in an installed Logstash
+
+ - Build your plugin gem
+ ```sh
+ gem build logstash-filter-awesome.gemspec
+ ```
+ - Install the plugin from the Logstash home
+ ```sh
+ bin/plugin install /your/local/plugin/logstash-filter-awesome.gem
+ ```
+ - Start Logstash and proceed to test the plugin
+
+ ## Contributing
+
+ All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin.
+
+ Programming is not a required skill. Whatever you've seen about open source and maintainers or community members saying "send patches or die" - you will not see that here.
+
+ It is more important to me that you are able to contribute.
+
+ For more information about contributing, see the [CONTRIBUTING](https://github.com/elasticsearch/logstash/blob/master/CONTRIBUTING.md) file.
data/lib/logstash/outputs/kafka.rb CHANGED
@@ -23,7 +23,6 @@ require 'logstash-output-kafka_jars'
  # Kafka producer configuration: http://kafka.apache.org/documentation.html#producerconfigs
  class LogStash::Outputs::Kafka < LogStash::Outputs::Base
    config_name 'kafka'
-   milestone 1
 
    default :codec, 'json'
    # This is for bootstrapping and the producer will only use it for getting metadata (topics,
@@ -106,7 +105,9 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
 
    public
    def register
+     LogStash::Logger.setup_log4j(@logger)
      require 'jruby-kafka'
+
      options = {
        :broker_list => @broker_list,
        :compression_codec => @compression_codec,
@@ -134,7 +135,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
 
      @codec.on_event do |event, data|
        begin
-         @producer.send_msg(@topic_id,nil,data)
+         @producer.send_msg(event.sprintf(@topic_id),nil,data)
        rescue LogStash::ShutdownSignal
          @logger.info('Kafka producer got shutdown signal')
        rescue => e
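The switch to `event.sprintf(@topic_id)` above is the user-visible change in this hunk: `topic_id` may now contain `%{field}` placeholders that are resolved against each event, as exercised by the new spec further down. A minimal sketch of a configuration relying on it (the `topic_name` field is borrowed from that spec):

```
output {
  kafka {
    # each event is routed to the topic named in its "topic_name" field,
    # resolved per event by event.sprintf at send time
    topic_id => "%{topic_name}"
  }
}
```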
data/logstash-output-kafka.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|
 
    s.name = 'logstash-output-kafka'
-   s.version = '0.1.3'
+   s.version = '0.1.4'
    s.licenses = ['Apache License (2.0)']
    s.summary = 'Output events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on the broker'
    s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -21,7 +21,7 @@ Gem::Specification.new do |s|
 
    # Jar dependencies
    s.requirements << "jar 'org.apache.kafka:kafka_2.9.2', '0.8.1.1'"
-   s.requirements << "jar 'log4j:log4j', '1.2.14'"
+   s.requirements << "jar 'org.slf4j:slf4j-log4j12', '1.7.10'"
 
    # Gem dependencies
    s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
data/spec/outputs/kafka_spec.rb CHANGED
@@ -3,34 +3,46 @@ require "logstash/devutils/rspec/spec_helper"
  require 'logstash/outputs/kafka'
  require 'logstash-output-kafka_jars'
  require 'jruby-kafka'
+ require 'json'
 
  describe "outputs/kafka" do
-   let (:kafka_config) {{'topic_id' => 'test'}}
+   let (:simple_kafka_config) {{'topic_id' => 'test'}}
+   let (:event) { LogStash::Event.new({'message' => 'hello', 'topic_name' => 'my_topic',
+                                       '@timestamp' => LogStash::Timestamp.now}) }
 
-   it "should register" do
-     output = LogStash::Plugin.lookup("output", "kafka").new(kafka_config)
-     expect {output.register}.to_not raise_error
-   end
+   context 'when initializing' do
+     it "should register" do
+       output = LogStash::Plugin.lookup("output", "kafka").new(simple_kafka_config)
+       expect {output.register}.to_not raise_error
+     end
 
-   it 'should populate kafka config with default values' do
-     kafka = LogStash::Outputs::Kafka.new(kafka_config)
-     insist {kafka.broker_list} == 'localhost:9092'
-     insist {kafka.topic_id} == 'test'
-     insist {kafka.compression_codec} == 'none'
-     insist {kafka.serializer_class} == 'kafka.serializer.StringEncoder'
-     insist {kafka.partitioner_class} == 'kafka.producer.DefaultPartitioner'
-     insist {kafka.producer_type} == 'sync'
+     it 'should populate kafka config with default values' do
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+       insist {kafka.broker_list} == 'localhost:9092'
+       insist {kafka.topic_id} == 'test'
+       insist {kafka.compression_codec} == 'none'
+       insist {kafka.serializer_class} == 'kafka.serializer.StringEncoder'
+       insist {kafka.partitioner_class} == 'kafka.producer.DefaultPartitioner'
+       insist {kafka.producer_type} == 'sync'
+     end
    end
 
-   it 'should send logstash event to kafka broker' do
-     timestamp = LogStash::Timestamp.now
-     expect_any_instance_of(Kafka::Producer)
-       .to receive(:send_msg)
-       .with('test', nil, "{\"message\":\"hello world\",\"host\":\"test\",\"@timestamp\":\"#{timestamp}\",\"@version\":\"1\"}")
-     e = LogStash::Event.new({:message => 'hello world', :host => 'test', '@timestamp' => timestamp})
-     kafka = LogStash::Outputs::Kafka.new(kafka_config)
-     kafka.register
-     kafka.receive(e)
-   end
+   context 'when outputting messages' do
+     it 'should send logstash event to kafka broker' do
+       expect_any_instance_of(Kafka::Producer).to receive(:send_msg)
+         .with(simple_kafka_config['topic_id'], nil, event.to_hash.to_json)
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+       kafka.register
+       kafka.receive(event)
+     end
 
+     it 'should support Event#sprintf placeholders in topic_id' do
+       topic_field = 'topic_name'
+       expect_any_instance_of(Kafka::Producer).to receive(:send_msg)
+         .with(event[topic_field], nil, event.to_hash.to_json)
+       kafka = LogStash::Outputs::Kafka.new({'topic_id' => "%{#{topic_field}}"})
+       kafka.register
+       kafka.receive(event)
+     end
+   end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-kafka
  version: !ruby/object:Gem::Version
-   version: 0.1.3
+   version: 0.1.4
  platform: ruby
  authors:
  - Elasticsearch
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-11-26 00:00:00.000000000 Z
+ date: 2015-01-27 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
@@ -107,6 +107,8 @@ extensions: []
  extra_rdoc_files: []
  files:
  - .gitignore
+ - CONTRIBUTORS
+ - DEVELOPER.md
  - Gemfile
  - LICENSE
  - README.md
@@ -136,7 +138,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
      version: '0'
  requirements:
  - jar 'org.apache.kafka:kafka_2.9.2', '0.8.1.1'
- - jar 'log4j:log4j', '1.2.14'
+ - jar 'org.slf4j:slf4j-log4j12', '1.7.10'
  rubyforge_project:
  rubygems_version: 2.1.9
  signing_key: