logstash-output-kafka 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +8 -8
- data/lib/logstash/outputs/kafka.rb +9 -8
- data/logstash-output-kafka.gemspec +1 -1
- data/spec/outputs/{kafka.rb → kafka_spec.rb} +7 -13
- metadata +4 -4
checksums.yaml
CHANGED
@@ -1,15 +1,15 @@
|
|
1
1
|
---
|
2
2
|
!binary "U0hBMQ==":
|
3
3
|
metadata.gz: !binary |-
|
4
|
-
|
4
|
+
NGQzNzcxMDE5ZTAwODdhOGQ4NjVmNTQwYTkyMjBkNmRkYmFiZDY5Zg==
|
5
5
|
data.tar.gz: !binary |-
|
6
|
-
|
6
|
+
MzYwMWM5ODJmYmFjZjIxNzRiYjJjMjI2OTExNzk4Mjc3OTE5MzVhOQ==
|
7
7
|
SHA512:
|
8
8
|
metadata.gz: !binary |-
|
9
|
-
|
10
|
-
|
11
|
-
|
9
|
+
MzJjNDBkOWZjYmU3MWUyNDVlMDM2ZDAwZDJlZWZlYWNlMWYzZDdhMmZmOTcz
|
10
|
+
MGZkNzNjNGFiMGExYmQzZTljM2M1MDkyYWE1NGI2MjZhMzcwYWNiODFlMmNj
|
11
|
+
YWE0YjgwMTM2ZDQ5MTI4NzU5Njk2NDI1ZmVjOThiYWMxM2MxYmE=
|
12
12
|
data.tar.gz: !binary |-
|
13
|
-
|
14
|
-
|
15
|
-
|
13
|
+
NjU2YWM0MWJkYjc2MmY1MzZiODQzNjk2MmRlMTZmYWY1YWMwODkyOGU2NDY1
|
14
|
+
MGNmNmIwNmE1OWI1OTdlNjhiNWJjYzhiYTBlYzMzM2FhNGYwMDE4NmZkZTMw
|
15
|
+
NGNjMDRmNTEwMTZlOWU3NTQ5NzI2ZDU5YThkYzAxMjc1Yzc1MDc=
|
@@ -10,6 +10,7 @@ require 'logstash-output-kafka_jars'
|
|
10
10
|
# Logstash will encode your messages with not only the message but also with a timestamp and
|
11
11
|
# hostname. If you do not want anything but your message passing through, you should make the output
|
12
12
|
# configuration something like:
|
13
|
+
# [source,ruby]
|
13
14
|
# output {
|
14
15
|
# kafka {
|
15
16
|
# codec => plain {
|
@@ -28,19 +29,19 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
28
29
|
# This is for bootstrapping and the producer will only use it for getting metadata (topics,
|
29
30
|
# partitions and replicas). The socket connections for sending the actual data will be
|
30
31
|
# established based on the broker information returned in the metadata. The format is
|
31
|
-
# host1:port1,host2:port2
|
32
|
+
# `host1:port1,host2:port2`, and the list can be a subset of brokers or a VIP pointing to a
|
32
33
|
# subset of brokers.
|
33
34
|
config :broker_list, :validate => :string, :default => 'localhost:9092'
|
34
35
|
# The topic to produce the messages to
|
35
36
|
config :topic_id, :validate => :string, :required => true
|
36
37
|
# This parameter allows you to specify the compression codec for all data generated by this
|
37
|
-
# producer. Valid values are
|
38
|
+
# producer. Valid values are `none`, `gzip` and `snappy`.
|
38
39
|
config :compression_codec, :validate => %w( none gzip snappy ), :default => 'none'
|
39
40
|
# This parameter allows you to set whether compression should be turned on for particular
|
40
|
-
# topics. If the compression codec is anything other than NoCompressionCodec
|
41
|
+
# topics. If the compression codec is anything other than `NoCompressionCodec`,
|
41
42
|
# enable compression only for specified topics if any. If the list of compressed topics is
|
42
43
|
# empty, then enable the specified compression codec for all topics. If the compression codec
|
43
|
-
# is NoCompressionCodec
|
44
|
+
# is `NoCompressionCodec`, compression is disabled for all topics
|
44
45
|
config :compressed_topics, :validate => :string, :default => ''
|
45
46
|
# This value controls when a produce request is considered completed. Specifically,
|
46
47
|
# how many other brokers must have committed the data to their log and acknowledged this to the
|
@@ -51,11 +52,11 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
51
52
|
# The partitioner class for partitioning messages amongst partitions in the topic. The default
|
52
53
|
# partitioner is based on the hash of the key. If the key is null,
|
53
54
|
# the message is sent to a random partition in the broker.
|
54
|
-
# NOTE: topic_metadata_refresh_interval_ms controls how long the producer will distribute to a
|
55
|
+
# NOTE: `topic_metadata_refresh_interval_ms` controls how long the producer will distribute to a
|
55
56
|
# partition in the topic. This defaults to 10 mins, so the producer will continue to write to a
|
56
57
|
# single partition for 10 mins before it switches
|
57
58
|
config :partitioner_class, :validate => :string, :default => 'kafka.producer.DefaultPartitioner'
|
58
|
-
# The amount of time the broker will wait trying to meet the request.required.acks requirement
|
59
|
+
# The amount of time the broker will wait trying to meet the `request.required.acks` requirement
|
59
60
|
# before sending back an error to the client.
|
60
61
|
config :request_timeout_ms, :validate => :number, :default => 10000
|
61
62
|
# This parameter specifies whether the messages are sent asynchronously in a background thread.
|
@@ -90,12 +91,12 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
90
91
|
# mode before either the producer must be blocked or data must be dropped.
|
91
92
|
config :queue_buffering_max_messages, :validate => :number, :default => 10000
|
92
93
|
# The amount of time to block before dropping messages when running in async mode and the
|
93
|
-
# buffer has reached queue.buffering.max.messages
|
94
|
+
# buffer has reached `queue.buffering.max.messages`. If set to 0 events will be enqueued
|
94
95
|
# immediately or dropped if the queue is full (the producer send call will never block). If set
|
95
96
|
# to -1 the producer will block indefinitely and never willingly drop a send.
|
96
97
|
config :queue_enqueue_timeout_ms, :validate => :number, :default => -1
|
97
98
|
# The number of messages to send in one batch when using async mode. The producer will wait
|
98
|
-
# until either this number of messages are ready to send or queue.buffer.max.ms is reached.
|
99
|
+
# until either this number of messages are ready to send or `queue.buffer.max.ms` is reached.
|
99
100
|
config :batch_num_messages, :validate => :number, :default => 200
|
100
101
|
# Socket write buffer size
|
101
102
|
config :send_buffer_bytes, :validate => :number, :default => 100 * 1024
|
@@ -1,7 +1,7 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
|
3
3
|
s.name = 'logstash-output-kafka'
|
4
|
-
s.version = '0.1.0'
|
4
|
+
s.version = '0.1.1'
|
5
5
|
s.licenses = ['Apache License (2.0)']
|
6
6
|
s.summary = 'Output events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on the broker'
|
7
7
|
s.description = 'Output events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on the broker'
|
@@ -1,14 +1,13 @@
|
|
1
1
|
# encoding: utf-8
|
2
|
+
require 'spec_helper'
|
2
3
|
|
3
|
-
|
4
|
-
|
5
|
-
require 'logstash/namespace'
|
6
|
-
require 'logstash/timestamp'
|
7
|
-
require 'logstash/outputs/kafka'
|
4
|
+
describe "outputs/kafka" do
|
5
|
+
let (:kafka_config) {{'topic_id' => 'test'}}
|
8
6
|
|
9
|
-
|
10
|
-
|
11
|
-
|
7
|
+
it "should register" do
|
8
|
+
output = LogStash::Plugin.lookup("output", "kafka").new(kafka_config)
|
9
|
+
expect {output.register}.to_not raise_error
|
10
|
+
end
|
12
11
|
|
13
12
|
it 'should populate kafka config with default values' do
|
14
13
|
kafka = LogStash::Outputs::Kafka.new(kafka_config)
|
@@ -20,11 +19,6 @@ describe LogStash::Outputs::Kafka do
|
|
20
19
|
insist {kafka.producer_type} == 'sync'
|
21
20
|
end
|
22
21
|
|
23
|
-
it 'should register and load kafka jars without errors' do
|
24
|
-
kafka = LogStash::Outputs::Kafka.new(kafka_config)
|
25
|
-
kafka.register
|
26
|
-
end
|
27
|
-
|
28
22
|
it 'should send logstash event to kafka broker' do
|
29
23
|
timestamp = LogStash::Timestamp.now
|
30
24
|
expect_any_instance_of(Kafka::Producer)
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-output-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.1.0
|
4
|
+
version: 0.1.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Elasticsearch
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2014-11-
|
11
|
+
date: 2014-11-14 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: logstash
|
@@ -100,7 +100,7 @@ files:
|
|
100
100
|
- Rakefile
|
101
101
|
- lib/logstash/outputs/kafka.rb
|
102
102
|
- logstash-output-kafka.gemspec
|
103
|
-
- spec/outputs/kafka.rb
|
103
|
+
- spec/outputs/kafka_spec.rb
|
104
104
|
homepage: http://logstash.net/
|
105
105
|
licenses:
|
106
106
|
- Apache License (2.0)
|
@@ -131,4 +131,4 @@ specification_version: 4
|
|
131
131
|
summary: Output events to a Kafka topic. This uses the Kafka Producer API to write
|
132
132
|
messages to a topic on the broker
|
133
133
|
test_files:
|
134
|
-
- spec/outputs/kafka.rb
|
134
|
+
- spec/outputs/kafka_spec.rb
|