logstash-output-sumologic 1.1.4 → 1.1.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/CHANGELOG.md +11 -6
- data/DEVELOPER.md +19 -7
- data/Gemfile +2 -1
- data/README.md +110 -38
- data/lib/logstash/outputs/sumologic.rb +79 -314
- data/lib/logstash/outputs/sumologic/common.rb +57 -0
- data/lib/logstash/outputs/sumologic/compressor.rb +39 -0
- data/lib/logstash/outputs/sumologic/header_builder.rb +79 -0
- data/lib/logstash/outputs/sumologic/message_queue.rb +38 -0
- data/lib/logstash/outputs/sumologic/monitor.rb +72 -0
- data/lib/logstash/outputs/sumologic/payload_builder.rb +155 -0
- data/lib/logstash/outputs/sumologic/piler.rb +87 -0
- data/lib/logstash/outputs/sumologic/sender.rb +167 -0
- data/lib/logstash/outputs/sumologic/statistics.rb +124 -0
- data/logstash-output-sumologic.gemspec +17 -15
- data/spec/outputs/sumologic/compressor_spec.rb +27 -0
- data/spec/outputs/sumologic/header_builder_spec.rb +197 -0
- data/spec/outputs/sumologic/message_queue_spec.rb +48 -0
- data/spec/outputs/sumologic/payload_builder_spec.rb +523 -0
- data/spec/outputs/sumologic/piler_spec.rb +189 -0
- data/spec/outputs/sumologic/sender_spec.rb +188 -0
- data/spec/outputs/sumologic_spec.rb +224 -400
- data/spec/test_server.rb +49 -0
- metadata +71 -37
- data/CONTRIBUTORS +0 -13
- data/spec/spec_helper.rb +0 -61
```diff
--- /dev/null
+++ b/data/lib/logstash/outputs/sumologic/sender.rb
@@ -0,0 +1,167 @@
+# encoding: utf-8
+require "net/https"
+require "socket"
+require "thread"
+require "uri"
+require "logstash/outputs/sumologic/common"
+require "logstash/outputs/sumologic/compressor"
+require "logstash/outputs/sumologic/header_builder"
+require "logstash/outputs/sumologic/statistics"
+require "logstash/outputs/sumologic/message_queue"
+
+module LogStash; module Outputs; class SumoLogic;
+  class Sender
+
+    include LogStash::Outputs::SumoLogic::Common
+    STOP_TAG = "PLUGIN STOPPED"
+
+    def initialize(client, queue, stats, config)
+      @client = client
+      @queue = queue
+      @stats = stats
+      @stopping = Concurrent::AtomicBoolean.new(false)
+      @url = config["url"]
+      @sender_max = (config["sender_max"] ||= 1) < 1 ? 1 : config["sender_max"]
+      @sleep_before_requeue = config["sleep_before_requeue"] ||= 30
+      @stats_enabled = config["stats_enabled"] ||= false
+
+      @tokens = SizedQueue.new(@sender_max)
+      @sender_max.times { |t| @tokens << t }
+
+      @header_builder = LogStash::Outputs::SumoLogic::HeaderBuilder.new(config)
+      @headers = @header_builder.build()
+      @stats_headers = @header_builder.build_stats()
+      @compressor = LogStash::Outputs::SumoLogic::Compressor.new(config)
+
+    end # def initialize
+
+    def start()
+      @stopping.make_false()
+      @sender_t = Thread.new {
+        while @stopping.false?
+          content = @queue.deq()
+          send_request(content)
+        end # while
+        @queue.drain().map { |content|
+          send_request(content)
+        }
+        log_info "waiting messages sent out..."
+        while @tokens.size < @sender_max
+          sleep 1
+        end # while
+      }
+    end # def start_sender
+
+    def stop()
+      log_info "shutting down sender..."
+      @stopping.make_true()
+      @queue.enq(STOP_TAG)
+      @sender_t.join
+      log_info "sender is fully shutted down"
+    end # def stop_sender
+
+    def connect()
+      uri = URI.parse(@url)
+      http = Net::HTTP.new(uri.host, uri.port)
+      http.use_ssl = @url.downcase().start_with?("https")
+      request = Net::HTTP::Get.new(uri.request_uri)
+      begin
+        res = http.request(request)
+        if res.code.to_i != 200
+          log_err(
+            "Server rejected the request",
+            :url => @url,
+            :code => res.code
+          )
+          false
+        else
+          log_dbg(
+            "Server accepted the request",
+            :url => @url
+          )
+          true
+        end
+      rescue Exception => ex
+        log_err(
+          "Cannot connect to given url",
+          :url => @url,
+          :exception => ex
+        )
+        false
+      end
+    end # def connect
+
+    private
+
+    def send_request(content)
+      if content == STOP_TAG
+        log_dbg "STOP_TAG is received."
+        return
+      end
+
+      token = @tokens.pop()
+
+      if @stats_enabled && content.start_with?(STATS_TAG)
+        body = @compressor.compress(content[STATS_TAG.length..-1])
+        headers = @stats_headers
+      else
+        body = @compressor.compress(content)
+        headers = @headers
+      end
+
+      request = @client.send(:background).send(:post, @url, :body => body, :headers => headers)
+
+      request.on_complete do
+        @tokens << token
+      end
+
+      request.on_success do |response|
+        @stats.record_response_success(response.code)
+        if response.code < 200 || response.code > 299
+          log_err(
+            "HTTP request rejected(#{response.code})",
+            :token => token,
+            :code => response.code,
+            :headers => headers,
+            :contet => content[0..20]
+          )
+          if response.code == 429 || response.code == 503 || response.code == 504
+            requeue_message(content)
+          end
+        else
+          log_dbg(
+            "HTTP request accepted",
+            :token => token,
+            :code => response.code)
+        end
+      end
+
+      request.on_failure do |exception|
+        @stats.record_response_failure()
+        log_err(
+          "Error in network transmission",
+          :token => token,
+          :message => exception.message,
+          :class => exception.class.name,
+          :backtrace => exception.backtrace
+        )
+        requeue_message(content)
+      end
+
+      @stats.record_request(content.bytesize, body.bytesize)
+      request.call
+    end # def send_request
+
+    def requeue_message(content)
+      if @stopping.false? && @sleep_before_requeue >= 0
+        log_warn(
+          "requeue message",
+          :after => @sleep_before_requeue,
+          :content => content[0..20])
+        Stud.stoppable_sleep(@sleep_before_requeue) { @stopping.true? }
+        @queue.enq(content)
+      end
+    end # def reque_message
+
+  end
+end; end; end
```
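The `Sender` added here throttles concurrency with a token pool: `sender_max` tokens are pre-loaded into a `SizedQueue`, `send_request` blocks on `@tokens.pop` until a token is free, and the request's `on_complete` callback puts it back. Stripped of the plugin specifics, the pattern looks roughly like the sketch below (synchronous for brevity; the plugin returns the token from the asynchronous callback rather than an `ensure` block).

```ruby
require "thread"

# Generic token-pool throttle: at most `max` pieces of work in flight at once.
class TokenPool
  def initialize(max)
    @tokens = SizedQueue.new(max)
    max.times { |t| @tokens << t }    # pre-load one token per allowed slot
  end

  # Blocks until a token is free, yields it, then returns it to the pool
  # (the plugin does the return step in the request's on_complete callback).
  def throttle
    token = @tokens.pop               # blocks while all tokens are checked out
    begin
      yield token
    ensure
      @tokens << token
    end
  end
end

pool = TokenPool.new(4)
threads = 10.times.map do |i|
  Thread.new { pool.throttle { |t| puts "request #{i} holding token #{t}" } }
end
threads.each(&:join)
```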
```diff
--- /dev/null
+++ b/data/lib/logstash/outputs/sumologic/statistics.rb
@@ -0,0 +1,124 @@
+# encoding: utf-8
+require "logstash/outputs/sumologic/common"
+
+module LogStash; module Outputs; class SumoLogic;
+  class Statistics
+
+    include LogStash::Outputs::SumoLogic::Common
+
+    attr_reader :initialize_time
+    attr_reader :total_input_events
+    attr_reader :total_input_bytes
+    attr_reader :total_metrics_datapoints
+    attr_reader :total_log_lines
+    attr_reader :current_pile_items
+    attr_reader :current_pile_bytes
+    attr_reader :total_enque_times
+    attr_reader :total_enque_bytes
+    attr_reader :total_deque_times
+    attr_reader :total_deque_bytes
+    attr_reader :current_queue_items
+    attr_reader :current_queue_bytes
+    attr_reader :total_output_requests
+    attr_reader :total_output_bytes
+    attr_reader :total_output_bytes_compressed
+    attr_reader :total_response
+    attr_reader :total_response_times
+    attr_reader :total_response_success
+
+    def initialize()
+      @initialize_time = Time.now()
+      @total_input_events = Concurrent::AtomicFixnum.new
+      @total_input_bytes = Concurrent::AtomicFixnum.new
+      @total_metrics_datapoints = Concurrent::AtomicFixnum.new
+      @total_log_lines = Concurrent::AtomicFixnum.new
+      @current_pile_items = Concurrent::AtomicFixnum.new
+      @current_pile_bytes = Concurrent::AtomicFixnum.new
+      @total_enque_times = Concurrent::AtomicFixnum.new
+      @total_enque_bytes = Concurrent::AtomicFixnum.new
+      @total_deque_times = Concurrent::AtomicFixnum.new
+      @total_deque_bytes = Concurrent::AtomicFixnum.new
+      @current_queue_items = Concurrent::AtomicFixnum.new
+      @current_queue_bytes = Concurrent::AtomicFixnum.new
+      @total_output_requests = Concurrent::AtomicFixnum.new
+      @total_output_bytes = Concurrent::AtomicFixnum.new
+      @total_output_bytes_compressed = Concurrent::AtomicFixnum.new
+      @total_response = Concurrent::Map.new
+      @total_response_times = Concurrent::AtomicFixnum.new
+      @total_response_success = Concurrent::AtomicFixnum.new
+
+    end # def initialize
+
+    def total_response(key)
+      @total_response.get(key) ? @total_response.get(key).value : 0
+    end
+
+    def record_multi_input(events, bytesize)
+      @total_input_events.update { |v| v + events }
+      @total_input_bytes.update { |v| v + bytesize }
+    end # def record_multi_input
+
+    def record_input(entry)
+      @total_input_events.increment()
+      @total_input_bytes.update { |v| v + entry.bytesize }
+      @current_pile_items.increment()
+      @current_pile_bytes.update { |v| v + entry.bytesize }
+    end # def record_input
+
+    def record_log_process()
+      @total_log_lines.increment()
+    end # def record_log_process
+
+    def record_metrics_process(dps)
+      @total_metrics_datapoints.update { |v| v + dps }
+    end # def record_metrics_process
+
+    def record_clear_pile()
+      @current_pile_items.value= 0
+      @current_pile_bytes.value= 0
+    end # def record_pile_clear
+
+    def record_enque(payload)
+      @total_enque_times.increment()
+      @total_enque_bytes.update { |v| v + payload.bytesize }
+      @current_queue_items.increment()
+      @current_queue_bytes.update { |v| v + payload.bytesize }
+    end # def record_enque
+
+    def record_deque(payload)
+      @total_deque_times.increment()
+      @total_deque_bytes.update { |v| v + payload.bytesize }
+      @current_queue_items.decrement()
+      @current_queue_bytes.update { |v| v - payload.bytesize }
+    end # def record_deque
+
+    def record_request(size, size_compressed)
+      @total_output_requests.increment()
+      @total_output_bytes.update { |v| v + size }
+      @total_output_bytes_compressed.update { |v| v + size_compressed }
+    end # def record_request
+
+    def record_response_success(code)
+      atomic_map_increase(@total_response, code.to_s)
+      @total_response_success.increment() if code == 200
+      @total_response_times.increment()
+    end # def record_response_success
+
+    def record_response_failure()
+      atomic_map_increase(@total_response, "failure")
+    end # def record_response_failure
+
+    def atomic_map_increase(map, key)
+      number = map.get(key)
+      if number.nil?
+        newNumber = Concurrent::AtomicFixnum.new
+        number = map.put_if_absent(key, newNumber)
+        if number.nil?
+          number = newNumber
+        end
+      end
+      number.increment()
+    end # def atomic_map_increase
+
+  end
+end; end; end
```
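`atomic_map_increase` is the lock-free piece worth calling out: when two threads race to create the counter for a new response code, `Concurrent::Map#put_if_absent` returns `nil` to the thread whose value was stored and the already-stored counter to the loser, so both end up incrementing the same `AtomicFixnum`. A minimal standalone demonstration of the same pattern (assuming the `concurrent-ruby` gem, which Logstash already ships with):

```ruby
require "concurrent"

# Increment the counter stored under `key`, creating it on first use without a lock.
def bump(map, key)
  counter = map[key]
  if counter.nil?
    candidate = Concurrent::AtomicFixnum.new
    # put_if_absent returns nil when our candidate was stored, or the value another
    # thread stored first -- either way every thread increments one shared object.
    counter = map.put_if_absent(key, candidate) || candidate
  end
  counter.increment
end

counters = Concurrent::Map.new
8.times.map { Thread.new { 1000.times { bump(counters, "200") } } }.each(&:join)
puts counters["200"].value   # => 8000
```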
```diff
--- a/data/logstash-output-sumologic.gemspec
+++ b/data/logstash-output-sumologic.gemspec
@@ -1,25 +1,27 @@
 Gem::Specification.new do |s|
-  s.name =
-  s.version =
-  s.licenses = [
-  s.summary =
-  s.description =
-  s.authors = [
-  s.email =
-  s.homepage =
-  s.require_paths = [
+  s.name = 'logstash-output-sumologic'
+  s.version = '1.1.9'
+  s.licenses = ['Apache-2.0']
+  s.summary = 'Deliever the log to Sumo Logic cloud service.'
+  s.description = 'This gem is a Logstash output plugin to deliver the log or metrics to Sumo Logic cloud service. Go to https://github.com/SumoLogic/logstash-output-sumologic for getting help, reporting issues, etc.'
+  s.authors = ['Sumo Logic']
+  s.email = 'collection@sumologic.com '
+  s.homepage = 'https://github.com/SumoLogic/logstash-output-sumologic'
+  s.require_paths = ['lib']
 
   # Files
-  s.files = Dir[
+  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
   # Tests
   s.test_files = s.files.grep(%r{^(test|spec|features)/})
 
   # Special flag to let us know this is actually a logstash plugin
-  s.metadata = {
+  s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'output' }
 
   # Gem dependencies
-  s.add_runtime_dependency
-  s.add_runtime_dependency
-  s.add_runtime_dependency 'logstash-
-  s.
+  s.add_runtime_dependency 'manticore', '>= 0.5.4', '< 1.0.0'
+  s.add_runtime_dependency 'logstash-core-plugin-api', '>= 1.60', '<= 2.99'
+  s.add_runtime_dependency 'logstash-codec-plain'
+  s.add_runtime_dependency 'logstash-mixin-http_client'
+
+  s.add_development_dependency 'logstash-devutils'
 end
```
```diff
--- /dev/null
+++ b/data/spec/outputs/sumologic/compressor_spec.rb
@@ -0,0 +1,27 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/sumologic"
+
+describe LogStash::Outputs::SumoLogic::Compressor do
+
+  context "compress (deflate)" do
+    let(:compressor) {
+      LogStash::Outputs::SumoLogic::Compressor.new("compress" => true, "compress_encoding" => "deflate")
+    }
+    specify {
+      expect(compressor.compress("abcde").bytesize).to eq(13)
+      expect(compressor.compress("aaaaa").bytesize).to eq(11)
+    }
+  end # context
+
+  context "compress (gzip)" do
+    let(:compressor) {
+      LogStash::Outputs::SumoLogic::Compressor.new("compress" => true, "compress_encoding" => "gzip")
+    }
+    specify {
+      expect(compressor.compress("abcde").bytesize).to eq(25)
+      expect(compressor.compress("aaaaa").bytesize).to eq(23)
+    }
+  end # context
+
+end # describe
```
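The expected byte sizes follow from the framing of the two encodings: zlib ("deflate") adds a 2-byte header and 4-byte Adler-32 trailer, while gzip adds a 10-byte header and 8-byte CRC-32/size trailer, hence the constant 12-byte difference (13 vs. 25, 11 vs. 23). The `Compressor` class itself is not shown in this diff, so the sketch below only illustrates how Ruby's standard `Zlib` library produces those two encodings; it is not the plugin's implementation, and the method name and signature are assumptions.

```ruby
require "zlib"
require "stringio"

# Illustration only: produce the "deflate" (zlib) and "gzip" encodings with the
# Ruby standard library.
def encode(content, encoding)
  if encoding == "gzip"
    io = StringIO.new
    gz = Zlib::GzipWriter.new(io)
    gz.write(content)
    gz.close
    io.string                      # 10-byte header + deflate data + 8-byte trailer
  else
    Zlib::Deflate.deflate(content) # 2-byte header + deflate data + 4-byte Adler-32
  end
end

encode("abcde", "deflate").bytesize  # => 13
encode("abcde", "gzip").bytesize     # => 25
```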
```diff
--- /dev/null
+++ b/data/spec/outputs/sumologic/header_builder_spec.rb
@@ -0,0 +1,197 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/sumologic"
+
+describe LogStash::Outputs::SumoLogic::HeaderBuilder do
+
+  result = {}
+
+  before :each do
+    result = builder.build()
+  end
+
+  context "should build headers by default" do
+    let(:builder) { LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234") }
+
+    specify {
+      expected = {
+        "X-Sumo-Client" => "logstash-output-sumologic",
+        "X-Sumo-Name" => "logstash-output-sumologic",
+        "X-Sumo-Host" => Socket.gethostname,
+        "X-Sumo-Category" => "Logstash",
+        "Content-Type" => "text/plain"
+      }
+      expect(result).to eq(expected)
+    }
+
+  end # context
+
+  context "should override source_category" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "source_category" => "my source category")
+    }
+
+    specify {
+      expect(result.count).to eq(5)
+      expect(result["X-Sumo-Category"]).to eq("my source category")
+    }
+
+  end # context
+
+  context "should override source_name" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "source_name" => "my source name")
+    }
+
+    specify {
+      expect(result.count).to eq(5)
+      expect(result["X-Sumo-Name"]).to eq("my source name")
+    }
+
+  end # context
+
+  context "should override source_host" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "source_host" => "my source host")
+    }
+
+    specify {
+      expect(result.count).to eq(5)
+      expect(result["X-Sumo-Host"]).to eq("my source host")
+    }
+
+  end # context
+
+  context "should hornor extra_headers" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "extra_headers" => {
+          "foo" => "bar"
+        })
+    }
+
+    specify {
+      expect(result.count).to eq(6)
+      expect(result["foo"]).to eq("bar")
+    }
+
+  end # context
+
+  context "should hornor extra_headers but never overwrite pre-defined headers" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "extra_headers" => {
+          "foo" => "bar",
+          "X-Sumo-Client" => "a",
+          "X-Sumo-Name" => "b",
+          "X-Sumo-Host" => "c",
+          "X-Sumo-Category" => "d",
+          "Content-Type" => "e"
+        })
+    }
+
+    specify {
+      expected = {
+        "foo" => "bar",
+        "X-Sumo-Client" => "logstash-output-sumologic",
+        "X-Sumo-Name" => "logstash-output-sumologic",
+        "X-Sumo-Host" => Socket.gethostname,
+        "X-Sumo-Category" => "Logstash",
+        "Content-Type" => "text/plain"
+      }
+      expect(result).to eq(expected)
+    }
+
+  end # context
+
+  context "should set content type correctly for log payload" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234")
+    }
+
+    specify {
+      expect(result["Content-Type"]).to eq("text/plain")
+    }
+
+  end # context
+
+  context "should set content type correctly for metrics payload (CarbonV2, default)" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "fields_as_metrics" => true)
+    }
+
+    specify {
+      expect(result["Content-Type"]).to eq("application/vnd.sumologic.carbon2")
+    }
+
+  end # context
+
+  context "should set content type correctly for metrics payload (Graphite)" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "metrics_format" => "graphite",
+        "fields_as_metrics" => true)
+    }
+
+    specify {
+      expect(result["Content-Type"]).to eq("application/vnd.sumologic.graphite")
+    }
+
+  end # context
+
+  context "should set content encoding correctly for uncompressed payload" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234")
+    }
+
+    specify {
+      expect(result["Content-Encoding"]).to be_nil
+    }
+
+  end # context
+
+  context "should set content encoding correctly for compressed payload (deflate, default)" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234", "compress" => true)
+    }
+
+    specify {
+      expect(result["Content-Encoding"]).to eq("deflate")
+    }
+
+  end # context
+
+  context "should set content encoding correctly for compressed payload (gzip)" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234", "compress" => true, "compress_encoding" => "gzip")
+    }
+
+    specify {
+      expect(result["Content-Encoding"]).to eq("gzip")
+    }
+
+  end # context
+
+end # describe
```
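Taken together, these specs pin down a merge rule rather than an implementation: user-supplied `extra_headers` are passed through, but the plugin-owned keys (`X-Sumo-Client`, `X-Sumo-Name`, `X-Sumo-Host`, `X-Sumo-Category`, `Content-Type`) always win. A sketch of that rule, shown only to summarize the behavior the tests assert, not the plugin's actual code:

```ruby
require "socket"

# Plugin-owned defaults, matching the values the specs expect.
DEFAULT_HEADERS = {
  "X-Sumo-Client"   => "logstash-output-sumologic",
  "X-Sumo-Name"     => "logstash-output-sumologic",
  "X-Sumo-Host"     => Socket.gethostname,
  "X-Sumo-Category" => "Logstash",
  "Content-Type"    => "text/plain"
}

def build_headers(extra_headers = {})
  # Merging the defaults last means extra_headers can add keys like "foo",
  # but can never overwrite a pre-defined header.
  extra_headers.merge(DEFAULT_HEADERS)
end

build_headers("foo" => "bar", "Content-Type" => "e")
# => keeps "foo" => "bar" plus all five defaults; the "Content-Type" override is dropped
```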