logstash-output-sumologic 1.2.2 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +23 -1
- data/CONTRIBUTING.md +75 -0
- data/README.md +3 -2
- data/lib/logstash/outputs/sumologic/batch.rb +13 -0
- data/lib/logstash/outputs/sumologic/common.rb +30 -3
- data/lib/logstash/outputs/sumologic/compressor.rb +3 -3
- data/lib/logstash/outputs/sumologic/header_builder.rb +19 -33
- data/lib/logstash/outputs/sumologic/message_queue.rb +23 -15
- data/lib/logstash/outputs/sumologic/monitor.rb +11 -6
- data/lib/logstash/outputs/sumologic/payload_builder.rb +3 -4
- data/lib/logstash/outputs/sumologic/piler.rb +35 -37
- data/lib/logstash/outputs/sumologic/sender.rb +31 -29
- data/lib/logstash/outputs/sumologic/statistics.rb +7 -31
- data/lib/logstash/outputs/sumologic.rb +10 -17
- data/logstash-output-sumologic.gemspec +3 -3
- data/spec/outputs/sumologic/header_builder_spec.rb +134 -1
- data/spec/outputs/sumologic/message_queue_spec.rb +13 -11
- data/spec/outputs/sumologic/piler_spec.rb +67 -102
- data/spec/outputs/sumologic_spec.rb +10 -0
- metadata +16 -9
- data/DEVELOPER.md +0 -39
data/lib/logstash/outputs/sumologic/sender.rb

@@ -1,19 +1,19 @@
 # encoding: utf-8
-require "net/https"
-require "socket"
-require "thread"
-require "uri"
-require "logstash/outputs/sumologic/common"
-require "logstash/outputs/sumologic/compressor"
-require "logstash/outputs/sumologic/header_builder"
-require "logstash/outputs/sumologic/statistics"
-require "logstash/outputs/sumologic/message_queue"
 
 module LogStash; module Outputs; class SumoLogic;
   class Sender
 
+    require "net/https"
+    require "socket"
+    require "thread"
+    require "uri"
+    require "logstash/outputs/sumologic/common"
+    require "logstash/outputs/sumologic/compressor"
+    require "logstash/outputs/sumologic/header_builder"
+    require "logstash/outputs/sumologic/statistics"
+    require "logstash/outputs/sumologic/message_queue"
     include LogStash::Outputs::SumoLogic::Common
-
+
 
     def initialize(client, queue, stats, config)
       @client = client
@@ -28,9 +28,6 @@ module LogStash; module Outputs; class SumoLogic;
       @tokens = SizedQueue.new(@sender_max)
       @sender_max.times { |t| @tokens << t }
 
-      @header_builder = LogStash::Outputs::SumoLogic::HeaderBuilder.new(config)
-      @headers = @header_builder.build()
-      @stats_headers = @header_builder.build_stats()
       @compressor = LogStash::Outputs::SumoLogic::Compressor.new(config)
 
     end # def initialize
@@ -42,11 +39,11 @@ module LogStash; module Outputs; class SumoLogic;
       @stopping.make_false()
       @sender_t = Thread.new {
         while @stopping.false?
-          content = @queue.deq()
-          send_request(content)
+          batch = @queue.deq()
+          send_request(batch)
         end # while
-        @queue.drain().map { |content|
-          send_request(content)
+        @queue.drain().map { |batch|
+          send_request(batch)
         }
         log_info("waiting while senders finishing...")
         while @tokens.size < @sender_max
@@ -58,7 +55,7 @@ module LogStash; module Outputs; class SumoLogic;
     def stop()
       log_info("shutting down sender...")
       @stopping.make_true()
-      @queue.enq(STOP_TAG)
+      @queue.enq(Batch.new(Hash.new, STOP_TAG))
       @sender_t.join
       log_info("sender is fully shutted down")
     end # def stop
@@ -93,7 +90,9 @@ module LogStash; module Outputs; class SumoLogic;
 
     private
 
-    def send_request(content)
+    def send_request(batch)
+      content = batch.payload
+      headers = batch.headers
       if content == STOP_TAG
         log_info("STOP_TAG is received.")
         return
@@ -103,10 +102,8 @@ module LogStash; module Outputs; class SumoLogic;
 
       if @stats_enabled && content.start_with?(STATS_TAG)
         body = @compressor.compress(content[STATS_TAG.length..-1])
-        headers = @stats_headers
       else
         body = @compressor.compress(content)
-        headers = @headers
       end
 
       log_dbg("sending request",
@@ -128,8 +125,8 @@ module LogStash; module Outputs; class SumoLogic;
            :code => response.code,
            :headers => headers,
            :contet => content[0..20])
-          if response.code == 429 || response.code == 503 || response.code == 504
-            requeue_message(content)
+          if response.code == 429 || response.code == 502 || response.code == 503 || response.code == 504
+            requeue_message(batch)
          end
        else
          log_dbg("request accepted",
@@ -145,24 +142,29 @@ module LogStash; module Outputs; class SumoLogic;
          :message => exception.message,
          :class => exception.class.name,
          :backtrace => exception.backtrace)
-        requeue_message(content)
+        requeue_message(batch)
       end
 
       @stats.record_request(content.bytesize, body.bytesize)
       request.call
     end # def send_request
 
-    def requeue_message(content)
-      if @stopping.false? && @sleep_before_requeue >= 0
+    def requeue_message(batch)
+      content = batch.payload
+      if @stats_enabled && content.start_with?(STATS_TAG)
+        log_warn("do not requeue stats payload",
+          :content => content)
+      elsif @stopping.false? && @sleep_before_requeue >= 0
         log_info("requeue message",
          :after => @sleep_before_requeue,
          :queue_size => @queue.size,
          :content_size => content.size,
-          :content => content[0..20])
+          :content => content[0..20],
+          :headers => batch.headers)
        Stud.stoppable_sleep(@sleep_before_requeue) { @stopping.true? }
-        @queue.enq(content)
+        @queue.enq(batch)
      end
    end # def reque_message
 
  end
-end; end; end
+end; end; end
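The sender now dequeues `Batch` objects that carry their own headers alongside the payload, instead of raw strings paired with one shared header set. The new `data/lib/logstash/outputs/sumologic/batch.rb` (+13 lines) is not shown in this diff, so the sketch below is only an assumption of the minimal shape the sender code above relies on (`Batch.new(headers, payload)`, `#headers`, `#payload`); the released implementation may differ.

```ruby
# Hypothetical sketch of the Batch value object used by Sender above.
# Only the constructor argument order (headers first, as in
# `Batch.new(Hash.new, STOP_TAG)`) and the two readers are taken from the diff.
module LogStash; module Outputs; class SumoLogic;
  class Batch
    attr_reader :headers, :payload

    def initialize(headers, payload)
      @headers = headers   # per-request HTTP headers built from the event
      @payload = payload   # request body, or STOP_TAG / a stats payload
    end
  end
end; end; end
```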
data/lib/logstash/outputs/sumologic/statistics.rb

@@ -1,9 +1,9 @@
 # encoding: utf-8
-require "logstash/outputs/sumologic/common"
 
 module LogStash; module Outputs; class SumoLogic;
   class Statistics
 
+    require "logstash/outputs/sumologic/common"
     include LogStash::Outputs::SumoLogic::Common
 
     attr_reader :initialize_time
@@ -11,14 +11,10 @@ module LogStash; module Outputs; class SumoLogic;
     attr_reader :total_input_bytes
     attr_reader :total_metrics_datapoints
     attr_reader :total_log_lines
-    attr_reader :current_pile_items
-    attr_reader :current_pile_bytes
     attr_reader :total_enque_times
     attr_reader :total_enque_bytes
     attr_reader :total_deque_times
     attr_reader :total_deque_bytes
-    attr_reader :current_queue_items
-    attr_reader :current_queue_bytes
     attr_reader :total_output_requests
     attr_reader :total_output_bytes
     attr_reader :total_output_bytes_compressed
@@ -32,14 +28,10 @@ module LogStash; module Outputs; class SumoLogic;
       @total_input_bytes = Concurrent::AtomicFixnum.new
       @total_metrics_datapoints = Concurrent::AtomicFixnum.new
       @total_log_lines = Concurrent::AtomicFixnum.new
-      @current_pile_items = Concurrent::AtomicFixnum.new
-      @current_pile_bytes = Concurrent::AtomicFixnum.new
       @total_enque_times = Concurrent::AtomicFixnum.new
       @total_enque_bytes = Concurrent::AtomicFixnum.new
       @total_deque_times = Concurrent::AtomicFixnum.new
       @total_deque_bytes = Concurrent::AtomicFixnum.new
-      @current_queue_items = Concurrent::AtomicFixnum.new
-      @current_queue_bytes = Concurrent::AtomicFixnum.new
       @total_output_requests = Concurrent::AtomicFixnum.new
       @total_output_bytes = Concurrent::AtomicFixnum.new
       @total_output_bytes_compressed = Concurrent::AtomicFixnum.new
@@ -53,16 +45,9 @@ module LogStash; module Outputs; class SumoLogic;
       @total_response.get(key) ? @total_response.get(key).value : 0
     end
 
-    def record_multi_input(events, bytesize)
-      @total_input_events.update { |v| v + events }
-      @total_input_bytes.update { |v| v + bytesize }
-    end # def record_multi_input
-
-    def record_input(entry)
+    def record_input(size)
       @total_input_events.increment()
-      @total_input_bytes.update { |v| v + entry.bytesize }
-      @current_pile_items.increment()
-      @current_pile_bytes.update { |v| v + entry.bytesize }
+      @total_input_bytes.update { |v| v + size }
     end # def record_input
 
     def record_log_process()
@@ -73,23 +58,14 @@ module LogStash; module Outputs; class SumoLogic;
       @total_metrics_datapoints.update { |v| v + dps }
     end # def record_metrics_process
 
-    def record_pile_clear()
-      @current_pile_items.value= 0
-      @current_pile_bytes.value= 0
-    end # def record_pile_clear
-
-    def record_enque(payload)
+    def record_enque(size)
       @total_enque_times.increment()
-      @total_enque_bytes.update { |v| v + payload.bytesize }
-      @current_queue_items.increment()
-      @current_queue_bytes.update { |v| v + payload.bytesize }
+      @total_enque_bytes.update { |v| v + size }
     end # def record_enque
 
-    def record_deque(payload)
+    def record_deque(size)
       @total_deque_times.increment()
-      @total_deque_bytes.update { |v| v + payload.bytesize }
-      @current_queue_items.decrement()
-      @current_queue_bytes.update { |v| v - payload.bytesize }
+      @total_deque_bytes.update { |v| v + size }
     end # def record_deque
 
     def record_request(size, size_compressed)
data/lib/logstash/outputs/sumologic.rb

@@ -14,6 +14,7 @@ require "logstash/plugin_mixins/http_client"
 class LogStash::Outputs::SumoLogic < LogStash::Outputs::Base
   declare_threadsafe!
 
+  require "logstash/outputs/sumologic/batch"
   require "logstash/outputs/sumologic/common"
   require "logstash/outputs/sumologic/compressor"
   require "logstash/outputs/sumologic/header_builder"
@@ -101,22 +102,26 @@ class LogStash::Outputs::SumoLogic < LogStash::Outputs::Base
   # For carbon2 metrics format only, define the meta tags (which will NOT be used to identify the metrics)
   config :meta_tags, :validate => :hash, :default => {}
 
-  # For messages fail to send or get 429/503/504 response, try to resend after (x) seconds; don't resend if (x) < 0
+  # For messages fail to send or get 429/502/503/504 response, try to resend after (x) seconds; don't resend if (x) < 0
   config :sleep_before_requeue, :validate => :number, :default => 30
 
+  config :stats_category, :validate => :string, :default => CATEGORY_HEADER_DEFAULT_STATS
+
   # Sending throughput data as metrics
   config :stats_enabled, :validate => :boolean, :default => false
 
   # Sending throughput data points every (x) seconds
   config :stats_interval, :validate => :number, :default => 60
 
+  # Disable cookies by default (used in HTTP mixin)
+  config :cookies, :validate => :boolean, :default => false
+
   attr_reader :stats
 
   def register
     set_logger(@logger)
-    @stats = Statistics.new
+    @stats = Statistics.new
     @queue = MessageQueue.new(@stats, config)
-    @builder = PayloadBuilder.new(@stats, config)
     @piler = Piler.new(@queue, @stats, config)
     @monitor = Monitor.new(@queue, @stats, config)
     @sender = Sender.new(client, @queue, @stats, config)
@@ -130,24 +135,12 @@ class LogStash::Outputs::SumoLogic < LogStash::Outputs::Base
   end # def register
 
   def multi_receive(events)
-
-    begin
-      content = Array(events).map { |event| @builder.build(event) }.join($/)
-      @queue.enq(content)
-      @stats.record_multi_input(events.size, content.bytesize)
-    rescue Exception => exception
-      log_err("error when processing events",
-        :events => events,
-        :message => exception.message,
-        :class => exception.class.name,
-        :backtrace => exception.backtrace)
-    end
+    Array(events).map { |event| receive(event) }
   end # def multi_receive
 
   def receive(event)
     begin
-      content = @builder.build(event)
-      @piler.input(content)
+      @piler.input(event)
     rescue Exception => exception
       log_err("error when processing event",
        :event => event,
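Alongside the retry-code change (502 is now requeued like 429/503/504), two options are added to the plugin's config surface: `stats_category` and `cookies`. A minimal, hypothetical instantiation in the style of the plugin's specs, assuming only the option names shown above (the URL and category values are placeholders):

```ruby
# Hypothetical usage sketch exercising the options added in 1.4.0.
require "logstash/outputs/sumologic"

plugin = LogStash::Outputs::SumoLogic.new(
  "url"            => "http://localhost/1234",
  "stats_enabled"  => true,
  "stats_category" => "Logstash.stats",   # new: source category for stats payloads
  "cookies"        => false               # new: cookies disabled by default
)
plugin.register   # starts the piler, monitor, and sender threads
```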
data/logstash-output-sumologic.gemspec

@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-sumologic'
-  s.version = '1.2.2'
+  s.version = '1.4.0'
   s.licenses = ['Apache-2.0']
   s.summary = 'Deliever the log to Sumo Logic cloud service.'
   s.description = 'This gem is a Logstash output plugin to deliver the log or metrics to Sumo Logic cloud service. Go to https://github.com/SumoLogic/logstash-output-sumologic for getting help, reporting issues, etc.'
@@ -20,8 +20,8 @@ Gem::Specification.new do |s|
   # Gem dependencies
   s.add_runtime_dependency 'manticore', '>= 0.5.4', '< 1.0.0'
   s.add_runtime_dependency 'logstash-core-plugin-api', '>= 1.60', '<= 2.99'
-  s.add_runtime_dependency 'logstash-codec-plain'
-  s.add_runtime_dependency 'logstash-mixin-http_client'
+  s.add_runtime_dependency 'logstash-mixin-http_client', '>= 6', '< 8'
 
+  s.add_development_dependency 'logstash-codec-plain'
   s.add_development_dependency 'logstash-devutils'
 end
data/spec/outputs/sumologic/header_builder_spec.rb

@@ -5,12 +5,14 @@ require "logstash/outputs/sumologic"
 describe LogStash::Outputs::SumoLogic::HeaderBuilder do
 
   result = {}
+  event = LogStash::Event.new("foo" => "bar", "message" => "Hello world")
 
   before :each do
-    result = builder.build()
+    result = builder.build(event)
   end
 
   context "should build headers by default" do
+
     let(:builder) { LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234") }
 
     specify {
@@ -41,6 +43,21 @@ describe LogStash::Outputs::SumoLogic::HeaderBuilder do
 
   end # context
 
+  context "should override source_category with template" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "source_category" => "my source category %{foo}")
+    }
+
+    specify {
+      expect(result.count).to eq(5)
+      expect(result["X-Sumo-Category"]).to eq("my source category bar")
+    }
+
+  end # context
+
   context "should override source_name" do
 
     let(:builder) {
@@ -56,6 +73,21 @@ describe LogStash::Outputs::SumoLogic::HeaderBuilder do
 
   end # context
 
+  context "should override source_name with template" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "source_name" => "my source name %{foo}")
+    }
+
+    specify {
+      expect(result.count).to eq(5)
+      expect(result["X-Sumo-Name"]).to eq("my source name bar")
+    }
+
+  end # context
+
   context "should override source_host" do
 
     let(:builder) {
@@ -71,6 +103,21 @@ describe LogStash::Outputs::SumoLogic::HeaderBuilder do
 
   end # context
 
+  context "should override source_host with template" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "source_host" => "my source host %{foo}")
+    }
+
+    specify {
+      expect(result.count).to eq(5)
+      expect(result["X-Sumo-Host"]).to eq("my source host bar")
+    }
+
+  end # context
+
   context "should hornor extra_headers" do
 
     let(:builder) {
@@ -194,4 +241,90 @@ describe LogStash::Outputs::SumoLogic::HeaderBuilder do
 
   end # context
 
+  context "should build headers for stats" do
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234")
+    }
+
+    specify {
+      stats_result = builder.build_stats()
+      expected = {
+        "X-Sumo-Client" => "logstash-output-sumologic",
+        "X-Sumo-Name" => "logstash-output-sumologic",
+        "X-Sumo-Host" => Socket.gethostname,
+        "X-Sumo-Category" => "Logstash.stats",
+        "Content-Type" => "application/vnd.sumologic.carbon2"
+      }
+      expect(stats_result).to eq(expected)
+    }
+  end
+
+  context "should build headers for stats with overridden source category" do
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234", "stats_category" => "custom")
+    }
+
+    specify {
+      stats_result = builder.build_stats()
+      expect(stats_result["X-Sumo-Category"]).to eq("custom")
+    }
+  end
+
+  context "should build headers for stats with compression" do
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234", "compress" => true, "compress_encoding" => "gzip")
+    }
+
+    specify {
+      stats_result = builder.build_stats()
+      expect(stats_result["Content-Encoding"]).to eq("gzip")
+    }
+  end
+
+  context "should build headers for stats with extra_headers" do
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "extra_headers" => {
+          "foo" => "bar"
+        })
+    }
+
+    specify {
+      stats_result = builder.build_stats()
+      expect(stats_result.count).to eq(6)
+      expect(stats_result["foo"]).to eq("bar")
+    }
+  end
+
+  context "should build headers for stats with extra_headers but never overwrite pre-defined headers" do
+
+    let(:builder) {
+      LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+        "url" => "http://localhost/1234",
+        "extra_headers" => {
+          "foo" => "bar",
+          "X-Sumo-Client" => "a",
+          "X-Sumo-Name" => "b",
+          "X-Sumo-Host" => "c",
+          "X-Sumo-Category" => "d",
+          "Content-Type" => "e"
+        })
+    }
+
+    specify {
+      stats_result = builder.build_stats()
+      expected = {
+        "foo" => "bar",
+        "X-Sumo-Client" => "logstash-output-sumologic",
+        "X-Sumo-Name" => "logstash-output-sumologic",
+        "X-Sumo-Host" => Socket.gethostname,
+        "X-Sumo-Category" => "Logstash.stats",
+        "Content-Type" => "application/vnd.sumologic.carbon2"
+      }
+      expect(stats_result).to eq(expected)
+    }
+
+  end # context
+
 end # describe
data/spec/outputs/sumologic/message_queue_spec.rb

@@ -1,46 +1,48 @@
 # encoding: utf-8
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/outputs/sumologic"
+include LogStash::Outputs
+
+describe SumoLogic::MessageQueue do
 
-describe LogStash::Outputs::SumoLogic::MessageQueue do
-
   context "working in pile mode if interval > 0 && pile_max > 0" do
 
-    let(:stats) { LogStash::Outputs::SumoLogic::Statistics.new }
+    let(:queue) { SumoLogic::MessageQueue.new(stats, "queue_max" => 10) }
+    let(:stats) { SumoLogic::Statistics.new }
 
     it "enq() correctly" do
-      queue = LogStash::Outputs::SumoLogic::MessageQueue.new(stats, "queue_max" => 10)
       10.times { |i|
-        queue.enq("test - #{i}")
+        queue.enq(SumoLogic::Batch.new(Hash.new, "test - #{i}"))
         expect(queue.size()).to eq(i + 1)
         expect(stats.total_enque_times.value).to eq(i + 1)
       }
+      expect(queue.bytesize()).to eq(100)
     end
 
     it "deq() correctly" do
-      queue = LogStash::Outputs::SumoLogic::MessageQueue.new(stats, "queue_max" => 10)
       10.times { |i|
-        queue.enq("test - #{i}")
+        queue.enq(SumoLogic::Batch.new(Hash.new, "test - #{i}"))
       }
       10.times { |i|
         expect(queue.size()).to eq(10 - i)
         result = queue.deq()
-        expect(result).to eq("test - #{i}")
+        expect(result.payload).to eq("test - #{i}")
         expect(stats.total_deque_times.value).to eq(i + 1)
       }
+      expect(queue.bytesize()).to eq(0)
     end
 
     it "drain() correctly" do
-      queue = LogStash::Outputs::SumoLogic::MessageQueue.new(stats, "queue_max" => 10)
       10.times { |i|
-        queue.enq("test - #{i}")
+        queue.enq(SumoLogic::Batch.new(Hash.new, "test - #{i}"))
       }
       result = queue.drain()
       expect(queue.size()).to eq(0)
       expect(stats.total_deque_times.value).to eq(10)
       expect(result.size).to eq(10)
+      expect(queue.bytesize()).to eq(0)
       10.times { |i|
-        expect(result[i]).to eq("test - #{i}")
+        expect(result[i].payload).to eq("test - #{i}")
       }
     end
 