hshek-logstash-output-sumologic 0.0.2

@@ -0,0 +1,89 @@
+ # encoding: utf-8
+
+ module LogStash; module Outputs; class SumoLogic;
+   class Piler
+
+     require "logstash/outputs/sumologic/common"
+     require "logstash/outputs/sumologic/statistics"
+     require "logstash/outputs/sumologic/message_queue"
+     include LogStash::Outputs::SumoLogic::Common
+
+     attr_reader :is_pile
+
+     def initialize(queue, stats, config)
+
+       @interval = config["interval"] ||= 0
+       @pile_max = config["pile_max"] ||= 0
+       @queue = queue
+       @stats = stats
+       @stopping = Concurrent::AtomicBoolean.new(false)
+       @payload_builder = PayloadBuilder.new(@stats, config)
+       @header_builder = HeaderBuilder.new(config)
+       # Piling is only enabled when both a flush interval and a size cap are configured.
+       @is_pile = (@interval > 0 && @pile_max > 0)
+       if (@is_pile)
+         @pile = Hash.new("")    # headers => accumulated payload, "" when empty
+         @semaphore = Mutex.new
+       end
+
+     end # def initialize
+
+     def start()
+       @stopping.make_false()
+       if (@is_pile)
+         log_info("starting piler...",
+           :max => @pile_max,
+           :timeout => @interval)
+         # Flush all piles every @interval seconds until stop() is called.
+         @piler_t = Thread.new {
+           while @stopping.false?
+             Stud.stoppable_sleep(@interval) { @stopping.true? }
+             log_dbg("timeout", :timeout => @interval)
+             enq_and_clear()
+           end # while
+         }
+       end # if
+     end # def start
+
+     def stop()
+       @stopping.make_true()
+       if (@is_pile)
+         log_info("shutting down piler in #{@interval * 2} secs ...")
+         @piler_t.join(@interval * 2)
+         log_info("piler is fully shut down")
+       end
+     end # def stop
+
+     def input(event)
+       if (@stopping.true?)
+         log_warn("piler is shutting down, event is dropped",
+           "event" => event)
+       else
+         headers = @header_builder.build(event)
+         payload = @payload_builder.build(event)
+         if (@is_pile)
+           @semaphore.synchronize {
+             content = @pile[headers]
+             size = content.bytesize
+             # Flush this header group first if appending would exceed the size cap.
+             if size + payload.bytesize > @pile_max
+               @queue.enq(Batch.new(headers, content))
+               @pile[headers] = ""
+             end
+             @pile[headers] = @pile[headers].blank? ? payload : "#{@pile[headers]}\n#{payload}"
+           }
+         else
+           @queue.enq(Batch.new(headers, payload))
+         end # if
+       end
+     end # def input
+
+     private
+     def enq_and_clear()
+       @semaphore.synchronize {
+         @pile.each do |headers, content|
+           @queue.enq(Batch.new(headers, content))
+         end
+         @pile.clear()
+       }
+     end # def enq_and_clear
+
+   end
+ end; end; end
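The Piler above groups payloads by their header set and flushes a group either when appending the next payload would push it past pile_max bytes or when the interval timer fires. Below is a minimal standalone sketch of that grouping idea using only the Ruby standard library; names such as MAX_BYTES, append, and flushed are illustrative and not part of the plugin.

# Standalone model of the piling strategy (illustrative only, not the plugin's API).
require "thread"

MAX_BYTES = 32           # flush threshold per header group (tiny, to make the example flush)
pile    = Hash.new("")   # headers => accumulated payload, "" when the group is empty
lock    = Mutex.new
flushed = []             # stands in for the outbound message queue

append = lambda do |headers, payload|
  lock.synchronize do
    if pile[headers].bytesize + payload.bytesize > MAX_BYTES
      flushed << [headers, pile[headers]]               # hand the full group to the sender
      pile[headers] = ""
    end
    pile[headers] = pile[headers].empty? ? payload : "#{pile[headers]}\n#{payload}"
  end
end

5.times { |i| append.call({ "X-Sumo-Category" => "Logstash" }, "log line #{i}") }

# Timer-driven flush: enqueue whatever is left and clear the piles.
lock.synchronize do
  pile.each { |headers, content| flushed << [headers, content] }
  pile.clear
end

flushed.each { |_, content| puts content.inspect }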
@@ -0,0 +1,172 @@
+ # encoding: utf-8
+
+ module LogStash; module Outputs; class SumoLogic;
+   class Sender
+
+     require "net/https"
+     require "socket"
+     require "thread"
+     require "uri"
+     require "logstash/outputs/sumologic/common"
+     require "logstash/outputs/sumologic/compressor"
+     require "logstash/outputs/sumologic/header_builder"
+     require "logstash/outputs/sumologic/statistics"
+     require "logstash/outputs/sumologic/message_queue"
+     include LogStash::Outputs::SumoLogic::Common
+
+     def initialize(client, queue, stats, config)
+       @client = client
+       @queue = queue
+       @stats = stats
+       @stopping = Concurrent::AtomicBoolean.new(false)
+       @url = config["url"]
+       @sender_max = (config["sender_max"] ||= 1) < 1 ? 1 : config["sender_max"]
+       @sleep_before_requeue = config["sleep_before_requeue"] ||= 30
+       @stats_enabled = config["stats_enabled"] ||= false
+
+       # Tokens act as a semaphore: at most @sender_max requests are in flight at once.
+       @tokens = SizedQueue.new(@sender_max)
+       @sender_max.times { |t| @tokens << t }
+
+       @compressor = LogStash::Outputs::SumoLogic::Compressor.new(config)
+
+     end # def initialize
+
+     def start()
+       log_info("starting sender...",
+         :max => @sender_max,
+         :requeue => @sleep_before_requeue)
+       @stopping.make_false()
+       @sender_t = Thread.new {
+         while @stopping.false?
+           batch = @queue.deq()
+           send_request(batch)
+         end # while
+         # Drain whatever is still queued, then wait for all outstanding requests
+         # to return their tokens before the thread exits.
+         @queue.drain().map { |batch|
+           send_request(batch)
+         }
+         log_info("waiting for senders to finish...")
+         while @tokens.size < @sender_max
+           sleep 1
+         end # while
+       }
+     end # def start
+
+     def stop()
+       log_info("shutting down sender...")
+       @stopping.make_true()
+       @queue.enq(Batch.new(Hash.new, STOP_TAG))
+       @sender_t.join
+       log_info("sender is fully shut down")
+     end # def stop
+
+     def connect()
+       uri = URI.parse(@url)
+       http = Net::HTTP.new(uri.host, uri.port)
+       http.use_ssl = @url.downcase().start_with?("https")
+       request = Net::HTTP::Get.new(uri.request_uri)
+       begin
+         res = http.request(request)
+         if res.code.to_i != 200
+           log_err("ping rejected",
+             :url => @url,
+             :code => res.code,
+             :body => res.body)
+           false
+         else
+           log_info("ping accepted",
+             :url => @url)
+           true
+         end
+       rescue Exception => exception
+         log_err("ping failed",
+           :url => @url,
+           :message => exception.message,
+           :class => exception.class.name,
+           :backtrace => exception.backtrace)
+         false
+       end
+     end # def connect
+
+     private
+
+     def send_request(batch)
+       content = batch.payload
+       headers = batch.headers
+       if content == STOP_TAG
+         log_info("STOP_TAG is received.")
+         return
+       end
+
+       token = @tokens.pop()
+
+       if @stats_enabled && content.start_with?(STATS_TAG)
+         body = @compressor.compress(content[STATS_TAG.length..-1])
+         headers[CATEGORY_HEADER] = "#{headers[CATEGORY_HEADER]}.stats"
+         headers[CONTENT_TYPE] = CONTENT_TYPE_CARBON2
+       else
+         body = @compressor.compress(content)
+       end
+
+       log_dbg("sending request",
+         :headers => headers,
+         :content_size => content.size,
+         :content => content[0..20],
+         :payload_size => body.size)
+       request = @client.send(:background).send(:post, @url, :body => body, :headers => headers)
+
+       request.on_complete do
+         @tokens << token
+       end
+
+       request.on_success do |response|
+         @stats.record_response_success(response.code)
+         if response.code < 200 || response.code > 299
+           log_err("request rejected",
+             :token => token,
+             :code => response.code,
+             :headers => headers,
+             :content => content[0..20])
+           # Retry only on throttling and transient gateway errors.
+           if response.code == 429 || response.code == 503 || response.code == 504
+             requeue_message(batch)
+           end
+         else
+           log_dbg("request accepted",
+             :token => token,
+             :code => response.code)
+         end
+       end
+
+       request.on_failure do |exception|
+         @stats.record_response_failure()
+         log_err("error in network transmission",
+           :token => token,
+           :message => exception.message,
+           :class => exception.class.name,
+           :backtrace => exception.backtrace)
+         requeue_message(batch)
+       end
+
+       @stats.record_request(content.bytesize, body.bytesize)
+       request.call
+     end # def send_request
+
+     def requeue_message(batch)
+       content = batch.payload
+       if @stats_enabled && content.start_with?(STATS_TAG)
+         log_warn("do not requeue stats payload",
+           :content => content)
+       elsif @stopping.false? && @sleep_before_requeue >= 0
+         log_info("requeue message",
+           :after => @sleep_before_requeue,
+           :queue_size => @queue.size,
+           :content_size => content.size,
+           :content => content[0..20],
+           :headers => batch.headers)
+         Stud.stoppable_sleep(@sleep_before_requeue) { @stopping.true? }
+         @queue.enq(batch)
+       end
+     end # def requeue_message
+
+   end
+ end; end; end
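The @tokens SizedQueue above caps the number of in-flight HTTP requests at sender_max: a request must pop a token before it starts and pushes it back in on_complete. Here is a self-contained sketch of the same pattern with plain threads instead of the async HTTP client; MAX_IN_FLIGHT and the job names are illustrative only.

# Standalone sketch: a SizedQueue of tokens bounds concurrent work (illustrative only).
require "thread"

MAX_IN_FLIGHT = 2
tokens = SizedQueue.new(MAX_IN_FLIGHT)
MAX_IN_FLIGHT.times { |t| tokens << t }

jobs = Queue.new
8.times { |i| jobs << "batch-#{i}" }

workers = Array.new(4) do
  Thread.new do
    loop do
      batch = begin
        jobs.pop(true)              # non-blocking pop; raises ThreadError when empty
      rescue ThreadError
        break
      end
      token = tokens.pop            # blocks while MAX_IN_FLIGHT requests are outstanding
      begin
        sleep 0.05                  # stands in for the HTTP round trip
        puts "sent #{batch} with token #{token}"
      ensure
        tokens << token             # return the token so the next request may start
      end
    end
  end
end
workers.each(&:join)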
@@ -0,0 +1,100 @@
+ # encoding: utf-8
+
+ module LogStash; module Outputs; class SumoLogic;
+   class Statistics
+
+     require "logstash/outputs/sumologic/common"
+     include LogStash::Outputs::SumoLogic::Common
+
+     attr_reader :initialize_time
+     attr_reader :total_input_events
+     attr_reader :total_input_bytes
+     attr_reader :total_metrics_datapoints
+     attr_reader :total_log_lines
+     attr_reader :total_enque_times
+     attr_reader :total_enque_bytes
+     attr_reader :total_deque_times
+     attr_reader :total_deque_bytes
+     attr_reader :total_output_requests
+     attr_reader :total_output_bytes
+     attr_reader :total_output_bytes_compressed
+     attr_reader :total_response
+     attr_reader :total_response_times
+     attr_reader :total_response_success
+
+     def initialize()
+       @initialize_time = Time.now()
+       @total_input_events = Concurrent::AtomicFixnum.new
+       @total_input_bytes = Concurrent::AtomicFixnum.new
+       @total_metrics_datapoints = Concurrent::AtomicFixnum.new
+       @total_log_lines = Concurrent::AtomicFixnum.new
+       @total_enque_times = Concurrent::AtomicFixnum.new
+       @total_enque_bytes = Concurrent::AtomicFixnum.new
+       @total_deque_times = Concurrent::AtomicFixnum.new
+       @total_deque_bytes = Concurrent::AtomicFixnum.new
+       @total_output_requests = Concurrent::AtomicFixnum.new
+       @total_output_bytes = Concurrent::AtomicFixnum.new
+       @total_output_bytes_compressed = Concurrent::AtomicFixnum.new
+       @total_response = Concurrent::Map.new
+       @total_response_times = Concurrent::AtomicFixnum.new
+       @total_response_success = Concurrent::AtomicFixnum.new
+
+     end # def initialize
+
+     # Per-status-code counter lookup; overrides the plain attr_reader above.
+     def total_response(key)
+       @total_response.get(key) ? @total_response.get(key).value : 0
+     end
+
+     def record_input(size)
+       @total_input_events.increment()
+       @total_input_bytes.update { |v| v + size }
+     end # def record_input
+
+     def record_log_process()
+       @total_log_lines.increment()
+     end # def record_log_process
+
+     def record_metrics_process(dps)
+       @total_metrics_datapoints.update { |v| v + dps }
+     end # def record_metrics_process
+
+     def record_enque(size)
+       @total_enque_times.increment()
+       @total_enque_bytes.update { |v| v + size }
+     end # def record_enque
+
+     def record_deque(size)
+       @total_deque_times.increment()
+       @total_deque_bytes.update { |v| v + size }
+     end # def record_deque
+
+     def record_request(size, size_compressed)
+       @total_output_requests.increment()
+       @total_output_bytes.update { |v| v + size }
+       @total_output_bytes_compressed.update { |v| v + size_compressed }
+     end # def record_request
+
+     def record_response_success(code)
+       atomic_map_increase(@total_response, code.to_s)
+       @total_response_success.increment() if code == 200
+       @total_response_times.increment()
+     end # def record_response_success
+
+     def record_response_failure()
+       atomic_map_increase(@total_response, "failure")
+     end # def record_response_failure
+
+     # Increment a counter stored in a Concurrent::Map, creating it atomically on first use.
+     def atomic_map_increase(map, key)
+       number = map.get(key)
+       if number.nil?
+         new_number = Concurrent::AtomicFixnum.new
+         # put_if_absent returns the existing counter if another thread won the race.
+         number = map.put_if_absent(key, new_number)
+         if number.nil?
+           number = new_number
+         end
+       end
+       number.increment()
+     end # def atomic_map_increase
+
+   end
+ end; end; end
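A minimal usage sketch of the counters above, assuming the plugin and its dependencies (notably concurrent-ruby) are already on the load path; the byte counts are made-up example values.

require "logstash/outputs/sumologic/statistics"

stats = LogStash::Outputs::SumoLogic::Statistics.new

stats.record_input(128)                 # one event, 128 bytes received from the pipeline
stats.record_enque(128)                 # its payload was queued for sending
stats.record_request(128, 42)           # one HTTP request: raw size vs. compressed size
stats.record_response_success(200)      # the collector answered 200

puts stats.total_input_events.value               # => 1
puts stats.total_output_bytes_compressed.value    # => 42
puts stats.total_response("200")                  # => 1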
@@ -0,0 +1,27 @@
+ Gem::Specification.new do |s|
+   s.name          = 'hshek-logstash-output-sumologic'
+   s.version       = '0.0.2'
+   s.licenses      = ['Apache-2.0']
+   s.summary       = 'Deliver logs to the Sumo Logic cloud service.'
+   s.description   = 'This gem is a Logstash output plugin to deliver logs or metrics to the Sumo Logic cloud service. Go to https://github.com/SumoLogic/logstash-output-sumologic to get help, report issues, etc.'
+   s.authors       = ['Sumo Logic']
+   s.email         = 'collection@sumologic.com'
+   s.homepage      = 'https://github.com/SumoLogic/logstash-output-sumologic'
+   s.require_paths = ['lib']
+
+   # Files
+   s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'output' }
+
+   # Gem dependencies
+   s.add_runtime_dependency 'manticore', '>= 0.5.4', '< 1.0.0'
+   s.add_runtime_dependency 'logstash-core-plugin-api', '>= 1.60', '<= 2.99'
+   s.add_runtime_dependency 'logstash-mixin-http_client', '~> 6.0'
+
+   s.add_development_dependency 'logstash-codec-plain'
+   s.add_development_dependency 'logstash-devutils'
+ end
@@ -0,0 +1,27 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/outputs/sumologic"
+
+ describe LogStash::Outputs::SumoLogic::Compressor do
+
+   context "compress (deflate)" do
+     let(:compressor) {
+       LogStash::Outputs::SumoLogic::Compressor.new("compress" => true, "compress_encoding" => "deflate")
+     }
+     specify {
+       expect(compressor.compress("abcde").bytesize).to eq(13)
+       expect(compressor.compress("aaaaa").bytesize).to eq(11)
+     }
+   end # context
+
+   context "compress (gzip)" do
+     let(:compressor) {
+       LogStash::Outputs::SumoLogic::Compressor.new("compress" => true, "compress_encoding" => "gzip")
+     }
+     specify {
+       expect(compressor.compress("abcde").bytesize).to eq(25)
+       expect(compressor.compress("aaaaa").bytesize).to eq(23)
+     }
+   end # context
+
+ end # describe
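The expected sizes in this spec follow from the fixed framing overhead of each encoding, assuming the Compressor delegates to Ruby's Zlib: raw deflate adds a 2-byte zlib header plus a 4-byte Adler-32 checksum, while gzip adds a 10-byte header plus an 8-byte trailer. A quick standalone check of that overhead with the standard library (not the plugin's code):

require "zlib"

puts Zlib::Deflate.deflate("abcde").bytesize   # 13 = 7 bytes of compressed data + 6 bytes of zlib framing
puts Zlib.gzip("abcde").bytesize               # 25 = 7 bytes of compressed data + 18 bytes of gzip framing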
@@ -0,0 +1,244 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/outputs/sumologic"
+
+ describe LogStash::Outputs::SumoLogic::HeaderBuilder do
+
+   result = {}
+   event = LogStash::Event.new("foo" => "bar", "message" => "Hello world")
+
+   before :each do
+     result = builder.build(event)
+   end
+
+   context "should build headers by default" do
+
+     let(:builder) { LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234") }
+
+     specify {
+       expected = {
+         "X-Sumo-Client" => "logstash-output-sumologic",
+         "X-Sumo-Name" => "logstash-output-sumologic",
+         "X-Sumo-Host" => Socket.gethostname,
+         "X-Sumo-Category" => "Logstash",
+         "Content-Type" => "text/plain"
+       }
+       expect(result).to eq(expected)
+     }
+
+   end # context
+
+   context "should override source_category" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "source_category" => "my source category")
+     }
+
+     specify {
+       expect(result.count).to eq(5)
+       expect(result["X-Sumo-Category"]).to eq("my source category")
+     }
+
+   end # context
+
+   context "should override source_category with template" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "source_category" => "my source category %{foo}")
+     }
+
+     specify {
+       expect(result.count).to eq(5)
+       expect(result["X-Sumo-Category"]).to eq("my source category bar")
+     }
+
+   end # context
+
+   context "should override source_name" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "source_name" => "my source name")
+     }
+
+     specify {
+       expect(result.count).to eq(5)
+       expect(result["X-Sumo-Name"]).to eq("my source name")
+     }
+
+   end # context
+
+   context "should override source_name with template" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "source_name" => "my source name %{foo}")
+     }
+
+     specify {
+       expect(result.count).to eq(5)
+       expect(result["X-Sumo-Name"]).to eq("my source name bar")
+     }
+
+   end # context
+
+   context "should override source_host" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "source_host" => "my source host")
+     }
+
+     specify {
+       expect(result.count).to eq(5)
+       expect(result["X-Sumo-Host"]).to eq("my source host")
+     }
+
+   end # context
+
+   context "should override source_host with template" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "source_host" => "my source host %{foo}")
+     }
+
+     specify {
+       expect(result.count).to eq(5)
+       expect(result["X-Sumo-Host"]).to eq("my source host bar")
+     }
+
+   end # context
+
+   context "should honor extra_headers" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "extra_headers" => {
+           "foo" => "bar"
+         })
+     }
+
+     specify {
+       expect(result.count).to eq(6)
+       expect(result["foo"]).to eq("bar")
+     }
+
+   end # context
+
+   context "should honor extra_headers but never overwrite pre-defined headers" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "extra_headers" => {
+           "foo" => "bar",
+           "X-Sumo-Client" => "a",
+           "X-Sumo-Name" => "b",
+           "X-Sumo-Host" => "c",
+           "X-Sumo-Category" => "d",
+           "Content-Type" => "e"
+         })
+     }
+
+     specify {
+       expected = {
+         "foo" => "bar",
+         "X-Sumo-Client" => "logstash-output-sumologic",
+         "X-Sumo-Name" => "logstash-output-sumologic",
+         "X-Sumo-Host" => Socket.gethostname,
+         "X-Sumo-Category" => "Logstash",
+         "Content-Type" => "text/plain"
+       }
+       expect(result).to eq(expected)
+     }
+
+   end # context
+
+   context "should set content type correctly for log payload" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234")
+     }
+
+     specify {
+       expect(result["Content-Type"]).to eq("text/plain")
+     }
+
+   end # context
+
+   context "should set content type correctly for metrics payload (CarbonV2, default)" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "fields_as_metrics" => true)
+     }
+
+     specify {
+       expect(result["Content-Type"]).to eq("application/vnd.sumologic.carbon2")
+     }
+
+   end # context
+
+   context "should set content type correctly for metrics payload (Graphite)" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new(
+         "url" => "http://localhost/1234",
+         "metrics_format" => "graphite",
+         "fields_as_metrics" => true)
+     }
+
+     specify {
+       expect(result["Content-Type"]).to eq("application/vnd.sumologic.graphite")
+     }
+
+   end # context
+
+   context "should set content encoding correctly for uncompressed payload" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234")
+     }
+
+     specify {
+       expect(result["Content-Encoding"]).to be_nil
+     }
+
+   end # context
+
+   context "should set content encoding correctly for compressed payload (deflate, default)" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234", "compress" => true)
+     }
+
+     specify {
+       expect(result["Content-Encoding"]).to eq("deflate")
+     }
+
+   end # context
+
+   context "should set content encoding correctly for compressed payload (gzip)" do
+
+     let(:builder) {
+       LogStash::Outputs::SumoLogic::HeaderBuilder.new("url" => "http://localhost/1234", "compress" => true, "compress_encoding" => "gzip")
+     }
+
+     specify {
+       expect(result["Content-Encoding"]).to eq("gzip")
+     }
+
+   end # context
+
+ end # describe