hshek-logstash-output-sumologic 0.0.2

lib/logstash/outputs/sumologic/batch.rb (path inferred from the gem's standard layout)
@@ -0,0 +1,13 @@
+ # encoding: utf-8
+
+ module LogStash; module Outputs; class SumoLogic;
+   class Batch
+
+     attr_accessor :headers, :payload
+
+     def initialize(headers, payload)
+       @headers, @payload = headers, payload
+     end
+
+   end
+ end; end; end
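
Batch is a plain value object pairing the HTTP headers for one request with its serialized payload. A minimal usage sketch, assuming the gem's standard require path:

    require "logstash/outputs/sumologic/batch"

    batch = LogStash::Outputs::SumoLogic::Batch.new(
      { "Content-Type" => "text/plain" },
      "2024-01-01T00:00:00Z myhost hello world"
    )
    batch.payload.bytesize  # => 39 (MessageQueue uses this to track queued bytes)
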
lib/logstash/outputs/sumologic/common.rb (path matches the require statements below)
@@ -0,0 +1,73 @@
+ # encoding: utf-8
+ module LogStash; module Outputs; class SumoLogic;
+   module Common
+
+     require "date"
+
+     # global constants
+     DEFAULT_LOG_FORMAT = "%{@timestamp} %{host} %{message}"
+     METRICS_NAME_PLACEHOLDER = "*"
+     GRAPHITE = "graphite"
+     CARBON2 = "carbon2"
+     DEFLATE = "deflate"
+     GZIP = "gzip"
+     STATS_TAG = "STATS_TAG"
+     STOP_TAG = "PLUGIN STOPPED"
+
+     CONTENT_TYPE = "Content-Type"
+     CONTENT_TYPE_LOG = "text/plain"
+     CONTENT_TYPE_GRAPHITE = "application/vnd.sumologic.graphite"
+     CONTENT_TYPE_CARBON2 = "application/vnd.sumologic.carbon2"
+     CONTENT_ENCODING = "Content-Encoding"
+
+     CATEGORY_HEADER = "X-Sumo-Category"
+     CATEGORY_HEADER_DEFAULT = "Logstash"
+     HOST_HEADER = "X-Sumo-Host"
+     NAME_HEADER = "X-Sumo-Name"
+     NAME_HEADER_DEFAULT = "logstash-output-sumologic"
+
+     CLIENT_HEADER = "X-Sumo-Client"
+     CLIENT_HEADER_VALUE = "logstash-output-sumologic"
+
+     # for debugging/tests: print to the console instead of the Logstash logger
+     LOG_TO_CONSOLE = false
+     @@logger = nil
+
+     def set_logger(logger)
+       @@logger = logger
+     end
+
+     def log_info(message, *opts)
+       if LOG_TO_CONSOLE
+         puts "[INFO:#{DateTime.now}]#{message} #{opts}"
+       else
+         @@logger && @@logger.info(message, *opts)
+       end
+     end # def log_info
+
+     def log_warn(message, *opts)
+       if LOG_TO_CONSOLE
+         puts "\e[33m[WARN:#{DateTime.now}]#{message} #{opts}\e[0m"
+       else
+         @@logger && @@logger.warn(message, *opts)
+       end
+     end # def log_warn
+
+     def log_err(message, *opts)
+       if LOG_TO_CONSOLE
+         puts "\e[31m[ERR :#{DateTime.now}]#{message} #{opts}\e[0m"
+       else
+         @@logger && @@logger.error(message, *opts)
+       end
+     end # def log_err
+
+     def log_dbg(message, *opts)
+       if LOG_TO_CONSOLE
+         puts "\e[36m[DBG :#{DateTime.now}]#{message} #{opts}\e[0m"
+       else
+         @@logger && @@logger.debug(message, *opts)
+       end
+     end # def log_dbg
+
+   end
+ end; end; end
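
Common is mixed into every class in the plugin; it either forwards to the shared Logstash logger or, when LOG_TO_CONSOLE is flipped to true for test runs, prints ANSI-colored lines to stdout. A sketch of how a consumer wires it up (Demo and my_logger are hypothetical stand-ins):

    require "logstash/outputs/sumologic/common"

    class Demo
      include LogStash::Outputs::SumoLogic::Common
    end

    d = Demo.new
    d.set_logger(my_logger)              # my_logger: any Logstash-style logger (assumed)
    d.log_warn("slow flush", :took => 5) # forwards to my_logger.warn(...)

Note that set_logger writes a module-wide class variable, so one call configures logging for every class that includes Common.
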
lib/logstash/outputs/sumologic/compressor.rb (path inferred from the gem's standard layout)
@@ -0,0 +1,39 @@
+ # encoding: utf-8
+
+ module LogStash; module Outputs; class SumoLogic;
+   class Compressor
+
+     require "stringio"
+     require "zlib"
+     require "logstash/outputs/sumologic/common"
+     include LogStash::Outputs::SumoLogic::Common
+
+     def initialize(config)
+       @compress = config["compress"]
+       @compress_encoding = (config["compress_encoding"] ||= DEFLATE).downcase
+     end # def initialize
+
+     def compress(content)
+       if @compress
+         if @compress_encoding == GZIP
+           # gzip() already returns a binary-packed string
+           gzip(content)
+         else
+           Zlib::Deflate.deflate(content)
+         end
+       else
+         content
+       end
+     end # def compress
+
+     def gzip(content)
+       stream = StringIO.new("", "w") # empty write buffer; the original passed "w" as the buffer content
+       stream.set_encoding("ASCII")
+       gz = Zlib::GzipWriter.new(stream)
+       gz.write(content)
+       gz.close
+       stream.string.bytes.to_a.pack("c*")
+     end # def gzip
+
+   end
+ end; end; end
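
A round-trip sketch showing both encodings; the decompression here is only to verify the output, since the plugin itself sends the compressed bytes as the HTTP body:

    require "stringio"
    require "zlib"
    require "logstash/outputs/sumologic/compressor"

    gz = LogStash::Outputs::SumoLogic::Compressor.new(
      "compress" => true, "compress_encoding" => "gzip")
    Zlib::GzipReader.new(StringIO.new(gz.compress("hello"))).read  # => "hello"

    df = LogStash::Outputs::SumoLogic::Compressor.new(
      "compress" => true, "compress_encoding" => "deflate")
    Zlib::Inflate.inflate(df.compress("hello"))                    # => "hello"
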
lib/logstash/outputs/sumologic/header_builder.rb (path inferred from the gem's standard layout)
@@ -0,0 +1,52 @@
+ # encoding: utf-8
+
+ module LogStash; module Outputs; class SumoLogic;
+   class HeaderBuilder
+
+     require "socket"
+     require "logstash/outputs/sumologic/common"
+     include LogStash::Outputs::SumoLogic::Common
+
+     def initialize(config)
+
+       @extra_headers = config["extra_headers"] ||= {}
+       @source_category = config["source_category"] ||= CATEGORY_HEADER_DEFAULT
+       @source_host = config["source_host"] ||= Socket.gethostname
+       @source_name = config["source_name"] ||= NAME_HEADER_DEFAULT
+       @metrics = config["metrics"]
+       @fields_as_metrics = config["fields_as_metrics"]
+       @metrics_format = (config["metrics_format"] ||= CARBON2).downcase
+       @compress = config["compress"]
+       @compress_encoding = config["compress_encoding"]
+
+     end # def initialize
+
+     def build(event)
+       headers = Hash.new
+       headers.merge!(@extra_headers)
+       headers[CLIENT_HEADER] = CLIENT_HEADER_VALUE
+       headers[CATEGORY_HEADER] = event.sprintf(@source_category) unless @source_category.blank? # blank? assumes an ActiveSupport-style String helper is loaded
+       headers[HOST_HEADER] = event.sprintf(@source_host) unless @source_host.blank?
+       headers[NAME_HEADER] = event.sprintf(@source_name) unless @source_name.blank?
+       append_content_header(headers)
+       append_compress_header(headers)
+       headers
+     end # def build
+
+     private
+     def append_content_header(headers)
+       content_type = CONTENT_TYPE_LOG
+       if @metrics || @fields_as_metrics
+         content_type = (@metrics_format == GRAPHITE) ? CONTENT_TYPE_GRAPHITE : CONTENT_TYPE_CARBON2
+       end
+       headers[CONTENT_TYPE] = content_type
+     end # def append_content_header
+
+     def append_compress_header(headers)
+       if @compress
+         headers[CONTENT_ENCODING] = (@compress_encoding == GZIP) ? GZIP : DEFLATE
+       end
+     end # def append_compress_header
+
+   end
+ end; end; end
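
Since source_category, source_host, and source_name all pass through event.sprintf, they may reference event fields. A sketch of the headers produced for one event, assuming a Logstash runtime where String#blank? is available (hostname will vary):

    require "logstash/event"
    require "logstash/outputs/sumologic/header_builder"

    builder = LogStash::Outputs::SumoLogic::HeaderBuilder.new(
      "source_category" => "prod/%{app}", "compress" => true, "compress_encoding" => "gzip")
    builder.build(LogStash::Event.new("app" => "checkout"))
    # => { "X-Sumo-Client"    => "logstash-output-sumologic",
    #      "X-Sumo-Category"  => "prod/checkout",
    #      "X-Sumo-Host"      => "<this machine's hostname>",
    #      "X-Sumo-Name"      => "logstash-output-sumologic",
    #      "Content-Type"     => "text/plain",
    #      "Content-Encoding" => "gzip" }
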
lib/logstash/outputs/sumologic/message_queue.rb (path matches the require in monitor.rb below)
@@ -0,0 +1,57 @@
+ # encoding: utf-8
+ module LogStash; module Outputs; class SumoLogic;
+   class MessageQueue
+
+     require "logstash/outputs/sumologic/common"
+     require "logstash/outputs/sumologic/statistics"
+     include LogStash::Outputs::SumoLogic::Common
+
+     def initialize(stats, config)
+       @queue_max = (config["queue_max"] ||= 1) < 1 ? 1 : config["queue_max"]
+       @queue = SizedQueue.new(@queue_max)
+       log_info("initialize memory queue", :max => @queue_max)
+       @queue_bytesize = Concurrent::AtomicFixnum.new # starts at 0
+       @stats = stats
+     end # def initialize
+
+     def enq(batch)
+       batch_size = batch.payload.bytesize
+       if (batch_size > 0)
+         @queue.enq(batch)
+         @stats.record_enque(batch_size)
+         @queue_bytesize.update { |v| v + batch_size }
+         log_dbg("enqueue",
+           :objects_in_queue => size,
+           :bytes_in_queue => @queue_bytesize.value,
+           :size => batch_size)
+       end
+     end # def enq
+
+     def deq()
+       batch = @queue.deq()
+       batch_size = batch.payload.bytesize
+       @stats.record_deque(batch_size)
+       @queue_bytesize.update { |v| v - batch_size }
+       log_dbg("dequeue",
+         :objects_in_queue => size,
+         :bytes_in_queue => @queue_bytesize.value,
+         :size => batch_size)
+       batch
+     end # def deq
+
+     def drain()
+       @queue.size.times.map {
+         deq()
+       }
+     end # def drain
+
+     def size()
+       @queue.size()
+     end # def size
+
+     def bytesize()
+       @queue_bytesize.value
+     end # def bytesize
+
+   end
+ end; end; end
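
SizedQueue gives blocking back-pressure on object count; the AtomicFixnum exists because SizedQueue only counts objects, not bytes. A usage sketch (this diff does not show the Statistics class, so its zero-argument constructor is an assumption):

    require "logstash/outputs/sumologic/batch"
    require "logstash/outputs/sumologic/statistics"
    require "logstash/outputs/sumologic/message_queue"

    stats = LogStash::Outputs::SumoLogic::Statistics.new  # assumed constructor
    queue = LogStash::Outputs::SumoLogic::MessageQueue.new(stats, "queue_max" => 10)

    queue.enq(LogStash::Outputs::SumoLogic::Batch.new({}, "payload-1"))
    queue.size         # => 1
    queue.bytesize     # => 9
    queue.deq.payload  # => "payload-1" (deq blocks when the queue is empty)
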
lib/logstash/outputs/sumologic/monitor.rb (path inferred from the gem's standard layout)
@@ -0,0 +1,76 @@
+ # encoding: utf-8
+
+ module LogStash; module Outputs; class SumoLogic;
+   class Monitor
+
+     require "logstash/outputs/sumologic/common"
+     require "logstash/outputs/sumologic/statistics"
+     require "logstash/outputs/sumologic/message_queue"
+     include LogStash::Outputs::SumoLogic::Common
+
+     attr_reader :is_pile # note: never assigned in this class, so it always reads as nil
+
+     def initialize(queue, stats, config)
+       @queue = queue
+       @stats = stats
+       @stopping = Concurrent::AtomicBoolean.new(false)
+
+       @enabled = config["stats_enabled"] ||= false
+       @interval = config["stats_interval"] ||= 60
+       @interval = @interval < 0 ? 0 : @interval
+     end # def initialize
+
+     def start()
+       log_info("starting monitor...", :interval => @interval)
+       @stopping.make_false()
+       if (@enabled)
+         @monitor_t = Thread.new {
+           while @stopping.false?
+             Stud.stoppable_sleep(@interval) { @stopping.true? }
+             if @stats.total_input_events.value > 0
+               @queue.enq(Batch.new({}, build_stats_payload())) # the queue stores Batch objects (batch.rb is loaded by the plugin entry point)
+             end
+           end # while
+         }
+       end # if
+     end # def start
+
+     def stop()
+       @stopping.make_true()
+       if (@enabled)
+         log_info("shutting down monitor...")
+         @monitor_t.join
+         log_info("monitor is fully shut down")
+       end
+     end # def stop
+
+     def build_stats_payload()
+       timestamp = Time.now().to_i
+
+       counters = [
+         "total_input_events",
+         "total_input_bytes",
+         "total_metrics_datapoints",
+         "total_log_lines",
+         "total_output_requests",
+         "total_output_bytes",
+         "total_output_bytes_compressed",
+         "total_response_times",
+         "total_response_success"
+       ].map { |key|
+         value = @stats.send(key).value
+         log_dbg("stats",
+           :key => key,
+           :value => value)
+         build_metric_line(key, value, timestamp)
+       }.join($/)
+
+       "#{STATS_TAG}#{counters}"
+     end # def build_stats_payload
+
+     def build_metric_line(key, value, timestamp)
+       "metric=#{key} interval=#{@interval} category=monitor #{value} #{timestamp}"
+     end # def build_metric_line
+
+   end
+ end; end; end
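
Each counter becomes one carbon2-style line, so a stats payload is nine lines prefixed with STATS_TAG. A small sketch of the line format; the queue and stats collaborators are not needed for formatting, so nil stand-ins suffice here:

    require "concurrent"
    require "logstash/outputs/sumologic/monitor"

    monitor = LogStash::Outputs::SumoLogic::Monitor.new(nil, nil, "stats_interval" => 60)
    monitor.build_metric_line("total_input_events", 42, 1700000000)
    # => "metric=total_input_events interval=60 category=monitor 42 1700000000"
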
lib/logstash/outputs/sumologic/payload_builder.rb (path inferred from the gem's standard layout)
@@ -0,0 +1,159 @@
+ # encoding: utf-8
+
+ module LogStash; module Outputs; class SumoLogic;
+   class PayloadBuilder
+
+     require "logstash/json"
+     require "logstash/event"
+     require "logstash/outputs/sumologic/common"
+     include LogStash::Outputs::SumoLogic::Common
+
+     TIMESTAMP_FIELD = "@timestamp"
+     METRICS_NAME_TAG = "metric"
+     JSON_PLACEHOLDER = "%{@json}"
+     ALWAYS_EXCLUDED = [ "@timestamp", "@version" ]
+
+     def initialize(stats, config)
+       @stats = stats
+
+       @format = config["format"] ||= DEFAULT_LOG_FORMAT
+       @json_mapping = config["json_mapping"]
+
+       @metrics = config["metrics"]
+       @metrics_name = config["metrics_name"]
+       @fields_as_metrics = config["fields_as_metrics"]
+       @metrics_format = (config["metrics_format"] ||= CARBON2).downcase
+       @intrinsic_tags = config["intrinsic_tags"] ||= {}
+       @meta_tags = config["meta_tags"] ||= {}
+       @fields_include = config["fields_include"] ||= []
+       @fields_exclude = config["fields_exclude"] ||= []
+
+     end # def initialize
+
+     def build(event)
+       payload = if @metrics || @fields_as_metrics
+         build_metrics_payload(event)
+       else
+         build_log_payload(event)
+       end
+       payload
+     end # def build
+
+     private
+
+     def build_log_payload(event)
+       @stats.record_log_process()
+       apply_template(@format, event)
+     end # def build_log_payload
+
+     def build_metrics_payload(event)
+       timestamp = event.get(TIMESTAMP_FIELD).to_i
+       source = if @fields_as_metrics
+         event_as_metrics(event)
+       else
+         expand_hash(@metrics, event)
+       end
+       lines = source.flat_map { |key, value|
+         get_single_line(event, key, value, timestamp)
+       }.reject(&:nil?)
+       @stats.record_metrics_process(lines.size)
+       lines.join($/)
+     end # def build_metrics_payload
+
+     def event_as_metrics(event)
+       hash = event2hash(event)
+       acc = {}
+       hash.keys.each do |field|
+         value = hash[field]
+         dotify(acc, field, value, nil)
+       end
+       acc
+     end # def event_as_metrics
+
+     def get_single_line(event, key, value, timestamp)
+       full = get_metrics_name(event, key)
+       if !ALWAYS_EXCLUDED.include?(full) && \
+          (@fields_include.empty? || @fields_include.any? { |regexp| full.match(regexp) }) && \
+          !(@fields_exclude.any? { |regexp| full.match(regexp) }) && \
+          is_number?(value)
+         if @metrics_format == GRAPHITE
+           "#{full} #{value} #{timestamp}"
+         else
+           @intrinsic_tags[METRICS_NAME_TAG] = full # note: mutates the shared tags hash across events
+           "#{hash2line(@intrinsic_tags, event)} #{hash2line(@meta_tags, event)}#{value} #{timestamp}"
+         end
+       end
+     end # def get_single_line
+
+     def dotify(acc, key, value, prefix)
+       pk = prefix ? "#{prefix}.#{key}" : key.to_s
+       if value.is_a?(Hash)
+         value.each do |k, v|
+           dotify(acc, k, v, pk)
+         end
+       elsif value.is_a?(Array)
+         value.each_with_index.map { |v, i|
+           dotify(acc, i.to_s, v, pk)
+         }
+       else
+         acc[pk] = value
+       end
+     end # def dotify
+
+     def event2hash(event)
+       if @json_mapping
+         @json_mapping.reduce({}) do |acc, kv|
+           k, v = kv
+           acc[k] = event.sprintf(v)
+           acc
+         end
+       else
+         event.to_hash
+       end
+     end # def event2hash
+
+     def is_number?(me)
+       me.to_f.to_s == me.to_s || me.to_i.to_s == me.to_s
+     end # def is_number?
+
+     def expand_hash(hash, event)
+       hash.reduce({}) do |acc, kv|
+         k, v = kv
+         exp_k = apply_template(k, event)
+         exp_v = apply_template(v, event)
+         acc[exp_k] = exp_v
+         acc
+       end
+     end # def expand_hash
+
+     def apply_template(template, event)
+       if template == JSON_PLACEHOLDER
+         hash = event2hash(event)
+         LogStash::Json.dump(hash)
+       elsif template.include? JSON_PLACEHOLDER
+         result = event.sprintf(template)
+         hash = event2hash(event)
+         dump = LogStash::Json.dump(hash)
+         result.gsub(JSON_PLACEHOLDER) { dump }
+       else
+         event.sprintf(template)
+       end
+     end # def apply_template
+
+     def get_metrics_name(event, name)
+       name = @metrics_name.gsub(METRICS_NAME_PLACEHOLDER) { name } if @metrics_name
+       event.sprintf(name)
+     end # def get_metrics_name
+
+     def hash2line(hash, event)
+       if (hash.is_a?(Hash) && !hash.empty?)
+         expand_hash(hash, event).map { |k, v|
+           "#{k}=#{v} "
+         }.join
+       else
+         ""
+       end
+     end # def hash2line
+
+   end
+ end; end; end
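
The trickiest path is fields_as_metrics, where dotify flattens nested hashes and arrays into dotted metric names before filtering and formatting. A sketch of that flattening; dotify is private, so the example reaches it via send, and the Statistics constructor is assumed to take no arguments (not shown in this diff):

    require "logstash/outputs/sumologic/statistics"
    require "logstash/outputs/sumologic/payload_builder"

    stats   = LogStash::Outputs::SumoLogic::Statistics.new  # assumed constructor
    builder = LogStash::Outputs::SumoLogic::PayloadBuilder.new(stats, "fields_as_metrics" => true)

    acc = {}
    builder.send(:dotify, acc, "cpu", { "user" => 1.5, "sys" => [0.2, 0.3] }, nil)
    acc  # => { "cpu.user" => 1.5, "cpu.sys.0" => 0.2, "cpu.sys.1" => 0.3 }
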