semantic_logger 4.5.0 → 4.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +51 -21
- data/Rakefile +7 -7
- data/lib/semantic_logger/ansi_colors.rb +0 -10
- data/lib/semantic_logger/appender/async.rb +12 -10
- data/lib/semantic_logger/appender/async_batch.rb +7 -3
- data/lib/semantic_logger/appender/bugsnag.rb +43 -30
- data/lib/semantic_logger/appender/elasticsearch.rb +34 -15
- data/lib/semantic_logger/appender/elasticsearch_http.rb +4 -4
- data/lib/semantic_logger/appender/file.rb +249 -67
- data/lib/semantic_logger/appender/graylog.rb +15 -10
- data/lib/semantic_logger/appender/honeybadger.rb +3 -3
- data/lib/semantic_logger/appender/http.rb +41 -20
- data/lib/semantic_logger/appender/io.rb +68 -0
- data/lib/semantic_logger/appender/kafka.rb +46 -31
- data/lib/semantic_logger/appender/mongodb.rb +6 -6
- data/lib/semantic_logger/appender/new_relic.rb +2 -2
- data/lib/semantic_logger/appender/rabbitmq.rb +5 -5
- data/lib/semantic_logger/appender/sentry.rb +7 -7
- data/lib/semantic_logger/appender/sentry_ruby.rb +138 -0
- data/lib/semantic_logger/appender/splunk.rb +7 -5
- data/lib/semantic_logger/appender/splunk_http.rb +6 -5
- data/lib/semantic_logger/appender/syslog.rb +23 -15
- data/lib/semantic_logger/appender/tcp.rb +9 -9
- data/lib/semantic_logger/appender/udp.rb +2 -2
- data/lib/semantic_logger/appender/wrapper.rb +3 -2
- data/lib/semantic_logger/appender.rb +62 -65
- data/lib/semantic_logger/appenders.rb +36 -53
- data/lib/semantic_logger/base.rb +61 -39
- data/lib/semantic_logger/formatters/base.rb +16 -6
- data/lib/semantic_logger/formatters/color.rb +14 -15
- data/lib/semantic_logger/formatters/default.rb +18 -5
- data/lib/semantic_logger/formatters/fluentd.rb +7 -18
- data/lib/semantic_logger/formatters/json.rb +3 -5
- data/lib/semantic_logger/formatters/logfmt.rb +77 -0
- data/lib/semantic_logger/formatters/raw.rb +39 -10
- data/lib/semantic_logger/formatters/signalfx.rb +14 -21
- data/lib/semantic_logger/formatters/syslog.rb +8 -6
- data/lib/semantic_logger/formatters/syslog_cee.rb +9 -7
- data/lib/semantic_logger/formatters.rb +13 -13
- data/lib/semantic_logger/jruby/garbage_collection_logger.rb +4 -2
- data/lib/semantic_logger/levels.rb +9 -7
- data/lib/semantic_logger/log.rb +58 -73
- data/lib/semantic_logger/loggable.rb +8 -1
- data/lib/semantic_logger/logger.rb +19 -11
- data/lib/semantic_logger/metric/new_relic.rb +3 -3
- data/lib/semantic_logger/metric/signalfx.rb +3 -3
- data/lib/semantic_logger/metric/statsd.rb +7 -7
- data/lib/semantic_logger/processor.rb +9 -7
- data/lib/semantic_logger/reporters/minitest.rb +4 -4
- data/lib/semantic_logger/semantic_logger.rb +57 -23
- data/lib/semantic_logger/subscriber.rb +24 -7
- data/lib/semantic_logger/sync.rb +12 -0
- data/lib/semantic_logger/sync_processor.rb +58 -0
- data/lib/semantic_logger/test/capture_log_events.rb +34 -0
- data/lib/semantic_logger/utils.rb +32 -13
- data/lib/semantic_logger/version.rb +1 -1
- data/lib/semantic_logger.rb +27 -22
- metadata +15 -10
data/lib/semantic_logger/appender.rb
CHANGED
@@ -1,93 +1,90 @@
 module SemanticLogger
   module Appender
     # @formatter:off
-    autoload :Async,
-    autoload :AsyncBatch,
-    autoload :Bugsnag,
-    autoload :Elasticsearch,
-    autoload :ElasticsearchHttp,
-    autoload :File,
-    autoload :Graylog,
-    autoload :Honeybadger,
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
+    autoload :Async,              "semantic_logger/appender/async"
+    autoload :AsyncBatch,         "semantic_logger/appender/async_batch"
+    autoload :Bugsnag,            "semantic_logger/appender/bugsnag"
+    autoload :Elasticsearch,      "semantic_logger/appender/elasticsearch"
+    autoload :ElasticsearchHttp,  "semantic_logger/appender/elasticsearch_http"
+    autoload :File,               "semantic_logger/appender/file"
+    autoload :Graylog,            "semantic_logger/appender/graylog"
+    autoload :Honeybadger,        "semantic_logger/appender/honeybadger"
+    autoload :IO,                 "semantic_logger/appender/io"
+    autoload :Kafka,              "semantic_logger/appender/kafka"
+    autoload :Sentry,             "semantic_logger/appender/sentry"
+    autoload :Http,               "semantic_logger/appender/http"
+    autoload :MongoDB,            "semantic_logger/appender/mongodb"
+    autoload :NewRelic,           "semantic_logger/appender/new_relic"
+    autoload :Rabbitmq,           "semantic_logger/appender/rabbitmq"
+    autoload :Splunk,             "semantic_logger/appender/splunk"
+    autoload :SplunkHttp,         "semantic_logger/appender/splunk_http"
+    autoload :Syslog,             "semantic_logger/appender/syslog"
+    autoload :Tcp,                "semantic_logger/appender/tcp"
+    autoload :Udp,                "semantic_logger/appender/udp"
+    autoload :Wrapper,            "semantic_logger/appender/wrapper"
+    autoload :SentryRuby,         "semantic_logger/appender/sentry_ruby"
     # @formatter:on

-    # DEPRECATED, use SemanticLogger::AnsiColors
-    AnsiColors = SemanticLogger::AnsiColors
-
-    # DEPRECATED: use SemanticLogger::Formatters::Color.new
-    def self.colorized_formatter
-      SemanticLogger::Formatters::Color.new
-    end
-
-    # DEPRECATED: use SemanticLogger::Formatters::Json.new
-    def self.json_formatter
-      SemanticLogger::Formatters::Json.new
-    end
-
     # Returns [SemanticLogger::Subscriber] appender for the supplied options
-    def self.factory(
-
-
-
-
-
-      proxy_options = {}
-      ASYNC_OPTION_KEYS.each { |key| proxy_options[key] = options.delete(key) if options.key?(key) }
-
-      appender = build(options, &block)
+    def self.factory(async: false, batch: nil,
+                     max_queue_size: 10_000, lag_check_interval: 1_000, lag_threshold_s: 30,
+                     batch_size: 300, batch_seconds: 5,
+                     **args,
+                     &block)
+      appender = build(**args, &block)

       # If appender implements #batch, then it should use the batch proxy by default.
       batch = true if batch.nil? && appender.respond_to?(:batch)

       if batch == true
-
-
+        Appender::AsyncBatch.new(
+          appender:        appender,
+          max_queue_size:  max_queue_size,
+          lag_threshold_s: lag_threshold_s,
+          batch_size:      batch_size,
+          batch_seconds:   batch_seconds
+        )
       elsif async == true
-
-
-
-
+        Appender::Async.new(
+          appender:           appender,
+          max_queue_size:     max_queue_size,
+          lag_check_interval: lag_check_interval,
+          lag_threshold_s:    lag_threshold_s
+        )
       else
         appender
       end
     end

-    ASYNC_OPTION_KEYS = %i[max_queue_size lag_threshold_s batch_size batch_seconds lag_check_interval].freeze
-
     # Returns [Subscriber] instance from the supplied options.
-    def self.build(
-      if
-        SemanticLogger::Appender::File.new(
-      elsif
+    def self.build(io: nil, file_name: nil, appender: nil, metric: nil, logger: nil, **args, &block)
+      if file_name
+        SemanticLogger::Appender::File.new(file_name, **args, &block)
+      elsif io
+        SemanticLogger::Appender::IO.new(io, **args, &block)
+      elsif logger
+        SemanticLogger::Appender::Wrapper.new(logger: logger, **args, &block)
+      elsif appender
         if appender.is_a?(Symbol)
-          SemanticLogger::Utils.constantize_symbol(appender).new(
+          SemanticLogger::Utils.constantize_symbol(appender).new(**args)
         elsif appender.is_a?(Subscriber)
           appender
         else
-          raise(ArgumentError,
+          raise(ArgumentError,
+                "Parameter :appender must be either a Symbol or an object derived from SemanticLogger::Subscriber, not: #{appender.inspect}")
         end
-      elsif
-        if
-          SemanticLogger::Utils.constantize_symbol(
-        elsif
-
+      elsif metric
+        if metric.is_a?(Symbol)
+          SemanticLogger::Utils.constantize_symbol(metric, "SemanticLogger::Metric").new(**args)
+        elsif metric.is_a?(Subscriber)
+          metric
        else
-          raise(ArgumentError,
+          raise(ArgumentError,
+                "Parameter :metric must be either a Symbol or an object derived from SemanticLogger::Subscriber, not: #{appender.inspect}")
        end
-
-
+      else
+        raise(ArgumentError,
+              "To create an appender it must supply one of the following: :io, :file_name, :appender, :metric, or :logger")
      end
    end
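To make the new signatures concrete, here is a small, hypothetical sketch of calling the reworked `factory`/`build` interface directly. It only uses keywords that appear in the hunk above (`io:`, `file_name:`, `async:`, `max_queue_size:`, `lag_threshold_s:`); `formatter:` and `level:` are assumed to pass through `**args` to the underlying appender, and in normal use these calls happen indirectly through `SemanticLogger.add_appender`.

```ruby
require "semantic_logger"

# Plain file appender; file_name is now an explicit keyword on build.
file_appender = SemanticLogger::Appender.factory(file_name: "application.log", level: :info)

# IO appender wrapped in the Async proxy; the queue tuning options are now
# declared directly on factory instead of the old ASYNC_OPTION_KEYS hash.
async_stdout = SemanticLogger::Appender.factory(
  io:              $stdout,
  formatter:       :color,
  async:           true,
  max_queue_size:  5_000,
  lag_threshold_s: 60
)
# => an Appender::Async proxy wrapping an Appender::IO instance
```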
data/lib/semantic_logger/appenders.rb
CHANGED
@@ -8,82 +8,65 @@ module SemanticLogger
       @logger.name = self.class.name
     end

-    def add(
-
-
+    def add(**args, &block)
+      appender = SemanticLogger::Appender.factory(**args, &block)
+
+      if appender.respond_to?(:console_output?) && appender.console_output? && console_output?
+        logger.warn "Ignoring attempt to add a second console appender: #{appender.class.name} since it would result in duplicate console output."
+        return
+      end
+
       self << appender
       appender
     end

+    # Whether any of the existing appenders already output to the console?
+    # I.e. Writes to stdout or stderr.
+    def console_output?
+      any? { |appender| appender.respond_to?(:console_output?) && appender.console_output? }
+    end
+
     def log(log)
       each do |appender|
-
-
-
-        logger.error "Failed to log to appender: #{appender.inspect}", exc
-        end
+        appender.log(log) if appender.should_log?(log)
+      rescue Exception => e
+        logger.error "Failed to log to appender: #{appender.name}", e
       end
     end

     def flush
       each do |appender|
-
-
-
-
-        logger.error "Failed to flush appender: #{appender.inspect}", exc
-        end
+        logger.trace "Flushing appender: #{appender.name}"
+        appender.flush
+      rescue Exception => e
+        logger.error "Failed to flush appender: #{appender.name}", e
       end
-      logger.trace
+      logger.trace "All appenders flushed"
     end

     def close
-      each do |appender|
-
-
-
-
-
-
-        logger.error "Failed to close appender: #{appender.inspect}", exc
-        end
+      to_a.each do |appender|
+        logger.trace "Closing appender: #{appender.name}"
+        delete(appender)
+        appender.flush
+        appender.close
+      rescue Exception => e
+        logger.error "Failed to close appender: #{appender.name}", e
       end
-      logger.trace
+      logger.trace "All appenders closed and removed from appender list"
     end

     # After a fork the appender thread is not running, start it if it is not running.
     def reopen
       each do |appender|
-
-        next unless appender.respond_to?(:reopen)
-
-        logger.trace "Reopening appender: #{appender.name}"
-        appender.reopen
-      rescue Exception => exc
-        logger.error "Failed to re-open appender: #{appender.inspect}", exc
-        end
-      end
-      logger.trace 'All appenders re-opened'
-    end
-
-    private
-
-    # Backward compatibility
-    def convert_old_appender_args(appender, level)
-      options = {}
-      options[:level] = level if level
+        next unless appender.respond_to?(:reopen)

-
-
-
-
-      elsif appender.is_a?(Symbol) || appender.is_a?(Subscriber)
-        options[:appender] = appender
-      else
-        options[:logger] = appender
+        logger.trace "Reopening appender: #{appender.name}"
+        appender.reopen
+      rescue Exception => e
+        logger.error "Failed to re-open appender: #{appender.name}", e
       end
-
-      options
+      logger.trace "All appenders re-opened"
     end
   end
 end
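Behaviour implied by the new `add`/`console_output?` pair above, shown as a hedged sketch: a second appender that also writes to the console should be dropped with a warning rather than producing duplicate output.

```ruby
require "semantic_logger"

SemanticLogger.add_appender(io: $stdout, formatter: :color)

# This appender also reports console_output?, so Appenders#add logs
# "Ignoring attempt to add a second console appender..." and returns early.
SemanticLogger.add_appender(io: $stderr)

SemanticLogger["Example"].info("written once, to the first console appender")
```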
data/lib/semantic_logger/base.rb
CHANGED
@@ -63,7 +63,7 @@ module SemanticLogger
   #   SemanticLogger.default_level = :info
   #
   #   # Log to screen
-  #   SemanticLogger.add_appender(io:
+  #   SemanticLogger.add_appender(io: $stdout, formatter: :color)
   #
   #   # And log to a file at the same time
   #   SemanticLogger.add_appender(file_name: 'application.log', formatter: :color)
@@ -126,7 +126,7 @@ module SemanticLogger
     # Log a thread backtrace
     def backtrace(thread: Thread.current,
                   level: :warn,
-                  message:
+                  message: "Backtrace:",
                   payload: nil,
                   metric: nil,
                   metric_amount: nil)
@@ -136,7 +136,7 @@ module SemanticLogger

       backtrace =
         if thread == Thread.current
-          Utils.extract_backtrace
+          Utils.extract_backtrace(caller)
         else
           log.thread_name = thread.name
           log.tags        = (thread[:semantic_logger_tags] || []).clone
@@ -186,11 +186,14 @@ module SemanticLogger
    #     to:
    #       `logger.tagged('first', 'more', 'other')`
    #   - For better performance with clean tags, see `SemanticLogger.tagged`.
-    def tagged(*tags
+    def tagged(*tags)
+      block = -> { yield(self) }
       # Allow named tags to be passed into the logger
-
+      # Rails::Rack::Logger passes logs as an array with a single argument
+      if tags.size == 1 && !tags.first.is_a?(Array)
         tag = tags[0]
-        return yield if tag.nil? || tag ==
+        return yield if tag.nil? || tag == ""
+
         return tag.is_a?(Hash) ? SemanticLogger.named_tagged(tag, &block) : SemanticLogger.fast_tag(tag.to_s, &block)
       end

@@ -233,26 +236,14 @@ module SemanticLogger
       SemanticLogger.silence(new_level, &block)
     end

-    #
+    # :nodoc:
     def fast_tag(tag, &block)
       SemanticLogger.fast_tag(tag, &block)
     end

-    # :nodoc:
-    def with_payload(payload, &block)
-      warn '#with_payload is deprecated, use SemanticLogger.named_tagged'
-      SemanticLogger.named_tagged(payload, &block)
-    end
-
-    # :nodoc:
-    def payload
-      warn '#payload is deprecated, use SemanticLogger.named_tags'
-      SemanticLogger.named_tags
-    end
-
     # Write log data to underlying data storage
     def log(_log_)
-      raise NotImplementedError,
+      raise NotImplementedError, "Logging Appender must implement #log(log)"
     end

     # Whether this log entry meets the criteria to be logged by this appender.
@@ -274,14 +265,22 @@ module SemanticLogger
    #     For example if set to :warn, this appender would only log :warn and :fatal
    #     log messages when other appenders could be logging :info and lower
    #
-    #   filter [Regexp|Proc]
+    #   filter [Regexp|Proc|Module]
    #     RegExp: Only include log messages where the class name matches the supplied
    #     regular expression. All other messages will be ignored
    #     Proc: Only include log messages where the supplied Proc returns true
    #     The Proc must return true or false
+    #     Module: A module that implements `.call`. For example:
+    #       module ComplexFilter
+    #         def self.call(log)
+    #           (/\AExclude/ =~ log.message).nil?
+    #         end
+    #       end
     def initialize(klass, level = nil, filter = nil)
-      # Support filtering all messages to this logger
-
+      # Support filtering all messages to this logger instance.
+      unless filter.nil? || filter.is_a?(Regexp) || filter.is_a?(Proc) || filter.respond_to?(:call)
+        raise ":filter must be a Regexp, Proc, or implement :call"
+      end

       @filter = filter.is_a?(Regexp) ? filter.freeze : filter
       @name   = klass.is_a?(String) ? klass : klass.name
@@ -314,20 +313,42 @@ module SemanticLogger
     end

     # Log message at the specified level
-    def log_internal(level, index, message = nil, payload = nil, exception = nil
-
+    def log_internal(level, index, message = nil, payload = nil, exception = nil)
+      # Handle variable number of arguments by detecting exception object and payload hash.
+      if exception.nil? && payload.nil? && message.respond_to?(:backtrace) && message.respond_to?(:message)
+        exception = message
+        message   = nil
+      elsif exception.nil? && payload && payload.respond_to?(:backtrace) && payload.respond_to?(:message)
+        exception = payload
+        payload   = nil
+      elsif payload && !payload.is_a?(Hash)
+        message = message.nil? ? payload : "#{message} -- #{payload}"
+        payload = nil
+      end
+
+      log = Log.new(name, level, index)
       should_log =
-        if
-          #
-
-
-
-
-        end
+        if exception.nil? && payload.nil? && message.is_a?(Hash)
+          # All arguments as a hash in the message.
+          log.assign(**log.extract_arguments(message))
+        elsif exception.nil? && message && payload && payload.is_a?(Hash)
+          # Message supplied along with a hash with the remaining arguments.
+          log.assign(**log.extract_arguments(payload, message))
         else
-
+          # All fields supplied directly.
+          log.assign(message: message, payload: payload, exception: exception)
         end

+      # Add result of block to message or payload if not nil
+      if block_given?
+        result = yield(log)
+        if result.is_a?(String)
+          log.message = log.message.nil? ? result : "#{log.message} -- #{result}"
+        elsif result.is_a?(Hash)
+          log.assign_hash(result)
+        end
+      end
+
       # Log level may change during assign due to :on_exception_level
       self.log(log) if should_log && should_log?(log)
     end
@@ -353,18 +374,18 @@ module SemanticLogger
             yield(params)
           end
         end
-      rescue Exception =>
-        exception =
+      rescue Exception => e
+        exception = e
       ensure
         # Must use ensure block otherwise a `return` in the yield above will skip the log entry
-        log
+        log         = Log.new(name, level, index)
         exception ||= params[:exception]
         message     = params[:message] if params[:message]
         duration    =
           if block_given?
             1_000.0 * (Process.clock_gettime(Process::CLOCK_MONOTONIC) - start)
           else
-            params[:duration] || raise(
+            params[:duration] || raise("Mandatory block missing when :duration option is not supplied")
           end

         # Extract options after block completes so that block can modify any of the options
@@ -386,6 +407,7 @@ module SemanticLogger
         # Log level may change during assign due to :on_exception_level
         self.log(log) if should_log && should_log?(log)
         raise exception if exception
+
         result
       end
     end
@@ -404,8 +426,8 @@ module SemanticLogger
       start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
       begin
         yield
-      rescue Exception =>
-        exception =
+      rescue Exception => e
+        exception = e
       ensure
         log = Log.new(name, level, index)
         # May return false due to elastic logging
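A brief usage sketch of the `tagged` behaviour changed above, following the doc comment in the diff (the logger name and tag values are illustrative):

```ruby
logger = SemanticLogger["MyClass"]

# Multiple tags apply for the duration of the block.
logger.tagged("first", "more", "other") do
  logger.info("tagged message")
end

# A single Hash argument is treated as named tags.
logger.tagged(user_id: 42) do
  logger.info("named-tagged message")
end
```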
data/lib/semantic_logger/formatters/base.rb
CHANGED
@@ -1,12 +1,12 @@
-require
+require "time"
 module SemanticLogger
   module Formatters
     class Base
-      attr_accessor :time_format, :log_host, :log_application, :precision
+      attr_accessor :log, :logger, :time_format, :log_host, :log_application, :log_environment, :precision

       # Time precision varies by Ruby interpreter
       # JRuby 9.1.8.0 supports microseconds
-      PRECISION
+      PRECISION =
         if defined?(JRuby)
           if JRUBY_VERSION.to_f >= 9.1
             maint = JRUBY_VERSION.match(/\A\d+\.\d+\.(\d+)\./)[1].to_i
@@ -34,11 +34,15 @@ module SemanticLogger
      #   precision: [Integer]
      #     How many fractional digits to log times with.
      #     Default: PRECISION (6, except on older JRuby, where 3)
-      def initialize(time_format: nil,
+      def initialize(time_format: nil,
+                     log_host: true,
+                     log_application: true,
+                     log_environment: true,
                      precision: PRECISION)
        @time_format     = time_format || self.class.build_time_format(precision)
        @log_host        = log_host
        @log_application = log_application
+        @log_environment = log_environment
         @precision       = precision
       end
@@ -48,7 +52,7 @@ module SemanticLogger
      #   precision: [Integer]
      #     How many fractional digits to log times with.
      #     Default: PRECISION (6, except on older JRuby, where 3)
-      def self.build_time_format(precision=PRECISION)
+      def self.build_time_format(precision = PRECISION)
         "%Y-%m-%d %H:%M:%S.%#{precision}N"
       end
@@ -57,10 +61,16 @@ module SemanticLogger
         format_time(log.time) if time_format
       end

+      # Process ID
+      def pid
+        $$
+      end
+
       private

       # Return the Time as a formatted string
       def format_time(time)
+        time = time.dup
         case time_format
         when :rfc_3339
           time.utc.to_datetime.rfc3339
@@ -73,7 +83,7 @@ module SemanticLogger
         when :seconds
           time.to_f
         when nil
-
+          ""
         else
           time.strftime(time_format)
         end
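A hedged sketch of the new `log_environment` option alongside the existing `log_host`/`log_application` keywords. It assumes the concrete formatters forward these keywords to `Base#initialize` (the Color formatter's switch to `**args` in the next file suggests this pattern), and uses the `:rfc_3339` time format handled in `Base#format_time`:

```ruby
require "semantic_logger"

# Structured formatter without host/application/environment fields.
formatter = SemanticLogger::Formatters::Json.new(
  log_host:        false,
  log_application: false,
  log_environment: false,
  time_format:     :rfc_3339
)

SemanticLogger.add_appender(file_name: "application.json.log", formatter: formatter)
```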
data/lib/semantic_logger/formatters/color.rb
CHANGED
@@ -1,8 +1,12 @@
-# Load
+# Load Amazing Print, or Awesome Print if available
 begin
-  require
+  require "amazing_print"
 rescue LoadError
-
+  begin
+    require "awesome_print"
+  rescue LoadError
+    nil
+  end
 end

 module SemanticLogger
@@ -61,24 +65,19 @@ module SemanticLogger
      #
      # Parameters:
      #  ap: [Hash]
-      #    Any valid
+      #    Any valid Amazing Print option for rendering data.
      #    These options can also be changed be creating a `~/.aprc` file.
-      #    See: https://github.com/
+      #    See: https://github.com/amazing-print/amazing_print
      #
      #    Note: The option :multiline is set to false if not supplied.
      #    Note: Has no effect if Awesome Print is not installed.
      #
      #  color_map: [Hash | SemanticLogger::Formatters::Color::ColorMap]
      #    ColorMaps each of the log levels to a color
-      def initialize(ap: {multiline: false},
-                     color_map: ColorMap.new,
-                     time_format: nil,
-                     log_host: false,
-                     log_application: false,
-                     precision: PRECISION)
+      def initialize(ap: {multiline: false}, color_map: ColorMap.new, **args)
        @ai_options = ap
        @color_map  = color_map.is_a?(ColorMap) ? color_map : ColorMap.new(color_map)
-        super(
+        super(**args)
       end

       def level
@@ -110,14 +109,14 @@ module SemanticLogger
       def payload
         return unless log.payload?

-        if
-          super
-        else
+        if log.payload.respond_to?(:ai)
           begin
             "-- #{log.payload.ai(@ai_options)}"
           rescue StandardError
             super
           end
+        else
+          super
         end
       end
data/lib/semantic_logger/formatters/default.rb
CHANGED
@@ -2,8 +2,6 @@ module SemanticLogger
   module Formatters
     # Default non-colored text log output
     class Default < Base
-      attr_accessor :log, :logger
-
       # Formatting methods, must return nil, or a string
       # Nil values are ignored

@@ -12,9 +10,24 @@ module SemanticLogger
         log.level_to_s
       end

-      #
+      # Name of the thread that logged the message.
+      def thread_name
+        format("%.30s", log.thread_name)
+      end
+
+      # Ruby file name and line number that logged the message.
+      def file_name_and_line
+        file, line = log.file_name_and_line(true)
+        "#{file}:#{line}" if file
+      end
+
+      # Returns [String] the available process info
+      # Example:
+      #    [18934:thread_name test_logging.rb:51]
       def process_info
-        "
+        process_id = "#{pid}:" if pid
+        fname      = file_name_and_line
+        fname ? "[#{process_id}#{thread_name} #{fname}]" : "[#{process_id}#{thread_name}]"
       end

       # Tags
@@ -67,7 +80,7 @@ module SemanticLogger
         self.log    = log
         self.logger = logger

-        [time, level, process_info, tags, named_tags, duration, name, message, payload, exception].compact.join(
+        [time, level, process_info, tags, named_tags, duration, name, message, payload, exception].compact.join(" ")
       end
     end
   end
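For reference, the rebuilt `process_info` yields either `[pid:thread_name file:line]` when the caller's file and line were captured, or just `[pid:thread_name]` otherwise; the values below come from the example comment in the hunk above.

```
[18934:thread_name test_logging.rb:51]
[18934:thread_name]
```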