semantic_logger 4.16.1 → 4.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +1 -0
- data/lib/semantic_logger/appender/async.rb +0 -1
- data/lib/semantic_logger/appender/async_batch.rb +1 -2
- data/lib/semantic_logger/appender/bugsnag.rb +3 -2
- data/lib/semantic_logger/appender/cloudwatch_logs.rb +150 -0
- data/lib/semantic_logger/appender/elasticsearch.rb +0 -1
- data/lib/semantic_logger/appender/elasticsearch_http.rb +0 -1
- data/lib/semantic_logger/appender/file.rb +6 -2
- data/lib/semantic_logger/appender/graylog.rb +2 -2
- data/lib/semantic_logger/appender/honeybadger.rb +1 -1
- data/lib/semantic_logger/appender/honeybadger_insights.rb +1 -1
- data/lib/semantic_logger/appender/http.rb +0 -1
- data/lib/semantic_logger/appender/kafka.rb +2 -2
- data/lib/semantic_logger/appender/loki.rb +62 -0
- data/lib/semantic_logger/appender/mongodb.rb +2 -2
- data/lib/semantic_logger/appender/new_relic.rb +3 -2
- data/lib/semantic_logger/appender/new_relic_logs.rb +16 -5
- data/lib/semantic_logger/appender/open_telemetry.rb +83 -0
- data/lib/semantic_logger/appender/sentry.rb +3 -2
- data/lib/semantic_logger/appender/sentry_ruby.rb +1 -1
- data/lib/semantic_logger/appender/splunk_http.rb +0 -1
- data/lib/semantic_logger/appender/syslog.rb +1 -2
- data/lib/semantic_logger/appender/tcp.rb +2 -1
- data/lib/semantic_logger/appender.rb +3 -0
- data/lib/semantic_logger/base.rb +7 -13
- data/lib/semantic_logger/formatters/base.rb +2 -1
- data/lib/semantic_logger/formatters/color.rb +1 -1
- data/lib/semantic_logger/formatters/fluentd.rb +1 -1
- data/lib/semantic_logger/formatters/json.rb +2 -2
- data/lib/semantic_logger/formatters/logfmt.rb +6 -6
- data/lib/semantic_logger/formatters/loki.rb +157 -0
- data/lib/semantic_logger/formatters/new_relic_logs.rb +74 -57
- data/lib/semantic_logger/formatters/open_telemetry.rb +40 -0
- data/lib/semantic_logger/formatters/signalfx.rb +0 -1
- data/lib/semantic_logger/formatters/syslog.rb +3 -2
- data/lib/semantic_logger/formatters/syslog_cee.rb +3 -2
- data/lib/semantic_logger/formatters.rb +14 -12
- data/lib/semantic_logger/log.rb +5 -5
- data/lib/semantic_logger/loggable.rb +1 -1
- data/lib/semantic_logger/logger.rb +1 -1
- data/lib/semantic_logger/metric/new_relic.rb +2 -1
- data/lib/semantic_logger/metric/signalfx.rb +0 -1
- data/lib/semantic_logger/reporters/minitest.rb +6 -3
- data/lib/semantic_logger/semantic_logger.rb +1 -1
- data/lib/semantic_logger/subscriber.rb +3 -2
- data/lib/semantic_logger/sync_processor.rb +4 -4
- data/lib/semantic_logger/test/capture_log_events.rb +1 -1
- data/lib/semantic_logger/test/minitest.rb +8 -6
- data/lib/semantic_logger/utils.rb +2 -1
- data/lib/semantic_logger/version.rb +1 -1
- metadata +10 -10
data/lib/semantic_logger/base.rb
CHANGED
@@ -130,7 +130,6 @@ module SemanticLogger
                      payload: nil,
                      metric: nil,
                      metric_amount: nil)
-
       log = Log.new(name, level)
       return false unless meets_log_level?(log)
 
@@ -357,7 +356,6 @@ module SemanticLogger
     # Measure the supplied block and log the message
     def measure_internal(level, index, message, params)
       exception = nil
-      result = nil
       # Single parameter is a hash
       if params.empty? && message.is_a?(Hash)
         params = message
@@ -366,14 +364,13 @@ module SemanticLogger
       start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
       begin
         if block_given?
-          result =
-            if (silence_level = params[:silence])
-              # In case someone accidentally sets `silence: true` instead of `silence: :error`
-              silence_level = :error if silence_level == true
-              silence(silence_level) { yield(params) }
-            else
-              yield(params)
-            end
+          if (silence_level = params[:silence])
+            # In case someone accidentally sets `silence: true` instead of `silence: :error`
+            silence_level = :error if silence_level == true
+            silence(silence_level) { yield(params) }
+          else
+            yield(params)
+          end
         end
       rescue Exception => e
         exception = e
@@ -408,8 +405,6 @@ module SemanticLogger
       # Log level may change during assign due to :on_exception_level
       self.log(log) if should_log && should_log?(log)
       raise exception if exception
-
-      result
     end
   end
 
@@ -421,7 +416,6 @@ module SemanticLogger
                            metric:,
                            log_exception:,
                            on_exception_level:)
-
       # Ignores filter, silence, payload
       exception = nil
       start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
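The `silence:` handling shown above lets a measure block temporarily suppress everything logged inside it below the given level, with `silence: true` normalized to `silence: :error`. A minimal usage sketch (appender, levels and messages are illustrative, not taken from this diff):

```ruby
require "semantic_logger"

SemanticLogger.default_level = :trace
SemanticLogger.add_appender(io: $stdout)

logger = SemanticLogger["Importer"]

# Noise below :error emitted inside the measured block is silenced;
# `silence: true` would be treated as `silence: :error` per the change above.
logger.measure_info("Imported users", silence: :error) do
  logger.debug("per-row detail that will not be logged")
end

SemanticLogger.flush
```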
data/lib/semantic_logger/formatters/base.rb
CHANGED
@@ -23,6 +23,7 @@ module SemanticLogger
     #                See Time#strftime for the format of this string.
     #                :iso_8601 Outputs an ISO8601 Formatted timestamp.
     #                :ms Output in miliseconds since epoch.
+    #                :notime Returns an empty string for time ( no time is output ).
     #                Default: '%Y-%m-%d %H:%M:%S.%<precision>N'
     #   log_host: [Boolean]
     #     Whether or not to include hostname in logs
@@ -81,7 +82,7 @@ module SemanticLogger
           time
         when :seconds
           time.to_f
-        when
+        when :notime
           ""
         else
           time.strftime(time_format)
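The new `:notime` option suppresses the timestamp entirely. A small sketch of passing it through a formatter (the appender and formatter choice here are illustrative):

```ruby
require "semantic_logger"

# Emit log lines without a timestamp column; the formatter returns "" for time.
SemanticLogger.add_appender(
  io:        $stdout,
  formatter: {default: {time_format: :notime}}
)

SemanticLogger["App"].info("no timestamp on this line")
```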
data/lib/semantic_logger/formatters/json.rb
CHANGED
@@ -4,12 +4,12 @@ module SemanticLogger
     class Json < Raw
       # Default JSON time format is ISO8601
       def initialize(time_format: :iso_8601, time_key: :timestamp, **args)
-        super
+        super
       end
 
       # Returns log messages in JSON format
       def call(log, logger)
-        super
+        super.to_json
       end
     end
   end
data/lib/semantic_logger/formatters/logfmt.rb
CHANGED
@@ -19,11 +19,11 @@ module SemanticLogger
     # Futher Reading https://brandur.org/logfmt
     class Logfmt < Raw
       def initialize(time_format: :iso_8601, time_key: :timestamp, **args)
-        super
+        super
       end
 
       def call(log, logger)
-        @raw = super
+        @raw = super
 
         raw_to_logfmt
       end
@@ -40,11 +40,11 @@ module SemanticLogger
       end
 
       def handle_tags
-        tags = @raw.fetch(:tags){ [] }
-          .each_with_object({}){ |tag, accum| accum[tag] = true }
+        tags = @raw.fetch(:tags) { [] }.
+          each_with_object({}) { |tag, accum| accum[tag] = true }
 
-        @parsed = @parsed.merge(tags)
-          .merge(@raw.fetch(:named_tags){ {} })
+        @parsed = @parsed.merge(tags).
+          merge(@raw.fetch(:named_tags) { {} })
       end
 
       def handle_payload
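With the change above, each tag becomes a `tag=true` pair and named tags are merged into the top level of the logfmt line. A hedged sketch (names, values and the exact key order are illustrative):

```ruby
require "semantic_logger"

SemanticLogger.add_appender(io: $stdout, formatter: :logfmt)
logger = SemanticLogger["Checkout"]

SemanticLogger.tagged("api") do
  SemanticLogger.named_tagged(request_id: "r-123") do
    # Produces roughly:
    #   timestamp="..." level="info" name="Checkout" message="paid" api=true request_id="r-123"
    logger.info("paid")
  end
end
```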
data/lib/semantic_logger/formatters/loki.rb
ADDED
@@ -0,0 +1,157 @@
+require "json"
+
+module SemanticLogger
+  module Formatters
+    class Loki < Base
+      attr_accessor :stream, :payload_value
+
+      # Returns [String] a single JSON log
+      def call(log, logger)
+        self.logger = logger
+        self.log    = log
+
+        {streams: [build_stream]}.to_json
+      end
+
+      # Returns [String] a JSON batch of logs
+      def batch(logs, logger)
+        self.logger = logger
+
+        streams = logs.map do |log|
+          self.log = log
+          build_stream
+        end
+
+        {streams: streams}.to_json
+      end
+
+      private
+
+      def build_stream
+        self.stream = {stream: {pid: pid}, values: [[]]}
+
+        application
+        environment
+        host
+        level
+        thread
+        tags
+        named_tags
+        context
+        time
+        message
+        payload
+        metric
+        duration
+        exception
+
+        stream[:values][0] << payload_value
+        stream
+      end
+
+      def host
+        stream[:stream][:host] = logger.host if log_host && logger.host.to_s
+      end
+
+      def application
+        stream[:stream][:application] = logger.application if log_application && logger&.application
+      end
+
+      def environment
+        stream[:stream][:environment] = logger.environment if log_environment && logger&.environment
+      end
+
+      def level
+        stream[:stream][:level] = log.level
+      end
+
+      def thread
+        stream[:stream][:thread] = log.thread_name if log.thread_name
+      end
+
+      def tags
+        stream[:stream][:tags] = log.tags if log.tags.respond_to?(:empty?) && !log.tags.empty?
+      end
+
+      def named_tags
+        stream[:stream].merge!(log.named_tags) if log.named_tags.respond_to?(:empty?) && !log.named_tags.empty?
+      end
+
+      def context
+        return unless log.context && !log.context.empty?
+
+        log.context.each do |key, value|
+          serialized_value = if value.is_a?(Hash)
+                               value.to_json
+                             else
+                               value.to_s
+                             end
+
+          stream[:stream].merge!(key.to_s => serialized_value)
+        end
+      end
+
+      def time
+        stream[:values][0] << format_time(log)
+      end
+
+      def message
+        stream[:values][0] << (log.message ? log.cleansed_message : "")
+      end
+
+      def format_time(log)
+        log.time.strftime("%s%N")
+      end
+
+      def payload
+        self.payload_value = if log.payload.respond_to?(:empty?) && !log.payload.empty?
+                               # Loki only accepts strings as key and values
+                               stringify_hash(log.payload)
+                             else
+                               {}
+                             end
+      end
+
+      def metric
+        return unless log.metric
+
+        payload_value[:metric] = log.metric
+        payload_value[:metric_value] = log.metric_amount
+      end
+
+      def duration
+        return unless log.duration
+
+        payload_value[:duration] = log.duration.to_s
+        payload_value[:duration_human] = log.duration_human
+      end
+
+      def exception
+        return unless log.exception
+
+        payload_value.merge!(
+          exception_name: log.exception.class.name,
+          exception_message: log.exception.message,
+          stack_trace: log.exception.backtrace.to_s
+        )
+      end
+
+      def stringify_hash(hash)
+        result = {}
+
+        hash.each do |key, value|
+          string_key = key.to_s
+
+          result[string_key] = case value
+                               when Hash
+                                 JSON.generate(stringify_hash(value))
+                               else
+                                 value.to_s
+                               end
+        end
+
+        result
+      end
+    end
+  end
+end
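A minimal sketch of driving the new formatter directly; the logger-info struct mirrors what `Log#to_h` builds, and in practice the `:loki` appender added in this release would be what calls it (values in the comment are illustrative):

```ruby
require "semantic_logger"

formatter = SemanticLogger::Formatters::Loki.new

log         = SemanticLogger::Log.new("Example", :info)
log.message = "user created"
log.payload = {user_id: 42}

logger_info = Struct.new(:host, :application, :environment)
                    .new(SemanticLogger.host, SemanticLogger.application, SemanticLogger.environment)

# Roughly:
#   {"streams":[{"stream":{"pid":123,"application":"Semantic Logger","host":"...","level":"info"},
#                "values":[["<epoch ns>","user created",{"user_id":"42"}]]}]}
puts formatter.call(log, logger_info)
```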
data/lib/semantic_logger/formatters/new_relic_logs.rb
CHANGED
@@ -3,38 +3,28 @@ require "json"
 begin
   require "newrelic_rpm"
 rescue LoadError
-  raise LoadError,
+  raise LoadError,
+        'Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.'
 end
 
-raise "NewRelic::Agent.linking_metadata is not defined. Please update newrelic_rpm gem version" unless NewRelic::Agent.respond_to?(:linking_metadata)
+unless NewRelic::Agent.respond_to?(:linking_metadata)
+  raise "NewRelic::Agent.linking_metadata is not defined. Please update newrelic_rpm gem version"
+end
 
-raise "NewRelic::Agent::Tracer.current_span_id is not defined. Please update newrelic_rpm gem version" unless NewRelic::Agent::Tracer.respond_to?(:current_span_id)
+unless NewRelic::Agent::Tracer.respond_to?(:current_span_id)
+  raise "NewRelic::Agent::Tracer.current_span_id is not defined. Please update newrelic_rpm gem version"
+end
 
-raise "NewRelic::Agent::Tracer.current_trace_id is not defined. Please update newrelic_rpm gem version" unless NewRelic::Agent::Tracer.respond_to?(:current_trace_id)
+unless NewRelic::Agent::Tracer.respond_to?(:current_trace_id)
+  raise "NewRelic::Agent::Tracer.current_trace_id is not defined. Please update newrelic_rpm gem version"
+end
 
 module SemanticLogger
   module Formatters
     # Formatter for reporting to NewRelic's Logger
     #
-    # New Relic
-    #
-    #
-    # In particular the following fields of the log object are serialized under the +message+
-    # key that's sent to NewRelic:
-    #
-    # * message
-    # * tags
-    # * named_tags
-    # * payload
-    # * metric
-    # * metric_amount
-    # * environment
-    # * application
-    #
-    # == New Relic Attributes not Supported
-    # * thread.id
-    # * class.name
-    # * method.name
+    # New Relic gracefully handles (and flattens) any JSON-based logs
+    # We construct the JSON and pass it to New Relic for further processing.
     #
     # == Reference
     # * Logging specification
@@ -54,41 +44,70 @@ module SemanticLogger
      end
 
      def call(log, logger)
-        hash = super
-
-        message = {
-          message: hash[:message].to_s,
-          tags: hash[:tags] || [],
-          named_tags: hash[:named_tags] || {},
-
-          **hash.slice(:metric, :metric_amount, :environment, :application, :payload)
-        }
-
-        message.merge!(duration: hash[:duration_ms]) if hash.key?(:duration_ms)
-        message.merge!(duration_human: hash[:duration]) if hash.key?(:duration)
+        hash = super
 
        result = {
-          **
-          message: message.
+          **newrelic_metadata,
+          message: hash[:message].to_s,
+          tags: hash[:tags],
+          metric: hash[:metric],
+          metric_amount: hash[:metric_amount],
+          environment: hash[:environment],
+          application: hash[:application],
+          payload: hash[:payload],
          timestamp: hash[:timestamp].to_i,
-
-
-
-
+          logger: {
+            name: log.name
+          },
+          thread: {
+            name: log.thread_name.to_s
+          }
+        }.compact
+
+        if hash[:duration_ms] || hash[:duration]
+          result[:duration] = {
+            ms: hash[:duration_ms],
+            human: hash[:duration]
+          }.compact
+        end
 
        if hash[:exception]
-          result
-
-
-
-
+          result[:error] = {
+            message: hash[:exception][:message],
+            class: hash[:exception][:name],
+            stack: hash[:exception][:stack_trace].join("\n")
+          }
        end
 
        if hash[:file]
-          result
-
-
-
+          result[:file] = {
+            name: hash[:file]
+          }
+        end
+
+        if hash[:line]
+          result[:line] = {
+            number: hash[:line].to_s
+          }
+        end
+
+        # NOTE: Any named tags are merged directly into the result
+        #       unless there are conflicts with other keys. In that
+        #       case we clearly log this in the NR log entry so it can
+        #       be easily alerted on.
+        if hash[:named_tags].is_a?(Hash)
+          result_keys = result.keys.to_set
+          named_tag_conflicts = []
+
+          hash[:named_tags].each do |key, value|
+            if result_keys.include?(key)
+              named_tag_conflicts << key
+            else
+              result[key] = value
+            end
+          end
+
+          result[:named_tag_conflicts] = named_tag_conflicts unless named_tag_conflicts.empty?
        end
 
        result
@@ -96,13 +115,11 @@ module SemanticLogger
 
      private
 
-
-
-
-
-
-        }.reject { |_k, v| v.nil? }.
-          map { |k, v| [k.to_sym, v] }.to_h
+      # NOTE: This function will already include trace.id and span.id if they
+      #       are available so I believe the previous implementation of this is redundant
+      #       https://rubydoc.info/gems/newrelic_rpm/NewRelic/Agent#linking_metadata-instance_method
+      def newrelic_metadata
+        NewRelic::Agent.linking_metadata.transform_keys(&:to_sym)
      end
    end
  end
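After this rewrite a log entry reaches New Relic as one JSON document whose nesting New Relic flattens (for example into `logger.name` and `error.class`). An illustrative sketch of the shape `call` returns for an error log; values are made up, and keys from `NewRelic::Agent.linking_metadata` such as `trace.id`/`span.id` are merged in when present:

```ruby
example = {
  message:     "Divide failed",
  tags:        [],
  environment: "production",
  application: "my_app",
  timestamp:   1712345678,
  logger:      {name: "PaymentService"},
  thread:      {name: "puma worker 1"},
  duration:    {ms: 12.3, human: "12.3ms"},
  error:       {message: "divided by 0", class: "ZeroDivisionError", stack: "..."},
  file:        {name: "app/services/payment_service.rb"},
  line:        {number: "42"}
  # plus any named tags that do not collide with the keys above
}
```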
data/lib/semantic_logger/formatters/open_telemetry.rb
ADDED
@@ -0,0 +1,40 @@
+require "json"
+module SemanticLogger
+  module Formatters
+    class OpenTelemetry < Raw
+      # Log level
+      def level
+        hash[:level]       = log.level.to_s
+        hash[:level_index] = severity_number(log.level_index)
+      end
+
+      # Payload is submitted directly as attributes
+      def payload
+        return unless log.payload.respond_to?(:empty?) && !log.payload.empty?
+
+        hash[:payload] = log.payload.transform_keys!(&:to_s)
+      end
+
+      private
+
+      def severity_number(severity)
+        case severity
+        when :trace
+          ::OpenTelemetry::Logs::SeverityNumber::SEVERITY_NUMBER_TRACE
+        when :debug
+          ::OpenTelemetry::Logs::SeverityNumber::SEVERITY_NUMBER_DEBUG
+        when :info
+          ::OpenTelemetry::Logs::SeverityNumber::SEVERITY_NUMBER_INFO
+        when :warn
+          ::OpenTelemetry::Logs::SeverityNumber::SEVERITY_NUMBER_WARN
+        when :error
+          ::OpenTelemetry::Logs::SeverityNumber::SEVERITY_NUMBER_ERROR
+        when :fatal
+          ::OpenTelemetry::Logs::SeverityNumber::SEVERITY_NUMBER_FATAL
+        else
+          ::OpenTelemetry::Logs::SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED
+        end
+      end
+    end
+  end
+end
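A minimal sketch of what this formatter adds on top of `Raw`. It assumes a gem that defines `::OpenTelemetry::Logs::SeverityNumber` is already loaded (named here as `opentelemetry-logs-api`, which is an assumption), and the logger-info struct is illustrative:

```ruby
require "opentelemetry-logs-api" # assumption: provides ::OpenTelemetry::Logs::SeverityNumber
require "semantic_logger"

formatter = SemanticLogger::Formatters::OpenTelemetry.new

log         = SemanticLogger::Log.new("Example", :warn)
log.message = "disk almost full"
log.payload = {disk: "/dev/sda1", free_pct: 4}

logger_info = Struct.new(:host, :application, :environment)
                    .new(SemanticLogger.host, SemanticLogger.application, SemanticLogger.environment)

hash = formatter.call(log, logger_info)
hash[:level]       # => "warn"
hash[:level_index] # => ::OpenTelemetry::Logs::SeverityNumber::SEVERITY_NUMBER_WARN
hash[:payload]     # => {"disk" => "/dev/sda1", "free_pct" => 4}
```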
data/lib/semantic_logger/formatters/syslog.rb
CHANGED
@@ -25,7 +25,8 @@ module SemanticLogger
     class LevelMap
       attr_accessor :trace, :debug, :info, :warn, :error, :fatal
 
-      def initialize(trace: ::Syslog::LOG_DEBUG, debug: ::Syslog::LOG_INFO, info: ::Syslog::LOG_NOTICE,
+      def initialize(trace: ::Syslog::LOG_DEBUG, debug: ::Syslog::LOG_INFO, info: ::Syslog::LOG_NOTICE,
+                     warn: ::Syslog::LOG_WARNING, error: ::Syslog::LOG_ERR, fatal: ::Syslog::LOG_CRIT)
         @trace = trace
         @debug = debug
         @info = info
@@ -64,7 +65,7 @@ module SemanticLogger
     end
 
     def call(log, logger)
-      message = super
+      message = super
       create_syslog_packet(message)
     end
 
data/lib/semantic_logger/formatters/syslog_cee.rb
CHANGED
@@ -24,7 +24,8 @@ module SemanticLogger
     # Example:
     #   # Log via udp to a remote syslog server on host: `server1` and port `8514`, using the CEE format.
     #   SemanticLogger.add_appender(appender: :syslog, formatter: :syslog_cee, url: 'udp://server1:8514')
-    def initialize(facility: ::Syslog::LOG_USER, level_map: SemanticLogger::Formatters::Syslog::LevelMap.new,
+    def initialize(facility: ::Syslog::LOG_USER, level_map: SemanticLogger::Formatters::Syslog::LevelMap.new,
+                   max_size: Integer)
      @facility = facility
      @level_map = level_map.is_a?(SemanticLogger::Formatters::Syslog::LevelMap) ? level_map : SemanticLogger::Formatters::Syslog::LevelMap.new(level_map)
      @max_size = max_size
@@ -36,7 +37,7 @@ module SemanticLogger
     end
 
     def call(log, logger)
-      hash = super
+      hash = super
       create_syslog_packet("@cee: #{hash.to_json}")
     end
 
data/lib/semantic_logger/formatters.rb
CHANGED
@@ -1,17 +1,19 @@
 module SemanticLogger
   module Formatters
-    autoload :Base,
-    autoload :Color,
-    autoload :Default,
-    autoload :Json,
-    autoload :Raw,
-    autoload :OneLine,
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
-    autoload :
+    autoload :Base, "semantic_logger/formatters/base"
+    autoload :Color, "semantic_logger/formatters/color"
+    autoload :Default, "semantic_logger/formatters/default"
+    autoload :Json, "semantic_logger/formatters/json"
+    autoload :Raw, "semantic_logger/formatters/raw"
+    autoload :OneLine, "semantic_logger/formatters/one_line"
+    autoload :OpenTelemetry, "semantic_logger/formatters/open_telemetry"
+    autoload :Signalfx, "semantic_logger/formatters/signalfx"
+    autoload :Syslog, "semantic_logger/formatters/syslog"
+    autoload :Fluentd, "semantic_logger/formatters/fluentd"
+    autoload :Logfmt, "semantic_logger/formatters/logfmt"
+    autoload :SyslogCee, "semantic_logger/formatters/syslog_cee"
+    autoload :NewRelicLogs, "semantic_logger/formatters/new_relic_logs"
+    autoload :Loki, "semantic_logger/formatters/loki"
 
     # Return formatter that responds to call.
     #
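Because the new formatters are autoloaded here, they can be referenced by symbol just like the existing ones. A hedged sketch: the HTTP appender and URL below are illustrative, and the dedicated `:loki` and `:open_telemetry` appenders added in this release may be the better fit.

```ruby
require "semantic_logger"

SemanticLogger.add_appender(
  appender:  :http,
  url:       "http://loki.example.org:3100/loki/api/v1/push", # illustrative endpoint
  formatter: :loki
)
```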
data/lib/semantic_logger/log.rb
CHANGED
@@ -85,7 +85,6 @@ module SemanticLogger
                    log_exception: :full,
                    on_exception_level: nil,
                    dimensions: nil)
-
       self.message = message
       self.payload = payload
       self.duration = duration
@@ -114,7 +113,7 @@ module SemanticLogger
 
       # Elastic logging: Log when :duration exceeds :min_duration
       # Except if there is an exception when it will always be logged
-      return false if duration && (
+      return false if duration && (duration < min_duration) && exception.nil?
 
       if backtrace
         self.backtrace = Utils.extract_backtrace(backtrace)
@@ -129,8 +128,8 @@ module SemanticLogger
     def assign_hash(hash)
       self.payload ||= {}
       hash.each_pair do |key, value|
-        if respond_to?("#{key}="
-          public_send("#{key}="
+        if respond_to?(:"#{key}=")
+          public_send(:"#{key}=", value)
         else
           payload[key] = value
         end
@@ -281,7 +280,8 @@ module SemanticLogger
       !(payload.nil? || (payload.respond_to?(:empty?) && payload.empty?))
     end
 
-    def to_h(host = SemanticLogger.host, application = SemanticLogger.application,
+    def to_h(host = SemanticLogger.host, application = SemanticLogger.application,
+             environment = SemanticLogger.environment)
       logger = Struct.new(:host, :application, :environment).new(host, application, environment)
       SemanticLogger::Formatters::Raw.new.call(self, logger)
     end
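For reference, `Log#to_h` (re-wrapped above rather than changed in behavior) takes explicit host, application, and environment overrides; a small sketch with illustrative values:

```ruby
require "semantic_logger"

log         = SemanticLogger::Log.new("Example", :info)
log.message = "hello"

# Defaults come from SemanticLogger.host / .application / .environment:
log.to_h
# Or override each explicitly:
log.to_h("web-1", "my_app", "staging")
```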
data/lib/semantic_logger/loggable.rb
CHANGED
@@ -83,7 +83,6 @@ module SemanticLogger
                             on_exception_level: nil,
                             message: "##{method_name}",
                             level: :info)
-
       # unless visibility = Utils.method_visibility(self, method_name)
       #  logger.warn("Unable to measure method: #{name}##{method_name} since it does not exist")
       #  return false
@@ -124,6 +123,7 @@ module SemanticLogger
       else
         mod = const_set(:SemanticLoggerMeasure, Module.new)
         prepend mod
+
         mod
       end
     end
data/lib/semantic_logger/logger.rb
CHANGED
@@ -57,7 +57,7 @@ module SemanticLogger
    #   Proc: Only include log messages where the supplied Proc returns true
    #         The Proc must return true or false
    def initialize(klass, level = nil, filter = nil)
-      super
+      super
    end
 
    # Place log request on the queue for the Appender thread to write to each
data/lib/semantic_logger/metric/new_relic.rb
CHANGED
@@ -1,7 +1,8 @@
 begin
   require "newrelic_rpm"
 rescue LoadError
-  raise LoadError,
+  raise LoadError,
+        'Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.'
 end
 
 # Send Metrics to NewRelic
|