semantic_logger 4.16.1 → 4.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +1 -0
- data/lib/semantic_logger/appender/async.rb +0 -1
- data/lib/semantic_logger/appender/async_batch.rb +1 -2
- data/lib/semantic_logger/appender/bugsnag.rb +3 -2
- data/lib/semantic_logger/appender/cloudwatch_logs.rb +150 -0
- data/lib/semantic_logger/appender/elasticsearch.rb +0 -1
- data/lib/semantic_logger/appender/elasticsearch_http.rb +0 -1
- data/lib/semantic_logger/appender/file.rb +6 -2
- data/lib/semantic_logger/appender/graylog.rb +2 -2
- data/lib/semantic_logger/appender/honeybadger.rb +1 -1
- data/lib/semantic_logger/appender/honeybadger_insights.rb +1 -1
- data/lib/semantic_logger/appender/http.rb +0 -1
- data/lib/semantic_logger/appender/kafka.rb +2 -2
- data/lib/semantic_logger/appender/loki.rb +62 -0
- data/lib/semantic_logger/appender/mongodb.rb +2 -2
- data/lib/semantic_logger/appender/new_relic.rb +3 -2
- data/lib/semantic_logger/appender/new_relic_logs.rb +16 -5
- data/lib/semantic_logger/appender/open_telemetry.rb +83 -0
- data/lib/semantic_logger/appender/sentry.rb +3 -2
- data/lib/semantic_logger/appender/sentry_ruby.rb +1 -1
- data/lib/semantic_logger/appender/splunk_http.rb +0 -1
- data/lib/semantic_logger/appender/syslog.rb +1 -2
- data/lib/semantic_logger/appender/tcp.rb +2 -1
- data/lib/semantic_logger/appender.rb +3 -0
- data/lib/semantic_logger/base.rb +7 -13
- data/lib/semantic_logger/formatters/base.rb +2 -1
- data/lib/semantic_logger/formatters/color.rb +1 -1
- data/lib/semantic_logger/formatters/fluentd.rb +1 -1
- data/lib/semantic_logger/formatters/json.rb +2 -2
- data/lib/semantic_logger/formatters/logfmt.rb +6 -6
- data/lib/semantic_logger/formatters/loki.rb +157 -0
- data/lib/semantic_logger/formatters/new_relic_logs.rb +74 -57
- data/lib/semantic_logger/formatters/open_telemetry.rb +40 -0
- data/lib/semantic_logger/formatters/signalfx.rb +0 -1
- data/lib/semantic_logger/formatters/syslog.rb +3 -2
- data/lib/semantic_logger/formatters/syslog_cee.rb +3 -2
- data/lib/semantic_logger/formatters.rb +14 -12
- data/lib/semantic_logger/log.rb +5 -5
- data/lib/semantic_logger/loggable.rb +1 -1
- data/lib/semantic_logger/logger.rb +1 -1
- data/lib/semantic_logger/metric/new_relic.rb +2 -1
- data/lib/semantic_logger/metric/signalfx.rb +0 -1
- data/lib/semantic_logger/reporters/minitest.rb +6 -3
- data/lib/semantic_logger/semantic_logger.rb +1 -1
- data/lib/semantic_logger/subscriber.rb +3 -2
- data/lib/semantic_logger/sync_processor.rb +4 -4
- data/lib/semantic_logger/test/capture_log_events.rb +1 -1
- data/lib/semantic_logger/test/minitest.rb +8 -6
- data/lib/semantic_logger/utils.rb +2 -1
- data/lib/semantic_logger/version.rb +1 -1
- metadata +10 -10
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a10d5e24ca0c3542ec98c53268bd56f96c3432fd7b8de39c55d0f7938f5b6d44
+  data.tar.gz: 95bdc9df3003bf78c4146847303e8d345600a587fb6fd817343f8822fc62f42b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1167fd1b55ffa9805721f598c5ee3e53f9759562ec86088c1237d6c21fc2359c58edf4e6dd81ac91016a36f470636f70ac120939b255e62c0a075a3e6a2190d4
+  data.tar.gz: 815b6f851d8f7c9ccee2f3e8185c66176bd9263becdab417b8afef70b9aa3e005e78864b135f5c06d6f3a201da3070fe1fe046aee68922c15f9be0170ae9af07
data/README.md
CHANGED
data/lib/semantic_logger/appender/async_batch.rb
CHANGED
@@ -30,7 +30,6 @@ module SemanticLogger
                      lag_threshold_s: 30,
                      batch_size: 300,
                      batch_seconds: 5)
-
       @batch_size    = batch_size
       @batch_seconds = batch_seconds
       @signal        = Concurrent::Event.new
@@ -47,7 +46,7 @@ module SemanticLogger
 
     # Add log message for processing.
     def log(log)
-      result = super
+      result = super
       # Wake up the processing thread since the number of queued messages has been exceeded.
      signal.set if queue.size >= batch_size
      result
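AsyncBatch#log (second hunk above) wakes the batch thread as soon as batch_size messages are queued. For context, a minimal sketch of how batching is typically enabled when registering an appender; the Elasticsearch URL is an illustrative placeholder, and passing batch_size/batch_seconds through add_appender is assumed from the initializer shown above:

  require "semantic_logger"

  SemanticLogger.add_appender(
    appender:      :elasticsearch,
    url:           "http://localhost:9200", # illustrative endpoint
    batch:         true,                    # wraps the appender in Appender::AsyncBatch
    batch_size:    300,                     # flush as soon as 300 messages are queued
    batch_seconds: 5                        # or every 5 seconds, whichever comes first
  )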
data/lib/semantic_logger/appender/bugsnag.rb
CHANGED
@@ -30,12 +30,13 @@ module SemanticLogger
     # Proc: Only include log messages where the supplied Proc returns true
     #       The Proc must return true or false.
     def initialize(level: :error, **args, &block)
-      raise "Bugsnag only supports :info, :warn, or :error log levels" unless %i[info warn error fatal].include?(level)
+      raise "Bugsnag only supports :info, :warn, or :error log levels" unless %i[info warn error
+                                                                                 fatal].include?(level)
 
       # Replace the Bugsnag logger so that we can identify its log messages and not forward them to Bugsnag
       ::Bugsnag.configure { |config| config.logger = SemanticLogger[Bugsnag] }
 
-      super
+      super
     end
 
     # Returns [Hash] of parameters to send to Bugsnag.
data/lib/semantic_logger/appender/cloudwatch_logs.rb
ADDED
@@ -0,0 +1,150 @@
+begin
+  require "aws-sdk-cloudwatchlogs"
+rescue LoadError
+  raise LoadError,
+        'Gem aws-sdk-cloudwatchlogs is required for logging to CloudWatch Logs. Please add the gem "aws-sdk-cloudwatchlogs" to your Gemfile.'
+end
+
+require "concurrent"
+
+# Forward all log messages to CloudWatch Logs.
+#
+# Example:
+#
+#   SemanticLogger.add_appender(
+#     appender: :cloudwatch_logs,
+#     client_kwargs: {region: "eu-west-1"},
+#     group: "/my/application",
+#     create_stream: true
+#   )
+module SemanticLogger
+  module Appender
+    class CloudwatchLogs < SemanticLogger::Subscriber
+      attr_reader :client_kwargs, :group, :create_group, :create_stream, :force_flush_interval_seconds, :max_buffered_events,
+                  :task, :client, :buffered_logs
+
+      # Create CloudWatch Logs Appender
+      #
+      # Parameters:
+      #   group: [String]
+      #     Log group name
+      #
+      #   client_kwargs: [Hash]
+      #     A hash to be passed to Aws::CloudWatchLogs::Client.new
+      #     Default: {}
+      #
+      #   stream: [String]
+      #     Log stream name
+      #     Default: SemanticLogger.host
+      #
+      #   create_group: [Boolean]
+      #     If the missing log group should be automatically created.
+      #     Default: false
+      #
+      #   create_stream: [Boolean]
+      #     If the missing log stream should be automatically created.
+      #     Default: true
+      #
+      #   force_flush_interval_seconds: [Integer]
+      #     Flush buffered logs every X seconds, regardless of the current buffer size.
+      #     Default: 5
+      #
+      #   max_buffered_events: [Integer]
+      #     Flush buffered logs if they are above the currently set size.
+      #     Note that currently CloudWatch Logs has 10000 hard limit.
+      #     Default: 4000
+      def initialize(
+        *args,
+        group:,
+        client_kwargs: {},
+        stream: nil,
+        create_group: false,
+        create_stream: true,
+        force_flush_interval_seconds: 5,
+        max_buffered_events: 4_000,
+        **kwargs,
+        &block
+      )
+        @group = group
+        @client_kwargs = client_kwargs
+        @stream = stream
+        @create_group = create_group
+        @create_stream = create_stream
+        @force_flush_interval_seconds = force_flush_interval_seconds
+        @max_buffered_events = max_buffered_events
+
+        super(*args, **kwargs, &block)
+        reopen
+      end
+
+      # Method called to log an event
+      def log(log)
+        buffered_logs << log
+
+        put_log_events if buffered_logs.size >= max_buffered_events
+      end
+
+      def flush
+        task.execute while buffered_logs.size.positive?
+      end
+
+      def close
+        task.shutdown
+      end
+
+      def reopen
+        @buffered_logs = Concurrent::Array.new
+        @client = Aws::CloudWatchLogs::Client.new(client_kwargs)
+
+        @task = Concurrent::TimerTask.new(execution_interval: force_flush_interval_seconds,
+                                          interval_type: :fixed_rate) do
+          put_log_events
+        end
+        @task.execute
+      end
+
+      # Use JSON Formatter by default
+      def default_formatter
+        SemanticLogger::Formatters::Json.new
+      end
+
+      private
+
+      def put_log_events
+        logs = buffered_logs.shift(max_buffered_events)
+
+        return if logs.none?
+
+        begin
+          client.put_log_events({
+            log_group_name: group,
+            log_stream_name: stream,
+            log_events: logs.map do |log|
+              {
+                timestamp: (log.time.to_f * 1000).floor,
+                message: formatter.call(log, self)
+              }
+            end
+          })
+        rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => e
+          if e.message.include?("log group does not exist.") && create_group
+            client.create_log_group({
+              log_group_name: group
+            })
+            retry
+          elsif e.message.include?("log stream does not exist.") && create_stream
+            client.create_log_stream({
+              log_group_name: group,
+              log_stream_name: stream
+            })
+            retry
+          end
+        end
+      end
+
+      def stream
+        @stream || host
+      end
+    end
+  end
+end
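A short usage sketch of the new appender, assuming the aws-sdk-cloudwatchlogs gem and AWS credentials are configured; the group name and region are illustrative:

  require "semantic_logger"

  SemanticLogger.add_appender(
    appender:      :cloudwatch_logs,
    group:         "/my/application",     # CloudWatch log group name
    client_kwargs: {region: "eu-west-1"}, # passed straight to Aws::CloudWatchLogs::Client.new
    create_group:  true                   # create the log group on first write if it is missing
  )

  SemanticLogger["MyApp"].info("Hello CloudWatch")

  # Events buffer in memory and are pushed every force_flush_interval_seconds
  # (default 5) or once max_buffered_events (default 4000) accumulate.
  SemanticLogger.flush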
data/lib/semantic_logger/appender/file.rb
CHANGED
@@ -122,7 +122,8 @@ module SemanticLogger
     #
     #   logger = SemanticLogger["test"]
     #   logger.info "Hello World"
-    def initialize(file_name, retry_count: 1, append: true, reopen_period: nil, reopen_count: 0, reopen_size: 0, encoding: Encoding::BINARY, exclusive_lock: false, **args, &block)
+    def initialize(file_name, retry_count: 1, append: true, reopen_period: nil, reopen_count: 0, reopen_size: 0,
+                   encoding: Encoding::BINARY, exclusive_lock: false, **args, &block)
       if !file_name.is_a?(String) || file_name.empty?
         raise(ArgumentError, "SemanticLogging::Appender::File file_name must be a non-empty string")
       end
@@ -267,7 +268,10 @@ module SemanticLogger
 
       duration = match[1]
       period   = match[2]
-
+      unless duration
+        raise(ArgumentError,
+              "Invalid or missing duration in: #{period_string}, must begin with an integer.")
+      end
       raise(ArgumentError, "Invalid or missing period in: #{period_string}, must end with m,h, or d.") unless period
 
       [duration.to_i, period]
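The new check complements the existing period validation, so both halves of a reopen_period value are now verified. A sketch of the accepted format; the file name is illustrative:

  SemanticLogger.add_appender(
    file_name:     "log/production.log",
    reopen_period: "1d"   # an integer followed by m, h, or d; "30m" and "12h" also parse
  )

  # reopen_period: "d"  now raises ArgumentError: duration must begin with an integer.
  # reopen_period: "1x" still raises ArgumentError: period must end with m, h, or d.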
data/lib/semantic_logger/appender/graylog.rb
CHANGED
@@ -25,7 +25,8 @@ module SemanticLogger
     class LevelMap
       attr_accessor :trace, :debug, :info, :warn, :error, :fatal
 
-      def initialize(trace: GELF::DEBUG, debug: GELF::DEBUG, info: GELF::INFO, warn: GELF::WARN, error: GELF::ERROR, fatal: GELF::FATAL)
+      def initialize(trace: GELF::DEBUG, debug: GELF::DEBUG, info: GELF::INFO, warn: GELF::WARN, error: GELF::ERROR,
+                     fatal: GELF::FATAL)
         @trace = trace
         @debug = debug
         @info  = info
@@ -88,7 +89,6 @@ module SemanticLogger
                      level_map: LevelMap.new,
                      **args,
                      &block)
-
       @url          = url
       @max_size     = max_size
       @gelf_options = gelf_options
data/lib/semantic_logger/appender/honeybadger.rb
CHANGED
@@ -38,7 +38,7 @@ module SemanticLogger
     #   Name of this application to appear in log messages.
     #   Default: SemanticLogger.application
     def initialize(level: :error, **args, &block)
-      super
+      super
     end
 
     # Send an error notification to honeybadger
data/lib/semantic_logger/appender/honeybadger_insights.rb
CHANGED
@@ -38,7 +38,7 @@ module SemanticLogger
     #   Name of this application to appear in log messages.
     #   Default: SemanticLogger.application
     def initialize(level: :info, **args, &block)
-      super
+      super
     end
 
     # Send log to honeybadger events API
data/lib/semantic_logger/appender/kafka.rb
CHANGED
@@ -1,7 +1,8 @@
 begin
   require "kafka"
 rescue LoadError
-  raise LoadError, 'Gem ruby-kafka is required for logging to Elasticsearch. Please add the gem "ruby-kafka" to your Gemfile.'
+  raise LoadError,
+        'Gem ruby-kafka is required for logging to Elasticsearch. Please add the gem "ruby-kafka" to your Gemfile.'
 end
 
 require "date"
@@ -127,7 +128,6 @@ module SemanticLogger
                      topic: "log_messages", partition: nil, partition_key: nil, key: nil,
                      delivery_threshold: 100, delivery_interval: 10, required_acks: 1,
                      metrics: true, **args, &block)
-
       @seed_brokers    = seed_brokers
       @client_id       = client_id
       @connect_timeout = connect_timeout
data/lib/semantic_logger/appender/loki.rb
ADDED
@@ -0,0 +1,62 @@
+# Forward application metrics to a Loki instance using HTTP push API
+#
+# Example:
+#   SemanticLogger.add_appender(
+#     appender: :loki,
+#     url: "https://logs-prod-001.grafana.net",
+#     username: "grafana_username",
+#     password: "grafana_token_here",
+#     compress: true
+#   )
+module SemanticLogger
+  module Appender
+    class Loki < SemanticLogger::Appender::Http
+      INGESTION_PATH = "loki/api/v1/push".freeze
+
+      # Create Grafana Loki appender.
+      #
+      # Parameters:
+      #   filter: [Regexp|Proc]
+      #     RegExp: Only include log messages where the class name matches the supplied
+      #             regular expression. All other messages will be ignored.
+      #     Proc: Only include log messages where the supplied Proc returns true.
+      #           The Proc must return true or false.
+      #
+      #   host: [String]
+      #     Name of this host to send as a dimension.
+      #     Default: SemanticLogger.host
+      #
+      #   application: [String]
+      #     Name of this application to send as a dimension.
+      #     Default: SemanticLogger.application
+      #
+      #   url: [String]
+      #     Define the loki instance URL.
+      #     Example: https://logs-prod-999.grafana.net
+      #     Default: nil
+      def initialize(url: nil,
+                     formatter: SemanticLogger::Formatters::Loki.new,
+                     header: {"Content-Type" => "application/json"},
+                     path: INGESTION_PATH,
+                     **args,
+                     &block)
+
+        super(url: "#{url}/#{path}", formatter: formatter, header: header, **args, &block)
+      end
+
+      def log(log)
+        message = formatter.call(log, self)
+        puts message
+        logger.trace(message)
+        post(message)
+      end
+
+      # Logs in batches
+      def batch(logs)
+        message = formatter.batch(logs, self)
+        logger.trace(message)
+        post(message)
+      end
+    end
+  end
+end
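A usage sketch mirroring the appender's own comment block; the URL and credentials are placeholders. Because the class implements #batch, it can be combined with batching so multiple log lines go out in a single push to loki/api/v1/push:

  SemanticLogger.add_appender(
    appender: :loki,
    url:      "https://logs-prod-001.grafana.net",
    username: "grafana_username",
    password: "grafana_token_here",
    batch:    true   # group log lines into one HTTP push via #batch
  )

  SemanticLogger["MyApp"].info("Shipped to Loki")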
data/lib/semantic_logger/appender/mongodb.rb
CHANGED
@@ -2,7 +2,8 @@ require "socket"
 begin
   require "mongo"
 rescue LoadError
-  raise LoadError, 'Gem mongo is required for logging to MongoDB. Please add the gem "mongo" v2.0 or greater to your Gemfile.'
+  raise LoadError,
+        'Gem mongo is required for logging to MongoDB. Please add the gem "mongo" v2.0 or greater to your Gemfile.'
 end
 
 module SemanticLogger
@@ -110,7 +111,6 @@ module SemanticLogger
                      collection_max: nil,
                      **args,
                      &block)
-
       @client          = Mongo::Client.new(uri, logger: logger)
       @collection_name = collection_name
       @options         = {
data/lib/semantic_logger/appender/new_relic.rb
CHANGED
@@ -1,7 +1,8 @@
 begin
   require "newrelic_rpm"
 rescue LoadError
-  raise LoadError, 'Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.'
+  raise LoadError,
+        'Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.'
 end
 
 # Send log messages to NewRelic
@@ -32,7 +33,7 @@ module SemanticLogger
     # Proc: Only include log messages where the supplied Proc returns true
     #       The Proc must return true or false.
     def initialize(level: :error, **args, &block)
-      super
+      super
     end
 
     # Returns [Hash] of parameters to send to New Relic.
data/lib/semantic_logger/appender/new_relic_logs.rb
CHANGED
@@ -1,7 +1,8 @@
 begin
   require "newrelic_rpm"
 rescue LoadError
-  raise LoadError, 'Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.'
+  raise LoadError,
+        'Gem newrelic_rpm is required for logging to New Relic. Please add the gem "newrelic_rpm" to your Gemfile.'
 end
 
 require "semantic_logger/formatters/new_relic_logs"
@@ -40,17 +41,27 @@ module SemanticLogger
     # Proc: Only include log messages where the supplied Proc returns true
     #       The Proc must return true or false.
     def initialize(formatter: SemanticLogger::Formatters::NewRelicLogs.new, **args, &block)
-      super
+      super
     end
 
     # Send an error notification to New Relic
     def log(log)
-      self.class.log_newrelic(formatter.call(log, self).to_json, log.level.to_s.upcase)
+      begin
+        message = formatter.call(log, self) # Generate the structured log
+        json_message = message.to_json      # Convert the log to JSON
+        level = log.level.to_s.upcase       # Determine the log level
+        self.class.log_newrelic(json_message, level)
+      rescue JSON::GeneratorError => e
+        warn("Failed to serialize log message to JSON: #{e.message}")
+        warn("Problematic data: #{message.inspect}")
+      rescue StandardError => e
+        warn("Unexpected error while logging to New Relic: #{e.message}")
+      end
       true
     end
 
-    def self.log_newrelic(
-      ::NewRelic::Agent.agent.log_event_aggregator.record(
+    def self.log_newrelic(json_message, level)
+      ::NewRelic::Agent.agent.log_event_aggregator.record(json_message, level)
     end
   end
 end
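The added rescue clauses keep serialization failures from escaping the appender. A sketch of the failure mode now handled; the payload value is illustrative:

  logger = SemanticLogger["Billing"]
  logger.info("charge processed", amount: Float::INFINITY)
  # Hash#to_json raises JSON::GeneratorError for Infinity/NaN values; the
  # appender now warns with the problematic data instead of raising.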
data/lib/semantic_logger/appender/open_telemetry.rb
ADDED
@@ -0,0 +1,83 @@
+begin
+  require "opentelemetry/logs"
+rescue LoadError
+  raise LoadError,
+        'Gem opentelemetry-logs-sdk is required for logging to Open Telemetry. Please add the gem "opentelemetry-logs-sdk" to your Gemfile.'
+end
+
+# Open Telemetry Appender
+#
+# Writes log messages, and metrics to Open Telemetry.
+#
+module SemanticLogger
+  module Appender
+    class OpenTelemetry < SemanticLogger::Subscriber
+      attr_reader :name, :version, :logger
+
+      CAPTURE_CONTEXT = ->(log) { log.set_context(:open_telemetry, ::OpenTelemetry::Context.current) }
+
+      # Create a Open Telemetry Logger appender instance.
+      #
+      # Metric only log events are sent to the Open Telemetry Metrics API instead of the Logs API.
+      # I.e. A metric without a message or an exception.
+      # To disable this default behavior set `metrics: false`
+      #
+      # Example
+      #   SemanticLogger.add_appender(appender: :open_telemetry)
+      def initialize(name: "SemanticLogger",
+                     version: SemanticLogger::VERSION,
+                     formatter: SemanticLogger::Formatters::OpenTelemetry.new,
+                     metrics: true,
+                     **args,
+                     &block)
+        @name    = name
+        @version = version
+        @logger  = ::OpenTelemetry.logger_provider.logger(name: @name, version: @version)
+
+        # Capture the current Open Telemetry context when a log entry is captured.
+        # Prevents duplicate subscribers as long as it is from a constant.
+        SemanticLogger.on_log(CAPTURE_CONTEXT)
+
+        super(formatter: formatter, metrics: metrics, **args, &block)
+      end
+
+      def log(log)
+        # return log_metric(log) if metrics && log.metric_only?
+
+        body        = formatter.call(log, self)
+        level       = body.delete(:level)
+        level_index = body.delete(:level_index)
+        time        = body.delete(:time)
+        payload     = body.delete(:payload)
+
+        @logger.on_emit(
+          severity_text:   level,
+          severity_number: level_index,
+          timestamp:       time,
+          body:            body.transform_keys!(&:to_s),
+          attributes:      payload,
+          context:         log.context[:open_telemetry] || ::OpenTelemetry::Context.current
+        )
+        true
+      end
+
+      # Flush all pending logs.
+      def flush
+        @logger.logger_provider.force_flush
+      end
+
+      # Flush pending logs and close the appender
+      def close
+        @logger.logger_provider.shutdown
+      end
+
+      # For logging metrics only log events.
+      # def log_metric(log)
+      #   puts "**** TODO: Metric Only Event ****"
+      #   ap formatter.call(log, self)
+      #   ap log.payload
+      #   true
+      # end
+    end
+  end
+end
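A usage sketch, assuming the opentelemetry-logs-sdk gem is installed and the OpenTelemetry SDK has been configured with an exporter elsewhere in the application:

  require "semantic_logger"

  SemanticLogger.add_appender(appender: :open_telemetry)

  logger = SemanticLogger["MyApp"]
  logger.info("Emitted via OpenTelemetry.logger_provider", user_id: 42)

  SemanticLogger.flush   # the appender's #flush calls logger_provider.force_flush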
data/lib/semantic_logger/appender/sentry.rb
CHANGED
@@ -1,7 +1,8 @@
 begin
   require "sentry-raven"
 rescue LoadError
-  raise LoadError, 'Gem sentry-raven is required for logging purposes. Please add the gem "sentry-raven" to your Gemfile.'
+  raise LoadError,
+        'Gem sentry-raven is required for logging purposes. Please add the gem "sentry-raven" to your Gemfile.'
 end
 
 # Send log messages to sentry
|
@@ -40,7 +41,7 @@ module SemanticLogger
|
|
40
41
|
def initialize(level: :error, **args, &block)
|
41
42
|
# Replace the Sentry Raven logger so that we can identify its log messages and not forward them to Sentry
|
42
43
|
Raven.configure { |config| config.logger = SemanticLogger[Raven] }
|
43
|
-
super
|
44
|
+
super
|
44
45
|
end
|
45
46
|
|
46
47
|
# Send an error notification to sentry
|
data/lib/semantic_logger/appender/sentry_ruby.rb
CHANGED
@@ -41,7 +41,7 @@ module SemanticLogger
       # Replace the Sentry Ruby logger so that we can identify its log
       # messages and not forward them to Sentry
       ::Sentry.init { |config| config.logger = SemanticLogger[::Sentry] } unless ::Sentry.initialized?
-      super
+      super
     end
 
     # Send an error notification to sentry
data/lib/semantic_logger/appender/syslog.rb
CHANGED
@@ -131,7 +131,6 @@ module SemanticLogger
                      tcp_client: {},
                      **args,
                      &block)
-
       @options  = options
       @facility = facility
       @max_size = max_size
@@ -215,7 +214,7 @@ module SemanticLogger
     def default_formatter
       if protocol == :syslog
         # Format is text output without the time
-        SemanticLogger::Formatters::Default.new(time_format:
+        SemanticLogger::Formatters::Default.new(time_format: :notime)
       else
         SemanticLogger::Formatters::Syslog.new(facility: facility, level_map: level_map, max_size: max_size)
       end
data/lib/semantic_logger/appender/tcp.rb
CHANGED
@@ -1,7 +1,8 @@
 begin
   require "net/tcp_client"
 rescue LoadError
-  raise LoadError, 'Gem net_tcp_client is required for logging over TCP. Please add the gem "net_tcp_client" to your Gemfile.'
+  raise LoadError,
+        'Gem net_tcp_client is required for logging over TCP. Please add the gem "net_tcp_client" to your Gemfile.'
 end
 
 raise "Net::TCPClient v2.0 or greater is required to log over TCP" unless Net::TCPClient::VERSION.to_f >= 2.0
data/lib/semantic_logger/appender.rb
CHANGED
@@ -4,6 +4,7 @@ module SemanticLogger
     autoload :Async,              "semantic_logger/appender/async"
     autoload :AsyncBatch,         "semantic_logger/appender/async_batch"
     autoload :Bugsnag,            "semantic_logger/appender/bugsnag"
+    autoload :CloudwatchLogs,     "semantic_logger/appender/cloudwatch_logs"
     autoload :Elasticsearch,      "semantic_logger/appender/elasticsearch"
     autoload :ElasticsearchHttp,  "semantic_logger/appender/elasticsearch_http"
     autoload :File,               "semantic_logger/appender/file"
@@ -17,6 +18,7 @@ module SemanticLogger
     autoload :MongoDB,            "semantic_logger/appender/mongodb"
     autoload :NewRelic,           "semantic_logger/appender/new_relic"
     autoload :NewRelicLogs,       "semantic_logger/appender/new_relic_logs"
+    autoload :OpenTelemetry,      "semantic_logger/appender/open_telemetry"
     autoload :Rabbitmq,           "semantic_logger/appender/rabbitmq"
     autoload :Splunk,             "semantic_logger/appender/splunk"
     autoload :SplunkHttp,         "semantic_logger/appender/splunk_http"
@@ -25,6 +27,7 @@ module SemanticLogger
     autoload :Udp,                "semantic_logger/appender/udp"
     autoload :Wrapper,            "semantic_logger/appender/wrapper"
     autoload :SentryRuby,         "semantic_logger/appender/sentry_ruby"
+    autoload :Loki,               "semantic_logger/appender/loki"
   # @formatter:on
 
   # Returns [SemanticLogger::Subscriber] appender for the supplied options